hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a0305e1a30583d4b5af720652c7253821328032
| 469
|
py
|
Python
|
Ar_Script/ar_171_office_读取excel案例.py
|
archerckk/PyTest
|
610dd89df8d70c096f4670ca11ed2f0ca3196ca5
|
[
"MIT"
] | null | null | null |
Ar_Script/ar_171_office_读取excel案例.py
|
archerckk/PyTest
|
610dd89df8d70c096f4670ca11ed2f0ca3196ca5
|
[
"MIT"
] | 1
|
2020-01-19T01:19:57.000Z
|
2020-01-19T01:19:57.000Z
|
Ar_Script/ar_171_office_读取excel案例.py
|
archerckk/PyTest
|
610dd89df8d70c096f4670ca11ed2f0ca3196ca5
|
[
"MIT"
] | null | null | null |
import openpyxl

# Demo: read an existing Excel workbook and exercise basic sheet operations.
# (The original used bare string literals as comments; those are no-op
# statements, so they are converted to real comments here.)
wb = openpyxl.load_workbook('result/豆瓣top250.xlsx')
ws = wb.active

# List the names of all worksheets.
# wb.get_sheet_names() is deprecated; wb.sheetnames is the replacement.
names = wb.sheetnames
print(names)

# Create worksheets:
# wb.create_sheet(index=1, title='新建工作表')
# ws = wb.create_sheet(index=2, title='新建工作表2')

# Copy the active worksheet; per openpyxl docs this appends a new sheet
# titled '<name> Copy'.
wb.copy_worksheet(ws)

# Delete worksheets (remove_sheet/get_sheet_by_name are deprecated):
# wb.remove_sheet(wb.get_sheet_by_name('新建工作表'))
# wb.remove_sheet(wb.get_sheet_by_name('新建工作表2'))
# print(wb.sheetnames)

# NOTE(review): saving back to the same file means every run permanently adds
# another copied sheet — confirm this accumulation is intended for the demo.
wb.save('result/豆瓣top250.xlsx')
| 16.172414
| 49
| 0.754797
|
4a03060adf7dad940159afc2030edbcbe84c1d86
| 109
|
py
|
Python
|
turtle1.py
|
abkumarggn/python-learning-1
|
df45396cd14f5762053728760953b3806d0069b6
|
[
"Apache-2.0"
] | null | null | null |
turtle1.py
|
abkumarggn/python-learning-1
|
df45396cd14f5762053728760953b3806d0069b6
|
[
"Apache-2.0"
] | null | null | null |
turtle1.py
|
abkumarggn/python-learning-1
|
df45396cd14f5762053728760953b3806d0069b6
|
[
"Apache-2.0"
] | null | null | null |
import turtle
# Draw a five-pointed star: turning 144 degrees after each 50-unit edge
# closes a star every 5 edges (5 * 144 = 720 degrees, two full turns), so
# 50 iterations retrace the same star 10 times.
tur=turtle.Turtle()
for i in range(50):
    tur.forward(50)
    tur.right(144)
# Enter the Tk event loop so the window stays open until closed by the user.
turtle.done()
| 12.111111
| 19
| 0.669725
|
4a03063e3e062056324eb86aafef197e28bb815b
| 822
|
py
|
Python
|
Diena_7_functions/fun_scope_g2.py
|
edzya/Python_RTU_08_20
|
d2921d998c611c18328dd523daf976a27ce858c1
|
[
"MIT"
] | 8
|
2020-08-31T16:10:54.000Z
|
2021-11-24T06:37:37.000Z
|
Diena_7_functions/fun_scope_g2.py
|
edzya/Python_RTU_08_20
|
d2921d998c611c18328dd523daf976a27ce858c1
|
[
"MIT"
] | 8
|
2021-06-08T22:30:29.000Z
|
2022-03-12T00:48:55.000Z
|
Diena_7_functions/fun_scope_g2.py
|
edzya/Python_RTU_08_20
|
d2921d998c611c18328dd523daf976a27ce858c1
|
[
"MIT"
] | 12
|
2020-09-28T17:06:52.000Z
|
2022-02-17T12:12:46.000Z
|
global_var = 500  # int (immutable): rebinding inside a function creates a new object
global_list = [1, 2, 3]  # list (mutable): in-place changes are visible to all holders
# primitive get assigned a new id upon modification
def my_fun(arg_var):
    """Return arg_var + 20; the printed ids show that augmenting an int
    rebinds the local name to a brand-new object (ints are immutable)."""
    print("INSIDE my_fun", arg_var, id(arg_var))
    arg_var = arg_var + 20
    print("After +=", arg_var, id(arg_var))
    return arg_var
# mutable data can be modified inside function and reference stays
def fun_list(arg_list):
    """Append 30 to the given list in place and return that same list.

    The printed ids are identical before and after: no new list is created.
    """
    print(arg_list, id(arg_list))
    arg_list += [30]
    print(arg_list, id(arg_list))
    return arg_list
# if we do not write return we get None
# Demonstrate the mutation: the same list id is printed before the call,
# from the return value, and after the call — fun_list modified the global
# list in place rather than producing a copy.
print(global_list, id(global_list))
res_list = fun_list(global_list)
print(res_list, id(res_list))
print(global_list, id(global_list))
# print(global_var, id(global_var))
# my_result = my_fun(global_var)
# # arg_var is gone here
# print("Result", my_result, id(my_result))
# print("Global var", global_var, id(global_var))
| 24.909091
| 66
| 0.716545
|
4a03067bc24191307dfdf0f509f78fb0ec99790f
| 18,337
|
py
|
Python
|
pimsclient/client.py
|
sjoerdk/pimsclient
|
cb03e326ef87638cc10ae32badf453fed6391028
|
[
"MIT"
] | null | null | null |
pimsclient/client.py
|
sjoerdk/pimsclient
|
cb03e326ef87638cc10ae32badf453fed6391028
|
[
"MIT"
] | 224
|
2019-06-03T03:09:40.000Z
|
2022-03-28T08:17:17.000Z
|
pimsclient/client.py
|
sjoerdk/pimsclient
|
cb03e326ef87638cc10ae32badf453fed6391028
|
[
"MIT"
] | 1
|
2020-05-27T09:52:24.000Z
|
2020-05-27T09:52:24.000Z
|
"""Classes and functions for working with the PIMS pseudonym management system.
This module adds one level above the Swagger level, abstracting away details and
making it easy to work with multiple types of pseudonym under a single project
description
"""
from typing import List
from pimsclient.exceptions import PIMSException
from pimsclient.server import PIMSServer, PIMSServerException
from pimsclient.swagger import Identifier, KeyFile, Pseudonym, KeyFiles, Users, Key
def connect(pims_url, pims_key_file_id, user=None, password=None):
    """Create a Project linked to the given PIMS keyfile.

    Parameters
    ----------
    pims_url: str
        url to PIMS swagger API
    pims_key_file_id: int
        PIMS id for the keyfile you are trying to link to
    user: str, optional
        username for the PIMS API; defaults to environment key
        ['PIMS_CLIENT_USER']
    password: str, optional
        password for the PIMS API; defaults to environment key
        ['PIMS_CLIENT_PASSWORD']

    Returns
    -------
    Project
        A project connected to the keyfile
    """
    session = PIMSServer(pims_url).get_session(user=user, password=password)
    return Project(
        key_file_id=pims_key_file_id, connection=PIMSConnection(session=session)
    )
class Project:
    """Main object for PIMS client. A project holds all pseudonymization
    information for one or more value_type(s) of identifiers. It stores all its
    data in a single PIMS keyfile.
    """
    def __init__(self, key_file_id, connection=None):
        """Create a project

        Parameters
        ----------
        key_file_id: int
            PIMS db id of keyfile that this project is linked to
        connection: PIMSConnection, optional
            Connection to communicate over for this project; if omitted,
            any server-backed operation raises NoConnectionException
        """
        self.key_file_id = key_file_id
        self._connection = connection
        # Lazily fetched and cached by get_key_file()
        self._key_file = None
        # Casts swagger-level keys to typed keys on the way out
        self.factory = KeyTypeFactory()
    def __str__(self):
        return (
            f"Project for keyfile {self.key_file_id} over connection "
            f"{self.connection}"
        )
    @property
    def connection(self):
        # Fail loudly for unconnected projects instead of raising an
        # AttributeError on None further down the line.
        if self._connection:
            return self._connection
        else:
            raise NoConnectionException(
                "This project is not connected to any PIMS server"
            )
    def get_key_file(self) -> KeyFile:
        """Caches keyfile got from PIMS locally

        Raises
        ------
        PIMSProjectException
            If keyfile cannot be got for any reason

        Returns
        -------
        KeyFile
            The keyfile that this project stores its data in
        """
        # Fetch once per Project instance; later calls hit the cache.
        if not self._key_file:
            try:
                self._key_file = self.connection.get_key_file(key=self.key_file_id)
            except PIMSServerException as e:
                raise PIMSProjectException(f"Error getting key file from server: {e}")
        return self._key_file
    def get_name(self) -> str:
        """
        Raises
        ------
        PIMSProjectException
            If name cannot be got for any reason

        Returns
        -------
        str:
            Name of the project in pims
        """
        return self.get_key_file().name
    def get_pims_pseudonym_template(self) -> str:
        """
        Raises
        ------
        PIMSProjectException
            If template cannot be got for any reason

        Returns
        -------
        str:
            pseudonym template as defined in pims
        """
        return self.get_key_file().pseudonym_template
    def pseudonymize(self, identifiers):
        """Get a pseudonym from PIMS for each identifier in list

        Parameters
        ----------
        identifiers: List[TypedIdentifier]
            identifiers to pseudonymize

        Raises
        ------
        PIMSProjectException
            If pseudonymization fails

        Returns
        -------
        List[TypedKey]
            Each identifier mapped to PIMS pseudonym
        """
        keys = self.connection.pseudonymize(
            key_file=self.get_key_file(), identifiers=identifiers
        )
        # Swagger keys come back untyped; restore the typed subclasses.
        return [self.factory.create_typed_key(x) for x in keys]
    def reidentify(self, pseudonyms: List["TypedPseudonym"]) -> List["TypedKey"]:
        """Get identifiers for each pseudonym in list

        Parameters
        ----------
        pseudonyms: List[TypedPseudonym]
            list of pseudonyms to process

        Raises
        ------
        PIMSProjectException

        Returns
        -------
        List[TypedKey]
            Pseudonym mapped to identifier if found. If a pseudonym is not
            found in PIMS it is omitted from list
        """
        keys = self.connection.reidentify(
            key_file=self.get_key_file(), pseudonyms=pseudonyms
        )
        return [self.factory.create_typed_key(x) for x in keys]
    def set_keys(self, keys: List[Key]):
        """Manually set the given pseudonym-identifier keys

        Raises
        ------
        PIMSProjectException
            If any pseudonyms or identifiers are already in keyfile
        """
        self.connection.set_keys(key_file=self.get_key_file(), keys=keys)
    def assert_pseudonym_templates(self, should_have_a_template, should_exist):
        """Make sure the the pseudonym templates for the datatypes in this project
        are as expected.

        This check makes sure the format UID's makes sense. For example, if no
        template is defined for StudyInstanceUID, de-identifying might yield a guid,
        which is not a valid DICOM UID. Fail early in this case, because this will
        cause headaches later if not fixed.

        Notes
        -----
        In this client library a 'PseudonymTemplate' is for a single datatype.
        In PIMS, the pseudonym template contains templates for all datatypes.
        See notes for PseudonymTemplate

        Parameters
        ----------
        should_have_a_template: List[TypedPseudonym]
            These pseudonym types should have a template defined in this project,
            regardless of what the actual template
            is.
        should_exist: List[PseudonymTemplate]
            These exact templates should be defined in this project. Requires the
            template to be exactly a certain value

        Raises
        ------
        PIMSProjectException
            When assertion cannot be done. For example when connection to server
            fails
        InvalidPseudonymTemplateException:
            When this project's template is not as expected
        """
        # PIMS stores all per-datatype templates in one combined string;
        # both checks below are plain substring tests against it.
        pims_template = self.get_pims_pseudonym_template()
        for typed_pseudonym in should_have_a_template:
            if f":{typed_pseudonym.value_type}" not in pims_template:
                msg = (
                    f'Could not find any template for "{typed_pseudonym}" in '
                    f'project {self} template "{pims_template}".'
                    f" This is required"
                )
                raise InvalidPseudonymTemplateException(msg)
        for template in should_exist:
            if template.as_pims_string() not in pims_template:
                msg = (
                    f'Could not find "{template.as_pims_string()}" in project'
                    f' {self} template "{pims_template}".'
                    f" This is required"
                )
                raise InvalidPseudonymTemplateException(msg)
class PIMSConnection:
    def __init__(self, session):
        """A logged in session to a PIMS server. Main way in client lib of
        interacting with PIMS

        Parameters
        ----------
        session: PIMSSession
            session to use for communicating with PIMS
        """
        self.session = session
        # Swagger-level API wrappers sharing the authenticated session
        self.key_files = KeyFiles(session=self.session)
        self.users = Users(session=self.session)
    def get_key_file(self, key):
        """Get specific key file

        Parameters
        ----------
        key: int or str
            key for the key_file to get

        Raises
        ------
        PIMSServerException
            When key file cannot be got for some reason

        Returns
        -------
        KeyFile
        """
        return self.key_files.get(key)
    def pseudonymize(
        self, key_file: KeyFile, identifiers: List[Identifier]
    ) -> List[Key]:
        """Get a pseudonym for each identifier. If identifier is known in PIMS,
        return this. Otherwise, have PIMS generate a new pseudonym and return that.

        Parameters
        ----------
        key_file: KeyFile
            The key_file to use
        identifiers: List[Identifier]
            The identifiers to get pseudonyms for

        Returns
        -------
        List[Key]
            The PIMS pseudonym for each identifier
        """
        return self.key_files.pseudonymize(key_file=key_file, identifiers=identifiers)
    def reidentify(self, key_file: KeyFile, pseudonyms: List[Pseudonym]) -> List[Key]:
        """Find the identifiers linked to the given pseudonyms.

        Parameters
        ----------
        key_file: KeyFile
            The key_file to use
        pseudonyms: List[Pseudonym]
            The pseudonyms to get identifiers for

        Notes
        -----
        Returned list might be shorter than input list. For unknown pseudonyms
        no keys are returned

        Returns
        -------
        List[Key]
            A list of pseudonym-identifier keys
        """
        return self.key_files.reidentify(key_file=key_file, pseudonyms=pseudonyms)
    def set_keys(self, key_file: KeyFile, keys: List[Key]):
        """Manually set the given pseudonym-identifier keys

        Raises
        ------
        PIMSServerException
            If any pseudonym or identifier already exists in keyfile
        """
        # PIMS silently skips setting a pseudonym if the identity exists already.
        # We want to avoid silent skipping, so manually check existing.
        # NOTE(review): this check-then-set is not atomic; a concurrent writer
        # could still insert a key between reidentify() and set_keys() —
        # confirm this window is acceptable.
        reidentified = self.reidentify(
            key_file=key_file, pseudonyms=[x.pseudonym for x in keys]
        )
        if reidentified:
            raise PIMSServerException(
                f"One or more identifiers already exist in keyfile: "
                f"{[x.describe() for x in reidentified]}. Overwriting would make "
                f"this keyfile inconsistent"
            )
        # No identities exist. Start setting
        return self.key_files.set_keys(key_file=key_file, keys=keys)
class ValueTypes:
    """Value-type names for identifiers and pseudonyms in PIMS.

    A PatientID must be treated differently from a SeriesInstanceUID —
    different generation patterns, for example. Whenever a DICOM tag is
    pseudonymized, the DICOM tag name is used as the value_type descriptor;
    see for example
    https://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/DICOM.html
    """

    PATIENT_ID = "PatientID"
    STUDY_INSTANCE_UID = "StudyInstanceUID"
    SERIES_INSTANCE_UID = "SeriesInstanceUID"
    SOP_INSTANCE_UID = "SOPInstanceUID"
    ACCESSION_NUMBER = "AccessionNumber"
    SALT = "Salt"
    NOT_SET = "NOT_SET"

    # Every concrete value type; NOT_SET is a placeholder and is
    # deliberately excluded.
    all = [
        PATIENT_ID, STUDY_INSTANCE_UID, SERIES_INSTANCE_UID,
        SOP_INSTANCE_UID, ACCESSION_NUMBER, SALT,
    ]
class TypedIdentifier(Identifier):
    """An identifier with a specific value_type"""
    # Placeholder, overridden in subclasses with a plain string class
    # attribute. NOTE: within this base class the 'value_type' property
    # defined below shadows this assignment.
    value_type = ValueTypes.NOT_SET
    def __init__(self, value):
        # In subclasses self.value_type resolves to the subclass's plain
        # class attribute (which overrides the inherited property), so the
        # value_type string is stored as 'source' on the swagger Identifier.
        # Instantiating TypedIdentifier directly would hit the property
        # before 'source' is set — subclass use is assumed.
        super().__init__(value=value, source=self.value_type)
    @property
    def value_type(self):
        """In swagger layer value_type is saved as 'source'. Expose this here as
        value_type because it fits the concepts better
        """
        return self.source
    def __str__(self):
        return f"{self.value_type}: {self.value}"
class PatientID(TypedIdentifier):
    """Identifier holding a real patient ID."""
    value_type = ValueTypes.PATIENT_ID
class StudyInstanceUID(TypedIdentifier):
    """Identifier for a DICOM study."""
    value_type = ValueTypes.STUDY_INSTANCE_UID
class SeriesInstanceUID(TypedIdentifier):
    """Identifier for a DICOM series."""
    value_type = ValueTypes.SERIES_INSTANCE_UID
class SOPInstanceUID(TypedIdentifier):
    """Designates a single slice in a DICOM file"""
    value_type = ValueTypes.SOP_INSTANCE_UID
class AccessionNumber(TypedIdentifier):
    """Identifier holding a study accession number."""
    value_type = ValueTypes.ACCESSION_NUMBER
class SaltIdentifier(TypedIdentifier):
    """Identifier holding salt material."""
    value_type = ValueTypes.SALT
class TypedPseudonym(Pseudonym):
    """A pseudonym with a specific value_type"""
    # Placeholder, overridden in subclasses with the concrete value type.
    value_type = ValueTypes.NOT_SET
    def __init__(self, value):
        # Store the value_type string as 'source' on the swagger Pseudonym,
        # mirroring the TypedIdentifier convention.
        super().__init__(value=value, source=self.value_type)
    def __str__(self):
        return f"Pseudo{self.value_type}: {self.value}"
class PseudoPatientID(TypedPseudonym):
    """Pseudonym standing in for a patient ID."""
    value_type = ValueTypes.PATIENT_ID
class PseudoStudyInstanceUID(TypedPseudonym):
    """Pseudonym standing in for a study instance UID."""
    value_type = ValueTypes.STUDY_INSTANCE_UID
class PseudoSeriesInstanceUID(TypedPseudonym):
    """Pseudonym standing in for a series instance UID."""
    value_type = ValueTypes.SERIES_INSTANCE_UID
class PseudoSOPInstanceUID(TypedPseudonym):
    """Pseudonym standing in for a SOP instance UID."""
    value_type = ValueTypes.SOP_INSTANCE_UID
class PseudoAccessionNumber(TypedPseudonym):
    """Pseudonym standing in for an accession number."""
    value_type = ValueTypes.ACCESSION_NUMBER
class PseudoSalt(TypedPseudonym):
    """Pseudonym standing in for salt material."""
    value_type = ValueTypes.SALT
class NoConnectionException(Exception):
    """Raised when a Project has no PIMSConnection to talk to."""
class TypedKey(Key):
    """An identity-pseudonym mapping where both have the same value_type"""
    def __init__(self, identifier, pseudonym):
        """Create a typed Key

        Parameters
        ----------
        identifier: TypedIdentifier
            Real identifier, like 'Yen Hu'
        pseudonym: TypedPseudonym
            Pseudonym used for the identifier, like 'Case3'
        """
        super().__init__(identifier, pseudonym)
    def __str__(self):
        # Only the pseudonym is printed; the real identifier is deliberately
        # left out of the string representation.
        return f"Key <{self.value_type}>: {self.pseudonym.value}"
    @property
    def value_type(self):
        """According to convention, source is used to hold value_type information"""
        return self.identifier.source
class KeyTypeFactory:
    """Casts plain swagger objects to their typed counterparts, dispatching
    on the PIMS value_type string.
    """

    # value_type string -> TypedIdentifier subclass
    identifier_class_map = {
        cls.value_type: cls
        for cls in (
            PatientID,
            StudyInstanceUID,
            SeriesInstanceUID,
            SOPInstanceUID,
            AccessionNumber,
            SaltIdentifier,
        )
    }

    # value_type string -> TypedPseudonym subclass
    pseudonym_class_map = {
        cls.value_type: cls
        for cls in (
            PseudoPatientID,
            PseudoStudyInstanceUID,
            PseudoSeriesInstanceUID,
            PseudoSOPInstanceUID,
            PseudoAccessionNumber,
            PseudoSalt,
        )
    }

    def create_typed_key(self, key: Key) -> TypedKey:
        """Cast the given swagger.Key to a TypedKey.

        Parameters
        ----------
        key: Key

        Raises
        ------
        TypedKeyFactoryException
            If key cannot be cast to a known type

        Returns
        -------
        TypedKey
        """
        typed_identifier = self.create_typed_identifier(identifier=key.identifier)
        typed_pseudonym = self.create_typed_pseudonym(
            pseudonym=key.pseudonym, value_type=typed_identifier.value_type
        )
        return TypedKey(identifier=typed_identifier, pseudonym=typed_pseudonym)

    def create_typed_identifier(self, identifier: Identifier) -> TypedIdentifier:
        """Cast identifier to its typed subclass, chosen by identifier.source.

        Parameters
        ----------
        identifier: Identifier

        Raises
        ------
        TypedKeyFactoryException
            If identifier cannot be cast to a known type

        Returns
        -------
        TypedIdentifier
        """
        try:
            identifier_cls = self.identifier_class_map[identifier.source]
            return identifier_cls(identifier.value)
        except KeyError:
            msg = (
                f'Unknown value type "{identifier.source}". Known types: '
                f"{list(self.identifier_class_map.keys())}"
            )
            raise TypedKeyFactoryException(msg)

    def create_typed_pseudonym(
        self, pseudonym: Pseudonym, value_type: str
    ) -> TypedPseudonym:
        """Cast pseudonym to its typed subclass, chosen by value_type.

        Parameters
        ----------
        pseudonym: Pseudonym
            pseudonym to cast
        value_type: str
            one of ValueTypes

        Raises
        ------
        TypedKeyFactoryException
            If pseudonym cannot be cast to a known type

        Returns
        -------
        TypedPseudonym
        """
        try:
            pseudonym_cls = self.pseudonym_class_map[value_type]
            return pseudonym_cls(pseudonym.value)
        except KeyError:
            # NOTE(review): the message reports pseudonym.source although the
            # failed lookup key is value_type — confirm which is intended.
            msg = (
                f"Unknown value type {pseudonym.source}. Known types: "
                f"{list(self.pseudonym_class_map.keys())}"
            )
            raise TypedKeyFactoryException(msg)
class PseudonymTemplate:
    """The way new pseudonyms are generated in PIMS for a single pseudonym type

    Notes
    -----
    In this client library a 'PseudonymTemplate' is the template used for
    generating values of a single datatype. In a PIMS KeyFile, 'pseudonym
    template' is one long string holding the templates for ALL datatypes,
    joined by a separator. The PIMS naming predates multiple datatypes, so
    this library does not follow it.
    """

    def __init__(self, template_string, pseudonym_class):
        """Create a new pseudonym template

        Parameters
        ----------
        template_string: str
            string representing template. See PIMS documentation for options
        pseudonym_class: class
            The TypedPseudonym class for which this template holds
        """
        self.template_string = template_string
        self.pseudonym_class = pseudonym_class

    def as_pims_string(self):
        """Render as the ':<value_type>|<template>' fragment that appears in
        a PIMS keyfile's combined template string."""
        value_type = self.pseudonym_class.value_type
        return f":{value_type}|{self.template_string}"
class PIMSClientException(PIMSException):
    """Base for all errors raised by this client module."""


class PIMSProjectException(PIMSClientException):
    """Raised when a Project operation cannot be completed."""


class TypedKeyFactoryException(PIMSClientException):
    """Raised when a swagger object cannot be cast to a known typed class."""


class InvalidPseudonymTemplateException(PIMSClientException):
    """Raised when a project's pseudonym template is not as expected."""
| 28.651563
| 86
| 0.622785
|
4a030791dc5051de43384e33508514035233227f
| 2,280
|
py
|
Python
|
safe_control_gym/controllers/__init__.py
|
molumitu/safe-control-gym
|
81bec94d278c99e61fbf626ef39ac171e8c6f8c8
|
[
"MIT"
] | 1
|
2022-03-01T03:18:05.000Z
|
2022-03-01T03:18:05.000Z
|
safe_control_gym/controllers/__init__.py
|
molumitu/safe-control-gym
|
81bec94d278c99e61fbf626ef39ac171e8c6f8c8
|
[
"MIT"
] | null | null | null |
safe_control_gym/controllers/__init__.py
|
molumitu/safe-control-gym
|
81bec94d278c99e61fbf626ef39ac171e8c6f8c8
|
[
"MIT"
] | null | null | null |
"""Register controllers.
"""
from safe_control_gym.utils.registration import register
# Currently disabled controllers:
# register(id="cbf",
#          entry_point="safe_control_gym.controllers.cbf.cbf_qp:CBF_QP",
#          config_entry_point="safe_control_gym.controllers.cbf:cbf_qp.yaml")
# register(id="lqr",
#          entry_point="safe_control_gym.controllers.lqr.lqr:LQR",
#          config_entry_point="safe_control_gym.controllers.lqr:lqr.yaml")
# register(id="ilqr",
#          entry_point="safe_control_gym.controllers.lqr.ilqr:iLQR",
#          config_entry_point="safe_control_gym.controllers.lqr:ilqr.yaml")

# (id, entry_point, config_entry_point) for every controller shipped here.
_CONTROLLERS = (
    ("mpc",
     "safe_control_gym.controllers.mpc.mpc:MPC",
     "safe_control_gym.controllers.mpc:mpc.yaml"),
    ("linear_mpc",
     "safe_control_gym.controllers.mpc.linear_mpc:LinearMPC",
     "safe_control_gym.controllers.mpc:linear_mpc.yaml"),
    ("gp_mpc",
     "safe_control_gym.controllers.mpc.gp_mpc:GPMPC",
     "safe_control_gym.controllers.mpc:gp_mpc.yaml"),
    ("mpsc",
     "safe_control_gym.controllers.mpsc.mpsc:MPSC",
     "safe_control_gym.controllers.mpsc:mpsc.yaml"),
    ("pid",
     "safe_control_gym.controllers.pid.pid:PID",
     "safe_control_gym.controllers.pid:pid.yaml"),
    ("ppo",
     "safe_control_gym.controllers.ppo.ppo:PPO",
     "safe_control_gym.controllers.ppo:ppo.yaml"),
    ("sac",
     "safe_control_gym.controllers.sac.sac:SAC",
     "safe_control_gym.controllers.sac:sac.yaml"),
    ("safe_explorer_ppo",
     "safe_control_gym.controllers.safe_explorer.safe_ppo:SafeExplorerPPO",
     "safe_control_gym.controllers.safe_explorer:safe_ppo.yaml"),
    ("rarl",
     "safe_control_gym.controllers.rarl.rarl:RARL",
     "safe_control_gym.controllers.rarl:rarl.yaml"),
    ("rap",
     "safe_control_gym.controllers.rarl.rap:RAP",
     "safe_control_gym.controllers.rarl:rap.yaml"),
)

for _id, _entry_point, _config_entry_point in _CONTROLLERS:
    register(id=_id,
             entry_point=_entry_point,
             config_entry_point=_config_entry_point)
| 40
| 91
| 0.739474
|
4a03098dd08ac1ba355af08940138870846978ec
| 578
|
py
|
Python
|
recipes/recipes/gerrit_cq_test.py
|
mithro/chromium-infra
|
d27ac0b230bedae4bc968515b02927cf9e17c2b7
|
[
"BSD-3-Clause"
] | null | null | null |
recipes/recipes/gerrit_cq_test.py
|
mithro/chromium-infra
|
d27ac0b230bedae4bc968515b02927cf9e17c2b7
|
[
"BSD-3-Clause"
] | null | null | null |
recipes/recipes/gerrit_cq_test.py
|
mithro/chromium-infra
|
d27ac0b230bedae4bc968515b02927cf9e17c2b7
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe modules this recipe depends on; resolved by the recipe engine.
DEPS = [
  'depot_tools/bot_update',
  'depot_tools/gclient',
  'recipe_engine/properties',
]
# Playground repo used to exercise Gerrit CQ behavior.
# NOTE(review): REPO is not referenced in this recipe's visible code —
# confirm it is still needed.
REPO = 'https://chromium.googlesource.com/playground/gerrit-cq/normal'
def RunSteps(api):
  """Check out the gerrit_test_cq_normal solution with the Gerrit patch
  applied (bot_update handles the actual sync)."""
  api.gclient.set_config('gerrit_test_cq_normal')
  api.bot_update.ensure_checkout(patch=True)
def GenTests(api):
  """Simulation coverage: a single tryserver run carrying a Gerrit patch."""
  tryserver_props = api.properties.tryserver(
      gerrit_project='playground/gerrit-cq/normal')
  yield api.test('try') + tryserver_props
| 24.083333
| 74
| 0.740484
|
4a030a3845c7f490303895210afb286d4a581eb7
| 3,502
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/express_route_gateway_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/express_route_gateway_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_12_01/models/express_route_gateway_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_py3 import Resource
class ExpressRouteGateway(Resource):
    """ExpressRoute gateway resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param id: Resource ID.
    :type id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param auto_scale_configuration: Configuration for auto scaling.
    :type auto_scale_configuration:
     ~azure.mgmt.network.v2018_12_01.models.ExpressRouteGatewayPropertiesAutoScaleConfiguration
    :ivar express_route_connections: List of ExpressRoute connections to the
     ExpressRoute gateway.
    :vartype express_route_connections:
     list[~azure.mgmt.network.v2018_12_01.models.ExpressRouteConnection]
    :ivar provisioning_state: The provisioning state of the resource. Possible
     values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'
    :vartype provisioning_state: str or
     ~azure.mgmt.network.v2018_12_01.models.ProvisioningState
    :param virtual_hub: Required. The Virtual Hub where the ExpressRoute
     gateway is or will be deployed.
    :type virtual_hub: ~azure.mgmt.network.v2018_12_01.models.VirtualHubId
    :ivar etag: A unique read-only string that changes whenever the resource
     is updated.
    :vartype etag: str
    """

    # Serialization constraints: 'readonly' fields are server-populated and
    # rejected on input; 'required' fields must be set before sending.
    _validation = {
        'name': {'readonly': True},
        'type': {'readonly': True},
        'express_route_connections': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'virtual_hub': {'required': True},
        'etag': {'readonly': True},
    }

    # Maps Python attribute names to their REST API JSON path and msrest
    # serialization type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'auto_scale_configuration': {'key': 'properties.autoScaleConfiguration', 'type': 'ExpressRouteGatewayPropertiesAutoScaleConfiguration'},
        'express_route_connections': {'key': 'properties.expressRouteConnections', 'type': '[ExpressRouteConnection]'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'virtual_hub': {'key': 'properties.virtualHub', 'type': 'VirtualHubId'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, *, virtual_hub, id: str=None, location: str=None, tags=None, auto_scale_configuration=None, **kwargs) -> None:
        super(ExpressRouteGateway, self).__init__(id=id, location=location, tags=tags, **kwargs)
        self.auto_scale_configuration = auto_scale_configuration
        # Read-only fields start as None; the server fills them in responses.
        self.express_route_connections = None
        self.provisioning_state = None
        self.virtual_hub = virtual_hub
        self.etag = None
| 43.234568
| 144
| 0.653055
|
4a030adafb4f485c6fa491b32a5e44d3f3982f1d
| 26,713
|
py
|
Python
|
town_data/safegraph_processing/210701_safegraph_data_processing.py
|
atruszkowska/NR-population-revac
|
3f35be85755bc7233cd1330ea1fbc0346c8dc04c
|
[
"MIT"
] | null | null | null |
town_data/safegraph_processing/210701_safegraph_data_processing.py
|
atruszkowska/NR-population-revac
|
3f35be85755bc7233cd1330ea1fbc0346c8dc04c
|
[
"MIT"
] | null | null | null |
town_data/safegraph_processing/210701_safegraph_data_processing.py
|
atruszkowska/NR-population-revac
|
3f35be85755bc7233cd1330ea1fbc0346c8dc04c
|
[
"MIT"
] | 1
|
2021-09-21T18:56:10.000Z
|
2021-09-21T18:56:10.000Z
|
import csv
#
# The original SafeGraph data are uploaded to Google drive due to the size limit of GitHub
# Link:
# The data are in groups by year
# with 1 group (5 files) of 2020 and 3 groups (6 files each) of 2021
#
def convertFile(inFileName, outFileName):
''' Convert the comma separated file to a tab separated file for future processing '''
csv.writer(open(outFileName, 'w+'), delimiter='\t').writerows(csv.reader(open(inFileName)))
def filterFile2020(inFileName, outFileName):
''' Clean up the 2020 SafeGraph data file, keeping building's name, top_category, latitude, longitude, zip code '''
with open(inFileName, 'r') as inFile, open(outFileName, 'w') as outFile:
for line in inFile:
lineList = line.split('\t')
newLine = '\t'.join([lineList[2], lineList[5], lineList[8], lineList[9], lineList[13]])
outFile.write(newLine+'\n')
def filterFile2021(inFileName, outFileName):
''' Clean up the 2021 SafeGraph data file, keeping building's name, top_category, latitude, longitude, zip code '''
with open(inFileName, 'r') as inFile, open(outFileName, 'w') as outFile:
for line in inFile:
lineList = line.split('\t')
newLine = '\t'.join([lineList[4], lineList[7], lineList[10], lineList[11], lineList[15]])
outFile.write(newLine+'\n')
def preProcessing(year, group=0):
    ''' Pre-process the SafeGraph data, calling the convertFile and filterFile function, output text files '''
    # The 2020 and 2021 raw files have different column layouts, so each year
    # is cleaned by its own filter function. 2020 is a single group of five
    # files; 2021 comes in three groups of six files each. The four original
    # copy-pasted branches are consolidated into one loop here.
    if year == "2020":
        part_range, filter_fn = range(1, 6), filterFile2020
    elif year == "2021" and group in (1, 2, 3):
        first_part = {1: 1, 2: 7, 3: 13}[group]
        part_range, filter_fn = range(first_part, first_part + 6), filterFile2021
    else:
        raise Exception("The year/group is not supported.")
    for i in part_range:
        inFile = year + "_core_poi-part" + str(i) + ".csv"
        tabFile = year + "_core_poi-part" + str(i) + ".tsv"
        outFile = year + "_poi-part" + str(i) + ".txt"
        convertFile(inFile, tabFile)
        filter_fn(tabFile, outFile)
def buildLeisureMap():
    ''' Build and return a map that contains all categories that are considered leisure locations '''
    # The value 1 is a dummy flag: leisureMap[category] == 1 marks the
    # category as a leisure location.
    leisure_categories = (
        "Amusement Parks and Arcades",
        "Automobile Dealers",
        "Automotive Equipment Rental and Leasing",
        "Automotive Repair and Maintenance",
        "Bakeries and Tortilla Manufacturing",
        "Beer, Wine, and Liquor Stores",
        "Beverage Manufacturing",
        "Book Stores and News Dealers",
        "Building Material and Supplies Dealers",
        "Child Day Care Services",
        "Clothing Stores",
        "Consumer Goods Rental",
        "Department Stores",
        "Electronics and Appliance Stores",
        "Florists",
        "Furniture Stores",
        "Gasoline Stations",
        "General Medical and Surgical Hospitals",
        "General Merchandise Stores",
        "General Merchandise Stores, including Warehouse Clubs and Supercenters",
        "Grocery Stores",
        "Health and Personal Care Stores",
        "Home Furnishings Stores",
        "Home Health Care Services",
        "Interurban and Rural Bus Transportation",
        "Jewelry Luggage and Leather Goods Stores",
        "Lessors of Real Estate",
        "Liquor Stores",
        "Miscellaneous Durable Goods Merchant Wholesalers",
        "Motion Picture and Video Industries",
        "Museums, Historical Sites, and Similar Institutions",
        "Nursing Care Facilities (Skilled Nursing Facilities)",
        "Office Supplies, Stationery, and Gift Stores",
        "Other Ambulatory Health Care Services",
        "Other Amusement and Recreation Industries",
        "Other Miscellaneous Store Retailers",
        "Other Motor Vehicle Dealers",
        "Other Personal Services",
        "Personal Care Services",
        "Rail Transportation",
        "Religious Organizations",
        "Restaurants and Other Eating Places",
        "Shoe Stores",
        "Specialty Food Stores",
        "Spectator Sports",
        "Sporting Goods, Hobby, and Musical Instrument Stores",
        "Travel Arrangement and Reservation Services",
        "Traveler Accommodation",
        "Used Merchandise Stores",
    )
    return dict.fromkeys(leisure_categories, 1)
def buildWorkMap():
    ''' Build and return a map that contains all categories that are NOT considered workplaces '''
    # The value 1 is a dummy marker: workMap[category] == 1 simply records
    # that the category is excluded from the workplace set (see cleanWork).
    excluded_categories = [
        "Child Day Care Services",
        "Colleges, Universities, and Professional Schools",
        "Elementary and Secondary Schools",
        "Home Health Care Services",
        "Medical and Diagnostic Laboratories",
        "Nursing Care Facilities (Skilled Nursing Facilities)",
        "Other Ambulatory Health Care Services",
        "Other Schools and Instruction",
        "Outpatient Care Centers",
        "Personal Care Services",
        "Psychiatric and Substance Abuse Hospitals",
        "Continuing_Care_Retirement_Communities_and_Assisted_Living_Facilities_for_the_Elderly",
        "General_Medical_and_Surgical_Hospitals",
        "Junior_Colleges",
    ]
    return dict.fromkeys(excluded_categories, 1)
def getInTownZip(cityName):
    ''' Build and return a list of all zip codes in-town for the city '''
    zipTable = {
        "Utica": ["13501", "13502", "13503", "13504", "13505", "13599"],
        "Colonie": ["12205", "12110", "12309", "12047", "12303", "12189",
                    "12203", "12304", "12211"],
        "NewRochelle": ["10801", "10805", "10804", "10583", "10803", "10538"],
    }
    if cityName not in zipTable:
        # Preserve the original best-effort contract: report and return False.
        print("getInTownZip: city name " + cityName + " not supported.")
        return False
    return zipTable[cityName]
def getOutOfTownZip(cityName):
    ''' Build and return a list of all zip codes out-of-town for the city '''
    zipTable = {
        "Utica": ["13440", "13413", "13340", "13323", "13357", "13403",
                  "13350", "13456", "13417", "13421", "13365"],
        "Colonie": ["12180", "12065", "12306", "12302", "12020", "12144",
                    "12866", "12208", "12206", "12054", "12182", "12308",
                    "12188", "12118", "12019", "12204"],
        "NewRochelle": ["10550", "10466", "10701", "10469", "10567", "10552",
                        "10573", "10543", "10704", "10475", "10710"],
    }
    if cityName not in zipTable:
        # Preserve the original best-effort contract: report and return False.
        print("getOutOfTownZip: city name " + cityName + " not supported.")
        return False
    return zipTable[cityName]
def matchCategory():
    ''' Build and return a map from SafeGraph category to business category '''
    # TSV layout: column 0 = SafeGraph top_category, column 1 = business category.
    with open("TopCat_Codes.tsv", 'r') as handle:
        rows = iter(handle)
        next(rows)  # drop the header row
        return {cols[0]: cols[1]
                for cols in (row.split('\t') for row in rows)}
def matchUncat():
    ''' Build and return a map from key word in uncategorized buildings' names to business category '''
    # TSV layout: column 0 = key word, column 2 = assigned business category.
    keywordMap = {}
    with open("uncategorized_key_words.tsv", 'r') as handle:
        rows = iter(handle)
        next(rows)  # drop the header row
        for row in rows:
            cols = row.split('\t')
            keywordMap[cols[0]] = cols[2]
    return keywordMap
def matchTopCatOcc():
    ''' Build and return a map from SafeGraph category to occupation '''
    # TSV layout: column 0 = SafeGraph top_category, column 2 = occupation.
    with open("match_topcat_occupation.tsv", 'r') as handle:
        rows = iter(handle)
        next(rows)  # drop the header row
        return {cols[0]: cols[2]
                for cols in (row.split('\t') for row in rows)}
def matchCatCodeOcc():
    ''' Build and return a map from key word in uncategorized buildings' names to occupation '''
    # TSV layout: column 0 = key word / category code, column 2 = occupation.
    occupationMap = {}
    with open("match_catcode_occupation.tsv", 'r') as handle:
        rows = iter(handle)
        next(rows)  # drop the header row
        for row in rows:
            cols = row.split('\t')
            occupationMap[cols[0]] = cols[2]
    return occupationMap
def printIgnored(countIgnored, countTotal, city, category, inOrOut):
    '''Report how many uncategorized buildings were ignored for one city/category/direction.'''
    # Guard against division by zero when nothing at all was written out.
    percent = countIgnored / countTotal * 100 if countTotal != 0 else 0
    print(countIgnored, ",", percent, "% uncategorized buildings were ignored in",
          city, category, inOrOut)
def cleanLeisure(inFileNameList, outFileName, cityName, inOrOut):
    ''' Output all leisure locations in/out-of-town in given city, return number of ignored & total buildings

    inFileNameList: cleaned SafeGraph POI files (tab-separated:
        name, top_category, latitude, longitude, zip code)
    outFileName: TSV written with header name/top_category/lat/lon
    cityName: city whose zip lists define in-town / out-of-town
    inOrOut: "in" or "out"; anything else returns False immediately
    '''
    leisureMap = buildLeisureMap()   # categories accepted as leisure
    unCatMap = matchUncat()          # name keyword -> category fallback
    countIgnored = 0                 # uncategorized rows we could not resolve
    countTotal = 0                   # rows actually written to the output
    if inOrOut == "in":
        zipList = getInTownZip(cityName)
    elif inOrOut == "out":
        zipList = getOutOfTownZip(cityName)
    else:
        return False
    with open(outFileName, 'w') as outFile:
        print("location_name\ttop_category\tlatitude\tlongitude", file=outFile)
        for eachFileName in inFileNameList:
            with open(eachFileName, 'r') as inFile:
                next(inFile)  # skip the header row of each input file
                for line in inFile:
                    infoList = line.split('\t')
                    zipcode = infoList[4].strip()
                    if zipcode in zipList:
                        if infoList[1] == '':  # uncategorized
                            # Try to infer a category from keywords in the name;
                            # stop at the first keyword that matches.
                            posName = infoList[0].split()
                            for word in posName:
                                if infoList[1] != '':
                                    break
                                try:
                                    infoList[1] = unCatMap[word]
                                except KeyError:
                                    continue
                            if infoList[1] == '':
                                countIgnored += 1
                        # NOTE(review): countIgnored counts every unresolved
                        # row, while countTotal only counts leisure rows, so
                        # the reported percentage can exceed 100 — confirm
                        # this is intended.
                        try:
                            if (leisureMap[infoList[1]] == 1):
                                newLine = '\t'.join(infoList[:4])
                                countTotal += 1
                                print(newLine, file=outFile)
                        except KeyError:  # top_category not in leisureMap
                            continue
    printIgnored(countIgnored, countTotal, cityName, "leisure", inOrOut)
def cleanWork(inFileNameList, outFileName, cityName, inOrOut, occupation = False):
    ''' Output all workplaces in/out-of-town in given city, return number of ignored & total buildings

    inFileNameList: cleaned SafeGraph POI files (tab-separated:
        name, top_category, latitude, longitude, zip code)
    outFileName: TSV written with header name/(category|occupation)/lat/lon
    cityName: city whose zip lists define in-town / out-of-town
    inOrOut: "in" or "out"; anything else returns False immediately
    occupation: when True, map each row to an occupation instead of a
        workplace business category
    '''
    workMap = buildWorkMap()         # categories EXCLUDED from workplaces
    catMap = matchCategory()         # top_category -> business category
    unCatMap = matchUncat()          # name keyword -> category fallback
    occMap_Top = matchTopCatOcc()    # top_category -> occupation
    occMap_Code = matchCatCodeOcc()  # category code -> occupation (tried first)
    countIgnored = 0                 # uncategorized rows we could not resolve
    countTotal = 0                   # rows actually written to the output
    if inOrOut == "in":
        zipList = getInTownZip(cityName)
    elif inOrOut == "out":
        zipList = getOutOfTownZip(cityName)
    else:
        return False
    with open(outFileName, 'w') as outFile:
        if occupation:
            print("location_name\toccupation\tlatitude\tlongitude", file=outFile)
        else:
            print("location_name\tcategory\tlatitude\tlongitude", file=outFile)
        for eachFileName in inFileNameList:
            with open(eachFileName, 'r') as inFile:
                next(inFile)  # skip the header row of each input file
                for line in inFile:
                    infoList = line.split('\t')
                    zipcode = infoList[4].strip()
                    if zipcode in zipList:
                        if infoList[1] == '':  # uncategorized
                            posName = infoList[0].split()
                            infoList[1] = ''  # NOTE(review): redundant — already '' here
                            # Try to infer a category from keywords in the
                            # name; stop at the first keyword that matches.
                            for word in posName:
                                if infoList[1] != '':
                                    break
                                try:
                                    infoList[1] = unCatMap[word]
                                except KeyError:
                                    continue
                            if infoList[1] == '':
                                countIgnored += 1
                        if occupation:  # get occupation
                            # Prefer the catcode mapping; fall back to the
                            # top_category mapping; skip the row if neither
                            # knows the category.
                            try:
                                infoList[1] = occMap_Code[infoList[1]]
                            except KeyError:
                                try:
                                    infoList[1] = occMap_Top[infoList[1]]
                                except KeyError:
                                    continue
                            newLine = '\t'.join(infoList[:4])
                            countTotal += 1
                            print(newLine, file=outFile)
                        else:  # get workplaces
                            # workMap holds categories that are NOT workplaces,
                            # so a successful lookup means "drop this row" and
                            # a KeyError means the building IS a workplace.
                            try:
                                if (workMap[infoList[1]] == 1):
                                    pass
                            except KeyError:  # building is a workplace
                                topCat = infoList[1]
                                try:
                                    infoList[1] = catMap[topCat]
                                except KeyError:
                                    continue
                                newLine = '\t'.join(infoList[:4])
                                countTotal += 1
                                print(newLine, file=outFile)
    if occupation:
        printIgnored(countIgnored, countTotal, cityName, "occupation", inOrOut)
    else:
        printIgnored(countIgnored, countTotal, cityName, "work", inOrOut)
def dataProcessing(city, category, year, group=0):
    ''' Process SafeGraph data for given city, category, year, and group(optional)

    city: "Utica", "Colonie", or "NewRochelle" (no spaces)
    category: "leisure", "work", or "occupation"
    year: "2020" (five parts, no group) or "2021" (18 parts; group required)
    group: for 2021 only — group g covers poi parts 6*(g-1)+1 .. 6*g
    Raises Exception when the category, year, or group is unsupported.

    Bug fix: the original 2021 group 1 leisure "in" call listed
    "2021_poi-part5.txt" twice instead of including part6; every group now
    uses its six distinct part files for both directions.
    '''
    # Validation order mirrors the original dispatch: category, then year,
    # then group.
    if category not in ("leisure", "work", "occupation"):
        raise Exception("The category is not supported.")
    if year == "2020":
        # 2020 data ships as five parts and has no group concept.
        fileList = ["2020_poi-part%d.txt" % i for i in range(1, 6)]
        prefix = "2020"
    elif year == "2021":
        if group not in (1, 2, 3):
            raise Exception("The group is not supported.")
        start = 6 * (group - 1) + 1
        fileList = ["2021_poi-part%d.txt" % i for i in range(start, start + 6)]
        prefix = "2021_" + str(group)
    else:
        raise Exception("The year is not supported.")
    # Each category is processed twice: once for in-town zips, once for
    # out-of-town zips, writing to the matching Trimmed.csv file.
    if category == "leisure":
        cleanLeisure(fileList, prefix + "_core_poi_" + city + "In_LeisureTrimmed.csv", city, "in")
        cleanLeisure(fileList, prefix + "_core_poi_" + city + "Out_LeisureTrimmed.csv", city, "out")
    elif category == "work":
        cleanWork(fileList, prefix + "_core_poi_" + city + "In_WorkTrimmed.csv", city, "in")
        cleanWork(fileList, prefix + "_core_poi_" + city + "Out_WorkTrimmed.csv", city, "out")
    else:  # occupation: same cleaner as "work" with the occupation flag set
        cleanWork(fileList, prefix + "_core_poi_" + city + "In_OccupationTrimmed.csv", city, "in", True)
        cleanWork(fileList, prefix + "_core_poi_" + city + "Out_OccupationTrimmed.csv", city, "out", True)
def testMain():
    '''Exercise the full pipeline on the bundled sample SafeGraph files.'''
    # Pre-processing: convert each raw CSV export to TSV, then filter it
    # down to the cleaned column set used by the cleaners.
    convertFile("test_file_2020_raw_safegraph.csv", "test_file_2020_raw_safegraph.tsv")
    filterFile2020("test_file_2020_raw_safegraph.tsv", "test_file_2020_clean_safegraph.txt")
    convertFile("test_file_2021_raw_safegraph.csv", "test_file_2021_raw_safegraph.tsv")
    filterFile2021("test_file_2021_raw_safegraph.tsv", "test_file_2021_clean_safegraph.txt")
    # Data processing: run leisure / work / occupation extraction over each
    # sample file, preserving the original output filenames.
    samples = (
        ("2020", "Utica", "test_file_2020_clean_safegraph.txt"),
        ("2021", "Colonie", "test_file_2021_clean_safegraph.txt"),
    )
    for yearTag, cityTag, cleanFile in samples:
        cleanLeisure([cleanFile],
                     yearTag + "_core_poi_" + cityTag + "In_LeisureTrimmed.csv",
                     cityTag, "in")
        cleanWork([cleanFile],
                  yearTag + "_core_poi_" + cityTag + "In_WorkTrimmed.csv",
                  cityTag, "in")
        cleanWork([cleanFile],
                  yearTag + "_core_poi_" + cityTag + "Out_OccupationTrimmed.csv",
                  cityTag, "out", True)  # Occupation
if __name__ == '__main__':
    # For running test files
    #testMain()
    # Pre-process SafeGraph data to get a text file with buildings' name,
    # top_category, latitude, longitude, zip code.
    # preProcessing(year, group), where param "group" is optional
    preProcessing("2020")
    for grp in (1, 2, 3):
        preProcessing("2021", grp)
    # Process SafeGraph data for every supported combination, in the same
    # order as before (all cities per category, 2020 then 2021 groups 1-3).
    # city: Utica, Colonie, NewRochelle (note: no spaces)
    # category: leisure, work, occupation
    # year: 2020, 2021 ; group: (only for year 2021) 1, 2, 3
    for cat in ("leisure", "work", "occupation"):
        for town in ("Utica", "Colonie", "NewRochelle"):
            dataProcessing(town, cat, "2020")
            for grp in (1, 2, 3):
                dataProcessing(town, cat, "2021", grp)
| 48.835466
| 132
| 0.581477
|
4a030b8b7b874f1fddbdd31c4af3371ea716740e
| 1,247
|
py
|
Python
|
src/downhill-0.2.2/test/util.py
|
masterkeywikz/seq2graph
|
745cb09f10c67a77c0ef517f5d58ac45f2ade09d
|
[
"MIT"
] | 10
|
2017-02-25T17:26:15.000Z
|
2022-02-23T06:36:54.000Z
|
src/downhill-0.2.2/test/util.py
|
masterkeywikz/seq2graph
|
745cb09f10c67a77c0ef517f5d58ac45f2ade09d
|
[
"MIT"
] | null | null | null |
src/downhill-0.2.2/test/util.py
|
masterkeywikz/seq2graph
|
745cb09f10c67a77c0ef517f5d58ac45f2ade09d
|
[
"MIT"
] | 8
|
2016-12-22T00:36:33.000Z
|
2021-05-19T17:55:59.000Z
|
import downhill
import numpy as np
import theano
import theano.tensor as TT
def build_rosen(algo):
    '''Build a downhill optimizer for the 2-D Rosenbrock function.

    Returns (optimizer, dataset); the dataset is a single empty batch since
    the loss depends only on the shared variable ``x``.
    '''
    start = -3 + np.zeros((2, ), 'f')
    x = theano.shared(start, name='x')
    rosen = (100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum()
    opt = downhill.build(
        algo,
        loss=rosen,
        monitors=[('x', x[:-1].sum()), ('y', x[1:].sum())])
    return opt, [[]]
def build_factor(algo):
    '''Build a downhill optimizer for a small matrix-factorization problem.

    Returns (optimizer, dataset): the loss is ||x - u v||^2 and the dataset
    is ten noisy copies of the target product a . b.
    '''
    a = np.arange(1000).reshape((100, 10)).astype('f')
    b = 0.1 + np.zeros((10, 100), 'f')
    x = TT.matrix('x')
    u = theano.shared(a, name='u')
    # NOTE(review): b is already 0.1 everywhere, so v starts at 0.2 — the
    # extra "0.1 +" looks like a doubled offset; confirm it is intentional.
    v = theano.shared(0.1 + b, name='v')
    return downhill.build(
        algo,
        loss=TT.sum(TT.sqr(x - TT.dot(u, v))),
        monitors=[
            ('u<1', (u < 1).mean()),
            ('u<-1', (u < -1).mean()),
            ('v<1', (v < 1).mean()),
            ('v<-1', (v < -1).mean()),
        ]), [[np.dot(a, b) + np.random.randn(100, 100).astype('f')]
             for _ in range(10)]
def assert_progress(opt, train, valid=None, **kwargs):
    '''Step the optimizer twice and assert it made training progress.'''
    stream = opt.iterate(train, valid=valid, **kwargs)
    first_train, first_valid = next(stream)
    second_train, second_valid = next(stream)
    # NOTE(review): "progress" is measured against the *validation* loss of
    # the first step, not first_train['loss'] — confirm this is intentional.
    assert second_train['loss'] < first_valid['loss']  # should have made progress!
    assert second_valid['loss'] == first_valid['loss']  # no new validation occurred
| 31.175
| 74
| 0.510024
|
4a030c4bf590165dc99c53d1376e0a1e13ab3167
| 2,987
|
py
|
Python
|
marltoolbox/utils/policy.py
|
tobiasbaumann1/amd
|
cb6190be92dea54db04ef9202d381b96f6f6218b
|
[
"MIT"
] | null | null | null |
marltoolbox/utils/policy.py
|
tobiasbaumann1/amd
|
cb6190be92dea54db04ef9202d381b96f6f6218b
|
[
"MIT"
] | null | null | null |
marltoolbox/utils/policy.py
|
tobiasbaumann1/amd
|
cb6190be92dea54db04ef9202d381b96f6f6218b
|
[
"MIT"
] | null | null | null |
import gym
from ray.rllib.policy.policy import Policy
from ray.rllib.utils.typing import TrainerConfigDict
from marltoolbox.utils.restore import LOAD_FROM_CONFIG_KEY
def get_tune_policy_class(PolicyClass):
    """
    Allow to convert a Tune trainer into a frozen RLlib policy (no training possible).
    :param PolicyClass: The base RLlib policy class to use. Can be needed if you need some statistics or postprocessing.
    :return: an RLlib policy class that compute actions by calling the Tune trainer.
    """
    class FrozenPolicyFromTuneTrainer(PolicyClass):
        # Wraps a Tune-trained trainer so it can serve actions through the
        # RLlib Policy interface while refusing any further training.

        def __init__(self, observation_space: gym.spaces.Space,
                     action_space: gym.spaces.Space, config: TrainerConfigDict):
            print("__init__ FrozenPolicyFromTuneTrainer")
            # config["tune_config"] must provide "TuneTrainerClass", the class
            # used to reconstruct the trained Tune trainer.
            self.tune_config = config["tune_config"]
            TuneTrainerClass = self.tune_config["TuneTrainerClass"]
            self.tune_trainer = TuneTrainerClass(config=self.tune_config)
            # Pop the checkpoint entry so it is not seen by the parent class;
            # defaults to (None, None) = no checkpoint to load.
            self.load_checkpoint(config.pop(LOAD_FROM_CONFIG_KEY, (None, None)))
            super().__init__(observation_space, action_space, config)

        def compute_actions(self,
                            obs_batch,
                            state_batches=None,
                            prev_action_batch=None,
                            prev_reward_batch=None,
                            info_batch=None,
                            episodes=None,
                            **kwargs):
            # Delegate action computation entirely to the wrapped Tune trainer.
            actions, state_out, extra_fetches = self.tune_trainer.compute_actions(self.policy_id, obs_batch)
            return actions, state_out, extra_fetches

        def learn_on_batch(self, samples):
            # Frozen policy: training is explicitly unsupported.
            raise NotImplementedError("FrozenPolicyFromTuneTrainer policy can't be trained")

        def get_weights(self):
            # "Weights" are just the checkpoint reference; set_weights reloads
            # the checkpoint rather than copying tensors.
            return {"checkpoint_path": self.checkpoint_path,
                    "policy_id": self.policy_id}

        def set_weights(self, weights):
            checkpoint_path = weights["checkpoint_path"]
            policy_id = weights["policy_id"]
            self.load_checkpoint((checkpoint_path, policy_id))

        def load_checkpoint(self, checkpoint_tuple):
            # Stores (checkpoint_path, policy_id); only reloads the trainer
            # when a real checkpoint path is given.
            self.checkpoint_path, self.policy_id = checkpoint_tuple
            if self.checkpoint_path is not None:
                self.tune_trainer.load_checkpoint(self.checkpoint_path)

    return FrozenPolicyFromTuneTrainer
import torch
from ray.rllib.agents.a3c.a3c_torch_policy import A3CTorchPolicy, ValueNetworkMixin
from ray.rllib.policy.torch_policy import LearningRateSchedule
from ray.rllib.agents.dqn.dqn_torch_policy import setup_early_mixins
def sgd_optimizer(policy: Policy,
                  config: TrainerConfigDict) -> "torch.optim.Optimizer":
    """Build a plain SGD optimizer over the policy model's parameters.

    Uses the policy's current learning rate; ``config`` is unused but kept
    because this function is passed as RLlib's ``optimizer_fn`` hook.
    """
    learning_rate = policy.cur_lr
    parameters = policy.model.parameters()
    return torch.optim.SGD(parameters, lr=learning_rate)
# A2C torch policy derived from RLlib's A3C torch policy, swapping in the
# plain SGD optimizer defined above.
# NOTE(review): setup_early_mixins / the two mixins come from RLlib's stock
# policies — presumably they wire up the value network and LR schedule before
# init; confirm against the RLlib version pinned by this project.
A2CTorchPolicy = A3CTorchPolicy.with_updates(
    optimizer_fn=sgd_optimizer,
    before_init=setup_early_mixins,
    mixins=[ValueNetworkMixin, LearningRateSchedule])
| 40.917808
| 120
| 0.678607
|
4a030c73a99b159ff7b42fb3a5f382b7ddb55615
| 107
|
py
|
Python
|
qy/main/__init__.py
|
v2up/queyue
|
0e6c034cb9eec8f4ab2f3e8983e78cb609a2af1d
|
[
"MIT"
] | null | null | null |
qy/main/__init__.py
|
v2up/queyue
|
0e6c034cb9eec8f4ab2f3e8983e78cb609a2af1d
|
[
"MIT"
] | null | null | null |
qy/main/__init__.py
|
v2up/queyue
|
0e6c034cb9eec8f4ab2f3e8983e78cb609a2af1d
|
[
"MIT"
] | null | null | null |
from flask import Blueprint  # use a blueprint to group this package's routes
main = Blueprint('main', __name__)  # instantiate the 'main' blueprint
# Imported after `main` is defined — views/errors presumably import this
# blueprint themselves, so importing them earlier would be circular; confirm.
from . import views, errors
| 21.4
| 43
| 0.757009
|
4a030c821dac217e6b5669158ccb777cedfbf191
| 65,445
|
py
|
Python
|
test/unit/obj/test_ssync_receiver.py
|
IPVL/swift-kilo
|
fe4cdb597f70e40c667b001b446546d75a7a5ab0
|
[
"Apache-2.0"
] | null | null | null |
test/unit/obj/test_ssync_receiver.py
|
IPVL/swift-kilo
|
fe4cdb597f70e40c667b001b446546d75a7a5ab0
|
[
"Apache-2.0"
] | null | null | null |
test/unit/obj/test_ssync_receiver.py
|
IPVL/swift-kilo
|
fe4cdb597f70e40c667b001b446546d75a7a5ab0
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import os
import shutil
import StringIO
import tempfile
import unittest
import eventlet
import mock
from swift.common import constraints
from swift.common import exceptions
from swift.common import swob
from swift.common import utils
from swift.common.storage_policy import POLICIES
from swift.obj import diskfile
from swift.obj import server
from swift.obj import ssync_receiver
from test import unit
@unit.patch_policies()
class TestReceiver(unittest.TestCase):
def setUp(self):
    """Create a temp object-server layout plus two sample object fixtures."""
    utils.HASH_PATH_SUFFIX = 'endcap'
    utils.HASH_PATH_PREFIX = 'startcap'
    # Not sure why the test.unit stuff isn't taking effect here; so I'm
    # reinforcing it.
    diskfile.getxattr = unit._getxattr
    diskfile.setxattr = unit._setxattr
    # Device root with one device ("sda1") and its tmp dir.
    self.testdir = os.path.join(
        tempfile.mkdtemp(), 'tmp_test_ssync_receiver')
    utils.mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
    self.conf = {
        'devices': self.testdir,
        'mount_check': 'false',
        'replication_one_per_device': 'false',
        'log_requests': 'false'}
    self.controller = server.ObjectController(self.conf)
    self.controller.bytes_per_sync = 1
    # Two objects o1/o2 in the same account/container, with fixed
    # timestamps and hashes, reused across the tests below.
    self.account1 = 'a'
    self.container1 = 'c'
    self.object1 = 'o1'
    self.name1 = '/' + '/'.join((
        self.account1, self.container1, self.object1))
    self.hash1 = utils.hash_path(
        self.account1, self.container1, self.object1)
    self.ts1 = '1372800001.00000'
    self.metadata1 = {
        'name': self.name1,
        'X-Timestamp': self.ts1,
        'Content-Length': '0'}
    self.account2 = 'a'
    self.container2 = 'c'
    self.object2 = 'o2'
    self.name2 = '/' + '/'.join((
        self.account2, self.container2, self.object2))
    self.hash2 = utils.hash_path(
        self.account2, self.container2, self.object2)
    self.ts2 = '1372800002.00000'
    self.metadata2 = {
        'name': self.name2,
        'X-Timestamp': self.ts2,
        'Content-Length': '0'}
def tearDown(self):
    """Remove the whole temporary tree created by setUp."""
    # setUp nested testdir one level below mkdtemp(); delete the real root.
    temp_root = os.path.dirname(self.testdir)
    shutil.rmtree(temp_root)
def body_lines(self, body):
    """Split *body* on newlines and return the non-empty, stripped lines."""
    stripped = (raw.strip() for raw in body.split('\n'))
    return [ln for ln in stripped if ln]
def test_SSYNC_semaphore_locked(self):
    """SSYNC reports 503 in-band when the replication semaphore is busy."""
    with mock.patch.object(
            self.controller, 'replication_semaphore') as \
            mocked_replication_semaphore:
        self.controller.logger = mock.MagicMock()
        mocked_replication_semaphore.acquire.return_value = False
        req = swob.Request.blank(
            '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        # The HTTP status stays 200; the 503 is carried inside the body.
        self.assertEqual(
            self.body_lines(resp.body),
            [":ERROR: 503 '<html><h1>Service Unavailable</h1><p>The "
             "server is currently unavailable. Please try again at a "
             "later time.</p></html>'"])
        self.assertEqual(resp.status_int, 200)
        # A busy semaphore is expected, so nothing is logged as an error.
        self.assertFalse(self.controller.logger.error.called)
        self.assertFalse(self.controller.logger.exception.called)
def test_SSYNC_calls_replication_lock(self):
    """SSYNC takes the per-device replication lock for the request's device."""
    with mock.patch.object(
            self.controller._diskfile_router[POLICIES.legacy],
            'replication_lock') as mocked_replication_lock:
        req = swob.Request.blank(
            '/sda1/1',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ':UPDATES: START', ':UPDATES: END'])
        self.assertEqual(resp.status_int, 200)
        mocked_replication_lock.assert_called_once_with('sda1')
def test_Receiver_with_default_storage_policy(self):
    """Without a policy header the Receiver falls back to POLICIES[0]."""
    req = swob.Request.blank(
        '/sda1/1',
        environ={'REQUEST_METHOD': 'SSYNC'},
        body=':MISSING_CHECK: START\r\n'
             ':MISSING_CHECK: END\r\n'
             ':UPDATES: START\r\n:UPDATES: END\r\n')
    rcvr = ssync_receiver.Receiver(self.controller, req)
    # Calling the Receiver yields the response body chunks directly.
    body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()]
    self.assertEqual(
        body_lines,
        [':MISSING_CHECK: START', ':MISSING_CHECK: END',
         ':UPDATES: START', ':UPDATES: END'])
    self.assertEqual(rcvr.policy, POLICIES[0])
def test_Receiver_with_storage_policy_index_header(self):
    """The X-Backend-Storage-Policy-Index header selects the policy."""
    # update router post policy patch
    self.controller._diskfile_router = diskfile.DiskFileRouter(
        self.conf, self.controller.logger)
    req = swob.Request.blank(
        '/sda1/1',
        environ={'REQUEST_METHOD': 'SSYNC',
                 'HTTP_X_BACKEND_STORAGE_POLICY_INDEX': '1'},
        body=':MISSING_CHECK: START\r\n'
             ':MISSING_CHECK: END\r\n'
             ':UPDATES: START\r\n:UPDATES: END\r\n')
    rcvr = ssync_receiver.Receiver(self.controller, req)
    body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()]
    self.assertEqual(
        body_lines,
        [':MISSING_CHECK: START', ':MISSING_CHECK: END',
         ':UPDATES: START', ':UPDATES: END'])
    self.assertEqual(rcvr.policy, POLICIES[1])
    # No frag index header was sent, so none should be recorded.
    self.assertEqual(rcvr.frag_index, None)
def test_Receiver_with_bad_storage_policy_index_header(self):
    """An out-of-range policy index yields an in-band 503 error."""
    # Pick an index one past the largest configured policy.
    valid_indices = sorted([int(policy) for policy in POLICIES])
    bad_index = valid_indices[-1] + 1
    req = swob.Request.blank(
        '/sda1/1',
        environ={'REQUEST_METHOD': 'SSYNC',
                 'HTTP_X_BACKEND_SSYNC_FRAG_INDEX': '0',
                 'HTTP_X_BACKEND_STORAGE_POLICY_INDEX': bad_index},
        body=':MISSING_CHECK: START\r\n'
             ':MISSING_CHECK: END\r\n'
             ':UPDATES: START\r\n:UPDATES: END\r\n')
    self.controller.logger = mock.MagicMock()
    receiver = ssync_receiver.Receiver(self.controller, req)
    body_lines = [chunk.strip() for chunk in receiver() if chunk.strip()]
    self.assertEqual(body_lines, [":ERROR: 503 'No policy with index 2'"])
# NOTE(review): the class is already decorated with unit.patch_policies();
# this per-method re-application is presumably a fresh-policy reset — confirm.
@unit.patch_policies()
def test_Receiver_with_frag_index_header(self):
    """The X-Backend-Ssync-Frag-Index header is parsed into rcvr.frag_index."""
    # update router post policy patch
    self.controller._diskfile_router = diskfile.DiskFileRouter(
        self.conf, self.controller.logger)
    req = swob.Request.blank(
        '/sda1/1',
        environ={'REQUEST_METHOD': 'SSYNC',
                 'HTTP_X_BACKEND_SSYNC_FRAG_INDEX': '7',
                 'HTTP_X_BACKEND_STORAGE_POLICY_INDEX': '1'},
        body=':MISSING_CHECK: START\r\n'
             ':MISSING_CHECK: END\r\n'
             ':UPDATES: START\r\n:UPDATES: END\r\n')
    rcvr = ssync_receiver.Receiver(self.controller, req)
    body_lines = [chunk.strip() for chunk in rcvr() if chunk.strip()]
    self.assertEqual(
        body_lines,
        [':MISSING_CHECK: START', ':MISSING_CHECK: END',
         ':UPDATES: START', ':UPDATES: END'])
    self.assertEqual(rcvr.policy, POLICIES[1])
    self.assertEqual(rcvr.frag_index, 7)
def test_SSYNC_replication_lock_fail(self):
    """A replication lock timeout is reported in-band and logged at debug."""
    def _mock(path):
        # Fake replication_lock that always times out after 0.01s.
        with exceptions.ReplicationLockTimeout(0.01, '/somewhere/' + path):
            eventlet.sleep(0.05)
    with mock.patch.object(
            self.controller._diskfile_router[POLICIES.legacy],
            'replication_lock', _mock):
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/sda1/1',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [":ERROR: 0 '0.01 seconds: /somewhere/sda1'"])
        self.controller.logger.debug.assert_called_once_with(
            'None/sda1/1 SSYNC LOCK TIMEOUT: 0.01 seconds: '
            '/somewhere/sda1')
def test_SSYNC_initial_path(self):
    """Path parsing: only /device/partition is accepted by SSYNC."""
    # Case 1: missing partition -> in-band 400, semaphore never touched.
    with mock.patch.object(
            self.controller, 'replication_semaphore') as \
            mocked_replication_semaphore:
        req = swob.Request.blank(
            '/device', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [":ERROR: 400 'Invalid path: /device'"])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(mocked_replication_semaphore.acquire.called)
        self.assertFalse(mocked_replication_semaphore.release.called)
    # Case 2: trailing slash (empty partition) -> same in-band 400.
    with mock.patch.object(
            self.controller, 'replication_semaphore') as \
            mocked_replication_semaphore:
        req = swob.Request.blank(
            '/device/', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [":ERROR: 400 'Invalid path: /device/'"])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(mocked_replication_semaphore.acquire.called)
        self.assertFalse(mocked_replication_semaphore.release.called)
    # Case 3: valid path; parsing succeeds (it then fails on the empty
    # body) and the semaphore is acquired non-blocking and released once.
    with mock.patch.object(
            self.controller, 'replication_semaphore') as \
            mocked_replication_semaphore:
        req = swob.Request.blank(
            '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"'])
        self.assertEqual(resp.status_int, 200)
        mocked_replication_semaphore.acquire.assert_called_once_with(0)
        mocked_replication_semaphore.release.assert_called_once_with()
    # Case 4: extra path segment -> in-band 400, semaphore untouched.
    with mock.patch.object(
            self.controller, 'replication_semaphore') as \
            mocked_replication_semaphore:
        req = swob.Request.blank(
            '/device/partition/junk',
            environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [":ERROR: 400 'Invalid path: /device/partition/junk'"])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(mocked_replication_semaphore.acquire.called)
        self.assertFalse(mocked_replication_semaphore.release.called)
def test_SSYNC_mount_check(self):
    """SSYNC honors mount_check: 507 when the device is unmounted."""
    # Case 1: mount_check disabled -> check_mount is never consulted.
    with contextlib.nested(
            mock.patch.object(
                self.controller, 'replication_semaphore'),
            mock.patch.object(
                self.controller._diskfile_router[POLICIES.legacy],
                'mount_check', False),
            mock.patch.object(
                constraints, 'check_mount', return_value=False)) as (
            mocked_replication_semaphore,
            mocked_mount_check,
            mocked_check_mount):
        req = swob.Request.blank(
            '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"'])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(mocked_check_mount.called)
    # Case 2: mount_check enabled and device unmounted -> in-band 507;
    # then with the device "mounted" the request proceeds past the check.
    with contextlib.nested(
            mock.patch.object(
                self.controller, 'replication_semaphore'),
            mock.patch.object(
                self.controller._diskfile_router[POLICIES.legacy],
                'mount_check', True),
            mock.patch.object(
                constraints, 'check_mount', return_value=False)) as (
            mocked_replication_semaphore,
            mocked_mount_check,
            mocked_check_mount):
        req = swob.Request.blank(
            '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [":ERROR: 507 '<html><h1>Insufficient Storage</h1><p>There "
             "was not enough space to save the resource. Drive: "
             "device</p></html>'"])
        self.assertEqual(resp.status_int, 200)
        mocked_check_mount.assert_called_once_with(
            self.controller._diskfile_router[POLICIES.legacy].devices,
            'device')
        # Flip the mock to "mounted" and retry within the same patch.
        mocked_check_mount.reset_mock()
        mocked_check_mount.return_value = True
        req = swob.Request.blank(
            '/device/partition', environ={'REQUEST_METHOD': 'SSYNC'})
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':ERROR: 0 "Looking for :MISSING_CHECK: START got \'\'"'])
        self.assertEqual(resp.status_int, 200)
        mocked_check_mount.assert_called_once_with(
            self.controller._diskfile_router[POLICIES.legacy].devices,
            'device')
    def test_SSYNC_Exception(self):
        """An exception mid-stream shuts the socket down and is logged.

        Malformed update content triggers an exception inside the
        receiver; the response still carries the missing-check lines
        plus an :ERROR: line, the underlying socket is shutdown_safe'd
        and closed, and the exception is logged with the remote
        address and request path.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in that exposes a mock socket, so the
            # test can verify the receiver's hard-disconnect behavior
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def get_socket(self):
                return self.mock_socket
        with mock.patch.object(
                ssync_receiver.eventlet.greenio, 'shutdown_safe') as \
                mock_shutdown_safe:
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\nBad content is here')
            req.remote_addr = '1.2.3.4'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 0 'Got no headers for Bad content is here'"])
            self.assertEqual(resp.status_int, 200)
            mock_shutdown_safe.assert_called_once_with(
                mock_wsgi_input.mock_socket)
            mock_wsgi_input.mock_socket.close.assert_called_once_with()
            self.controller.logger.exception.assert_called_once_with(
                '1.2.3.4/device/partition EXCEPTION in replication.Receiver')
    def test_SSYNC_Exception_Exception(self):
        """Exception handling survives a second exception while logging.

        Here str(remote_addr) itself raises, so the receiver cannot
        build the "addr/path" prefix; it must still close the socket
        and log a generic exception message rather than crash.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in exposing a mock socket (see
            # test_SSYNC_Exception)
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def get_socket(self):
                return self.mock_socket
        with mock.patch.object(
                ssync_receiver.eventlet.greenio, 'shutdown_safe') as \
                mock_shutdown_safe:
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\nBad content is here')
            # make the remote address unstringifiable to force the
            # secondary failure path
            req.remote_addr = mock.MagicMock()
            req.remote_addr.__str__ = mock.Mock(
                side_effect=Exception("can't stringify this"))
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END'])
            self.assertEqual(resp.status_int, 200)
            mock_shutdown_safe.assert_called_once_with(
                mock_wsgi_input.mock_socket)
            mock_wsgi_input.mock_socket.close.assert_called_once_with()
            self.controller.logger.exception.assert_called_once_with(
                'EXCEPTION in replication.Receiver')
    def test_MISSING_CHECK_timeout(self):
        """A slow missing-check line trips client_timeout.

        The wrapped wsgi.input sleeps longer than the 0.01s client
        timeout when the 'hash' line is read, so the receiver responds
        with a 408 error line, shuts the socket down, and logs a
        TIMEOUT error with the remote address and path.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in that stalls on the 'hash' line to
            # exceed the receiver's client_timeout
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def readline(self, sizehint=-1):
                line = StringIO.StringIO.readline(self)
                if line.startswith('hash'):
                    eventlet.sleep(0.1)
                return line
            def get_socket(self):
                return self.mock_socket
        self.controller.client_timeout = 0.01
        with mock.patch.object(
                ssync_receiver.eventlet.greenio, 'shutdown_safe') as \
                mock_shutdown_safe:
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/sda1/1',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n'
                     'hash ts\r\n'
                     ':MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n:UPDATES: END\r\n')
            req.remote_addr = '2.3.4.5'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 408 '0.01 seconds: missing_check line'"])
            self.assertEqual(resp.status_int, 200)
            self.assertTrue(mock_shutdown_safe.called)
            self.controller.logger.error.assert_called_once_with(
                '2.3.4.5/sda1/1 TIMEOUT in replication.Receiver: '
                '0.01 seconds: missing_check line')
    def test_MISSING_CHECK_other_exception(self):
        """A non-timeout exception during missing-check is reported.

        readline raises an arbitrary exception on the 'hash' line; the
        receiver answers with an :ERROR: 0 line carrying the message,
        shuts the socket down, and logs via logger.exception with the
        remote address and path.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in that raises while the 'hash' line
            # is being read
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def readline(self, sizehint=-1):
                line = StringIO.StringIO.readline(self)
                if line.startswith('hash'):
                    raise Exception('test exception')
                return line
            def get_socket(self):
                return self.mock_socket
        self.controller.client_timeout = 0.01
        with mock.patch.object(
                ssync_receiver.eventlet.greenio, 'shutdown_safe') as \
                mock_shutdown_safe:
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/sda1/1',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n'
                     'hash ts\r\n'
                     ':MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n:UPDATES: END\r\n')
            req.remote_addr = '3.4.5.6'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [":ERROR: 0 'test exception'"])
            self.assertEqual(resp.status_int, 200)
            self.assertTrue(mock_shutdown_safe.called)
            self.controller.logger.exception.assert_called_once_with(
                '3.4.5.6/sda1/1 EXCEPTION in replication.Receiver')
    def test_MISSING_CHECK_empty_list(self):
        """An empty missing-check list echoes the protocol markers.

        With no hash lines between START and END, the receiver replies
        with the bare missing-check and updates markers and logs
        nothing.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/sda1/1',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ':UPDATES: START', ':UPDATES: END'])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(self.controller.logger.error.called)
        self.assertFalse(self.controller.logger.exception.called)
    def test_MISSING_CHECK_have_none(self):
        """When the receiver has neither object, it wants both.

        Two "hash timestamp" lines are offered and nothing exists on
        disk, so both hashes come back in the missing-check response.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/sda1/1',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n' +
                 self.hash1 + ' ' + self.ts1 + '\r\n' +
                 self.hash2 + ' ' + self.ts2 + '\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START',
             self.hash1,
             self.hash2,
             ':MISSING_CHECK: END',
             ':UPDATES: START', ':UPDATES: END'])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(self.controller.logger.error.called)
        self.assertFalse(self.controller.logger.exception.called)
    def test_MISSING_CHECK_extra_line_parts(self):
        """Extra fields on missing-check lines are tolerated.

        Future protocol versions may append fields after the hash and
        timestamp; the receiver must ignore them and behave as if only
        "hash timestamp" had been sent.
        """
        # check that rx tolerates extra parts in missing check lines to
        # allow for protocol upgrades
        extra_1 = 'extra'
        extra_2 = 'multiple extra parts'
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/sda1/1',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n' +
                 self.hash1 + ' ' + self.ts1 + ' ' + extra_1 + '\r\n' +
                 self.hash2 + ' ' + self.ts2 + ' ' + extra_2 + '\r\n'
                 ':MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n:UPDATES: END\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START',
             self.hash1,
             self.hash2,
             ':MISSING_CHECK: END',
             ':UPDATES: START', ':UPDATES: END'])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(self.controller.logger.error.called)
        self.assertFalse(self.controller.logger.exception.called)
def test_MISSING_CHECK_have_one_exact(self):
object_dir = utils.storage_directory(
os.path.join(self.testdir, 'sda1',
diskfile.get_data_dir(POLICIES[0])),
'1', self.hash1)
utils.mkdirs(object_dir)
fp = open(os.path.join(object_dir, self.ts1 + '.data'), 'w+')
fp.write('1')
fp.flush()
self.metadata1['Content-Length'] = '1'
diskfile.write_metadata(fp, self.metadata1)
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/sda1/1',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n' +
self.hash1 + ' ' + self.ts1 + '\r\n' +
self.hash2 + ' ' + self.ts2 + '\r\n'
':MISSING_CHECK: END\r\n'
':UPDATES: START\r\n:UPDATES: END\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[':MISSING_CHECK: START',
self.hash2,
':MISSING_CHECK: END',
':UPDATES: START', ':UPDATES: END'])
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.error.called)
self.assertFalse(self.controller.logger.exception.called)
def test_MISSING_CHECK_storage_policy(self):
# update router post policy patch
self.controller._diskfile_router = diskfile.DiskFileRouter(
self.conf, self.controller.logger)
object_dir = utils.storage_directory(
os.path.join(self.testdir, 'sda1',
diskfile.get_data_dir(POLICIES[1])),
'1', self.hash1)
utils.mkdirs(object_dir)
fp = open(os.path.join(object_dir, self.ts1 + '.data'), 'w+')
fp.write('1')
fp.flush()
self.metadata1['Content-Length'] = '1'
diskfile.write_metadata(fp, self.metadata1)
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/sda1/1',
environ={'REQUEST_METHOD': 'SSYNC',
'HTTP_X_BACKEND_STORAGE_POLICY_INDEX': '1'},
body=':MISSING_CHECK: START\r\n' +
self.hash1 + ' ' + self.ts1 + '\r\n' +
self.hash2 + ' ' + self.ts2 + '\r\n'
':MISSING_CHECK: END\r\n'
':UPDATES: START\r\n:UPDATES: END\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[':MISSING_CHECK: START',
self.hash2,
':MISSING_CHECK: END',
':UPDATES: START', ':UPDATES: END'])
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.error.called)
self.assertFalse(self.controller.logger.exception.called)
def test_MISSING_CHECK_have_one_newer(self):
object_dir = utils.storage_directory(
os.path.join(self.testdir, 'sda1',
diskfile.get_data_dir(POLICIES[0])),
'1', self.hash1)
utils.mkdirs(object_dir)
newer_ts1 = utils.normalize_timestamp(float(self.ts1) + 1)
self.metadata1['X-Timestamp'] = newer_ts1
fp = open(os.path.join(object_dir, newer_ts1 + '.data'), 'w+')
fp.write('1')
fp.flush()
self.metadata1['Content-Length'] = '1'
diskfile.write_metadata(fp, self.metadata1)
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/sda1/1',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n' +
self.hash1 + ' ' + self.ts1 + '\r\n' +
self.hash2 + ' ' + self.ts2 + '\r\n'
':MISSING_CHECK: END\r\n'
':UPDATES: START\r\n:UPDATES: END\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[':MISSING_CHECK: START',
self.hash2,
':MISSING_CHECK: END',
':UPDATES: START', ':UPDATES: END'])
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.error.called)
self.assertFalse(self.controller.logger.exception.called)
def test_MISSING_CHECK_have_one_older(self):
object_dir = utils.storage_directory(
os.path.join(self.testdir, 'sda1',
diskfile.get_data_dir(POLICIES[0])),
'1', self.hash1)
utils.mkdirs(object_dir)
older_ts1 = utils.normalize_timestamp(float(self.ts1) - 1)
self.metadata1['X-Timestamp'] = older_ts1
fp = open(os.path.join(object_dir, older_ts1 + '.data'), 'w+')
fp.write('1')
fp.flush()
self.metadata1['Content-Length'] = '1'
diskfile.write_metadata(fp, self.metadata1)
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/sda1/1',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n' +
self.hash1 + ' ' + self.ts1 + '\r\n' +
self.hash2 + ' ' + self.ts2 + '\r\n'
':MISSING_CHECK: END\r\n'
':UPDATES: START\r\n:UPDATES: END\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[':MISSING_CHECK: START',
self.hash1,
self.hash2,
':MISSING_CHECK: END',
':UPDATES: START', ':UPDATES: END'])
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.error.called)
self.assertFalse(self.controller.logger.exception.called)
    def test_UPDATES_timeout(self):
        """A slow updates line trips client_timeout after missing-check.

        readline stalls on the DELETE subrequest line; the response
        carries the missing-check markers followed by a 408 error line,
        the socket is shut down and closed, and a TIMEOUT error is
        logged with the remote address and path.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in that stalls on the DELETE line to
            # exceed the receiver's client_timeout
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def readline(self, sizehint=-1):
                line = StringIO.StringIO.readline(self)
                if line.startswith('DELETE'):
                    eventlet.sleep(0.1)
                return line
            def get_socket(self):
                return self.mock_socket
        self.controller.client_timeout = 0.01
        with mock.patch.object(
                ssync_receiver.eventlet.greenio, 'shutdown_safe') as \
                mock_shutdown_safe:
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n'
                     'X-Timestamp: 1364456113.76334\r\n'
                     '\r\n'
                     ':UPDATES: END\r\n')
            req.remote_addr = '2.3.4.5'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 408 '0.01 seconds: updates line'"])
            self.assertEqual(resp.status_int, 200)
            mock_shutdown_safe.assert_called_once_with(
                mock_wsgi_input.mock_socket)
            mock_wsgi_input.mock_socket.close.assert_called_once_with()
            self.controller.logger.error.assert_called_once_with(
                '2.3.4.5/device/partition TIMEOUT in replication.Receiver: '
                '0.01 seconds: updates line')
    def test_UPDATES_other_exception(self):
        """A non-timeout exception during updates is reported.

        readline raises on the DELETE subrequest line; the response
        carries the missing-check markers plus an :ERROR: 0 line with
        the exception message, the socket is shut down and closed, and
        the exception is logged with the remote address and path.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in that raises while the DELETE line
            # is being read
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def readline(self, sizehint=-1):
                line = StringIO.StringIO.readline(self)
                if line.startswith('DELETE'):
                    raise Exception('test exception')
                return line
            def get_socket(self):
                return self.mock_socket
        self.controller.client_timeout = 0.01
        with mock.patch.object(
                ssync_receiver.eventlet.greenio, 'shutdown_safe') as \
                mock_shutdown_safe:
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n'
                     'X-Timestamp: 1364456113.76334\r\n'
                     '\r\n'
                     ':UPDATES: END\r\n')
            req.remote_addr = '3.4.5.6'
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 0 'test exception'"])
            self.assertEqual(resp.status_int, 200)
            mock_shutdown_safe.assert_called_once_with(
                mock_wsgi_input.mock_socket)
            mock_wsgi_input.mock_socket.close.assert_called_once_with()
            self.controller.logger.exception.assert_called_once_with(
                '3.4.5.6/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_no_problems_no_hard_disconnect(self):
        """A clean SSYNC exchange never hard-disconnects the client.

        With a well-formed DELETE subrequest and a successful handler,
        neither shutdown_safe nor socket.close must be called.
        """
        class _Wrapper(StringIO.StringIO):
            # wsgi.input stand-in exposing a mock socket so the test
            # can assert the socket was NOT touched
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
                self.mock_socket = mock.MagicMock()
            def get_socket(self):
                return self.mock_socket
        self.controller.client_timeout = 0.01
        with contextlib.nested(
                mock.patch.object(
                    ssync_receiver.eventlet.greenio, 'shutdown_safe'),
                mock.patch.object(
                    self.controller, 'DELETE',
                    return_value=swob.HTTPNoContent())) as (
                mock_shutdown_safe, mock_delete):
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n'
                     'X-Timestamp: 1364456113.76334\r\n'
                     '\r\n'
                     ':UPDATES: END\r\n')
            mock_wsgi_input = _Wrapper(req.body)
            req.environ['wsgi.input'] = mock_wsgi_input
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ':UPDATES: START', ':UPDATES: END'])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(mock_shutdown_safe.called)
            self.assertFalse(mock_wsgi_input.mock_socket.close.called)
    def test_UPDATES_bad_subrequest_line(self):
        """A subrequest line without "METHOD path" is rejected.

        Unpacking the line into (method, path) fails, producing the
        "need more than 1 value to unpack" error line and a logged
        exception — both when it is the first subrequest and when it
        follows a valid one.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'bad_subrequest_line\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'need more than 1 value to unpack'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
        # same failure after a valid DELETE subrequest
        with mock.patch.object(
                self.controller, 'DELETE',
                return_value=swob.HTTPNoContent()):
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n'
                     'X-Timestamp: 1364456113.76334\r\n'
                     '\r\n'
                     'bad_subrequest_line2')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 0 'need more than 1 value to unpack'"])
            self.assertEqual(resp.status_int, 200)
            self.controller.logger.exception.assert_called_once_with(
                'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_no_headers(self):
        """A subrequest with no header lines at all is rejected.

        The stream ends right after 'DELETE /a/c/o', so the receiver
        reports "Got no headers for DELETE /a/c/o" and logs the
        exception.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'Got no headers for DELETE /a/c/o'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_bad_headers(self):
        """A header line without a "Name: value" colon split is rejected.

        Splitting 'Bad-Header Test' into (name, value) fails with
        "need more than 1 value to unpack" — whether it is the first
        header or follows a valid one — and the exception is logged.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n'
                 'Bad-Header Test\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'need more than 1 value to unpack'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
        # same failure when a good header precedes the bad one
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n'
                 'Good-Header: Test\r\n'
                 'Bad-Header Test\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'need more than 1 value to unpack'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_bad_content_length(self):
        """A non-integer Content-Length on a PUT subrequest is rejected.

        int('a') fails, so the response carries the int() ValueError
        message as an error line and the exception is logged.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o\r\n'
                 'Content-Length: a\r\n\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ':ERROR: 0 "invalid literal for int() with base 10: \'a\'"'])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_content_length_with_DELETE(self):
        """A DELETE subrequest must not carry a Content-Length header.

        Sending one yields the "DELETE subrequest with content-length"
        error line and a logged exception.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'DELETE /a/c/o\r\n'
                 'Content-Length: 1\r\n\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'DELETE subrequest with content-length /a/c/o'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_no_content_length_with_PUT(self):
        """A PUT subrequest must carry a Content-Length header.

        Omitting it yields the "No content-length sent" error line and
        a logged exception.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o\r\n\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'No content-length sent for PUT /a/c/o'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_early_termination(self):
        """A PUT body shorter than its Content-Length is an error.

        Content-Length says 1 byte but the stream ends, producing the
        "Early termination" error line and a logged exception.
        """
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o\r\n'
                 'Content-Length: 1\r\n\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'Early termination for PUT /a/c/o'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
    def test_UPDATES_failures(self):
        """Subrequest failures are tolerated up to threshold and ratio.

        Uses a DELETE stub that succeeds only for /a/c/works.  Four
        scenarios are exercised against threshold=4, ratio=1.5:
        below-threshold failures finish the stream with a 500 summary;
        at-threshold failures with no successes abort early; a
        failure/success ratio under the limit finishes with a 500
        summary; a ratio over the limit aborts early with "Too many".
        """
        @server.public
        def _DELETE(request):
            # succeed only for the 'works' object; everything else 500s
            if request.path == '/device/partition/a/c/works':
                return swob.HTTPOk()
            else:
                return swob.HTTPInternalServerError()
        # failures never hit threshold
        with mock.patch.object(self.controller, 'DELETE', _DELETE):
            self.controller.replication_failure_threshold = 4
            self.controller.replication_failure_ratio = 1.5
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 500 'ERROR: With :UPDATES: 3 failures to 0 "
                 "successes'"])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(self.controller.logger.exception.called)
            self.assertFalse(self.controller.logger.error.called)
        # failures hit threshold and no successes, so ratio is like infinity
        with mock.patch.object(self.controller, 'DELETE', _DELETE):
            self.controller.replication_failure_threshold = 4
            self.controller.replication_failure_ratio = 1.5
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     ':UPDATES: END\r\n')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 0 'Too many 4 failures to 0 successes'"])
            self.assertEqual(resp.status_int, 200)
            self.controller.logger.exception.assert_called_once_with(
                'None/device/partition EXCEPTION in replication.Receiver')
            self.assertFalse(self.controller.logger.error.called)
        # failures hit threshold and ratio hits 1.33333333333
        with mock.patch.object(self.controller, 'DELETE', _DELETE):
            self.controller.replication_failure_threshold = 4
            self.controller.replication_failure_ratio = 1.5
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/works\r\n\r\n'
                     'DELETE /a/c/works\r\n\r\n'
                     'DELETE /a/c/works\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     ':UPDATES: END\r\n')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 500 'ERROR: With :UPDATES: 4 failures to 3 "
                 "successes'"])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(self.controller.logger.exception.called)
            self.assertFalse(self.controller.logger.error.called)
        # failures hit threshold and ratio hits 2.0
        with mock.patch.object(self.controller, 'DELETE', _DELETE):
            self.controller.replication_failure_threshold = 4
            self.controller.replication_failure_ratio = 1.5
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/works\r\n\r\n'
                     'DELETE /a/c/works\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     'DELETE /a/c/o\r\n\r\n'
                     ':UPDATES: END\r\n')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ":ERROR: 0 'Too many 4 failures to 2 successes'"])
            self.assertEqual(resp.status_int, 200)
            self.controller.logger.exception.assert_called_once_with(
                'None/device/partition EXCEPTION in replication.Receiver')
            self.assertFalse(self.controller.logger.error.called)
    def test_UPDATES_PUT(self):
        """A PUT subrequest is dispatched with replication headers.

        Captures the internal PUT and verifies its path, content
        length, body, and that the receiver injected Host,
        X-Backend-Storage-Policy-Index (defaulting to 0),
        X-Backend-Replication and X-Backend-Replication-Headers
        alongside the headers taken from the wire.
        """
        _PUT_request = [None]
        @server.public
        def _PUT(request):
            # capture the subrequest and consume its body
            _PUT_request[0] = request
            request.read_body = request.environ['wsgi.input'].read()
            return swob.HTTPOk()
        with mock.patch.object(self.controller, 'PUT', _PUT):
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'PUT /a/c/o\r\n'
                     'Content-Length: 1\r\n'
                     'X-Timestamp: 1364456113.12344\r\n'
                     'X-Object-Meta-Test1: one\r\n'
                     'Content-Encoding: gzip\r\n'
                     'Specialty-Header: value\r\n'
                     '\r\n'
                     '1')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ':UPDATES: START', ':UPDATES: END'])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(self.controller.logger.exception.called)
            self.assertFalse(self.controller.logger.error.called)
            req = _PUT_request[0]
            self.assertEqual(req.path, '/device/partition/a/c/o')
            self.assertEqual(req.content_length, 1)
            self.assertEqual(req.headers, {
                'Content-Length': '1',
                'X-Timestamp': '1364456113.12344',
                'X-Object-Meta-Test1': 'one',
                'Content-Encoding': 'gzip',
                'Specialty-Header': 'value',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': (
                    'content-length x-timestamp x-object-meta-test1 '
                    'content-encoding specialty-header')})
            self.assertEqual(req.read_body, '1')
    def test_UPDATES_with_storage_policy(self):
        """A PUT subrequest inherits the request's storage policy index.

        Same as test_UPDATES_PUT but with
        X-Backend-Storage-Policy-Index: 1 on the SSYNC request; the
        captured subrequest must carry policy index '1'.
        """
        # update router post policy patch
        self.controller._diskfile_router = diskfile.DiskFileRouter(
            self.conf, self.controller.logger)
        _PUT_request = [None]
        @server.public
        def _PUT(request):
            # capture the subrequest and consume its body
            _PUT_request[0] = request
            request.read_body = request.environ['wsgi.input'].read()
            return swob.HTTPOk()
        with mock.patch.object(self.controller, 'PUT', _PUT):
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC',
                         'HTTP_X_BACKEND_STORAGE_POLICY_INDEX': '1'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'PUT /a/c/o\r\n'
                     'Content-Length: 1\r\n'
                     'X-Timestamp: 1364456113.12344\r\n'
                     'X-Object-Meta-Test1: one\r\n'
                     'Content-Encoding: gzip\r\n'
                     'Specialty-Header: value\r\n'
                     '\r\n'
                     '1')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ':UPDATES: START', ':UPDATES: END'])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(self.controller.logger.exception.called)
            self.assertFalse(self.controller.logger.error.called)
            req = _PUT_request[0]
            self.assertEqual(req.path, '/device/partition/a/c/o')
            self.assertEqual(req.content_length, 1)
            self.assertEqual(req.headers, {
                'Content-Length': '1',
                'X-Timestamp': '1364456113.12344',
                'X-Object-Meta-Test1': 'one',
                'Content-Encoding': 'gzip',
                'Specialty-Header': 'value',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '1',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': (
                    'content-length x-timestamp x-object-meta-test1 '
                    'content-encoding specialty-header')})
            self.assertEqual(req.read_body, '1')
    def test_UPDATES_DELETE(self):
        """A DELETE subrequest is dispatched with replication headers.

        Captures the internal DELETE and verifies its path and that
        the receiver injected Host, the default policy index,
        X-Backend-Replication and X-Backend-Replication-Headers.
        """
        _DELETE_request = [None]
        @server.public
        def _DELETE(request):
            # capture the subrequest
            _DELETE_request[0] = request
            return swob.HTTPOk()
        with mock.patch.object(self.controller, 'DELETE', _DELETE):
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'DELETE /a/c/o\r\n'
                     'X-Timestamp: 1364456113.76334\r\n'
                     '\r\n')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ':UPDATES: START', ':UPDATES: END'])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(self.controller.logger.exception.called)
            self.assertFalse(self.controller.logger.error.called)
            req = _DELETE_request[0]
            self.assertEqual(req.path, '/device/partition/a/c/o')
            self.assertEqual(req.headers, {
                'X-Timestamp': '1364456113.76334',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': 'x-timestamp'})
    def test_UPDATES_BONK(self):
        """An unknown subrequest method is rejected before dispatch.

        Even though a BONK handler exists on the controller, the
        receiver only allows its whitelisted methods: the handler is
        never called, an "Invalid subrequest method" error line is
        returned, and the exception is logged.
        """
        _BONK_request = [None]
        @server.public
        def _BONK(request):
            # must never be invoked
            _BONK_request[0] = request
            return swob.HTTPOk()
        self.controller.BONK = _BONK
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'BONK /a/c/o\r\n'
                 'X-Timestamp: 1364456113.76334\r\n'
                 '\r\n')
        resp = req.get_response(self.controller)
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 0 'Invalid subrequest method BONK'"])
        self.assertEqual(resp.status_int, 200)
        self.controller.logger.exception.assert_called_once_with(
            'None/device/partition EXCEPTION in replication.Receiver')
        self.assertEqual(_BONK_request[0], None)
    def test_UPDATES_multiple(self):
        """Mixed PUT/DELETE subrequests are dispatched in wire order.

        Six subrequests (PUTs with bodies — one body even containing
        '\\r\\n' — interleaved with DELETEs) are captured and checked
        one by one for method, path, headers, and body, confirming the
        receiver frames each subrequest correctly by Content-Length.
        """
        _requests = []
        @server.public
        def _PUT(request):
            # capture the subrequest and consume its body
            _requests.append(request)
            request.read_body = request.environ['wsgi.input'].read()
            return swob.HTTPOk()
        @server.public
        def _DELETE(request):
            # capture the subrequest
            _requests.append(request)
            return swob.HTTPOk()
        with contextlib.nested(
                mock.patch.object(self.controller, 'PUT', _PUT),
                mock.patch.object(self.controller, 'DELETE', _DELETE)):
            self.controller.logger = mock.MagicMock()
            req = swob.Request.blank(
                '/device/partition',
                environ={'REQUEST_METHOD': 'SSYNC'},
                body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                     ':UPDATES: START\r\n'
                     'PUT /a/c/o1\r\n'
                     'Content-Length: 1\r\n'
                     'X-Timestamp: 1364456113.00001\r\n'
                     'X-Object-Meta-Test1: one\r\n'
                     'Content-Encoding: gzip\r\n'
                     'Specialty-Header: value\r\n'
                     '\r\n'
                     '1'
                     'DELETE /a/c/o2\r\n'
                     'X-Timestamp: 1364456113.00002\r\n'
                     '\r\n'
                     'PUT /a/c/o3\r\n'
                     'Content-Length: 3\r\n'
                     'X-Timestamp: 1364456113.00003\r\n'
                     '\r\n'
                     '123'
                     'PUT /a/c/o4\r\n'
                     'Content-Length: 4\r\n'
                     'X-Timestamp: 1364456113.00004\r\n'
                     '\r\n'
                     '1\r\n4'
                     'DELETE /a/c/o5\r\n'
                     'X-Timestamp: 1364456113.00005\r\n'
                     '\r\n'
                     'DELETE /a/c/o6\r\n'
                     'X-Timestamp: 1364456113.00006\r\n'
                     '\r\n')
            resp = req.get_response(self.controller)
            self.assertEqual(
                self.body_lines(resp.body),
                [':MISSING_CHECK: START', ':MISSING_CHECK: END',
                 ':UPDATES: START', ':UPDATES: END'])
            self.assertEqual(resp.status_int, 200)
            self.assertFalse(self.controller.logger.exception.called)
            self.assertFalse(self.controller.logger.error.called)
            req = _requests.pop(0)
            self.assertEqual(req.method, 'PUT')
            self.assertEqual(req.path, '/device/partition/a/c/o1')
            self.assertEqual(req.content_length, 1)
            self.assertEqual(req.headers, {
                'Content-Length': '1',
                'X-Timestamp': '1364456113.00001',
                'X-Object-Meta-Test1': 'one',
                'Content-Encoding': 'gzip',
                'Specialty-Header': 'value',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': (
                    'content-length x-timestamp x-object-meta-test1 '
                    'content-encoding specialty-header')})
            self.assertEqual(req.read_body, '1')
            req = _requests.pop(0)
            self.assertEqual(req.method, 'DELETE')
            self.assertEqual(req.path, '/device/partition/a/c/o2')
            self.assertEqual(req.headers, {
                'X-Timestamp': '1364456113.00002',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': 'x-timestamp'})
            req = _requests.pop(0)
            self.assertEqual(req.method, 'PUT')
            self.assertEqual(req.path, '/device/partition/a/c/o3')
            self.assertEqual(req.content_length, 3)
            self.assertEqual(req.headers, {
                'Content-Length': '3',
                'X-Timestamp': '1364456113.00003',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': (
                    'content-length x-timestamp')})
            self.assertEqual(req.read_body, '123')
            req = _requests.pop(0)
            self.assertEqual(req.method, 'PUT')
            self.assertEqual(req.path, '/device/partition/a/c/o4')
            self.assertEqual(req.content_length, 4)
            self.assertEqual(req.headers, {
                'Content-Length': '4',
                'X-Timestamp': '1364456113.00004',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': (
                    'content-length x-timestamp')})
            self.assertEqual(req.read_body, '1\r\n4')
            req = _requests.pop(0)
            self.assertEqual(req.method, 'DELETE')
            self.assertEqual(req.path, '/device/partition/a/c/o5')
            self.assertEqual(req.headers, {
                'X-Timestamp': '1364456113.00005',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': 'x-timestamp'})
            req = _requests.pop(0)
            self.assertEqual(req.method, 'DELETE')
            self.assertEqual(req.path, '/device/partition/a/c/o6')
            self.assertEqual(req.headers, {
                'X-Timestamp': '1364456113.00006',
                'Host': 'localhost:80',
                'X-Backend-Storage-Policy-Index': '0',
                'X-Backend-Replication': 'True',
                'X-Backend-Replication-Headers': 'x-timestamp'})
            self.assertEqual(_requests, [])
    def test_UPDATES_subreq_does_not_read_all(self):
        """Verify the SSYNC receiver drains an unread subrequest body.

        Each PUT handler below deliberately reads only 2 of the body bytes
        and fails with a 500; the receiver must still consume the rest of
        that subrequest's body so the following subrequest parses cleanly.
        """
        # This tests that if a SSYNC subrequest fails and doesn't read
        # all the subrequest body that it will read and throw away the rest of
        # the body before moving on to the next subrequest.
        # If you comment out the part in ssync_receiver where it does:
        #     for junk in subreq.environ['wsgi.input']:
        #         pass
        # You can then see this test fail.
        _requests = []
        @server.public
        def _PUT(request):
            _requests.append(request)
            # Deliberately just reading up to first 2 bytes.
            request.read_body = request.environ['wsgi.input'].read(2)
            return swob.HTTPInternalServerError()
        # Wrapper that ignores the readline size hint so the receiver's
        # line-based protocol parser sees unconstrained readline() calls.
        # NOTE(review): StringIO here is the module-style (py2/six) import
        # made elsewhere in this file.
        class _IgnoreReadlineHint(StringIO.StringIO):
            def __init__(self, value):
                StringIO.StringIO.__init__(self, value)
            def readline(self, hint=-1):
                return StringIO.StringIO.readline(self)
        self.controller.PUT = _PUT
        # Small chunk size forces multiple reads per subrequest body.
        self.controller.network_chunk_size = 2
        self.controller.logger = mock.MagicMock()
        req = swob.Request.blank(
            '/device/partition',
            environ={'REQUEST_METHOD': 'SSYNC'},
            body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
                 ':UPDATES: START\r\n'
                 'PUT /a/c/o1\r\n'
                 'Content-Length: 3\r\n'
                 'X-Timestamp: 1364456113.00001\r\n'
                 '\r\n'
                 '123'
                 'PUT /a/c/o2\r\n'
                 'Content-Length: 1\r\n'
                 'X-Timestamp: 1364456113.00002\r\n'
                 '\r\n'
                 '1')
        req.environ['wsgi.input'] = _IgnoreReadlineHint(req.body)
        resp = req.get_response(self.controller)
        # Both PUTs fail (handler returns 500), so the receiver reports
        # 2 failures; the SSYNC response itself is still a 200.
        self.assertEqual(
            self.body_lines(resp.body),
            [':MISSING_CHECK: START', ':MISSING_CHECK: END',
             ":ERROR: 500 'ERROR: With :UPDATES: 2 failures to 0 successes'"])
        self.assertEqual(resp.status_int, 200)
        self.assertFalse(self.controller.logger.exception.called)
        self.assertFalse(self.controller.logger.error.called)
        # First subrequest: only 2 of its 3 body bytes were read by _PUT.
        req = _requests.pop(0)
        self.assertEqual(req.path, '/device/partition/a/c/o1')
        self.assertEqual(req.content_length, 3)
        self.assertEqual(req.headers, {
            'Content-Length': '3',
            'X-Timestamp': '1364456113.00001',
            'Host': 'localhost:80',
            'X-Backend-Storage-Policy-Index': '0',
            'X-Backend-Replication': 'True',
            'X-Backend-Replication-Headers': (
                'content-length x-timestamp')})
        self.assertEqual(req.read_body, '12')
        # Second subrequest parsed correctly only because the receiver
        # drained the unread tail ('3') of the first body.
        req = _requests.pop(0)
        self.assertEqual(req.path, '/device/partition/a/c/o2')
        self.assertEqual(req.content_length, 1)
        self.assertEqual(req.headers, {
            'Content-Length': '1',
            'X-Timestamp': '1364456113.00002',
            'Host': 'localhost:80',
            'X-Backend-Storage-Policy-Index': '0',
            'X-Backend-Replication': 'True',
            'X-Backend-Replication-Headers': (
                'content-length x-timestamp')})
        self.assertEqual(req.read_body, '1')
        self.assertEqual(_requests, [])
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 44.189737
| 79
| 0.548797
|
4a030d0d10693ccc0dc01baebecedeef6667147f
| 298
|
py
|
Python
|
python_to_you/domain/users/user.py
|
jacksonsr45/python_to_you
|
f0016e0450f3f2a4ba1f592baff8a9c28ffeaec7
|
[
"MIT"
] | 1
|
2021-05-11T12:09:00.000Z
|
2021-05-11T12:09:00.000Z
|
python_to_you/domain/users/user.py
|
jacksonsr45/python_to_you
|
f0016e0450f3f2a4ba1f592baff8a9c28ffeaec7
|
[
"MIT"
] | null | null | null |
python_to_you/domain/users/user.py
|
jacksonsr45/python_to_you
|
f0016e0450f3f2a4ba1f592baff8a9c28ffeaec7
|
[
"MIT"
] | null | null | null |
from python_to_you.models import User
class User:
    """Domain-layer user service exposing CRUD-style entry points.

    NOTE(review): all methods other than ``__init__`` are unimplemented
    stubs (`...`) in the original; they are kept as stubs here.
    """

    def __init__(self):
        # Bug fix: this class's name shadows the module-level
        # ``from python_to_you.models import User`` import, so the original
        # ``self.user = User`` bound this service class itself rather than
        # the ORM model. Import the model under an alias, locally, so the
        # correct class is stored.
        from python_to_you.models import User as UserModel
        self.user = UserModel

    def get(self):
        # TODO: implement user retrieval.
        ...

    def create_account(self):
        # TODO: implement account creation.
        ...

    def register_user(self):
        # TODO: implement user registration.
        ...

    def update(self):
        # TODO: implement user update.
        ...

    def delete(self):
        # TODO: implement user deletion.
        ...
| 11.461538
| 37
| 0.489933
|
4a030d39aef78d48d87925f1f271d313d1907fdd
| 1,730
|
py
|
Python
|
utils/auth.py
|
Misschl/flask-fresh
|
df17fd377b9e27aaad9fe0c5582c56098d09068c
|
[
"Apache-2.0"
] | null | null | null |
utils/auth.py
|
Misschl/flask-fresh
|
df17fd377b9e27aaad9fe0c5582c56098d09068c
|
[
"Apache-2.0"
] | null | null | null |
utils/auth.py
|
Misschl/flask-fresh
|
df17fd377b9e27aaad9fe0c5582c56098d09068c
|
[
"Apache-2.0"
] | 1
|
2020-12-21T14:01:53.000Z
|
2020-12-21T14:01:53.000Z
|
from utils.extentions import db
from functools import wraps
from flask import session, redirect
import flask
from config import LOGIN_URL, AUTH_USER_MODEL
import importlib
def login(user):
    """Mark *user* as logged in by storing their id in the session."""
    session['login'] = user.id
def logout():
    """Log the current user out by discarding all session state."""
    session.clear()
def initialize_user_model():
    """Resolve AUTH_USER_MODEL ('app.ClassName') to the actual model class."""
    app_name, class_name = AUTH_USER_MODEL.split('.')
    models_module = importlib.import_module('apps.%s.models' % app_name)
    return getattr(models_module, class_name)
def get_current_user():
    """Return the user bound to this session, or an AnonymousUser."""
    user_id = session.get('login')
    if not user_id:
        return AnonymousUser()
    user_model = initialize_user_model()
    return db.session.query(user_model).filter(user_model.id == user_id).first()
class AnonymousUser(object):
    """Null-object stand-in for a visitor who is not logged in."""

    # An anonymous visitor has no database identity.
    id = None
    pk = None
    username = ''

    @property
    def is_authenticated(self):
        """An anonymous user is never authenticated."""
        return False

    @property
    def is_anonymous(self):
        """Always True for this null-object."""
        return True
class Request(object):
    """Proxy around ``flask.request`` that also exposes a ``user`` property."""

    def __init__(self):
        # flask.request is itself a context-local proxy, so holding it here
        # stays valid across requests.
        self._request = flask.request

    def __getattr__(self, item):
        # Only called when normal lookup fails: delegate unknown attributes
        # to the underlying flask request object.
        try:
            return getattr(self._request, item)
        except AttributeError:
            # Direct __getattribute__ does not re-enter __getattr__; for a
            # truly missing attribute this simply raises AttributeError.
            return self.__getattribute__(item)

    @property
    def user(self):
        """User for the current session (may be an AnonymousUser)."""
        user = get_current_user()
        return user
def login_required(func):
    """View decorator that redirects anonymous visitors to LOGIN_URL.

    The original path is passed along as ``?next=`` unless the visitor is
    already on the login page.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        current = get_current_user()
        if not current.is_authenticated:
            next_url = request.path
            if next_url != LOGIN_URL:
                redirect_url = '%s?next=%s' % (LOGIN_URL, next_url)
            else:
                redirect_url = LOGIN_URL
            return redirect(redirect_url)
        return func(*args, **kwargs)
    return wrapper


# Module-level proxy instance used by views and by login_required above.
request = Request()
| 21.097561
| 103
| 0.645665
|
4a030e72686480a11173b730a814d96b23f696b3
| 1,607
|
py
|
Python
|
lintreview/tools/jsonlint.py
|
esoergel/lint-review
|
3c93bee30259825653853b6d2c322d0f92e34e43
|
[
"MIT"
] | null | null | null |
lintreview/tools/jsonlint.py
|
esoergel/lint-review
|
3c93bee30259825653853b6d2c322d0f92e34e43
|
[
"MIT"
] | null | null | null |
lintreview/tools/jsonlint.py
|
esoergel/lint-review
|
3c93bee30259825653853b6d2c322d0f92e34e43
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Jsonlint(Tool):
    """Lint review tool that runs the external ``jsonlint`` binary."""

    name = 'jsonlint'

    def check_dependencies(self):
        """
        See if jsonlint is on the PATH
        """
        return in_path('jsonlint')

    def match_file(self, filename):
        """Return True for files with a ``.json`` extension."""
        base = os.path.basename(filename)
        name, ext = os.path.splitext(base)
        return ext == '.json'

    def process_files(self, files):
        """
        Run code checks with jsonlint.
        Only a single process is made for all files
        to save resources.
        Configuration is not supported at this time
        """
        log.debug('Processing %s files with %s', files, self.name)
        command = ['jsonlint']
        command += files
        output = run_command(command, split=True, ignore_error=True)
        if not output:
            log.debug('No jsonlint errors found.')
            return False
        for line in output:
            # Robustness fix: skip blank lines — ``line[0]`` and
            # _parse_line() would both raise on an empty string.
            if not line:
                continue
            if (line[0] == ' ' or
                    line.find(': has errors') >= 0 or
                    line.find(': ok') >= 0):
                continue
            # Renamed from ``line`` to avoid shadowing the loop variable.
            filename, line_number, error = self._parse_line(line)
            self.problems.add(filename, line_number, error)

    def _parse_line(self, line):
        """
        jsonlint only generates results as stdout.
        Parse the output for real data.
        """
        # Expected shape: "<file>:<line>:<col>: <message>"; the trailing
        # slice strips the leading space and trailing newline/char.
        parts = line.split(':', 3)
        return (parts[0], int(parts[1]), parts[3][1:-1])
| 27.706897
| 68
| 0.579963
|
4a03106f3f23cef732acc3511fc74ad991561d71
| 22,370
|
py
|
Python
|
contrib/devtools/copyright_header.py
|
qogecoin/qogecoin
|
fce42076f1a2746525374f50f35939392f37ca84
|
[
"MIT"
] | 9
|
2021-10-30T01:01:50.000Z
|
2022-02-10T02:20:44.000Z
|
contrib/devtools/copyright_header.py
|
qogecoin/qogecoin
|
fce42076f1a2746525374f50f35939392f37ca84
|
[
"MIT"
] | 4
|
2021-10-17T19:59:16.000Z
|
2021-11-04T19:11:25.000Z
|
contrib/devtools/copyright_header.py
|
qogecoin/qogecoin
|
fce42076f1a2746525374f50f35939392f37ca84
|
[
"MIT"
] | 7
|
2021-11-01T09:09:41.000Z
|
2022-03-23T02:47:30.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2016-2021 The Bitcoin and Qogecoin Core Authors
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import re
import fnmatch
import sys
import subprocess
import datetime
import os
################################################################################
# file filtering
################################################################################

EXCLUDE = [
    # auto generated:
    'src/qt/qogecoinstrings.cpp',
    'src/chainparamsseeds.h',
    # other external copyrights:
    'src/reverse_iterator.h',
    'src/test/fuzz/FuzzedDataProvider.h',
    'src/tinyformat.h',
    'src/bench/nanobench.h',
    'test/functional/test_framework/bignum.py',
    # python init:
    '*__init__.py',
]
EXCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in EXCLUDE]))
EXCLUDE_DIRS = [
    # git subtrees
    "src/crypto/ctaes/",
    "src/leveldb/",
    "src/minisketch",
    "src/secp256k1/",
    "src/univalue/",
    "src/crc32c/",
]
INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.mm', '*.py', '*.sh', '*.bash-completion']
INCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in INCLUDE]))


def applies_to_file(filename):
    """True when *filename* passes the EXCLUDE_DIRS/EXCLUDE/INCLUDE filters."""
    if any(filename.startswith(excluded_dir) for excluded_dir in EXCLUDE_DIRS):
        return False
    if EXCLUDE_COMPILED.match(filename) is not None:
        return False
    return INCLUDE_COMPILED.match(filename) is not None
################################################################################
# obtain list of files in repo according to INCLUDE and EXCLUDE
################################################################################

GIT_LS_CMD = 'git ls-files --full-name'.split(' ')
GIT_TOPLEVEL_CMD = 'git rev-parse --show-toplevel'.split(' ')


def call_git_ls(base_directory):
    """List the repo-relative paths of tracked files under *base_directory*."""
    listing = subprocess.check_output([*GIT_LS_CMD, base_directory])
    return [name for name in listing.decode("utf-8").split('\n') if name]


def call_git_toplevel():
    """Return the absolute path to the project root (via git rev-parse)."""
    return subprocess.check_output(GIT_TOPLEVEL_CMD).strip().decode("utf-8")


def get_filenames_to_examine(base_directory):
    """Absolute paths of project files under *base_directory* passing the filters."""
    root = call_git_toplevel()
    candidates = call_git_ls(base_directory)
    matching = (os.path.join(root, name) for name in candidates
                if applies_to_file(name))
    return sorted(matching)
################################################################################
# define and compile regexes for the patterns we are looking for
################################################################################

COPYRIGHT_WITH_C = r'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
YEAR_LIST = '(%s)(, %s)+' % (YEAR, YEAR)
ANY_YEAR_STYLE = '(%s|%s)' % (YEAR_RANGE, YEAR_LIST)
ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
                                                ANY_YEAR_STYLE))
ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)


def compile_copyright_regex(copyright_style, year_style, name):
    """Compile a regex matching one full copyright notice line for *name*."""
    return re.compile(r'%s %s,? %s( +\*)?\n' % (copyright_style, year_style, name))


EXPECTED_HOLDER_NAMES = [
    r"Satoshi Nakamoto",
    r"The Bitcoin and Qogecoin Core Authors",
    r"BitPay Inc\.",
    r"University of Illinois at Urbana-Champaign\.",
    r"Pieter Wuille",
    r"Wladimir J\. van der Laan",
    r"Jeff Garzik",
    r"Jan-Klaas Kollhof",
    r"ArtForz -- public domain half-a-node",
    r"Intel Corporation ?",
    r"The Zcash developers",
    r"Jeremy Rubin",
]

# One pre-compiled pattern per expected holder, for each notice style.
DOMINANT_STYLE_COMPILED = {
    name: compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, name)
    for name in EXPECTED_HOLDER_NAMES
}
YEAR_LIST_STYLE_COMPILED = {
    name: compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, name)
    for name in EXPECTED_HOLDER_NAMES
}
WITHOUT_C_STYLE_COMPILED = {
    name: compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE, name)
    for name in EXPECTED_HOLDER_NAMES
}

################################################################################
# search file contents for copyright message of particular category
################################################################################


def get_count_of_copyrights_of_any_style_any_holder(contents):
    """Count notices in any style, for any holder, in *contents*."""
    return len(ANY_COPYRIGHT_COMPILED.findall(contents))


def file_has_dominant_style_copyright_for_holder(contents, holder_name):
    """True when a '(c)' + year-range notice for *holder_name* is present."""
    return DOMINANT_STYLE_COMPILED[holder_name].search(contents) is not None


def file_has_year_list_style_copyright_for_holder(contents, holder_name):
    """True when a '(c)' + comma-separated-years notice is present."""
    return YEAR_LIST_STYLE_COMPILED[holder_name].search(contents) is not None


def file_has_without_c_style_copyright_for_holder(contents, holder_name):
    """True when a notice without the '(c)' marker is present."""
    return WITHOUT_C_STYLE_COMPILED[holder_name].search(contents) is not None
################################################################################
# get file info
################################################################################


def read_file(filename):
    """Read *filename* as UTF-8 and return its full contents.

    Uses a ``with`` block so the file handle is closed deterministically;
    the original leaked the handle until garbage collection.
    """
    with open(filename, 'r', encoding="utf8") as f:
        return f.read()
def gather_file_info(filename):
    """Build the per-file dict of copyright counts and per-holder flags."""
    contents = read_file(filename)
    info = {
        'filename': filename,
        'contents': contents,
        'all_copyrights': get_count_of_copyrights_of_any_style_any_holder(contents),
        'classified_copyrights': 0,
        'dominant_style': {},
        'year_list_style': {},
        'without_c_style': {},
    }
    for holder_name in EXPECTED_HOLDER_NAMES:
        dominant = file_has_dominant_style_copyright_for_holder(contents, holder_name)
        year_list = file_has_year_list_style_copyright_for_holder(contents, holder_name)
        without_c = file_has_without_c_style_copyright_for_holder(contents, holder_name)
        info['dominant_style'][holder_name] = dominant
        info['year_list_style'][holder_name] = year_list
        info['without_c_style'][holder_name] = without_c
        # A notice matching any known style counts as classified.
        if dominant or year_list or without_c:
            info['classified_copyrights'] += 1
    return info
################################################################################
# report execution
################################################################################

# 79 dashes; the original "'-'.join(['' for _ in range(80)])" produced the
# same string far less obviously.
SEPARATOR = '-' * 79
def print_filenames(filenames, verbose):
    """Print each filename tab-indented; silent unless *verbose*."""
    if not verbose:
        return
    for filename in filenames:
        print("\t%s" % filename)
def print_report(file_infos, verbose):
    """Print the full copyright report for the given gather_file_info dicts.

    Sections, in order: files examined, per-file notice counts (0..4+),
    per-holder breakdown for each of the three notice styles, and finally
    the files whose notices could not be classified.
    """
    print(SEPARATOR)
    examined = [i['filename'] for i in file_infos]
    print("%d files examined according to INCLUDE and EXCLUDE fnmatch rules" %
          len(examined))
    print_filenames(examined, verbose)
    print(SEPARATOR)
    print('')
    # Bucket files by how many notices (of any style/holder) they contain.
    zero_copyrights = [i['filename'] for i in file_infos if
                       i['all_copyrights'] == 0]
    print("%4d with zero copyrights" % len(zero_copyrights))
    print_filenames(zero_copyrights, verbose)
    one_copyright = [i['filename'] for i in file_infos if
                     i['all_copyrights'] == 1]
    print("%4d with one copyright" % len(one_copyright))
    print_filenames(one_copyright, verbose)
    two_copyrights = [i['filename'] for i in file_infos if
                      i['all_copyrights'] == 2]
    print("%4d with two copyrights" % len(two_copyrights))
    print_filenames(two_copyrights, verbose)
    three_copyrights = [i['filename'] for i in file_infos if
                        i['all_copyrights'] == 3]
    print("%4d with three copyrights" % len(three_copyrights))
    print_filenames(three_copyrights, verbose)
    four_or_more_copyrights = [i['filename'] for i in file_infos if
                               i['all_copyrights'] >= 4]
    print("%4d with four or more copyrights" % len(four_or_more_copyrights))
    print_filenames(four_or_more_copyrights, verbose)
    print('')
    print(SEPARATOR)
    # Per-holder counts for the dominant "(c) + year range" style.
    print('Copyrights with dominant style:\ne.g. "Copyright (c)" and '
          '"<year>" or "<startYear>-<endYear>":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        dominant_style = [i['filename'] for i in file_infos if
                          i['dominant_style'][holder_name]]
        if len(dominant_style) > 0:
            print("%4d with '%s'" % (len(dominant_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(dominant_style, verbose)
    print('')
    print(SEPARATOR)
    # Per-holder counts for the "(c) + comma-separated year list" style.
    print('Copyrights with year list style:\ne.g. "Copyright (c)" and '
          '"<year1>, <year2>, ...":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        year_list_style = [i['filename'] for i in file_infos if
                           i['year_list_style'][holder_name]]
        if len(year_list_style) > 0:
            print("%4d with '%s'" % (len(year_list_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(year_list_style, verbose)
    print('')
    print(SEPARATOR)
    # Per-holder counts for notices lacking the "(c)" marker.
    print('Copyrights with no "(c)" style:\ne.g. "Copyright" and "<year>" or '
          '"<startYear>-<endYear>":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        without_c_style = [i['filename'] for i in file_infos if
                           i['without_c_style'][holder_name]]
        if len(without_c_style) > 0:
            print("%4d with '%s'" % (len(without_c_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(without_c_style, verbose)
    print('')
    print(SEPARATOR)
    # Files with more notices than we managed to classify against the
    # expected holder list.
    unclassified_copyrights = [i['filename'] for i in file_infos if
                               i['classified_copyrights'] < i['all_copyrights']]
    print("%d with unexpected copyright holder names" %
          len(unclassified_copyrights))
    print_filenames(unclassified_copyrights, verbose)
    print(SEPARATOR)
def exec_report(base_directory, verbose):
    """Gather info for every relevant file and print the summary report."""
    filenames = get_filenames_to_examine(base_directory)
    file_infos = [gather_file_info(f) for f in filenames]
    print_report(file_infos, verbose)

################################################################################
# report cmd
################################################################################

REPORT_USAGE = """
Produces a report of all copyright header notices found inside the source files
of a repository.
Usage:
    $ ./copyright_header.py report <base_directory> [verbose]
Arguments:
    <base_directory> - The base directory of a qogecoin source code repository.
    [verbose] - Includes a list of every file of each subcategory in the report.
"""


def report_cmd(argv):
    """Handle the 'report' subcommand: validate arguments, run the report."""
    if len(argv) == 2:
        sys.exit(REPORT_USAGE)

    base_directory = argv[2]
    if not os.path.exists(base_directory):
        sys.exit("*** bad <base_directory>: %s" % base_directory)

    if len(argv) == 3:
        verbose = False
    elif argv[3] == 'verbose':
        verbose = True
    else:
        # Bug fix: the offending argument is argv[3] (the optional flag),
        # not argv[2] (the base directory) as originally printed.
        sys.exit("*** unknown argument: %s" % argv[3])

    exec_report(base_directory, verbose)
################################################################################
# query git for year of last change
################################################################################

# Kept for backward compatibility; call_git_log builds its argv as a list.
GIT_LOG_CMD = "git log --pretty=format:%%ai %s"


def call_git_log(filename):
    """Return the author-date lines of ``git log`` for *filename*.

    Builds the argument vector as a list so a filename containing spaces
    stays a single argument; the original formatted GIT_LOG_CMD and then
    split on spaces, which broke such paths apart.
    """
    out = subprocess.check_output(
        ['git', 'log', '--pretty=format:%ai', filename])
    return out.decode("utf-8").split('\n')
def get_git_change_years(filename):
    """Return the years (as strings) in which git recorded changes.

    Falls back to the current year for files with no git history yet.
    """
    # Drop empty lines first: ''.split('\n') yields [''], so the original
    # empty-history guard below could never fire and an untracked file
    # produced a bogus empty "year".
    git_log_lines = [line for line in call_git_log(filename) if line]
    if not git_log_lines:
        # str() keeps the type consistent with the parsed years so callers
        # can compare and max() without mixing str and int.
        return [str(datetime.date.today().year)]
    # timestamp is in ISO 8601 format. e.g. "2016-09-05 14:25:32 -0600"
    return [line.split(' ')[0].split('-')[0] for line in git_log_lines]


def get_most_recent_git_change_year(filename):
    """Latest year in which git recorded a change to *filename*."""
    return max(get_git_change_years(filename))
################################################################################
# read and write to file
################################################################################


def read_file_lines(filename):
    """Read *filename* (UTF-8) and return its lines, newlines preserved.

    ``with`` guarantees the handle is closed even if readlines() raises;
    the original closed it manually and leaked on error.
    """
    with open(filename, 'r', encoding="utf8") as f:
        return f.readlines()


def write_file_lines(filename, file_lines):
    """Write the (newline-terminated) *file_lines* to *filename* as UTF-8."""
    with open(filename, 'w', encoding="utf8") as f:
        f.write(''.join(file_lines))
################################################################################
# update header years execution
################################################################################

COPYRIGHT = r'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The Bitcoin and Qogecoin Core Authors'
UPDATEABLE_LINE_COMPILED = re.compile(' '.join([COPYRIGHT, YEAR_RANGE, HOLDER]))


def get_updatable_copyright_line(file_lines):
    """Return (index, line) of the first updatable core copyright line.

    Returns (None, None) when no line matches. enumerate() replaces the
    original hand-maintained index counter.
    """
    for index, line in enumerate(file_lines):
        if UPDATEABLE_LINE_COMPILED.search(line) is not None:
            return index, line
    return None, None
def parse_year_range(year_range):
    """Split 'YYYY' or 'YYYY-YYYY' into a (start_year, end_year) pair."""
    parts = year_range.split('-')
    if len(parts) == 1:
        return parts[0], parts[0]
    return parts[0], parts[1]


def year_range_to_str(start_year, end_year):
    """Render a (start, end) pair back to 'YYYY' or 'YYYY-YYYY'."""
    if start_year == end_year:
        return start_year
    return "%s-%s" % (start_year, end_year)


def create_updated_copyright_line(line, last_git_change_year):
    """Extend the year range on a copyright *line* up to *last_git_change_year*.

    Returns the line unchanged when its end year is already current.
    """
    copyright_splitter = 'Copyright (c) '
    split_parts = line.split(copyright_splitter)
    # Text before the notice is comment framing and varies per file; keep it.
    before_copyright = split_parts[0]
    after_copyright = split_parts[1]
    tokens = after_copyright.split(' ')
    start_year, end_year = parse_year_range(tokens[0])
    # String comparison is fine here: all years share the '20xx' shape.
    if end_year >= last_git_change_year:
        return line
    updated_range = year_range_to_str(start_year, last_git_change_year)
    return (before_copyright + copyright_splitter + updated_range + ' ' +
            ' '.join(tokens[1:]))
def update_updatable_copyright(filename):
    """Update the core copyright year range in *filename* if it is stale.

    Prints one status line per file: no updatable notice, already
    up-to-date, or updated to the latest git change year.
    """
    file_lines = read_file_lines(filename)
    index, line = get_updatable_copyright_line(file_lines)
    if not line:
        print_file_action_message(filename, "No updatable copyright.")
        return
    last_git_change_year = get_most_recent_git_change_year(filename)
    new_line = create_updated_copyright_line(line, last_git_change_year)
    # create_updated_copyright_line returns the line unchanged when the
    # recorded end year already covers the last change.
    if line == new_line:
        print_file_action_message(filename, "Copyright up-to-date.")
        return
    file_lines[index] = new_line
    write_file_lines(filename, file_lines)
    print_file_action_message(filename,
                              "Copyright updated! -> %s" % last_git_change_year)
def exec_update_header_year(base_directory):
    """Run the year update over every file passing the include filters."""
    for filename in get_filenames_to_examine(base_directory):
        update_updatable_copyright(filename)

################################################################################
# update cmd
################################################################################

UPDATE_USAGE = """
Updates all the copyright headers of "The Bitcoin and Qogecoin Core Authors" which were
changed in a year more recent than is listed. For example:
// Copyright (c) <firstYear>-<lastYear> The Bitcoin and Qogecoin Core Authors
will be updated to:
// Copyright (c) <firstYear>-<lastModifiedYear> The Bitcoin and Qogecoin Core Authors
where <lastModifiedYear> is obtained from the 'git log' history.
This subcommand also handles copyright headers that have only a single year. In those cases:
// Copyright (c) <year> The Bitcoin and Qogecoin Core Authors
will be updated to:
// Copyright (c) <year>-<lastModifiedYear> The Bitcoin and Qogecoin Core Authors
where the update is appropriate.
Usage:
    $ ./copyright_header.py update <base_directory>
Arguments:
    <base_directory> - The base directory of a qogecoin source code repository.
"""


def print_file_action_message(filename, action):
    """Print one left-aligned 'filename  action' status line."""
    print("%-52s %s" % (filename, action))


def update_cmd(argv):
    """Handle the 'update' subcommand: validate args, then update headers."""
    if len(argv) != 3:
        sys.exit(UPDATE_USAGE)
    base_directory = argv[2]
    if not os.path.exists(base_directory):
        sys.exit("*** bad base_directory: %s" % base_directory)
    exec_update_header_year(base_directory)
################################################################################
# inserted copyright header format
################################################################################


def get_header_lines(header, start_year, end_year):
    """Split a header template into lines, filling in the year range."""
    # [1:-1] drops the empty first/last entries created by the template's
    # surrounding newlines.
    template_lines = header.split('\n')[1:-1]
    template_lines[0] = template_lines[0] % year_range_to_str(start_year, end_year)
    return ['%s\n' % line for line in template_lines]


CPP_HEADER = '''
// Copyright (c) %s The Bitcoin and Qogecoin Core Authors
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''


def get_cpp_header_lines_to_insert(start_year, end_year):
    """C++-style header lines, reversed for insert-at-index-0 iteration."""
    return reversed(get_header_lines(CPP_HEADER, start_year, end_year))


SCRIPT_HEADER = '''
# Copyright (c) %s The Bitcoin and Qogecoin Core Authors
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''


def get_script_header_lines_to_insert(start_year, end_year):
    """Script-style ('#') header lines, reversed for top insertion."""
    return reversed(get_header_lines(SCRIPT_HEADER, start_year, end_year))
################################################################################
# query git for year of last change
################################################################################


def get_git_change_year_range(filename):
    """Return the (earliest, latest) years the file changed per git log."""
    change_years = get_git_change_years(filename)
    return min(change_years), max(change_years)

################################################################################
# check for existing core copyright
################################################################################


def file_already_has_core_copyright(file_lines):
    """True when an updatable core-authors copyright line already exists."""
    index, _ = get_updatable_copyright_line(file_lines)
    return index is not None
################################################################################
# insert header execution
################################################################################


def file_has_hashbang(file_lines):
    """True when the first line is a hashbang ('#!') with content after it."""
    if not file_lines:
        return False
    first_line = file_lines[0]
    # A bare two-character '#!' line is treated as no hashbang, as before.
    return len(first_line) > 2 and first_line.startswith('#!')
def insert_script_header(filename, file_lines, start_year, end_year):
    """Insert a '#'-style header, below the hashbang when one is present."""
    insert_idx = 1 if file_has_hashbang(file_lines) else 0
    for header_line in get_script_header_lines_to_insert(start_year, end_year):
        file_lines.insert(insert_idx, header_line)
    write_file_lines(filename, file_lines)


def insert_cpp_header(filename, file_lines, start_year, end_year):
    """Insert a '//'-style header at the top, followed by a blank line."""
    # Blank line first; the (reversed) header lines are then pushed above it.
    file_lines.insert(0, '\n')
    for header_line in get_cpp_header_lines_to_insert(start_year, end_year):
        file_lines.insert(0, header_line)
    write_file_lines(filename, file_lines)
def exec_insert_header(filename, style):
    """Insert a copyright header into *filename* using the given style.

    Exits when the file already carries a core-authors copyright.
    """
    file_lines = read_file_lines(filename)
    if file_already_has_core_copyright(file_lines):
        sys.exit('*** %s already has a copyright by The Bitcoin and Qogecoin Core Authors'
                 % (filename))
    start_year, end_year = get_git_change_year_range(filename)
    if style in ['python', 'shell']:
        insert_script_header(filename, file_lines, start_year, end_year)
    else:
        insert_cpp_header(filename, file_lines, start_year, end_year)
################################################################################
# insert cmd
################################################################################

INSERT_USAGE = """
Inserts a copyright header for "The Bitcoin and Qogecoin Core Authors" at the top of the
file in either Python or C++ style as determined by the file extension. If the
file is a Python file and it has a '#!' starting the first line, the header is
inserted in the line below it.
The copyright dates will be set to be:
"<year_introduced>-<current_year>"
where <year_introduced> is according to the 'git log' history. If
<year_introduced> is equal to <current_year>, the date will be set to be:
"<current_year>"
If the file already has a copyright for "The Bitcoin and Qogecoin Core Authors", the
script will exit.
Usage:
    $ ./copyright_header.py insert <file>
Arguments:
    <file> - A source file in the qogecoin repository.
"""


def insert_cmd(argv):
    """Handle the 'insert' subcommand: validate the file, pick a style."""
    if len(argv) != 3:
        sys.exit(INSERT_USAGE)
    filename = argv[2]
    if not os.path.isfile(filename):
        sys.exit("*** bad filename: %s" % filename)
    _, extension = os.path.splitext(filename)
    if extension not in ['.h', '.cpp', '.cc', '.c', '.py', '.sh']:
        sys.exit("*** cannot insert for file extension %s" % extension)
    # Comment style by extension; every other accepted extension is C-like.
    style = {'.py': 'python', '.sh': 'shell'}.get(extension, 'cpp')
    exec_insert_header(filename, style)
################################################################################
# UI
################################################################################

USAGE = """
copyright_header.py - utilities for managing copyright headers of 'The Qogecoin
Core developers' in repository source files.
Usage:
    $ ./copyright_header <subcommand>
Subcommands:
    report
    update
    insert
To see subcommand usage, run them without arguments.
"""

# Recognized subcommands; each *_cmd validates its own remaining arguments.
SUBCOMMANDS = ['report', 'update', 'insert']

if __name__ == "__main__":
    # No subcommand at all: show top-level usage and exit.
    if len(sys.argv) == 1:
        sys.exit(USAGE)
    subcommand = sys.argv[1]
    if subcommand not in SUBCOMMANDS:
        sys.exit(USAGE)
    # Dispatch to the matching subcommand handler with the full argv.
    if subcommand == 'report':
        report_cmd(sys.argv)
    elif subcommand == 'update':
        update_cmd(sys.argv)
    elif subcommand == 'insert':
        insert_cmd(sys.argv)
| 36.792763
| 121
| 0.601967
|
4a0310a3638b52c69196ebbf03e1ac23b2c28599
| 2,644
|
py
|
Python
|
app/implementation.py
|
Anguandia/i-reporter
|
61c8236174101a229a72a2a4a01c465062dba893
|
[
"MIT"
] | null | null | null |
app/implementation.py
|
Anguandia/i-reporter
|
61c8236174101a229a72a2a4a01c465062dba893
|
[
"MIT"
] | null | null | null |
app/implementation.py
|
Anguandia/i-reporter
|
61c8236174101a229a72a2a4a01c465062dba893
|
[
"MIT"
] | null | null | null |
from .models import RedFlag
import datetime
# In-memory store of red-flag records keyed by their stringified id.
# NOTE(review): module-level state — data is lost on restart and shared by
# every Implementation instance.
red_flags = {}
class Implementation:
    """In-memory CRUD operations for red-flag records.

    Every method returns a 3-element list: [status_code, payload_key, payload].
    """
    def create(self, data):
        """Create a red flag from *data*; requires location/createdBy/title."""
        # Defaults applied for any optional field missing from the request.
        others = {
            'type': 'red-flag', 'status': 'draft', 'videos': '', 'images': '',
            'comment': ''}
        # Id is derived from the current store size.
        # NOTE(review): ids can collide after a delete (size shrinks) —
        # confirm whether that is acceptable for this store.
        red_flag = RedFlag(
            (len(red_flags)+1), data['location'], data['createdBy'],
            data['title']
        )
        red_flag.__setattr__('createdOn', datetime.datetime.now())
        # Copy caller-supplied optional fields, falling back to defaults.
        for key in others:
            if key in data:
                red_flag.__setattr__(key, data[key])
            else:
                red_flag.__setattr__(key, others[key])
        # Store the instance's attribute dict, not the instance itself.
        red_flags[str(red_flag.id)] = red_flag.__dict__
        return [
            201, 'data', [{'id': red_flag.id, 'message': 'Created red flag'}]
        ]
    def get_flags(self):
        """Return all stored red-flag records."""
        res = [200, 'data', [red_flags[key] for key in red_flags.keys()]]
        return res
    def get_flag(self, red_flag_id):
        """Return the record with *red_flag_id*, or 200 with an empty list."""
        try:
            red_flag = red_flags[str(red_flag_id)]
            res = [200, 'data', [red_flag]]
        except Exception as e:
            # Missing id: respond with an empty payload rather than an error.
            print(e)
            res = [200, 'data', []]
        return res
    def edit(self, red_flag_id, data, field):
        """Update one *field* of a record; 'location' has append semantics."""
        red_flag = self.get_flag(red_flag_id)[2]
        if len(red_flag) == 0:
            res = [400, 'error', 'red flag not found']
        elif red_flag[0]['status'] in ['rejected', 'resolved']:
            # Finalized records may no longer be edited.
            res = [
                403, 'error', f'red flag already {red_flag[0]["status"]}'
            ]
        elif field == 'location' and 'geolocation' not in red_flag[0][
                'location']:
            # First location edit: append the new value to the existing one.
            red_flag[0]['location'] += ' ' + data['location']
            res = 'added'
        elif field == 'location' and 'geolocation' in red_flag[0]['location']:
            # Subsequent edits: replace everything from 'geolocation' onward.
            red_flag[0]['location'] =\
                red_flag[0]['location'][:red_flag[0]['location'].index(
                    'geolocation')] + data['location']
            res = 'updated'
        else:
            red_flag[0][field] = data[field]
            res = 'updated'
        # A string result marks success; otherwise res is the error response.
        if isinstance(res, str):
            result = [200, 'data', [{
                'id': int(red_flag_id), 'message':
                f'{res} red-flag record\'s {field}'}]]
        else:
            result = res
        return result
    def delete(self, red_flag_id):
        """Remove the record with *red_flag_id*; 404 when absent."""
        try:
            red_flags.pop(str(red_flag_id))
            res = [200, 'data', [{'id': int(red_flag_id), 'message':
                                  'red-flag record has been deleted'}]]
        except Exception:
            res = [404, 'error', 'red flag not found']
        return res
| 34.789474
| 78
| 0.496218
|
4a03114d83ccc6d1fa15c69df79ec51c5ef079fc
| 2,153
|
py
|
Python
|
IoT/Distance_sensor/lib/ultraSonicSensor.py
|
Palmen98/DashboardExJobb
|
b74defdff529b14cc1ce8a54206af0c71b3ac90c
|
[
"MIT"
] | null | null | null |
IoT/Distance_sensor/lib/ultraSonicSensor.py
|
Palmen98/DashboardExJobb
|
b74defdff529b14cc1ce8a54206af0c71b3ac90c
|
[
"MIT"
] | null | null | null |
IoT/Distance_sensor/lib/ultraSonicSensor.py
|
Palmen98/DashboardExJobb
|
b74defdff529b14cc1ce8a54206af0c71b3ac90c
|
[
"MIT"
] | null | null | null |
import utime
import pycom
import time
from machine import Pin
import lora
import light_manager
import keys
# initialise Ultrasonic Sensor pins (HC-SR04 style wiring: trigger out, echo in)
echo = Pin('P18', mode=Pin.IN)       # echo pulse input from the sensor
trigger = Pin('P20', mode=Pin.OUT)   # trigger output to start a measurement
trigger(0)  # make sure the trigger line starts low
# Ultrasonic distance measurement: returns the distance in whole centimetres.
def distance_measure():
    """Trigger one ultrasonic ping and return the measured distance (cm)."""
    # trigger pulse LOW for 2us (just in case)
    trigger(0)
    utime.sleep_us(2)
    # trigger HIGH for a 10us pulse
    trigger(1)
    utime.sleep_us(10)
    trigger(0)
    # wait for the rising edge of the echo then start timer
    # NOTE(review): these busy-wait loops have no timeout — if the sensor
    # never answers, execution hangs here; confirm that is acceptable.
    while echo() == 0:
        pass
    start = utime.ticks_us()
    # wait for end of echo pulse then stop timer
    while echo() == 1:
        pass
    finish = utime.ticks_us()
    # pause for 20ms to prevent overlapping echos
    # utime.sleep_ms(20)
    # calculate distance by using time difference between start and stop
    # speed of sound 340m/s or .034cm/us. Time * .034cm/us = Distance sound travelled there and back
    # divide by two for distance to object detected.
    distance = ((utime.ticks_diff(finish, start)) * 0.034)/2
    return int(distance)
def sendData(port, pin):
    """Send a value upstream over LoRa on the given port.

    NOTE(review): ``bytes(pin)`` with an int argument produces ``pin``
    zero-filled bytes, not an encoding of the value — the receiver would
    have to recover the number from the payload length. Confirm this is
    intended (vs. e.g. ``str(pin).encode()``).
    """
    lora.s.bind(port)
    lora.s.send(bytes(pin))
# to reduce errors we take ten readings and use the median
def distance_median():
    """Take ten distance readings, report the median over LoRa, return it.

    Sampling ten times and picking the middle value suppresses the
    occasional spurious echo reading.
    """
    # Collect ten integer samples and order them ascending.
    readings = sorted(int(distance_measure()) for _ in range(10))
    print(readings)
    # Middle element of the sorted samples (upper median for even counts).
    median_value = int(readings[len(readings) // 2])
    sendData(20, median_value)
    light_manager.sendData()
    print('Sending data')
    return median_value
print('Starting to measure distance')
# disable LED heartbeat (so we can control the LED)
pycom.heartbeat(False)
time.sleep(2)
# Main loop: wait, then take one median measurement and report it over LoRa.
while True:
    # take distance measurement, turn the light blue when measuring
    pycom.rgbled(0x00007d)
    # 1800 s = 30 minutes between measurements (sleep happens before reading)
    utime.sleep(1800)
    distance = distance_median()
    print("Distance: ", distance)
| 25.630952
| 100
| 0.69856
|
4a03116f1dcdb76c39cffbbc627aad796eea561c
| 9,259
|
py
|
Python
|
sympy/physics/optics/waves.py
|
msgoff/sympy
|
1e7daef7514902f5e89718fa957b7b36c6669a10
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/physics/optics/waves.py
|
msgoff/sympy
|
1e7daef7514902f5e89718fa957b7b36c6669a10
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/physics/optics/waves.py
|
msgoff/sympy
|
1e7daef7514902f5e89718fa957b7b36c6669a10
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This module has all the classes and functions related to waves in optics.
**Contains**
* TWave
"""
from __future__ import print_function, division
__all__ = ["TWave"]
from sympy import sympify, pi, sin, cos, sqrt, Symbol, S, symbols, Derivative, atan2
from sympy.core.expr import Expr
from sympy.physics.units import speed_of_light, meter, second
c = speed_of_light.convert_to(meter / second)
class TWave(Expr):
    r"""
    This is a simple transverse sine wave travelling in a one-dimensional space.
    Basic properties are required at the time of creation of the object,
    but they can be changed later with respective methods provided.

    It is represented as :math:`A \times cos(k*x - \omega \times t + \phi )`,
    where :math:`A` is the amplitude, :math:`\omega` is the angular velocity,
    :math:`k` is the wavenumber (spatial frequency), :math:`x` is a spatial variable
    to represent the position on the dimension on which the wave propagates,
    and :math:`\phi` is the phase angle of the wave.

    Arguments
    =========

    amplitude : Sympifyable
        Amplitude of the wave.
    frequency : Sympifyable
        Frequency of the wave.
    phase : Sympifyable
        Phase angle of the wave.
    time_period : Sympifyable
        Time period of the wave.
    n : Sympifyable
        Refractive index of the medium.

    Raises
    =======

    ValueError : When neither frequency nor time period is provided
        or they are not consistent.
    TypeError : When anything other than TWave objects is added.

    Examples
    ========

    >>> from sympy import symbols
    >>> from sympy.physics.optics import TWave
    >>> A1, phi1, A2, phi2, f = symbols('A1, phi1, A2, phi2, f')
    >>> w1 = TWave(A1, f, phi1)
    >>> w2 = TWave(A2, f, phi2)
    >>> w3 = w1 + w2  # Superposition of two waves
    >>> w3
    TWave(sqrt(A1**2 + 2*A1*A2*cos(phi1 - phi2) + A2**2), f,
    atan2(A1*cos(phi1) + A2*cos(phi2), A1*sin(phi1) + A2*sin(phi2)))
    >>> w3.amplitude
    sqrt(A1**2 + 2*A1*A2*cos(phi1 - phi2) + A2**2)
    >>> w3.phase
    atan2(A1*cos(phi1) + A2*cos(phi2), A1*sin(phi1) + A2*sin(phi2))
    >>> w3.speed
    299792458*meter/(second*n)
    >>> w3.angular_velocity
    2*pi*f
    """

    def __init__(
        self, amplitude, frequency=None, phase=S.Zero, time_period=None, n=Symbol("n")
    ):
        # Fail fast before doing any conversion work.
        if frequency is None and time_period is None:
            raise ValueError("Either frequency or time period is needed.")
        amplitude = sympify(amplitude)
        phase = sympify(phase)
        n = sympify(n)
        # Only sympify values that were actually supplied: passing None
        # through sympify is outside its documented contract and raises
        # SympifyError under strict conversion rules.
        if frequency is not None:
            frequency = sympify(frequency)
        if time_period is not None:
            time_period = sympify(time_period)
        if (
            frequency is not None
            and time_period is not None
            and frequency != 1 / time_period
        ):
            raise ValueError("frequency and time_period should be consistent.")
        self._amplitude = amplitude
        self._phase = phase
        self._n = n
        # Derive the missing one of (frequency, time_period) from the other.
        if time_period is not None:
            self._time_period = time_period
            self._frequency = 1 / time_period
        if frequency is not None:
            self._frequency = frequency
            self._time_period = 1 / frequency

    @property
    def frequency(self):
        """
        Returns the frequency of the wave,
        in cycles per second.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.frequency
        f
        """
        return self._frequency

    @property
    def time_period(self):
        """
        Returns the temporal period of the wave,
        in seconds per cycle.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.time_period
        1/f
        """
        return self._time_period

    @property
    def wavelength(self):
        """
        Returns the wavelength (spatial period) of the wave,
        in meters per cycle.
        It depends on the medium of the wave.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.wavelength
        299792458*meter/(second*f*n)
        """
        # Phase speed in the medium is c/n; wavelength = speed / frequency.
        return c / (self._frequency * self._n)

    @property
    def amplitude(self):
        """
        Returns the amplitude of the wave.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.amplitude
        A
        """
        return self._amplitude

    @property
    def phase(self):
        """
        Returns the phase angle of the wave,
        in radians.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.phase
        phi
        """
        return self._phase

    @property
    def speed(self):
        """
        Returns the propagation speed of the wave,
        in meters per second.
        It is dependent on the propagation medium.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.speed
        299792458*meter/(second*n)
        """
        return self.wavelength * self._frequency

    @property
    def angular_velocity(self):
        """
        Returns the angular velocity of the wave,
        in radians per second.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.angular_velocity
        2*pi*f
        """
        return 2 * pi * self._frequency

    @property
    def wavenumber(self):
        """
        Returns the wavenumber of the wave,
        in radians per meter.

        Examples
        ========

        >>> from sympy import symbols
        >>> from sympy.physics.optics import TWave
        >>> A, phi, f = symbols('A, phi, f')
        >>> w = TWave(A, f, phi)
        >>> w.wavenumber
        pi*second*f*n/(149896229*meter)
        """
        return 2 * pi / self.wavelength

    def __str__(self):
        """String representation of a TWave."""
        from sympy.printing import sstr

        return type(self).__name__ + sstr(self.args)

    __repr__ = __str__

    def __add__(self, other):
        """
        Addition of two waves will result in their superposition.
        The type of interference will depend on their phase angles.
        """
        if isinstance(other, TWave):
            if (
                self._frequency == other._frequency
                and self.wavelength == other.wavelength
            ):
                # Phasor addition: amplitudes combine by the law of cosines,
                # the resultant phase comes from the component sums.
                return TWave(
                    sqrt(
                        self._amplitude ** 2
                        + other._amplitude ** 2
                        + 2
                        * self.amplitude
                        * other.amplitude
                        * cos(self._phase - other.phase)
                    ),
                    self.frequency,
                    atan2(
                        self._amplitude * cos(self._phase)
                        + other._amplitude * cos(other._phase),
                        self._amplitude * sin(self._phase)
                        + other._amplitude * sin(other._phase),
                    ),
                )
            else:
                raise NotImplementedError(
                    "Interference of waves with different frequencies"
                    " has not been implemented."
                )
        else:
            raise TypeError(type(other).__name__ + " and TWave objects can't be added.")

    def _eval_rewrite_as_sin(self, *args, **kwargs):
        # cos(x) == sin(x + pi/2); keep unevaluated so the sin form survives.
        return self._amplitude * sin(
            self.wavenumber * Symbol("x")
            - self.angular_velocity * Symbol("t")
            + self._phase
            + pi / 2,
            evaluate=False,
        )

    def _eval_rewrite_as_cos(self, *args, **kwargs):
        return self._amplitude * cos(
            self.wavenumber * Symbol("x")
            - self.angular_velocity * Symbol("t")
            + self._phase
        )

    def _eval_rewrite_as_pde(self, *args, **kwargs):
        from sympy import Function

        mu, epsilon, x, t = symbols("mu, epsilon, x, t")
        E = Function("E")
        return Derivative(E(x, t), x, 2) + mu * epsilon * Derivative(E(x, t), t, 2)

    def _eval_rewrite_as_exp(self, *args, **kwargs):
        from sympy import exp, I

        return self._amplitude * exp(
            I
            * (
                self.wavenumber * Symbol("x")
                - self.angular_velocity * Symbol("t")
                + self._phase
            )
        )
| 28.489231
| 88
| 0.532779
|
4a0312128c64d1bc08da33e7e2ad816aad56b468
| 20,612
|
py
|
Python
|
yt/frontends/flash/data_structures.py
|
dpgrote/yt
|
74862c05f9243a674d2b4cc8d6adfa9eee5f2d96
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
yt/frontends/flash/data_structures.py
|
dpgrote/yt
|
74862c05f9243a674d2b4cc8d6adfa9eee5f2d96
|
[
"BSD-3-Clause-Clear"
] | 8
|
2020-04-02T16:51:49.000Z
|
2022-01-11T14:12:44.000Z
|
yt/frontends/flash/data_structures.py
|
dpgrote/yt
|
74862c05f9243a674d2b4cc8d6adfa9eee5f2d96
|
[
"BSD-3-Clause-Clear"
] | 2
|
2020-08-12T15:46:11.000Z
|
2021-02-09T13:09:17.000Z
|
import os
import weakref
import numpy as np
from yt.data_objects.index_subobjects.grid_patch import AMRGridPatch
from yt.data_objects.static_output import Dataset, ParticleFile, validate_index_order
from yt.funcs import mylog, setdefaultattr
from yt.geometry.grid_geometry_handler import GridIndex
from yt.geometry.particle_geometry_handler import ParticleIndex
from yt.utilities.file_handler import HDF5FileHandler, warn_h5py
from yt.utilities.physical_ratios import cm_per_mpc
from .fields import FLASHFieldInfo
class FLASHGrid(AMRGridPatch):
    """A single AMR grid patch of a FLASH dataset."""
    _id_offset = 1
    # __slots__ = ["_level_id", "stop_index"]
    def __init__(self, id, index, level):
        # Generic patch setup is handled by the AMRGridPatch base class.
        super().__init__(id, filename=index.index_filename, index=index)
        self.Level = level
        self.Parent = None
        self.Children = []
    def __repr__(self):
        return "FLASHGrid_%04i (%s)" % (self.id, self.ActiveDimensions)
class FLASHHierarchy(GridIndex):
    """Grid index (AMR hierarchy) built from a FLASH HDF5 output file."""
    grid = FLASHGrid
    _preload_implemented = True
    def __init__(self, ds, dataset_type="flash_hdf5"):
        self.dataset_type = dataset_type
        self.field_indexes = {}
        # weakref proxy avoids a dataset <-> index reference cycle
        self.dataset = weakref.proxy(ds)
        # for now, the index file is the dataset!
        self.index_filename = self.dataset.parameter_filename
        self.directory = os.path.dirname(self.index_filename)
        self._handle = ds._handle
        self._particle_handle = ds._particle_handle
        self.float_type = np.float64
        GridIndex.__init__(self, ds, dataset_type)
    def _initialize_data_storage(self):
        # No per-index storage is needed for FLASH.
        pass
    def _detect_output_fields(self):
        # Fluid fields come from "/unknown names"; particle fields (if a
        # particle file is present) are prefixed with "particle_".
        self.field_list = [
            ("flash", s.decode("ascii", "ignore"))
            for s in self._handle["/unknown names"][:].flat
        ]
        if "/particle names" in self._particle_handle:
            self.field_list += [
                ("io", "particle_" + s[0].decode("ascii", "ignore").strip())
                for s in self._particle_handle["/particle names"][:]
            ]
    def _count_grids(self):
        # Fall through the layouts used by different FLASH file versions.
        try:
            self.num_grids = self.dataset._find_parameter(
                "integer", "globalnumblocks", True
            )
        except KeyError:
            try:
                self.num_grids = self._handle["simulation parameters"]["total blocks"][
                    0
                ]
            except KeyError:
                self.num_grids = self._handle["/simulation parameters"][0][0]
    def _parse_index(self):
        """Read grid edges, dimensions, particle counts and levels from the file."""
        f = self._handle  # shortcut
        ds = self.dataset  # shortcut
        f_part = self._particle_handle  # shortcut
        # Initialize to the domain left / domain right
        ND = self.dataset.dimensionality
        DLE = self.dataset.domain_left_edge
        DRE = self.dataset.domain_right_edge
        for i in range(3):
            self.grid_left_edge[:, i] = DLE[i]
            self.grid_right_edge[:, i] = DRE[i]
        # We only go up to ND for 2D datasets
        self.grid_left_edge[:, :ND] = f["/bounding box"][:, :ND, 0]
        self.grid_right_edge[:, :ND] = f["/bounding box"][:, :ND, 1]
        # Move this to the parameter file
        try:
            nxb = ds.parameters["nxb"]
            nyb = ds.parameters["nyb"]
            nzb = ds.parameters["nzb"]
        except KeyError:
            # FLASH2-style layout: block sizes live in "simulation parameters"
            nxb, nyb, nzb = [
                int(f["/simulation parameters"][f"n{ax}b"]) for ax in "xyz"
            ]
        self.grid_dimensions[:] *= (nxb, nyb, nzb)
        try:
            self.grid_particle_count[:] = f_part["/localnp"][:][:, None]
        except KeyError:
            self.grid_particle_count[:] = 0.0
        # Prefix-sum of particle counts gives each grid's slice into the
        # flat particle arrays.
        self._particle_indices = np.zeros(self.num_grids + 1, dtype="int64")
        if self.num_grids > 1:
            np.add.accumulate(
                self.grid_particle_count.squeeze(), out=self._particle_indices[1:]
            )
        else:
            self._particle_indices[1] = self.grid_particle_count.squeeze()
        # This will become redundant, as _prepare_grid will reset it to its
        # current value.  Note that FLASH uses 1-based indexing for refinement
        # levels, but we do not, so we reduce the level by 1.
        self.grid_levels.flat[:] = f["/refine level"][:][:] - 1
        self.grids = np.empty(self.num_grids, dtype="object")
        for i in range(self.num_grids):
            self.grids[i] = self.grid(i + 1, self, self.grid_levels[i, 0])
        # This is a possibly slow and verbose fix, and should be re-examined!
        rdx = self.dataset.domain_width / self.dataset.domain_dimensions
        nlevels = self.grid_levels.max()
        dxs = np.ones((nlevels + 1, 3), dtype="float64")
        for i in range(nlevels + 1):
            dxs[i, :ND] = rdx[:ND] / self.dataset.refine_by ** i
        if ND < 3:
            dxs[:, ND:] = rdx[ND:]
        # Because we don't care about units, we're going to operate on views.
        gle = self.grid_left_edge.ndarray_view()
        gre = self.grid_right_edge.ndarray_view()
        geom = self.dataset.geometry
        # Fill in the full angular extent for lower-dimensional curvilinear
        # datasets so downstream geometry code sees a complete domain.
        if geom != "cartesian" and ND < 3:
            if geom == "spherical" and ND < 2:
                gle[:, 1] = 0.0
                gre[:, 1] = np.pi
            gle[:, 2] = 0.0
            gre[:, 2] = 2.0 * np.pi
        return
    def _populate_grid_objects(self):
        """Link parent/child grids and finalize per-grid geometry."""
        ii = np.argsort(self.grid_levels.flat)
        gid = self._handle["/gid"][:]
        # The last refine_by**dim entries of each gid row are the children.
        first_ind = -(self.dataset.refine_by ** self.dataset.dimensionality)
        for g in self.grids[ii].flat:
            gi = g.id - g._id_offset
            # FLASH uses 1-indexed group info
            g.Children = [self.grids[i - 1] for i in gid[gi, first_ind:] if i > -1]
            for g1 in g.Children:
                g1.Parent = g
            g._prepare_grid()
            g._setup_dx()
        # Dummy dimensions get the full domain width as their cell size.
        if self.dataset.dimensionality < 3:
            DD = self.dataset.domain_right_edge[2] - self.dataset.domain_left_edge[2]
            for g in self.grids:
                g.dds[2] = DD
        if self.dataset.dimensionality < 2:
            DD = self.dataset.domain_right_edge[1] - self.dataset.domain_left_edge[1]
            for g in self.grids:
                g.dds[1] = DD
        self.max_level = self.grid_levels.max()
class FLASHDataset(Dataset):
    """Dataset frontend for FLASH plotfile/checkpoint HDF5 outputs."""
    _index_class = FLASHHierarchy
    _field_info_class = FLASHFieldInfo
    _handle = None
    def __init__(
        self,
        filename,
        dataset_type="flash_hdf5",
        storage_filename=None,
        particle_filename=None,
        units_override=None,
        unit_system="cgs",
    ):
        """Open a FLASH output, optionally pairing it with a particle file.

        If *particle_filename* is not given, a sibling file is guessed by
        replacing "plt_cnt" with "part" in *filename*.
        """
        self.fluid_types += ("flash",)
        if self._handle is not None:
            return
        self._handle = HDF5FileHandler(filename)
        self.particle_filename = particle_filename
        if self.particle_filename is None:
            # try to guess the particle filename
            try:
                self._particle_handle = HDF5FileHandler(
                    filename.replace("plt_cnt", "part")
                )
                self.particle_filename = filename.replace("plt_cnt", "part")
                mylog.info(
                    "Particle file found: %s", self.particle_filename.split("/")[-1]
                )
            except OSError:
                self._particle_handle = self._handle
        else:
            # particle_filename is specified by user
            self._particle_handle = HDF5FileHandler(self.particle_filename)
        # Check if the particle file has the same time
        if self._particle_handle != self._handle:
            part_time = self._particle_handle.handle.get("real scalars")[0][1]
            plot_time = self._handle.handle.get("real scalars")[0][1]
            if not np.isclose(part_time, plot_time):
                # Mismatched snapshot times: fall back to the plot file only.
                self._particle_handle = self._handle
                mylog.warning(
                    "%s and %s are not at the same time. "
                    "This particle file will not be used.",
                    self.particle_filename,
                    filename,
                )
        # These should be explicitly obtained from the file, but for now that
        # will wait until a reorganization of the source tree and better
        # generalization.
        self.refine_by = 2
        Dataset.__init__(
            self,
            filename,
            dataset_type,
            units_override=units_override,
            unit_system=unit_system,
        )
        self.storage_filename = storage_filename
        self.parameters["HydroMethod"] = "flash"  # always PPM DE
        self.parameters["Time"] = 1.0  # default unit is 1...
    def _set_code_unit_attributes(self):
        """Set unit attributes, honoring FLASH's unitsystem and cosmology."""
        if "unitsystem" in self.parameters:
            # Some versions of FLASH inject quotes in the runtime parameters
            # See issue #1721
            us = self["unitsystem"].replace("'", "").replace('"', "").lower()
            # b_factor converts the file's magnetic field values to gauss.
            if us == "cgs":
                b_factor = 1.0
            elif us == "si":
                b_factor = np.sqrt(4 * np.pi / 1e7)
            elif us == "none":
                b_factor = np.sqrt(4 * np.pi)
            else:
                raise RuntimeError(
                    "Runtime parameter unitsystem with "
                    "value %s is unrecognized" % self["unitsystem"]
                )
        else:
            b_factor = 1.0
        if self.cosmological_simulation == 1:
            # Comoving -> proper conversion at the snapshot redshift.
            length_factor = 1.0 / (1.0 + self.current_redshift)
            temperature_factor = 1.0 / (1.0 + self.current_redshift) ** 2
        else:
            length_factor = 1.0
            temperature_factor = 1.0
        setdefaultattr(self, "magnetic_unit", self.quan(b_factor, "gauss"))
        setdefaultattr(self, "length_unit", self.quan(length_factor, "cm"))
        setdefaultattr(self, "mass_unit", self.quan(1.0, "g"))
        setdefaultattr(self, "time_unit", self.quan(1.0, "s"))
        setdefaultattr(self, "velocity_unit", self.quan(1.0, "cm/s"))
        setdefaultattr(self, "temperature_unit", self.quan(temperature_factor, "K"))
    def set_code_units(self):
        # NOTE(review): pass-through override adds nothing over the base
        # class implementation; kept for interface stability.
        super(FLASHDataset, self).set_code_units()
    def _find_parameter(self, ptype, pname, scalar=False):
        """Look up parameter *pname* of type *ptype* ("integer", "real", ...).

        :raises KeyError: when the parameter group or name is absent.
        """
        nn = "/%s %s" % (ptype, {False: "runtime parameters", True: "scalars"}[scalar])
        if nn not in self._handle:
            raise KeyError(nn)
        for tpname, pval in zip(
            self._handle[nn][:, "name"], self._handle[nn][:, "value"]
        ):
            if tpname.decode("ascii", "ignore").strip() == pname:
                if hasattr(pval, "decode"):
                    pval = pval.decode("ascii", "ignore")
                if ptype == "string":
                    return pval.strip()
                else:
                    return pval
        raise KeyError(pname)
    def _parse_parameter_file(self):
        """Populate self.parameters, geometry and cosmology from the file."""
        if "file format version" in self._handle:
            self._flash_version = int(self._handle["file format version"][:])
        elif "sim info" in self._handle:
            self._flash_version = int(
                self._handle["sim info"][:]["file format version"]
            )
        else:
            raise RuntimeError("Can't figure out FLASH file version.")
        # First we load all of the parameters
        hns = ["simulation parameters"]
        # note the ordering here is important: runtime parameters should
        # overwrite scalars with the same name.
        for ptype in ["scalars", "runtime parameters"]:
            for vtype in ["integer", "real", "logical", "string"]:
                hns.append(f"{vtype} {ptype}")
        if self._flash_version > 7:
            for hn in hns:
                if hn not in self._handle:
                    continue
                for varname, val in zip(
                    self._handle[hn][:, "name"], self._handle[hn][:, "value"]
                ):
                    vn = varname.strip()
                    if hn.startswith("string"):
                        pval = val.strip()
                    else:
                        pval = val
                    if vn in self.parameters and self.parameters[vn] != pval:
                        mylog.info(
                            "%s %s overwrites a simulation scalar of the same name",
                            hn[:-1],
                            vn,
                        )
                    if hasattr(pval, "decode"):
                        pval = pval.decode("ascii", "ignore")
                    self.parameters[vn.decode("ascii", "ignore")] = pval
        if self._flash_version == 7:
            # FLASH2 stores "simulation parameters" as a structured record
            # rather than (name, value) pairs, hence the different iteration.
            for hn in hns:
                if hn not in self._handle:
                    continue
                if hn == "simulation parameters":
                    zipover = (
                        (name, self._handle[hn][name][0])
                        for name in self._handle[hn].dtype.names
                    )
                else:
                    zipover = zip(
                        self._handle[hn][:, "name"], self._handle[hn][:, "value"]
                    )
                for varname, val in zipover:
                    vn = varname.strip()
                    if hasattr(vn, "decode"):
                        vn = vn.decode("ascii", "ignore")
                    if hn.startswith("string"):
                        pval = val.strip()
                    else:
                        pval = val
                    if vn in self.parameters and self.parameters[vn] != pval:
                        mylog.info(
                            "%s %s overwrites a simulation scalar of the same name",
                            hn[:-1],
                            vn,
                        )
                    if hasattr(pval, "decode"):
                        pval = pval.decode("ascii", "ignore")
                    self.parameters[vn] = pval
        # Determine block size
        try:
            nxb = self.parameters["nxb"]
            nyb = self.parameters["nyb"]
            nzb = self.parameters["nzb"]
        except KeyError:
            nxb, nyb, nzb = [
                int(self._handle["/simulation parameters"][f"n{ax}b"]) for ax in "xyz"
            ]  # FLASH2 only!
        # Determine dimensionality
        try:
            dimensionality = self.parameters["dimensionality"]
        except KeyError:
            dimensionality = 3
            if nzb == 1:
                dimensionality = 2
            if nyb == 1:
                dimensionality = 1
            if dimensionality < 3:
                mylog.warning("Guessing dimensionality as %s", dimensionality)
        self.dimensionality = dimensionality
        self.geometry = self.parameters["geometry"]
        # Determine base grid parameters
        if "lrefine_min" in self.parameters.keys():  # PARAMESH
            nblockx = self.parameters["nblockx"]
            nblocky = self.parameters["nblocky"]
            nblockz = self.parameters["nblockz"]
        else:  # Uniform Grid
            nblockx = self.parameters["iprocs"]
            nblocky = self.parameters["jprocs"]
            nblockz = self.parameters["kprocs"]
        # In case the user wasn't careful
        if self.dimensionality <= 2:
            nblockz = 1
        if self.dimensionality == 1:
            nblocky = 1
        # Determine domain boundaries
        dle = np.array([self.parameters[f"{ax}min"] for ax in "xyz"]).astype("float64")
        dre = np.array([self.parameters[f"{ax}max"] for ax in "xyz"]).astype("float64")
        if self.dimensionality < 3:
            for d in [dimensionality] + list(range(3 - dimensionality)):
                if dle[d] == dre[d]:
                    mylog.warning(
                        "Identical domain left edge and right edges "
                        "along dummy dimension (%i), attempting to read anyway",
                        d,
                    )
                    dre[d] = dle[d] + 1.0
        # Expand the angular coordinates of reduced-dimensionality
        # curvilinear datasets to their full extent.
        if self.dimensionality < 3 and self.geometry == "cylindrical":
            mylog.warning("Extending theta dimension to 2PI + left edge.")
            dre[2] = dle[2] + 2 * np.pi
        elif self.dimensionality < 3 and self.geometry == "polar":
            mylog.warning("Extending theta dimension to 2PI + left edge.")
            dre[1] = dle[1] + 2 * np.pi
        elif self.dimensionality < 3 and self.geometry == "spherical":
            mylog.warning("Extending phi dimension to 2PI + left edge.")
            dre[2] = dle[2] + 2 * np.pi
            if self.dimensionality == 1 and self.geometry == "spherical":
                mylog.warning("Extending theta dimension to PI + left edge.")
                dre[1] = dle[1] + np.pi
        self.domain_left_edge = dle
        self.domain_right_edge = dre
        self.domain_dimensions = np.array([nblockx * nxb, nblocky * nyb, nblockz * nzb])
        # Try to determine Gamma
        try:
            self.gamma = self.parameters["gamma"]
        except Exception:
            mylog.info("Cannot find Gamma")
            pass
        # Get the simulation time
        self.current_time = self.parameters["time"]
        # Determine if this is a periodic box
        p = [
            self.parameters.get(f"{ax}l_boundary_type", None) == "periodic"
            for ax in "xyz"
        ]
        self.periodicity = tuple(p)
        # Determine cosmological parameters.
        try:
            self.parameters["usecosmology"]
            self.cosmological_simulation = 1
            self.current_redshift = 1.0 / self.parameters["scalefactor"] - 1.0
            self.omega_lambda = self.parameters["cosmologicalconstant"]
            self.omega_matter = self.parameters["omegamatter"]
            self.hubble_constant = self.parameters["hubbleconstant"]
            self.hubble_constant *= cm_per_mpc * 1.0e-5 * 1.0e-2  # convert to 'h'
        except Exception:
            # Non-cosmological run: zero out all cosmology attributes.
            self.current_redshift = (
                self.omega_lambda
            ) = (
                self.omega_matter
            ) = self.hubble_constant = self.cosmological_simulation = 0.0
    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        """Return True when *filename* looks like a FLASH grid output."""
        try:
            fileh = HDF5FileHandler(filename)
            if "bounding box" in fileh["/"].keys():
                return True
        except (OSError, ImportError):
            pass
        return False
    @classmethod
    def _guess_candidates(cls, base, directories, files):
        candidates = [
            _ for _ in files if ("_hdf5_plt_cnt_" in _) or ("_hdf5_chk_" in _)
        ]
        # Typically, Flash won't have nested outputs.
        return candidates, (len(candidates) == 0)
    def close(self):
        self._handle.close()
class FLASHParticleFile(ParticleFile):
    """Particle file container for FLASH particle outputs; no extra state."""
    pass
class FLASHParticleDataset(FLASHDataset):
    """Dataset frontend for particle-only FLASH HDF5 outputs."""
    _index_class = ParticleIndex
    filter_bbox = False
    _file_class = FLASHParticleFile
    def __init__(
        self,
        filename,
        dataset_type="flash_particle_hdf5",
        storage_filename=None,
        units_override=None,
        index_order=None,
        index_filename=None,
        unit_system="cgs",
    ):
        """Open a FLASH particle file (no grid data, particle index only)."""
        self.index_order = validate_index_order(index_order)
        self.index_filename = index_filename
        if self._handle is not None:
            return
        self._handle = HDF5FileHandler(filename)
        self.refine_by = 2
        Dataset.__init__(
            self,
            filename,
            dataset_type,
            units_override=units_override,
            unit_system=unit_system,
        )
        self.storage_filename = storage_filename
    def _parse_parameter_file(self):
        # Let the superclass do all the work but then
        # fix the domain dimensions
        super(FLASHParticleDataset, self)._parse_parameter_file()
        domain_dimensions = np.zeros(3, "int32")
        domain_dimensions[: self.dimensionality] = 1
        self.domain_dimensions = domain_dimensions
        self.filename_template = self.parameter_filename
        self.file_count = 1
    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        """Return True for FLASH outputs with particles but no grid data."""
        warn_h5py(filename)
        try:
            fileh = HDF5FileHandler(filename)
            if (
                "bounding box" not in fileh["/"].keys()
                and "localnp" in fileh["/"].keys()
            ):
                return True
        except (OSError, ImportError):
            pass
        return False
    @classmethod
    def _guess_candidates(cls, base, directories, files):
        candidates = [_ for _ in files if "_hdf5_part_" in _]
        # Typically, Flash won't have nested outputs.
        return candidates, (len(candidates) == 0)
| 38.17037
| 88
| 0.548661
|
4a031251254d73fc99e5900a15a2860deac1dcf3
| 2,907
|
py
|
Python
|
tests/unit/model/test_base.py
|
JawboneHealth/jhhalchemy
|
68854e1ac5ee959287de70fd156d187c9025703c
|
[
"Apache-2.0"
] | 2
|
2017-09-21T23:10:25.000Z
|
2018-01-20T16:21:29.000Z
|
tests/unit/model/test_base.py
|
JawboneHealth/jhhalchemy
|
68854e1ac5ee959287de70fd156d187c9025703c
|
[
"Apache-2.0"
] | 3
|
2018-06-27T16:13:54.000Z
|
2018-06-28T20:10:31.000Z
|
tests/unit/model/test_base.py
|
JawboneHealth/jhhalchemy
|
68854e1ac5ee959287de70fd156d187c9025703c
|
[
"Apache-2.0"
] | 1
|
2018-01-25T00:09:53.000Z
|
2018-01-25T00:09:53.000Z
|
"""
Unit tests for the Base model
"""
import jhhalchemy.model
import mock
import pytest
@pytest.fixture
def base_instance():
    """Provide a fresh jhhalchemy Base model instance for each test."""
    return jhhalchemy.model.Base()
def test_base_save(base_instance):
    """
    Verify that save() adds the instance and optionally commits.
    :param base_instance: instance of Base model
    """
    mock_session = mock.Mock()
    # Default behavior: add to the session, then commit.
    base_instance.save(mock_session)
    mock_session.add.assert_called_once_with(base_instance)
    mock_session.commit.assert_called_once_with()
    # With commit=False the instance is only staged, never committed.
    mock_session.reset_mock()
    base_instance.save(mock_session, commit=False)
    mock_session.add.assert_called_once_with(base_instance)
    assert not mock_session.commit.called
@mock.patch('jhhalchemy.model.Base.query', autospec=True)
def test_base_read_by(mock_query):
    """
    Verify soft-delete filtering in read_by.
    :param mock_query: mocked model class query method
    """
    # removed defaults to False -> soft-deleted rows are filtered out.
    jhhalchemy.model.Base.read_by(col='val')
    mock_query.filter_by.assert_called_once_with(col='val', time_removed=0)
    # removed=True -> soft-deleted rows are included.
    mock_query.reset_mock()
    jhhalchemy.model.Base.read_by(removed=True, col='val')
    mock_query.filter_by.assert_called_once_with(col='val')
@mock.patch('jhhalchemy.model.Base.query', autospec=True)
@mock.patch('jhhalchemy.model.Base.time_created', autospec=True)
@mock.patch('jhhalchemy.model.Base.time_removed', autospec=True)
def test_base_read(mock_time_removed, mock_time_created, mock_query):
    """
    Verify soft-delete filtering in read.
    :param mock_time_removed: mocked time_removed column
    :param mock_time_created: mocked time_created column
    :param mock_query: mocked model class query method
    """
    # removed defaults to False -> the time_removed == 0 criterion is added.
    jhhalchemy.model.Base.read(mock_time_created == 1)
    mock_query.filter.assert_called_once_with(mock_time_removed == 0, mock_time_created == 1)
    # removed=True -> only the caller's criteria are used.
    mock_query.reset_mock()
    jhhalchemy.model.Base.read(mock_time_created == 1, removed=True)
    mock_query.filter.assert_called_once_with(mock_time_created == 1)
@mock.patch('sqlalchemy.func.unix_timestamp', autospec=True)
def test_base_delete(mock_ut, base_instance):
    """
    Verify soft/hard delete and commit logic.
    :param mock_ut: mocked sqlalchemy unix_timestamp function
    :param base_instance: instance of Base model
    """
    session = mock.Mock()
    # Default: soft delete (timestamp set, row kept) followed by a commit.
    base_instance.delete(session)
    mock_ut.assert_called_once_with()
    assert not session.delete.called
    session.commit.assert_called_once_with()
    # Hard delete without commit: row removed from session, nothing committed.
    mock_ut.reset_mock()
    session.reset_mock()
    base_instance.delete(session, commit=False, soft=False)
    assert not mock_ut.called
    session.delete.assert_called_once_with(base_instance)
    assert not session.commit.called
| 26.427273
| 93
| 0.719298
|
4a0312bcecb3638e53efa03df91178bab90a5902
| 6,571
|
py
|
Python
|
homeassistant/components/hassio/handler.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 22,481
|
2020-03-02T13:09:59.000Z
|
2022-03-31T23:34:28.000Z
|
homeassistant/components/hassio/handler.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
homeassistant/components/hassio/handler.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 11,411
|
2020-03-02T14:19:20.000Z
|
2022-03-31T22:46:07.000Z
|
"""Handler for Hass.io."""
import asyncio
from functools import wraps
from http import HTTPStatus
import logging
import os

import aiohttp

from homeassistant.components.http import (
    CONF_SERVER_HOST,
    CONF_SERVER_PORT,
    CONF_SSL_CERTIFICATE,
)
from homeassistant.const import SERVER_PORT

from .const import X_HASSIO
_LOGGER = logging.getLogger(__name__)
class HassioAPIError(RuntimeError):
    """Raised when a Supervisor API call fails or returns an error."""
def _api_bool(funct):
"""Return a boolean."""
async def _wrapper(*argv, **kwargs):
"""Wrap function."""
try:
data = await funct(*argv, **kwargs)
return data["result"] == "ok"
except HassioAPIError:
return False
return _wrapper
def api_data(funct):
    """Wrap a Supervisor API coroutine so it returns the "data" payload.

    The wrapped coroutine returns ``response["data"]`` on success and
    raises HassioAPIError with the API's message otherwise.
    """
    # functools.wraps preserves the wrapped coroutine's name/docstring,
    # which keeps logging and debugging output meaningful.
    @wraps(funct)
    async def _wrapper(*argv, **kwargs):
        """Wrap function."""
        data = await funct(*argv, **kwargs)
        if data["result"] == "ok":
            return data["data"]
        raise HassioAPIError(data["message"])
    return _wrapper
class HassIO:
"""Small API wrapper for Hass.io."""
def __init__(
self,
loop: asyncio.AbstractEventLoop,
websession: aiohttp.ClientSession,
ip: str,
) -> None:
"""Initialize Hass.io API."""
self.loop = loop
self.websession = websession
self._ip = ip
@_api_bool
def is_connected(self):
"""Return true if it connected to Hass.io supervisor.
This method return a coroutine.
"""
return self.send_command("/supervisor/ping", method="get", timeout=15)
@api_data
def get_info(self):
"""Return generic Supervisor information.
This method return a coroutine.
"""
return self.send_command("/info", method="get")
@api_data
def get_host_info(self):
"""Return data for Host.
This method return a coroutine.
"""
return self.send_command("/host/info", method="get")
@api_data
def get_os_info(self):
"""Return data for the OS.
This method return a coroutine.
"""
return self.send_command("/os/info", method="get")
@api_data
def get_core_info(self):
"""Return data for Home Asssistant Core.
This method returns a coroutine.
"""
return self.send_command("/core/info", method="get")
@api_data
def get_supervisor_info(self):
"""Return data for the Supervisor.
This method returns a coroutine.
"""
return self.send_command("/supervisor/info", method="get")
@api_data
def get_addon_info(self, addon):
"""Return data for a Add-on.
This method return a coroutine.
"""
return self.send_command(f"/addons/{addon}/info", method="get")
@api_data
def get_addon_stats(self, addon):
"""Return stats for an Add-on.
This method returns a coroutine.
"""
return self.send_command(f"/addons/{addon}/stats", method="get")
@api_data
def get_store(self):
"""Return data from the store.
This method return a coroutine.
"""
return self.send_command("/store", method="get")
@api_data
def get_ingress_panels(self):
"""Return data for Add-on ingress panels.
This method return a coroutine.
"""
return self.send_command("/ingress/panels", method="get")
@_api_bool
def restart_homeassistant(self):
"""Restart Home-Assistant container.
This method return a coroutine.
"""
return self.send_command("/homeassistant/restart")
@_api_bool
def stop_homeassistant(self):
"""Stop Home-Assistant container.
This method return a coroutine.
"""
return self.send_command("/homeassistant/stop")
@api_data
def retrieve_discovery_messages(self):
"""Return all discovery data from Hass.io API.
This method return a coroutine.
"""
return self.send_command("/discovery", method="get", timeout=60)
@api_data
def get_discovery_message(self, uuid):
"""Return a single discovery data message.
This method return a coroutine.
"""
return self.send_command(f"/discovery/{uuid}", method="get")
@_api_bool
async def update_hass_api(self, http_config, refresh_token):
"""Update Home Assistant API data on Hass.io."""
port = http_config.get(CONF_SERVER_PORT) or SERVER_PORT
options = {
"ssl": CONF_SSL_CERTIFICATE in http_config,
"port": port,
"watchdog": True,
"refresh_token": refresh_token.token,
}
if http_config.get(CONF_SERVER_HOST) is not None:
options["watchdog"] = False
_LOGGER.warning(
"Found incompatible HTTP option 'server_host'. Watchdog feature disabled"
)
return await self.send_command("/homeassistant/options", payload=options)
    @_api_bool
    def update_hass_timezone(self, timezone):
        """Update Home-Assistant timezone data on Hass.io.

        This method returns a coroutine.
        """
        return self.send_command("/supervisor/options", payload={"timezone": timezone})
    @_api_bool
    def update_diagnostics(self, diagnostics: bool):
        """Update the Supervisor diagnostics setting.

        This method returns a coroutine.
        """
        return self.send_command(
            "/supervisor/options", payload={"diagnostics": diagnostics}
        )
    async def send_command(self, command, method="post", payload=None, timeout=10):
        """Send API command to Hass.io.

        This method is a coroutine.

        Raises HassioAPIError on every failure path: unexpected HTTP
        status, request timeout, or aiohttp client error.
        """
        try:
            request = await self.websession.request(
                method,
                f"http://{self._ip}{command}",
                json=payload,
                headers={X_HASSIO: os.environ.get("HASSIO_TOKEN", "")},
                timeout=aiohttp.ClientTimeout(total=timeout),
            )
            # BAD_REQUEST is accepted here so the caller still receives the
            # decoded error body instead of a bare exception.
            if request.status not in (HTTPStatus.OK, HTTPStatus.BAD_REQUEST):
                _LOGGER.error("%s return code %d", command, request.status)
                raise HassioAPIError()
            answer = await request.json()
            return answer
        except asyncio.TimeoutError:
            _LOGGER.error("Timeout on %s request", command)
        except aiohttp.ClientError as err:
            _LOGGER.error("Client error on %s request %s", command, err)
        # Reached only after a timeout or client error was logged above:
        # collapse both into the single HassioAPIError type.
        raise HassioAPIError()
| 27.041152
| 89
| 0.602952
|
4a03132af8619849f94f5e95bea70afca93c1c80
| 693
|
py
|
Python
|
datasets/publaynet_gscnn.py
|
LivingSkyTechnologies/Document_Layout_Segmentation
|
0db00a18fb39afa1efa8ae183bbd57309a6ebfcf
|
[
"MIT"
] | 4
|
2021-01-28T23:06:43.000Z
|
2022-01-15T19:17:07.000Z
|
datasets/publaynet_gscnn.py
|
LivingSkyTechnologies/Document_Layout_Segmentation
|
0db00a18fb39afa1efa8ae183bbd57309a6ebfcf
|
[
"MIT"
] | 2
|
2021-01-25T21:54:05.000Z
|
2021-08-23T21:19:21.000Z
|
datasets/publaynet_gscnn.py
|
LivingSkyTechnologies/Document_Layout_Segmentation
|
0db00a18fb39afa1efa8ae183bbd57309a6ebfcf
|
[
"MIT"
] | 2
|
2021-01-28T13:39:33.000Z
|
2022-01-15T19:17:13.000Z
|
import os
from models.gated_scnn.gated_shape_cnn.datasets.publaynet.dataset import PubLayNet
# PubLayNet label ids -> semantic class names (0 is background).
class_mapping = {1: 'text', 2: 'title', 3: 'list', 4: 'table', 5: 'figure', 0: 'background'}
def build_gscnn_dataset(dataset_dir, img_size, batch_size, seed):
    """Construct the PubLayNet dataset splits for the GSCNN model.

    Returns a 4-tuple ``(train, valid, test, class_mapping)`` where the
    first three are the splits produced by the PubLayNet loader.
    """
    loader = PubLayNet(
        batch_size,
        img_size,
        img_size,
        debug=False,
        data_dir=dataset_dir,
        n_classes=len(class_mapping),
        seed=seed)
    return (loader.build_training_dataset(),
            loader.build_validation_dataset(),
            loader.build_test_dataset(),
            class_mapping)
| 28.875
| 92
| 0.712843
|
4a031332e610bf3529dd32768b8a11ccdf47135e
| 9,962
|
py
|
Python
|
tests/windows_packages/ntttcp_test.py
|
Nowasky/PerfKitBenchmarker
|
cfa88e269eb373780910896ed4bdc8db09469753
|
[
"Apache-2.0"
] | 3
|
2018-04-28T13:06:14.000Z
|
2020-06-09T02:39:44.000Z
|
tests/windows_packages/ntttcp_test.py
|
Nowasky/PerfKitBenchmarker
|
cfa88e269eb373780910896ed4bdc8db09469753
|
[
"Apache-2.0"
] | 1
|
2021-09-09T07:43:25.000Z
|
2021-09-09T10:47:56.000Z
|
tests/windows_packages/ntttcp_test.py
|
Nowasky/PerfKitBenchmarker
|
cfa88e269eb373780910896ed4bdc8db09469753
|
[
"Apache-2.0"
] | 6
|
2019-06-11T18:59:57.000Z
|
2021-03-02T19:14:42.000Z
|
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ntttcp_benchmark."""
import os
import unittest
from absl import flags
from absl.testing import parameterized
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from perfkitbenchmarker.windows_packages import ntttcp
FLAGS = flags.FLAGS
# The tests read flag default values directly, so mark flags parsed up front.
FLAGS.mark_as_parsed()
# Short local alias used by the config-parsing tests below.
NtttcpConf = ntttcp.NtttcpConf
class NtttcpBenchmarkTestCase(parameterized.TestCase, unittest.TestCase,
                              test_util.SamplesTestMixin):
  """Tests for ntttcp XML result parsing and ntttcp_config_list parsing."""
  def getDataContents(self, file_name):
    """Read a fixture file from the tests' data directory."""
    path = os.path.join(os.path.dirname(__file__), '..', 'data', file_name)
    with open(path) as fp:
      contents = fp.read()
    return contents
  def setUp(self):
    super(NtttcpBenchmarkTestCase, self).setUp()
    # Canned sender/receiver XML output for one TCP and one UDP run.
    self.xml_tcp_send_results = self.getDataContents('ntttcp_tcp_sender.xml')
    self.xml_tcp_rec_results = self.getDataContents('ntttcp_tcp_receiver.xml')
    self.xml_udp_send_results = self.getDataContents('ntttcp_udp_sender.xml')
    self.xml_udp_rec_results = self.getDataContents('ntttcp_udp_receiver.xml')
  def testNtttcpTcpParsing(self):
    """TCP results yield a total sample plus one sample per thread."""
    samples = ntttcp.ParseNtttcpResults(self.xml_tcp_send_results,
                                        self.xml_tcp_rec_results, {})
    # Flattened sender/receiver metadata expected from the TCP fixtures.
    expected_metadata = {
        'async': 'False',
        'bind_sender': 'False',
        'cooldown_time': '30000',
        'dash_n_timeout': '10800000',
        'max_active_threads': '2',
        'no_sync': 'False',
        'port': '5003',
        'receiver avg_bytes_per_compl': '149.998',
        'receiver avg_frame_size': '1266.217',
        'receiver avg_packets_per_dpc': '0.598',
        'receiver avg_packets_per_interrupt': '0.379',
        'receiver bufferCount': '9223372036854775807',
        'receiver bufferLen': '150',
        'receiver cpu': '36.872',
        'receiver cycles': '89.055',
        'receiver dpcs': '48156.278',
        'receiver errors': '1',
        'receiver interrupts': '75870.499',
        'receiver io': '2',
        'receiver packets_received': '1726938',
        'receiver packets_retransmitted': '4',
        'receiver packets_sent': '1092640',
        'receiver realtime': '60.015000',
        'receiver rb': -1,
        'receiver sb': -1,
        'receiver threads_avg_bytes_per_compl': '149.998',
        'receiver throughput': '291.484',
        'receiver total_buffers': '14577858.000',
        'receiver total_bytes': '2085.379314',
        'recv_socket_buff': '-1',
        'run_time': '60000',
        'sender avg_bytes_per_compl': '150.000',
        'sender avg_frame_size': '751.222',
        'sender avg_packets_per_dpc': '1.064',
        'sender avg_packets_per_interrupt': '0.516',
        'sender bufferCount': '9223372036854775807',
        'sender bufferLen': '150',
        'sender cpu': '36.234',
        'sender cycles': '87.514',
        'sender dpcs': '17108.590',
        'sender errors': '0',
        'sender interrupts': '35302.624',
        'sender io': '2',
        'sender_name': None,
        'sender packets_received': '1092639',
        'sender packets_retransmitted': '10',
        'sender packets_sent': '2910833',
        'sender realtime': '60.015000',
        'sender rb': -1,
        'sender sb': -1,
        'sender threads_avg_bytes_per_compl': '150.000',
        'sender total_buffers': '14577884.000',
        'sender total_bytes': '2085.383034',
        'send_socket_buff': '8192',
        'sync_port': 'False',
        'udp': 'False',
        'use_ipv6': 'False',
        'verbose': 'False',
        'verify_data': 'False',
        'wait_all': 'False',
        'wait_timeout_milliseconds': '600000',
        'warmup_time': '30000',
        'wsa': 'False',
    }
    # Per-thread samples carry the shared metadata plus a thread index.
    expected_thread_0_metadata = expected_metadata.copy()
    expected_thread_0_metadata['thread_index'] = '0'
    expected_thread_1_metadata = expected_metadata.copy()
    expected_thread_1_metadata['thread_index'] = '1'
    expected_samples = [
        sample.Sample('Total Throughput', 291.485, 'Mbps', expected_metadata),
        sample.Sample('Thread Throughput', 147.105, 'Mbps',
                      expected_thread_0_metadata),
        sample.Sample('Thread Throughput', 144.379, 'Mbps',
                      expected_thread_1_metadata)
    ]
    self.assertSampleListsEqualUpToTimestamp(expected_samples, samples)
  def testNtttcpUdpParsing(self):
    """UDP results yield a total sample plus one sample per thread."""
    samples = ntttcp.ParseNtttcpResults(self.xml_udp_send_results,
                                        self.xml_udp_rec_results, {})
    # Flattened sender/receiver metadata expected from the UDP fixtures.
    expected_metadata = {
        'async': 'False',
        'bind_sender': 'False',
        'cooldown_time': '30000',
        'dash_n_timeout': '10800000',
        'max_active_threads': '2',
        'no_sync': 'False',
        'port': '5003',
        'receiver avg_bytes_per_compl': '128.000',
        'receiver avg_frame_size': '99.200',
        'receiver avg_packets_per_dpc': '6.147',
        'receiver avg_packets_per_interrupt': '3.838',
        'receiver bufferCount': '9223372036854775807',
        'receiver bufferLen': '128',
        'receiver cpu': '51.120',
        'receiver cycles': '189.967',
        'receiver dpcs': '38835.774',
        'receiver errors': '0',
        'receiver interrupts': '62200.183',
        'receiver io': '2',
        'receiver packets_received': '14326674',
        'receiver packets_retransmitted': '0',
        'receiver packets_sent': '0',
        'receiver realtime': '60.015000',
        'receiver rb': -1,
        'receiver sb': -1,
        'receiver threads_avg_bytes_per_compl': '128.000',
        'receiver throughput': '189.447',
        'receiver total_buffers': '11103157.000',
        'receiver total_bytes': '1355.365845',
        'recv_socket_buff': '-1',
        'run_time': '60000',
        'sender avg_bytes_per_compl': '128.000',
        'sender avg_frame_size': '128.000',
        'sender avg_packets_per_dpc': '0.000',
        'sender avg_packets_per_interrupt': '0.000',
        'sender bufferCount': '9223372036854775807',
        'sender bufferLen': '128',
        'sender cpu': '68.290',
        'sender cycles': '196.108',
        'sender dpcs': '250.737',
        'sender errors': '0',
        'sender interrupts': '1669.516',
        'sender io': '2',
        'sender_name': None,
        'sender packets_received': '0',
        'sender packets_retransmitted': '0',
        'sender packets_sent': '14368008',
        'sender realtime': '60.015000',
        'sender rb': -1,
        'sender sb': -1,
        'sender threads_avg_bytes_per_compl': '128.000',
        'sender total_buffers': '14368009.000',
        'sender total_bytes': '1753.907349',
        'send_socket_buff': '8192',
        'sync_port': 'False',
        'udp': 'True',
        'use_ipv6': 'False',
        'verbose': 'False',
        'verify_data': 'False',
        'wait_all': 'False',
        'wait_timeout_milliseconds': '600000',
        'warmup_time': '30000',
        'wsa': 'False',
    }
    expected_thread_0_metadata = expected_metadata.copy()
    expected_thread_0_metadata['thread_index'] = '0'
    expected_thread_1_metadata = expected_metadata.copy()
    expected_thread_1_metadata['thread_index'] = '1'
    expected_samples = [
        sample.Sample('Total Throughput', 245.153, 'Mbps', expected_metadata),
        sample.Sample('Thread Throughput', 121.160, 'Mbps',
                      expected_thread_0_metadata),
        sample.Sample('Thread Throughput', 123.993, 'Mbps',
                      expected_thread_1_metadata)
    ]
    self.assertSampleListsEqualUpToTimestamp(expected_samples, samples)
  def testSingleConfigParse(self):
    """One 'udp:threads:time:ip_type:packet_size' entry parses to one conf."""
    ntttcp.FLAGS.ntttcp_config_list = ['True:7:800:INTERNAL:1']
    expected_list = [
        NtttcpConf(
            udp=True, threads=7, time_s=800, ip_type='INTERNAL', packet_size=1)
    ]
    conf_list = ntttcp.ParseConfigList()
    self.assertListEqual(conf_list, expected_list)
  def testEmptyConfig(self):
    """An empty config list falls back to the individual flag defaults."""
    ntttcp.FLAGS.ntttcp_config_list = []
    expected_list = [
        NtttcpConf(
            udp=FLAGS.ntttcp_udp,
            threads=FLAGS.ntttcp_threads,
            time_s=FLAGS.ntttcp_time,
            ip_type=FLAGS.ip_addresses,
            packet_size=FLAGS.ntttcp_packet_size)
    ]
    conf_list = ntttcp.ParseConfigList()
    self.assertListEqual(conf_list, expected_list)
  def testMultiConfigParse(self):
    """Multiple entries parse to multiple confs, preserving order."""
    ntttcp.FLAGS.ntttcp_config_list = [
        'True:7:800:INTERNAL:1', 'False:1:2:EXTERNAL:2',
        'True:44:1001:INTERNAL:3'
    ]
    expected_list = [
        NtttcpConf(
            udp=True, threads=7, time_s=800, ip_type='INTERNAL', packet_size=1),
        NtttcpConf(
            udp=False, threads=1, time_s=2, ip_type='EXTERNAL', packet_size=2),
        NtttcpConf(
            udp=True,
            threads=44,
            time_s=1001,
            ip_type='INTERNAL',
            packet_size=3),
    ]
    conf_list = ntttcp.ParseConfigList()
    self.assertListEqual(conf_list, expected_list)
  @parameterized.named_parameters(
      ('MissingVal', ['True:7:800:INTERNAL:1', 'False::2:EXTERNAL:2']),
      ('Misspell', ['rue:7:800:INTERNAL:3', 'True:44:1001:EXTERNAL:4']),
      ('WrongOrder', ['True:7:INTERNAL:800:1', '44:True:1001:EXTERNAL:6']))
  def testMalformedConfig(self, conf):
    """Malformed entries are rejected by the flag validator on assignment."""
    with self.assertRaises(flags.IllegalFlagValueError):
      ntttcp.FLAGS.ntttcp_config_list = conf
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| 36.490842
| 80
| 0.62367
|
4a0313d5f86a9d6edd0db484fe753aa6f9c30a99
| 2,660
|
py
|
Python
|
tests/oauth2/rfc6749/grant_types/test_implicit.py
|
smarie/oauthlib
|
6befed7747b27e0673b1fd121dc6897be70fa23a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/oauth2/rfc6749/grant_types/test_implicit.py
|
smarie/oauthlib
|
6befed7747b27e0673b1fd121dc6897be70fa23a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/oauth2/rfc6749/grant_types/test_implicit.py
|
smarie/oauthlib
|
6befed7747b27e0673b1fd121dc6897be70fa23a
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from unittest import mock
from oauthlib.common import Request
from oauthlib.oauth2.rfc6749.grant_types import ImplicitGrant
from oauthlib.oauth2.rfc6749.tokens import BearerToken
from ....unittest import TestCase
class ImplicitGrantTest(TestCase):
    """Tests for the OAuth2 implicit grant (RFC 6749, section 4.2)."""
    def setUp(self):
        """Build a mocked client/validator and a canned token request."""
        mock_client = mock.MagicMock()
        mock_client.user.return_value = 'mocked user'
        self.request = Request('http://a.b/path')
        self.request.scopes = ('hello', 'world')
        self.request.client = mock_client
        self.request.client_id = 'abcdef'
        self.request.response_type = 'token'
        self.request.state = 'xyz'
        self.request.redirect_uri = 'https://b.c/p'
        self.mock_validator = mock.MagicMock()
        self.auth = ImplicitGrant(request_validator=self.mock_validator)
    @mock.patch('oauthlib.common.generate_token')
    def test_create_token_response(self, generate_token):
        """The token is delivered in the fragment (or query when requested)."""
        generate_token.return_value = '1234'
        bearer = BearerToken(self.mock_validator, expires_in=1800)
        h, b, s = self.auth.create_token_response(self.request, bearer)
        correct_uri = 'https://b.c/p#access_token=1234&token_type=Bearer&expires_in=1800&state=xyz&scope=hello+world'
        self.assertEqual(s, 302)
        self.assertURLEqual(h['Location'], correct_uri, parse_fragment=True)
        self.assertEqual(self.mock_validator.save_token.call_count, 1)
        # response_mode='query' moves the parameters out of the fragment.
        correct_uri = 'https://b.c/p?access_token=1234&token_type=Bearer&expires_in=1800&state=xyz&scope=hello+world'
        self.request.response_mode = 'query'
        h, b, s = self.auth.create_token_response(self.request, bearer)
        self.assertURLEqual(h['Location'], correct_uri)
    def test_custom_validators(self):
        """All four registered pre/post validators run during token creation."""
        self.authval1, self.authval2 = mock.Mock(), mock.Mock()
        self.tknval1, self.tknval2 = mock.Mock(), mock.Mock()
        for val in (self.authval1, self.authval2):
            val.return_value = {}
        for val in (self.tknval1, self.tknval2):
            val.return_value = None
        self.auth.custom_validators.pre_token.append(self.tknval1)
        self.auth.custom_validators.post_token.append(self.tknval2)
        self.auth.custom_validators.pre_auth.append(self.authval1)
        self.auth.custom_validators.post_auth.append(self.authval2)
        bearer = BearerToken(self.mock_validator)
        self.auth.create_token_response(self.request, bearer)
        self.assertTrue(self.tknval1.called)
        self.assertTrue(self.tknval2.called)
        self.assertTrue(self.authval1.called)
        self.assertTrue(self.authval2.called)
    def test_error_response(self):
        # TODO: error-response coverage is not implemented yet.
        pass
| 42.222222
| 117
| 0.695113
|
4a03147713d4c9c7c89fafcf14d4a3cc25c50564
| 126
|
py
|
Python
|
checkpoints/pretrained/get_model.py
|
worldlife123/maskrcnn-benchmark
|
6c8bc908c2b7299ca6ffb292ae2680ac354d0eec
|
[
"MIT"
] | null | null | null |
checkpoints/pretrained/get_model.py
|
worldlife123/maskrcnn-benchmark
|
6c8bc908c2b7299ca6ffb292ae2680ac354d0eec
|
[
"MIT"
] | null | null | null |
checkpoints/pretrained/get_model.py
|
worldlife123/maskrcnn-benchmark
|
6c8bc908c2b7299ca6ffb292ae2680ac354d0eec
|
[
"MIT"
] | null | null | null |
# Extract the bare model weights (the 'model' key) from a full training
# checkpoint so they can be reused as a pretrained starting point.
import torch
a = torch.load("model_final.pth")
# NOTE(review): torch.load without map_location assumes the checkpoint's
# original device is available — confirm, or pass map_location='cpu'.
torch.save(a['model'], "e2e_lr_rpn_mask_rcnn_R_50_FPN_1x_kitti_trained.pth")
| 21
| 76
| 0.785714
|
4a0314a468b076d4e5b4858bd3547ceed43371e5
| 2,027
|
py
|
Python
|
pony/orm/tests/test_lazy.py
|
luckydonald/pony
|
e733f14ef4e21514b49248b7b72aae0728029852
|
[
"Apache-2.0"
] | 1
|
2019-08-02T12:06:24.000Z
|
2019-08-02T12:06:24.000Z
|
pony/orm/tests/test_lazy.py
|
luckydonald/pony
|
e733f14ef4e21514b49248b7b72aae0728029852
|
[
"Apache-2.0"
] | null | null | null |
pony/orm/tests/test_lazy.py
|
luckydonald/pony
|
e733f14ef4e21514b49248b7b72aae0728029852
|
[
"Apache-2.0"
] | 1
|
2020-07-20T17:25:48.000Z
|
2020-07-20T17:25:48.000Z
|
from __future__ import absolute_import, print_function, division
import unittest
from pony.orm.core import *
class TestLazy(unittest.TestCase):
    """Tests for attributes declared with lazy=True (deferred column load)."""
    def setUp(self):
        """Create an in-memory SQLite db with one entity with a lazy column."""
        self.db = Database('sqlite', ':memory:')
        class X(self.db.Entity):
            a = Required(int)
            b = Required(unicode, lazy=True)
        self.X = X
        self.db.generate_mapping(create_tables=True)
        with db_session:
            x1 = X(a=1, b='first')
            x2 = X(a=2, b='second')
            x3 = X(a=3, b='third')
    @db_session
    def test_lazy_1(self):
        """A lazy attribute is absent after load and fetched on first access."""
        X = self.X
        x1 = X[1]
        self.assertTrue(X.a in x1._vals_)
        self.assertTrue(X.b not in x1._vals_)
        b = x1.b
        self.assertEqual(b, 'first')
    @db_session
    def test_lazy_2(self):
        """Accessing a lazy attribute loads it for that object only."""
        X = self.X
        x1 = X[1]
        x2 = X[2]
        x3 = X[3]
        self.assertTrue(X.b not in x1._vals_)
        self.assertTrue(X.b not in x2._vals_)
        self.assertTrue(X.b not in x3._vals_)
        b = x1.b
        self.assertTrue(X.b in x1._vals_)
        self.assertTrue(X.b not in x2._vals_)
        self.assertTrue(X.b not in x3._vals_)
    @db_session
    def test_lazy_3(self): # coverage of https://github.com/ponyorm/pony/issues/49
        X = self.X
        x1 = X.get(b='first')
        self.assertTrue(X._bits_[X.b] & x1._rbits_)
        # NOTE(review): the second argument here is assertTrue's *msg*
        # parameter, so this assertion always passes while X.b is truthy;
        # it likely intended `self.assertIn(X.b, x1._vals_)` — confirm.
        self.assertTrue(X.b, x1._vals_)
    @db_session
    def test_lazy_4(self): # coverage of https://github.com/ponyorm/pony/issues/49
        X = self.X
        result = select(x for x in X if x.b == 'first')[:]
        for x in result:
            self.assertTrue(X._bits_[X.b] & x._rbits_)
            self.assertTrue(X.b in x._vals_)
    @db_session
    def test_lazy_5(self): # coverage of https://github.com/ponyorm/pony/issues/49
        X = self.X
        result = select(x for x in X if x.b == 'first' if count() > 0)[:]
        for x in result:
            self.assertFalse(X._bits_[X.b] & x._rbits_)
            self.assertTrue(X.b not in x._vals_)
| 31.184615
| 83
| 0.567341
|
4a03152eea36038c3c16acded48ac3d4c310469d
| 2,343
|
py
|
Python
|
plugins/DDG/test.py
|
dregad/Limnoria
|
986913628929c9018e01b82b53638aced50ab0de
|
[
"BSD-3-Clause"
] | 2
|
2021-01-02T19:12:23.000Z
|
2021-01-21T22:28:51.000Z
|
plugins/DDG/test.py
|
dregad/Limnoria
|
986913628929c9018e01b82b53638aced50ab0de
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/DDG/test.py
|
dregad/Limnoria
|
986913628929c9018e01b82b53638aced50ab0de
|
[
"BSD-3-Clause"
] | 1
|
2021-01-02T19:14:23.000Z
|
2021-01-02T19:14:23.000Z
|
###
# Copyright (c) 2014-2017, James Lu <james@overdrivenetworks.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
from supybot.test import *
class DDGTestCase(PluginTestCase):
    """Network tests for the DDG (DuckDuckGo search) plugin."""
    plugins = ('DDG',)
    if network:
        def testSearch(self):
            # Fixed: '\:' and '\/' are not valid *string* escapes and emit a
            # DeprecationWarning (a future SyntaxError).  The first pattern is
            # now a raw string and the last doubles the backslashes; in both
            # cases the regex value is byte-identical to before ('\:' matches
            # ':' and '\/' matches '/').
            self.assertRegexp(
                'ddg search wikipedia', r'Wikipedia.*? - .*?https?\:\/\/')
            self.assertRegexp(
                'ddg search en.wikipedia.org',
                'Wikipedia, the free encyclopedia\x02 - '
                '.* <https://en.wikipedia.org/>')
            with conf.supybot.plugins.DDG.region.context('fr-fr'):
                self.assertRegexp(
                    'ddg search wikipedia',
                    'Wikipédia, l\'encyclopédie libre - .*?https?\\:\\/\\/')
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| 45.057692
| 79
| 0.702091
|
4a0315530428e9e876bcb5e89f2b5df1fd566577
| 419
|
py
|
Python
|
video/rest/rooms/list-rooms-multiple-filters/list-rooms-multiple-filters.6.x.py
|
azaddeveloper/api-snippets
|
f88b153cd7186fa70b33733b205886502db0d1f2
|
[
"MIT"
] | 3
|
2020-05-05T10:01:02.000Z
|
2021-02-06T14:23:13.000Z
|
video/rest/rooms/list-rooms-multiple-filters/list-rooms-multiple-filters.6.x.py
|
azaddeveloper/api-snippets
|
f88b153cd7186fa70b33733b205886502db0d1f2
|
[
"MIT"
] | null | null | null |
video/rest/rooms/list-rooms-multiple-filters/list-rooms-multiple-filters.6.x.py
|
azaddeveloper/api-snippets
|
f88b153cd7186fa70b33733b205886502db0d1f2
|
[
"MIT"
] | 1
|
2019-10-02T14:36:36.000Z
|
2019-10-02T14:36:36.000Z
|
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
# NOTE(review): despite the comment above, the values below are an API key
# SID ("SK...") and secret, not the account SID/auth token.
api_key_sid = "SKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
api_key_secret = "your_api_key_secret"
client = Client(api_key_sid, api_key_secret)
# Multiple filters are combined (AND): completed rooms named 'DailyStandup'.
rooms = client.video.rooms.list(status='completed', unique_name='DailyStandup')
for room in rooms:
    print(room.sid)
| 32.230769
| 79
| 0.801909
|
4a031626ae80f13b4d8dd11b6b4a67d27d67318f
| 1,360
|
py
|
Python
|
heroes.py
|
elinuzum/dungeon-game
|
d8c80b7776351d73829aca6563cbbffa18a8c8dc
|
[
"Apache-2.0"
] | null | null | null |
heroes.py
|
elinuzum/dungeon-game
|
d8c80b7776351d73829aca6563cbbffa18a8c8dc
|
[
"Apache-2.0"
] | null | null | null |
heroes.py
|
elinuzum/dungeon-game
|
d8c80b7776351d73829aca6563cbbffa18a8c8dc
|
[
"Apache-2.0"
] | null | null | null |
from base import Person
class Warrior(Person):
    # Sword-wielding fighter; highest defense of the 100-hp heroes.
    hp = 100
    strength = 2
    accuracy = 5
    defense = 4
    attacks_by = "swings sword"
class Marksman(Person):
    # Ranged hero; trades strength for the highest accuracy.
    hp = 100
    strength = 1
    accuracy = 8
    defense = 3
    attacks_by = "shoots an arrow"
class Madman(Person):
    # Wild melee hero; hits harder but with low accuracy and defense.
    hp = 100
    strength = 3
    accuracy = 3
    defense = 2
    attacks_by = "flails wildly"
class Mage(Person):
    # Spellcaster; double the base hp but the weakest strength.
    hp = 200
    strength = 1
    accuracy = 6
    defense = 2
    attacks_by = "casts a spell"
class Scout(Person):
    # Knife fighter; above-average hp and strength.
    hp = 150
    strength = 4
    accuracy = 6
    defense = 3
    attacks_by = "stabs with knives"
class Hunter(Person):
    # Tomahawk thrower; high strength, low defense.
    hp = 100
    strength = 5
    accuracy = 6
    defense = 2
    attacks_by = "throws tomahawk"
class Barbarian(Person):
    # Ax fighter; durable (175 hp) with high strength.
    hp = 175
    strength = 5
    accuracy =5
    defense = 3
    attacks_by = "swings ax"
class FloatingOrb(Person):
    # Fragile laser-shooting foe with the lowest hp here besides the hamster.
    hp = 75
    strength = 1
    accuracy = 5
    defense = 3
    attacks_by = "shoots laser"
class Overlord(Person):
    # Mace-swinging boss: highest hp and strength in the file.
    hp = 250
    strength = 6
    accuracy = 3
    defense = 4
    attacks_by = "Swings mace"
class OverGrownHamster(Person):
    # Weak joke enemy with minimal stats across the board.
    hp = 50
    strength = 2
    accuracy = 2
    defense = 2
    attacks_by = "Bites really hard"
class OverLord(Person):
    # NOTE(review): name differs from `Overlord` above only in capitalization
    # and the stats match OverGrownHamster — confirm this duplicate is intended.
    hp = 50
    strength = 2
    accuracy = 2
    defense = 2
    attacks_by = "Lords over you"
| 16.385542
| 36
| 0.594853
|
4a0317a10ed4fc60f0e71547bf610b09bda9482c
| 256
|
py
|
Python
|
eda5/servisnaknjiga/templatetags.py
|
vasjapavlovic/eda5
|
bc4b387b24239ea1dfb927657f05ddabbf707479
|
[
"BSD-3-Clause"
] | null | null | null |
eda5/servisnaknjiga/templatetags.py
|
vasjapavlovic/eda5
|
bc4b387b24239ea1dfb927657f05ddabbf707479
|
[
"BSD-3-Clause"
] | null | null | null |
eda5/servisnaknjiga/templatetags.py
|
vasjapavlovic/eda5
|
bc4b387b24239ea1dfb927657f05ddabbf707479
|
[
"BSD-3-Clause"
] | null | null | null |
from django import template
register = template.Library()
@register.filter()
def to_int(value):
    """Django template filter: cast *value* to int."""
    return int(value)
@register.filter()
def to_msec(value):
    # NOTE(review): `year`, `month` and `hours` are not defined anywhere in
    # this module, so invoking this filter raises NameError at runtime, and
    # the formula's intent is unclear. The intended unit conversion needs to
    # be confirmed with the template authors before this can be fixed.
    vrednost = value*year*365 + value*month*12+value*hours*24*60*3600*1000
    return vrednost
| 18.285714
| 74
| 0.726563
|
4a0318c82f1f5fa327728ec19a7d34bca3843a6f
| 1,731
|
py
|
Python
|
examples/edit_config/async_edit_config_iosxr.py
|
kn-winter/scrapli_netconf
|
c23893173671351255ce634408c428f7a72550c4
|
[
"MIT"
] | null | null | null |
examples/edit_config/async_edit_config_iosxr.py
|
kn-winter/scrapli_netconf
|
c23893173671351255ce634408c428f7a72550c4
|
[
"MIT"
] | null | null | null |
examples/edit_config/async_edit_config_iosxr.py
|
kn-winter/scrapli_netconf
|
c23893173671351255ce634408c428f7a72550c4
|
[
"MIT"
] | null | null | null |
"""async_edit_config_iosxr"""
import asyncio
from scrapli_netconf.driver import AsyncNetconfScrape
IOSXR_DEVICE = {
"host": "172.18.0.13",
"auth_username": "vrnetlab",
"auth_password": "VR-netlab9",
"auth_strict_key": False,
"transport": "asyncssh",
}
EDIT_INTERFACE_G_0_0_0_0 = """
<config>
<interface-configurations xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg">
<interface-configuration>
<active>act</active>
<interface-name>GigabitEthernet0/0/0/0</interface-name>
<description>skfasjdlkfjdsf</description>
<ipv4-network xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-ipv4-io-cfg">
<addresses>
<primary>
<address>10.10.0.1</address>
<netmask>255.255.255.0</netmask>
</primary>
</addresses>
</ipv4-network>
</interface-configuration>
</interface-configurations>
</config>
"""
async def main():
    """Edit-config example: lock candidate, edit, commit, unlock, close."""
    # create scrapli_netconf connection just like with scrapli, open the connection
    conn = AsyncNetconfScrape(**IOSXR_DEVICE)
    await conn.open()
    # lock the candidate config before starting because why not
    result = await conn.lock(target="candidate")
    print(result.result)
    config = EDIT_INTERFACE_G_0_0_0_0
    result = await conn.edit_config(config=config, target="candidate")
    print(result.result)
    # commit config changes
    result = await conn.commit()
    print(result.result)
    # unlock the candidate now that we're done
    result = await conn.unlock(target="candidate")
    print(result.result)
    # close the session
    await conn.close()
if __name__ == "__main__":
    # asyncio.run creates, runs and cleans up the event loop; the old
    # get_event_loop().run_until_complete pattern is deprecated.
    asyncio.run(main())
| 27.47619
| 84
| 0.674754
|
4a031996e71d862e0d1af52e7acf361d5d2e5834
| 1,879
|
py
|
Python
|
results/ecc/plot_bitrate_ECC.py
|
drcut/streamline
|
71d221df151dd6bb757d3c609ac904d3f1c56408
|
[
"MIT"
] | 7
|
2021-05-03T04:41:31.000Z
|
2022-01-09T22:33:07.000Z
|
results/ecc/plot_bitrate_ECC.py
|
drcut/streamline
|
71d221df151dd6bb757d3c609ac904d3f1c56408
|
[
"MIT"
] | 1
|
2021-11-24T15:45:34.000Z
|
2021-11-24T20:54:43.000Z
|
results/ecc/plot_bitrate_ECC.py
|
drcut/streamline
|
71d221df151dd6bb757d3c609ac904d3f1c56408
|
[
"MIT"
] | 4
|
2021-08-30T11:29:10.000Z
|
2021-10-21T18:34:04.000Z
|
"""Plot ECC bit-rate and bit-error-rate results and save the figure as EPS."""
import seaborn as sns; sns.set()
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import pandas as pd

## File name with Data Columns
datafile_str = "bitrate_ECC_results.txt"
datafile_col_x = "Number of Bits"
datafile_col_y1 = "Bits-Per-Second (bps)"
datafile_col_y3 = "Bit-Rate (KB/s)"
datafile_col_y2 = "Bit-Error-Rate (%)"
datafile_col_y4 = "BER 1->0 (%)"
datafile_col_y5 = "BER 0->1 (%)"
datafile_col_y6 = "BER 1-bit (%)"
datafile_col_y7 = "BER multi-bit (%)"
fig_str = "bitrate_ECC.eps"

# Read Data File using Pandas (whitespace-separated columns; raw string
# avoids the invalid '\s' string-escape warning).
data = pd.read_csv(datafile_str, sep=r"\s+")
data.columns = [datafile_col_x, datafile_col_y1, datafile_col_y2, datafile_col_y4,
                datafile_col_y5, datafile_col_y6, datafile_col_y7]
# Derive KB/s from bits-per-second.
data[datafile_col_y3] = data[datafile_col_y1].div(8 * 1024)
# Make percentages to floats:
data[datafile_col_y2] = data[datafile_col_y2].str.rstrip('%').astype('float')
print(data)  # fixed: Python 2 `print data` statement was a SyntaxError on Python 3

# Plot Data using Seaborn
# --Set line/marker size
sns.set_context("notebook", rc={"lines.linewidth": 2, "lines.markersize": 10})
# --Plot BER on the primary (left) axis
sns_plot = sns.lineplot(x=datafile_col_x, y=datafile_col_y2,
                        marker='*', color="blue", label="bit-error-rate (%)",
                        data=data)
ax1 = sns_plot.axes
# --Plot bit-rate on a twin (right) axis
ax2 = sns_plot.axes.twinx()
sns_plot = sns.lineplot(x=datafile_col_x, y=datafile_col_y3,
                        ax=ax2,
                        marker='*', color="red", label="bit-rate (KB/s)",
                        data=data)
# --Add semilog plot
sns_plot.set(xscale="log")
sns_plot.set_xlim(5 * 10**4, 5 * 10**9)
# --Set range
# sns_plot.axes.set_xlim(10)
ax1.set_ylim(0, 6)
ax1.legend(loc='upper left')
# --Other axis
ax2.set_ylim([0, 2000])
ax2.grid(False)
ax2.legend(loc='upper right')
# --Set ticks location
# sns_plot.axes.xaxis.set_major_locator(ticker.MultipleLocator(10))
# --Show Plot
plt.show()
# --Save Figure
fig = sns_plot.get_figure()
fig.savefig(fig_str, bbox_inches='tight')
| 28.469697
| 125
| 0.704098
|
4a0319bfafd0ff873169b4177d20ab7ca94994d0
| 13,560
|
py
|
Python
|
climart/utils/utils.py
|
Venka97/climART
|
b2246231f3ba8372d33e564700b872c410e33036
|
[
"CC-BY-4.0"
] | 2
|
2021-09-28T00:44:00.000Z
|
2021-09-28T02:43:20.000Z
|
climart/utils/utils.py
|
Venka97/climART
|
b2246231f3ba8372d33e564700b872c410e33036
|
[
"CC-BY-4.0"
] | null | null | null |
climart/utils/utils.py
|
Venka97/climART
|
b2246231f3ba8372d33e564700b872c410e33036
|
[
"CC-BY-4.0"
] | null | null | null |
"""
Author: Salva Rühling Cachay
"""
import logging
import math
import os
from functools import wraps
from typing import Union, Sequence, List, Dict, Optional, Callable
import numpy as np
import xarray as xr
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from climart.data_wrangling import constants, data_variables
def get_activation_function(name: str, functional: bool = False, num: int = 1):
    """Return the activation matching *name* (case/whitespace insensitive).

    Args:
        name: activation identifier, e.g. 'relu', 'gelu', 'swish'.
        functional: if True, return the functional form (e.g. ``F.relu``)
            instead of an ``nn.Module`` instance. Only honored when num == 1.
        num: when != 1, return a list of ``num`` fresh module instances
            (the ``functional`` flag is ignored in that case).
    """
    key = name.lower().strip()

    functional_map = {
        "softmax": F.softmax, "relu": F.relu, "tanh": torch.tanh,
        "sigmoid": torch.sigmoid, "identity": nn.Identity(), None: None,
        "swish": F.silu, "silu": F.silu, "elu": F.elu, "gelu": F.gelu,
        "prelu": nn.PReLU(),
    }

    def module_for(s: str):
        # A fresh module instance per lookup so instances are never shared.
        return {
            "softmax": nn.Softmax(dim=1), "relu": nn.ReLU(), "tanh": nn.Tanh(),
            "sigmoid": nn.Sigmoid(), "identity": nn.Identity(),
            "silu": nn.SiLU(), "elu": nn.ELU(), "prelu": nn.PReLU(),
            "swish": nn.SiLU(), "gelu": nn.GELU(),
        }[s]

    if num != 1:
        return [module_for(key) for _ in range(num)]
    return functional_map[key] if functional else module_for(key)
def get_normalization_layer(name, dims, num_groups=None, device='cpu'):
    """Instantiate the normalization layer identified by *name*.

    Returns ``None`` when *name* is not a string or is 'none'; raises
    ``ValueError`` for an unrecognized name. ``num_groups`` only applies
    to group norm and defaults to ``int(dims / 10)``.
    """
    if not isinstance(name, str) or name.lower() == 'none':
        return None
    if 'batch' in name:
        layer = nn.BatchNorm1d(num_features=dims)
    elif 'layer' in name:
        layer = nn.LayerNorm(dims)
    elif 'inst' in name:
        layer = nn.InstanceNorm1d(num_features=dims)
    elif 'group' in name:
        groups = int(dims / 10) if num_groups is None else num_groups
        # NOTE: group norm is returned directly, without a .to(device) move.
        return nn.GroupNorm(num_groups=groups, num_channels=dims)
    else:
        raise ValueError("Unknown normalization name", name)
    return layer.to(device)
def identity(X):
    """Identity function: return the input unchanged (no-op transform)."""
    return X
def rank_zero_only(fn):
    """Decorator that runs *fn* only on the rank-0 (main) process.

    Relies on the ``rank_zero_only.rank`` attribute set at module import
    time below; on non-zero ranks the call is a no-op returning ``None``.
    """
    @wraps(fn)
    def wrapped_fn(*args, **kwargs):
        if rank_zero_only.rank == 0:
            return fn(*args, **kwargs)
    return wrapped_fn
# TODO: this should be part of the cluster environment
def _get_rank() -> int:
rank_keys = ('RANK', 'SLURM_PROCID', 'LOCAL_RANK')
for key in rank_keys:
rank = os.environ.get(key)
if rank is not None:
return int(rank)
return 0
# add the attribute to the function but don't overwrite in case Trainer has already set it
# (import-time side effect: the rank is resolved once when this module loads)
rank_zero_only.rank = getattr(rank_zero_only, 'rank', _get_rank())
def get_logger(name=__name__, level=logging.INFO) -> logging.Logger:
    """Initializes multi-GPU-friendly python logger."""
    new_logger = logging.getLogger(name)
    new_logger.setLevel(level)
    # Wrap every logging method with the rank-zero decorator so that in
    # multi-GPU runs only the main process emits records (otherwise each
    # GPU process would duplicate every log line).
    for method_name in ("debug", "info", "warning", "error", "exception", "fatal", "critical"):
        setattr(new_logger, method_name, rank_zero_only(getattr(new_logger, method_name)))
    return new_logger
def adj_to_edge_indices(adj: Union[torch.Tensor, np.ndarray]) -> Union[torch.Tensor, np.ndarray]:
    """
    Args:
        adj: a (N, N) adjacency matrix, where N is the number of nodes
    Returns:
        A (2, E) array, edge_idxs, where E is the number of edges,
        and edge_idxs[0], edge_idxs[1] are the source & destination nodes, respectively.
    """
    if torch.is_tensor(adj):
        src, dst = torch.nonzero(adj, as_tuple=True)
        return torch.stack((src, dst), dim=0)
    src, dst = np.nonzero(adj)
    return np.stack((src, dst), axis=0)
def normalize_adjacency_matrix_torch(adj: Tensor, improved: bool = True, add_self_loops: bool = False):
    """Symmetrically normalize an adjacency matrix: D^{-1/2} A D^{-1/2}.

    NOTE: partly in-place — ``fill_diagonal_`` mutates *adj* when
    ``add_self_loops`` is set, and ``deg.pow_`` reuses the degree buffer.
    """
    if add_self_loops:
        # Self-loop weight 2.0 when `improved`, else 1.0.
        fill_value = 2. if improved else 1.
        adj = adj.fill_diagonal_(fill_value)
    deg: Tensor = torch.sum(adj, dim=1)
    deg_inv_sqrt: Tensor = deg.pow_(-0.5)
    # Isolated nodes have degree 0 -> inf after the inverse sqrt; zero them.
    deg_inv_sqrt.masked_fill_(deg_inv_sqrt == float('inf'), 0.)
    adj_t = torch.mul(adj, deg_inv_sqrt.view(-1, 1))
    adj_t = torch.mul(adj_t, deg_inv_sqrt.view(1, -1))
    return adj_t
def normalize_adjacency_matrix(adj: np.ndarray, improved: bool = True, add_self_loops: bool = True):
    """Symmetrically normalize an adjacency matrix: D^-1/2 @ A @ D^-1/2 (numpy).

    Note: when ``add_self_loops`` is True the diagonal of ``adj`` is filled
    in place ('improved' self loops get weight 2 instead of 1).
    """
    if add_self_loops:
        np.fill_diagonal(adj, 2. if improved else 1.)
    deg_inv_sqrt = np.power(np.sum(adj, axis=1), -0.5)
    # isolated nodes have degree 0 -> inf after the negative power; zero them out
    deg_inv_sqrt[np.isinf(deg_inv_sqrt)] = 0.
    d_mat = np.diag(deg_inv_sqrt)
    return d_mat @ adj @ d_mat
def set_gpu(gpu_id):
    """Restrict CUDA to the given device id, using PCI bus ordering."""
    os.environ.update(CUDA_DEVICE_ORDER="PCI_BUS_ID",
                      CUDA_VISIBLE_DEVICES=str(gpu_id))
def set_seed(seed, device='cuda'):
    """Seed the python, numpy and torch RNGs for reproducibility.

    For non-CPU devices, additionally makes cuDNN deterministic and seeds
    all visible GPUs.
    """
    import random, torch
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if device == 'cpu':
        return
    torch.backends.cudnn.deterministic = True
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
def get_name(params):
    """Build a human-readable run identifier from the experiment params dict."""
    parts = [params['model'].upper()]
    if 'clear' in params['exp_type']:
        parts.append('_CS')
    parts.append(f"_{params['train_years']}train_{params['validation_years']}val")
    parts.append(f"_{params['in_normalize'].upper()}")
    in_normed = params['spatial_normalization_in']
    out_normed = params['spatial_normalization_out']
    if in_normed and out_normed:
        parts.append('+spatialNormed')
    elif in_normed:
        parts.append('+spatialInNormed')
    elif out_normed:
        parts.append('+spatialOutNormed')
    parts.append(f"_{params['seed']}seed")
    return ''.join(parts)
def stem_word(word):
    """Lowercase, strip surrounding whitespace, and drop '-', '&', '+', '_'."""
    return word.lower().strip().translate(str.maketrans('', '', '-&+_'))
# CanAM specific functions to find out the year corresponding to CanAM snapshots/time steps
def canam_file_id_to_year_fraction(canam_filename: str) -> float:
    """Map a CanAM snapshot filename to a (fractional) simulation year.

    The numeric snapshot ID is divided by 365 * 24 * 4 steps per year;
    year counting starts at 1.
    """
    basename = canam_filename.rsplit('/', 1)[-1]
    step_id = int(basename.replace('CanAM_snapshot_', '').replace('.nc', ''))
    return step_id / (365 * 24 * 4) + 1
def get_year_to_canam_files_dict(canam_filenames: Sequence[str]) -> Dict[int, List[str]]:
    """Group CanAM snapshot filenames by the integer simulation year they fall in.

    Args:
        canam_filenames: filenames understood by ``canam_file_id_to_year_fraction``.
    Returns:
        Mapping year -> list of filenames, preserving input order within each year.
    """
    mapping: Dict[int, List[str]] = {}
    for fname in canam_filenames:
        year = int(math.floor(canam_file_id_to_year_fraction(fname)))
        # setdefault replaces the previous `year not in mapping.keys()` check
        # and the separate pre-computed `years` list (single pass, one lookup).
        mapping.setdefault(year, []).append(fname)
    return mapping
def year_string_to_list(year_string: str):
    """
    Args:
        year_string (str): must only contain {digits, '-', '+'}.
    Examples:
        '1988-90' will return [1988, 1989, 1990]
        '1988-1990+2001-2004' will return [1988, 1989, 1990, 2001, 2002, 2003, 2004]
    """
    # Non-strings (e.g. an already-expanded list) are passed through unchanged.
    if not isinstance(year_string, str):
        return year_string
    def year_string_to_full_year(year_string: str):
        # Expand a 2-digit year to 4 digits: '00'-'29' -> 20xx, '30'-'99' -> 19xx.
        if len(year_string) == 4:
            return int(year_string)
        assert len(year_string) == 2, f'Year {year_string} had an unexpected length.'
        if int(year_string[0]) < 3:
            return int('20' + year_string)
        else:
            return int('19' + year_string)
    def update_years(year_list: List[int], year_start, year_end):
        # Append the inclusive range [year_start, year_end] to `year_list`
        # and return reset ('' , '') accumulators for the next segment.
        if not isinstance(year_start, int):
            year_start = year_string_to_full_year(year_start)
        if year_end == '':
            # No '-' was seen in this segment -> a single year.
            year_end = year_start
        else:
            year_end = year_string_to_full_year(year_end)
        year_list += list(range(year_start, year_end + 1))
        return year_list, '', ''
    years = []
    # cur_year_start collects digits until a '-' is seen (then it becomes an
    # int and further digits go to cur_year_end).
    cur_year_start = cur_year_end = ''
    for char in year_string:
        if char == '-':
            # Range separator: finalize the start year of the segment.
            cur_year_start = year_string_to_full_year(cur_year_start)
        elif char == '+':
            # Segment separator: flush the accumulated segment into `years`.
            years, cur_year_start, cur_year_end = update_years(years, cur_year_start, cur_year_end)
        else:
            # Digit: extend end year if '-' was already seen, else the start year.
            if isinstance(cur_year_start, int):
                cur_year_end += char
            else:
                cur_year_start += char
    # Flush the trailing segment (the input does not end with '+').
    years, _, _ = update_years(years, cur_year_start, cur_year_end)
    return years
def compute_absolute_level_height(dz_layer_heights: xr.DataArray) -> xr.DataArray:
    """ Call with dz_layer_heights=YourDataset['dz'] """
    # Converts per-layer thicknesses (dim 'layers') into absolute level heights
    # (dim 'levels') by cumulative summation from the surface upward, then
    # restores the dataset's native TOA -> surface ordering.
    # layers=slice(None, None, -1) or levels=slice(None, None, -1) will simply reverse the data along that dim
    # Since levels=0 corresponds to TOA, this is needed, so that cumsum correctly accumulates from surface -> TOA
    surface_to_toa = dz_layer_heights.pad(layers=(0, 1), constant_values=0).sel(layers=slice(None, None, -1))
    # surface_to_toa[column = i] = [0, d_height_layer1, ..., d_height_lastLayer]
    level_abs_heights = surface_to_toa.cumsum(dim='layers').rename({'layers': 'levels'})
    toa_to_surface = level_abs_heights.sel(levels=slice(None, None, -1))  # reverse back to the existing format
    return toa_to_surface
def compute_temperature_diff(level_temps: xr.DataArray) -> xr.DataArray:
    """
    Usage:
        Call with level_temps=YourDataset['tfrow'], assuming that 'tfrow' is the temperature var. at the levels
    Returns:
        A xr.DataArray with same dimensions as level_temps, except for `levels` being replaced by `layer`.
        In the layer dimension, it will hold that:
            layer_i_tempDiff = level_i+1_temp - level_i_temp
        Note: This means that the temperature at *spatially higher* layers is subtracted from its adjacent lower layer.
            E.g., the layer next to the surface will get surface - level_one_above_surface
    """
    # diff along `levels` yields one fewer entry, i.e. exactly one value per layer.
    layer_temp_diffs = level_temps.diff(dim='levels', n=1).rename({'levels': 'layers'})
    return layer_temp_diffs
def get_target_types(target_type: Union[str, List[str]]) -> List[str]:
    """Normalize a target-type spec into a list holding SHORTWAVE and/or LONGWAVE.

    Raises:
        ValueError: if the string spec is not a recognized alias.
    """
    if isinstance(target_type, list):
        valid = (constants.SHORTWAVE, constants.LONGWAVE)
        assert all(t in valid for t in target_type)
        return target_type
    # Normalize aliases like 'SW&LW' / 'short-wave' before matching.
    key = target_type.lower().replace('&', '+').replace('-', '')
    if key in ('sw+lw', 'lw+sw', 'shortwave+longwave', 'longwave+shortwave'):
        return [constants.SHORTWAVE, constants.LONGWAVE]
    if key in ('sw', 'shortwave'):
        return [constants.SHORTWAVE]
    if key in ('lw', 'longwave'):
        return [constants.LONGWAVE]
    raise ValueError(f"Target type `{target_type}` must be one of shortwave, longwave or shortwave+longwave")
def get_target_variable_names(target_types: Union[str, List[str]],
                              target_variable: Union[str, List[str]]) -> List[str]:
    """Resolve (target types x variable spec) into concrete output variable names.

    Raises:
        ValueError: if the spec names neither fluxes nor a heating rate.
    """
    out_vars = data_variables.OUT_SHORTWAVE_NOCLOUDS + data_variables.OUT_LONGWAVE_NOCLOUDS \
               + data_variables.OUT_HEATING_RATE_NOCLOUDS
    if isinstance(target_variable, list):
        if len(target_variable) == 1:
            # Unwrap a singleton list and fall through to the string handling.
            target_variable = target_variable[0]
        else:
            err_msg = f"Each target var must be in {out_vars}, but got {target_variable}"
            assert all(t.lower() in out_vars for t in target_variable), err_msg
            return target_variable
    target_types = get_target_types(target_types)
    # Normalize aliases: '&'->'+', drop '-'/'_', 'fluxes'->'flux', 'heatingrate'->'hr'
    spec = (target_variable.lower().replace('&', '+').replace('-', '').replace('_', '')
            .replace('fluxes', 'flux').replace('heatingrate', 'hr'))
    wants_flux = 'flux' in spec
    wants_hr = 'hr' in spec
    target_vars: List[str] = []
    if constants.LONGWAVE in target_types:
        if wants_flux:
            target_vars += data_variables.OUT_LONGWAVE_NOCLOUDS
        if wants_hr:
            target_vars += [data_variables.LW_HEATING_RATE]
    if constants.SHORTWAVE in target_types:
        if wants_flux:
            target_vars += data_variables.OUT_SHORTWAVE_NOCLOUDS
        if wants_hr:
            target_vars += [data_variables.SW_HEATING_RATE]
    if not target_vars:
        raise ValueError(f"Target var `{spec}` must be one of fluxes, heating_rate.")
    return target_vars
def get_target_variable(target_variable: Union[str, List[str]]) -> List[str]:
    """Normalize a target-variable spec into a list of canonical target groups.

    Raises:
        ValueError: if the spec names neither fluxes nor a heating rate.
    """
    if isinstance(target_variable, list):
        if len(target_variable) == 1 and 'flux' in target_variable[0]:
            target_variable = target_variable[0]
        else:
            return target_variable
    # Normalize aliases: '&'->'+', drop '-'/'_', 'fluxes'->'flux', 'heatingrate'->'hr'
    spec = (target_variable.lower().replace('&', '+').replace('-', '').replace('_', '')
            .replace('fluxes', 'flux').replace('heatingrate', 'hr'))
    if spec == 'hr':
        # A pure heating-rate spec maps to surface/TOA fluxes plus heating rates.
        return [constants.SURFACE_FLUXES, constants.TOA_FLUXES, constants.HEATING_RATES]
    groups: List[str] = []
    if 'flux' in spec:
        groups.append(constants.FLUXES)
    if 'hr' in spec:
        groups.append(constants.HEATING_RATES)
    if not groups:
        raise ValueError(f"Target var `{spec}` must be one of fluxes, heating_rate.")
    return groups
def get_exp_ID(exp_type: str, target_types: Union[str, List[str]], target_variables: Union[str, List[str]]):
    """Format a short, human-readable experiment description string."""
    types_str = ' '.join(target_types)
    vars_str = ' '.join(target_variables)
    return f"{exp_type.upper()} conditions, with {types_str} x {vars_str} targets"
| 39.190751
| 119
| 0.658407
|
4a031b27b42d52239c0c1b9dea3aacbe42f5b814
| 5,994
|
py
|
Python
|
ml_tools/eolearn/tests/test_train_split.py
|
mohammadrezabk/eo-learn
|
8de3cfd64e74c1e4832e585954cdbf0ee9676eb3
|
[
"MIT"
] | null | null | null |
ml_tools/eolearn/tests/test_train_split.py
|
mohammadrezabk/eo-learn
|
8de3cfd64e74c1e4832e585954cdbf0ee9676eb3
|
[
"MIT"
] | null | null | null |
ml_tools/eolearn/tests/test_train_split.py
|
mohammadrezabk/eo-learn
|
8de3cfd64e74c1e4832e585954cdbf0ee9676eb3
|
[
"MIT"
] | null | null | null |
"""
Credits:
Copyright (c) 2017-2019 Matej Aleksandrov, Matej Batič, Andrej Burja (Sinergise)
Copyright (c) 2017-2019 Grega Milčinski, Matic Lubej, Devis Peresutti, Jernej Puc (Sinergise)
Copyright (c) 2017-2019 Jovan Višnjić, Anže Zupanc, Lojze Žust (Sinergise)
This source code is licensed under the MIT license found in the LICENSE
file in the root directory of this source tree.
"""
import unittest
import numpy as np
from eolearn.core import FeatureType, EOPatch
from eolearn.ml_tools import TrainTestSplitTask
class TestTrainSet(unittest.TestCase):
    """Unit tests for TrainTestSplitTask ('per_class', 'per_pixel', 'per_value')."""

    def test_train_split(self):
        """per_class split: parameter validation, seed determinism, ignore_values."""
        new_name = 'TEST_TRAIN_MASK'
        input_mask_feature = (FeatureType.MASK_TIMELESS, 'TEST')
        new_mask_feature = (FeatureType.MASK_TIMELESS, new_name)
        # Invalid bins / split types must be rejected at construction time.
        self.assertRaises(ValueError, TrainTestSplitTask, input_mask_feature, None)
        self.assertRaises(ValueError, TrainTestSplitTask, input_mask_feature, 1.5)
        self.assertRaises(ValueError, TrainTestSplitTask, input_mask_feature, [0.5, 0.3, 0.7])
        self.assertRaises(ValueError, TrainTestSplitTask, input_mask_feature, [0.5, 0.3, 0.7], split_type=None)
        self.assertRaises(ValueError, TrainTestSplitTask, input_mask_feature, [0.5, 0.3, 0.7], split_type='nonsense')
        shape = (1000, 1000, 3)
        data = np.random.randint(10, size=shape, dtype=int)
        indices = [(0, 2, 0, 2), (0, 2, 2, 4), (2, 4, 0, 2), (2, 4, 2, 4), (0, 4, 4, 8), (4, 8, 0, 4), (4, 8, 4, 8)]
        # Give each rectangular region its own unique class value (11, 22, ...).
        for index, (i_1, i_2, j_1, j_2) in enumerate(indices, 1):
            data[i_1:i_2, j_1:j_2, :] = index * 11
        patch = EOPatch()
        patch[input_mask_feature] = data
        bins = [0.2, 0.5, 0.8]
        expected_unique = set(range(1, len(bins) + 2))
        patch = TrainTestSplitTask((*input_mask_feature, new_name), bins, split_type='per_class')(patch, seed=1)
        self.assertTrue(set(np.unique(patch[new_mask_feature])) <= expected_unique)
        result_seed1 = np.copy(patch[new_mask_feature])
        # Within one class, every pixel must land in the same fold.
        unique = (np.unique(result_seed1[i_1:i_2, j_1:j_2, :], return_counts=True) for i_1, i_2, j_1, j_2 in indices)
        expected = [(i_2 - i_1) * (j_2 - j_1) * shape[-1] for i_1, i_2, j_1, j_2 in indices]
        for (unique_values, unique_counts), expected_count in zip(unique, expected):
            self.assertTrue(len(unique_values) == 1)
            self.assertTrue(len(unique_counts) == 1)
            self.assertTrue(unique_counts[0] == expected_count)
        # seed=2 should produce different result than seed=1
        patch = TrainTestSplitTask((*input_mask_feature, new_name), bins, split_type='per_class')(patch, seed=2)
        result_seed2 = np.copy(patch[new_mask_feature])
        self.assertTrue(set(np.unique(result_seed2)) <= expected_unique)
        self.assertFalse(np.array_equal(result_seed1, result_seed2))
        # test with seed 1 should produce the same result as before
        patch = TrainTestSplitTask((*input_mask_feature, new_name), bins, split_type='per_class')(patch, seed=1)
        result_seed_equal = patch[new_mask_feature]
        # FIX: this previously re-checked `result_seed2` (copy-paste), not the new result.
        self.assertTrue(set(np.unique(result_seed_equal)) <= expected_unique)
        self.assertTrue(np.array_equal(result_seed1, result_seed_equal))
        # test ignore_values=[2]
        bins = [0.2, 0.5, 0.7, 0.8]
        expected_unique = set(range(0, len(bins) + 2))
        data = np.random.randint(10, size=shape)
        patch[(FeatureType.MASK_TIMELESS, 'TEST')] = data
        split_task = TrainTestSplitTask((FeatureType.MASK_TIMELESS, 'TEST', 'BINS'), bins, split_type='per_class',
                                        ignore_values=[2])
        patch = split_task(patch, seed=542)
        self.assertTrue(set(np.unique(patch[(FeatureType.MASK_TIMELESS, 'BINS')])) <= expected_unique)
        # Ignored input values must be assigned fold 0.
        self.assertTrue(np.all(patch[(FeatureType.MASK_TIMELESS, 'BINS')][data == 2] == 0))

    def test_train_split_per_pixel(self):
        """per_pixel split: fold fractions should match the requested bins."""
        new_name = 'TEST_TRAIN_MASK'
        input_mask_feature = (FeatureType.MASK_TIMELESS, 'TEST')
        shape = (1000, 1000, 3)
        input_data = np.random.randint(10, size=shape, dtype=int)
        patch = EOPatch()
        patch[input_mask_feature] = input_data
        bins = [0.2, 0.6]
        patch = TrainTestSplitTask((*input_mask_feature, new_name), bins, split_type='per_pixel')(patch, seed=1)
        output_data = patch[(FeatureType.MASK_TIMELESS, new_name)]
        unique, counts = np.unique(output_data, return_counts=True)
        class_percentages = np.round(counts / input_data.size, 1)
        expected_unique = list(range(1, len(bins) + 2))
        self.assertTrue(np.array_equal(unique, expected_unique))
        self.assertTrue(np.array_equal(class_percentages, [0.2, 0.4, 0.4]))

    def test_train_split_per_value(self):
        """ Test if class ids get assigned to the same subclasses in multiple eopatches
        """
        new_name = 'TEST_TRAIN_MASK'
        input_mask_feature = (FeatureType.MASK_TIMELESS, 'TEST')
        shape = (1000, 1000, 3)
        input1 = np.random.randint(10, size=shape, dtype=int)
        input2 = np.random.randint(10, size=shape, dtype=int)
        patch1 = EOPatch()
        patch1[input_mask_feature] = input1
        patch2 = EOPatch()
        patch2[input_mask_feature] = input2
        bins = [0.2, 0.6]
        split_task = TrainTestSplitTask((*input_mask_feature, new_name), bins, split_type='per_value')
        # seeds should get ignored when splitting 'per_value'
        patch1 = split_task(patch1, seed=1)
        patch2 = split_task(patch2, seed=1)
        # FIX: renamed misspelled locals `otuput1`/`otuput2` -> `output1`/`output2`.
        output1 = patch1[(FeatureType.MASK_TIMELESS, new_name)]
        output2 = patch2[(FeatureType.MASK_TIMELESS, new_name)]
        unique = set(np.unique(input1)) | set(np.unique(input2))
        for uniq in unique:
            folds1 = output1[input1 == uniq]
            folds2 = output2[input2 == uniq]
            self.assertTrue(np.array_equal(np.unique(folds1), np.unique(folds2)))
if __name__ == '__main__':
unittest.main()
| 41.625
| 117
| 0.659493
|
4a031bb14ac4b9180dc2237dbcd4d057957e6152
| 483
|
py
|
Python
|
server/src/weaverbird/backends/pandas_executor/steps/sort.py
|
JeremyJacquemont/weaverbird
|
e04ab6f9c8381986ab71078e5199ece7a875e743
|
[
"BSD-3-Clause"
] | 54
|
2019-11-20T15:07:39.000Z
|
2022-03-24T22:13:51.000Z
|
server/src/weaverbird/backends/pandas_executor/steps/sort.py
|
JeremyJacquemont/weaverbird
|
e04ab6f9c8381986ab71078e5199ece7a875e743
|
[
"BSD-3-Clause"
] | 786
|
2019-10-20T11:48:37.000Z
|
2022-03-23T08:58:18.000Z
|
server/src/weaverbird/backends/pandas_executor/steps/sort.py
|
JeremyJacquemont/weaverbird
|
e04ab6f9c8381986ab71078e5199ece7a875e743
|
[
"BSD-3-Clause"
] | 10
|
2019-11-21T10:16:16.000Z
|
2022-03-21T10:34:06.000Z
|
from pandas import DataFrame
from weaverbird.backends.pandas_executor.types import DomainRetriever, PipelineExecutor
from weaverbird.pipeline.steps import SortStep
def execute_sort(
    step: SortStep,
    df: DataFrame,
    domain_retriever: DomainRetriever = None,
    execute_pipeline: PipelineExecutor = None,
) -> DataFrame:
    """Return *df* sorted according to *step*.

    Each entry of ``step.columns`` carries a ``column`` name and an ``order``
    ('asc' for ascending); the frame is sorted by all of them at once.
    """
    by_columns = [entry.column for entry in step.columns]
    ascending_flags = [entry.order == 'asc' for entry in step.columns]
    return df.sort_values(by=by_columns, ascending=ascending_flags)
| 28.411765
| 87
| 0.73499
|
4a031c4a0ccd61d66eaf60a5439508d9f456e294
| 264
|
py
|
Python
|
tests/artificial/transf_Logit/trend_ConstantTrend/cycle_30/ar_/test_artificial_32_Logit_ConstantTrend_30__20.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/artificial/transf_Logit/trend_ConstantTrend/cycle_30/ar_/test_artificial_32_Logit_ConstantTrend_30__20.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/artificial/transf_Logit/trend_ConstantTrend/cycle_30/ar_/test_artificial_32_Logit_ConstantTrend_30__20.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
# Generated benchmark case: build and evaluate a PyAF model on an artificial
# signal (N=32 daily points, constant trend, cycle length 30, logit transform,
# no noise, 20 exogenous variables, no autoregression).
# NOTE(review): the `tsds` import appears unused in this script.
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 30, transform = "Logit", sigma = 0.0, exog_count = 20, ar_order = 0);
| 37.714286
| 164
| 0.731061
|
4a031d1327b560e63a0883fe87332efb22e6305f
| 2,294
|
py
|
Python
|
FocusOnWork_PC/FocusOnWork/bin/Debug/netcoreapp3.1/WikiSearch.py
|
RokurouIchihara/FocusOnWork
|
6a1e0a344f871f4f5fb45f51782185259238e0a2
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
FocusOnWork_PC/FocusOnWork/bin/Debug/netcoreapp3.1/WikiSearch.py
|
RokurouIchihara/FocusOnWork
|
6a1e0a344f871f4f5fb45f51782185259238e0a2
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
FocusOnWork_PC/FocusOnWork/bin/Debug/netcoreapp3.1/WikiSearch.py
|
RokurouIchihara/FocusOnWork
|
6a1e0a344f871f4f5fb45f51782185259238e0a2
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf8 -*-
import requests
from bs4 import BeautifulSoup
class SearchOnGoogle:
    """Look up a search word on Japanese Wikipedia and record whether the
    matching article is game-related.

    The word is read from the first line of ``SearchWordAndResulut.txt``; the
    file is then overwritten with ``"True"`` / ``"False"``.
    NOTE(review): despite the class name, the lookup targets Wikipedia, not Google.
    """

    def __init__(self):
        # FIX: removed `directoryPath = os.getcwd()` — `os` was never imported
        # (NameError at construction) and the variable was unused anyway.
        self.__filename_ = 'SearchWordAndResulut.txt'
        self.__file_ = open(self.__filename_, mode='r+', encoding="utf-8")
        self.__urlRe_ = 'https://ja.wikipedia.org'
        self.__url_ = 'https://ja.wikipedia.org/w/index.php?search='
        self.__GetHtml()

    def __GetHtml(self):
        """Search for the word and write True/False (game or not) back to the file."""
        # Read the search word from the file.
        word = self.__file_.readline().replace('\n', '')
        # Replace full-width spaces with ASCII spaces.
        word = word.replace('\u3000', ' ')
        # FIX: removed leftover debug override `word = 'pubg'`, which ignored
        # the word read from the file and always searched for 'pubg'.
        response = requests.get(self.__url_ + word)
        soup = BeautifulSoup(response.text, 'html.parser')
        if 'を新規作成しましょう。' in str(soup):
            # No direct article hit — we landed on a search-results page.
            # Follow the first result link and fetch that article instead.
            print('error')
            bodys = str(soup.find_all(
                'div', class_='mw-search-result-heading'))
            soup = BeautifulSoup(bodys, 'html.parser')
            bodys = str(soup.find_all('a'))
            # Extract the URL of the top-ranked result (data-serp-pos="0").
            self.__url_ = bodys.split(
                'data-serp-pos=\"0\"')[-1].split('</a>')[0]
            self.__url_ = self.__urlRe_ + \
                self.__url_.split('href=\"')[-1].split('\"')[0]
            response = requests.get(self.__url_)
            soup = BeautifulSoup(response.text, 'html.parser')
        else:
            print('success')
        category = soup.script
        # Close and reopen the file in write mode to replace its contents.
        self.__file_.close()
        self.__file_ = open(self.__filename_, mode='w', encoding="utf-8")
        # Write the result:
        # True: game / False: not a game
        if category is not None:
            self.__file_.write(str(self.__Is_game(str(category))))
        else:
            self.__file_.write(str(False))
        self.__file_.close()

    def __Is_game(self, category):
        """Return True if the page metadata mentions a (Japanese) game category."""
        jaGames = [
            'ゲームソフト',
            'パソコンゲーム',
            'コンピュータゲーム'
        ]
        return any(jaGame in category for jaGame in jaGames)
searchOnGoogle = SearchOnGoogle()
| 29.792208
| 75
| 0.516129
|
4a031ea1d7e2e735c95bc04c1eea97c734554f13
| 914
|
py
|
Python
|
gptchat/chatlm/config.py
|
noriyukipy/gptchat
|
15febcc69cf79ffbca50bd8897447b5804bcef54
|
[
"MIT"
] | 18
|
2020-05-10T09:10:01.000Z
|
2022-03-22T08:45:43.000Z
|
gptchat/chatlm/config.py
|
noriyukipy/gptchat
|
15febcc69cf79ffbca50bd8897447b5804bcef54
|
[
"MIT"
] | 2
|
2020-08-01T10:32:51.000Z
|
2021-07-30T06:04:31.000Z
|
gptchat/chatlm/config.py
|
noriyukipy/gptchat
|
15febcc69cf79ffbca50bd8897447b5804bcef54
|
[
"MIT"
] | 2
|
2020-08-11T07:17:54.000Z
|
2020-09-20T10:38:50.000Z
|
from pydantic import BaseModel
from typing import List, Union
class ConfigInput(BaseModel):
    """Input file locations for training."""
    train_file: str
    valid_file: str
    tokenizer_file: str
    # None is accepted — presumably meaning "no pretrained model"; confirm with caller.
    pretrained_model_dir: Union[None, str]
class ConfigOutput(BaseModel):
    """Output locations for the trained model, logs and checkpoints."""
    model_dir: str
    tensorboard_dir: str
    checkpoint_path: str
class ConfigTrain(BaseModel):
    """Training hyperparameters."""
    max_length: int
    seed: int
    num_epochs: int
    batch_size: int
    learning_rate: float
    max_grad_norm: float
    warmup_rate: float
    patience: float
class ConfigPred(BaseModel):
    """Generation/prediction settings (sampling, length limits, banned words)."""
    do_sample: bool
    seed: int
    max_length: int
    top_k: int
    top_p: float
    bad_words: List[str]
class ConfigModelParams(BaseModel):
    """Model architecture sizes (embedding dim, layers, heads, context length)."""
    n_embd: int
    n_layer: int
    n_head: int
    n_ctx: int
class Config(BaseModel):
    """Top-level configuration aggregating all sections."""
    input: ConfigInput
    output: ConfigOutput
    # None is accepted — presumably when a pretrained model supplies the sizes; confirm.
    model_params: Union[None, ConfigModelParams]
    train: ConfigTrain
    pred: ConfigPred
| 17.921569
| 48
| 0.702407
|
4a031f88b29b5f3d7cc3ef7248ff39472bfd51d9
| 3,354
|
py
|
Python
|
gn4pions/modules/resolution_util.py
|
atlas-calo-ml/gn4pions_eastbay
|
e4093b691dcba1d6663464ba55760feb6033f86a
|
[
"Apache-2.0"
] | 1
|
2021-11-17T03:44:48.000Z
|
2021-11-17T03:44:48.000Z
|
gn4pions/modules/resolution_util.py
|
atlas-calo-ml/gn4pions_eastbay
|
e4093b691dcba1d6663464ba55760feb6033f86a
|
[
"Apache-2.0"
] | null | null | null |
gn4pions/modules/resolution_util.py
|
atlas-calo-ml/gn4pions_eastbay
|
e4093b691dcba1d6663464ba55760feb6033f86a
|
[
"Apache-2.0"
] | null | null | null |
# Let's define some utility functions we'll want to be using for resolutions
import os
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import scipy.stats as stats
import seaborn as sns
from matplotlib.colors import ListedColormap
from . import plot_util as pu
def responsePlot(x, y, figfile='', statistic='median',
                 xlabel='Truth Particle Energy [GeV]', ylabel='Predicted Energy / Truth Particle Energy',
                 atlas_x=-1, atlas_y=-1, simulation=False, make_plot=True,
                 textlist=[]):
    """Bin y by x (log-spaced x bins) and optionally draw a 2D response plot.

    Args:
        x, y: paired arrays; y is profiled in bins of x via `statistic`.
        figfile: if non-empty, the figure is also saved to this path.
        statistic: statistic name passed to scipy's binned_statistic.
        make_plot: if False, only the binned profile is computed.
        atlas_x, atlas_y, simulation, textlist: forwarded to pu.drawLabels.
    Returns:
        (bin centers, per-bin profile values)
    """
    # Log-spaced x bins from 0.1 to ~10^3 GeV; linear y bins in [0, 3].
    xbin = [10**exp for exp in np.arange(-1., 3.1, 0.05)]
    ybin = np.arange(0., 3.1, 0.05)
    xcenter = [(xbin[i] + xbin[i+1]) / 2 for i in range(len(xbin)-1)]
    profileXMed = stats.binned_statistic(
        x, y, bins=xbin, statistic=statistic).statistic
    if make_plot:
        c_map = ListedColormap(sns.color_palette("Blues", n_colors=100).as_hex())
        # plt.cla()
        # plt.clf()
        fig = plt.figure(figsize=(12,8))
        fig.patch.set_facecolor('white')
        plt.hist2d(x, y, bins=[xbin, ybin], norm=LogNorm(),zorder = -1, cmap=c_map)
        # Reference line at response == 1, plus the binned profile overlay.
        plt.plot([0.1, 1000], [1, 1], linestyle='--', color='black')
        plt.plot(xcenter, profileXMed, color='indianred')
        plt.xscale('log')
        plt.ylim(0, 1.75)
        plt.xlim(0.3, )
        pu.ampl.set_xlabel(xlabel, fontsize=20)
        pu.ampl.set_ylabel(ylabel, fontsize=20)
        # ampl.set_zlabel('Clusters')
        cb = plt.colorbar()
        cb.ax.set_ylabel('Clusters')
        # plt.legend()
        pu.drawLabels(fig, atlas_x, atlas_y, simulation, textlist)
        if figfile != '':
            print('Saving figure to {}'.format(figfile))
            plt.savefig(figfile)
        plt.show()
    return xcenter, profileXMed
def stdOverMean(x):
    """Relative spread: (population) standard deviation divided by the mean."""
    values = np.asarray(x)
    return np.std(values) / np.mean(values)
def iqrOverMed(x):
    """Robust relative spread: (q84 - q16) / (2 * median).

    84/16 are the median +/- 1 sigma percentiles for a normal distribution.
    """
    q16, q84 = np.percentile(x, [16, 84])
    return (q84 - q16) / (2 * np.median(x))
def resolutionPlot(x, y, figfile='', statistic='std',
                   xlabel='Truth Particle Energy', ylabel='Energy IQR over 2xMedian',
                   atlas_x=-1, atlas_y=-1, simulation=False,
                   textlist=None):
    """Bin y by x (log-spaced bins) and plot the per-bin resolution.

    Args:
        x, y: paired arrays; y is binned by x.
        figfile: if non-empty, the figure is also saved to this path.
        statistic: 'std', 'stdOverMean' or 'iqrOverMed'.
        atlas_x, atlas_y, simulation, textlist: forwarded to pu.drawLabels.
    Returns:
        (bin centers, per-bin resolution values)
    Raises:
        ValueError: if `statistic` is not one of the supported names.
    """
    # FIX: mutable default argument replaced with None sentinel.
    textlist = [] if textlist is None else textlist
    xbin = [10**exp for exp in np.arange(-1.0, 3.1, 0.1)]
    xcenter = [(xbin[i] + xbin[i+1]) / 2 for i in range(len(xbin)-1)]
    # Map the statistic name to what binned_statistic should compute per bin.
    stat_funcs = {'std': 'std', 'stdOverMean': stdOverMean, 'iqrOverMed': iqrOverMed}
    if statistic not in stat_funcs:
        # FIX: an unknown name previously fell through and crashed with
        # NameError on the undefined `resolution` variable.
        raise ValueError(f"Unknown statistic '{statistic}'; expected one of {sorted(stat_funcs)}")
    resolution = stats.binned_statistic(x, y, bins=xbin, statistic=stat_funcs[statistic]).statistic
    plt.cla(); plt.clf()
    fig = plt.figure()
    fig.patch.set_facecolor('white')
    plt.plot(xcenter, resolution)
    plt.xscale('log')
    plt.xlim(0.1, 1000)
    plt.ylim(0, 0.1)
    pu.ampl.set_xlabel(xlabel, fontsize=20)
    pu.ampl.set_ylabel(ylabel, fontsize=20)
    pu.drawLabels(fig, atlas_x, atlas_y, simulation, textlist)
    if figfile != '':
        plt.savefig(figfile)
    plt.show()
    return xcenter, resolution
| 34.57732
| 105
| 0.620155
|
4a031febe585837b3d3d2a85d3035b6b76bf9e5f
| 7,687
|
py
|
Python
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/provider.py
|
brianherrera/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
[
"AML"
] | 1,738
|
2017-09-21T10:59:12.000Z
|
2022-03-31T21:05:46.000Z
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/provider.py
|
ArchitectureStudios/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
[
"AML"
] | 427
|
2017-09-29T22:54:36.000Z
|
2022-02-15T19:26:50.000Z
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/resolution/resolvelib/provider.py
|
ArchitectureStudios/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
[
"AML"
] | 671
|
2017-09-21T08:04:01.000Z
|
2022-03-29T14:30:07.000Z
|
from pip._vendor.resolvelib.providers import AbstractProvider
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from .base import Constraint
if MYPY_CHECK_RUNNING:
from typing import Any, Dict, Iterable, Optional, Sequence, Set, Tuple, Union
from .base import Candidate, Requirement
from .factory import Factory
# Notes on the relationship between the provider, the factory, and the
# candidate and requirement classes.
#
# The provider is a direct implementation of the resolvelib class. Its role
# is to deliver the API that resolvelib expects.
#
# Rather than work with completely abstract "requirement" and "candidate"
# concepts as resolvelib does, pip has concrete classes implementing these two
# ideas. The API of Requirement and Candidate objects are defined in the base
# classes, but essentially map fairly directly to the equivalent provider
# methods. In particular, `find_matches` and `is_satisfied_by` are
# requirement methods, and `get_dependencies` is a candidate method.
#
# The factory is the interface to pip's internal mechanisms. It is stateless,
# and is created by the resolver and held as a property of the provider. It is
# responsible for creating Requirement and Candidate objects, and provides
# services to those objects (access to pip's finder and preparer).
class PipProvider(AbstractProvider):
    """Pip's provider implementation for resolvelib.

    :params constraints: A mapping of constraints specified by the user. Keys
        are canonicalized project names.
    :params ignore_dependencies: Whether the user specified ``--no-deps``.
    :params upgrade_strategy: The user-specified upgrade strategy.
    :params user_requested: A set of canonicalized package names that the user
        supplied for pip to install/upgrade.
    """

    def __init__(
        self,
        factory,  # type: Factory
        constraints,  # type: Dict[str, Constraint]
        ignore_dependencies,  # type: bool
        upgrade_strategy,  # type: str
        user_requested,  # type: Set[str]
    ):
        # type: (...) -> None
        self._factory = factory
        self._constraints = constraints
        self._ignore_dependencies = ignore_dependencies
        self._upgrade_strategy = upgrade_strategy
        self._user_requested = user_requested

    def identify(self, dependency):
        # type: (Union[Requirement, Candidate]) -> str
        # Both requirements and candidates are keyed by their project name.
        return dependency.name

    def get_preference(
        self,
        resolution,  # type: Optional[Candidate]
        candidates,  # type: Sequence[Candidate]
        information  # type: Sequence[Tuple[Requirement, Candidate]]
    ):
        # type: (...) -> Any
        """Produce a sort key for given requirement based on preference.

        The lower the return value is, the more preferred this group of
        arguments is.

        Currently pip considers the followings in order:

        * Prefer if any of the known requirements points to an explicit URL.
        * If equal, prefer if any requirements contain ``===`` and ``==``.
        * If equal, prefer if requirements include version constraints, e.g.
          ``>=`` and ``<``.
        * If equal, prefer user-specified (non-transitive) requirements.
        * If equal, order alphabetically for consistency (helps debuggability).
        """

        def _get_restrictive_rating(requirements):
            # type: (Iterable[Requirement]) -> int
            """Rate how restrictive a set of requirements are.

            ``Requirement.get_candidate_lookup()`` returns a 2-tuple for
            lookup. The first element is ``Optional[Candidate]`` and the
            second ``Optional[InstallRequirement]``.

            * If the requirement is an explicit one, the explicitly-required
              candidate is returned as the first element.
            * If the requirement is based on a PEP 508 specifier, the backing
              ``InstallRequirement`` is returned as the second element.

            We use the first element to check whether there is an explicit
            requirement, and the second for equality operator.
            """
            lookups = (r.get_candidate_lookup() for r in requirements)
            cands, ireqs = zip(*lookups)
            if any(cand is not None for cand in cands):
                return 0
            spec_sets = (ireq.specifier for ireq in ireqs if ireq)
            operators = [
                specifier.operator
                for spec_set in spec_sets
                for specifier in spec_set
            ]
            if any(op in ("==", "===") for op in operators):
                return 1
            if operators:
                return 2
            # A "bare" requirement without any version requirements.
            return 3

        restrictive = _get_restrictive_rating(req for req, _ in information)
        # Transitive == no requirement came directly from the user (no None parents).
        transitive = all(parent is not None for _, parent in information)
        key = next(iter(candidates)).name if candidates else ""

        # HACK: Setuptools have a very long and solid backward compatibility
        # track record, and extremely few projects would request a narrow,
        # non-recent version range of it since that would break a lot things.
        # (Most projects specify it only to request for an installer feature,
        # which does not work, but that's another topic.) Intentionally
        # delaying Setuptools helps reduce branches the resolver has to check.
        # This serves as a temporary fix for issues like "apache-airlfow[all]"
        # while we work on "proper" branch pruning techniques.
        delay_this = (key == "setuptools")

        # Tuple ordering implements the preference list in the docstring above.
        return (delay_this, restrictive, transitive, key)

    def find_matches(self, requirements):
        # type: (Sequence[Requirement]) -> Iterable[Candidate]
        if not requirements:
            return []
        name = requirements[0].project_name

        def _eligible_for_upgrade(name):
            # type: (str) -> bool
            """Are upgrades allowed for this project?

            This checks the upgrade strategy, and whether the project was one
            that the user specified in the command line, in order to decide
            whether we should upgrade if there's a newer version available.

            (Note that we don't need access to the `--upgrade` flag, because
            an upgrade strategy of "to-satisfy-only" means that `--upgrade`
            was not specified).
            """
            if self._upgrade_strategy == "eager":
                return True
            elif self._upgrade_strategy == "only-if-needed":
                return (name in self._user_requested)
            return False

        return self._factory.find_candidates(
            requirements,
            constraint=self._constraints.get(name, Constraint.empty()),
            prefers_installed=(not _eligible_for_upgrade(name)),
        )

    def is_satisfied_by(self, requirement, candidate):
        # type: (Requirement, Candidate) -> bool
        return requirement.is_satisfied_by(candidate)

    def get_dependencies(self, candidate):
        # type: (Candidate) -> Sequence[Requirement]
        with_requires = not self._ignore_dependencies
        # iter_dependencies may yield None placeholders; filter them out.
        return [
            r
            for r in candidate.iter_dependencies(with_requires)
            if r is not None
        ]
| 43.925714
| 83
| 0.623
|
4a03200a6c74349a1c21f67fc53dfbc05c4177dd
| 82
|
py
|
Python
|
tests/ut_repytests_testmemoryallocwithexceptions.py
|
SeattleTestbed/repy_v1
|
f40a02e2e398b1ec67fede84b41a264ae7356d2c
|
[
"MIT"
] | 1
|
2021-08-18T05:58:17.000Z
|
2021-08-18T05:58:17.000Z
|
tests/ut_repytests_testmemoryallocwithexceptions.py
|
SeattleTestbed/repy_v1
|
f40a02e2e398b1ec67fede84b41a264ae7356d2c
|
[
"MIT"
] | 3
|
2015-11-17T21:01:03.000Z
|
2016-07-14T09:08:04.000Z
|
tests/ut_repytests_testmemoryallocwithexceptions.py
|
SeattleTestbed/repy_v1
|
f40a02e2e398b1ec67fede84b41a264ae7356d2c
|
[
"MIT"
] | 5
|
2015-07-02T13:29:23.000Z
|
2021-09-25T07:48:30.000Z
|
# Thin wrapper: delegate this unit test to the shared logging test skeleton.
import loggingskeleton
loggingskeleton.test("l_testmemoryallocwithexceptions.py")
| 27.333333
| 58
| 0.890244
|
4a03212e2fcdb0968b84add8a53b1b339189b5ba
| 1,355
|
py
|
Python
|
rocket/connectors/files.py
|
Contraz/pyrocket
|
bc1129ba30b32a3324f8416a698f9d93555f9e35
|
[
"Zlib"
] | 19
|
2017-04-14T09:52:16.000Z
|
2022-03-20T00:43:57.000Z
|
rocket/connectors/files.py
|
Contraz/pyrocket
|
bc1129ba30b32a3324f8416a698f9d93555f9e35
|
[
"Zlib"
] | 2
|
2018-07-03T21:31:01.000Z
|
2018-08-14T19:43:56.000Z
|
rocket/connectors/files.py
|
Contraz/pyrocket
|
bc1129ba30b32a3324f8416a698f9d93555f9e35
|
[
"Zlib"
] | 5
|
2017-07-29T20:59:34.000Z
|
2021-08-21T20:57:18.000Z
|
"""
Connector reading track files in binary format.
Each track is a separate file.
"""
import logging
import os
from .base import Connector
from rocket.tracks import Track
logger = logging.getLogger("rocket")
class FilesConnector(Connector):
    """Loads individual track files (``*.track``) from a directory."""

    def __init__(self, track_path, controller=None, tracks=None):
        """
        Load binary track files

        :param track_path: Path to track directory
        :param controller: The controller
        :param tracks: Track container
        :raises ValueError: if ``track_path`` is None or does not exist
        """
        logger.info("Initialize loading binary track data")
        self.controller = controller
        self.tracks = tracks
        self.path = track_path
        # Register this connector on both collaborators.
        self.controller.connector = self
        self.tracks.connector = self
        if self.path is None:
            raise ValueError("track path is None")
        if not os.path.exists(self.path):
            raise ValueError("Track directory do not exist: {}".format(self.path))
        logger.info("Looking for track files in '%s'", self.path)
        for f in os.listdir(self.path):
            if not f.endswith(".track"):
                continue
            name = Track.trackname(f)
            # FIX: the format string was missing its '%s' placeholder, so the
            # track name was never logged (logging reported a format error).
            logger.info("Attempting to load '%s'", name)
            t = self.tracks.get_or_create(name)
            t.load(os.path.join(self.path, f))
| 31.511628
| 82
| 0.62583
|
4a0322d9b96e4bcba8c4b24fce00cd49d11bf349
| 12,030
|
py
|
Python
|
mars/dataframe/base/apply.py
|
vibhatha/mars
|
7a6b78ca4befd1a46d82cfb0163ffcd49293f7b5
|
[
"Apache-2.0"
] | null | null | null |
mars/dataframe/base/apply.py
|
vibhatha/mars
|
7a6b78ca4befd1a46d82cfb0163ffcd49293f7b5
|
[
"Apache-2.0"
] | null | null | null |
mars/dataframe/base/apply.py
|
vibhatha/mars
|
7a6b78ca4befd1a46d82cfb0163ffcd49293f7b5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import numpy as np
import pandas as pd
from ... import opcodes
from ...config import options
from ...core import OutputType
from ...serialize import StringField, AnyField, BoolField, \
TupleField, DictField, FunctionField
from ..operands import DataFrameOperandMixin, DataFrameOperand
from ..utils import build_df, build_series, parse_index, validate_axis
class ApplyOperand(DataFrameOperand, DataFrameOperandMixin):
    """Operand implementing DataFrame/Series ``apply``.

    Holds the user function plus the pandas ``apply`` keyword arguments,
    infers output metadata by trial-applying the function to a small empty
    DataFrame/Series, and tiles the computation per input chunk.
    """
    _op_type_ = opcodes.APPLY
    # Serializable fields mirroring the constructor arguments.
    _func = FunctionField('func')
    _axis = AnyField('axis')
    _convert_dtype = BoolField('convert_dtype')
    _raw = BoolField('raw')
    _result_type = StringField('result_type')
    _elementwise = BoolField('elementwise')
    _args = TupleField('args')
    _kwds = DictField('kwds')
    def __init__(self, func=None, axis=None, convert_dtype=None, raw=None, result_type=None,
                 args=None, kwds=None, output_type=None, elementwise=None, **kw):
        if output_type:
            kw['_output_types'] = [output_type]
        super().__init__(_func=func, _axis=axis, _convert_dtype=convert_dtype, _raw=raw,
                         _result_type=result_type, _args=args, _kwds=kwds,
                         _elementwise=elementwise, **kw)
    @property
    def func(self):
        return self._func
    @property
    def axis(self):
        return self._axis
    @property
    def convert_dtype(self):
        return self._convert_dtype
    @property
    def raw(self):
        return self._raw
    @property
    def result_type(self):
        return self._result_type
    @property
    def elementwise(self):
        return self._elementwise
    @property
    def args(self):
        # Normalize unset/None to an empty tuple for safe unpacking.
        return getattr(self, '_args', None) or ()
    @property
    def kwds(self):
        # Normalize unset/None to an empty dict for safe unpacking.
        return getattr(self, '_kwds', None) or dict()
    @classmethod
    def execute(cls, ctx, op):
        """Run pandas ``apply`` on one chunk's data and store the result."""
        input_data = ctx[op.inputs[0].key]
        if isinstance(input_data, pd.DataFrame):
            result = input_data.apply(op.func, axis=op.axis, raw=op.raw, result_type=op.result_type,
                                      args=op.args, **op.kwds)
        else:
            # Series.apply has a different signature (no axis/raw/result_type).
            result = input_data.apply(op.func, convert_dtype=op.convert_dtype, args=op.args,
                                      **op.kwds)
        ctx[op.outputs[0].key] = result
    @classmethod
    def _tile_df(cls, op):
        """Tile a DataFrame apply: one output chunk per input chunk.

        Unless the function is elementwise, the input is first rechunked so
        the applied axis is a single chunk — apply needs whole rows/columns.
        """
        in_df = op.inputs[0]
        out_df = op.outputs[0]
        axis = op.axis
        elementwise = op.elementwise
        if not elementwise and in_df.chunk_shape[axis] > 1:
            chunk_size = (
                in_df.shape[axis],
                max(1, options.chunk_store_limit // in_df.shape[axis]),
            )
            if axis == 1:
                chunk_size = chunk_size[::-1]
            in_df = in_df.rechunk(chunk_size)._inplace_tile()
        chunks = []
        if out_df.ndim == 2:
            for c in in_df.chunks:
                if elementwise:
                    # Elementwise functions preserve shape and both indexes.
                    new_shape = c.shape
                    new_index_value, new_columns_value = c.index_value, c.columns_value
                else:
                    # Result length along the applied axis is unknown (nan).
                    new_shape = [np.nan, np.nan]
                    new_shape[1 - axis] = c.shape[1 - axis]
                    if axis == 0:
                        new_index_value = out_df.index_value
                        new_columns_value = c.columns_value
                    else:
                        new_index_value = c.index_value
                        new_columns_value = out_df.columns_value
                if op.axis == 0:
                    new_dtypes = out_df.dtypes[c.dtypes.keys()]
                else:
                    new_dtypes = out_df.dtypes
                new_op = op.copy().reset_key()
                chunks.append(new_op.new_chunk([c], shape=tuple(new_shape), index=c.index, dtypes=new_dtypes,
                                               index_value=new_index_value, columns_value=new_columns_value))
            new_nsplits = list(in_df.nsplits)
            if not elementwise:
                new_nsplits[axis] = (np.nan,) * len(new_nsplits[axis])
        else:
            # DataFrame in, Series out: the applied axis collapses away.
            for c in in_df.chunks:
                shape_len = c.shape[1 - axis]
                new_index_value = c.index_value if axis == 1 else c.columns_value
                new_index = (c.index[1 - axis],)
                new_op = op.copy().reset_key()
                chunks.append(new_op.new_chunk([c], shape=(shape_len,), index=new_index, dtype=out_df.dtype,
                                               index_value=new_index_value))
            new_nsplits = (in_df.nsplits[1 - axis],)
        new_op = op.copy().reset_key()
        kw = out_df.params.copy()
        kw.update(dict(chunks=chunks, nsplits=tuple(new_nsplits)))
        return new_op.new_tileables(op.inputs, **kw)
    @classmethod
    def _tile_series(cls, op):
        """Tile a Series apply: the chunk layout is preserved one-to-one."""
        in_series = op.inputs[0]
        out_series = op.outputs[0]
        chunks = []
        for c in in_series.chunks:
            new_op = op.copy().reset_key()
            kw = c.params.copy()
            kw['dtype'] = out_series.dtype
            if out_series.ndim == 2:
                kw['columns_value'] = out_series.columns_value
            chunks.append(new_op.new_chunk([c], **kw))
        new_op = op.copy().reset_key()
        kw = out_series.params.copy()
        kw.update(dict(chunks=chunks, nsplits=in_series.nsplits))
        if out_series.ndim == 2:
            kw['columns_value'] = out_series.columns_value
        return new_op.new_tileables(op.inputs, **kw)
    @classmethod
    def tile(cls, op):
        """Dispatch tiling on the input's dimensionality."""
        if op.inputs[0].ndim == 2:
            return cls._tile_df(op)
        else:
            return cls._tile_series(op)
    def _infer_df_func_returns(self, df, dtypes, index):
        """Infer output dtypes/index by applying the function to an empty df.

        Falls back silently (deliberate bare except) to whatever metadata the
        caller supplied when the trial application fails.
        """
        if isinstance(self._func, np.ufunc):
            # ufuncs are elementwise by definition: shape and index survive.
            output_type, new_dtypes, index_value, new_elementwise = \
                OutputType.dataframe, None, 'inherit', True
        else:
            output_type, new_dtypes, index_value, new_elementwise = None, None, None, False
        try:
            empty_df = build_df(df, size=2)
            with np.errstate(all='ignore'):
                infer_df = empty_df.apply(self._func, axis=self._axis, raw=self._raw,
                                          result_type=self._result_type, args=self.args, **self.kwds)
            if index_value is None:
                if infer_df.index is empty_df.index:
                    # Function kept the input index: reuse it at call time.
                    index_value = 'inherit'
                else:
                    index_value = parse_index(pd.RangeIndex(-1))
            if isinstance(infer_df, pd.DataFrame):
                output_type = output_type or OutputType.dataframe
                new_dtypes = new_dtypes or infer_df.dtypes
            else:
                output_type = output_type or OutputType.series
                new_dtypes = new_dtypes or infer_df.dtype
            new_elementwise = False if new_elementwise is None else new_elementwise
        except:  # noqa: E722  # nosec
            pass
        # Caller-supplied values always win over inferred ones.
        self.output_types = [output_type] if not self.output_types else self.output_types
        dtypes = new_dtypes if dtypes is None else dtypes
        index_value = index_value if index is None else parse_index(index)
        self._elementwise = new_elementwise if self._elementwise is None else self._elementwise
        return dtypes, index_value
    def _infer_series_func_returns(self, df):
        """Infer the output dtype/name from a 2-element trial series."""
        try:
            empty_series = build_series(df, size=2, name=df.name)
            with np.errstate(all='ignore'):
                infer_series = empty_series.apply(self._func, args=self.args, **self.kwds)
            new_dtype = infer_series.dtype
            name = infer_series.name
        except:  # noqa: E722  # nosec  # pylint: disable=bare-except
            new_dtype = np.dtype('object')
            name = None
        return new_dtype, name
    def _call_dataframe(self, df, dtypes=None, index=None):
        """Build the output tileable for a DataFrame input."""
        dtypes, index_value = self._infer_df_func_returns(df, dtypes, index)
        for arg, desc in zip((self.output_types, dtypes, index_value),
                             ('output_types', 'dtypes', 'index')):
            if arg is None:
                raise TypeError(f'Cannot determine {desc} by calculating with enumerate data, '
                                'please specify it as arguments')
        if index_value == 'inherit':
            index_value = df.index_value
        if self._elementwise:
            shape = df.shape
        elif self.output_types[0] == OutputType.dataframe:
            shape = [np.nan, np.nan]
            shape[1 - self.axis] = df.shape[1 - self.axis]
            shape = tuple(shape)
        else:
            shape = (df.shape[1 - self.axis],)
        if self.output_types[0] == OutputType.dataframe:
            if self.axis == 0:
                return self.new_dataframe([df], shape=shape, dtypes=dtypes, index_value=index_value,
                                          columns_value=parse_index(dtypes.index))
            else:
                return self.new_dataframe([df], shape=shape, dtypes=dtypes, index_value=df.index_value,
                                          columns_value=parse_index(dtypes.index))
        else:
            return self.new_series([df], shape=shape, dtype=dtypes, index_value=index_value)
    def _call_series(self, series):
        """Build the output tileable for a Series input."""
        if self._convert_dtype:
            dtype, name = self._infer_series_func_returns(series)
        else:
            # Mirrors pandas: convert_dtype=False always yields object dtype.
            dtype, name = np.dtype('object'), None
        return self.new_series([series], dtype=dtype, shape=series.shape,
                               index_value=series.index_value, name=name)
    def __call__(self, df, dtypes=None, index=None):
        axis = getattr(self, 'axis', None) or 0
        self._axis = validate_axis(axis, df)
        if df.op.output_types[0] == OutputType.dataframe:
            return self._call_dataframe(df, dtypes=dtypes, index=index)
        else:
            return self._call_series(df)
def df_apply(df, func, axis=0, raw=False, result_type=None, args=(), dtypes=None,
             output_type=None, index=None, elementwise=None, **kwds):
    """Apply *func* along an axis of DataFrame *df*.

    A list/dict of functions is routed to ``aggregate``; a string naming a
    DataFrame member function is invoked eagerly; any other callable builds
    an ``ApplyOperand`` and returns the lazy tileable result.
    """
    # A collection of functions means aggregation, not apply.
    if isinstance(func, (list, dict)):
        return df.aggregate(func)
    # Allow the output type to be given as a case-insensitive string.
    if isinstance(output_type, str):
        output_type = getattr(OutputType, output_type.lower())
    # calling member function
    if isinstance(func, str):
        bound = getattr(df, func)
        # Forward the axis only if the member function accepts one.
        if "axis" in inspect.getfullargspec(bound).args:
            kwds["axis"] = axis
        return bound(*args, **kwds)
    op = ApplyOperand(func=func, axis=axis, raw=raw, result_type=result_type,
                      args=args, kwds=kwds, output_type=output_type,
                      elementwise=elementwise)
    return op(df, dtypes=dtypes, index=index)
def series_apply(series, func, convert_dtype=True, args=(), **kwds):
    """Apply *func* to the values of *series*.

    A list/dict of functions is routed to ``aggregate``; a string is first
    looked up as a Series member function, then as a numpy function of the
    same name; any other callable builds an ``ApplyOperand``.

    :raises AttributeError: if a string ``func`` names neither a Series
        member function nor a numpy function.
    """
    if isinstance(func, (list, dict)):
        return series.aggregate(func)
    # calling member function
    if isinstance(func, str):
        func_name = func
        func_body = getattr(series, func, None)
        if func_body is not None:
            return func_body(*args, **kwds)
        # Fall back to a numpy function of the same name.
        func = getattr(np, func, None)
        if func is None:
            # BUG FIX: the original message evaluated type(series.__name__)
            # (itself an AttributeError) and interpolated the already
            # clobbered ``func`` (None) instead of the requested name.
            raise AttributeError(
                f"{func_name!r} is not a valid function for "
                f"'{type(series).__name__}' object")
    op = ApplyOperand(func=func, convert_dtype=convert_dtype, args=args, kwds=kwds,
                      output_type=OutputType.series)
    return op(series)
| 38.806452
| 109
| 0.592103
|
4a0323504a247613758402728ad9dffac4131bca
| 2,273
|
py
|
Python
|
config/urls.py
|
ASU-CodeDevils/codedevils.org
|
0f7c62bdad58c9907c903899cd12555f07584d37
|
[
"MIT"
] | 2
|
2021-02-19T02:37:01.000Z
|
2021-04-18T22:20:22.000Z
|
config/urls.py
|
ASU-CodeDevils/codedevils_org
|
0f7c62bdad58c9907c903899cd12555f07584d37
|
[
"MIT"
] | 4
|
2020-07-10T05:25:24.000Z
|
2020-10-07T05:01:01.000Z
|
config/urls.py
|
ASU-CodeDevils/codedevils_org
|
0f7c62bdad58c9907c903899cd12555f07584d37
|
[
"MIT"
] | 1
|
2021-02-19T04:23:46.000Z
|
2021-02-19T04:23:46.000Z
|
from django.conf import settings
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views import defaults as default_views
from django_cas_ng import views as cas_views
from codedevils_org import page_views
# locale
# i18n_patterns serves each of these URLs under an active-language prefix.
urlpatterns = i18n_patterns(
    path("", page_views.home, name="home"),
    path("about/", page_views.about, name="about"),
    path("contactus/", page_views.contact_us, name="contactus"),
    path("workspace/", page_views.workspace, name="workspace"),
    # Django Admin, use {% url 'admin:index' %}
    path(settings.ADMIN_URL, admin.site.urls),
    # cas log in
    path("login/", cas_views.LoginView.as_view(), name="cas_ng_login"),
    path("logout/", cas_views.LogoutView.as_view(), name="cas_ng_logout"),
    # User management
    path("users/", include("codedevils_org.users.urls", namespace="users")),
    # rosetta translation page
    path("rosetta/", include("rosetta.urls")),
    # custom urls
    path("", include("codedevils_org.contrib.cd_url.urls", namespace="cd_url")),
)
# API URLS
# The API is not language-prefixed; media files are served alongside it.
urlpatterns += [
    # API base url
    path("api/", include("config.api_router"))
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        # custom error pages for debugging in development
        # these will be replaced by the server's error pages
        path(
            "400/",
            default_views.bad_request,
            kwargs={"exception": Exception("Bad Request")},
        ),
        path(
            "403/",
            default_views.permission_denied,
            kwargs={"exception": Exception("Permission Denied")},
        ),
        path(
            "404/",
            default_views.page_not_found,
            kwargs={"exception": Exception("Page not Found")},
        ),
        path("500/", default_views.server_error),
    ]
    if "debug_toolbar" in settings.INSTALLED_APPS:
        import debug_toolbar
        # Prepend so the toolbar URL is matched before the i18n patterns.
        urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
| 35.515625
| 85
| 0.66344
|
4a032394c32e29440e32a8bbdab78f5296b6caa8
| 10,200
|
py
|
Python
|
kits19cnn/io/preprocess.py
|
jchen42703/kits19-cnn
|
a1c78beaaf981fa039be62a5178fb16e3713bb64
|
[
"Apache-2.0"
] | 15
|
2019-08-07T06:27:54.000Z
|
2022-03-20T20:20:06.000Z
|
kits19cnn/io/preprocess.py
|
jchen42703/kits19-cnn
|
a1c78beaaf981fa039be62a5178fb16e3713bb64
|
[
"Apache-2.0"
] | 14
|
2019-08-05T12:57:26.000Z
|
2019-12-09T06:45:45.000Z
|
kits19cnn/io/preprocess.py
|
jchen42703/kits19-cnn
|
a1c78beaaf981fa039be62a5178fb16e3713bb64
|
[
"Apache-2.0"
] | 4
|
2019-08-13T08:49:32.000Z
|
2022-02-04T14:07:36.000Z
|
import os
from os.path import join, isdir
from pathlib import Path
from collections import defaultdict
from tqdm import tqdm
import nibabel as nib
import numpy as np
import json
from kits19cnn.io.resample import resample_patient
class Preprocessor(object):
    """
    Preprocesses the original (interpolated) KiTS19 dataset.

    Procedures:
        * resampling from the per-case captured spacing (read from
          kits.json) to `target_spacing`
        * clipping intensities to `clip_values` (ROI)
        * saving each case as .npy arrays: imaging.npy and, when
          `with_mask`, segmentation.npy
        * optional re-slicing of the saved 3D volumes to per-slice 2D
          arrays plus JSON indices of foreground slices
    """
    # NOTE(review): fg_classes uses a mutable default ([1, 2]); safe only as
    # long as no caller mutates it — consider a None sentinel.
    def __init__(self, in_dir, out_dir, cases=None, kits_json_path=None,
                 target_spacing=(3.22, 1.62, 1.62),
                 clip_values=None, with_mask=False, fg_classes=[1, 2]):
        """
        Args:
            in_dir (str): directory with the input data, i.e. the
                kits19/data directory.
            out_dir (str): output directory for the preprocessed cases
                (created if missing).
            cases: list of case folder paths to preprocess; defaults to
                every "case*" folder found in `in_dir`.
            kits_json_path (str): path to kits.json in the kits19/data
                directory; only needed when resampling. Defaults to None.
            target_spacing (list/tuple): spacing to resample to.
            clip_values (list, tuple): (low, high) clip range for the CT
                intensities. Defaults to None for no clipping.
            with_mask (bool): whether segmentation masks are loaded and
                preprocessed (False for the unlabeled test set).
            fg_classes (list): foreground class indices.
        """
        self.in_dir = in_dir
        self.out_dir = out_dir
        self._load_kits_json(kits_json_path)
        self.clip_values = clip_values
        self.target_spacing = np.array(target_spacing)
        self.with_mask = with_mask
        self.fg_classes = fg_classes
        self.cases = cases
        # automatically collecting all of the case folder names
        if self.cases is None:
            self.cases = [os.path.join(self.in_dir, case) \
                          for case in os.listdir(self.in_dir) \
                          if case.startswith("case")]
            self.cases = sorted(self.cases)
        assert len(self.cases) > 0, \
            "Please make sure that in_dir refers to the proper directory."
        # making directory if out_dir doesn't exist
        if not isdir(out_dir):
            os.mkdir(out_dir)
            print("Created directory: {0}".format(out_dir))
    def gen_data(self):
        """Preprocess every case and save the result as .npy files.

        Loads imaging.nii.gz (and segmentation.nii.gz when `with_mask`)
        per case, preprocesses the pair, and writes into `self.out_dir`.
        Returns nothing.
        """
        # Generating data and saving them recursively
        for case in tqdm(self.cases):
            x_path, y_path = join(case, "imaging.nii.gz"), join(case, "segmentation.nii.gz")
            image = nib.load(x_path).get_fdata()[None]
            label = nib.load(y_path).get_fdata()[None] if self.with_mask \
                else None
            preprocessed_img, preprocessed_label = self.preprocess(image,
                                                                   label,
                                                                   case)
            self.save_imgs(preprocessed_img, preprocessed_label, case)
    def preprocess(self, image, mask, case=None):
        """
        Resampling and clipping for one case.
        Args:
            image: numpy array, shape (n_channels, d, h, w)
            mask: numpy array of the same layout, or None
            case (str): path to a case folder
        Returns:
            tuple of:
                - preprocessed image (extra leading axis added)
                - preprocessed mask or None
        """
        raw_case = Path(case).name # raw case name, i.e. case_00000
        if self.target_spacing is not None:
            # NOTE(review): if no kits.json entry matches raw_case,
            # case_info_dict is never bound (NameError below); and when
            # kits_json_path was None, self.kits_json is unset entirely —
            # confirm callers always supply kits.json when resampling.
            for info_dict in self.kits_json:
                # guaranteeing that the info is corresponding to the right
                # case
                if info_dict["case_id"] == raw_case:
                    case_info_dict = info_dict
                    break
            orig_spacing = (case_info_dict["captured_slice_thickness"],
                            case_info_dict["captured_pixel_width"],
                            case_info_dict["captured_pixel_width"])
            image, mask = resample_patient(image, mask, np.array(orig_spacing),
                                           target_spacing=self.target_spacing)
        if self.clip_values is not None:
            image = np.clip(image, self.clip_values[0], self.clip_values[1])
        mask = mask[None] if mask is not None else mask
        return (image[None], mask)
    def save_imgs(self, image, mask, case):
        """
        Saves an image and mask pair as .npy arrays in the KiTS19 file structure
        Args:
            image: numpy array
            mask: numpy array or None (then no segmentation.npy is written)
            case: path to a case folder (each element of self.cases)
        """
        # saving the generated dataset
        # output dir in KiTS19 format
        # extracting the raw case folder name
        case = Path(case).name
        out_case_dir = join(self.out_dir, case)
        # checking to make sure that the output directories exist
        if not isdir(out_case_dir):
            os.mkdir(out_case_dir)
        np.save(os.path.join(out_case_dir, "imaging.npy"), image)
        if mask is not None:
            np.save(os.path.join(out_case_dir, "segmentation.npy"), mask)
    def save_dir_as_2d(self):
        """
        Takes preprocessed 3D numpy arrays and saves them as slices
        in the same directory.
        """
        # NOTE(review): reads imaging.npy/segmentation.npy from the case
        # paths in self.cases (the input dir), not from self.out_dir —
        # confirm cases point at already-preprocessed folders here.
        self.pos_slice_dict = {}
        # Generating data and saving them recursively
        for case in tqdm(self.cases):
            # assumes the .npy files have shape: (n_channels, d, h, w)
            image = np.load(join(case, "imaging.npy"))
            label = np.load(join(case, "segmentation.npy"))
            image = image.squeeze(axis=0) if len(image.shape)==5 else image
            label = label.squeeze(axis=0) if len(label.shape)==5 else label
            self.save_3d_as_2d(image, label, case)
        self._save_pos_slice_dict()
    def save_3d_as_2d(self, image, mask, case):
        """
        Saves an image and mask pair as per-slice .npy arrays in the
        KiTS19 file structure, recording foreground slice indices.
        Args:
            image: numpy array, shape (n_channels, d, h, w)
            mask: numpy array, same layout
            case: path to a case folder (each element of self.cases)
        """
        # saving the generated dataset
        # output dir in KiTS19 format
        # extracting the raw case folder name
        case = Path(case).name
        out_case_dir = join(self.out_dir, case)
        # checking to make sure that the output directories exist
        if not isdir(out_case_dir):
            os.mkdir(out_case_dir)
        # iterates through all slices and saves them individually as 2D arrays
        fg_indices = defaultdict(list)
        if mask.shape[1] <= 1:
            print("WARNING: Please double check your mask shape;",
                  f"Masks have shape {mask.shape} when it should be",
                  "shape (n_channels, d, h, w)")
            raise Exception("Please fix shapes.")
        for slice_idx in range(mask.shape[1]):
            label_slice = mask[:, slice_idx]
            # appending fg slice indices
            for idx in self.fg_classes:
                if (label_slice == idx).any():
                    fg_indices[idx].append(slice_idx)
            # naming convention: {type of slice}_{case}_{slice_idx}
            slice_idx_str = str(slice_idx)
            # adding 0s to slice_idx until it reaches 3 digits,
            # so sorting files is easier when stacking
            while len(slice_idx_str) < 3:
                slice_idx_str = "0"+slice_idx_str
            np.save(join(out_case_dir, f"imaging_{slice_idx_str}.npy"),
                    image[:, slice_idx])
            np.save(join(out_case_dir, f"segmentation_{slice_idx_str}.npy"),
                    label_slice)
        # {case1: [idx1, idx2,...], case2: ...}
        self.pos_slice_dict[case] = fg_indices
    def _save_pos_slice_dict(self):
        """
        Saves the foreground (positive) class dictionaries:
        - slice_indices.json
            saves the slice indices per class
                {
                case: {fg_class1: [slice indices...],
                       fg_class2: [slice indices...],
                       ...}
                }
        - slice_indices_general.json
            saves the slice indices for all foreground classes into a
            single list
                {case: [slice indices...],}
        """
        # converting pos_slice_dict to general_slice_dict
        general_slice_dict = defaultdict(list)
        for case, slice_idx_dict in self.pos_slice_dict.items():
            for slice_idx_list in list(slice_idx_dict.values()):
                for slice_idx in slice_idx_list:
                    general_slice_dict[case].append(slice_idx)
        save_path = join(self.out_dir, "slice_indices.json")
        save_path_general = join(self.out_dir, "slice_indices_general.json")
        # saving the dictionaries
        print(f"Logged the slice indices for each class in {self.fg_classes} at"
              f"{save_path}.")
        with open(save_path, "w") as fp:
            json.dump(self.pos_slice_dict, fp)
        print("Logged slice indices for all fg classes instead of for each",
              f"class separately at {save_path_general}.")
        with open(save_path_general, "w") as fp:
            json.dump(general_slice_dict, fp)
    def _load_kits_json(self, json_path):
        """
        Loads the kits.json file into `self.kits_json`.
        """
        # NOTE(review): when json_path is None, self.kits_json stays unset;
        # preprocess() with a non-None target_spacing would then raise
        # AttributeError — confirm intended usage.
        if json_path is None:
            print("`kits_json_path is empty, so not resampling.`")
        elif json_path is not None:
            with open(json_path, "r") as fp:
                self.kits_json = json.load(fp)
| 42.323651
| 105
| 0.575294
|
4a0323b7e2de46d2f0d2d6af9711b933c9e674b8
| 2,278
|
pyt
|
Python
|
src/python_boilerplate/templates/setup.pyt
|
LeticiaISilveira/python-boilerplate
|
af36891e7c2f0dfe81e64f29d2739ecd7691b4ee
|
[
"CNRI-Python"
] | 76
|
2016-10-23T14:06:31.000Z
|
2022-02-15T14:13:22.000Z
|
src/python_boilerplate/templates/setup.pyt
|
saber13812002/python-boilerplate
|
af36891e7c2f0dfe81e64f29d2739ecd7691b4ee
|
[
"CNRI-Python"
] | 6
|
2016-08-24T20:02:21.000Z
|
2021-07-08T05:49:54.000Z
|
src/python_boilerplate/templates/setup.pyt
|
saber13812002/python-boilerplate
|
af36891e7c2f0dfe81e64f29d2739ecd7691b4ee
|
[
"CNRI-Python"
] | 34
|
2016-08-24T20:12:12.000Z
|
2022-03-03T03:55:52.000Z
|
# -*- coding: utf-8 -*-
{%- if boilerplate_header|default(True) %}
#
# This file were created by Python Boilerplate. Use boilerplate to start simple
# usable and best-practices compliant Python projects.
#
# Learn more about it at: http://github.com/fabiommendes/python-boilerplate/
#
{% endif %}
import os
import codecs
from setuptools import setup, find_packages
# Save version and author to __meta__.py
version = open('VERSION').read().strip()
dirname = os.path.dirname(__file__)
path = os.path.join(dirname, 'src', {{ pyname|repr }}, '__meta__.py')
meta = '''# Automatically created. Please do not edit.
__version__ = '%s'
__author__ = {{ author|unicode_escape|repr }}
''' % version
with open(path, 'w') as F:
F.write(meta)
setup(
# Basic info
name={{ pyname|replace('_', '-')|repr }},
version=version,
author={{ author|repr }},
author_email='{{ email }}',
url='{{ url|default(github) }}',
description='{{ short_description|default("A short description for your project.") }}',
long_description=codecs.open('README.rst', 'rb', 'utf8').read(),
# Classifiers (see https://pypi.python.org/pypi?%3Aaction=list_classifiers)
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
{%- for classifier in classifiers %}
'{{ classifier }}',
{%- endfor %}
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
],
# Packages and dependencies
package_dir={'': 'src'},
packages=find_packages('src'),
install_requires=[{{ requirements|indent(8) }}
],
extras_require={
'dev': [
'python-boilerplate[dev]',
],
},
{%- if has_script|default(True) %}
# Scripts
entry_points={
'console_scripts': ['{{ pip_name }} = {{ package_name }}.__main__:main'],
},
{%- endif %}
# Other configurations
zip_safe=False,
platforms='any',
)
| 31.205479
| 91
| 0.617208
|
4a032409a5c241647a3b75654c3025943c22cb55
| 6,775
|
py
|
Python
|
data/dataloader.py
|
hyperconnect/LADE
|
cfe96b7ca6520f3410d4cae9cc10919e6114bbb9
|
[
"BSD-3-Clause"
] | 78
|
2020-11-30T09:46:01.000Z
|
2022-03-30T02:42:48.000Z
|
data/dataloader.py
|
hyperconnect/LADE
|
cfe96b7ca6520f3410d4cae9cc10919e6114bbb9
|
[
"BSD-3-Clause"
] | 18
|
2020-12-30T10:39:11.000Z
|
2022-03-21T07:27:27.000Z
|
data/dataloader.py
|
hyperconnect/LADE
|
cfe96b7ca6520f3410d4cae9cc10919e6114bbb9
|
[
"BSD-3-Clause"
] | 8
|
2020-12-02T15:41:23.000Z
|
2022-02-26T11:57:37.000Z
|
"""Copyright (c) Hyperconnect, Inc. and its affiliates.
All rights reserved.
Copyright (c) Facebook, Inc. and its affiliates.
All rights reserved.
This source code is licensed under the license found in the
LICENSE file in the root directory of this source tree.
Portions of the source code are from the OLTR project which
notice below and in LICENSE in the root directory of
this source tree.
Copyright (c) 2019, Zhongqi Miao
All rights reserved.
"""
from collections import Counter
import torch
import numpy as np
import torchvision
from torch.utils.data import Dataset, DataLoader, ConcatDataset
from torchvision import transforms
import os
from PIL import Image
from data.ImbalanceCIFAR import IMBALANCECIFAR10, IMBALANCECIFAR100
# Image statistics
# Per-dataset channel means/stds consumed by get_data_transform's Normalize.
RGB_statistics = {
    'iNaturalist18': {
        'mean': [0.466, 0.471, 0.380],
        'std': [0.195, 0.194, 0.192]
    },
    'default': {
        'mean': [0.485, 0.456, 0.406],
        'std':[0.229, 0.224, 0.225]
    }
}
# Data transformation with augmentation
def get_data_transform(split, rgb_mean, rbg_std, key='default'):
    """Return the torchvision transform pipeline for *split*.

    Args:
        split: one of 'train', 'val', 'test'.
        rgb_mean: per-channel normalization mean.
        rbg_std: per-channel normalization std (parameter name keeps the
            original 'rbg' typo to preserve the keyword interface).
        key: 'iNaturalist18' selects the train augmentation without color
            jitter; anything else uses the default train pipeline. val/test
            pipelines are identical regardless of key.
    """
    data_transforms = {
        'train': transforms.Compose([
            transforms.RandomResizedCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(rgb_mean, rbg_std)
        ]) if key == 'iNaturalist18' else transforms.Compose([
            transforms.RandomResizedCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0),
            transforms.ToTensor(),
            transforms.Normalize(rgb_mean, rbg_std)
        ]),
        'val': transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize(rgb_mean, rbg_std)
        ]),
        'test': transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize(rgb_mean, rbg_std)
        ])
    }
    return data_transforms[split]
# Dataset
class LT_Dataset(Dataset):
    """Long-tailed dataset read from a txt listing of `path label` lines."""
    def __init__(self, root, txt, transform=None, template=None, top_k=None):
        """
        Args:
            root: directory prefixed onto every relative image path in *txt*.
            txt: listing file; each line is "<relative path> <int label>".
            transform: optional torchvision transform applied per sample.
            template: path prefix used to save/load the top-k class mapping.
            top_k: if truthy, keep only the k most frequent classes and
                remap their labels to 0..k-1.
        """
        self.img_path = []
        self.labels = []
        self.transform = transform
        with open(txt) as f:
            for line in f:
                self.img_path.append(os.path.join(root, line.split()[0]))
                self.labels.append(int(line.split()[1]))
        # get image number list
        # NOTE(review): computed before any top-k filtering below, so with
        # top_k set it reflects pre-selection counts — confirm intended.
        occur_dict = dict(Counter(self.labels))
        self.img_num_list = [occur_dict[i] for i in sorted(occur_dict.keys())]
        # select top k class
        if top_k:
            # only select top k in training, in case train/val/test not matching.
            if 'train' in txt:
                max_len = max(self.labels) + 1
                dist = [[i, 0] for i in range(max_len)]
                for i in self.labels:
                    dist[i][-1] += 1
                dist.sort(key = lambda x:x[1], reverse=True)
                # saving
                torch.save(dist, template + '_top_{}_mapping'.format(top_k))
            else:
                # loading
                dist = torch.load(template + '_top_{}_mapping'.format(top_k))
            selected_labels = {item[0]:i for i, item in enumerate(dist[:top_k])}
            # replace original path and labels
            self.new_img_path = []
            self.new_labels = []
            for path, label in zip(self.img_path, self.labels):
                if label in selected_labels:
                    self.new_img_path.append(path)
                    self.new_labels.append(selected_labels[label])
            self.img_path = self.new_img_path
            self.labels = self.new_labels
    def __len__(self):
        return len(self.labels)
    def __getitem__(self, index):
        """Return (transformed RGB image, label, index)."""
        path = self.img_path[index]
        label = self.labels[index]
        with open(path, 'rb') as f:
            sample = Image.open(f).convert('RGB')
        if self.transform is not None:
            sample = self.transform(sample)
        return sample, label, index
# Load datasets
def load_data(data_root, dataset, phase, batch_size, top_k_class=None,
              sampler_dic=None, num_workers=4, shuffle=True, cifar_imb_ratio=None,
              test_imb_ratio=None, reverse=False):
    """Build the dataset for *dataset*/*phase* and wrap it in a DataLoader.

    Returns a tuple of (per-class sample counts normalized to sum to 1,
    DataLoader). When `sampler_dic` is given and phase is 'train', the
    custom sampler is used and shuffle is disabled; otherwise a plain
    (optionally shuffled) loader is returned.
    """
    txt_split = phase
    if dataset == "Places_LT":
        txt = f"./data/Places_LT_v2/Places_LT_{phase}.txt"
        template = None
    else:
        txt = './data/%s/%s_%s.txt'%(dataset, dataset, txt_split)
        template = './data/%s/%s'%(dataset, dataset)
    print('Loading data from %s' % (txt))
    if dataset == 'iNaturalist18':
        print('===> Loading iNaturalist18 statistics')
        key = 'iNaturalist18'
    else:
        key = 'default'
    if dataset == 'CIFAR10_LT':
        print('====> CIFAR10 Imbalance Ratio: ', cifar_imb_ratio)
        set_ = IMBALANCECIFAR10(phase, imbalance_ratio=cifar_imb_ratio, root=data_root,
                                test_imb_ratio=test_imb_ratio, reverse=reverse)
    elif dataset == 'CIFAR100_LT':
        print('====> CIFAR100 Imbalance Ratio: ', cifar_imb_ratio)
        set_ = IMBALANCECIFAR100(phase, imbalance_ratio=cifar_imb_ratio, root=data_root,
                                 test_imb_ratio=test_imb_ratio, reverse=reverse)
    else:
        rgb_mean, rgb_std = RGB_statistics[key]['mean'], RGB_statistics[key]['std']
        # Phases other than train/val (e.g. 'test', 'train_plain') use the
        # deterministic test-time transform.
        if phase not in ['train', 'val']:
            transform = get_data_transform('test', rgb_mean, rgb_std, key)
        else:
            transform = get_data_transform(phase, rgb_mean, rgb_std, key)
        print('Use data transformation:', transform)
        set_ = LT_Dataset(data_root, txt, transform, template=template, top_k=top_k_class)
    print(len(set_))
    if sampler_dic and phase == 'train':
        print('=====> Using sampler: ', sampler_dic['sampler'])
        # print('Sample %s samples per-class.' % sampler_dic['num_samples_cls'])
        print('=====> Sampler parameters: ', sampler_dic['params'])
        return torch.FloatTensor(set_.img_num_list) / torch.FloatTensor(set_.img_num_list).sum(), \
            DataLoader(dataset=set_, batch_size=batch_size, shuffle=False,
                       sampler=sampler_dic['sampler'](set_, **sampler_dic['params']),
                       num_workers=num_workers)
    else:
        print('=====> No sampler.')
        print('=====> Shuffle is %s.' % (shuffle))
        return torch.FloatTensor(set_.img_num_list) / torch.FloatTensor(set_.img_num_list).sum(), \
            DataLoader(dataset=set_, batch_size=batch_size,
                       shuffle=shuffle, num_workers=num_workers)
| 37.021858
| 99
| 0.608561
|
4a0324a054444733278a6267fda230d2ac010f07
| 6,436
|
py
|
Python
|
zerver/tests/test_muting.py
|
Debilski/zulip
|
ff4b5d8ce699d43ffc648986354592235274b70c
|
[
"Apache-2.0"
] | 1
|
2020-03-17T14:58:50.000Z
|
2020-03-17T14:58:50.000Z
|
zerver/tests/test_muting.py
|
Debilski/zulip
|
ff4b5d8ce699d43ffc648986354592235274b70c
|
[
"Apache-2.0"
] | null | null | null |
zerver/tests/test_muting.py
|
Debilski/zulip
|
ff4b5d8ce699d43ffc648986354592235274b70c
|
[
"Apache-2.0"
] | null | null | null |
from django.utils.timezone import now as timezone_now
from datetime import timedelta
from typing import Any, Dict
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.models import (
get_stream,
UserProfile,
MutedTopic
)
from zerver.lib.topic_mutes import (
add_topic_mute,
get_topic_mutes,
remove_topic_mute,
topic_is_muted,
)
class MutedTopicsTests(ZulipTestCase):
    def test_user_ids_muting_topic(self) -> None:
        """Muting a topic registers the user id in user_ids_muting_topic,
        matching case-insensitively, and records a recent date_muted."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        realm = hamlet.realm
        stream = get_stream(u'Verona', realm)
        recipient = stream.recipient
        topic_name = 'teST topic'
        stream_topic_target = StreamTopicTarget(
            stream_id=stream.id,
            topic_name=topic_name,
        )
        user_ids = stream_topic_target.user_ids_muting_topic()
        self.assertEqual(user_ids, set())
        def mute_user(user: UserProfile) -> None:
            # Deliberately different casing from topic_name above to cover
            # the case-insensitive match.
            add_topic_mute(
                user_profile=user,
                stream_id=stream.id,
                recipient_id=recipient.id,
                topic_name='test TOPIC',
                date_muted=timezone_now(),
            )
        mute_user(hamlet)
        user_ids = stream_topic_target.user_ids_muting_topic()
        self.assertEqual(user_ids, {hamlet.id})
        hamlet_date_muted = MutedTopic.objects.filter(user_profile=hamlet)[0].date_muted
        self.assertTrue(timezone_now() - hamlet_date_muted <= timedelta(seconds=100))
        mute_user(cordelia)
        user_ids = stream_topic_target.user_ids_muting_topic()
        self.assertEqual(user_ids, {hamlet.id, cordelia.id})
        cordelia_date_muted = MutedTopic.objects.filter(user_profile=cordelia)[0].date_muted
        self.assertTrue(timezone_now() - cordelia_date_muted <= timedelta(seconds=100))
def test_add_muted_topic(self) -> None:
user = self.example_user('hamlet')
self.login_user(user)
stream = get_stream('Verona', user.realm)
url = '/api/v1/users/me/subscriptions/muted_topics'
payloads = [
{'stream': stream.name, 'topic': 'Verona3', 'op': 'add'},
{'stream_id': stream.id, 'topic': 'Verona3', 'op': 'add'},
]
for data in payloads:
result = self.api_patch(user, url, data)
self.assert_json_success(result)
self.assertIn([stream.name, 'Verona3'], get_topic_mutes(user))
self.assertTrue(topic_is_muted(user, stream.id, 'Verona3'))
self.assertTrue(topic_is_muted(user, stream.id, 'verona3'))
remove_topic_mute(
user_profile=user,
stream_id=stream.id,
topic_name='Verona3',
)
def test_remove_muted_topic(self) -> None:
user = self.example_user('hamlet')
realm = user.realm
self.login_user(user)
stream = get_stream(u'Verona', realm)
recipient = stream.recipient
url = '/api/v1/users/me/subscriptions/muted_topics'
payloads = [
{'stream': stream.name, 'topic': 'vERONA3', 'op': 'remove'},
{'stream_id': stream.id, 'topic': 'vEroNA3', 'op': 'remove'},
]
for data in payloads:
add_topic_mute(
user_profile=user,
stream_id=stream.id,
recipient_id=recipient.id,
topic_name='Verona3',
date_muted=timezone_now(),
)
self.assertIn([stream.name, 'Verona3'], get_topic_mutes(user))
result = self.api_patch(user, url, data)
self.assert_json_success(result)
self.assertNotIn([stream.name, 'Verona3'], get_topic_mutes(user))
self.assertFalse(topic_is_muted(user, stream.id, 'verona3'))
def test_muted_topic_add_invalid(self) -> None:
user = self.example_user('hamlet')
realm = user.realm
self.login_user(user)
stream = get_stream('Verona', realm)
recipient = stream.recipient
add_topic_mute(
user_profile=user,
stream_id=stream.id,
recipient_id=recipient.id,
topic_name=u'Verona3',
date_muted=timezone_now(),
)
url = '/api/v1/users/me/subscriptions/muted_topics'
data = {'stream': stream.name, 'topic': 'Verona3', 'op': 'add'} # type: Dict[str, Any]
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Topic already muted")
data = {'stream_id': 999999999, 'topic': 'Verona3', 'op': 'add'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Invalid stream id")
data = {'topic': 'Verona3', 'op': 'add'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Please supply 'stream'.")
data = {'stream': stream.name, 'stream_id': stream.id, 'topic': 'Verona3', 'op': 'add'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Please choose one: 'stream' or 'stream_id'.")
def test_muted_topic_remove_invalid(self) -> None:
user = self.example_user('hamlet')
realm = user.realm
self.login_user(user)
stream = get_stream('Verona', realm)
url = '/api/v1/users/me/subscriptions/muted_topics'
data = {'stream': 'BOGUS', 'topic': 'Verona3', 'op': 'remove'} # type: Dict[str, Any]
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Topic is not muted")
data = {'stream': stream.name, 'topic': 'BOGUS', 'op': 'remove'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Topic is not muted")
data = {'stream_id': 999999999, 'topic': 'BOGUS', 'op': 'remove'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Topic is not muted")
data = {'topic': 'Verona3', 'op': 'remove'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Please supply 'stream'.")
data = {'stream': stream.name, 'stream_id': stream.id, 'topic': 'Verona3', 'op': 'remove'}
result = self.api_patch(user, url, data)
self.assert_json_error(result, "Please choose one: 'stream' or 'stream_id'.")
| 36.568182
| 98
| 0.611715
|
4a0325441604cfcb1cb6225a18cda4a60a39b9f1
| 1,067
|
py
|
Python
|
conohadnsclient/tests/test_v1/test_touch.py
|
naototty/python-conohadns-client
|
04f360450d2e1a6020d2870272d8125cb112fa01
|
[
"Apache-2.0"
] | null | null | null |
conohadnsclient/tests/test_v1/test_touch.py
|
naototty/python-conohadns-client
|
04f360450d2e1a6020d2870272d8125cb112fa01
|
[
"Apache-2.0"
] | null | null | null |
conohadnsclient/tests/test_v1/test_touch.py
|
naototty/python-conohadns-client
|
04f360450d2e1a6020d2870272d8125cb112fa01
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from mock import patch
from conohadnsclient.tests import test_v1
from conohadnsclient.v1 import touch
class TestTouch(test_v1.APIV1TestCase, test_v1.CrudMixin):
    """Tests for the v1 touch (serial-bump) client calls."""

    @patch.object(touch.TouchController, "domain")
    def test_domain(self, domain):
        """Touching a domain forwards the domain id to the controller."""
        args = mock.MagicMock()
        args.domain_id = "1234"
        self.client.touch.domain(args.domain_id)
        # NOTE(review): TouchController.domain is patched above, so this
        # asserts on the very mock that was just invoked — it only verifies
        # argument plumbing, not any real controller behavior; consider
        # asserting via the injected `domain` mock instead.
        self.client.touch.domain.assert_called_with("1234")
| 35.566667
| 78
| 0.731022
|
4a03254e907763313c05d154ce60201be3a452c6
| 135
|
py
|
Python
|
src/config.py
|
berkaytrhn/Facial-Emotion-API
|
cbd496e9dea704818e3c9e7682d276cb413f94f2
|
[
"MIT"
] | null | null | null |
src/config.py
|
berkaytrhn/Facial-Emotion-API
|
cbd496e9dea704818e3c9e7682d276cb413f94f2
|
[
"MIT"
] | null | null | null |
src/config.py
|
berkaytrhn/Facial-Emotion-API
|
cbd496e9dea704818e3c9e7682d276cb413f94f2
|
[
"MIT"
] | null | null | null |
# Configuration constants for the facial-emotion model.

# Square input size, in pixels — presumably what the model was trained on;
# TODO confirm against the model definition.
image_size = 96

# Emotion class labels — presumably in the model's output order; verify
# against the training code.
emotions = ['Happy', 'Neutral', 'Sad', 'Surprised', 'Fearful']

# Upper bound on the number of faces processed per image.
max_number_of_faces = 1

# Filename of the serialized model.
model_name = "5class_emotion_model.h5"
| 33.75
| 60
| 0.762963
|
4a032604880a5697beb76f0a298ecc3c07a1bc23
| 5,501
|
py
|
Python
|
scripts/kuehr1Jy_sources.py
|
ska-sa/katpoint
|
7cbac9c2f461e4209a147bda93572b7f523531d4
|
[
"BSD-3-Clause"
] | 1
|
2019-08-26T06:26:47.000Z
|
2019-08-26T06:26:47.000Z
|
scripts/kuehr1Jy_sources.py
|
ska-sa/katpoint
|
7cbac9c2f461e4209a147bda93572b7f523531d4
|
[
"BSD-3-Clause"
] | 23
|
2018-11-20T15:41:40.000Z
|
2021-08-03T20:39:21.000Z
|
scripts/kuehr1Jy_sources.py
|
ska-sa/katpoint
|
7cbac9c2f461e4209a147bda93572b7f523531d4
|
[
"BSD-3-Clause"
] | 4
|
2019-07-22T08:01:03.000Z
|
2021-02-23T07:09:04.000Z
|
#! /usr/bin/python
################################################################################
# Copyright (c) 2009-2021, National Research Foundation (SARAO)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
#
# Tool that extracts sources from the Catalog of Extragalactic Radio Sources Having Flux Densities
# Greater Than 1 Jy at 5 GHz (1Jy).
#
# This builds a katpoint catalogue from the included kuehr1Jy.vot file.
# This file is obtained as follows:
#
# - Visit the VizieR web site: http://vizier.u-strasbg.fr/
# - In the leftmost text entry field for the catalogue name, enter "1Jy"
#
# - Click on "VIII/5/sources" ("List of the 518 sources of the sample")
# - Add more search columns by clicking on "Columns with UCDs: ALL" button at bottom of page
# - Select unlimited maximum entries per table and computed J2000 output positions
# - Select at least the following fields: 1Jy 3C Fct A B C D
# - Select "VOTable" output layout and click on "Submit query"
# - This downloads a file named vizier_votable.vot
# - Rename file as kuehr1Jy.vot
#
# - Return to the top-level 1Jy page (there is a "VIII/5" button at the top left)
# - Click on "VIII/5/fluxes" ("The flux data for the sources")
# - Select at least the following fields: 1Jy Freq S
# - Select unlimited maximum entries per table and "VOTable" output layout, and click on "Submit query"
# - This downloads a file named vizier_votable.vot
# - Rename file as kuehr1Jy_flux.vot
#
# Thereafter, install the vo Python package from https://www.stsci.edu/trac/ssb/astrolib/
# (also referred to as votable2recarray). I used vo-0.5.tar.gz. Then this script can be
# run for the rest.
#
# Ludwig Schwardt
# 15 March 2010
#
import numpy as np
import matplotlib.pyplot as plt
import katpoint
from astropy.table import Table
# Load tables in one shot (don't verify, as the VizieR VOTables contain a deprecated DEFINITIONS element)
table = Table.read('kuehr1Jy.vot')
flux_table = Table.read('kuehr1Jy_flux.vot')
# One katpoint catalogue line per accepted source.
src_strings = []
# log10-frequency axis spanning all flux measurements, used to draw each
# fitted spectrum.
plot_freqs = [flux_table['Freq'].min(), flux_table['Freq'].max()]
test_log_freq = np.linspace(np.log10(plot_freqs[0]), np.log10(plot_freqs[1]), 200)
# Thumbnails are laid out in plot_rows x plot_rows grids, one figure per grid.
plot_rows = 8
plots_per_fig = plot_rows * plot_rows
# Iterate through sources
for src in table:
    names = '1Jy ' + src['_1Jy']
    if len(src['_3C']) > 0:
        names += ' | *' + src['_3C']
    ra, dec = katpoint.deg2rad(src['_RAJ2000']), katpoint.deg2rad(src['_DEJ2000'])
    tags_ra_dec = katpoint.construct_radec_target(ra, dec).add_tags('J2000').description
    # Extract flux data for the current source from flux table
    flux = flux_table[flux_table['_1Jy'] == src['_1Jy']]
    # Determine widest possible frequency range where flux is defined (ignore internal gaps in this range)
    # For better or worse, extend range to at least KAT7 frequency band (also handles empty frequency lists)
    flux_freqs = flux['Freq'].tolist() + [800.0, 2400.0]
    min_freq, max_freq = min(flux_freqs), max(flux_freqs)
    log_freq, log_flux = np.log10(flux['Freq']), np.log10(flux['S'])
    # Prefer the catalogue's own fitted model where one is given (LIN / EXP).
    if src['Fct'] == 'LIN':
        flux_str = katpoint.FluxDensityModel(min_freq, max_freq, [src['A'], src['B']]).description
    elif src['Fct'] == 'EXP':
        flux_str = katpoint.FluxDensityModel(min_freq, max_freq, [src['A'], src['B'],
                                             0.0, 0.0, src['C'], src['D']]).description
    else:
        # No flux data found for source - skip it (only two sources, 1334-127 and 2342+82, are discarded)
        if len(flux) == 0:
            continue
        # Fit straight-line flux model log10(S) = a + b*log10(v) to frequencies close to KAT7 band
        mid_freqs = (flux['Freq'] > 400) & (flux['Freq'] < 12000)
        flux_poly = np.polyfit(log_freq[mid_freqs], log_flux[mid_freqs], 1)
        # polyfit returns highest order first; FluxDensityModel wants lowest first.
        flux_str = katpoint.FluxDensityModel(min_freq, max_freq, flux_poly[::-1]).description
    src_strings.append(', '.join((names, tags_ra_dec, flux_str)) + '\n')
    print(src_strings[-1].strip())
    # Display flux model fit
    test_log_flux = np.log10(katpoint.FluxDensityModel(flux_str).flux_density(10 ** test_log_freq))
    plot_ind = len(src_strings) - 1
    plt.figure((plot_ind // plots_per_fig) + 1)
    # First thumbnail of a new figure: clear it and add the figure title.
    if plot_ind % plots_per_fig == 0:
        plt.clf()
        plt.figtext(0.5, 0.93, 'Spectra (log S vs. log v) for sources %d to %d' %
                    (plot_ind + 1, plot_ind + plots_per_fig), ha='center', va='center')
    plt.subplot(plot_rows, plot_rows, 1 + plot_ind % plots_per_fig)
    plt.plot(log_freq, log_flux, 'ob')
    plt.plot(test_log_freq, test_log_flux, 'r')
    plt.xticks([])
    plt.yticks([])
    # Shade the model's valid frequency range, colour-coded by model origin:
    # green = catalogue LIN, yellow = catalogue EXP, black = fitted here.
    colorcode = 'g' if src['Fct'] == 'LIN' else 'y' if src['Fct'] == 'EXP' else 'k'
    plt.axvspan(np.log10(min_freq), np.log10(max_freq), facecolor=colorcode, alpha=0.5)
    plt.xlim(test_log_freq[0], test_log_freq[-1])
with open('kuehr1Jy_source_list.csv', 'w') as f:
    f.writelines(src_strings)
plt.show()
| 46.618644
| 108
| 0.667151
|
4a0329b68604971bdc0082b1a77b9aef47a6b646
| 107
|
py
|
Python
|
scripts-python/desafio03.py
|
matheus-rosario/curso-python
|
ac9ccf7fc4b3f708821e44787a1bdc231d9426ac
|
[
"MIT"
] | null | null | null |
scripts-python/desafio03.py
|
matheus-rosario/curso-python
|
ac9ccf7fc4b3f708821e44787a1bdc231d9426ac
|
[
"MIT"
] | null | null | null |
scripts-python/desafio03.py
|
matheus-rosario/curso-python
|
ac9ccf7fc4b3f708821e44787a1bdc231d9426ac
|
[
"MIT"
] | null | null | null |
# Read two integers and print their sum.
primeiro = int(input('Primeiro número '))
segundo = int(input('Segundo número '))
print('A soma é ', primeiro + segundo)
| 26.75
| 37
| 0.654206
|
4a0329efedc3b5dd3fd033e4eb2dc9aebf01e2c7
| 2,883
|
py
|
Python
|
tools/Canvas/tests/case_control/case_control.py
|
Oshlack/Slinker
|
725d2c0861156034ef4d16293e2a3b74ac23c9e7
|
[
"MIT"
] | 15
|
2021-08-23T14:36:35.000Z
|
2022-03-17T06:56:17.000Z
|
tools/Canvas/tests/case_control/case_control.py
|
Oshlack/Slinker
|
725d2c0861156034ef4d16293e2a3b74ac23c9e7
|
[
"MIT"
] | 2
|
2021-08-17T03:00:23.000Z
|
2022-02-08T23:24:16.000Z
|
tools/Canvas/tests/case_control/case_control.py
|
Oshlack/Slinker
|
725d2c0861156034ef4d16293e2a3b74ac23c9e7
|
[
"MIT"
] | null | null | null |
#=======================================================================================================================
#
# CASE vs. CONTROL TEST - Build a simple case vs. control plot with some annotation
# Output is both a html file and a png of the resulting plot.
#
# Author: Breon Schmidt
# License: MIT
#
#=======================================================================================================================
''' --------------------------------------------------------------------------------------------------------------------
Imports
---------------------------------------------------------------------------------------------------------------------'''
import Canvas as cv
''' --------------------------------------------------------------------------------------------------------------------
R U N T E S T
---------------------------------------------------------------------------------------------------------------------'''
''' Set plot variables'''
# Genomic window to draw — a sub-region of chr12 (the commented-out line is
# the whole-gene window).
#region = {"chr": 12, "start": 11802788, "end": 12048325} # Whole gene
region = {"chr": 12, "start": 11976000, "end": 11995000}
# Output plot dimensions in pixels.
height = 1000
width = 1000
''' Set junctions variables'''
# Minimum read support for a splice junction to be drawn.
min_junctions = 10
''' Load the samples '''
# BAM files are discovered under this directory by Canvas.
bam_dir = "source"
samples = cv.load_samples(bam=bam_dir)
''' Then construct the plot layout. We simply need to create a numbered dictionary object. '''
# Tracks are stacked top-to-bottom in key order; 'size' is the relative
# track height. Purple styling = case, teal styling = control.
layout = {}
layout[1] = {'type': 'axis', 'size': 1}
layout[2] = {'title': "Case",
             'type': 'coverage',
             'data': samples[0],
             'size': 3,
             'title_bgcolor': 'rgba(87, 22, 162, 1)',
             'bgcolor': 'rgba(243, 232, 255, 1)',
             'fill': 'rgba(137, 58, 228, 0.5)',
             'line': 'rgba(87, 22, 162, 0.5)',
             'log': False,
             'cpm': False}
layout[3] = {'type': 'junctions',
             'data': samples[0],
             'size': 1,
             'title_bgcolor': 'rgba(137, 58, 228, 1)',
             'line': 'rgba(137, 58, 228, 0.5)',
             'bgcolor': 'rgba(243, 232, 255, 1)',
             'support': min_junctions}
# Gene model track drawn from a GTF annotation.
layout[4] = {'type': 'gene',
             'title_bgcolor': "rgba(255, 177, 51, 1)",
             'bgcolor': "#fcf2d4",
             'form': "gene",
             'path': "etv6.gtf",
             'title': "Transcripts",
             'size': 4}
layout[5] = {'title': "Control",
             'type': 'coverage',
             'data': samples[1],
             'size': 3,
             'title_bgcolor': 'rgba(3, 181, 170, 1)',
             'bgcolor': 'rgba(128, 161, 212, 1)',
             'fill': 'rgba(23, 195, 178, 0.5)',
             'line': 'rgba(3, 181, 170, 0.3)',
             'log': False,
             'cpm': False}
layout[6] = {'type': 'junctions',
             'data': samples[1],
             'size': 1,
             'title_bgcolor': 'rgba(23, 195, 178, 1)',
             'line': 'rgba(23, 195, 178, 1)',
             'bgcolor': 'rgba(204, 252, 248, 1)',
             'support': min_junctions}
# Highlight band: (x-start, x-end, rgba fill colour).
highlights = [(11978578, 11979490, "rgba(249, 233, 0, 0.2)")]
''' The create the plot '''
test_plot = cv.Plot(layout, region, highlights=highlights,
                    title="Example", height=height, width=width)
| 28.83
| 120
| 0.42768
|
4a032a35163d1a09bab7acb56f19257a0e3ffcc1
| 259
|
py
|
Python
|
.history/myblog/admin_20200416030041.py
|
abhinavmarwaha/demo-django-blog
|
c80a7d825e44d7e1589d9272c3583764562a2515
|
[
"MIT"
] | null | null | null |
.history/myblog/admin_20200416030041.py
|
abhinavmarwaha/demo-django-blog
|
c80a7d825e44d7e1589d9272c3583764562a2515
|
[
"MIT"
] | null | null | null |
.history/myblog/admin_20200416030041.py
|
abhinavmarwaha/demo-django-blog
|
c80a7d825e44d7e1589d9272c3583764562a2515
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Post
@admin.register(Post)  # register the model so this admin class takes effect
class PostAdmin(admin.ModelAdmin):
    """Admin configuration for blog posts."""

    list_display = ('title', 'slug', 'status', 'created_on')
    # BUG fix: ("status") is just a parenthesized string, not a tuple —
    # Django would iterate it character by character. A trailing comma
    # makes it a real one-element tuple.
    list_filter = ('status',)
    search_fields = ['title', 'content']
    # BUG fix: prepopulated_fields must be a dict mapping the target field
    # to a tuple of source fields, not a set.
    prepopulated_fields = {'slug': ('title',)}
| 32.375
| 60
| 0.69112
|
4a032ad827fd8f25bdba8c5aa82ea8ad4803fe4c
| 10,056
|
py
|
Python
|
data/od_dataset_from_file.py
|
eric612/Mobilenet-YOLO-Pytorch
|
cd8d99425c51c3f37d03633302076bd94738f174
|
[
"MIT"
] | 23
|
2021-02-05T10:07:26.000Z
|
2022-03-15T15:02:26.000Z
|
data/od_dataset_from_file.py
|
eric612/Mobilenet-YOLO-Pytorch
|
cd8d99425c51c3f37d03633302076bd94738f174
|
[
"MIT"
] | 3
|
2021-06-10T04:12:09.000Z
|
2021-07-13T06:38:34.000Z
|
data/od_dataset_from_file.py
|
eric612/Mobilenet-YOLO-Pytorch
|
cd8d99425c51c3f37d03633302076bd94738f174
|
[
"MIT"
] | 9
|
2021-06-10T03:47:40.000Z
|
2022-02-07T08:57:16.000Z
|
import numpy as np
from PIL import Image
import glob
import os
import torch
from torch.utils.data.dataset import Dataset # For custom datasets
import json
from tqdm import tqdm
import pickle
import xml.etree.ElementTree as ET
#import image_augmentation as img_aug
import cv2
'''
CLASSES = ('__background__',
'aeroplane', 'bicycle', 'bird', 'boat',
'bottle', 'bus', 'car', 'cat', 'chair',
'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant',
'sheep', 'sofa', 'train', 'tvmonitor')
'''
#classes_map['background'] = 0
class DatasetFromFile(Dataset):
    """Object-detection dataset fed by an imageset list file.

    Resolves each imageset entry to an (image, annotation[, segmentation])
    file triple, caches the resolved list with pickle under
    data/<dataset_name>.txt, and serves JPEG-encoded images with YOLO-format
    labels parsed from VOC xml or COCO json annotations.
    """
    def __init__(self, image_path,anno_path,seg_path,imageset_list,classes,dataset_name,phase='train',has_seg = False,difficultie = True,ext_img = ['jpg','bmp'],ext_anno = ['xml','json'],ext_seg=['png'],ori_classes_name=None):
        # NOTE(review): ext_img/ext_anno/ext_seg are mutable default
        # arguments; they are only read here, but the shared-instance
        # pitfall applies — consider None defaults instead.
        # Get image list
        #self.img_folder_list = glob.glob(folder_path+'*')
        self.item_list = list()
        self.phase = phase
        # Whether to keep objects flagged "difficult" in the annotations.
        self.difficultie = difficultie
        self.classes = classes
        # Map class name -> index used for YOLO labels.
        self.classes_map = {k: v for v, k in enumerate(classes)}
        self.ext_img = ext_img
        self.ext_anno = ext_anno
        self.has_seg = has_seg
        self.ext_seg = ext_seg
        self.seg_path = seg_path
        im_list = list()
        # ori_classes_name maps COCO category ids to names when the target
        # class list differs from the annotation's original classes.
        if ori_classes_name!=None:
            self.ori_classes_name = ori_classes_name
        else:
            self.ori_classes_name = classes
        #print(type(image_path))
        # Pickled cache of the resolved file list; reused on later runs.
        self.list_name = 'data/%s.txt'%dataset_name
        if os.path.isfile(self.list_name):
            print(self.list_name)
            with open(self.list_name, "rb") as fp:   # Unpickling
                self.item_list = pickle.load(fp)
        else:
            # Single dataset: all paths given as plain strings.
            if type(imageset_list) is str and type(image_path) is str and type(anno_path) is str:
                with open(imageset_list,'r') as f:
                    for line in f:
                        for word in line.split():
                            im_list.append(word)
                if self.has_seg:
                    self.parse_list(image_path,anno_path,im_list,seg_path)
                else:
                    self.parse_list(image_path,anno_path,im_list)
            # Multiple datasets: parallel lists of imageset/image/anno paths.
            elif type(imageset_list) is list :
                assert len(imageset_list) == len(image_path) == len(anno_path)
                for idx in range(len(imageset_list)) :
                    # NOTE(review): `set` shadows the builtin here.
                    set = imageset_list[idx]
                    im_list.clear()
                    with open(set,'r') as f:
                        for line in f:
                            for word in line.split():
                                im_list.append(word)
                    if self.has_seg:
                        self.parse_list(image_path[idx],anno_path[idx],im_list,seg_path[idx])
                    else:
                        self.parse_list(image_path[idx],anno_path[idx],im_list)
            with open(self.list_name, "wb") as fp:   #Pickling
                pickle.dump(self.item_list, fp)
        self.data_len = len(self.item_list)
        print('total files of %s : %d'%(dataset_name,self.data_len))
        #print(self.item_list)
    def __getitem__(self, index):
        """Return (jpg_encoded_image, yolo_labels[, png_encoded_seg])."""
        # Get image name from the pandas df
        if self.has_seg :
            single_image_path, single_anno_path, single_seg_path = self.item_list[index]
        else:
            single_image_path, single_anno_path = self.item_list[index]
        # Open image
        im = cv2.imread(single_image_path)
        boxes, labels, difficulties = self.parse_annotation(single_anno_path)
        yolo_labels = list()
        height, width, channels = im.shape
        # Re-encode to JPEG bytes (quality 98) rather than returning raw
        # pixels — presumably to keep worker-to-main transfer small; confirm
        # the consumer decodes accordingly.
        im = cv2.imencode('.jpg', im,[int(cv2.IMWRITE_JPEG_QUALITY), 98])
        yolo_labels = self.to_yolo_label(boxes,labels,difficulties,width,height)
        if self.has_seg :
            im2 = cv2.imread(single_seg_path)
            im2 = cv2.imencode('.png', im2,[int(cv2.IMWRITE_PNG_COMPRESSION),1])
            return (im, yolo_labels, im2)
        else :
            return (im, yolo_labels)
    def __len__(self):
        # Number of resolved (image, annotation[, seg]) items.
        return self.data_len
    def to_yolo_label(self,boxes,labels,difficulties,width = 0,height = 0):
        """Convert corner boxes to YOLO rows [label, cx, cy, w, h].

        Coordinates are normalized by width/height when both are given;
        when both are 0 they are passed through unchanged.
        """
        yolo_labels = list()
        # NOTE(review): this local shadows the builtin `float`; it only
        # means "coordinates are already fractional, skip normalization".
        float = width == 0 and height == 0
        for index,box in enumerate(boxes):
            # Skip "difficult" objects unless configured to keep them.
            if self.difficultie or not difficulties[index]:
                #print(box)
                yolo_label = list()
                yolo_label.clear()
                #print(box,labels[index])
                # Corner (xmin,ymin,xmax,ymax) -> center + size.
                x = (box[0] + box[2])/2
                y = (box[1] + box[3])/2
                w = box[2] - box[0]
                h = box[3] - box[1]
                if not float :
                    x = x / width
                    y = y / height
                    w = w / width
                    h = h / height
                yolo_label.append(labels[index])
                yolo_label.append(x)
                yolo_label.append(y)
                yolo_label.append(w)
                yolo_label.append(h)
                yolo_labels.append(yolo_label)
        return yolo_labels
    def parse_list(self,image_path,anno_path,im_list,seg_path=None):
        """Resolve imageset names to existing image/annotation(/seg) files
        and append the found pairs/triples to self.item_list."""
        image_list = list()
        image_list.clear()
        seg_list = list()
        seg_list.clear()
        # tqdm wrapper: set_description below doubles as progress logging.
        im_lists = tqdm(im_list)
        seg_files = list()
        if self.has_seg:
            # Pre-scan the segmentation dir once for all allowed extensions.
            for i in self.ext_seg :
                seg_files = seg_files + glob.glob(seg_path+'/*.%s'%i)
        for s in im_lists :
            # Try each allowed image extension; last existing match wins.
            img_file = None
            for i in self.ext_img :
                filepath = "{}/{}.{}".format(image_path,s,i)
                if os.path.isfile(filepath):
                    img_file = filepath
            # Same for the annotation file.
            anno_file = None
            for i in self.ext_anno :
                filepath = "{}/{}.{}".format(anno_path,s,i)
                if os.path.isfile(filepath):
                    anno_file = filepath
            if self.has_seg:
                # Match the segmentation file by substring of its path.
                for seg in seg_files:
                    if s in seg :
                        if img_file!=None and anno_file!=None :
                            self.item_list.append([img_file,anno_file,seg])
                            im_lists.set_description("Processing %s" % img_file)
                        else:
                            im_lists.set_description("Not find file %s" % s)
                        break
            elif img_file!=None and anno_file!=None :
                self.item_list.append([img_file,anno_file])
                im_lists.set_description("Processing %s" % img_file)
            else:
                im_lists.set_description("Not find file %s" % s)
    def bound(low, high, value):
        # NOTE(review): defined without `self` and never called in this
        # file — looks like leftover dead code; confirm before relying on it.
        return max(low, min(high, value))
    def parse_annotation(self,annotation_path):
        """Parse one annotation file (VOC .xml or COCO .json).

        Returns (boxes, labels, difficulties): corner boxes, class indices
        into self.classes, and per-object difficult flags (always 0 for json).
        """
        filename, file_extension = os.path.splitext(annotation_path)
        boxes = list()
        labels = list()
        difficulties = list()
        # VOC format xml
        if file_extension == '.xml':
            source = open(annotation_path)
            tree = ET.parse(source)
            root = tree.getroot()
            for object in root.iter('object'):
                difficult = int(object.find('difficult').text == '1')
                label = object.find('name').text.lower().strip()
                # Skip classes not in the target class list.
                if label not in self.classes:
                    continue
                bbox = object.find('bndbox')
                # VOC coordinates are 1-based; shift to 0-based.
                xmin = int(bbox.find('xmin').text) - 1
                ymin = int(bbox.find('ymin').text) - 1
                xmax = int(bbox.find('xmax').text) - 1
                ymax = int(bbox.find('ymax').text) - 1
                boxes.append([xmin, ymin, xmax, ymax])
                #print(label)
                labels.append(self.classes_map[label])
                difficulties.append(difficult)
            source.close()
            return boxes, labels, difficulties
        # COCO format json
        elif file_extension == '.json':
            with open(annotation_path, 'r') as f:
                data=json.load(f)
            width = int(data['image']['width'])-1
            height = int(data['image']['height'])-1
            object_number = len(data['annotation'])
            for j in range(object_number):
                # COCO category ids are 1-based; map via ori_classes_name.
                class_id = int(data['annotation'][j]['category_id'])-1
                category_name = self.ori_classes_name[class_id]
                if category_name in self.classes:
                    new_class_id = self.classes.index(category_name)
                    # COCO bbox is [x, y, w, h]; +0.5 rounds to nearest int.
                    xmin = int(float(data['annotation'][j]['bbox'][0])+0.5)
                    ymin = int(float(data['annotation'][j]['bbox'][1])+0.5)
                    if xmin<0:
                        xmin = 0
                    if ymin<0:
                        ymin = 0
                    xmax = int(float(data['annotation'][j]['bbox'][0])+float(data['annotation'][j]['bbox'][2])+0.5)
                    ymax = int(float(data['annotation'][j]['bbox'][1])+float(data['annotation'][j]['bbox'][3])+0.5)
                    # Clamp to image bounds.
                    if xmax>width:
                        xmax = width
                    if ymax>height:
                        ymax = height
                    boxes.append([xmin, ymin, xmax, ymax])
                    labels.append(new_class_id)
                    difficulties.append(0)
                    #print(xmin,ymin,class_id)
            return boxes, labels, difficulties
    def collate_fn(self, batch):
        """Collate a batch into (images, boxes, labels, difficulties).

        NOTE(review): this expects 4-tuples per item, but __getitem__
        returns 2- or 3-tuples — this collate function looks stale relative
        to __getitem__; confirm which code path actually uses it.
        """
        images = list()
        boxes = list()
        labels = list()
        difficulties = list()
        for b in batch:
            images.append(b[0])
            boxes.append(b[1])
            labels.append(b[2])
            difficulties.append(b[3])
        images = torch.stack(images, dim=0)
        return images, boxes, labels, difficulties  # tensor (N, 3, H, W), 3 lists of N tensors each
| 40.878049
| 226
| 0.512132
|
4a032aefe72b65a44670128ba6b2cc9cf739ed51
| 14,906
|
py
|
Python
|
ironic/drivers/modules/oneview/deploy_utils.py
|
NaohiroTamura/ironic
|
1fcb6c52a22c9c025dbf27931720ce2eda08704f
|
[
"Apache-2.0"
] | null | null | null |
ironic/drivers/modules/oneview/deploy_utils.py
|
NaohiroTamura/ironic
|
1fcb6c52a22c9c025dbf27931720ce2eda08704f
|
[
"Apache-2.0"
] | null | null | null |
ironic/drivers/modules/oneview/deploy_utils.py
|
NaohiroTamura/ironic
|
1fcb6c52a22c9c025dbf27931720ce2eda08704f
|
[
"Apache-2.0"
] | 1
|
2022-03-25T14:26:10.000Z
|
2022-03-25T14:26:10.000Z
|
# Copyright 2016 Hewlett Packard Enterprise Development LP.
# Copyright 2016 Universidade Federal de Campina Grande
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import operator
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _, _LE, _LI, _LW
from ironic.common import states
from ironic.drivers.modules.oneview import common
LOG = logging.getLogger(__name__)
oneview_exception = importutils.try_import('oneview_client.exceptions')
oneview_utils = importutils.try_import('oneview_client.utils')
def get_properties():
    """Return the OneView driver's common property dictionary."""
    return common.COMMON_PROPERTIES
def prepare(oneview_client, task):
    """Applies Server Profile and update the node when preparing.
    This method is responsible for applying a Server Profile to the Server
    Hardware and add the uri of the applied Server Profile in the node's
    'applied_server_profile_uri' field on properties/capabilities.
    :param oneview_client: an instance of the OneView client
    :param task: A TaskManager object
    :raises InstanceDeployFailure: If the node doesn't have the needed OneView
        informations, if Server Hardware is in use by an OneView user, or
        if the Server Profile can't be applied.
    """
    # Only act during an actual deploy; other provision states are a no-op.
    if task.node.provision_state == states.DEPLOYING:
        try:
            # Name the Server Profile after the instance so it is
            # identifiable in OneView.
            instance_display_name = task.node.instance_info.get('display_name')
            instance_uuid = task.node.instance_uuid
            server_profile_name = (
                "%(instance_name)s [%(instance_uuid)s]" %
                {"instance_name": instance_display_name,
                 "instance_uuid": instance_uuid}
            )
            allocate_server_hardware_to_ironic(oneview_client, task.node,
                                               server_profile_name)
        except exception.OneViewError as e:
            # Surface OneView problems as a deploy failure for the conductor.
            raise exception.InstanceDeployFailure(node=task.node.uuid,
                                                  reason=e)
def tear_down(oneview_client, task):
    """Remove Server profile and update the node when tear down.
    This method is responsible for power a Server Hardware off, remove a Server
    Profile from the Server Hardware and remove the uri of the applied Server
    Profile from the node's 'applied_server_profile_uri' in
    properties/capabilities.
    :param oneview_client: an instance of the OneView client
    :param task: A TaskManager object
    :raises InstanceDeployFailure: If node has no uri of applied Server
        Profile, or if some error occur while deleting Server Profile.
    """
    try:
        deallocate_server_hardware_from_ironic(oneview_client, task.node)
    except exception.OneViewError as e:
        # Surface OneView problems as a deploy failure for the conductor.
        raise exception.InstanceDeployFailure(node=task.node.uuid, reason=e)
def prepare_cleaning(oneview_client, task):
    """Applies Server Profile and update the node when preparing cleaning.
    This method is responsible for applying a Server Profile to the Server
    Hardware and add the uri of the applied Server Profile in the node's
    'applied_server_profile_uri' field on properties/capabilities.
    :param oneview_client: an instance of the OneView client
    :param task: A TaskManager object
    :raises NodeCleaningFailure: If the node doesn't have the needed OneView
        informations, if Server Hardware is in use by an OneView user, or
        if the Server Profile can't be applied.
    """
    try:
        # Cleaning profiles carry the node uuid so they are identifiable
        # in OneView.
        server_profile_name = "Ironic Cleaning [%s]" % task.node.uuid
        allocate_server_hardware_to_ironic(oneview_client, task.node,
                                           server_profile_name)
    except exception.OneViewError as e:
        # Persist the allocation-error marker on the node before failing,
        # so the rest of the driver can detect why cleaning failed.
        oneview_error = common.SERVER_HARDWARE_ALLOCATION_ERROR
        driver_internal_info = task.node.driver_internal_info
        driver_internal_info['oneview_error'] = oneview_error
        task.node.driver_internal_info = driver_internal_info
        task.node.save()
        raise exception.NodeCleaningFailure(node=task.node.uuid,
                                            reason=e)
def tear_down_cleaning(oneview_client, task):
    """Remove Server profile and update the node when tear down cleaning.
    This method is responsible for power a Server Hardware off, remove a Server
    Profile from the Server Hardware and remove the uri of the applied Server
    Profile from the node's 'applied_server_profile_uri' in
    properties/capabilities.
    :param oneview_client: an instance of the OneView client
    :param task: A TaskManager object
    :raises NodeCleaningFailure: If node has no uri of applied Server Profile,
        or if some error occur while deleting Server Profile.
    """
    try:
        deallocate_server_hardware_from_ironic(oneview_client, task.node)
    except exception.OneViewError as e:
        # Surface OneView problems as a cleaning failure for the conductor.
        raise exception.NodeCleaningFailure(node=task.node.uuid, reason=e)
def _is_node_in_use(server_hardware, applied_sp_uri, by_oneview=False):
"""Check if node is in use by ironic or by OneView.
:param by_oneview: Boolean value. True when want to verify if node is in
use by OneView. False to verify if node is in use by
ironic.
:param node: an ironic node object
:returns: Boolean value. True if by_oneview param is also True and node is
in use by OneView, False otherwise. True if by_oneview param is
False and node is in use by ironic, False otherwise.
"""
operation = operator.ne if by_oneview else operator.eq
return (server_hardware.server_profile_uri not in (None, '') and
operation(applied_sp_uri, server_hardware.server_profile_uri))
def is_node_in_use_by_oneview(oneview_client, node):
    """Check if node is in use by OneView user.
    :param oneview_client: an instance of the OneView client
    :param node: an ironic node object
    :returns: Boolean value. True if node is in use by OneView,
              False otherwise.
    :raises OneViewError: if not possible to get OneView's informations
        for the given node, if not possible to retrieve Server Hardware
        from OneView.
    """
    # Log messages for the two outcomes of the check below.
    positive = _("Node '%s' is in use by OneView.") % node.uuid
    negative = _("Node '%s' is not in use by OneView.") % node.uuid
    def predicate(server_hardware, applied_sp_uri):
        # Check if Profile exists in Oneview and it is different of the one
        # applied by ironic
        return _is_node_in_use(server_hardware, applied_sp_uri,
                               by_oneview=True)
    return _check_applied_server_profile(oneview_client, node,
                                         predicate, positive, negative)
def is_node_in_use_by_ironic(oneview_client, node):
    """Check if node is in use by ironic in OneView.
    :param oneview_client: an instance of the OneView client
    :param node: an ironic node object
    :returns: Boolean value. True if node is in use by ironic,
              False otherwise.
    :raises OneViewError: if not possible to get OneView's information
        for the given node, if not possible to retrieve Server Hardware
        from OneView.
    """
    # Log messages for the two outcomes of the check below.
    positive = _("Node '%s' is in use by Ironic.") % node.uuid
    negative = _("Node '%s' is not in use by Ironic.") % node.uuid
    def predicate(server_hardware, applied_sp_uri):
        # Check if Profile exists in Oneview and it is equals of the one
        # applied by ironic
        return _is_node_in_use(server_hardware, applied_sp_uri,
                               by_oneview=False)
    return _check_applied_server_profile(oneview_client, node,
                                         predicate, positive, negative)
def _check_applied_server_profile(oneview_client, node,
                                  predicate, positive, negative):
    """Evaluate *predicate* against the node's live Server Hardware.
    Fetches the node's Server Hardware from OneView and applies *predicate*
    to it together with the Server Profile uri ironic recorded as applied,
    logging *positive* when the predicate holds and *negative* otherwise.
    :param oneview_client: an instance of the OneView client
    :param node: an ironic node object
    :param predicate: callable(server_hardware, applied_sp_uri) -> bool
    :param positive: debug message logged when the predicate holds
    :param negative: debug message logged when it does not
    :returns: the boolean result of *predicate*
    :raises OneViewError: if not possible to get OneView's information
        for the given node, if not possible to retrieve Server Hardware
        from OneView.
    """
    oneview_info = common.get_oneview_info(node)
    # The client lookup wants the Server Hardware uuid, not its uri.
    sh_uuid = oneview_utils.get_uuid_from_uri(
        oneview_info.get("server_hardware_uri")
    )
    try:
        server_hardware = oneview_client.get_server_hardware_by_uuid(
            sh_uuid
        )
    except oneview_exception.OneViewResourceNotFoundError as e:
        msg = (_("Error while obtaining Server Hardware from node "
                 "%(node_uuid)s. Error: %(error)s") %
               {'node_uuid': node.uuid, 'error': e})
        raise exception.OneViewError(error=msg)
    applied_sp_uri = (
        node.driver_info.get('applied_server_profile_uri')
    )
    result = predicate(server_hardware, applied_sp_uri)
    if result:
        LOG.debug(positive)
    else:
        LOG.debug(negative)
    return result
def _add_applied_server_profile_uri_field(node, applied_profile):
    """Record the applied Server Profile URI in the node's driver_info.

    :param node: an ironic node object
    :param applied_profile: the Server Profile that was applied; its
        ``uri`` attribute is stored.
    """
    # Re-assign the whole dict so the node object notices the change
    # before persisting it.
    info = node.driver_info
    info['applied_server_profile_uri'] = applied_profile.uri
    node.driver_info = info
    node.save()
def _del_applied_server_profile_uri_field(node):
    """Remove the applied Server Profile URI from a node, if present.

    :param node: an ironic node object
    """
    info = node.driver_info
    # pop with a default so a missing key is a harmless no-op
    info.pop('applied_server_profile_uri', None)
    node.driver_info = info
    node.save()
def allocate_server_hardware_to_ironic(oneview_client, node,
                                       server_profile_name):
    """Allocate Server Hardware to ironic.

    Applies a Server Profile (cloned from the node's Server Profile
    Template) to the node's Server Hardware and records its URI in the
    node's driver_info. If ironic already applied a matching profile,
    it is reused; a stale ``applied_server_profile_uri`` left in
    driver_info with no profile on the hardware is cleaned up first.

    :param oneview_client: an instance of the OneView client
    :param node: an ironic node object
    :param server_profile_name: a formatted string with the Server Profile
        name
    :raises OneViewError: if the node is already in use by OneView or if
        an error occurs while applying the Server Profile
    """
    node_in_use_by_oneview = is_node_in_use_by_oneview(oneview_client, node)
    if node_in_use_by_oneview:
        msg = (_("Node %s is already in use by OneView.") %
               node.uuid)
        raise exception.OneViewError(error=msg)

    oneview_info = common.get_oneview_info(node)
    applied_sp_uri = node.driver_info.get('applied_server_profile_uri')
    sh_uuid = oneview_utils.get_uuid_from_uri(
        oneview_info.get("server_hardware_uri")
    )
    spt_uuid = oneview_utils.get_uuid_from_uri(
        oneview_info.get("server_profile_template_uri")
    )
    server_hardware = oneview_client.get_server_hardware_by_uuid(sh_uuid)

    # Don't have Server Profile on OneView but has
    # `applied_server_profile_uri` on driver_info: stale entry, drop it.
    # BUG FIX: the original used ``applied_sp_uri is not (None, '')``,
    # an identity comparison against a fresh tuple which is always True;
    # the intended check is the membership test ``not in (None, '')``.
    if (server_hardware.server_profile_uri in (None, '') and
            applied_sp_uri not in (None, '')):
        _del_applied_server_profile_uri_field(node)
        LOG.info(_LI(
            "Inconsistent 'applied_server_profile_uri' parameter "
            "value in driver_info. There is no Server Profile "
            "applied to node %(node_uuid)s. Value deleted."),
            {"node_uuid": node.uuid}
        )

    # applied_server_profile_uri exists and is equal to Server profile
    # applied on Hardware. Do not apply again.
    if (applied_sp_uri and server_hardware.server_profile_uri and
            server_hardware.server_profile_uri == applied_sp_uri):
        LOG.info(_LI(
            "The Server Profile %(applied_sp_uri)s was already applied "
            "by ironic on node %(node_uuid)s. Reusing."),
            {"node_uuid": node.uuid, "applied_sp_uri": applied_sp_uri}
        )
        return

    try:
        applied_profile = oneview_client.clone_template_and_apply(
            server_profile_name, sh_uuid, spt_uuid
        )
        _add_applied_server_profile_uri_field(node, applied_profile)
        LOG.info(
            _LI("Server Profile %(server_profile_uuid)s was successfully"
                " applied to node %(node_uuid)s."),
            {"node_uuid": node.uuid,
             "server_profile_uuid": applied_profile.uri}
        )
    except oneview_exception.OneViewServerProfileAssignmentError as e:
        LOG.error(_LE("An error occurred during allocating server "
                      "hardware to ironic during prepare: %s"), e)
        raise exception.OneViewError(error=e)
def deallocate_server_hardware_from_ironic(oneview_client, node):
    """Deallocate Server Hardware from ironic.

    Powers the node off, deletes the Server Profile ironic applied to the
    Server Hardware, and removes the recorded URI from driver_info.

    :param oneview_client: an instance of the OneView client
    :param node: an ironic node object
    :raises OneViewError: if an error occurs while deallocating the Server
        Hardware from ironic
    """
    # Only touch the hardware if the applied profile is really ours.
    if is_node_in_use_by_ironic(oneview_client, node):
        oneview_info = common.get_oneview_info(node)
        server_profile_uuid = oneview_utils.get_uuid_from_uri(
            oneview_info.get('applied_server_profile_uri')
        )
        try:
            # Power off first: a profile cannot be removed from powered-on
            # hardware, then delete the profile and clean driver_info.
            oneview_client.power_off(oneview_info)
            oneview_client.delete_server_profile(server_profile_uuid)
            _del_applied_server_profile_uri_field(node)
            LOG.info(_LI("Server Profile %(server_profile_uuid)s was deleted "
                         "from node %(node_uuid)s in OneView."),
                     {'server_profile_uuid': server_profile_uuid,
                      'node_uuid': node.uuid})
        except (ValueError, oneview_exception.OneViewException) as e:
            msg = (_("Error while deleting applied Server Profile from node "
                     "%(node_uuid)s. Error: %(error)s") %
                   {'node_uuid': node.uuid, 'error': e})
            raise exception.OneViewError(error=msg)
    else:
        # Best-effort: not owned by ironic, warn instead of raising.
        LOG.warning(_LW("Cannot deallocate node %(node_uuid)s "
                        "in OneView because it is not in use by "
                        "ironic."), {'node_uuid': node.uuid})
| 38.817708
| 79
| 0.668456
|
4a032b05087ab63f05c72e8092b39c91ad87cb7d
| 811
|
py
|
Python
|
django2/fuck/django/config/urls.py
|
Gozeon/code-collections
|
7304e2b9c4c91a809125198d22cf40dcbb45a23b
|
[
"MIT"
] | null | null | null |
django2/fuck/django/config/urls.py
|
Gozeon/code-collections
|
7304e2b9c4c91a809125198d22cf40dcbb45a23b
|
[
"MIT"
] | 1
|
2020-07-17T09:25:42.000Z
|
2020-07-17T09:25:42.000Z
|
django2/fuck/django/config/urls.py
|
Gozeon/code-collections
|
7304e2b9c4c91a809125198d22cf40dcbb45a23b
|
[
"MIT"
] | null | null | null |
"""config URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# URL routing table: Django admin plus the project's "core" app at the root.
urlpatterns = [
    path('admin/', admin.site.urls),
    # NOTE(review): include(..., namespace='core') requires core.urls to set
    # app_name (or to pass a (urlconf, app_name) tuple) — confirm in core/urls.py.
    path('', include('core.urls', namespace='core')),
]
| 35.26087
| 77
| 0.701603
|
4a032be5d1b43e725143b0c597cc83f0af62e150
| 10,964
|
py
|
Python
|
citation_graph.py
|
Azzaare/multiplex-carbonara
|
c911bd08aa4120a3ebf099b03eb1fa1670a3a255
|
[
"MIT"
] | null | null | null |
citation_graph.py
|
Azzaare/multiplex-carbonara
|
c911bd08aa4120a3ebf099b03eb1fa1670a3a255
|
[
"MIT"
] | null | null | null |
citation_graph.py
|
Azzaare/multiplex-carbonara
|
c911bd08aa4120a3ebf099b03eb1fa1670a3a255
|
[
"MIT"
] | null | null | null |
import os
from py2neo import Graph, Node, Relationship, authenticate
#lists and tables required to parse the date
# Month names and common abbreviations (lower-case) -> month number 1-12.
months = {
    "jan": 1,
    "january": 1,
    "feb": 2,
    "february": 2,
    "mar": 3,
    "march": 3,
    "apr": 4,
    "april": 4,
    "may": 5,
    "jun": 6,
    "june": 6,
    "jul": 7,
    "july": 7,
    "aug": 8,
    "august": 8,
    "sep": 9,
    "september": 9,
    "oct": 10,
    "october": 10,
    "nov": 11,
    "november": 11,
    "dec": 12,
    "december":12
}
# Weekday abbreviations; not referenced by the parsing functions below.
days = ["mon","tue","wed","thu","fri","sat","sun"]
# Day-of-month tokens accepted by parse_date (with and without leading zero).
dates = ["1","01","2","02","3","03","4","04","5","05","6","06","7","07","8","08","9","09","10","11","12","13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29","30","31"]
# Four-digit years covered by the hep-th dataset (1991-2003).
years = ["1991","1992","1993","1994","1995","1996","1997","1998","1999","2000","2001","2002","2003"]
# Two-digit year -> four-digit year for the same range.
years_short = {"91":1991,"92":1992,"93":1993,"94":1994,"95":1995,"96":1996,"97":1997,"98":1998,"99":1999,"00":2000,"01":2001,"02":2002,"03":2003}
#function used in parsing authors list
def remove_text_inside_brackets(text, brackets="()[]"): #adapted from http://stackoverflow.com/questions/14596884/remove-text-between-and-in-python
    """Return *text* with every (possibly nested) bracketed span removed.

    ``brackets`` holds open/close pairs, e.g. "()[]"; unbalanced closing
    brackets are dropped and do not corrupt the nesting depth.
    """
    depth = [0] * (len(brackets) // 2)  # one nesting counter per pair
    kept = []
    for ch in text:
        for idx, marker in enumerate(brackets):
            if ch == marker:
                kind, closing = divmod(idx, 2)
                depth[kind] += 1 if not closing else -1
                if depth[kind] < 0:  # stray closing bracket: ignore it
                    depth[kind] = 0
                break
        else:
            # ordinary character: keep only while outside every bracket
            if not any(depth):
                kept.append(ch)
    return ''.join(kept)
#function used to determine the publication at which to start push_neo_graph() in function of the total number of citations already loaded
def citation_no(pub_data, l):
    """Return the publication index at which the l-th citation occurs.

    Citations are counted in iteration order over ``pub_data``; returns
    None implicitly when fewer than l+1 citations exist.
    """
    pub_index = 0
    seen = 0
    for pub in pub_data:
        for _cited in pub_data[pub][0]:
            if seen == l:
                return pub_index
            seen += 1
        pub_index += 1
#Parsing functions for parsing the date and the author list
def parse_date(line):
    """Extract ``[day, month, year]`` ints from a free-form 'Date:' line.

    Unrecognized components stay 0; when a token matches more than once
    the last occurrence wins (same as scanning word by word).
    """
    words = " ".join(line.split()).lower().split(' ')
    day = 0
    month = 0
    year = 0
    for word in words:
        if word in dates:
            day = int(word)
        if word in months:
            month = months[word]
        if word in years:
            year = int(word)
        if word in years_short:
            year = years_short[word]
    return [day, month, year]
def adjust_initials(aut):
    """Normalize an author name so each initial reads as 'X. '.

    Splits on dots, re-joins with '. ', then appends a dot to any
    remaining single-letter token.
    """
    segments = [seg.lstrip().rstrip() for seg in aut.split(".")]
    tokens = ". ".join(segments).split(" ")
    fixed = []
    for tok in tokens:
        # a lone letter is an initial: give it its dot back
        fixed.append(tok + "." if len(tok) == 1 else tok)
    return " ".join(fixed)
def parse_author(line):
    """Parse an 'Authors:' line into a list of normalized author names.

    Drops bracketed affiliations, TeX accent escapes and the leading
    'Authors:' token, then splits on 'and ' / commas and lower-cases each
    name with spaced initials via adjust_initials().
    """
    text = remove_text_inside_brackets(line.strip())
    # strip TeX-style accent escapes (\' and \") — other escapes remain
    text = text.replace("\\'", "").replace("\\\"", "")
    text = " ".join(text.split())
    # drop the leading "Authors:" word
    text = " ".join(text.split(' ', 2)[1:])
    raw_names = []
    for part in text.split('and '):
        raw_names += part.split(',')
    cleaned = []
    for name in raw_names:
        stripped = name.lstrip().rstrip()
        if stripped != "":
            cleaned.append(adjust_initials(stripped).lower())
    return cleaned
#Function for loading the data structure which associates for each publication the other publications which it cites, its publication date and its list of authors
#function to return list of unique authors
def author_list(pub_data):
    """Return the de-duplicated list of all authors across publications."""
    unique = set()
    for pub in pub_data:
        unique.update(pub_data[pub][2])
    return list(unique)
#function to return count of authors
def count_authors(pub_data):
    """Return the number of distinct authors in *pub_data*."""
    unique_authors = author_list(pub_data)
    return len(unique_authors)
#function which adjusts the initials to the correct format
def author_initials(name):
    """Collapse a name to lower-case dotted initials plus surname.

    e.g. 'John Smith' -> 'j.smith'; parts already ending in '.' are
    kept as-is.
    """
    parts = name.lower().split(" ")
    prefix = ""
    for part in parts[:-1]:
        if part.endswith('.'):
            prefix += part
        else:
            prefix += part[0] + '.'
    return prefix + parts[-1]
#function which checks if there are conflicts between different authors sharing the same initials
def check_author_initials_conflict(pub_data):
    # Diagnostic helper (Python 2): prints every group of distinct author
    # names that collapse to the same initials, then the number of groups
    # with more than two members. Returns nothing.
    autlist = author_list(pub_data)
    initial_table = {}
    # map initials -> list of full names sharing them
    for a in autlist:
        initial_table[author_initials(a)] = []
    for a in autlist:
        #if "".join(a.lower().split()) != author_initials(a):
        initial_table[author_initials(a)] += [a]
    #corrections
    #remove singletons (initials carried by exactly one name are not conflicts);
    # collect first, since deleting while iterating a dict is not allowed
    to_delete = []
    for i in initial_table:
        if len(initial_table[i]) <= 1:
            to_delete+=[i]
    for i in to_delete:
        del initial_table[i]
    # print remaining conflicts and count groups of 3+ names
    k=0
    for i in initial_table:
        print i,initial_table[i]
        if len(initial_table[i])>2:
            k+=1
    print k
#function to reduce the number of authors by fusioning authors according to whether one authors is just the initials of another author
def reduce_authors(pub_data):
    """Merge author aliases that differ only as initials vs. full name.

    When exactly two distinct names share the same initials and one of
    them IS just those initials, every occurrence of the initials-only
    form in pub_data is rewritten (in place) to the fuller name.

    NOTE (from original author): unreliable when different people share
    initials, and suffixes like 'jr.' are known to misbehave.
    """
    # group all known author names by their initials
    by_initials = {}
    for author in author_list(pub_data):
        by_initials.setdefault(author_initials(author), []).append(author)

    # one name per initial: nothing to merge; three or more: too ambiguous.
    rename = {}
    for group in by_initials.values():
        if len(group) != 2:
            continue
        first, second = group
        if "".join(first.lower().split()) == author_initials(first):
            rename[first] = second
        elif "".join(second.lower().split()) == author_initials(second):
            rename[second] = first

    # apply the renames in place across every publication's author list
    for pub_id in pub_data:
        authors = pub_data[pub_id][2]
        for idx in range(len(authors)):
            if authors[idx] in rename:
                authors[idx] = rename[authors[idx]]
#Function which loads the data into the data structure
def load_data():
    # Build the in-memory citation database (Python 2 script code).
    # Returns a dict: publication id (int) -> [citations, date, authors]
    # where citations is a list of cited ids, date is [day, month, year]
    # and authors is a list of normalized author-name strings.
    # NOTE(review): input paths are hard-coded to one machine; the files
    # are never closed, and the local name `id` shadows the builtin.
    pub_data = {} #Data structure for our program. Associates to an id (int) a list of 3 lists : the list of citations, the date and the list of authors
    print "Loading data..."
    #First we will load the file with the citation data to add the citations to the data structure
    f = open('/home/vivek/prog/multiplex-carbonara/Cit-HepTh.txt','r')
    for i in range(4): #first four lines are useless
        line = f.readline()
    for line in f : #read lines
        # each line is "<citing-id>\t<cited-id>"
        l = line.strip().split('\t')
        i1 = int(l[0])
        if i1 not in pub_data:
            pub_data[i1] = [[],[],[]] #if the entry for that publication doesn't exit, initialize it
        i2 = int(l[1])
        if i2 not in pub_data:
            pub_data[i2] = [[],[],[]] #if the entry for that publication doesn't exit, initialize it
        pub_data[i1][0].append(i2) #add citation
    #Secondly we will load the files with the metadata to add the dates and authors of the publications to the data structure
    for root,dirs,fns in os.walk("/home/vivek/prog/multiplex-carbonara/cit-HepTh-abstracts/") :
        for fn in fns :
            if fn.endswith(".abs") :
                f = open(os.path.join(root, fn),'r')
                id = int(fn.split('.')[0]) #the ID of the publication is its filename
                if id in pub_data: #if the publication is in our citations data
                    lauthors = [] #list of authors for the publication
                    ldate = [] #date for the publication, in the format [day,month,year] (int)
                    line=f.readline()
                    while line != "" :
                        if line.split(' ')[0] == "Date:" :
                            ldate=parse_date(line)
                        if line.split(' ')[0] == "Authors:" or line.split(' ')[0] == "Author:" : #Authors can be written over several lines...
                            laut = line
                            line = f.readline()
                            while (line.split(' ')[0] != "Comments:" and line.split(' ')[0] != "Report-no:" and
                                   line.split(' ')[0] != "Subj-class:" and line.split(' ')[0] != "Journal-ref:" and
                                   line.split(' ')[0].strip() != "\\\\") : #we read until we reach another section
                                laut+=line
                                line = f.readline()
                            lauthors = parse_author(laut)
                        line = f.readline()
                    pub_data[id][1] = ldate #add the metadata to the data structure
                    pub_data[id][2] = lauthors
    reduce_authors(pub_data) #reduce the number of authors (check if some different authors are the same author but with name written differently
    print "Data loaded"
    return pub_data
| 40.758364
| 225
| 0.513134
|
4a032c60d386df05b4787a3dd41b86c97ad18a30
| 81,980
|
py
|
Python
|
pandas/core/arrays/datetimes.py
|
juliansmidek/pandas
|
8945a4267588ec2608bec7be6745f6beff0373da
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/core/arrays/datetimes.py
|
juliansmidek/pandas
|
8945a4267588ec2608bec7be6745f6beff0373da
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/core/arrays/datetimes.py
|
juliansmidek/pandas
|
8945a4267588ec2608bec7be6745f6beff0373da
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
from __future__ import annotations
from datetime import (
datetime,
time,
timedelta,
tzinfo,
)
from typing import (
TYPE_CHECKING,
Optional,
Union,
cast,
overload,
)
import warnings
import numpy as np
from pandas._libs import (
lib,
tslib,
)
from pandas._libs.tslibs import (
BaseOffset,
NaT,
NaTType,
Resolution,
Timestamp,
conversion,
fields,
get_resolution,
iNaT,
ints_to_pydatetime,
is_date_array_normalized,
normalize_i8_timestamps,
timezones,
to_offset,
tzconversion,
)
from pandas.errors import PerformanceWarning
from pandas.core.dtypes.cast import astype_dt64_to_dt64tz
from pandas.core.dtypes.common import (
DT64NS_DTYPE,
INT64_DTYPE,
is_bool_dtype,
is_categorical_dtype,
is_datetime64_any_dtype,
is_datetime64_dtype,
is_datetime64_ns_dtype,
is_datetime64tz_dtype,
is_dtype_equal,
is_extension_array_dtype,
is_float_dtype,
is_object_dtype,
is_period_dtype,
is_sparse,
is_string_dtype,
is_timedelta64_dtype,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.core.dtypes.generic import ABCMultiIndex
from pandas.core.dtypes.missing import isna
from pandas.core.algorithms import checked_add_with_arr
from pandas.core.arrays import (
ExtensionArray,
datetimelike as dtl,
)
from pandas.core.arrays._ranges import generate_regular_range
from pandas.core.arrays.integer import IntegerArray
import pandas.core.common as com
from pandas.core.construction import extract_array
from pandas.tseries.frequencies import get_period_alias
from pandas.tseries.offsets import (
BDay,
Day,
Tick,
)
if TYPE_CHECKING:
from typing import Literal
_midnight = time(0, 0)
def tz_to_dtype(tz):
    """
    Return a datetime64[ns] dtype appropriate for the given timezone.

    Parameters
    ----------
    tz : tzinfo or None

    Returns
    -------
    np.dtype or Datetime64TZDType
        ``datetime64[ns]`` for tz-naive data, otherwise a
        ``DatetimeTZDtype`` carrying ``tz``.
    """
    return DT64NS_DTYPE if tz is None else DatetimeTZDtype(tz=tz)
def _field_accessor(name, field, docstring=None):
    """Build a read-only property extracting *field* from the local timestamps."""

    def accessor(self):
        local_i8 = self._local_timestamps()
        if field in self._bool_ops:
            # boolean-by-definition fields: returned without NaT masking
            if field.endswith(("start", "end")):
                freq = self.freq
                month_kw = 12
                if freq:
                    kwds = freq.kwds
                    month_kw = kwds.get("startingMonth", kwds.get("month", 12))
                return fields.get_start_end_field(
                    local_i8, field, self.freqstr, month_kw
                )
            return fields.get_date_field(local_i8, field)
        if field in self._object_ops:
            # object-dtype results (e.g. names); mask NaT with None
            out = fields.get_date_name_field(local_i8, field)
            return self._maybe_mask_results(out, fill_value=None)
        # integer calendar fields; NaT positions become float NaN
        out = fields.get_date_field(local_i8, field)
        return self._maybe_mask_results(out, fill_value=None, convert="float64")

    accessor.__name__ = name
    accessor.__doc__ = docstring
    return property(accessor)
class DatetimeArray(dtl.TimelikeOps, dtl.DatelikeOps):
"""
Pandas ExtensionArray for tz-naive or tz-aware datetime data.
.. versionadded:: 0.24.0
.. warning::
DatetimeArray is currently experimental, and its API may change
without warning. In particular, :attr:`DatetimeArray.dtype` is
expected to change to always be an instance of an ``ExtensionDtype``
subclass.
Parameters
----------
values : Series, Index, DatetimeArray, ndarray
The datetime data.
For DatetimeArray `values` (or a Series or Index boxing one),
`dtype` and `freq` will be extracted from `values`.
dtype : numpy.dtype or DatetimeTZDtype
Note that the only NumPy dtype allowed is 'datetime64[ns]'.
freq : str or Offset, optional
The frequency.
copy : bool, default False
Whether to copy the underlying array of values.
Attributes
----------
None
Methods
-------
None
"""
_typ = "datetimearray"
_scalar_type = Timestamp
_recognized_scalars = (datetime, np.datetime64)
_is_recognized_dtype = is_datetime64_any_dtype
_infer_matches = ("datetime", "datetime64", "date")
# define my properties & methods for delegation
_bool_ops = [
"is_month_start",
"is_month_end",
"is_quarter_start",
"is_quarter_end",
"is_year_start",
"is_year_end",
"is_leap_year",
]
_object_ops = ["freq", "tz"]
_field_ops = [
"year",
"month",
"day",
"hour",
"minute",
"second",
"weekofyear",
"week",
"weekday",
"dayofweek",
"day_of_week",
"dayofyear",
"day_of_year",
"quarter",
"days_in_month",
"daysinmonth",
"microsecond",
"nanosecond",
]
_other_ops = ["date", "time", "timetz"]
_datetimelike_ops = _field_ops + _object_ops + _bool_ops + _other_ops
_datetimelike_methods = [
"to_period",
"tz_localize",
"tz_convert",
"normalize",
"strftime",
"round",
"floor",
"ceil",
"month_name",
"day_name",
]
# ndim is inherited from ExtensionArray, must exist to ensure
# Timestamp.__richcmp__(DateTimeArray) operates pointwise
# ensure that operations with numpy arrays defer to our implementation
__array_priority__ = 1000
# -----------------------------------------------------------------
# Constructors
_dtype: Union[np.dtype, DatetimeTZDtype]
_freq = None
    def __init__(self, values, dtype=DT64NS_DTYPE, freq=None, copy=False):
        """Validate inputs and store the backing M8[ns] ndarray, dtype and freq.

        ``values`` may be a DatetimeArray (unwrapped, with tz/freq
        reconciled against ``dtype``/``freq``), an int64 or M8[ns]
        ndarray, or anything ``extract_array`` reduces to one of those.
        """
        values = extract_array(values, extract_numpy=True)
        if isinstance(values, IntegerArray):
            # masked-integer input: NA slots become iNaT sentinels
            values = values.to_numpy("int64", na_value=iNaT)
        inferred_freq = getattr(values, "_freq", None)
        if isinstance(values, type(self)):
            # validation
            dtz = getattr(dtype, "tz", None)
            if dtz and values.tz is None:
                dtype = DatetimeTZDtype(tz=dtype.tz)
            elif dtz and values.tz:
                # both sides carry a tz: they must agree
                if not timezones.tz_compare(dtz, values.tz):
                    msg = (
                        "Timezone of the array and 'dtype' do not match. "
                        f"'{dtz}' != '{values.tz}'"
                    )
                    raise TypeError(msg)
            elif values.tz:
                dtype = values.dtype
            if freq is None:
                # inherit the source array's freq when none was given
                freq = values.freq
            values = values._ndarray
        if not isinstance(values, np.ndarray):
            raise ValueError(
                f"Unexpected type '{type(values).__name__}'. 'values' must be "
                "a DatetimeArray, ndarray, or Series or Index containing one of those."
            )
        if values.ndim not in [1, 2]:
            raise ValueError("Only 1-dimensional input arrays are supported.")
        if values.dtype == "i8":
            # for compat with datetime/timedelta/period shared methods,
            # we can sometimes get here with int64 values. These represent
            # nanosecond UTC (or tz-naive) unix timestamps
            values = values.view(DT64NS_DTYPE)
        if values.dtype != DT64NS_DTYPE:
            raise ValueError(
                "The dtype of 'values' is incorrect. Must be 'datetime64[ns]'. "
                f"Got {values.dtype} instead."
            )
        dtype = _validate_dt64_dtype(dtype)
        if freq == "infer":
            raise ValueError(
                "Frequency inference not allowed in DatetimeArray.__init__. "
                "Use 'pd.array()' instead."
            )
        if copy:
            values = values.copy()
        if freq:
            freq = to_offset(freq)
        if getattr(dtype, "tz", None):
            # https://github.com/pandas-dev/pandas/issues/18595
            # Ensure that we have a standard timezone for pytz objects.
            # Without this, things like adding an array of timedeltas and
            # a tz-aware Timestamp (with a tz specific to its datetime) will
            # be incorrect(ish?) for the array as a whole
            dtype = DatetimeTZDtype(tz=timezones.tz_standardize(dtype.tz))
        self._ndarray = values
        self._dtype = dtype
        self._freq = freq
        if inferred_freq is None and freq is not None:
            # explicitly-passed freq: verify it matches the data
            type(self)._validate_frequency(self, freq)
@classmethod
def _simple_new(
cls, values, freq: Optional[BaseOffset] = None, dtype=DT64NS_DTYPE
) -> DatetimeArray:
assert isinstance(values, np.ndarray)
assert values.dtype == DT64NS_DTYPE
result = object.__new__(cls)
result._ndarray = values
result._freq = freq
result._dtype = dtype
return result
    @classmethod
    def _from_sequence(cls, scalars, *, dtype=None, copy: bool = False):
        # Thin wrapper: the strict EA entry point defers to the permissive
        # constructor with its default parsing/inference options.
        return cls._from_sequence_not_strict(scalars, dtype=dtype, copy=copy)
    @classmethod
    def _from_sequence_not_strict(
        cls,
        data,
        dtype=None,
        copy=False,
        tz=None,
        freq=lib.no_default,
        dayfirst=False,
        yearfirst=False,
        ambiguous="raise",
    ):
        """Construct from an arbitrary sequence, with freq/tz inference.

        ``freq`` distinguishes three states: lib.no_default (not given),
        None (explicitly "no freq"), or a concrete frequency/"infer".
        """
        # remember whether the caller explicitly asked for freq=None
        explicit_none = freq is None
        freq = freq if freq is not lib.no_default else None
        freq, freq_infer = dtl.maybe_infer_freq(freq)
        subarr, tz, inferred_freq = sequence_to_dt64ns(
            data,
            dtype=dtype,
            copy=copy,
            tz=tz,
            dayfirst=dayfirst,
            yearfirst=yearfirst,
            ambiguous=ambiguous,
        )
        freq, freq_infer = dtl.validate_inferred_freq(freq, inferred_freq, freq_infer)
        if explicit_none:
            # honor the caller's explicit freq=None over any inference
            freq = None
        dtype = tz_to_dtype(tz)
        result = cls._simple_new(subarr, freq=freq, dtype=dtype)
        if inferred_freq is None and freq is not None:
            # this condition precludes `freq_infer`
            cls._validate_frequency(result, freq, ambiguous=ambiguous)
        elif freq_infer:
            # Set _freq directly to bypass duplicative _validate_frequency
            # check.
            result._freq = to_offset(result.inferred_freq)
        return result
    @classmethod
    def _generate_range(
        cls,
        start,
        end,
        periods,
        freq,
        tz=None,
        normalize=False,
        ambiguous="raise",
        nonexistent="raise",
        closed=None,
    ):
        """Generate a regular DatetimeArray from start/end/periods/freq.

        Exactly three of (start, end, periods, freq) must be given. With a
        freq, points are generated by offset arithmetic; without one, the
        range is linearly spaced between start and end. ``closed``
        controls endpoint inclusion.
        """
        periods = dtl.validate_periods(periods)
        if freq is None and any(x is None for x in [periods, start, end]):
            raise ValueError("Must provide freq argument if no data is supplied")
        if com.count_not_none(start, end, periods, freq) != 3:
            raise ValueError(
                "Of the four parameters: start, end, periods, "
                "and freq, exactly three must be specified"
            )
        freq = to_offset(freq)
        if start is not None:
            start = Timestamp(start)
        if end is not None:
            end = Timestamp(end)
        if start is NaT or end is NaT:
            raise ValueError("Neither `start` nor `end` can be NaT")
        left_closed, right_closed = dtl.validate_endpoints(closed)
        start, end, _normalized = _maybe_normalize_endpoints(start, end, normalize)
        # tz may come from the endpoints rather than the tz argument
        tz = _infer_tz_from_endpoints(start, end, tz)
        if tz is not None:
            # Localize the start and end arguments
            start_tz = None if start is None else start.tz
            end_tz = None if end is None else end.tz
            start = _maybe_localize_point(
                start, start_tz, start, freq, tz, ambiguous, nonexistent
            )
            end = _maybe_localize_point(
                end, end_tz, end, freq, tz, ambiguous, nonexistent
            )
        if freq is not None:
            # We break Day arithmetic (fixed 24 hour) here and opt for
            # Day to mean calendar day (23/24/25 hour). Therefore, strip
            # tz info from start and day to avoid DST arithmetic
            if isinstance(freq, Day):
                if start is not None:
                    start = start.tz_localize(None)
                if end is not None:
                    end = end.tz_localize(None)
            if isinstance(freq, Tick):
                # fixed-size steps: fast vectorized path
                values = generate_regular_range(start, end, periods, freq)
            else:
                # calendar-aware offsets: generate point by point
                xdr = generate_range(start=start, end=end, periods=periods, offset=freq)
                values = np.array([x.value for x in xdr], dtype=np.int64)
            _tz = start.tz if start is not None else end.tz
            values = values.view("M8[ns]")
            index = cls._simple_new(values, freq=freq, dtype=tz_to_dtype(_tz))
            if tz is not None and index.tz is None:
                arr = tzconversion.tz_localize_to_utc(
                    index.asi8, tz, ambiguous=ambiguous, nonexistent=nonexistent
                )
                index = cls(arr)
                # index is localized datetime64 array -> have to convert
                # start/end as well to compare
                if start is not None:
                    start = start.tz_localize(tz, ambiguous, nonexistent).asm8
                if end is not None:
                    end = end.tz_localize(tz, ambiguous, nonexistent).asm8
        else:
            # Create a linearly spaced date_range in local time
            # Nanosecond-granularity timestamps aren't always correctly
            # representable with doubles, so we limit the range that we
            # pass to np.linspace as much as possible
            arr = (
                np.linspace(0, end.value - start.value, periods, dtype="int64")
                + start.value
            )
            dtype = tz_to_dtype(tz)
            arr = arr.astype("M8[ns]", copy=False)
            index = cls._simple_new(arr, freq=None, dtype=dtype)
        # trim endpoints excluded by `closed`
        if not left_closed and len(index) and index[0] == start:
            # TODO: overload DatetimeLikeArrayMixin.__getitem__
            index = cast(DatetimeArray, index[1:])
        if not right_closed and len(index) and index[-1] == end:
            # TODO: overload DatetimeLikeArrayMixin.__getitem__
            index = cast(DatetimeArray, index[:-1])
        dtype = tz_to_dtype(tz)
        return cls._simple_new(index._ndarray, freq=freq, dtype=dtype)
# -----------------------------------------------------------------
# DatetimeLike Interface
def _unbox_scalar(self, value, setitem: bool = False) -> np.datetime64:
if not isinstance(value, self._scalar_type) and value is not NaT:
raise ValueError("'value' should be a Timestamp.")
self._check_compatible_with(value, setitem=setitem)
return value.asm8
    def _scalar_from_string(self, value):
        # Parse a string into a Timestamp localized to this array's tz.
        return Timestamp(value, tz=self.tz)
def _check_compatible_with(self, other, setitem: bool = False):
if other is NaT:
return
self._assert_tzawareness_compat(other)
if setitem:
# Stricter check for setitem vs comparison methods
if not timezones.tz_compare(self.tz, other.tz):
raise ValueError(f"Timezones don't match. '{self.tz}' != '{other.tz}'")
# -----------------------------------------------------------------
# Descriptive Properties
    def _box_func(self, x) -> Union[Timestamp, NaTType]:
        # Box a raw datetime64/int value as a Timestamp carrying this
        # array's freq and tz (NaT input yields NaT).
        return Timestamp(x, freq=self.freq, tz=self.tz)
    @property
    # error: Return type "Union[dtype, DatetimeTZDtype]" of "dtype"
    # incompatible with return type "ExtensionDtype" in supertype
    # "ExtensionArray"
    def dtype(self) -> Union[np.dtype, DatetimeTZDtype]:  # type: ignore[override]
        """
        The dtype for the DatetimeArray.

        .. warning::

           A future version of pandas will change dtype to never be a
           ``numpy.dtype``. Instead, :attr:`DatetimeArray.dtype` will
           always be an instance of an ``ExtensionDtype`` subclass.

        Returns
        -------
        numpy.dtype or DatetimeTZDtype
            If the values are tz-naive, then ``np.dtype('datetime64[ns]')``
            is returned.
            If the values are tz-aware, then the ``DatetimeTZDtype``
            is returned.
        """
        # set at construction time (see __init__/_simple_new)
        return self._dtype
    @property
    def tz(self):
        """
        Return timezone, if any.

        Returns
        -------
        datetime.tzinfo, pytz.tzinfo.BaseTZInfo, dateutil.tz.tz.tzfile, or None
            Returns None when the array is tz-naive.
        """
        # GH 18595
        # the tz lives on the dtype (DatetimeTZDtype); plain np.dtype has none
        return getattr(self.dtype, "tz", None)
    @tz.setter
    def tz(self, value):
        # GH 3746: Prevent localizing or converting the index by setting tz
        # (assignment would silently reinterpret the stored timestamps).
        raise AttributeError(
            "Cannot directly set timezone. Use tz_localize() "
            "or tz_convert() as appropriate"
        )
    @property
    def tzinfo(self):
        """
        Alias for tz attribute (mirrors datetime.datetime.tzinfo naming).
        """
        return self.tz
    @property  # NB: override with cache_readonly in immutable subclasses
    def is_normalized(self):
        """
        Returns True if all of the dates are at midnight ("no time")
        """
        # checked on the raw i8 values, interpreted in local time via tz
        return is_date_array_normalized(self.asi8, self.tz)
    @property  # NB: override with cache_readonly in immutable subclasses
    def _resolution_obj(self) -> Resolution:
        # Finest Resolution (day/hour/.../nanosecond) needed to represent
        # the data, computed from the local-time i8 values.
        return get_resolution(self.asi8, self.tz)
# ----------------------------------------------------------------
# Array-Like / EA-Interface Methods
    def __array__(self, dtype=None) -> np.ndarray:
        # With no explicit dtype, tz-aware data converts to object dtype
        # so the tz information survives the trip to an ndarray.
        if dtype is None and self.tz:
            # The default for tz-aware is object, to preserve tz info
            dtype = object
        return super().__array__(dtype=dtype)
def __iter__(self):
"""
Return an iterator over the boxed values
Yields
------
tstamp : Timestamp
"""
if self.ndim > 1:
for i in range(len(self)):
yield self[i]
else:
# convert in chunks of 10k for efficiency
data = self.asi8
length = len(self)
chunksize = 10000
chunks = (length // chunksize) + 1
for i in range(chunks):
start_i = i * chunksize
end_i = min((i + 1) * chunksize, length)
converted = ints_to_pydatetime(
data[start_i:end_i], tz=self.tz, freq=self.freq, box="timestamp"
)
yield from converted
    def astype(self, dtype, copy=True):
        # We handle
        #   --> datetime (same-dtype, ns-dtype and unit conversions)
        #   --> period
        # DatetimeLikeArrayMixin Super handles the rest.
        dtype = pandas_dtype(dtype)
        if is_dtype_equal(dtype, self.dtype):
            # no-op conversion; copy only if requested
            if copy:
                return self.copy()
            return self
        elif is_datetime64_ns_dtype(dtype):
            # tz-naive <-> tz-aware within datetime64[ns]
            return astype_dt64_to_dt64tz(self, dtype, copy, via_utc=False)
        elif self.tz is None and is_datetime64_dtype(dtype) and dtype != self.dtype:
            # unit conversion e.g. datetime64[s]
            return self._ndarray.astype(dtype)
        elif is_period_dtype(dtype):
            return self.to_period(freq=dtype.freq)
        return dtl.DatetimeLikeArrayMixin.astype(self, dtype, copy)
# -----------------------------------------------------------------
# Rendering Methods
    @dtl.ravel_compat
    def _format_native_types(self, na_rep="NaT", date_format=None, **kwargs):
        """Render the values as an array of strings for display/CSV output."""
        # local import avoids a circular dependency with the formatting code
        from pandas.io.formats.format import get_format_datetime64_from_values
        fmt = get_format_datetime64_from_values(self, date_format)
        return tslib.format_array_from_datetime(
            self.asi8, tz=self.tz, format=fmt, na_rep=na_rep
        )
# -----------------------------------------------------------------
# Comparison Methods
def _has_same_tz(self, other) -> bool:
# vzone shouldn't be None if value is non-datetime like
if isinstance(other, np.datetime64):
# convert to Timestamp as np.datetime64 doesn't have tz attr
other = Timestamp(other)
if not hasattr(other, "tzinfo"):
return False
other_tz = other.tzinfo
return timezones.tz_compare(self.tzinfo, other_tz)
def _assert_tzawareness_compat(self, other):
# adapted from _Timestamp._assert_tzawareness_compat
other_tz = getattr(other, "tzinfo", None)
other_dtype = getattr(other, "dtype", None)
if is_datetime64tz_dtype(other_dtype):
# Get tzinfo from Series dtype
other_tz = other.dtype.tz
if other is NaT:
# pd.NaT quacks both aware and naive
pass
elif self.tz is None:
if other_tz is not None:
raise TypeError(
"Cannot compare tz-naive and tz-aware datetime-like objects."
)
elif other_tz is None:
raise TypeError(
"Cannot compare tz-naive and tz-aware datetime-like objects"
)
# -----------------------------------------------------------------
# Arithmetic Methods
def _sub_datetime_arraylike(self, other):
    """subtract DatetimeArray/Index or ndarray[datetime64]"""
    if len(self) != len(other):
        # NOTE(review): message says "add" but this is subtraction; kept
        # as-is since external tests may match on the exact text.
        raise ValueError("cannot add indices of unequal length")

    if isinstance(other, np.ndarray):
        assert is_datetime64_dtype(other)
        # Wrap the raw ndarray so we get tz/isnan handling below.
        other = type(self)(other)

    if not self._has_same_tz(other):
        # require tz compat
        raise TypeError(
            f"{type(self).__name__} subtraction must have the same "
            "timezones or no timezones"
        )

    self_i8 = self.asi8
    other_i8 = other.asi8
    # Positions that are NaT on either side must not report bogus overflow
    # and must come out as NaT in the result.
    arr_mask = self._isnan | other._isnan
    new_values = checked_add_with_arr(self_i8, -other_i8, arr_mask=arr_mask)
    if self._hasnans or other._hasnans:
        np.putmask(new_values, arr_mask, iNaT)

    return new_values.view("timedelta64[ns]")
def _add_offset(self, offset):
    """
    Add a non-Tick DateOffset to the array.

    Tick (fixed-timedelta) offsets are asserted out; this path handles
    calendar-aware offsets, applied in local wall-clock time.
    """
    if self.ndim == 2:
        # Apply on the flattened values, then restore the 2D shape.
        return self.ravel()._add_offset(offset).reshape(self.shape)

    assert not isinstance(offset, Tick)
    try:
        if self.tz is not None:
            # Apply the offset in wall-clock time, re-localize afterwards.
            values = self.tz_localize(None)
        else:
            values = self
        result = offset._apply_array(values).view("M8[ns]")
        result = DatetimeArray._simple_new(result)
        result = result.tz_localize(self.tz)

    except NotImplementedError:
        # Offset has no vectorized path: fall back to slow elementwise
        # application on object dtype.
        warnings.warn(
            "Non-vectorized DateOffset being applied to Series or DatetimeIndex",
            PerformanceWarning,
        )
        result = self.astype("O") + offset
        if not len(self):
            # GH#30336 _from_sequence won't be able to infer self.tz
            return type(self)._from_sequence(result).tz_localize(self.tz)

    return type(self)._from_sequence(result)
def _sub_datetimelike_scalar(self, other):
    # subtract a datetime from myself, yielding a ndarray[timedelta64[ns]]
    assert isinstance(other, (datetime, np.datetime64))
    assert other is not NaT
    other = Timestamp(other)
    # Defensive: Timestamp() can still yield NaT here (presumably from a
    # NaT-valued np.datetime64, which passes the identity assert above).
    if other is NaT:
        return self - NaT

    if not self._has_same_tz(other):
        # require tz compat
        raise TypeError(
            "Timestamp subtraction must have the same timezones or no timezones"
        )

    i8 = self.asi8
    # Overflow-checked subtraction; our NaT positions are masked out and
    # restored as NaT by _maybe_mask_results.
    result = checked_add_with_arr(i8, -other.value, arr_mask=self._isnan)
    result = self._maybe_mask_results(result)
    return result.view("timedelta64[ns]")
# -----------------------------------------------------------------
# Timezone Conversion and Localization Methods
def _local_timestamps(self):
"""
Convert to an i8 (unix-like nanosecond timestamp) representation
while keeping the local timezone and not using UTC.
This is used to calculate time-of-day information as if the timestamps
were timezone-naive.
"""
if self.tz is None or timezones.is_utc(self.tz):
return self.asi8
return tzconversion.tz_convert_from_utc(self.asi8, self.tz)
def tz_convert(self, tz):
    """
    Convert tz-aware Datetime Array/Index from one time zone to another.

    Parameters
    ----------
    tz : str, pytz.timezone, dateutil.tz.tzfile or None
        Time zone for time. Corresponding timestamps would be converted
        to this time zone of the Datetime Array/Index. A `tz` of None will
        convert to UTC and remove the timezone information.

    Returns
    -------
    Array or Index

    Raises
    ------
    TypeError
        If Datetime Array/Index is tz-naive.

    See Also
    --------
    DatetimeIndex.tz : A timezone that has a variable offset from UTC.
    DatetimeIndex.tz_localize : Localize tz-naive DatetimeIndex to a
        given time zone, or remove timezone from a tz-aware DatetimeIndex.

    Examples
    --------
    With the `tz` parameter, we can change the DatetimeIndex
    to other time zones:

    >>> dti = pd.date_range(start='2014-08-01 09:00',
    ...                     freq='H', periods=3, tz='Europe/Berlin')

    >>> dti
    DatetimeIndex(['2014-08-01 09:00:00+02:00',
                   '2014-08-01 10:00:00+02:00',
                   '2014-08-01 11:00:00+02:00'],
                  dtype='datetime64[ns, Europe/Berlin]', freq='H')

    >>> dti.tz_convert('US/Central')
    DatetimeIndex(['2014-08-01 02:00:00-05:00',
                   '2014-08-01 03:00:00-05:00',
                   '2014-08-01 04:00:00-05:00'],
                  dtype='datetime64[ns, US/Central]', freq='H')

    With the ``tz=None``, we can remove the timezone (after converting
    to UTC if necessary):

    >>> dti = pd.date_range(start='2014-08-01 09:00', freq='H',
    ...                     periods=3, tz='Europe/Berlin')

    >>> dti
    DatetimeIndex(['2014-08-01 09:00:00+02:00',
                   '2014-08-01 10:00:00+02:00',
                   '2014-08-01 11:00:00+02:00'],
                  dtype='datetime64[ns, Europe/Berlin]', freq='H')

    >>> dti.tz_convert(None)
    DatetimeIndex(['2014-08-01 07:00:00',
                   '2014-08-01 08:00:00',
                   '2014-08-01 09:00:00'],
                  dtype='datetime64[ns]', freq='H')
    """
    tz = timezones.maybe_get_tz(tz)

    if self.tz is None:
        # tz naive, use tz_localize
        raise TypeError(
            "Cannot convert tz-naive timestamps, use tz_localize to localize"
        )

    # No conversion since timestamps are all UTC to begin with; only the
    # dtype (which carries the tz) changes, the i8 data are reused as-is.
    dtype = tz_to_dtype(tz)
    return self._simple_new(self._ndarray, dtype=dtype, freq=self.freq)
@dtl.ravel_compat
def tz_localize(self, tz, ambiguous="raise", nonexistent="raise"):
    """
    Localize tz-naive Datetime Array/Index to tz-aware
    Datetime Array/Index.

    This method takes a time zone (tz) naive Datetime Array/Index object
    and makes this time zone aware. It does not move the time to another
    time zone.

    Time zone localization helps to switch from time zone aware to time
    zone unaware objects.

    Parameters
    ----------
    tz : str, pytz.timezone, dateutil.tz.tzfile or None
        Time zone to convert timestamps to. Passing ``None`` will
        remove the time zone information preserving local time.
    ambiguous : 'infer', 'NaT', bool array, default 'raise'
        When clocks moved backward due to DST, ambiguous times may arise.
        For example in Central European Time (UTC+01), when going from
        03:00 DST to 02:00 non-DST, 02:30:00 local time occurs both at
        00:30:00 UTC and at 01:30:00 UTC. In such a situation, the
        `ambiguous` parameter dictates how ambiguous times should be
        handled.

        - 'infer' will attempt to infer fall dst-transition hours based on
          order
        - bool-ndarray where True signifies a DST time, False signifies a
          non-DST time (note that this flag is only applicable for
          ambiguous times)
        - 'NaT' will return NaT where there are ambiguous times
        - 'raise' will raise an AmbiguousTimeError if there are ambiguous
          times.

    nonexistent : 'shift_forward', 'shift_backward, 'NaT', timedelta, \
default 'raise'
        A nonexistent time does not exist in a particular timezone
        where clocks moved forward due to DST.

        - 'shift_forward' will shift the nonexistent time forward to the
          closest existing time
        - 'shift_backward' will shift the nonexistent time backward to the
          closest existing time
        - 'NaT' will return NaT where there are nonexistent times
        - timedelta objects will shift nonexistent times by the timedelta
        - 'raise' will raise an NonExistentTimeError if there are
          nonexistent times.

        .. versionadded:: 0.24.0

    Returns
    -------
    Same type as self
        Array/Index converted to the specified time zone.

    Raises
    ------
    TypeError
        If the Datetime Array/Index is tz-aware and tz is not None.

    See Also
    --------
    DatetimeIndex.tz_convert : Convert tz-aware DatetimeIndex from
        one time zone to another.

    Examples
    --------
    >>> tz_naive = pd.date_range('2018-03-01 09:00', periods=3)
    >>> tz_naive
    DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
                   '2018-03-03 09:00:00'],
                  dtype='datetime64[ns]', freq='D')

    Localize DatetimeIndex in US/Eastern time zone:

    >>> tz_aware = tz_naive.tz_localize(tz='US/Eastern')
    >>> tz_aware
    DatetimeIndex(['2018-03-01 09:00:00-05:00',
                   '2018-03-02 09:00:00-05:00',
                   '2018-03-03 09:00:00-05:00'],
                  dtype='datetime64[ns, US/Eastern]', freq=None)

    With the ``tz=None``, we can remove the time zone information
    while keeping the local time (not converted to UTC):

    >>> tz_aware.tz_localize(None)
    DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
                   '2018-03-03 09:00:00'],
                  dtype='datetime64[ns]', freq=None)

    Be careful with DST changes. When there is sequential data, pandas can
    infer the DST time:

    >>> s = pd.to_datetime(pd.Series(['2018-10-28 01:30:00',
    ...                               '2018-10-28 02:00:00',
    ...                               '2018-10-28 02:30:00',
    ...                               '2018-10-28 02:00:00',
    ...                               '2018-10-28 02:30:00',
    ...                               '2018-10-28 03:00:00',
    ...                               '2018-10-28 03:30:00']))
    >>> s.dt.tz_localize('CET', ambiguous='infer')
    0   2018-10-28 01:30:00+02:00
    1   2018-10-28 02:00:00+02:00
    2   2018-10-28 02:30:00+02:00
    3   2018-10-28 02:00:00+01:00
    4   2018-10-28 02:30:00+01:00
    5   2018-10-28 03:00:00+01:00
    6   2018-10-28 03:30:00+01:00
    dtype: datetime64[ns, CET]

    In some cases, inferring the DST is impossible. In such cases, you can
    pass an ndarray to the ambiguous parameter to set the DST explicitly

    >>> s = pd.to_datetime(pd.Series(['2018-10-28 01:20:00',
    ...                               '2018-10-28 02:36:00',
    ...                               '2018-10-28 03:46:00']))
    >>> s.dt.tz_localize('CET', ambiguous=np.array([True, True, False]))
    0   2018-10-28 01:20:00+02:00
    1   2018-10-28 02:36:00+02:00
    2   2018-10-28 03:46:00+01:00
    dtype: datetime64[ns, CET]

    If the DST transition causes nonexistent times, you can shift these
    dates forward or backwards with a timedelta object or `'shift_forward'`
    or `'shift_backwards'`.

    >>> s = pd.to_datetime(pd.Series(['2015-03-29 02:30:00',
    ...                               '2015-03-29 03:30:00']))
    >>> s.dt.tz_localize('Europe/Warsaw', nonexistent='shift_forward')
    0   2015-03-29 03:00:00+02:00
    1   2015-03-29 03:30:00+02:00
    dtype: datetime64[ns, Europe/Warsaw]

    >>> s.dt.tz_localize('Europe/Warsaw', nonexistent='shift_backward')
    0   2015-03-29 01:59:59.999999999+01:00
    1   2015-03-29 03:30:00+02:00
    dtype: datetime64[ns, Europe/Warsaw]

    >>> s.dt.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H'))
    0   2015-03-29 03:30:00+02:00
    1   2015-03-29 03:30:00+02:00
    dtype: datetime64[ns, Europe/Warsaw]
    """
    nonexistent_options = ("raise", "NaT", "shift_forward", "shift_backward")
    if nonexistent not in nonexistent_options and not isinstance(
        nonexistent, timedelta
    ):
        raise ValueError(
            "The nonexistent argument must be one of 'raise', "
            "'NaT', 'shift_forward', 'shift_backward' or "
            "a timedelta object"
        )

    if self.tz is not None:
        if tz is None:
            # De-localize: convert UTC i8 values back to wall time.
            new_dates = tzconversion.tz_convert_from_utc(self.asi8, self.tz)
        else:
            raise TypeError("Already tz-aware, use tz_convert to convert.")
    else:
        tz = timezones.maybe_get_tz(tz)
        # Convert to UTC
        new_dates = tzconversion.tz_localize_to_utc(
            self.asi8, tz, ambiguous=ambiguous, nonexistent=nonexistent
        )
    new_dates = new_dates.view(DT64NS_DTYPE)
    dtype = tz_to_dtype(tz)

    freq = None
    if timezones.is_utc(tz) or (len(self) == 1 and not isna(new_dates[0])):
        # we can preserve freq
        # TODO: Also for fixed-offsets
        freq = self.freq
    elif tz is None and self.tz is None:
        # no-op
        freq = self.freq
    return self._simple_new(new_dates, dtype=dtype, freq=freq)
# ----------------------------------------------------------------
# Conversion Methods - Vectorized analogues of Timestamp methods
def to_pydatetime(self) -> np.ndarray:
    """
    Return Datetime Array/Index as object ndarray of datetime.datetime
    objects.

    Returns
    -------
    datetimes : ndarray
    """
    # The cython helper attaches our timezone (if any) to each element.
    return ints_to_pydatetime(self.asi8, tz=self.tz)
def normalize(self):
    """
    Convert times to midnight.

    The time component of the date-time is converted to midnight i.e.
    00:00:00. This is useful in cases, when the time does not matter.
    Length is unaltered. The timezones are unaffected.

    This method is available on Series with datetime values under
    the ``.dt`` accessor, and directly on Datetime Array/Index.

    Returns
    -------
    DatetimeArray, DatetimeIndex or Series
        The same type as the original data. Series will have the same
        name and index. DatetimeIndex will have the same name.

    See Also
    --------
    floor : Floor the datetimes to the specified freq.
    ceil : Ceil the datetimes to the specified freq.
    round : Round the datetimes to the specified freq.

    Examples
    --------
    >>> idx = pd.date_range(start='2014-08-01 10:00', freq='H',
    ...                     periods=3, tz='Asia/Calcutta')
    >>> idx
    DatetimeIndex(['2014-08-01 10:00:00+05:30',
                   '2014-08-01 11:00:00+05:30',
                   '2014-08-01 12:00:00+05:30'],
                  dtype='datetime64[ns, Asia/Calcutta]', freq='H')
    >>> idx.normalize()
    DatetimeIndex(['2014-08-01 00:00:00+05:30',
                   '2014-08-01 00:00:00+05:30',
                   '2014-08-01 00:00:00+05:30'],
                  dtype='datetime64[ns, Asia/Calcutta]', freq=None)
    """
    # Truncate in local wall time, then re-attach the original tz; the
    # result may no longer be evenly spaced, so the freq is re-inferred.
    new_values = normalize_i8_timestamps(self.asi8, self.tz)
    return type(self)(new_values)._with_freq("infer").tz_localize(self.tz)
@dtl.ravel_compat
def to_period(self, freq=None):
    """
    Cast to PeriodArray/Index at a particular frequency.

    Converts DatetimeArray/Index to PeriodArray/Index.

    Parameters
    ----------
    freq : str or Offset, optional
        One of pandas' :ref:`offset strings <timeseries.offset_aliases>`
        or an Offset object. Will be inferred by default.

    Returns
    -------
    PeriodArray/Index

    Raises
    ------
    ValueError
        When converting a DatetimeArray/Index with non-regular values,
        so that a frequency cannot be inferred.

    See Also
    --------
    PeriodIndex: Immutable ndarray holding ordinal values.
    DatetimeIndex.to_pydatetime: Return DatetimeIndex as object.

    Examples
    --------
    >>> df = pd.DataFrame({"y": [1, 2, 3]},
    ...                   index=pd.to_datetime(["2000-03-31 00:00:00",
    ...                                         "2000-05-31 00:00:00",
    ...                                         "2000-08-31 00:00:00"]))
    >>> df.index.to_period("M")
    PeriodIndex(['2000-03', '2000-05', '2000-08'],
                dtype='period[M]', freq='M')

    Infer the daily frequency

    >>> idx = pd.date_range("2017-01-01", periods=2)
    >>> idx.to_period()
    PeriodIndex(['2017-01-01', '2017-01-02'],
                dtype='period[D]', freq='D')
    """
    from pandas.core.arrays import PeriodArray

    if self.tz is not None:
        # Periods have no tz; the wall-clock values are kept but the
        # offset information is lost.
        warnings.warn(
            "Converting to PeriodArray/Index representation "
            "will drop timezone information.",
            UserWarning,
        )

    if freq is None:
        # Prefer an explicit freq on the index, else fall back to inference.
        freq = self.freqstr or self.inferred_freq

        if freq is None:
            raise ValueError(
                "You must pass a freq argument as current index has none."
            )

        res = get_period_alias(freq)

        # https://github.com/pandas-dev/pandas/issues/33358
        if res is None:
            res = freq

        freq = res

    return PeriodArray._from_datetime64(self._ndarray, freq, tz=self.tz)
def to_perioddelta(self, freq):
    """
    Calculate TimedeltaArray of difference between index
    values and index converted to PeriodArray at specified
    freq. Used for vectorized offsets.

    Parameters
    ----------
    freq : Period frequency

    Returns
    -------
    TimedeltaArray/Index
    """
    # Deprecation GH#34853
    warnings.warn(
        "to_perioddelta is deprecated and will be removed in a "
        "future version. "
        "Use `dtindex - dtindex.to_period(freq).to_timestamp()` instead",
        FutureWarning,
        stacklevel=3,
    )
    from pandas.core.arrays.timedeltas import TimedeltaArray

    # Difference in nanoseconds between each value and the start of its period.
    i8delta = self.asi8 - self.to_period(freq).to_timestamp().asi8
    m8delta = i8delta.view("m8[ns]")
    return TimedeltaArray(m8delta)
# -----------------------------------------------------------------
# Properties - Vectorized Timestamp Properties/Methods
def month_name(self, locale=None):
    """
    Return the month names of the DateTimeIndex with specified locale.

    Parameters
    ----------
    locale : str, optional
        Locale determining the language in which to return the month name.
        Default is English locale.

    Returns
    -------
    Index
        Index of month names.

    Examples
    --------
    >>> idx = pd.date_range(start='2018-01', freq='M', periods=3)
    >>> idx
    DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'],
                  dtype='datetime64[ns]', freq='M')
    >>> idx.month_name()
    Index(['January', 'February', 'March'], dtype='object')
    """
    # Field extraction operates on wall-clock (local) i8 values.
    names = fields.get_date_name_field(
        self._local_timestamps(), "month_name", locale=locale
    )
    # NaT positions are replaced with None.
    return self._maybe_mask_results(names, fill_value=None)
def day_name(self, locale=None):
    """
    Return the day names of the DateTimeIndex with specified locale.

    Parameters
    ----------
    locale : str, optional
        Locale determining the language in which to return the day name.
        Default is English locale.

    Returns
    -------
    Index
        Index of day names.

    Examples
    --------
    >>> idx = pd.date_range(start='2018-01-01', freq='D', periods=3)
    >>> idx
    DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03'],
                  dtype='datetime64[ns]', freq='D')
    >>> idx.day_name()
    Index(['Monday', 'Tuesday', 'Wednesday'], dtype='object')
    """
    # Field extraction operates on wall-clock (local) i8 values.
    names = fields.get_date_name_field(
        self._local_timestamps(), "day_name", locale=locale
    )
    # NaT positions are replaced with None.
    return self._maybe_mask_results(names, fill_value=None)
@property
def time(self):
    """
    Returns numpy array of datetime.time. The time part of the Timestamps.
    """
    # If the Timestamps have a timezone that is not UTC,
    # convert them into their i8 representation while
    # keeping their timezone and not using UTC
    timestamps = self._local_timestamps()

    # box="time" makes the cython helper emit datetime.time objects.
    return ints_to_pydatetime(timestamps, box="time")
@property
def timetz(self):
    """
    Returns numpy array of datetime.time also containing timezone
    information. The time part of the Timestamps.
    """
    # Unlike `time`, the tz is passed through so each datetime.time
    # carries tzinfo.
    return ints_to_pydatetime(self.asi8, self.tz, box="time")
@property
def date(self):
    """
    Returns numpy array of python datetime.date objects (namely, the date
    part of Timestamps without timezone information).
    """
    # If the Timestamps have a timezone that is not UTC,
    # convert them into their i8 representation while
    # keeping their timezone and not using UTC
    timestamps = self._local_timestamps()

    # box="date" makes the cython helper emit datetime.date objects.
    return ints_to_pydatetime(timestamps, box="date")
def isocalendar(self):
    """
    Returns a DataFrame with the year, week, and day calculated according to
    the ISO 8601 standard.

    .. versionadded:: 1.1.0

    Returns
    -------
    DataFrame
        with columns year, week and day

    See Also
    --------
    Timestamp.isocalendar : Function return a 3-tuple containing ISO year,
        week number, and weekday for the given Timestamp object.
    datetime.date.isocalendar : Return a named tuple object with
        three components: year, week and weekday.

    Examples
    --------
    >>> idx = pd.date_range(start='2019-12-29', freq='D', periods=4)
    >>> idx.isocalendar()
                year  week  day
    2019-12-29  2019    52    7
    2019-12-30  2020     1    1
    2019-12-31  2020     1    2
    2020-01-01  2020     1    3
    >>> idx.isocalendar().week
    2019-12-29    52
    2019-12-30     1
    2019-12-31     1
    2020-01-01     1
    Freq: D, Name: week, dtype: UInt32
    """
    from pandas import DataFrame

    # ISO fields are computed from wall-clock (local) i8 values.
    values = self._local_timestamps()
    sarray = fields.build_isocalendar_sarray(values)
    iso_calendar_df = DataFrame(
        sarray, columns=["year", "week", "day"], dtype="UInt32"
    )
    if self._hasnans:
        # NaT rows become <NA> in all three columns.
        iso_calendar_df.iloc[self._isnan] = None
    return iso_calendar_df
@property
def weekofyear(self):
    """
    The week ordinal of the year.

    .. deprecated:: 1.1.0

    weekofyear and week have been deprecated.
    Please use DatetimeIndex.isocalendar().week instead.
    """
    warnings.warn(
        "weekofyear and week have been deprecated, please use "
        "DatetimeIndex.isocalendar().week instead, which returns "
        "a Series. To exactly reproduce the behavior of week and "
        "weekofyear and return an Index, you may call "
        "pd.Int64Index(idx.isocalendar().week)",
        FutureWarning,
        stacklevel=3,
    )
    weeks = self.isocalendar().week
    if weeks.hasnans:
        # NaT entries become NaN, which forces a float64 result.
        return weeks.to_numpy(dtype="float64", na_value=np.nan)
    return weeks.to_numpy(dtype="int64")
week = weekofyear
year = _field_accessor(
"year",
"Y",
"""
The year of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="Y")
... )
>>> datetime_series
0 2000-12-31
1 2001-12-31
2 2002-12-31
dtype: datetime64[ns]
>>> datetime_series.dt.year
0 2000
1 2001
2 2002
dtype: int64
""",
)
month = _field_accessor(
"month",
"M",
"""
The month as January=1, December=12.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="M")
... )
>>> datetime_series
0 2000-01-31
1 2000-02-29
2 2000-03-31
dtype: datetime64[ns]
>>> datetime_series.dt.month
0 1
1 2
2 3
dtype: int64
""",
)
day = _field_accessor(
"day",
"D",
"""
The day of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="D")
... )
>>> datetime_series
0 2000-01-01
1 2000-01-02
2 2000-01-03
dtype: datetime64[ns]
>>> datetime_series.dt.day
0 1
1 2
2 3
dtype: int64
""",
)
hour = _field_accessor(
"hour",
"h",
"""
The hours of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="h")
... )
>>> datetime_series
0 2000-01-01 00:00:00
1 2000-01-01 01:00:00
2 2000-01-01 02:00:00
dtype: datetime64[ns]
>>> datetime_series.dt.hour
0 0
1 1
2 2
dtype: int64
""",
)
minute = _field_accessor(
"minute",
"m",
"""
The minutes of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="T")
... )
>>> datetime_series
0 2000-01-01 00:00:00
1 2000-01-01 00:01:00
2 2000-01-01 00:02:00
dtype: datetime64[ns]
>>> datetime_series.dt.minute
0 0
1 1
2 2
dtype: int64
""",
)
second = _field_accessor(
"second",
"s",
"""
The seconds of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="s")
... )
>>> datetime_series
0 2000-01-01 00:00:00
1 2000-01-01 00:00:01
2 2000-01-01 00:00:02
dtype: datetime64[ns]
>>> datetime_series.dt.second
0 0
1 1
2 2
dtype: int64
""",
)
microsecond = _field_accessor(
"microsecond",
"us",
"""
The microseconds of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="us")
... )
>>> datetime_series
0 2000-01-01 00:00:00.000000
1 2000-01-01 00:00:00.000001
2 2000-01-01 00:00:00.000002
dtype: datetime64[ns]
>>> datetime_series.dt.microsecond
0 0
1 1
2 2
dtype: int64
""",
)
nanosecond = _field_accessor(
"nanosecond",
"ns",
"""
The nanoseconds of the datetime.
Examples
--------
>>> datetime_series = pd.Series(
... pd.date_range("2000-01-01", periods=3, freq="ns")
... )
>>> datetime_series
0 2000-01-01 00:00:00.000000000
1 2000-01-01 00:00:00.000000001
2 2000-01-01 00:00:00.000000002
dtype: datetime64[ns]
>>> datetime_series.dt.nanosecond
0 0
1 1
2 2
dtype: int64
""",
)
_dayofweek_doc = """
The day of the week with Monday=0, Sunday=6.
Return the day of the week. It is assumed the week starts on
Monday, which is denoted by 0 and ends on Sunday which is denoted
by 6. This method is available on both Series with datetime
values (using the `dt` accessor) or DatetimeIndex.
Returns
-------
Series or Index
Containing integers indicating the day number.
See Also
--------
Series.dt.dayofweek : Alias.
Series.dt.weekday : Alias.
Series.dt.day_name : Returns the name of the day of the week.
Examples
--------
>>> s = pd.date_range('2016-12-31', '2017-01-08', freq='D').to_series()
>>> s.dt.dayofweek
2016-12-31 5
2017-01-01 6
2017-01-02 0
2017-01-03 1
2017-01-04 2
2017-01-05 3
2017-01-06 4
2017-01-07 5
2017-01-08 6
Freq: D, dtype: int64
"""
day_of_week = _field_accessor("day_of_week", "dow", _dayofweek_doc)
dayofweek = day_of_week
weekday = day_of_week
day_of_year = _field_accessor(
"dayofyear",
"doy",
"""
The ordinal day of the year.
""",
)
dayofyear = day_of_year
quarter = _field_accessor(
"quarter",
"q",
"""
The quarter of the date.
""",
)
days_in_month = _field_accessor(
"days_in_month",
"dim",
"""
The number of days in the month.
""",
)
daysinmonth = days_in_month
_is_month_doc = """
Indicates whether the date is the {first_or_last} day of the month.
Returns
-------
Series or array
For Series, returns a Series with boolean values.
For DatetimeIndex, returns a boolean array.
See Also
--------
is_month_start : Return a boolean indicating whether the date
is the first day of the month.
is_month_end : Return a boolean indicating whether the date
is the last day of the month.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> s = pd.Series(pd.date_range("2018-02-27", periods=3))
>>> s
0 2018-02-27
1 2018-02-28
2 2018-03-01
dtype: datetime64[ns]
>>> s.dt.is_month_start
0 False
1 False
2 True
dtype: bool
>>> s.dt.is_month_end
0 False
1 True
2 False
dtype: bool
>>> idx = pd.date_range("2018-02-27", periods=3)
>>> idx.is_month_start
array([False, False, True])
>>> idx.is_month_end
array([False, True, False])
"""
is_month_start = _field_accessor(
"is_month_start", "is_month_start", _is_month_doc.format(first_or_last="first")
)
is_month_end = _field_accessor(
"is_month_end", "is_month_end", _is_month_doc.format(first_or_last="last")
)
is_quarter_start = _field_accessor(
"is_quarter_start",
"is_quarter_start",
"""
Indicator for whether the date is the first day of a quarter.
Returns
-------
is_quarter_start : Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
quarter : Return the quarter of the date.
is_quarter_end : Similar property for indicating the quarter start.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> df = pd.DataFrame({'dates': pd.date_range("2017-03-30",
... periods=4)})
>>> df.assign(quarter=df.dates.dt.quarter,
... is_quarter_start=df.dates.dt.is_quarter_start)
dates quarter is_quarter_start
0 2017-03-30 1 False
1 2017-03-31 1 False
2 2017-04-01 2 True
3 2017-04-02 2 False
>>> idx = pd.date_range('2017-03-30', periods=4)
>>> idx
DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_quarter_start
array([False, False, True, False])
""",
)
is_quarter_end = _field_accessor(
"is_quarter_end",
"is_quarter_end",
"""
Indicator for whether the date is the last day of a quarter.
Returns
-------
is_quarter_end : Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
quarter : Return the quarter of the date.
is_quarter_start : Similar property indicating the quarter start.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> df = pd.DataFrame({'dates': pd.date_range("2017-03-30",
... periods=4)})
>>> df.assign(quarter=df.dates.dt.quarter,
... is_quarter_end=df.dates.dt.is_quarter_end)
dates quarter is_quarter_end
0 2017-03-30 1 False
1 2017-03-31 1 True
2 2017-04-01 2 False
3 2017-04-02 2 False
>>> idx = pd.date_range('2017-03-30', periods=4)
>>> idx
DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_quarter_end
array([False, True, False, False])
""",
)
is_year_start = _field_accessor(
"is_year_start",
"is_year_start",
"""
Indicate whether the date is the first day of a year.
Returns
-------
Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
is_year_end : Similar property indicating the last day of the year.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> dates = pd.Series(pd.date_range("2017-12-30", periods=3))
>>> dates
0 2017-12-30
1 2017-12-31
2 2018-01-01
dtype: datetime64[ns]
>>> dates.dt.is_year_start
0 False
1 False
2 True
dtype: bool
>>> idx = pd.date_range("2017-12-30", periods=3)
>>> idx
DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_year_start
array([False, False, True])
""",
)
is_year_end = _field_accessor(
"is_year_end",
"is_year_end",
"""
Indicate whether the date is the last day of the year.
Returns
-------
Series or DatetimeIndex
The same type as the original data with boolean values. Series will
have the same name and index. DatetimeIndex will have the same
name.
See Also
--------
is_year_start : Similar property indicating the start of the year.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> dates = pd.Series(pd.date_range("2017-12-30", periods=3))
>>> dates
0 2017-12-30
1 2017-12-31
2 2018-01-01
dtype: datetime64[ns]
>>> dates.dt.is_year_end
0 False
1 True
2 False
dtype: bool
>>> idx = pd.date_range("2017-12-30", periods=3)
>>> idx
DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
dtype='datetime64[ns]', freq='D')
>>> idx.is_year_end
array([False, True, False])
""",
)
is_leap_year = _field_accessor(
"is_leap_year",
"is_leap_year",
"""
Boolean indicator if the date belongs to a leap year.
A leap year is a year, which has 366 days (instead of 365) including
29th of February as an intercalary day.
Leap years are years which are multiples of four with the exception
of years divisible by 100 but not by 400.
Returns
-------
Series or ndarray
Booleans indicating if dates belong to a leap year.
Examples
--------
This method is available on Series with datetime values under
the ``.dt`` accessor, and directly on DatetimeIndex.
>>> idx = pd.date_range("2012-01-01", "2015-01-01", freq="Y")
>>> idx
DatetimeIndex(['2012-12-31', '2013-12-31', '2014-12-31'],
dtype='datetime64[ns]', freq='A-DEC')
>>> idx.is_leap_year
array([ True, False, False])
>>> dates_series = pd.Series(idx)
>>> dates_series
0 2012-12-31
1 2013-12-31
2 2014-12-31
dtype: datetime64[ns]
>>> dates_series.dt.is_leap_year
0 True
1 False
2 False
dtype: bool
""",
)
def to_julian_date(self):
    """
    Convert Datetime Array to float64 ndarray of Julian Dates.
    0 Julian date is noon January 1, 4713 BC.
    https://en.wikipedia.org/wiki/Julian_day
    """
    # http://mysite.verizon.net/aesir_research/date/jdalg2.htm
    year = np.asarray(self.year)
    month = np.asarray(self.month)
    day = np.asarray(self.day)

    # Treat January/February as months 13/14 of the previous year so the
    # leap day (if any) falls at the end of the shifted year.
    jan_or_feb = month < 3
    year[jan_or_feb] -= 1
    month[jan_or_feb] += 12

    # Integer day count since the algorithm's epoch, offset so that
    # 1_721_118.5 aligns it with the Julian Date scale (days from noon
    # January 1, 4713 BC).
    whole_days = (
        day
        + np.fix((153 * month - 457) / 5)
        + 365 * year
        + np.floor(year / 4)
        - np.floor(year / 100)
        + np.floor(year / 400)
        + 1_721_118.5
    )
    # Fraction of a day contributed by the sub-day fields.
    day_fraction = (
        self.hour
        + self.minute / 60
        + self.second / 3600
        + self.microsecond / 3600 / 10 ** 6
        + self.nanosecond / 3600 / 10 ** 9
    ) / 24
    return whole_days + day_fraction
# -----------------------------------------------------------------
# Reductions
def std(
    self,
    axis=None,
    dtype=None,
    out=None,
    ddof: int = 1,
    keepdims: bool = False,
    skipna: bool = True,
):
    """
    Standard deviation of the timestamps, computed as the std of the
    equivalent timedeltas.

    Parameters mirror numpy's ``std`` signature plus ``skipna``; all are
    forwarded unchanged to ``TimedeltaArray.std``.
    """
    # Because std is translation-invariant, we can get self.std
    # by calculating (self - Timestamp(0)).std, and we can do it
    # without creating a copy by using a view on self._ndarray
    from pandas.core.arrays import TimedeltaArray

    tda = TimedeltaArray(self._ndarray.view("i8"))

    return tda.std(
        axis=axis, dtype=dtype, out=out, ddof=ddof, keepdims=keepdims, skipna=skipna
    )
# -------------------------------------------------------------------
# Constructor Helpers
# Typing overloads only (no runtime behavior): with allow_object=False the
# result is always a DatetimeArray; with allow_object=True it may instead be
# an object-dtype ndarray.
@overload
def sequence_to_datetimes(
    data, allow_object: Literal[False] = ..., require_iso8601: bool = ...
) -> DatetimeArray:
    ...


@overload
def sequence_to_datetimes(
    data, allow_object: Literal[True] = ..., require_iso8601: bool = ...
) -> Union[np.ndarray, DatetimeArray]:
    ...
def sequence_to_datetimes(
    data, allow_object: bool = False, require_iso8601: bool = False
) -> Union[np.ndarray, DatetimeArray]:
    """
    Parse/convert the passed data to either DatetimeArray or np.ndarray[object].
    """
    result, tz, freq = sequence_to_dt64ns(
        data,
        allow_object=allow_object,
        allow_mixed=True,
        require_iso8601=require_iso8601,
    )
    if result.dtype == object:
        # allow_object path: mixed timezones fall through as objects.
        return result

    return DatetimeArray._simple_new(result, freq=freq, dtype=tz_to_dtype(tz))
def sequence_to_dt64ns(
    data,
    dtype=None,
    copy=False,
    tz=None,
    dayfirst=False,
    yearfirst=False,
    ambiguous="raise",
    *,
    allow_object: bool = False,
    allow_mixed: bool = False,
    require_iso8601: bool = False,
):
    """
    Convert a sequence of datetime-like values to a datetime64[ns] ndarray.

    Parameters
    ----------
    data : list-like
    dtype : dtype, str, or None, default None
    copy : bool, default False
    tz : tzinfo, str, or None, default None
    dayfirst : bool, default False
    yearfirst : bool, default False
    ambiguous : str, bool, or arraylike, default 'raise'
        See pandas._libs.tslibs.tzconversion.tz_localize_to_utc.
    allow_object : bool, default False
        Whether to return an object-dtype ndarray instead of raising if the
        data contains more than one timezone.
    allow_mixed : bool, default False
        Interpret integers as timestamps when datetime objects are also present.
    require_iso8601 : bool, default False
        Only consider ISO-8601 formats when parsing strings.

    Returns
    -------
    result : numpy.ndarray
        The sequence converted to a numpy array with dtype ``datetime64[ns]``.
    tz : tzinfo or None
        Either the user-provided tzinfo or one inferred from the data.
    inferred_freq : Tick or None
        The inferred frequency of the sequence.

    Raises
    ------
    TypeError : PeriodDType data is passed
    """
    inferred_freq = None
    # Validate/normalize the user-provided dtype and tz up front.
    dtype = _validate_dt64_dtype(dtype)
    tz = timezones.maybe_get_tz(tz)
    # if dtype has an embedded tz, capture it
    tz = validate_tz_from_dtype(dtype, tz)
    if not hasattr(data, "dtype"):
        # e.g. list, tuple
        if np.ndim(data) == 0:
            # i.e. generator; materialize before np.asarray
            data = list(data)
        data = np.asarray(data)
        # np.asarray already made a fresh array, no further copy needed
        copy = False
    elif isinstance(data, ABCMultiIndex):
        raise TypeError("Cannot create a DatetimeArray from a MultiIndex.")
    else:
        data = extract_array(data, extract_numpy=True)
    if isinstance(data, IntegerArray):
        # NA-aware ints -> plain int64 with iNaT marking missing entries
        data = data.to_numpy("int64", na_value=iNaT)
    elif not isinstance(data, (np.ndarray, ExtensionArray)):
        # GH#24539 e.g. xarray, dask object
        data = np.asarray(data)
    if isinstance(data, DatetimeArray):
        inferred_freq = data.freq
    # By this point we are assured to have either a numpy array or Index
    data, copy = maybe_convert_dtype(data, copy)
    data_dtype = getattr(data, "dtype", None)
    if (
        is_object_dtype(data_dtype)
        or is_string_dtype(data_dtype)
        or is_sparse(data_dtype)
    ):
        # TODO: We do not have tests specific to string-dtypes,
        # also complex or categorical or other extension
        copy = False
        if lib.infer_dtype(data, skipna=False) == "integer":
            # All-integer object array: treat as epoch timestamps below.
            data = data.astype(np.int64)
        else:
            # data comes back here as either i8 to denote UTC timestamps
            # or M8[ns] to denote wall times
            data, inferred_tz = objects_to_datetime64ns(
                data,
                dayfirst=dayfirst,
                yearfirst=yearfirst,
                allow_object=allow_object,
                allow_mixed=allow_mixed,
                require_iso8601=require_iso8601,
            )
            if tz and inferred_tz:
                # two timezones: convert to intended from base UTC repr
                data = tzconversion.tz_convert_from_utc(data.view("i8"), tz)
                data = data.view(DT64NS_DTYPE)
            elif inferred_tz:
                tz = inferred_tz
            elif allow_object and data.dtype == object:
                # We encountered mixed-timezones.
                return data, None, None
        # Re-read the dtype: the conversions above may have changed it.
        data_dtype = data.dtype
    # `data` may have originally been a Categorical[datetime64[ns, tz]],
    # so we need to handle these types.
    if is_datetime64tz_dtype(data_dtype):
        # DatetimeArray -> ndarray
        tz = _maybe_infer_tz(tz, data.tz)
        result = data._ndarray
    elif is_datetime64_dtype(data_dtype):
        # tz-naive DatetimeArray or ndarray[datetime64]
        data = getattr(data, "_ndarray", data)
        if data.dtype != DT64NS_DTYPE:
            # e.g. datetime64[s]; ensure_datetime64ns returns a new array
            data = conversion.ensure_datetime64ns(data)
            copy = False
        if tz is not None:
            # Convert tz-naive to UTC
            tz = timezones.maybe_get_tz(tz)
            data = tzconversion.tz_localize_to_utc(
                data.view("i8"), tz, ambiguous=ambiguous
            )
            data = data.view(DT64NS_DTYPE)
        assert data.dtype == DT64NS_DTYPE, data.dtype
        result = data
    else:
        # must be integer dtype otherwise
        # assume this data are epoch timestamps
        if tz:
            tz = timezones.maybe_get_tz(tz)
        if data.dtype != INT64_DTYPE:
            data = data.astype(np.int64, copy=False)
        result = data.view(DT64NS_DTYPE)
    if copy:
        # TODO: should this be deepcopy?
        result = result.copy()
    assert isinstance(result, np.ndarray), type(result)
    assert result.dtype == "M8[ns]", result.dtype
    # We have to call this again after possibly inferring a tz above
    validate_tz_from_dtype(dtype, tz)
    return result, tz, inferred_freq
def objects_to_datetime64ns(
    data: np.ndarray,
    dayfirst,
    yearfirst,
    utc=False,
    errors="raise",
    require_iso8601: bool = False,
    allow_object: bool = False,
    allow_mixed: bool = False,
):
    """
    Convert data to array of timestamps.

    Parameters
    ----------
    data : np.ndarray[object]
    dayfirst : bool
    yearfirst : bool
    utc : bool, default False
        Whether to convert timezone-aware timestamps to UTC.
    errors : {'raise', 'ignore', 'coerce'}
    require_iso8601 : bool, default False
    allow_object : bool
        Whether to return an object-dtype ndarray instead of raising if the
        data contains more than one timezone.
    allow_mixed : bool, default False
        Interpret integers as timestamps when datetime objects are also present.

    Returns
    -------
    result : ndarray
        np.int64 dtype if returned values represent UTC timestamps
        np.datetime64[ns] if returned values represent wall times
        object if mixed timezones
    inferred_tz : tzinfo or None

    Raises
    ------
    ValueError : if data cannot be converted to datetimes
    """
    assert errors in ["raise", "ignore", "coerce"]
    # if str-dtype, convert
    data = np.array(data, copy=False, dtype=np.object_)
    # Remember memory order so the reshape after ravel("K") restores it.
    flags = data.flags
    order = "F" if flags.f_contiguous else "C"
    try:
        result, tz_parsed = tslib.array_to_datetime(
            data.ravel("K"),
            errors=errors,
            utc=utc,
            dayfirst=dayfirst,
            yearfirst=yearfirst,
            require_iso8601=require_iso8601,
            allow_mixed=allow_mixed,
        )
        result = result.reshape(data.shape, order=order)
    except ValueError as err:
        # Fallback path for values array_to_datetime rejects.
        try:
            values, tz_parsed = conversion.datetime_to_datetime64(data.ravel("K"))
            # If tzaware, these values represent unix timestamps, so we
            # return them as i8 to distinguish from wall times
            values = values.reshape(data.shape, order=order)
            return values.view("i8"), tz_parsed
        except (ValueError, TypeError):
            # Fallback also failed: re-raise the original, more useful error.
            raise err
    if tz_parsed is not None:
        # We can take a shortcut since the datetime64 numpy array
        # is in UTC
        # Return i8 values to denote unix timestamps
        return result.view("i8"), tz_parsed
    elif is_datetime64_dtype(result):
        # returning M8[ns] denotes wall-times; since tz is None
        # the distinction is a thin one
        return result, tz_parsed
    elif is_object_dtype(result):
        # GH#23675 when called via `pd.to_datetime`, returning an object-dtype
        # array is allowed. When called via `pd.DatetimeIndex`, we can
        # only accept datetime64 dtype, so raise TypeError if object-dtype
        # is returned, as that indicates the values can be recognized as
        # datetimes but they have conflicting timezones/awareness
        if allow_object:
            return result, tz_parsed
        raise TypeError(result)
    else: # pragma: no cover
        # GH#23675 this TypeError should never be hit, whereas the TypeError
        # in the object-dtype branch above is reachable.
        raise TypeError(result)
def maybe_convert_dtype(data, copy: bool):
    """
    Convert data based on dtype conventions, issuing deprecation warnings
    or errors where appropriate.

    Parameters
    ----------
    data : np.ndarray or pd.Index
    copy : bool

    Returns
    -------
    data : np.ndarray or pd.Index
    copy : bool

    Raises
    ------
    TypeError : PeriodDType data is passed
    """
    if not hasattr(data, "dtype"):
        # e.g. collections.deque
        return data, copy
    if is_float_dtype(data.dtype):
        # Note: we must cast to datetime64[ns] here in order to treat these
        # as wall-times instead of UTC timestamps.
        data = data.astype(DT64NS_DTYPE)
        copy = False
        # TODO: deprecate this behavior to instead treat symmetrically
        # with integer dtypes. See discussion in GH#23675
    elif is_timedelta64_dtype(data.dtype) or is_bool_dtype(data.dtype):
        # GH#29794 enforcing deprecation introduced in GH#23539
        raise TypeError(f"dtype {data.dtype} cannot be converted to datetime64[ns]")
    elif is_period_dtype(data.dtype):
        # Note: without explicitly raising here, PeriodIndex
        # test_setops.test_join_does_not_recur fails
        raise TypeError(
            "Passing PeriodDtype data is invalid. Use `data.to_timestamp()` instead"
        )
    elif is_categorical_dtype(data.dtype):
        # GH#18664 preserve tz in going DTI->Categorical->DTI
        # TODO: cases where we need to do another pass through this func,
        # e.g. the categories are timedelta64s
        data = data.categories.take(data.codes, fill_value=NaT)._values
        copy = False
    elif is_extension_array_dtype(data.dtype) and not is_datetime64tz_dtype(data.dtype):
        # Other extension arrays (Categorical was handled in the branch
        # above); fall back to an object ndarray.
        # TODO: We have no tests for these
        data = np.array(data, dtype=np.object_)
        copy = False
    return data, copy
# -------------------------------------------------------------------
# Validation and Inference
def _maybe_infer_tz(
    tz: Optional[tzinfo], inferred_tz: Optional[tzinfo]
) -> Optional[tzinfo]:
    """
    Reconcile a user-provided timezone with one inferred from the data.

    Parameters
    ----------
    tz : tzinfo or None
    inferred_tz : tzinfo or None

    Returns
    -------
    tz : tzinfo or None

    Raises
    ------
    TypeError : if both timezones are present but do not match
    """
    if tz is None:
        return inferred_tz
    if inferred_tz is not None and not timezones.tz_compare(tz, inferred_tz):
        raise TypeError(
            f"data is already tz-aware {inferred_tz}, unable to "
            f"set specified tz: {tz}"
        )
    return tz
def _validate_dt64_dtype(dtype):
    """
    Check that a dtype, if passed, represents either a numpy datetime64[ns]
    dtype or a pandas DatetimeTZDtype.

    Parameters
    ----------
    dtype : object

    Returns
    -------
    dtype : None, numpy.dtype, or DatetimeTZDtype

    Raises
    ------
    ValueError : invalid dtype

    Notes
    -----
    Unlike validate_tz_from_dtype, this does _not_ allow non-existent
    tz errors to go through
    """
    if dtype is None:
        return None
    dtype = pandas_dtype(dtype)
    if is_dtype_equal(dtype, np.dtype("M8")):
        # no precision, disallowed GH#24806
        raise ValueError(
            "Passing in 'datetime64' dtype with no precision is not allowed. "
            "Please pass in 'datetime64[ns]' instead."
        )
    is_valid = isinstance(dtype, DatetimeTZDtype) or (
        isinstance(dtype, np.dtype) and dtype == DT64NS_DTYPE
    )
    if not is_valid:
        raise ValueError(
            f"Unexpected value for 'dtype': '{dtype}'. "
            "Must be 'datetime64[ns]' or DatetimeTZDtype'."
        )
    return dtype
def validate_tz_from_dtype(dtype, tz: Optional[tzinfo]) -> Optional[tzinfo]:
    """
    If the given dtype is a DatetimeTZDtype, extract the implied
    tzinfo object from it and check that it does not conflict with the given
    tz.

    Parameters
    ----------
    dtype : dtype, str
    tz : None, tzinfo

    Returns
    -------
    tz : consensus tzinfo

    Raises
    ------
    ValueError : on tzinfo mismatch
    """
    if dtype is None:
        return tz
    if isinstance(dtype, str):
        try:
            dtype = DatetimeTZDtype.construct_from_string(dtype)
        except TypeError:
            # Strings like `datetime64[ns]` are OK for the constructors but
            # carry no tz; leave validation to downstream code. Non-existent
            # tz errors are deliberately allowed to propagate.
            pass
    dtz = getattr(dtype, "tz", None)
    if dtz is not None:
        if tz is not None and not timezones.tz_compare(tz, dtz):
            raise ValueError("cannot supply both a tz and a dtype with a tz")
        tz = dtz
    if tz is not None and is_datetime64_dtype(dtype):
        # The user passed a tz together with a tz-naive dtype
        # (i.e. datetime64[ns]); dtz is None here, so this always raises.
        if not timezones.tz_compare(tz, dtz):
            raise ValueError(
                "cannot supply both a tz and a "
                "timezone-naive dtype (i.e. datetime64[ns])"
            )
    return tz
def _infer_tz_from_endpoints(
    start: Timestamp, end: Timestamp, tz: Optional[tzinfo]
) -> Optional[tzinfo]:
    """
    If a timezone is not explicitly given via `tz`, see if one can
    be inferred from the `start` and `end` endpoints. If more than one
    of these inputs provides a timezone, require that they all agree.

    Parameters
    ----------
    start : Timestamp
    end : Timestamp
    tz : tzinfo or None

    Returns
    -------
    tz : tzinfo or None

    Raises
    ------
    TypeError : if start and end timezones do not agree
    """
    try:
        inferred_tz = timezones.infer_tzinfo(start, end)
    except AssertionError as err:
        # infer_tzinfo raises AssertionError when the endpoints disagree
        raise TypeError(
            "Start and end cannot both be tz-aware with different timezones"
        ) from err
    inferred_tz = timezones.maybe_get_tz(inferred_tz)
    tz = timezones.maybe_get_tz(tz)
    if inferred_tz is None:
        return tz
    if tz is None:
        return inferred_tz
    if not timezones.tz_compare(inferred_tz, tz):
        raise AssertionError("Inferred time zone not equal to passed time zone")
    return tz
def _maybe_normalize_endpoints(
    start: Optional[Timestamp], end: Optional[Timestamp], normalize: bool
):
    """
    Midnight-align range endpoints when `normalize` is requested.

    Returns the (possibly normalized) start and end, plus a flag that is True
    when every provided endpoint sits exactly at midnight.
    """
    normalized = True
    if start is not None:
        if normalize:
            start = start.normalize()
        else:
            normalized = start.time() == _midnight
    if end is not None:
        if normalize:
            end = end.normalize()
        else:
            normalized = normalized and end.time() == _midnight
    return start, end, normalized
def _maybe_localize_point(ts, is_none, is_not_none, freq, tz, ambiguous, nonexistent):
    """
    Localize a start or end Timestamp to the timezone of the corresponding
    start or end Timestamp

    Parameters
    ----------
    ts : start or end Timestamp to potentially localize
    is_none : argument that should be None
    is_not_none : argument that should not be None
    freq : Tick, DateOffset, or None
    tz : str, timezone object or None
    ambiguous: str, localization behavior for ambiguous times
    nonexistent: str, localization behavior for nonexistent times

    Returns
    -------
    ts : Timestamp
    """
    # Make sure start and end are timezone localized if:
    # 1) freq = a Timedelta-like frequency (Tick)
    # 2) freq = None i.e. generating a linspaced range
    if not (is_none is None and is_not_none is not None):
        return ts
    # Note: We can't ambiguous='infer' a singular ambiguous time; however,
    # we have historically defaulted ambiguous=False
    resolved_ambiguous = False if ambiguous == "infer" else ambiguous
    target_tz = tz if (freq is None or isinstance(freq, Tick)) else None
    return ts.tz_localize(
        target_tz, ambiguous=resolved_ambiguous, nonexistent=nonexistent
    )
def generate_range(start=None, end=None, periods=None, offset=BDay()):
    """
    Generates a sequence of dates corresponding to the specified time
    offset. Similar to dateutil.rrule except uses pandas DateOffset
    objects to represent time increments.

    Parameters
    ----------
    start : datetime, (default None)
    end : datetime, (default None)
    periods : int, (default None)
    offset : DateOffset, (default BDay())

    Notes
    -----
    * This method is faster for generating weekdays than dateutil.rrule
    * At least two of (start, end, periods) must be specified.
    * If both start and end are specified, the returned dates will
    satisfy start <= date <= end.

    Returns
    -------
    dates : generator object
    """
    offset = to_offset(offset)
    # Timestamp(None) gives NaT; treat NaT as "endpoint not provided".
    start = Timestamp(start)
    start = start if start is not NaT else None
    end = Timestamp(end)
    end = end if end is not NaT else None
    # Snap the endpoints onto the offset.
    # NOTE(review): because of the `elif`, end is NOT rolled back when start
    # was rolled forward — confirm this asymmetry is intended.
    if start and not offset.is_on_offset(start):
        start = offset.rollforward(start)
    elif end and not offset.is_on_offset(end):
        end = offset.rollback(end)
    # Forward-stepping offset with end before start: empty range.
    if periods is None and end < start and offset.n >= 0:
        end = None
        periods = 0
    # Derive whichever endpoint is missing from periods.
    if end is None:
        end = start + (periods - 1) * offset
    if start is None:
        start = end - (periods - 1) * offset
    cur = start
    if offset.n >= 0:
        # Walk forward, yielding each on-offset date up to and including end.
        while cur <= end:
            yield cur
            if cur == end:
                # GH#24252 avoid overflows by not performing the addition
                # in offset.apply unless we have to
                break
            # faster than cur + offset
            next_date = offset.apply(cur)
            if next_date <= cur:
                raise ValueError(f"Offset {offset} did not increment date")
            cur = next_date
    else:
        # Negative offset: walk backward from start down to end.
        while cur >= end:
            yield cur
            if cur == end:
                # GH#24252 avoid overflows by not performing the addition
                # in offset.apply unless we have to
                break
            # faster than cur + offset
            next_date = offset.apply(cur)
            if next_date >= cur:
                raise ValueError(f"Offset {offset} did not decrement date")
            cur = next_date
| 32.035952
| 88
| 0.564211
|
4a032cda18c46cac59eedbd9061814e94020587e
| 220
|
py
|
Python
|
enci/f2g/doctype/furniture_to_go_product_bullet_points/furniture_to_go_product_bullet_points.py
|
artykbasar/enci
|
e65ed17ff3414f04db54ee53b83ddcd3808811d2
|
[
"MIT"
] | null | null | null |
enci/f2g/doctype/furniture_to_go_product_bullet_points/furniture_to_go_product_bullet_points.py
|
artykbasar/enci
|
e65ed17ff3414f04db54ee53b83ddcd3808811d2
|
[
"MIT"
] | null | null | null |
enci/f2g/doctype/furniture_to_go_product_bullet_points/furniture_to_go_product_bullet_points.py
|
artykbasar/enci
|
e65ed17ff3414f04db54ee53b83ddcd3808811d2
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021, Artyk Basarov and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class FurnitureToGoProductBulletPoints(Document):
    """Controller for the 'Furniture To Go Product Bullet Points' DocType.

    No custom server-side behavior; everything is inherited from frappe's
    Document base class.
    """
    pass
| 24.444444
| 52
| 0.813636
|
4a032f3af30d17334a77c06756682c5207c86f82
| 1,386
|
py
|
Python
|
plenum/server/quorums.py
|
jandayanan/indy-plenum
|
2815e994404c77ad87eddcfd09062d5fe6efc1c5
|
[
"Apache-2.0"
] | 148
|
2017-07-11T19:05:25.000Z
|
2022-03-16T21:31:20.000Z
|
plenum/server/quorums.py
|
jandayanan/indy-plenum
|
2815e994404c77ad87eddcfd09062d5fe6efc1c5
|
[
"Apache-2.0"
] | 561
|
2017-06-29T17:59:56.000Z
|
2022-03-09T15:47:14.000Z
|
plenum/server/quorums.py
|
jandayanan/indy-plenum
|
2815e994404c77ad87eddcfd09062d5fe6efc1c5
|
[
"Apache-2.0"
] | 378
|
2017-06-29T17:45:27.000Z
|
2022-03-26T07:27:59.000Z
|
from plenum.common.util import getMaxFailures
class Quorum:
    """A minimum number of messages that must be collected."""

    def __init__(self, value: int):
        self.value = value

    def is_reached(self, msg_count: int) -> bool:
        """Return True once msg_count meets or exceeds the quorum value."""
        return self.value <= msg_count

    def __repr__(self):
        return f"{type(self).__name__}({self.value!r})"
class Quorums:
    """Quorum sizes for each protocol message type in a pool of `n` nodes.

    With f = getMaxFailures(n) tolerated faulty nodes, a weak quorum (f + 1)
    guarantees at least one honest sender among the responders, while a
    strong quorum (n - f) is the largest count that can always be collected
    despite f silent nodes.
    """

    def __init__(self, n):
        f = getMaxFailures(n)
        self.n = n
        self.f = f
        self.weak = Quorum(f + 1)
        self.strong = Quorum(n - f)
        self.propagate = Quorum(f + 1)
        self.prepare = Quorum(n - f - 1)
        self.commit = Quorum(n - f)
        self.reply = Quorum(f + 1)
        # Fix: `view_change` was previously assigned twice with the same
        # value; the redundant assignment has been removed.
        self.view_change = Quorum(n - f)
        self.election = Quorum(n - f)
        self.view_change_ack = Quorum(n - f - 1)
        self.view_change_done = Quorum(n - f)
        self.same_consistency_proof = Quorum(f + 1)
        self.consistency_proof = Quorum(f + 1)
        self.ledger_status = Quorum(n - f - 1)
        self.ledger_status_last_3PC = Quorum(f + 1)
        self.checkpoint = Quorum(n - f - 1)
        self.timestamp = Quorum(f + 1)
        self.bls_signatures = Quorum(n - f)
        self.observer_data = Quorum(f + 1)
        self.backup_instance_faulty = Quorum(f + 1)

    def __str__(self):
        # TODO more robust implementation
        return "{}".format(self.__dict__)
| 31.5
| 69
| 0.583694
|
4a032fc00436361a1665a2a9902862a6256a5d5f
| 3,276
|
py
|
Python
|
Lib/distutils/tests/test_config.py
|
hashiqizaizai/hashiqizaizai.github.io
|
7217400802f6b944dfd1e29d4b00d268957ff769
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/distutils/tests/test_config.py
|
hashiqizaizai/hashiqizaizai.github.io
|
7217400802f6b944dfd1e29d4b00d268957ff769
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/distutils/tests/test_config.py
|
hashiqizaizai/hashiqizaizai.github.io
|
7217400802f6b944dfd1e29d4b00d268957ff769
|
[
"bzip2-1.0.6"
] | null | null | null |
"""Tests for distutils.pypirc.pypirc."""
import sys
import os
import unittest
import tempfile
import shutil
from distutils.core import PyPIRCCommand
from distutils.core import Distribution
from distutils.log import set_threshold
from distutils.log import WARN
from distutils.tests import support
PYPIRC = """\
[distutils]
index-servers =
server1
server2
[server1]
username:me
password:secret
[server2]
username:meagain
password: secret
realm:acme
repository:http://another.pypi/
"""
PYPIRC_OLD = """\
[server-login]
username:tarek
password:secret
"""
WANTED = """\
[distutils]
index-servers =
pypi
[pypi]
username:tarek
password:xxx
"""
class PyPIRCCommandTestCase(support.TempdirManager,
                            support.LoggingSilencer,
                            support.EnvironGuard,
                            unittest.TestCase):
    """Tests for PyPIRCCommand reading and writing of ~/.pypirc files.

    NOTE(review): this is Python-2-era code — test_server_registration
    calls .sort() on dict.items(), which requires items() to return a list.
    """
    def setUp(self):
        """Patches the environment."""
        super(PyPIRCCommandTestCase, self).setUp()
        # Point HOME at a temp dir so _get_rc_file resolves inside it.
        self.tmp_dir = self.mkdtemp()
        os.environ['HOME'] = self.tmp_dir
        self.rc = os.path.join(self.tmp_dir, '.pypirc')
        self.dist = Distribution()
        # Minimal concrete PyPIRCCommand subclass for exercising the base
        # class's .pypirc helpers.
        class command(PyPIRCCommand):
            def __init__(self, dist):
                PyPIRCCommand.__init__(self, dist)
            def initialize_options(self):
                pass
            finalize_options = initialize_options
        self._cmd = command
        # Silence sub-WARN logging for the duration of each test.
        self.old_threshold = set_threshold(WARN)
    def tearDown(self):
        """Removes the patch."""
        set_threshold(self.old_threshold)
        super(PyPIRCCommandTestCase, self).tearDown()
    def test_server_registration(self):
        # This test makes sure PyPIRCCommand knows how to:
        # 1. handle several sections in .pypirc
        # 2. handle the old format
        # new format
        self.write_file(self.rc, PYPIRC)
        cmd = self._cmd(self.dist)
        config = cmd._read_pypirc()
        # sort the (key, value) pairs for a deterministic comparison
        config = config.items()
        config.sort()
        waited = [('password', 'secret'), ('realm', 'pypi'),
                  ('repository', 'http://pypi.python.org/pypi'),
                  ('server', 'server1'), ('username', 'me')]
        self.assertEqual(config, waited)
        # old format
        self.write_file(self.rc, PYPIRC_OLD)
        config = cmd._read_pypirc()
        config = config.items()
        config.sort()
        waited = [('password', 'secret'), ('realm', 'pypi'),
                  ('repository', 'http://pypi.python.org/pypi'),
                  ('server', 'server-login'), ('username', 'tarek')]
        self.assertEqual(config, waited)
    def test_server_empty_registration(self):
        # _store_pypirc should create the file when it does not exist yet.
        cmd = self._cmd(self.dist)
        rc = cmd._get_rc_file()
        self.assertTrue(not os.path.exists(rc))
        cmd._store_pypirc('tarek', 'xxx')
        self.assertTrue(os.path.exists(rc))
        f = open(rc)
        try:
            content = f.read()
            self.assertEqual(content, WANTED)
        finally:
            f.close()
def test_suite():
    """Return the TestSuite for this module.

    Uses TestLoader.loadTestsFromTestCase instead of the deprecated
    unittest.makeSuite (removed in Python 3.13); the resulting suite is
    identical.
    """
    return unittest.defaultTestLoader.loadTestsFromTestCase(PyPIRCCommandTestCase)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
| 26.634146
| 69
| 0.584554
|
4a0331795867bf7c9e53c1b0026b5a3effdd60c1
| 399
|
py
|
Python
|
Beecrowd/Python/1074.py
|
felipemsalles/Programming-Studies
|
63100fb22a165c4582b10a95d5a583f9bc1e990f
|
[
"MIT"
] | null | null | null |
Beecrowd/Python/1074.py
|
felipemsalles/Programming-Studies
|
63100fb22a165c4582b10a95d5a583f9bc1e990f
|
[
"MIT"
] | null | null | null |
Beecrowd/Python/1074.py
|
felipemsalles/Programming-Studies
|
63100fb22a165c4582b10a95d5a583f9bc1e990f
|
[
"MIT"
] | null | null | null |
def classify(value):
    """Return the beecrowd/URI 1074 label for one integer.

    0 -> 'NULL'; otherwise '<EVEN|ODD> <POSITIVE|NEGATIVE>'. Python's modulo
    keeps e.g. -3 % 2 == 1, so parity is classified correctly for negatives.
    """
    if value == 0:
        return 'NULL'
    parity = 'EVEN' if value % 2 == 0 else 'ODD'
    sign = 'POSITIVE' if value > 0 else 'NEGATIVE'
    return f'{parity} {sign}'


if __name__ == '__main__':
    # Read the count, then classify each number; the printed output matches
    # the original read-all-then-print version line for line.
    n = int(input())
    for _ in range(n):
        print(classify(int(input())))
| 19.95
| 34
| 0.408521
|
4a0332ed3e174fca24947c1d21e84ff9e8956d07
| 8,642
|
py
|
Python
|
keras/layers/preprocessing/category_encoding.py
|
zhjunqin/keras
|
f5171d521acbf2ebbb6414352d5792163c41479f
|
[
"Apache-2.0"
] | 1
|
2022-03-01T20:20:12.000Z
|
2022-03-01T20:20:12.000Z
|
keras/layers/preprocessing/category_encoding.py
|
sairamadithya/keras
|
42bf9972492f47c3d3c249de9c20942ba217937d
|
[
"Apache-2.0"
] | null | null | null |
keras/layers/preprocessing/category_encoding.py
|
sairamadithya/keras
|
42bf9972492f47c3d3c249de9c20942ba217937d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras CategoryEncoding preprocessing layer."""
# pylint: disable=g-classes-have-attributes
# pylint: disable=g-direct-tensorflow-import
from keras import backend
from keras.engine import base_layer
from keras.engine import base_preprocessing_layer
from keras.layers.preprocessing import preprocessing_utils as utils
from keras.utils import layer_utils
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
# Module-level aliases for the output_mode string constants defined in the
# shared preprocessing utils.
INT = utils.INT
ONE_HOT = utils.ONE_HOT
MULTI_HOT = utils.MULTI_HOT
COUNT = utils.COUNT
@keras_export("keras.layers.CategoryEncoding",
              "keras.layers.experimental.preprocessing.CategoryEncoding")
class CategoryEncoding(base_layer.Layer):
  """A preprocessing layer which encodes integer features.

  This layer provides options for condensing data into a categorical encoding
  when the total number of tokens are known in advance. It accepts integer
  values as inputs, and it outputs a dense or sparse representation of those
  inputs. For integer inputs where the total number of tokens is not known, use
  `tf.keras.layers.IntegerLookup` instead.

  For an overview and full list of preprocessing layers, see the preprocessing
  [guide](https://www.tensorflow.org/guide/keras/preprocessing_layers).

  Examples:

  **One-hot encoding data**

  >>> layer = tf.keras.layers.CategoryEncoding(
  ...           num_tokens=4, output_mode="one_hot")
  >>> layer([3, 2, 0, 1])
  <tf.Tensor: shape=(4, 4), dtype=float32, numpy=
    array([[0., 0., 0., 1.],
           [0., 0., 1., 0.],
           [1., 0., 0., 0.],
           [0., 1., 0., 0.]], dtype=float32)>

  **Multi-hot encoding data**

  >>> layer = tf.keras.layers.CategoryEncoding(
  ...           num_tokens=4, output_mode="multi_hot")
  >>> layer([[0, 1], [0, 0], [1, 2], [3, 1]])
  <tf.Tensor: shape=(4, 4), dtype=float32, numpy=
    array([[1., 1., 0., 0.],
           [1., 0., 0., 0.],
           [0., 1., 1., 0.],
           [0., 1., 0., 1.]], dtype=float32)>

  **Using weighted inputs in `"count"` mode**

  >>> layer = tf.keras.layers.CategoryEncoding(
  ...           num_tokens=4, output_mode="count")
  >>> count_weights = np.array([[.1, .2], [.1, .1], [.2, .3], [.4, .2]])
  >>> layer([[0, 1], [0, 0], [1, 2], [3, 1]], count_weights=count_weights)
  <tf.Tensor: shape=(4, 4), dtype=float64, numpy=
    array([[0.1, 0.2, 0. , 0. ],
           [0.2, 0. , 0. , 0. ],
           [0. , 0.2, 0.3, 0. ],
           [0. , 0.2, 0. , 0.4]])>

  Args:
    num_tokens: The total number of tokens the layer should support. All inputs
      to the layer must be integers in the range `0 <= value < num_tokens`, or
      an error will be thrown.
    output_mode: Specification for the output of the layer.
      Defaults to `"multi_hot"`. Values can be `"one_hot"`, `"multi_hot"` or
      `"count"`, configuring the layer as follows:
        - `"one_hot"`: Encodes each individual element in the input into an
          array of `num_tokens` size, containing a 1 at the element index. If
          the last dimension is size 1, will encode on that dimension. If the
          last dimension is not size 1, will append a new dimension for the
          encoded output.
        - `"multi_hot"`: Encodes each sample in the input into a single array
          of `num_tokens` size, containing a 1 for each vocabulary term present
          in the sample. Treats the last dimension as the sample dimension, if
          input shape is `(..., sample_length)`, output shape will be
          `(..., num_tokens)`.
        - `"count"`: Like `"multi_hot"`, but the int array contains a count of
          the number of times the token at that index appeared in the sample.
      For all output modes, currently only output up to rank 2 is supported.
    sparse: Boolean. If true, returns a `SparseTensor` instead of a dense
      `Tensor`. Defaults to `False`.

  Call arguments:
    inputs: A 1D or 2D tensor of integer inputs.
    count_weights: A tensor in the same shape as `inputs` indicating the
      weight for each sample value when summing up in `count` mode. Not used in
      `"multi_hot"` or `"one_hot"` modes.
  """

  def __init__(self,
               num_tokens=None,
               output_mode="multi_hot",
               sparse=False,
               **kwargs):
    # max_tokens is an old name for the num_tokens arg we continue to support
    # because of usage.
    if "max_tokens" in kwargs:
      logging.warning(
          "max_tokens is deprecated, please use num_tokens instead.")
      num_tokens = kwargs["max_tokens"]
      del kwargs["max_tokens"]
    # By default, output floats. This is already default for TF2, but in TF1
    # dtype is inferred from inputs, and would default to int.
    if "dtype" not in kwargs:
      kwargs["dtype"] = backend.floatx()
    super(CategoryEncoding, self).__init__(**kwargs)
    # Usage-tracking gauge for this preprocessing layer.
    base_preprocessing_layer.keras_kpl_gauge.get_cell("CategoryEncoding").set(
        True)
    # Support deprecated names for output_modes.
    if output_mode == "binary":
      output_mode = MULTI_HOT
    # 'output_mode' must be one of (COUNT, ONE_HOT, MULTI_HOT)
    layer_utils.validate_string_arg(
        output_mode,
        allowable_strings=(COUNT, ONE_HOT, MULTI_HOT),
        layer_name="CategoryEncoding",
        arg_name="output_mode")
    if num_tokens is None:
      raise ValueError("num_tokens must be set to use this layer. If the "
                       "number of tokens is not known beforehand, use the "
                       "IntegerLookup layer instead.")
    if num_tokens < 1:
      raise ValueError(
          f"`num_tokens` must be >= 1. Received: num_tokens={num_tokens}.")
    self.num_tokens = num_tokens
    self.output_mode = output_mode
    self.sparse = sparse

  def compute_output_shape(self, input_shape):
    """Output is input_shape with the last axis mapped to num_tokens.

    For one_hot with last dimension != 1, a new num_tokens axis is appended
    instead of replacing the last one.
    """
    if not input_shape:
      return tf.TensorShape([self.num_tokens])
    if self.output_mode == ONE_HOT and input_shape[-1] != 1:
      return tf.TensorShape(input_shape + [self.num_tokens])
    else:
      return tf.TensorShape(input_shape[:-1] + [self.num_tokens])

  def compute_output_signature(self, input_spec):
    output_shape = self.compute_output_shape(input_spec.shape.as_list())
    # NOTE(review): the spec advertises int64 while call() emits
    # self.compute_dtype (floatx by default) — confirm this mismatch.
    if self.sparse:
      return tf.SparseTensorSpec(
          shape=output_shape, dtype=tf.int64)
    else:
      return tf.TensorSpec(shape=output_shape, dtype=tf.int64)

  def get_config(self):
    # Serialize constructor arguments on top of the base layer config.
    config = {
        "num_tokens": self.num_tokens,
        "output_mode": self.output_mode,
        "sparse": self.sparse,
    }
    base_config = super(CategoryEncoding, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  def call(self, inputs, count_weights=None):
    if isinstance(inputs, (list, np.ndarray)):
      inputs = tf.convert_to_tensor(inputs)
    if count_weights is not None and self.output_mode != COUNT:
      raise ValueError(
          "`count_weights` is not used when `output_mode` is not `'count'`. "
          "Received `count_weights={}`.".format(count_weights))
    depth = self.num_tokens
    # Compute the value range so out-of-vocabulary indices can be rejected.
    if isinstance(inputs, tf.SparseTensor):
      max_value = tf.reduce_max(inputs.values)
      min_value = tf.reduce_min(inputs.values)
    else:
      max_value = tf.reduce_max(inputs)
      min_value = tf.reduce_min(inputs)
    condition = tf.logical_and(
        tf.greater(tf.cast(depth, max_value.dtype), max_value),
        tf.greater_equal(min_value, tf.cast(0, min_value.dtype)))
    assertion = tf.Assert(condition, [
        "Input values must be in the range 0 <= values < num_tokens"
        " with num_tokens={}".format(depth)
    ])
    # Only encode once the range assertion has executed.
    with tf.control_dependencies([assertion]):
      return utils.encode_categorical_inputs(
          inputs,
          output_mode=self.output_mode,
          depth=depth,
          dtype=self.compute_dtype,
          sparse=self.sparse,
          count_weights=count_weights)
| 40.009259
| 80
| 0.655288
|
4a0334a3d8e6fa8559ec7e11b3fbc903c10cf3cb
| 3,418
|
py
|
Python
|
node_modules/dropbox/generator/stone/stone/backends/swift_helpers.py
|
dropboxdssd/dropbox2
|
48158eca6747287bdc9012bff9c1b985483a4370
|
[
"MIT"
] | 1
|
2021-12-13T02:17:05.000Z
|
2021-12-13T02:17:05.000Z
|
node_modules/dropbox/generator/stone/stone/backends/swift_helpers.py
|
dropboxdssd/dropbox2
|
48158eca6747287bdc9012bff9c1b985483a4370
|
[
"MIT"
] | 2
|
2021-04-16T20:39:33.000Z
|
2021-08-04T03:11:54.000Z
|
node_modules/dropbox/generator/stone/stone/backends/swift_helpers.py
|
dropboxdssd/dropbox2
|
48158eca6747287bdc9012bff9c1b985483a4370
|
[
"MIT"
] | 1
|
2020-11-04T06:01:11.000Z
|
2020-11-04T06:01:11.000Z
|
from __future__ import absolute_import, division, print_function, unicode_literals
import pprint
from stone.ir import (
Boolean,
Bytes,
Float32,
Float64,
Int32,
Int64,
List,
String,
Timestamp,
UInt32,
UInt64,
Void,
is_boolean_type,
is_list_type,
is_numeric_type,
is_string_type,
is_tag_ref,
is_user_defined_type,
unwrap_nullable,
)
from .helpers import split_words
# This file defines *stylistic* choices for Swift
# (ie, that class names are UpperCamelCase and that variables are lowerCamelCase)
# Maps stone primitive type classes to the Swift type names the generator
# emits for them.
_type_table = {
    Boolean: 'Bool',
    Bytes: 'Data',
    Float32: 'Float',
    Float64: 'Double',
    Int32: 'Int32',
    Int64: 'Int64',
    List: 'Array',
    String: 'String',
    Timestamp: 'Date',
    UInt32: 'UInt32',
    UInt64: 'UInt64',
    Void: 'Void',
}
# Identifiers that generated Swift code must not use bare: Swift keywords
# plus the (lowercased) type names emitted by the generator. Names matching
# this set get a trailing '_' appended (see _format_camelcase).
_reserved_words = {
    'description',
    'bool',
    # Fix: a missing comma previously fused 'nsdata' and 'float' into the
    # single string 'nsdatafloat', so neither word was actually reserved.
    'nsdata',
    'float',
    'double',
    'int32',
    'int64',
    'list',
    'string',
    'timestamp',
    'uint32',
    'uint64',
    'void',
    'associatedtype',
    'class',
    'deinit',
    'enum',
    'extension',
    'func',
    'import',
    'init',
    'inout',
    'internal',
    'let',
    'operator',
    'private',
    'protocol',
    'public',
    'static',
    'struct',
    'subscript',
    'typealias',
    'var',
    'default',
}
def fmt_obj(o):
    """Return the Swift source literal for a base-type Python value.

    True/False/None map to true/false/nil; anything else is rendered
    with pprint. Dicts are explicitly rejected.
    """
    assert not isinstance(o, dict), "Only use for base type literals"
    # Identity comparison on purpose: 1/0 must not be treated as booleans.
    for singleton, swift_literal in ((True, 'true'), (False, 'false'), (None, 'nil')):
        if o is singleton:
            return swift_literal
    return pprint.pformat(o, width=1)
def _format_camelcase(name, lower_first=True):
    """Camel-case *name*: lowerCamelCase by default, UpperCamelCase otherwise.

    Appends '_' when the result collides with a Swift reserved word.
    """
    pieces = [piece.capitalize() for piece in split_words(name)]
    if lower_first:
        pieces[0] = pieces[0].lower()
    formatted = ''.join(pieces)
    # Dodge Swift keywords / primitive type names by suffixing an underscore.
    return formatted + '_' if formatted.lower() in _reserved_words else formatted
def fmt_class(name):
    """Format *name* as a Swift class name (UpperCamelCase, keyword-safe)."""
    return _format_camelcase(name, lower_first=False)
def fmt_func(name):
    """Format *name* as a Swift function name (lowerCamelCase, keyword-safe)."""
    return _format_camelcase(name)
def fmt_type(data_type):
    """Return the Swift type spelling for a Stone IR *data_type*.

    User-defined types become Namespace.TypeName; primitives come from
    _type_table; lists become Array<Element>. A trailing '?' marks a
    nullable type.
    """
    inner, nullable = unwrap_nullable(data_type)
    if is_user_defined_type(inner):
        swift_name = '{}.{}'.format(fmt_class(inner.namespace.name),
                                    fmt_class(inner.name))
    else:
        swift_name = _type_table.get(inner.__class__, fmt_class(inner.name))
        if is_list_type(inner):
            # Recurse for the element type of Array<...>.
            swift_name += '<{}>'.format(fmt_type(inner.data_type))
    if nullable:
        swift_name += '?'
    return swift_name
def fmt_var(name):
    """Format *name* as a Swift variable name (lowerCamelCase, keyword-safe)."""
    return _format_camelcase(name)
def fmt_default_value(namespace, field):
    """Render ``field.default`` as a Swift serialization expression.

    Raises:
        TypeError: when the field's data type has no default rendering.
    """
    default = field.default
    data_type = field.data_type
    if is_tag_ref(default):
        # A union tag default serializes through the union's serializer.
        return '{}.{}Serializer().serialize(.{})'.format(
            fmt_class(namespace.name),
            fmt_class(default.union_data_type.name),
            fmt_var(default.tag_name))
    if is_list_type(data_type):
        return '.array({})'.format(default)
    if is_numeric_type(data_type):
        return '.number({})'.format(default)
    if is_string_type(data_type):
        return '.str({})'.format(default)
    if is_boolean_type(data_type):
        # Booleans are encoded numerically as 1 or 0.
        return '.number({})'.format('1' if default else '0')
    raise TypeError("Can't handle default value type %r" % type(data_type))
| 22.194805
| 82
| 0.605325
|
4a03351542876f06426dc30d162b79b726e7e774
| 334,235
|
py
|
Python
|
code/plyj/parsetab.py
|
jmflorezff/cs-6301
|
89fe2668af3911f3a112adfdd46a5b649c62ec61
|
[
"MIT"
] | null | null | null |
code/plyj/parsetab.py
|
jmflorezff/cs-6301
|
89fe2668af3911f3a112adfdd46a5b649c62ec61
|
[
"MIT"
] | null | null | null |
code/plyj/parsetab.py
|
jmflorezff/cs-6301
|
89fe2668af3911f3a112adfdd46a5b649c62ec61
|
[
"MIT"
] | 1
|
2021-08-17T09:16:17.000Z
|
2021-08-17T09:16:17.000Z
|
# parsetab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.8'
_lr_method = 'LALR'
_lr_signature = '8671335849935BC5A722EAB243FC04C0'
_lr_action_items = {'?':([12,31,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,107,112,133,134,135,136,137,138,139,141,142,143,145,146,148,150,151,152,156,159,160,161,162,163,164,165,168,213,216,219,221,230,231,238,239,240,241,242,245,246,247,248,250,251,253,256,258,259,260,261,262,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,597,598,602,604,605,619,623,625,626,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,653,656,657,658,659,660,661,662,663,664,665,692,703,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,942,945,946,947,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1117,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,297,-381,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,364,-119,-111,-130,-58,-30,-40,-50,-129,-45,-122,-35,-343,-383,-117,-119,297,-385,-113,-27,-68,-100,507,-112,509,-37,-60,-123,-131,-133,-47,-42,-132,-81,-53,-32,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,509,-110,-121,-131,-132,509,-133,-341,-388,-387,509,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,297,-417,-418,817,-400,-384,-335,-339,-340,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,297,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-145,-466,-33,-71,-86,-84,-82,-69,-73,-75,-28,-44,-85,-106,-39,-55,-95,-87,-29,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,817,-421,-422,1042,-40
6,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,1042,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'CLASS':([1,4,6,7,8,9,10,18,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,71,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,176,178,179,180,181,182,183,185,186,187,188,190,204,205,207,213,218,226,227,228,229,232,233,234,236,266,271,286,289,311,328,347,360,381,382,383,386,387,388,391,394,405,406,438,439,440,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,465,466,467,468,469,470,471,473,474,475,477,478,479,482,483,484,485,488,514,549,551,559,579,588,594,611,612,614,615,628,630,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,725,783,831,833,834,835,858,862,872,878,906,909,916,917,925,941,953,956,962,969,973,975,976,1000,1007,1011,1015,1016,1018,1063,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[-623,-623,-186,-204,-194,-187,-361,201,-623,-594,-351,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-352,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,-623,-610,-608,-362,-352,-623,-618,-611,-451,-612,-623,-613,-450,-447,-351,-205,-167,-352,-623,-343,-354,-508,-623,-623,-452,-623,-553,-529,-249,-175,-279,-277,586,-286,-588,631,586,-623,-623,-351,-609,-623,-619,-605,-281,-164,-168,-525,-519,-524,-352,-521,201,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,201,-481,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-567,201,-566,-623,-623,586,-283,-280,-260,-278,-344,805,-290,-287,-284,-623,-228,848,-623,-614,-587,-595,-516,-503,-520,-466,-470,-502,-623,-484,-483,-559,-563,-564,-533,586,-285,-288,-296,-289,-616,-263,-231,-185,-536,-534,-307,-261,-
308,-282,-306,-297,-234,-615,-271,-264,-623,-482,-500,-535,-309,-310,586,-617,-276,-274,-265,-623,-272,-266,-623,-232,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'RSHIFT':([12,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,163,165,213,219,221,231,238,240,241,245,246,250,251,253,259,260,263,264,265,277,291,292,299,300,302,303,304,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,692,703,731,732,733,734,735,736,737,742,743,746,747,749,750,751,752,753,755,757,759,765,766,769,770,771,772,773,777,804,805,809,811,813,815,817,818,819,820,821,822,823,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1037,1038,1039,1040,1041,1042,1043,1045,1046,1047,1048,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1142,1143,1144,1145,1146,1148,1149,1155,],[-345,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-140,-108,-343,-88,355,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-129,-122,-343,-117,-119,-385,-113,502,-100,-112,511,-123,-131,-133,-132,-81,-126,-116,-91,-118,-156,-155,-381,-398,-392,-399,-383,-391,-389,-134,-135,-120,-109,511,-110,-121,-131,-132,511,-133,-341,-388,-387,511,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,355,355,355,355,-145,-466,355,-86,-84,-82,355,355,355,-85,-106,-95,-87,-93,355,355,-83,-102,355,355,-104,-92,-94,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-384,945,-402,-405,-401,-404,-380,948,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,948,-423,948,-424,948,945,-407,-408,-410,-411,948,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-4
26,-425,-412,-323,-623,-319,-322,-623,-427,948,-428,948,-409,948,-324,-161,-321,]),'THIS':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,65,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,288,289,290,294,295,300,302,303,308,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,360,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,584,590,598,602,605,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,783,784,785,786,790,791,792,798,799,800,807,809,811,813,818,819,820,821,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,945,946,948,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1018,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1114,1115,1118,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[70,133,-186,-204,-194,-187,70,133,70,133,-170,70,133,-206,-262,-203,-193,133,-192,-174,-202,-189,274,-172,133,-171,133,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,133,133,133,133,133,-205,133,133,-167,70,133,133,-508,-452,-553,-529,-249,133,133,133,133,-175,133,133,-279,133,133,133,133,-277,581,587,133,133,133,-398,-392,-399,-391,-286,70,-17,-11,-9,-10,133,-18,-12,-15,-8,-19,-16,-14,-13,133,133,133,133,133,133,133,133,133,133,643,133,133,133,133,133,133,133,133,133,133,133,-281,-164,-168,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,643,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,1
33,133,133,133,133,133,133,133,133,-283,-280,133,133,-260,133,-278,797,802,-417,-418,-400,-290,-287,-284,-623,133,133,-228,133,133,133,70,133,133,133,-516,-466,70,-559,-533,133,133,643,133,133,133,133,133,133,133,133,133,-251,-393,-419,-420,-402,-405,-401,-404,-285,-288,-296,-289,133,133,70,133,133,133,-263,-231,70,-185,133,-536,-534,133,133,-307,-261,133,-308,133,133,133,133,133,133,133,-282,-421,-422,-406,133,-306,-297,133,-234,133,-271,-264,70,133,133,133,70,70,133,133,-535,-309,-310,643,133,133,-403,-423,-424,-407,-408,-410,-411,133,-276,-274,-265,70,-272,-266,70,-232,133,-313,-311,-314,-312,-426,-425,-412,-291,133,-267,-273,70,133,70,70,-315,-317,-318,-316,-427,-428,-409,-236,70,133,70,]),')':([12,13,15,21,23,27,31,32,34,36,38,41,42,43,44,46,48,51,52,55,56,57,62,66,67,72,78,85,86,89,90,96,101,112,117,121,124,128,133,134,135,136,137,138,139,141,142,143,144,145,146,148,150,151,152,153,156,159,160,161,162,163,164,165,167,168,208,210,213,216,219,221,231,238,239,240,241,242,243,244,245,246,247,248,250,251,253,255,256,257,258,259,260,261,262,263,264,265,275,276,277,281,291,292,300,302,303,308,318,321,324,328,336,345,349,362,366,367,368,369,393,404,407,408,410,412,413,414,415,416,417,418,419,420,421,423,425,426,427,431,433,434,441,463,476,480,498,533,534,535,538,552,553,554,555,556,557,558,569,575,585,586,588,592,593,595,596,598,602,605,606,609,619,621,623,625,626,629,631,632,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,651,652,654,655,656,657,658,659,660,661,662,663,664,665,679,682,684,688,689,692,694,695,696,697,703,707,712,729,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,777,786,790,791,792,798,799,800,804,805,809,811,813,818,819,820,821,824,825,827,836,838,839,840,841,843,844,845,846,848,851,853,855,861,881,883,885,886,899,910,915,920,923,924,927,928,929,930,932,933,934,935,936,937,938,939,940,945,946,948,950,954,957,958
,959,960,961,963,964,966,967,990,992,993,994,996,999,1002,1009,1013,1014,1019,1020,1021,1022,1025,1026,1027,1028,1029,1030,1033,1034,1035,1036,1038,1040,1043,1045,1046,1047,1049,1055,1056,1057,1058,1059,1060,1061,1062,1078,1082,1083,1085,1087,1095,1096,1097,1098,1099,1100,1103,1104,1105,1110,1114,1115,1118,1119,1121,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,1157,1159,],[-345,-375,-370,-594,-212,-374,-380,-372,-209,-369,-347,-211,-138,-585,-378,-349,-341,-346,-376,-142,-137,-342,-136,-373,-379,-144,-210,-348,-371,-350,-208,-586,-139,-381,-213,-207,-584,-377,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,-623,-623,-343,-383,-117,-119,-385,-113,-27,-68,-100,-22,-5,-2,-112,512,-37,-60,-123,-131,-133,-6,-47,538,-42,-132,-81,-53,-32,-126,-116,-91,-623,-623,-118,-623,-156,-155,-398,-392,-399,-391,-389,-381,-383,-588,-134,-135,-120,-109,650,-623,-110,-121,672,677,-591,-590,-131,-132,682,-596,-577,-575,-589,684,-133,-574,-576,-341,-153,-154,-182,-388,-387,692,-623,-623,-386,-623,-124,-114,-115,-125,-146,-623,-623,-547,-546,-548,777,778,-242,788,-343,-149,-344,-158,-157,806,807,-417,-418,-400,-623,-301,-384,-623,-335,-339,-340,-7,-152,849,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,852,-623,857,-153,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-581,-587,-595,-184,-183,-145,-488,895,-490,-489,-466,901,903,-623,-33,-71,-86,-84,-82,-69,-73,-75,-28,-44,-85,-106,-39,-55,-95,-87,-29,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-332,-331,913,914,-221,-623,-623,-623,-623,-623,-623,-623,-330,-150,-393,-419,-420,-402,-405,-401,-404,-299,949,-300,-623,-390,-384,959,-623,-338,-333,-336,-334,-151,-162,965,-128,970,-580,-579,-592,-593,-487,1012,-549,-243,1020,-623,1022,1023,1024,-623,-623,-623,-623,1030,1031,1032,-623,-623,-623,-421,-422,-406,-302,1051,1055,-623,-623,1060,-337,-21,-163,-623,-159,-578,-217,-216,-492,-491,-506,-486,-570,-
23,-24,-623,-227,1100,-226,1103,-623,1105,1106,1107,-225,1110,1111,1112,-403,-423,-424,-407,-408,-410,-411,-304,-623,1124,-326,-327,-320,-623,-160,1126,1129,1131,-218,-493,-505,-569,-246,-247,1134,-248,-224,-623,1136,-223,-222,-426,-425,-412,-305,-292,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,-623,1160,]),'NEW':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,111,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,288,289,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,360,363,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,780,783,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1018,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1154,1156,1157,1160,],[116,158,-186,-204,-194,-187,116,158,116,158,-170,116,158,-206,-262,-203,-193,158,-192,-174,-202,-189,-172,158,-171,158,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,317,-196,-195,158,158,158,158,158,-205,158,158,-167,116,158,158,-508,-452,-553,-529,-249,158,158,158,158,-175,158,158,-279,158,158,116,158,-277,583,-325,158,158,158,-286,116,-17,-11,-9,-10,158,-18,-12,-15,-8,-19,-16,-14,-13,158,158,158,158,158,158,158,158,158,158,-325,583,158,158,158,158,158,158,158,158,158,158,158,-281,-164,-168,158,158,158,158,158,158,158,158,158,158,1
58,158,158,158,158,158,-325,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,158,-283,-280,158,158,-260,158,-278,-290,-287,-284,-623,158,158,-228,158,158,158,116,158,158,158,-516,-466,116,-559,-533,158,158,583,-325,116,158,158,158,158,158,158,158,158,-251,-285,-288,-296,-289,158,158,116,158,158,158,-263,-231,116,-185,158,-536,-534,158,158,-307,-261,158,-308,158,158,158,158,158,158,158,-282,158,-306,-297,158,-234,158,-271,-264,116,158,158,116,116,116,158,158,-535,-309,-310,-325,116,158,158,-276,-274,-265,116,-272,-266,116,-232,158,-313,-311,-314,-312,-291,158,-267,-273,116,158,116,116,-315,-317,-318,-316,-236,116,116,116,]),'LTEQ':([12,38,42,46,48,51,55,56,57,62,72,85,89,101,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,163,165,219,221,238,240,241,245,246,250,251,253,259,260,263,264,265,277,336,345,349,362,366,368,369,410,412,416,419,423,434,498,533,534,535,538,585,586,588,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,692,703,732,733,734,742,743,746,747,749,752,753,759,765,766,769,770,771,772,773,777,804,805,843,844,845,846,848,851,959,961,964,967,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1149,1155,],[-345,-347,-138,-349,-341,-346,-142,-137,-342,-136,-144,-348,-350,-139,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,371,-129,-122,-117,-119,-113,506,-100,-112,528,-123,-131,-133,-132,-81,-126,-116,-91,-118,-134,-135,-120,-109,528,-110,-121,-131,-132,528,-133,-341,528,-124,-114,-115,-125,-146,-343,-149,-344,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,-145,-466,-86,-84,-82,-85,-106,-95,-87,-93,-83,-102,-104,-92,-94,-105,-101,-103,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-162,-623,-337,-163,-159,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-161,-321,]),'ENUM
':([1,4,6,7,8,9,10,18,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,71,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,176,178,179,180,181,182,183,185,186,187,188,190,204,205,207,213,218,226,227,228,229,232,233,234,236,266,271,286,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,465,466,467,468,469,470,471,473,474,475,477,478,479,482,483,484,485,488,549,551,559,579,588,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,725,831,833,834,835,858,862,872,878,906,909,916,917,925,941,953,956,962,969,973,975,976,1000,1007,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[-623,-623,-186,-204,-194,-187,-361,200,-623,-594,-351,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-352,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,-623,-610,-608,-362,-352,-623,-618,-611,-451,-612,-623,-613,-450,-447,-351,-205,-167,-352,-623,-343,-354,-508,-623,-623,-452,-623,-553,-529,-249,-175,-279,-277,-286,-588,-623,-623,-351,-609,-623,-619,-605,-281,-164,-168,-525,-519,-524,-352,-521,200,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,200,-481,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-567,200,-566,-623,-623,-283,-280,-260,-278,-344,-290,-287,-284,-623,-228,-623,-614,-587,-595,-516,-503,-520,-466,-470,-502,-623,-484,-483,-559,-563,-564,-533,-285,-288,-296,-289,-616,-263,-231,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,-623,-482,-500,-535,-309,-310,-617,-276,-274,-265,-623,-272,-266,-623,-232,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'CONTINUE':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,12
7,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[63,-186,-204,-194,-187,63,63,-170,63,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,63,-508,-452,-553,-529,-249,-175,-279,-277,-286,63,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,63,-516,-466,63,-559,-533,-251,-285,-288,-296,-289,63,-263,-231,63,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,63,63,63,-535,-309,-310,-276,-274,-265,63,-272,-266,63,-232,-313,-311,-314,-312,-291,-267,-273,63,63,63,-315,-317,-318,-316,-236,63,63,]),'STRING_LITERAL':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1154,1156,115
7,1160,],[38,38,-186,-204,-194,-187,38,38,38,38,-170,38,38,-206,-262,-203,-193,38,-192,-174,-202,-189,-172,38,-171,38,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,38,38,38,38,38,-205,38,38,-167,38,38,38,-508,-452,-553,-529,-249,38,38,38,38,-175,38,38,-279,38,38,38,38,-277,38,38,38,-286,38,-17,-11,-9,-10,38,-18,-12,-15,-8,-19,-16,-14,-13,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,-281,-164,-168,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,-283,-280,38,38,-260,38,-278,-290,-287,-284,-623,38,38,-228,38,38,38,38,38,38,38,-516,-466,38,-559,-533,38,38,38,38,38,38,38,38,38,38,38,-251,-285,-288,-296,-289,38,38,38,38,38,38,-263,-231,38,-185,38,-536,-534,38,38,-307,-261,38,-308,38,38,38,38,38,38,38,-282,38,-306,-297,38,-234,38,-271,-264,38,38,38,38,38,38,38,38,-535,-309,-310,38,38,38,-276,-274,-265,38,-272,-266,38,-232,38,-313,-311,-314,-312,-291,38,-267,-273,38,38,38,38,-315,-317,-318,-316,-236,38,38,38,]),'NATIVE':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[10,10,-186,-204,-194,-187,-361,10,-594,10,-170,-585,-206,-262,-341,-203,-19
3,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,10,-610,-608,-362,10,-618,-611,-451,-612,10,-613,-450,-447,10,-205,-167,10,-343,-354,-508,10,10,-452,10,-553,-529,10,-249,-175,-279,10,-277,10,-286,-588,10,10,10,-609,10,-619,-605,-281,-164,-168,-525,-519,-524,10,-521,-527,-526,-523,-528,10,-522,10,-479,-478,-469,10,-472,-481,10,-480,-476,-475,10,-473,-501,-474,-358,-471,-477,-562,-565,10,-567,-566,10,10,10,-283,-280,-260,10,-278,-344,10,-290,-287,-284,-623,-228,10,-614,-587,-595,-516,-503,-520,-466,-470,-502,10,-484,-483,-559,-563,-564,10,-533,-303,10,-285,10,-288,-296,-289,-616,-263,-231,-185,10,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,10,10,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,10,-272,-266,10,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'DO':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[11,-186,-204,-194,-187,11,11,-170,11,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,11,-508,-452,-553,-529,-249,-175,-279,-277,-286,11,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,11,-516,-466,11,-559,-533,-251,-285,-288,-296,-289,11,-263,-231,11,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,11,11,11,-535,-309,-310,-276,-274,-265,11,-272,-266,11,-232,-313,-311,-314,-312,-291,-267,-273,11,11,11,-315,-317,-318,-316,-236,11,11,]),'^':([12,31,38,42,44,46,48,51,55,56,5
7,62,67,72,85,89,101,112,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,161,162,163,164,165,168,213,216,219,221,231,238,240,241,245,246,247,248,250,251,253,256,258,259,260,261,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,660,661,662,663,664,665,692,703,731,732,733,734,735,736,737,741,742,743,744,745,746,747,749,750,751,752,753,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,-40,-50,-129,-45,-122,380,-343,-383,-117,-119,-385,-113,-68,-100,-112,510,532,-60,-123,-131,-133,-47,-42,-132,-81,-53,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,510,-110,-121,-131,-132,510,-133,-341,-388,-387,510,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,380,-46,-59,-52,-51,-41,-145,-466,-71,-86,-84,-82,-69,-73,-75,-44,-85,-106,380,-55,-95,-87,-93,-70,-72,-83,-102,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,380,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'NAME
':([1,2,6,7,8,9,10,11,13,15,16,20,21,22,24,25,27,30,31,32,36,37,40,43,44,45,47,48,49,52,53,57,58,59,60,61,63,64,66,67,68,69,71,73,74,75,76,77,79,82,84,86,87,88,91,92,93,94,95,96,98,99,100,102,104,105,106,107,110,112,113,114,115,116,118,122,124,125,127,128,129,130,131,132,147,154,155,157,158,169,170,175,177,181,184,186,187,190,194,199,200,201,202,203,204,207,208,213,214,215,216,217,218,220,223,226,227,228,229,230,231,232,233,234,235,236,237,249,252,254,266,268,270,271,275,276,279,280,281,285,286,288,289,290,291,292,293,294,295,300,302,303,308,309,311,315,317,318,319,323,327,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,346,348,350,351,352,353,354,355,356,357,358,359,360,363,364,365,370,371,372,373,374,375,376,377,378,380,384,390,394,396,397,405,406,411,422,424,428,430,431,432,433,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,476,477,478,479,480,482,483,484,485,488,491,494,495,499,500,501,502,503,504,505,506,507,508,509,510,511,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,561,565,566,567,574,579,583,584,585,588,590,592,593,597,598,599,600,602,603,604,605,607,608,611,612,614,615,618,619,620,621,624,628,645,647,650,653,669,671,673,674,675,677,682,683,684,685,687,692,693,697,698,699,700,701,702,703,704,705,706,708,709,710,711,713,714,715,716,717,720,725,729,776,780,782,783,784,785,786,789,790,791,792,794,798,799,800,807,809,811,813,818,819,820,821,824,826,829,831,832,833,834,835,836,838,841,849,850,852,854,857,859,862,864,866,872,874,878,882,893,894,898,899,900,905,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,995,999,1000,1002,1004,1007,1009,1011,1015,1016,1018,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1052,1053,1054,1064,1065,1067,1068,1069,1
070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1132,1134,1137,1138,1139,1140,1141,1143,1145,1147,1154,1156,1157,1160,],[68,136,-186,-204,-194,-187,-361,68,-375,-370,136,68,-594,210,213,136,-374,-170,-380,-372,-369,68,136,-585,-378,-206,-262,-341,-203,-376,-193,-342,136,-192,-174,-202,272,-189,-373,-379,-343,-172,-352,136,-363,-365,-171,136,-188,-200,-173,-371,287,-191,-201,-197,-199,-368,-190,-586,-169,-198,-383,-362,-360,-356,-353,213,-185,-381,-357,-355,-366,213,213,-196,-584,-367,-195,-377,-359,-358,-364,136,136,136,136,136,213,-449,-448,-362,213,-451,213,-450,-447,-205,136,395,401,402,403,136,-167,68,136,-343,210,213,-383,-367,-354,136,213,-508,-623,-623,-452,213,-385,-623,-553,-529,-623,-249,136,136,136,136,-175,136,136,-279,136,136,561,573,136,213,-277,580,585,136,-156,-155,-388,136,136,-398,-392,-399,-391,213,-286,213,213,-389,213,213,627,-588,68,-17,-11,-9,-10,136,-18,-12,-15,-8,-19,-16,-14,-13,-387,136,136,136,136,136,136,136,136,640,136,136,585,646,136,136,136,136,136,136,136,136,213,136,136,136,213,213,-281,-439,-430,-164,-168,136,213,210,136,691,-388,213,-387,-525,-519,-524,-623,-352,-521,213,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,705,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-386,-562,-352,-565,-623,-567,705,-566,-623,-623,-351,-352,724,136,136,136,136,136,136,136,136,136,136,136,136,136,136,585,136,136,136,136,136,136,136,136,136,136,136,136,136,136,213,136,136,136,136,136,136,136,136,213,136,136,136,136,-283,-280,136,136,-260,136,-343,781,213,-383,787,-278,213,795,-343,-344,801,-158,-157,213,-417,213,213,-418,213,213,-400,210,213,-290,-287,-284,-623,213,-384,213,136,136,-228,801,795,136,136,213,136,395,-441,213,68,-587,213,-595,136,136,136,213,-352,896,213,-516,-503,-520,-466,-470,-343,900,-502,68,-484,-483,-559,904,900,-563,-564,-623,-533,136,136,580,919,585,561,136,136,213,136,136,136,213,136,136,136,-251,-393,-419,-420,-402,
-405,-401,-404,-303,213,210,-285,-623,-288,-296,-289,136,-390,136,68,136,136,213,136,213,-263,-440,-442,-231,68,-185,136,210,-623,1001,-487,-343,1010,-536,-534,136,136,-307,-261,136,-308,136,136,136,136,136,136,136,-282,213,213,213,-421,-422,213,-406,136,-306,213,-297,136,-234,136,-271,-264,68,136,-443,213,-444,136,561,68,68,136,136,210,-506,-482,-486,213,-500,-570,-535,-309,-310,585,561,136,-403,-423,-424,-407,-408,-410,-411,136,210,-294,-293,-276,-274,-265,68,-272,-266,68,-446,-445,-232,-505,136,-569,-313,-311,-314,-312,213,-426,-425,213,213,-412,-291,213,136,-267,-273,68,136,68,213,68,-315,-317,-318,-316,-427,-428,-409,-295,-236,68,561,68,]),'.':([12,13,15,23,27,32,36,38,42,46,48,51,52,55,56,57,62,66,68,70,72,81,85,86,89,97,100,101,112,117,125,128,133,136,139,140,142,145,148,166,192,213,216,231,246,251,291,292,293,299,300,302,303,304,308,321,324,328,346,361,366,367,379,385,389,410,416,423,434,476,512,538,561,563,564,567,573,580,585,586,587,588,589,592,593,598,602,605,619,623,625,626,631,640,643,644,646,650,651,652,655,667,670,692,703,705,772,773,777,804,805,809,811,813,815,818,819,820,821,839,843,844,845,846,848,855,856,900,921,945,946,948,959,961,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1155,],[-345,-375,-370,-143,-374,-372,-369,-347,-138,-349,-341,-346,-376,-142,-137,-342,-136,-373,-343,-140,-144,280,-348,-371,-350,288,289,-139,319,-141,347,-377,-140,-343,-141,357,-143,360,363,347,289,-343,432,-385,514,363,-156,-155,594,603,-398,-392,-399,432,-391,620,432,432,630,594,514,347,630,669,432,363,514,-341,514,-386,-145,-146,-343,780,280,783,-329,-328,-343,-149,-147,-344,-148,-158,-157,-417,-418,-400,432,-335,-339,-340,-152,-329,-147,-148,-328,-145,594,854,630,859,432,-145,-466,-343,-332,-331,-221,-330,-150,-393,-419,-420,432,-402,-405,-401,-404,432,-338,-333,-336,-334,-151,514,347,-343,1018,-421,-422,-406,-623,-337,-217,-216,-227,-226,-225,-4
03,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-321,]),'/':([12,38,42,46,48,51,55,56,57,62,72,85,89,101,133,134,136,137,139,142,143,145,146,148,151,152,156,163,165,219,221,238,241,245,246,250,251,253,259,263,264,265,277,336,345,349,362,366,368,369,410,412,416,419,423,434,498,533,534,535,538,585,586,588,623,625,626,631,634,635,636,640,641,642,643,644,646,650,692,703,743,746,749,753,759,765,766,769,770,771,772,773,777,804,805,843,844,845,846,848,851,855,959,961,964,967,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1149,1155,],[-345,-347,-138,-349,-341,-346,-142,-137,-342,-136,-144,-348,-350,-139,-140,-108,-343,353,-141,-143,-107,-128,-96,-127,-119,-111,-130,-129,-122,-117,-119,-113,-100,-112,530,-123,-131,-133,-132,-126,-116,546,-118,-134,-135,-120,-109,530,-110,-121,-131,-132,530,-133,-341,530,-124,-114,-115,-125,-146,-343,-149,-344,-335,-339,-340,-152,-99,-97,-98,-329,353,353,-147,-148,-328,-145,-145,-466,-106,353,353,-102,-104,353,353,-105,-101,-103,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-162,-128,-623,-337,-163,-159,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-161,-321,]),'|':([12,13,15,27,31,32,36,38,42,44,46,48,51,52,55,56,57,62,66,67,72,85,86,89,94,101,112,128,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,160,161,162,163,164,165,168,213,216,217,219,221,231,238,240,241,245,246,247,248,250,251,253,256,258,259,260,261,262,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,633,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,660,661,662,663,664,665,692,703,730,731,732,733,734,735,736,737,741,742,743,744,745,746,747,749,750,751,752,753,754,755,756,757,758,759,760,761,762,7
63,764,765,766,767,768,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1053,1054,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1147,1148,1149,1155,],[-345,-375,-370,-374,-380,-372,-369,-347,-138,-378,-349,-341,-346,-376,-142,-137,-342,-136,-373,-379,-144,-348,-371,-350,-368,-139,-381,-377,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,374,-40,-50,-129,-45,-122,-35,-343,-383,-367,-117,-119,-385,-113,-68,-100,-112,515,-37,-60,-123,-131,-133,-47,-42,-132,-81,-53,543,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,515,-110,-121,-131,-132,515,-133,-341,-388,-387,515,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,374,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-145,-466,374,-71,-86,-84,-82,-69,-73,-75,-44,-85,-106,-39,-55,-95,-87,-93,-70,-72,-83,-102,374,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-294,1122,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-295,-324,-161,-321,]),'CATCH':([310,311,405,612,614,615,833,835,1120,],[613,-286,-164,-287,613,613,-288,613,-291,]),'CHAR':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295
,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[13,13,-186,-204,-194,-187,-361,13,13,13,-594,13,13,-170,13,13,-585,-206,-262,-341,-203,-193,-342,13,-192,-174,-202,-189,-172,-352,13,-363,-365,-171,13,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,13,-185,-357,-355,-366,13,-196,-584,-195,-359,-358,-364,13,13,13,13,13,13,-449,-448,-362,-451,-450,-447,-205,13,13,-167,13,13,-343,-354,13,-508,-623,-623,-452,13,-623,-553,-529,-249,13,13,13,13,-175,13,13,-279,13,13,13,13,-277,13,13,13,13,-286,-588,13,-17,-11,-9,-10,13,-18,-12,-15,-8,-19,-16,-14,-13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,-281,-439,-430,-164,-168,13,13,-525,-519,-524,-
623,-352,-521,13,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,13,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,13,-566,-623,-623,-351,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,-283,-280,13,13,-260,13,13,-278,-344,13,13,13,13,-400,13,-290,-287,-284,-623,13,13,-228,13,13,13,-441,13,13,-587,-595,13,13,13,13,-352,13,-516,-503,-520,-466,-470,13,-502,13,-484,-483,-559,13,-563,-564,-533,13,13,13,13,13,13,13,13,13,13,13,-251,-402,-405,-401,-404,-303,13,-285,-623,-288,-296,-289,13,13,13,13,13,13,-263,-440,-442,-231,13,-185,13,-623,-487,-536,-534,13,13,-307,-261,13,-308,13,13,13,13,13,13,13,-282,13,13,13,-421,-422,13,-406,13,-306,13,-297,13,-234,13,-271,-264,13,13,-443,13,-444,13,13,13,13,13,13,-506,-482,-486,-500,-570,-535,-309,-310,13,13,-403,-423,-424,-407,-408,-410,-411,13,-276,-274,-265,13,-272,-266,13,-446,-445,-232,-505,13,-569,-313,-311,-314,-312,13,-426,-425,13,13,-412,-291,13,13,-267,-273,13,13,13,13,-315,-317,-318,-316,-427,-428,-409,-236,13,13,13,]),'RRSHIFT':([12,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,163,165,213,219,221,231,238,240,241,245,246,250,251,253,259,260,263,264,265,277,291,292,299,300,302,303,304,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,692,703,731,732,733,734,735,736,737,742,743,746,747,749,750,751,752,753,755,757,759,765,766,769,770,771,772,773,777,804,805,809,811,813,815,818,819,820,821,822,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1042,1043,1045,1046,1047,1048,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1142,1143,1144,1145,1146,1148,1149,1155,],[-345,-347,-138,-378,-34
9,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-140,-108,-343,-88,354,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-129,-122,-343,-117,-119,-385,-113,501,-100,-112,518,-123,-131,-133,-132,-81,-126,-116,-91,-118,-156,-155,-381,-398,-392,-399,-383,-391,-389,-134,-135,-120,-109,518,-110,-121,-131,-132,518,-133,-341,-388,-387,518,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,354,354,354,354,-145,-466,354,-86,-84,-82,354,354,354,-85,-106,-95,-87,-93,354,354,-83,-102,354,354,-104,-92,-94,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-384,-402,-405,-401,-404,-380,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,1115,-407,-408,-410,-411,1118,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,1118,-428,1118,-409,1118,-324,-161,-321,]),'=':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,210,211,213,246,251,253,259,291,292,336,345,366,423,425,426,427,434,512,538,561,563,567,573,580,585,586,587,588,589,592,593,623,625,626,631,640,643,644,646,650,703,772,773,777,781,804,805,828,843,844,845,846,848,884,896,904,918,919,921,952,959,961,992,993,1017,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,340,-140,-343,-141,-143,-128,-127,340,-130,-129,-128,-623,428,-343,-128,-127,-130,-129,-156,-155,-134,-135,-128,685,-153,-154,-182,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-158,-157,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-623,-330,-150,951,-338,-333,-336,-334,-151,685,-623,-623,-182,-623,-128,1050,-623,-337,-217,-216,-182,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223
,-222,-323,-623,-319,-322,-623,-324,-321,]),'IMPORT':([4,171,173,174,178,180,182,185,381,386,391,668,858,969,1063,],[177,177,-610,-608,177,-611,-612,-613,177,-609,-605,-614,-616,-615,-617,]),'OR':([12,31,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,112,133,134,135,136,137,138,139,141,142,143,145,146,148,150,151,152,156,159,160,161,162,163,164,165,168,213,216,219,221,231,238,239,240,241,242,245,246,247,248,250,251,253,256,258,259,260,261,262,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,692,703,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,365,-119,-111,-130,-58,-30,-40,-50,-129,-45,-122,-35,-343,-383,-117,-119,-385,-113,-27,-68,-100,508,-112,519,-37,-60,-123,-131,-133,-47,-42,-132,-81,-53,-32,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,519,-110,-121,-131,-132,519,-133,-341,-388,-387,519,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-145,-466,-33,-71,-86,-84,-82,-69,-73,-75,-28,-44,-85,-106,-39,-55,-95,-87,-29,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-33
2,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'SWITCH':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[14,-186,-204,-194,-187,14,14,-170,14,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,14,-508,-452,-553,-529,-249,-175,-279,-277,-286,14,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,14,-516,-466,14,-559,-533,-251,-285,-288,-296,-289,14,-263,-231,14,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,14,14,14,-535,-309,-310,-276,-274,-265,14,-272,-266,14,-232,-313,-311,-314,-312,-291,-267,-273,14,14,14,-315,-317,-318,-316,-236,14,14,]),'*':([0,12,38,42,46,48,51,55,56,57,62,72,85,89,101,133,134,136,137,139,142,143,145,146,148,151,152,156,163,165,219,221,238,241,245,246,250,251,253,259,263,264,265,277,336,345,349,362,366,368,369,410,412,416,419,423,434,498,533,534,535,538,585,586,588,623,625,626,631,634,635,636,640,641,642,643,644,646,650,669,692,703,743,746,749,753,759,765,766,769,770,771,772,773,777,804,805,843,844,845,846,848,851,855,859,959,961,964,967,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1149,1155,],[1,-345,-347,-138,-349,-341,-346,-142,-137,-342,-136,-144,-348,-350,-139,-140,-108,-343,352,-141,-143,-107,-128,-96,-127,-119,-
111,-130,-129,-122,-117,-119,-113,-100,-112,524,-123,-131,-133,-132,-126,-116,545,-118,-134,-135,-120,-109,524,-110,-121,-131,-132,524,-133,-341,524,-124,-114,-115,-125,-146,-343,-149,-344,-335,-339,-340,-152,-99,-97,-98,-329,352,352,-147,-148,-328,-145,860,-145,-466,-106,352,352,-102,-104,352,352,-105,-101,-103,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-162,-128,968,-623,-337,-163,-159,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-161,-321,]),'MINUSMINUS':([0,1,2,6,7,8,9,11,12,16,20,23,25,30,37,38,40,41,42,45,46,47,48,49,51,53,55,56,57,58,59,60,61,62,64,68,69,70,72,73,76,77,78,79,82,84,85,88,89,91,92,93,95,97,98,99,100,101,110,117,122,123,127,132,133,136,139,142,145,147,148,151,154,155,156,157,163,190,192,194,203,204,207,208,220,221,226,229,233,234,236,237,246,249,251,252,253,254,259,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,348,350,351,352,353,354,355,356,358,359,364,365,366,370,371,372,373,374,375,377,378,380,394,405,406,409,410,411,412,416,419,423,428,434,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,538,539,540,541,543,544,545,546,549,551,552,553,559,560,561,563,567,573,579,580,585,586,587,588,589,611,612,614,615,621,623,624,625,626,628,631,640,643,644,646,650,653,671,677,685,687,692,700,703,709,713,725,729,772,773,776,777,784,785,786,790,791,792,798,799,800,804,805,807,831,833,834,835,836,841,843,844,845,846,848,849,850,855,857,862,872,874,878,882,906,909,911,912,916,917,921,924,925,930,932,933,934,938,939,940,941,951,953,956,958,959,961,962,973,975,976,977,983,984,986,987,988,991,992,993,1011,1015,1016,1019,1020,1022,1026,1030,1050,1055,1057,1058,1059,1060,1064,1065,1067,1068,1069,1070,1071,1080,1083,1092,1100,1101,1102,1103,1105,1108,1109,1110,1120,1123,1124,1125,1127,1128,1129,1130,1131,1134,1135,1136,1137,1138,1139,1140,114
8,1154,1155,1156,1157,1160,],[2,73,73,-186,-204,-194,-187,73,-345,73,73,-143,73,-170,73,-347,73,-130,-138,-206,-349,-262,-341,-203,-346,-193,-142,-137,-342,73,-192,-174,-202,-136,-189,-343,-172,-140,-144,73,-171,73,-129,-188,-200,-173,-348,-191,-350,-201,-197,-199,-190,-127,-169,-198,-128,-139,-185,-141,-196,345,-195,73,-140,-343,-141,-143,-128,73,-127,345,73,73,-130,73,-129,-205,-128,73,73,-167,73,73,73,345,-508,-452,-553,-529,-249,73,-128,73,-127,73,-130,73,-129,-175,73,73,-279,73,73,73,73,-277,73,73,73,-286,73,-17,-11,-9,-10,73,-134,-18,-12,-15,-8,-19,-16,-14,-13,-135,73,73,73,73,73,73,73,73,73,73,73,73,-128,73,73,73,73,73,73,73,73,73,-281,-164,-168,345,-127,73,-129,-128,-130,-341,73,-128,73,73,73,73,73,73,73,73,73,73,73,73,73,-145,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,73,-146,73,73,73,73,73,73,73,-283,-280,73,73,-260,73,-343,-127,-128,-329,-278,-328,-343,-149,-147,-344,-148,-290,-287,-284,-623,73,-335,73,-339,-340,-228,-152,-329,-147,-148,-328,-145,73,73,73,73,73,-145,-516,-466,73,-559,-533,73,-332,-331,73,-221,73,73,73,73,73,73,73,73,73,-330,-150,-251,-285,-288,-296,-289,73,73,-338,-333,-336,-334,-151,73,73,-128,73,-263,-231,73,-185,73,-536,-534,73,73,-307,-261,-128,73,-308,73,73,73,73,73,73,73,-282,73,-306,-297,73,-623,-337,-234,-271,-264,73,73,73,73,73,73,73,73,-217,-216,-535,-309,-310,73,-227,-226,73,-225,73,-623,-326,-327,-320,-623,-276,-274,-265,73,-272,-266,73,-232,-218,73,-224,-313,-311,-623,-223,-314,-312,-222,-291,-323,-623,-319,-267,-273,73,73,73,73,-322,-623,-315,-317,-318,-316,-324,-236,-321,73,73,73,]),':':([12,31,38,42,44,46,48,51,55,56,57,62,67,68,72,85,89,101,103,112,120,133,134,135,136,137,138,139,141,142,143,144,145,146,148,150,151,152,153,156,159,160,161,162,163,164,165,167,168,213,216,219,221,231,277,278,291,292,300,302,303,308,318,336,345,349,362,368,369,425,426,431,433,476,538,585,586,588,592,593,598,602,605,619,623,625,626,629,631,633,634,635,636,637,638,639,640,641,642,643,644,646,648,649,650,656,657,658,659,660,661,66
2,663,664,665,692,703,738,740,772,773,777,781,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,877,918,919,945,946,948,959,961,963,964,967,971,992,993,1017,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1072,1073,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-230,-144,-348,-350,-139,295,-381,330,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,-343,-383,-117,-119,-385,-118,560,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,-110,-121,-153,-154,-388,-387,-386,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-7,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,850,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-145,-466,911,912,-332,-331,-221,-623,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,987,-252,-623,-421,-422,-406,-623,-337,-21,-163,-159,1065,-217,-216,-253,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,1128,-275,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'~':([2,16,25,40,58,73,77,132,147,154,155,157,194,203,208,220,237,249,252,254,268,270,275,276,281,290,294,295,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,685,687,692,729,776,785,786,790,791,792,798,799,800,836,841,850,852,857,882,911,912,924,930,932,933,934,938,939,940,951,958,965,977,983,988,991,1026,1050,1092,1126,1130,],[132,132,132,237,132,132,132,132,132,237,132,132,132,132,237,237,132,132,132,132,132,132,132,132,132,132,132,132,-
17,-11,-9,-10,132,-18,-12,-15,-8,-19,-16,-14,-13,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,237,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,237,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,237,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,237,132,132,]),'RETURN':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[16,-186,-204,-194,-187,16,16,-170,16,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,16,-508,-452,-553,-529,-249,-175,-279,-277,-286,16,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,16,-516,-466,16,-559,-533,-251,-285,-288,-296,-289,16,-263,-231,16,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,16,16,16,-535,-309,-310,-276,-274,-265,16,-272,-266,16,-232,-313,-311,-314,-312,-291,-267,-273,16,16,16,-315,-317,-318,-316,-236,16,16,]),'OR_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348
,-350,-127,-128,-139,-141,337,-140,-343,-141,-143,-128,-127,337,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'IF':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[19,-186,-204,-194,-187,19,19,-170,19,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,19,-508,-452,-553,-529,-249,-175,-279,-277,-286,19,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,870,-516,-466,19,-559,-533,-251,-285,-288,-296,-289,19,-263,-231,870,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,19,19,870,-535,-309,-310,-276,-274,-265,19,-272,-266,19,-232,-313,-311,-314,-312,-291,-267,-273,870,870,19,-315,-317,-318,-316,-236,870,870,]),'{':([1,6,7,8,9,11,17,20,26,28,29,30,31,33,35,37,45,47,48,49,53,54,57,59,60,61,64,69,76,79,82,83,84,88,91,92,93,95,98,99,108,109,110,119,122,127,190,198,204,207,208,213,222,224,225,226,228,229,231,233,234,236,266,267,271,282,283,284,286,291,292,300,302,303,308,311,312,313,314,316,321,324,329,330,394,396,397,401,402,403,405,406,411,425,426,428,435,436,437,454,455,456,458,460,462,464,465,466,467,469,470,471,472,473,474,475,476,488,489,497,547,549,551,559,576,577,578,579,588,592,593,598,602,605,611,612,614,615,616,623,625,626,627,628,672,674,676,677,685,687,691,700,701,703,704,708
,709,710,711,713,724,725,726,727,728,806,807,809,811,813,818,819,820,821,830,831,833,834,835,839,843,845,847,849,862,864,866,872,874,878,882,892,895,901,906,909,916,917,925,926,941,945,946,948,949,951,953,956,959,961,962,973,975,976,978,981,986,987,991,997,998,1000,1003,1005,1006,1007,1011,1012,1015,1016,1036,1038,1040,1043,1045,1046,1047,1050,1051,1055,1060,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1086,1088,1089,1090,1092,1101,1102,1103,1108,1109,1114,1115,1118,1120,1124,1127,1128,1129,1131,1134,1136,1137,1138,1139,1140,1141,1143,1145,1153,1154,1156,1160,],[20,-186,-204,-194,-187,20,-455,20,-623,227,228,-170,-380,232,235,20,-206,-262,-341,-203,-193,-623,-342,-192,-174,-202,-189,-172,-171,-188,-200,-623,-173,-191,-201,-197,-199,-190,-169,-198,20,-623,-185,-511,-196,-195,-205,-454,-167,20,411,-343,-458,-457,-623,-508,20,-452,-385,-553,-529,-249,-175,-623,-279,-461,-530,-460,-277,-156,-155,-398,-392,-399,-391,-286,20,-514,-509,-513,-381,-383,-510,20,-281,-439,-430,-531,-456,-512,-164,-168,411,-153,-154,687,-459,-382,-554,-479,-478,-469,20,-472,-481,709,-480,-476,-475,-473,-501,-474,709,20,-471,-477,-386,20,228,-623,-453,-283,-280,-260,-463,-462,-465,-278,-344,-158,-157,-417,-418,-400,-290,-287,-284,-623,-515,-335,687,687,-558,-228,863,-441,-532,20,411,687,-555,-516,-503,-466,-470,-502,20,-484,-483,-559,-542,-533,-541,-543,-544,20,-251,-393,-419,-420,-402,-405,-401,-404,20,-285,-288,-296,-289,-384,-338,-336,-557,20,-263,-440,-442,-231,20,-185,411,-556,-623,-623,-536,-534,-307,-261,-308,-464,-282,-421,-422,-406,-298,687,-306,-297,228,-337,-234,-271,-264,20,-443,-444,20,20,687,-623,-507,-482,-495,-494,-485,-500,-535,-545,-309,-310,-403,-423,-424,-407,-408,-410,-411,687,20,228,228,-276,-274,-265,20,-272,-266,20,-446,-445,-232,-504,-499,-496,-497,411,-313,-311,228,-314,-312,-426,-425,-412,-291,228,-267,-273,20,20,20,228,-315,-317,-318,-316,-427,-428,-409,-498,-236,20,20,]),'&':([12,31,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,112,133,134,136,137,138,139,141,14
2,143,145,146,148,151,152,156,159,161,162,163,164,165,213,216,219,221,231,238,240,241,245,246,248,250,251,253,256,258,259,260,261,263,264,265,277,291,292,299,300,302,303,304,306,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,661,662,663,664,665,692,703,731,732,733,734,735,736,737,741,742,743,745,746,747,749,750,751,752,753,755,756,757,758,759,760,761,762,763,764,765,766,767,769,770,771,772,773,777,804,805,809,811,813,815,818,819,820,821,838,843,844,845,846,848,851,855,867,945,946,948,959,961,964,967,980,982,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1075,1076,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,375,-50,-129,-45,-122,-343,-383,-117,-119,-385,-113,-68,-100,-112,527,-60,-123,-131,-133,-47,539,-132,-81,-53,-126,-116,-91,-118,-156,-155,-381,-398,-392,-399,-383,-380,-391,-389,-134,-135,-120,-109,527,-110,-121,-131,-132,527,-133,-341,-388,-387,527,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,-46,-59,-52,-51,375,-145,-466,-71,-86,-84,-82,-69,-73,-75,375,-85,-106,-55,-95,-87,-93,-70,-72,-83,-102,-74,-49,-76,-61,-104,-57,375,-54,-56,-48,-92,-94,-62,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-384,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,979,-421,-422,-406,-623,-337,-163,-159,-436,979,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-438,-437,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'TIMES_ASSIGN':([12
,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,333,-140,-343,-141,-143,-128,-127,333,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'AND_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,331,-140,-343,-141,-143,-128,-127,331,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'FOR':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703
,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[80,-186,-204,-194,-187,80,80,-170,80,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,80,-508,-452,-553,-529,-249,-175,-279,-277,-286,80,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,871,-516,-466,80,-559,-533,-251,-285,-288,-296,-289,80,-263,-231,871,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,80,80,871,-535,-309,-310,-276,-274,-265,80,-272,-266,80,-232,-313,-311,-314,-312,-291,-267,-273,871,871,80,-315,-317,-318,-316,-236,871,871,]),'LSHIFT':([12,38,42,46,48,51,55,56,57,62,72,85,89,101,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,163,165,219,221,238,240,241,245,246,250,251,253,259,260,263,264,265,277,336,345,349,362,366,368,369,410,412,416,419,423,434,498,533,534,535,538,585,586,588,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,692,703,731,732,733,734,735,736,737,742,743,746,747,749,750,751,752,753,755,757,759,765,766,769,770,771,772,773,777,804,805,843,844,845,846,848,851,855,959,961,964,967,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1149,1155,],[-345,-347,-138,-349,-341,-346,-142,-137,-342,-136,-144,-348,-350,-139,-140,-108,-343,-88,356,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-129,-122,-117,-119,-113,503,-100,-112,523,-123,-131,-133,-132,-81,-126,-116,-91,-118,-134,-135,-120,-109,523,-110,-121,-131,-132,523,-133,-341,523,-124,-114,-115,-125,-146,-343,-149,-344,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,356,356,356,356,-145,-466,356,-86,-84,-82,356,356,356,-85,-106,-95,-87,-93,356,356,-83,-102,356,356,-104,-92,-94,-105,-101,-103,-332,-331,-221,-330,-150,-
338,-333,-336,-334,-151,-162,-128,-623,-337,-163,-159,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-161,-321,]),'SUPER':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,65,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,288,289,290,294,295,297,300,302,303,308,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,360,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,584,590,598,602,605,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,783,784,785,786,790,791,792,798,799,800,807,809,811,813,817,818,819,820,821,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,945,946,948,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1018,1019,1026,1036,1038,1040,1042,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1114,1115,1118,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[81,140,-186,-204,-194,-187,81,140,81,140,-170,81,140,-206,-262,-203,-193,140,-192,-174,-202,-189,273,-172,140,-171,140,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,140,140,140,140,140,-205,140,140,-167,81,140,140,-508,-452,-553,-529,-249,140,140,140,140,-175,140,140,-279,140,140,564,140,-277,582,589,140,140,140,600,-398,-392,-399,-391,-286,81,-17,-11,-9,-10,140,-18,-12,-15,-8,-19,-16,-14,-13,140,140,140,140,140,140,140,140,140,140,644,140,140,140,140,140,140,140,140,140,140,140,-281,-164,-168,140,140,140,140,140,140,1
40,140,140,140,140,140,140,140,140,140,644,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,-283,-280,140,140,-260,140,-278,796,803,-417,-418,-400,-290,-287,-284,-623,140,140,-228,140,140,140,81,140,140,140,-516,-466,81,-559,-533,140,140,644,564,140,140,140,140,140,140,140,140,-251,-393,-419,-420,944,-402,-405,-401,-404,-285,-288,-296,-289,140,140,81,140,140,140,-263,-231,81,-185,140,-536,-534,140,140,-307,-261,140,-308,140,140,140,140,140,140,140,-282,-421,-422,-406,140,-306,-297,140,-234,140,-271,-264,81,140,140,564,81,81,140,140,-535,-309,-310,644,564,140,-403,-423,-424,1116,-407,-408,-410,-411,140,-276,-274,-265,81,-272,-266,81,-232,140,-313,-311,-314,-312,-426,-425,-412,-291,140,-267,-273,81,140,81,81,-315,-317,-318,-316,-427,-428,-409,-236,81,564,81,]),'ELLIPSIS':([13,15,27,31,32,36,44,48,52,57,66,67,86,94,112,128,213,216,217,231,291,292,300,302,303,308,318,431,433,476,588,592,593,598,602,605,619,809,811,813,818,819,820,821,838,893,945,946,948,1036,1038,1040,1043,1045,1046,1047,1114,1115,1118,1141,1143,1145,],[-375,-370,-374,-380,-372,-369,-378,-341,-376,-342,-373,-379,-371,-368,-381,-377,-343,-383,-367,-385,-156,-155,-398,-392,-399,-391,-389,-388,-387,-386,-344,-158,-157,-417,-418,-400,-384,-393,-419,-420,-402,-405,-401,-404,-390,995,-421,-422,-406,-403,-423,-424,-407,-408,-410,-411,-426,-425,-412,-427,-428,-409,]),'TRY':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[108,-186,-204,-194,-187,108,108,-170,108,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,
-198,-185,-196,-195,-205,-167,108,-508,-452,-553,-529,-249,-175,-279,-277,-286,108,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,108,-516,-466,108,-559,-533,-251,-285,-288,-296,-289,108,-263,-231,108,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,108,108,108,-535,-309,-310,-276,-274,-265,108,-272,-266,108,-232,-313,-311,-314,-312,-291,-267,-273,108,108,108,-315,-317,-318,-316,-236,108,108,]),'TRUE':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1154,1156,1157,1160,],[85,85,-186,-204,-194,-187,85,85,85,85,-170,85,85,-206,-262,-203,-193,85,-192,-174,-202,-189,-172,85,-171,85,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,85,85,85,85,85,-205,85,85,-167,85,85,85,-508,-452,-553,-529,-249,85,85,85,85,-175,85,85,-279,85,85,85,85,-277,85,85,85,-286,85,-17,-11,-9,-10,85,-18,-12,-15,-8,-19,-16,-14,-13,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,-281,-164,-168,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,
85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,-283,-280,85,85,-260,85,-278,-290,-287,-284,-623,85,85,-228,85,85,85,85,85,85,85,-516,-466,85,-559,-533,85,85,85,85,85,85,85,85,85,85,85,-251,-285,-288,-296,-289,85,85,85,85,85,85,-263,-231,85,-185,85,-536,-534,85,85,-307,-261,85,-308,85,85,85,85,85,85,85,-282,85,-306,-297,85,-234,85,-271,-264,85,85,85,85,85,85,85,85,-535,-309,-310,85,85,85,-276,-274,-265,85,-272,-266,85,-232,85,-313,-311,-314,-312,-291,85,-267,-273,85,85,85,85,-315,-317,-318,-316,-236,85,85,85,]),'BYTE':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007
,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[86,86,-186,-204,-194,-187,-361,86,86,86,-594,86,86,-170,86,86,-585,-206,-262,-341,-203,-193,-342,86,-192,-174,-202,-189,-172,-352,86,-363,-365,-171,86,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,86,-185,-357,-355,-366,86,-196,-584,-195,-359,-358,-364,86,86,86,86,86,86,-449,-448,-362,-451,-450,-447,-205,86,86,-167,86,86,-343,-354,86,-508,-623,-623,-452,86,-623,-553,-529,-249,86,86,86,86,-175,86,86,-279,86,86,86,86,-277,86,86,86,86,-286,-588,86,-17,-11,-9,-10,86,-18,-12,-15,-8,-19,-16,-14,-13,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,-281,-439,-430,-164,-168,86,86,-525,-519,-524,-623,-352,-521,86,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,86,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,86,-566,-623,-623,-351,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,-283,-280,86,86,-260,86,86,-278,-344,86,86,86,86,-400,86,-290,-287,-284,-623,86,86,-228,86,86,86,-441,86,86,-587,-595,86,86,86,86,-352,86,-516,-503,-520,-466,-470,86,-502,86,-484,-483,-559,86,-563,-564,-533,86,86,86,86,86,86,86,86,86,86,86,-251,-402,-405,-401,-404,-303,86,-285,-623,-288,-296,-289,86,86,86,86,86,86,-263,-440,-442,-231,86,-185,86,-623,-487,-536,-534,86,86,-307,-261,86,-308,86,86,86,86,86,86,86,-282,86,86,86,-421,-422,86,-406,86,-306,86,-297,86,-234,86,-271,-264,86,86,-443,86,-444,86,86,86,86,86,86,-506,-482,-486,-500,-570,-535,-309,-310,86,86,-403,-423,-424,-407,-408,-410,-411,86,-276,-274,-265,86,-272,-266,86,-446,-445,-232,-505,86,-569,-313,-311,-314,-312,86,-426,-425,86,86,-412,-291,86,86,-267,-273,86,86,86,86,-315,-317,-318
,-316,-427,-428,-409,-236,86,86,86,]),'BREAK':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[87,-186,-204,-194,-187,87,87,-170,87,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,87,-508,-452,-553,-529,-249,-175,-279,-277,-286,87,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,87,-516,-466,87,-559,-533,-251,-285,-288,-296,-289,87,-263,-231,87,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,87,87,87,-535,-309,-310,-276,-274,-265,87,-272,-266,87,-232,-313,-311,-314,-312,-291,-267,-273,87,87,87,-315,-317,-318,-316,-236,87,87,]),'AND':([12,31,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,112,133,134,135,136,137,138,139,141,142,143,145,146,148,151,152,156,159,160,161,162,163,164,165,168,213,216,219,221,231,238,239,240,241,245,246,247,248,250,251,253,256,258,259,260,261,262,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,692,703,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,
1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,350,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,-30,-40,-50,-129,-45,-122,-35,-343,-383,-117,-119,-385,-113,499,-68,-100,-112,525,-37,-60,-123,-131,-133,-47,-42,-132,-81,-53,-32,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,525,-110,-121,-131,-132,525,-133,-341,-388,-387,525,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,350,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-145,-466,-33,-71,-86,-84,-82,-69,-73,-75,350,-44,-85,-106,-39,-55,-95,-87,350,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'PLUS_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,338,-140,-343,-141,-143,-128,-127,338,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-21
8,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'}':([6,7,8,9,12,20,21,30,31,38,42,43,44,45,46,47,48,49,51,53,55,56,57,59,60,61,62,64,67,69,72,76,79,82,84,85,88,89,91,92,93,95,96,98,99,101,110,112,122,124,127,133,134,135,136,137,138,139,141,142,143,144,145,146,148,150,151,152,153,156,159,160,161,162,163,164,165,167,168,169,170,181,186,187,190,204,205,206,207,213,216,219,221,226,227,228,229,231,232,233,234,235,236,238,239,240,241,242,245,247,248,250,256,258,260,261,262,263,264,265,266,271,277,286,291,292,300,302,303,308,311,318,328,336,345,349,362,368,369,394,405,406,410,411,412,415,416,419,420,421,431,433,438,439,440,442,443,445,446,448,449,450,452,453,454,455,456,457,458,459,460,462,465,466,467,469,470,471,474,475,476,477,478,479,481,482,484,485,487,488,489,490,492,493,494,496,497,498,533,534,535,538,549,551,559,579,585,586,588,592,593,598,602,605,611,612,614,615,619,623,625,626,628,629,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,678,679,680,681,682,684,687,688,689,692,700,701,702,703,704,708,709,710,711,713,716,717,718,719,720,721,722,723,724,725,726,727,728,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,831,833,834,835,838,843,844,845,846,848,851,862,863,872,878,881,882,883,887,888,889,890,891,902,906,907,908,909,916,917,925,941,945,946,948,953,956,959,961,962,963,964,967,972,973,974,975,976,989,990,991,992,993,1000,1007,1011,1012,1013,1014,1015,1016,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1064,1065,1066,1067,1068,1069,1070,1071,1080,1083,1084,1100,1101,1102,1103,1105,1108,1109,1110,1114,1115,1118,1120,1123,1124,1125,1127,1128,1135,1136,1137,1138,1139,1140,1141,1143,1145,1148,1149,1154,1155,],[-186,-204,-194,-187,-345,-623,-594,-170,-380,-347,-138,-585,-378,-206,-349,-262,-341,-203,-346,-193,-14
2,-137,-342,-192,-174,-202,-136,-189,-379,-172,-144,-171,-188,-200,-173,-348,-191,-350,-201,-197,-199,-190,-586,-169,-198,-139,-185,-381,-196,-584,-195,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,-449,-448,-451,-450,-447,-205,-167,-166,405,-165,-343,-383,-117,-119,-508,-623,-623,-452,-385,-623,-553,-529,-623,-249,-113,-27,-68,-100,-22,-112,-37,-60,-123,-47,-42,-81,-53,-32,-126,-116,-91,-175,-279,-118,-277,-156,-155,-398,-392,-399,-391,-286,-389,-588,-134,-135,-120,-109,-110,-121,-281,-164,-168,-131,679,-132,-577,-575,-133,-574,-576,-388,-387,-525,-519,-524,-518,-521,700,-527,-526,-523,-528,-522,-517,-479,-478,-469,703,-467,-468,-472,-481,-480,-476,-475,-473,-501,-474,-471,-477,-386,-562,-561,-565,713,-567,-566,-560,-623,-623,-540,-537,-623,-550,-551,725,-623,-124,-114,-115,-125,-146,-283,-280,-260,-278,-343,-149,-344,-158,-157,-417,-418,-400,-290,-287,-284,-623,-384,-335,-339,-340,-228,-7,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-582,-581,881,883,-587,-595,-623,-184,-183,-145,-516,-503,-520,-466,-470,-502,-623,-484,-483,-559,-563,-564,-551,906,-623,-552,-539,909,-542,-533,-541,-543,-544,-33,-71,-86,-84,-82,-69,-73,-75,-28,-44,-85,-106,-39,-55,-95,-87,-29,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-285,-288,-296,-289,-390,-338,-333,-336,-334,-151,-162,-263,975,-231,-185,-580,990,-579,-215,-214,992,-219,993,1007,-536,-538,1011,-534,-307,-261,-308,-282,-421,-422,-406,-306,-297,-623,-337,-234,-21,-163,-159,1067,-271,-268,-264,1070,-583,-578,1083,-217,-216,-482,-500,-535,-545,-23,-24,-309,-310,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-276,-274,-269,-265,1127,-272,-266,-270,-232,-218,-220,-224,-313,-311,-623,-223,-314,-312,-222,-426,-425,-412,-291,-323,-623,-319,-267,-273,-322,
-623,-315,-317,-318,-316,-427,-428,-409,-324,-161,-236,-321,]),'INTERFACE':([1,4,6,7,8,9,10,18,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,71,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,118,122,124,127,129,130,131,169,170,171,173,174,175,176,178,179,180,181,182,183,185,186,187,188,190,204,205,207,213,215,218,226,227,228,229,232,233,234,236,266,271,286,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,465,466,467,468,469,470,471,473,474,475,477,478,479,482,483,484,485,488,549,551,559,579,588,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,725,831,833,834,835,858,862,872,878,906,909,916,917,925,941,953,956,962,969,973,975,976,1000,1007,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[-623,-623,-186,-204,-194,-187,-361,202,-623,-594,-351,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-352,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,327,-196,-584,-195,-359,-358,-364,-449,-448,-623,-610,-608,-362,-352,-623,-618,-611,-451,-612,-623,-613,-450,-447,-351,-205,-167,-352,-623,-343,430,-354,-508,-623,-623,-452,-623,-553,-529,-249,-175,-279,-277,-286,-588,-623,-623,-351,-609,-623,-619,-605,-281,-164,-168,-525,-519,-524,-352,-521,202,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,202,-481,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-567,202,-566,-623,-623,-283,-280,-260,-278,-344,-290,-287,-284,-623,-228,-623,-614,-587,-595,-516,-503,-520,-466,-470,-502,-623,-484,-483,-559,-563,-564,-533,-285,-288,-296,-289,-616,-263,-231,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,-623,-482,-500,-535,-309,-310,-617,-276,-274,-265,-623,-272,-266,-623,-232,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'LONG':([1,2,6,7
,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[27,27,-186,-204,-194,-187,-361,27,27,27,-594,27,27,-170,27,27,-585,-206,-262,-341,-203,-193,-342,27,-192,-174,-202,-189,-172,-352,27,-363,-365,-171,27,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,27,-185,-357,-355,-366,27,-196,-584,-195,-
359,-358,-364,27,27,27,27,27,27,-449,-448,-362,-451,-450,-447,-205,27,27,-167,27,27,-343,-354,27,-508,-623,-623,-452,27,-623,-553,-529,-249,27,27,27,27,-175,27,27,-279,27,27,27,27,-277,27,27,27,27,-286,-588,27,-17,-11,-9,-10,27,-18,-12,-15,-8,-19,-16,-14,-13,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,-281,-439,-430,-164,-168,27,27,-525,-519,-524,-623,-352,-521,27,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,27,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,27,-566,-623,-623,-351,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,-283,-280,27,27,-260,27,27,-278,-344,27,27,27,27,-400,27,-290,-287,-284,-623,27,27,-228,27,27,27,-441,27,27,-587,-595,27,27,27,27,-352,27,-516,-503,-520,-466,-470,27,-502,27,-484,-483,-559,27,-563,-564,-533,27,27,27,27,27,27,27,27,27,27,27,-251,-402,-405,-401,-404,-303,27,-285,-623,-288,-296,-289,27,27,27,27,27,27,-263,-440,-442,-231,27,-185,27,-623,-487,-536,-534,27,27,-307,-261,27,-308,27,27,27,27,27,27,27,-282,27,27,27,-421,-422,27,-406,27,-306,27,-297,27,-234,27,-271,-264,27,27,-443,27,-444,27,27,27,27,27,27,-506,-482,-486,-500,-570,-535,-309,-310,27,27,-403,-423,-424,-407,-408,-410,-411,27,-276,-274,-265,27,-272,-266,27,-446,-445,-232,-505,27,-569,-313,-311,-314,-312,27,-426,-425,27,27,-412,-291,27,27,-267,-273,27,27,27,27,-315,-317,-318,-316,-427,-428,-409,-236,27,27,27,]),'NULL':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,
526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1154,1156,1157,1160,],[89,89,-186,-204,-194,-187,89,89,89,89,-170,89,89,-206,-262,-203,-193,89,-192,-174,-202,-189,-172,89,-171,89,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,89,89,89,89,89,-205,89,89,-167,89,89,89,-508,-452,-553,-529,-249,89,89,89,89,-175,89,89,-279,89,89,89,89,-277,89,89,89,-286,89,-17,-11,-9,-10,89,-18,-12,-15,-8,-19,-16,-14,-13,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,-281,-164,-168,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,89,-283,-280,89,89,-260,89,-278,-290,-287,-284,-623,89,89,-228,89,89,89,89,89,89,89,-516,-466,89,-559,-533,89,89,89,89,89,89,89,89,89,89,89,-251,-285,-288,-296,-289,89,89,89,89,89,89,-263,-231,89,-185,89,-536,-534,89,89,-307,-261,89,-308,89,89,89,89,89,89,89,-282,89,-306,-297,89,-234,89,-271,-264,89,89,89,89,89,89,89,89,-535,-309,-310,89,89,89,-276,-274,-265,89,-272,-266,89,-232,89,-313,-311,-314,-312,-291,89,-267,-273,89,89,89,89,-315,-317,-318,-316,-236,89,89,89,]),'INSTANCEOF':([12,31,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,112,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,162,163,165,213,216,219,221,231,238,240,241,245,246,248,250,251,253,259,260,261,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,634,6
35,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,662,663,664,692,703,731,732,733,734,735,736,737,742,743,745,746,747,749,750,751,752,753,755,757,758,759,760,762,763,765,766,767,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,376,-129,-122,-343,-383,-117,-119,-385,-113,-68,-100,-112,529,-60,-123,-131,-133,-132,-81,542,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,529,-110,-121,-131,-132,529,-133,-341,-388,-387,529,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,-59,376,376,-145,-466,-71,-86,-84,-82,-69,-73,-75,-85,-106,376,-95,-87,-93,-70,-72,-83,-102,-74,-76,-61,-104,376,376,376,-92,-94,-62,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),',':([12,21,23,31,34,38,41,42,43,44,46,48,51,55,56,57,62,67,72,78,85,89,90,96,101,112,117,121,124,133,134,135,136,137,138,139,141,142,143,144,145,146,148,150,151,152,153,156,159,160,161,162,163,164,165,167,168,209,210,211,212,213,216,219,221,231,235,238,239,240,241,242,245,247,248,250,256,258,260,261,262,263,264,265,277,291,292,293,296,297,298,299,300,301,302,303,304,306,307,308,318,321,324,328,336,345,346,349,362,368,369,395,398,399,400,407,410,411,412,415,416,417,419,420,4
21,425,426,427,429,431,433,436,476,487,489,490,497,498,533,534,535,538,555,556,568,569,576,577,578,585,586,588,592,593,598,601,602,605,616,619,623,625,626,629,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,678,679,681,682,684,686,687,688,689,690,692,694,696,703,722,724,726,727,728,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,781,804,805,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,838,839,843,844,845,846,848,851,855,865,867,881,883,885,886,889,890,896,897,904,907,915,918,919,920,926,945,946,948,959,961,963,964,967,980,982,989,990,992,993,994,996,1012,1013,1014,1017,1020,1022,1030,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1055,1057,1058,1059,1060,1061,1075,1076,1083,1084,1085,1088,1089,1090,1099,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1142,1143,1144,1145,1146,1148,1149,1153,1155,],[-345,-594,-212,-380,-209,-347,-211,-138,-585,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-210,-348,-350,-208,-586,-139,-381,-213,-207,-584,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,424,-623,-180,-178,-343,-383,-117,-119,-385,492,-113,-27,-68,-100,-22,-112,-37,-60,-123,-47,-42,-81,-53,-32,-126,-116,-91,-118,-156,-155,-388,597,-413,-397,-381,-398,-394,-392,-399,-383,-380,-396,-391,-389,-381,-383,-588,-134,-135,-387,-120,-109,-110,-121,-429,-431,673,-433,-591,-131,680,-132,-577,-575,683,-133,-574,-576,-153,-154,-182,424,-388,-387,-382,-386,720,-540,-537,-623,-124,-114,-115,-125,-146,776,-548,784,-242,-463,789,-465,-343,-149,-344,-158,-157,-417,-414,-418,-400,789,-384,-335,-339,-340,-7,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-582,-581,882,-587,-595,-179,888,-184,-183,-181,-145,894,-490,-466,-53
9,-542,-541,-543,-544,-33,-71,-86,-84,-82,-69,-73,-75,-28,-44,-85,-106,-39,-55,-95,-87,-29,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-105,-101,-103,-332,-331,-221,-623,-330,-150,-395,-393,-396,-419,-415,-420,-416,-384,942,-413,-402,-405,-401,-404,-380,-396,-390,-384,-338,-333,-336,-334,-151,-162,-383,-432,-434,-580,-579,-592,-593,991,-219,-623,424,-623,-538,-549,-182,-623,-243,-464,-421,-422,-406,-623,-337,-21,-163,-159,-436,-435,-583,-578,-217,-216,-492,-491,-545,-23,-24,-182,-227,-226,-225,-403,-396,-423,-415,-424,-416,-413,-407,1117,-408,-410,-411,-396,-623,-326,-327,-320,-623,-160,-438,-437,-218,-220,-493,-499,1132,-497,784,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-415,-428,-416,-409,-396,-324,-161,-498,-321,]),'CASE':([6,7,8,9,30,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,226,229,233,234,236,266,271,286,311,394,405,406,549,551,559,579,611,612,614,615,628,700,703,713,725,831,833,834,835,862,863,872,878,906,909,916,917,925,941,953,956,962,972,973,974,975,976,1011,1015,1016,1064,1065,1066,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[-186,-204,-194,-187,-170,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,-508,-452,-553,-529,-249,-175,-279,-277,-286,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,-516,-466,-559,-533,-285,-288,-296,-289,-263,977,-231,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,977,-271,-268,-264,977,-535,-309,-310,-276,-274,-269,-265,977,-272,-266,-270,-232,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'VOID':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227
,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[15,15,-186,-204,-194,-187,-361,15,15,15,-594,15,15,-170,15,15,-585,-206,-262,-341,-203,-193,-342,15,-192,-174,-202,-189,-172,-352,15,-363,-365,-171,15,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,15,-185,-357,-355,-366,15,-196,-584,-195,-359,-358,-364,15,15,15,15,15,15,-449,-448,-362,-451,-450,-447,-205,15,15,-167,15,15,-343,-354,15,-508,-623,-623,-452,15,-623,-553,-529,-249,15,15,15,15,-175,15,15,-279,15,15,15,15,-277,15,15,15,15,-286,-588,15,-17,-11,-9,-10,15,-18,-12,-15,-8,-19,-16,-14,-13,15,15,15,15,15,15,15,
15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,-281,-439,-430,-164,-168,15,15,-525,-519,-524,-623,-352,-521,15,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,15,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,15,-566,-623,-623,-351,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,-283,-280,15,15,-260,15,15,-278,-344,15,15,15,15,-400,15,-290,-287,-284,-623,15,15,-228,15,15,15,-441,15,15,-587,-595,15,15,15,15,-352,15,-516,-503,-520,-466,-470,15,-502,15,-484,-483,-559,15,-563,-564,-533,15,15,15,15,15,15,15,15,15,15,15,-251,-402,-405,-401,-404,-303,15,-285,-623,-288,-296,-289,15,15,15,15,15,15,-263,-440,-442,-231,15,-185,15,-623,-487,-536,-534,15,15,-307,-261,15,-308,15,15,15,15,15,15,15,-282,15,15,15,-421,-422,15,-406,15,-306,15,-297,15,-234,15,-271,-264,15,15,-443,15,-444,15,15,15,15,15,15,-506,-482,-486,-500,-570,-535,-309,-310,15,15,-403,-423,-424,-407,-408,-410,-411,15,-276,-274,-265,15,-272,-266,15,-446,-445,-232,-505,15,-569,-313,-311,-314,-312,15,-426,-425,15,15,-412,-291,15,15,-267,-273,15,15,15,15,-315,-317,-318,-316,-427,-428,-409,-236,15,15,15,]),'GTEQ':([12,38,42,46,48,51,55,56,57,62,72,85,89,101,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,163,165,219,221,238,240,241,245,246,250,251,253,259,260,263,264,265,277,336,345,349,362,366,368,369,410,412,416,419,423,434,498,533,534,535,538,585,586,588,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,692,703,732,733,734,742,743,746,747,749,752,753,759,765,766,769,770,771,772,773,777,804,805,843,844,845,846,848,851,959,961,964,967,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1149,1155,],[-345,-347,-138,-349,-341,-346,-142,-137,-342,-136,-144,-348,-350,-139,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,373,-129,-122,-117,-119,-113,505,-100,-112,526,-1
23,-131,-133,-132,-81,-126,-116,-91,-118,-134,-135,-120,-109,526,-110,-121,-131,-132,526,-133,-341,526,-124,-114,-115,-125,-146,-343,-149,-344,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,-145,-466,-86,-84,-82,-85,-106,-95,-87,-93,-83,-102,-104,-92,-94,-105,-101,-103,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-162,-623,-337,-163,-159,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-161,-321,]),'MINUS_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,344,-140,-343,-141,-143,-128,-127,344,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'SHORT':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,
440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[32,32,-186,-204,-194,-187,-361,32,32,32,-594,32,32,-170,32,32,-585,-206,-262,-341,-203,-193,-342,32,-192,-174,-202,-189,-172,-352,32,-363,-365,-171,32,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,32,-185,-357,-355,-366,32,-196,-584,-195,-359,-358,-364,32,32,32,32,32,32,-449,-448,-362,-451,-450,-447,-205,32,32,-167,32,32,-343,-354,32,-508,-623,-623,-452,32,-623,-553,-529,-249,32,32,32,32,-175,32,32,-279,32,32,32,32,-277,32,32,32,32,-286,-588,32,-17,-11,-9,-10,32,-18,-12,-15,-8,-19,-16,-14,-13,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,-281,-439,-430,-164,-168,32,32,-525,-519,-524,-623,-352,-521,32,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,32,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,32,-566,-623,-623,-351,
32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,-283,-280,32,32,-260,32,32,-278,-344,32,32,32,32,-400,32,-290,-287,-284,-623,32,32,-228,32,32,32,-441,32,32,-587,-595,32,32,32,32,-352,32,-516,-503,-520,-466,-470,32,-502,32,-484,-483,-559,32,-563,-564,-533,32,32,32,32,32,32,32,32,32,32,32,-251,-402,-405,-401,-404,-303,32,-285,-623,-288,-296,-289,32,32,32,32,32,32,-263,-440,-442,-231,32,-185,32,-623,-487,-536,-534,32,32,-307,-261,32,-308,32,32,32,32,32,32,32,-282,32,32,32,-421,-422,32,-406,32,-306,32,-297,32,-234,32,-271,-264,32,32,-443,32,-444,32,32,32,32,32,32,-506,-482,-486,-500,-570,-535,-309,-310,32,32,-403,-423,-424,-407,-408,-410,-411,32,-276,-274,-265,32,-272,-266,32,-446,-445,-232,-505,32,-569,-313,-311,-314,-312,32,-426,-425,32,32,-412,-291,32,32,-267,-273,32,32,32,32,-315,-317,-318,-316,-427,-428,-409,-236,32,32,32,]),'+':([2,12,16,25,38,40,42,46,48,51,55,56,57,58,62,72,73,77,85,89,101,132,133,134,136,137,139,141,142,143,145,146,147,148,151,152,154,155,156,157,163,165,194,203,208,219,220,221,237,238,241,245,246,249,250,251,252,253,254,259,260,263,264,265,268,270,275,276,277,281,290,294,295,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,348,349,350,351,352,353,354,355,356,358,359,362,364,365,366,368,369,370,371,372,373,374,375,377,378,380,410,411,412,416,419,423,428,434,498,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,533,534,535,536,537,538,539,540,541,543,544,545,546,552,553,560,585,586,588,621,623,624,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,653,671,685,687,692,703,729,732,733,734,742,743,746,747,749,752,753,759,765,766,769,770,771,772,773,776,777,785,786,790,791,792,798,799,800,804,805,836,841,843,844,845,846,848,850,851,855,857,882,911,912,924,930,932,933,934,938,939,940,951,958,959,961,964,967,977,983,988,991,992,993,1020,1022,1026,1030,1050,1055,1057,1058,1059,10
60,1061,1083,1092,1100,1103,1105,1110,1123,1124,1125,1130,1135,1136,1148,1149,1155,],[147,-345,147,147,-347,249,-138,-349,-341,-346,-142,-137,-342,147,-136,-144,147,147,-348,-350,-139,147,-140,-108,-343,-88,-141,359,-143,-107,-128,-96,147,-127,-119,-111,249,147,-130,147,-129,-122,147,147,249,-117,249,-119,147,-113,-100,-112,520,147,-123,-131,147,-133,147,-132,540,-126,-116,-91,147,147,147,147,-118,147,147,147,147,-17,-11,-9,-10,147,-134,-18,-12,-15,-8,-19,-16,-14,-13,-135,147,-120,147,147,147,147,147,147,147,147,147,-109,147,147,520,-110,-121,147,147,147,147,147,147,147,147,147,-131,249,-132,520,-133,-341,147,520,-124,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,-114,-115,-125,147,147,-146,147,147,147,147,147,147,147,147,147,147,-343,-149,-344,147,-335,147,-339,-340,-152,-99,-97,-98,359,359,359,-329,-90,-89,-147,-148,-328,-145,147,147,249,147,-145,-466,147,359,359,359,359,-106,-95,359,-93,359,-102,-104,-92,-94,-105,-101,-103,-332,-331,147,-221,147,147,147,147,147,147,147,147,-330,-150,147,147,-338,-333,-336,-334,-151,147,-162,-128,147,249,147,147,147,147,147,147,147,147,147,147,147,147,-623,-337,-163,-159,147,147,147,147,-217,-216,-227,-226,147,-225,147,-623,-326,-327,-320,-623,-160,-218,249,-224,-623,-223,-222,-323,-623,-319,147,-322,-623,-324,-161,-321,]),'STRICTFP':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,8
32,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[75,75,-186,-204,-194,-187,-361,75,-594,75,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,75,-610,-608,-362,75,-618,-611,-451,-612,75,-613,-450,-447,75,-205,-167,75,-343,-354,-508,75,75,-452,75,-553,-529,75,-249,-175,-279,75,-277,75,-286,-588,75,75,75,-609,75,-619,-605,-281,-164,-168,-525,-519,-524,75,-521,-527,-526,-523,-528,75,-522,75,-479,-478,-469,75,-472,-481,75,-480,-476,-475,75,-473,-501,-474,-358,-471,-477,-562,-565,75,-567,-566,75,75,75,-283,-280,-260,75,-278,-344,75,-290,-287,-284,-623,-228,75,-614,-587,-595,-516,-503,-520,-466,-470,-502,75,-484,-483,-559,-563,-564,75,-533,-303,75,-285,75,-288,-296,-289,-616,-263,-231,-185,75,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,75,75,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,75,-272,-266,75,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'RSHIFT_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,339,-140,-343,-141,-143,-128,-127,339,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,
-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'TRANSIENT':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[74,74,-186,-204,-194,-187,-361,74,-594,74,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,74,-610,-608,-362,74,-618,-611,-451,-612,74,-613,-450,-447,74,-205,-167,74,-343,-354,-508,74,74,-452,74,-553,-529,74,-249,-175,-279,74,-277,74,-286,-588,74,74,74,-609,74,-619,-605,-281,-164,-168,-525,-519,-524,74,-521,-527,-526,-523,-528,74,-522,74,-479,-478,-469,74,-472,-481,74,-480,-476,-475,74,-473,-501,-474,-358,-471,-477,-562,-565,74,-567,-566,74,74,74,-283,-280,-260,74,-278,-344,74,-290,-287,-284,-623,-228,74,-614,-587,-595,-516,-503,-520,-466,-470,-502,74,-484,-483,-559,-563,-564,74,-533,-303,74,-285,74,-288,-296,-289,-616,-263,-231,-185,74,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,74,74,-506,-482,-486,-500,-570,-535
,-309,-310,-617,-276,-274,-265,74,-272,-266,74,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'PACKAGE':([4,10,21,43,48,57,74,75,96,104,105,106,113,114,115,124,129,130,131,175,188,213,218,328,588,682,684,],[184,-361,-594,-585,-341,-342,-363,-365,-586,-360,-356,-353,-357,-355,-366,-584,-359,-358,-364,-362,390,-343,-354,-588,-344,-587,-595,]),'%':([12,38,42,46,48,51,55,56,57,62,72,85,89,101,133,134,136,137,139,142,143,145,146,148,151,152,156,163,165,219,221,238,241,245,246,250,251,253,259,263,264,265,277,336,345,349,362,366,368,369,410,412,416,419,423,434,498,533,534,535,538,585,586,588,623,625,626,631,634,635,636,640,641,642,643,644,646,650,692,703,743,746,749,753,759,765,766,769,770,771,772,773,777,804,805,843,844,845,846,848,851,855,959,961,964,967,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1149,1155,],[-345,-347,-138,-349,-341,-346,-142,-137,-342,-136,-144,-348,-350,-139,-140,-108,-343,351,-141,-143,-107,-128,-96,-127,-119,-111,-130,-129,-122,-117,-119,-113,-100,-112,513,-123,-131,-133,-132,-126,-116,544,-118,-134,-135,-120,-109,513,-110,-121,-131,-132,513,-133,-341,513,-124,-114,-115,-125,-146,-343,-149,-344,-335,-339,-340,-152,-99,-97,-98,-329,351,351,-147,-148,-328,-145,-145,-466,-106,351,351,-102,-104,351,351,-105,-101,-103,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-162,-128,-623,-337,-163,-159,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-161,-321,]),'LSHIFT_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-3
46,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,343,-140,-343,-141,-143,-128,-127,343,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'BOOLEAN':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,106
7,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[36,36,-186,-204,-194,-187,-361,36,36,36,-594,36,36,-170,36,36,-585,-206,-262,-341,-203,-193,-342,36,-192,-174,-202,-189,-172,-352,36,-363,-365,-171,36,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,36,-185,-357,-355,-366,36,-196,-584,-195,-359,-358,-364,36,36,36,36,36,36,-449,-448,-362,-451,-450,-447,-205,36,36,-167,36,36,-343,-354,36,-508,-623,-623,-452,36,-623,-553,-529,-249,36,36,36,36,-175,36,36,-279,36,36,36,36,-277,36,36,36,36,-286,-588,36,-17,-11,-9,-10,36,-18,-12,-15,-8,-19,-16,-14,-13,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,-281,-439,-430,-164,-168,36,36,-525,-519,-524,-623,-352,-521,36,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,36,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,36,-566,-623,-623,-351,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,36,-283,-280,36,36,-260,36,36,-278,-344,36,36,36,36,-400,36,-290,-287,-284,-623,36,36,-228,36,36,36,-441,36,36,-587,-595,36,36,36,36,-352,36,-516,-503,-520,-466,-470,36,-502,36,-484,-483,-559,36,-563,-564,-533,36,36,36,36,36,36,36,36,36,36,36,-251,-402,-405,-401,-404,-303,36,-285,-623,-288,-296,-289,36,36,36,36,36,36,-263,-440,-442,-231,36,-185,36,-623,-487,-536,-534,36,36,-307,-261,36,-308,36,36,36,36,36,36,36,-282,36,36,36,-421,-422,36,-406,36,-306,36,-297,36,-234,36,-271,-264,36,36,-443,36,-444,36,36,36,36,36,36,-506,-482,-486,-500,-570,-535,-309,-310,36,36,-403,-423,-424,-407,-408,-410,-411,36,-276,-274,-265,36,-272,-266,36,-446,-445,-232,-505,36,-569,-313,-311,-314,-312,36,-426,-425,36,36,-412,-291,36,36,-267,-273,36,36,36,36,-315,-317,-318,-316,-427,-428,-409,-236,36,36,36,]),']':([12,31,38,42,44,46,48,51,55,56,57,62,67,7
2,85,89,101,112,133,134,135,136,137,138,139,141,142,143,144,145,146,148,150,151,152,153,156,159,160,161,162,163,164,165,167,168,213,216,219,221,231,277,290,291,292,300,302,303,308,318,320,336,345,349,362,368,369,431,433,476,538,548,550,585,586,588,591,592,593,598,602,605,619,623,624,625,626,629,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,692,703,772,773,777,804,805,809,811,813,818,819,820,821,838,842,843,844,845,846,848,851,945,946,948,959,961,963,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,-343,-383,-117,-119,-385,-118,592,-156,-155,-398,-392,-399,-391,-389,592,-134,-135,-120,-109,-110,-121,-388,-387,-386,-146,772,773,-343,-149,-344,804,-158,-157,-417,-418,-400,-384,-335,843,-339,-340,-7,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-145,-466,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,961,-338,-333,-336,-334,-151,-162,-421,-422,-406,-623,-337,-21,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'INT':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,33
9,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[66,66,-186,-204,-194,-187,-361,66,66,66,-594,66,66,-170,66,66,-585,-206,-262,-341,-203,-193,-342,66,-192,-174,-202,-189,-172,-352,66,-363,-365,-171,66,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,66,-185,-357,-355,-366,66,-196,-584,-195,-359,-358,-364,66,66,66,66,66,66,-449,-448,-362,-451,-450,-447,-205,66,66,-167,66,66,-343,-354,66,-508,-623,-623,-452,66,-623,-553,-529,-249,66,66,66,66,-175,66,66,-279,66,66,66,66,-277,66,66,66,66,-286,-588,66,-17,-11,-9,-10,66,-18,-12,-15,-8,-19,-16,-14,-13,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,-281,-439,-430,-164,-168,66,66,-525,-519,-524,-623,-352,-521,66,-527,-526,-523,-528,-351,-522,
-623,-479,-478,-469,-623,-352,-472,66,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,66,-566,-623,-623,-351,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,66,-283,-280,66,66,-260,66,66,-278,-344,66,66,66,66,-400,66,-290,-287,-284,-623,66,66,-228,66,66,66,-441,66,66,-587,-595,66,66,66,66,-352,66,-516,-503,-520,-466,-470,66,-502,66,-484,-483,-559,66,-563,-564,-533,66,66,66,66,66,66,66,66,66,66,66,-251,-402,-405,-401,-404,-303,66,-285,-623,-288,-296,-289,66,66,66,66,66,66,-263,-440,-442,-231,66,-185,66,-623,-487,-536,-534,66,66,-307,-261,66,-308,66,66,66,66,66,66,66,-282,66,66,66,-421,-422,66,-406,66,-306,66,-297,66,-234,66,-271,-264,66,66,-443,66,-444,66,66,66,66,66,66,-506,-482,-486,-500,-570,-535,-309,-310,66,66,-403,-423,-424,-407,-408,-410,-411,66,-276,-274,-265,66,-272,-266,66,-446,-445,-232,-505,66,-569,-313,-311,-314,-312,66,-426,-425,66,66,-412,-291,66,66,-267,-273,66,66,66,66,-315,-317,-318,-316,-427,-428,-409,-236,66,66,66,]),'EXTENDS':([17,26,54,109,119,198,297,329,395,396,397,400,402,403,605,627,674,691,817,818,819,820,821,847,864,866,892,945,946,948,978,981,1036,1038,1040,1042,1043,1045,1046,1047,1074,1077,1114,1115,1118,1141,1143,1145,],[-455,223,223,315,-511,-454,599,-510,-429,-439,-430,675,-456,-512,-400,-558,-441,-555,943,-402,-405,-401,-404,-557,-440,-442,-556,-421,-422,-406,-443,-444,-403,-423,-424,1113,-407,-408,-410,-411,-446,-445,-426,-425,-412,-427,-428,-409,]),'DIVIDE_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-1
40,-144,-129,-348,-350,-127,-128,-139,-141,334,-140,-343,-141,-143,-128,-127,334,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'[':([12,13,15,23,27,31,32,36,38,46,48,51,52,55,56,57,62,66,68,70,72,85,86,89,100,101,112,117,125,128,133,136,139,142,145,166,192,210,213,216,217,231,246,291,292,299,300,302,303,304,305,308,321,324,325,326,366,367,416,423,434,476,512,538,561,567,573,580,585,586,587,588,589,592,593,598,602,605,619,623,625,626,631,640,643,644,646,650,652,692,703,705,772,773,777,781,804,805,809,811,813,815,818,819,820,821,839,843,844,845,846,848,855,856,895,896,900,903,904,919,921,945,946,948,959,961,966,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1155,],[-345,-375,-370,-143,-374,-380,-372,-369,-347,-349,-341,-346,-376,-142,268,-342,270,-373,-343,-140,-144,-348,-371,-350,290,-139,320,-141,320,-377,-140,-343,-141,-143,290,320,290,320,-343,320,320,-385,290,-156,320,320,-398,-392,-399,320,320,-391,-381,-383,624,624,290,320,290,-341,290,-386,-145,-146,-343,290,-329,-328,-343,-149,-147,-344,-148,-158,-157,-417,-418,-400,320,-335,624,624,-152,-329,-147,-148,-328,-145,320,-145,-466,-343,-332,-331,-221,320,-330,-150,-393,-419,-420,320,-402,-405,-401,-404,-384,-338,-333,-336,-334,-151,290,320,320,320,-343,320,320,320,290,-421,-422,-406,-623,-337,320,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-321,]),'SYNCHRONIZED':([1,4,6,7,8,9,10,11,20,21,24,30,37,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104
,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,330,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,677,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,807,824,826,831,832,833,834,835,849,858,862,872,874,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,986,987,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[102,175,-186,-204,-194,-187,-361,193,102,-594,175,-170,193,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,175,-610,-608,-362,175,-618,-611,-451,-612,175,-613,-450,-447,175,-205,-167,102,-343,-354,-508,175,175,-452,175,-553,-529,175,-249,-175,-279,175,-277,175,-286,-588,193,175,175,175,-609,175,-619,-605,-281,-164,-168,-525,-519,-524,175,-521,-527,-526,-523,-528,175,-522,175,-479,-478,-469,175,-472,-481,175,-480,-476,-475,175,-473,-501,-474,-358,-471,-477,-562,-565,175,-567,-566,175,175,175,-283,-280,-260,175,-278,-344,175,-290,-287,-284,-623,-228,175,-614,193,-587,-595,-516,-503,-520,-466,-470,-502,102,-484,-483,-559,-563,-564,175,-533,-251,-303,175,-285,175,-288,-296,-289,193,-616,-263,-231,193,-185,175,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,102,175,193,193,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,102,-272,-266,102,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,193,193,193,-315,-317,-318,-316,-236,193,193,]),'EQ':([12,31,38,42,44,46,48,51,55,56,57,62,
67,72,85,89,101,112,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,162,163,164,165,213,216,219,221,231,238,240,241,245,246,248,250,251,253,256,259,260,261,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,661,662,663,664,692,703,731,732,733,734,735,736,737,742,743,745,746,747,749,750,751,752,753,755,756,757,758,759,760,762,763,764,765,766,767,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,-50,-129,378,-122,-343,-383,-117,-119,-385,-113,-68,-100,-112,516,-60,-123,-131,-133,536,-132,-81,-53,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,516,-110,-121,-131,-132,516,-133,-341,-388,-387,516,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,378,-59,-52,-51,-145,-466,-71,-86,-84,-82,-69,-73,-75,-85,-106,-55,-95,-87,-93,-70,-72,-83,-102,-74,378,-76,-61,-104,-57,-54,-56,378,-92,-94,-62,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'(':([1,2,6,7,8,9,11,14,16,19,20,21,25,30,31,37,40,45,47,48,49,53,57,58,59,60,61,64,68,69,
70,73,76,77,79,80,81,82,84,88,91,92,93,95,98,99,102,108,110,122,126,127,132,136,147,154,155,157,190,193,194,203,204,207,208,213,220,226,229,231,233,234,236,237,249,252,254,266,268,270,271,273,274,275,276,279,281,286,290,294,295,300,302,303,308,311,321,322,324,326,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,392,394,405,406,411,428,436,476,497,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,561,573,579,580,581,582,585,587,588,589,598,602,605,611,612,613,614,615,617,621,622,624,628,640,646,650,653,671,677,685,687,692,700,703,705,709,713,724,725,729,776,784,785,786,787,790,791,792,793,795,796,797,798,799,800,801,802,803,807,809,811,813,818,819,820,821,831,833,834,835,836,837,839,841,849,850,852,857,862,870,871,872,874,878,880,882,896,900,904,906,909,911,912,916,917,924,925,930,931,932,933,934,938,939,940,941,945,946,948,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1001,1010,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1114,1115,1118,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[40,154,-186,-204,-194,-187,40,194,154,203,40,208,220,-170,-380,40,154,-206,-262,-341,-203,-193,-342,154,-192,-174,-202,-189,275,-172,276,220,-171,154,-188,279,281,-200,-173,-191,-201,-197,-199,-190,-169,-198,294,309,-185,-196,348,-195,220,275,220,154,220,220,-205,294,154,154,-167,40,220,-343,154,-508,-452,-385,-553,-529,-249,220,220,220,220,-175,154,154,-279,552,553,154,154,40,154,-277,154,154,154,-398,-392,-399,-391,-286,-381,621,-383,-382,-588,40,-17,-11,-9,-10,154,-18,-12,-15,-8,-19,-16,-14,-13,154,220,220,220,220,220,220,220,220,220,154,220,220,220,220,220,220,220,220,220,220,671,-281,-164,-168,220,154,-382,
-386,729,220,220,220,220,220,220,220,220,154,220,154,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,220,-283,-280,154,154,-260,154,275,786,-278,790,791,792,798,799,-344,800,-417,-418,-400,-290,-287,832,-284,-623,836,154,841,154,-228,786,790,220,220,154,40,220,154,220,-516,-466,899,40,-559,-542,-533,154,154,40,154,154,924,154,154,154,930,932,933,934,154,154,154,938,939,940,-251,-393,-419,-420,-402,-405,-401,-404,-285,-288,-296,-289,154,958,-384,154,40,220,220,220,-263,983,984,-231,40,-185,988,220,999,1002,1009,-536,-534,220,220,-307,-261,154,-308,154,1026,154,154,154,154,154,154,-282,-421,-422,-406,154,-306,-297,154,-234,220,-271,-264,40,154,154,40,40,40,154,154,1087,1095,-535,-309,-310,40,154,-403,-423,-424,-407,-408,-410,-411,154,-276,-274,-265,40,-272,-266,40,-232,220,-313,-311,-314,-312,-426,-425,-412,-291,220,-267,-273,40,154,40,40,-315,-317,-318,-316,-427,-428,-409,-236,40,40,40,]),'-':([2,12,16,25,38,40,42,46,48,51,55,56,57,58,62,72,73,77,85,89,101,132,133,134,136,137,139,141,142,143,145,146,147,148,151,152,154,155,156,157,163,165,194,203,208,219,220,221,237,238,241,245,246,249,250,251,252,253,254,259,260,263,264,265,268,270,275,276,277,281,290,294,295,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,348,349,350,351,352,353,354,355,356,358,359,362,364,365,366,368,369,370,371,372,373,374,375,377,378,380,410,411,412,416,419,423,428,434,498,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,533,534,535,536,537,538,539,540,541,543,544,545,546,552,553,560,585,586,588,621,623,624,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,653,671,685,687,692,703,729,732,733,734,742,743,746,747,749,752,753,759,765,766,769,770,771,772,773,776,777,785,786,790,791,792,798,799,800,804,805,836,841,843,844,845,846,848,850,851,855,857,882,911,912,924,930,932,933,934,938,939,940,951,958,959,961,964,967,977,983,988,991,992,99
3,1020,1022,1026,1030,1050,1055,1057,1058,1059,1060,1061,1083,1092,1100,1103,1105,1110,1123,1124,1125,1130,1135,1136,1148,1149,1155,],[155,-345,155,155,-347,252,-138,-349,-341,-346,-142,-137,-342,155,-136,-144,155,155,-348,-350,-139,155,-140,-108,-343,-88,-141,358,-143,-107,-128,-96,155,-127,-119,-111,252,155,-130,155,-129,-122,155,155,252,-117,252,-119,155,-113,-100,-112,517,155,-123,-131,155,-133,155,-132,541,-126,-116,-91,155,155,155,155,-118,155,155,155,155,-17,-11,-9,-10,155,-134,-18,-12,-15,-8,-19,-16,-14,-13,-135,155,-120,155,155,155,155,155,155,155,155,155,-109,155,155,517,-110,-121,155,155,155,155,155,155,155,155,155,-131,252,-132,517,-133,-341,155,517,-124,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,155,-114,-115,-125,155,155,-146,155,155,155,155,155,155,155,155,155,155,-343,-149,-344,155,-335,155,-339,-340,-152,-99,-97,-98,358,358,358,-329,-90,-89,-147,-148,-328,-145,155,155,252,155,-145,-466,155,358,358,358,358,-106,-95,358,-93,358,-102,-104,-92,-94,-105,-101,-103,-332,-331,155,-221,155,155,155,155,155,155,155,155,-330,-150,155,155,-338,-333,-336,-334,-151,155,-162,-128,155,252,155,155,155,155,155,155,155,155,155,155,155,155,-623,-337,-163,-159,155,155,155,155,-217,-216,-227,-226,155,-225,155,-623,-326,-327,-320,-623,-160,-218,252,-224,-623,-223,-222,-323,-623,-319,155,-322,-623,-324,-161,-321,]),'FINAL':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708
,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[104,104,-186,-204,-194,-187,-361,104,-594,104,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,104,-610,-608,-362,104,-618,-611,-451,-612,104,-613,-450,-447,104,-205,-167,104,-343,-354,-508,104,104,-452,104,-553,-529,104,-249,-175,-279,104,-277,104,-286,-588,104,104,104,-609,104,-619,-605,-281,-164,-168,-525,-519,-524,104,-521,-527,-526,-523,-528,104,-522,104,-479,-478,-469,104,-472,-481,104,-480,-476,-475,104,-473,-501,-474,-358,-471,-477,-562,-565,104,-567,-566,104,104,104,-283,-280,-260,104,-278,-344,104,-290,-287,-284,-623,-228,104,-614,-587,-595,-516,-503,-520,-466,-470,-502,104,-484,-483,-559,-563,-564,104,-533,-303,104,-285,104,-288,-296,-289,-616,-263,-231,-185,104,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,104,104,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,104,-272,-266,104,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'PROTECTED':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,72
0,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[105,105,-186,-204,-194,-187,-361,105,-594,105,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,105,-610,-608,-362,105,-618,-611,-451,-612,105,-613,-450,-447,105,-205,-167,105,-343,-354,-508,105,105,-452,105,-553,-529,105,-249,-175,-279,105,-277,105,-286,-588,105,105,105,-609,105,-619,-605,-281,-164,-168,-525,-519,-524,105,-521,-527,-526,-523,-528,105,-522,105,-479,-478,-469,105,-472,-481,105,-480,-476,-475,105,-473,-501,-474,-358,-471,-477,-562,-565,105,-567,-566,105,105,105,-283,-280,-260,105,-278,-344,105,-290,-287,-284,-623,-228,105,-614,-587,-595,-516,-503,-520,-466,-470,-502,105,-484,-483,-559,-563,-564,105,-533,-303,105,-285,105,-288,-296,-289,-616,-263,-231,-185,105,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,105,105,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,105,-272,-266,105,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'!':([2,16,25,40,58,73,77,132,147,154,155,157,194,203,208,220,237,249,252,254,268,270,275,276,281,290,294,295,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,685,687,692,729,776,785,786,790,791,792,798,799,800,836,841,850,852,857,882,911,912,924,930,932,933,934,938,939,940,951,958,965,977,983,988,991,1026,1050,1092,1126,1130,],[157,157,157,254,157,157,157,157,15
7,254,157,157,157,157,254,254,157,157,157,157,157,157,157,157,157,157,157,157,-17,-11,-9,-10,157,-18,-12,-15,-8,-19,-16,-14,-13,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,254,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,254,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,254,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,254,157,157,]),'FINALLY':([310,311,405,610,611,612,614,615,833,834,835,1120,],[-623,-286,-164,830,-290,-287,-289,-623,-288,830,-289,-291,]),'RRSHIFT_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,342,-140,-343,-141,-143,-128,-127,342,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'<':([1,6,7,8,9,10,11,12,17,20,21,30,31,37,38,42,43,45,46,47,48,49,51,53,55,56,57,59,60,61,62,64,68,69,71,72,74,75,76,79,82,84,85,88,89,91,92,93,95,96,98,99,100,101,104,105,106,110,113,114,115,116,119,122,124,127,129,130,131,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,158,159,163,165,169,170,175,181,186,187,190,204,207,213,216,218,219,221,226,227,228,229,232,233,234,236,2
38,240,241,245,246,250,251,253,259,260,263,264,265,266,271,277,280,286,288,289,304,306,311,317,324,328,330,336,345,349,357,360,362,363,366,368,369,394,401,402,403,405,406,410,412,416,419,423,434,438,439,440,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,465,466,467,468,469,470,471,473,474,475,477,478,479,482,483,484,485,488,498,514,533,534,535,538,549,551,559,561,567,579,583,585,586,588,611,612,614,615,619,623,625,626,627,628,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,677,682,684,691,692,700,701,702,703,704,705,708,709,710,711,713,716,717,725,732,733,734,742,743,746,747,749,752,753,759,765,766,769,770,771,772,773,777,780,783,804,805,807,815,822,831,833,834,835,839,843,844,845,846,848,849,851,855,862,872,874,878,900,906,909,916,917,925,941,953,956,959,961,962,964,967,973,975,976,986,987,992,993,1000,1007,1011,1015,1016,1018,1020,1022,1030,1055,1057,1058,1059,1060,1061,1064,1065,1067,1068,1069,1070,1071,1080,1083,1100,1101,1102,1103,1105,1108,1109,1110,1120,1123,1124,1125,1127,1128,1129,1131,1134,1135,1136,1137,1138,1139,1140,1148,1149,1154,1155,1156,1160,],[107,-186,-204,-194,-187,-361,107,-345,199,107,-594,-170,230,107,-347,-138,-585,-206,-349,-262,-341,-203,-346,-193,-142,-137,-342,-192,-174,-202,-136,-189,-343,-172,-352,-144,-363,-365,-171,-188,-200,-173,-348,-191,-350,-201,-197,-199,-190,-586,-169,-198,-383,-139,-360,-356,-353,-185,-357,-355,-366,107,199,-196,-584,-195,-359,-358,-364,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,107,372,-129,-122,-449,-448,-362,-451,-450,-447,-205,-167,107,-343,-383,-354,-117,-119,-508,-623,-623,-452,-623,-553,-529,-249,-113,504,-100,-112,521,-123,-131,-133,-132,-81,-126,-116,-91,-175,-279,-118,107,-277,107,107,-383,604,-286,107,-383,-588,107,-134,-135,-120,107,107,-109,107,653,-110,-121,-281,199,-456,-512,-164,-168,-131,-132,521,-133,-341,653,-525,-519,-524,-352,-521,199,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,199,-481,-480,-4
76,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-567,199,-566,-623,-623,-124,107,-114,-115,-125,-146,-283,-280,-260,-343,-383,-278,107,-343,-149,-344,-290,-287,-284,-623,-384,-335,-339,-340,199,-228,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,107,-587,-595,199,-145,-516,-503,-520,-466,-470,-343,-502,107,-484,-483,-559,-563,-564,-533,-86,-84,-82,-85,-106,-95,-87,-93,-83,-102,-104,-92,-94,-105,-101,-103,-332,-331,-221,107,107,-330,-150,-251,-384,947,-285,-288,-296,-289,-384,-338,-333,-336,-334,-151,107,-162,-383,-263,-231,107,-185,-343,-536,-534,-307,-261,-308,-282,-306,-297,-623,-337,-234,-163,-159,-271,-264,107,107,107,-217,-216,-482,-500,-535,-309,-310,107,-227,-226,-225,-623,-326,-327,-320,-623,-160,-276,-274,-265,107,-272,-266,107,-232,-218,-224,-313,-311,-623,-223,-314,-312,-222,-291,-323,-623,-319,-267,-273,107,107,107,-322,-623,-315,-317,-318,-316,-324,-161,-236,-321,107,107,]),'NUM':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1
138,1139,1140,1154,1156,1157,1160,],[12,12,-186,-204,-194,-187,12,12,12,12,-170,12,12,-206,-262,-203,-193,12,-192,-174,-202,-189,-172,12,-171,12,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,12,12,12,12,12,-205,12,12,-167,12,12,12,-508,-452,-553,-529,-249,12,12,12,12,-175,12,12,-279,12,12,12,12,-277,12,12,12,-286,12,-17,-11,-9,-10,12,-18,-12,-15,-8,-19,-16,-14,-13,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,-281,-164,-168,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,-283,-280,12,12,-260,12,-278,-290,-287,-284,-623,12,12,-228,12,12,12,12,12,12,12,-516,-466,12,-559,-533,12,12,12,12,12,12,12,12,12,12,12,-251,-285,-288,-296,-289,12,12,12,12,12,12,-263,-231,12,-185,12,-536,-534,12,12,-307,-261,12,-308,12,12,12,12,12,12,12,-282,12,-306,-297,12,-234,12,-271,-264,12,12,12,12,12,12,12,12,-535,-309,-310,12,12,12,-276,-274,-265,12,-272,-266,12,-232,12,-313,-311,-314,-312,-291,12,-267,-273,12,12,12,12,-315,-317,-318,-316,-236,12,12,12,]),'>':([12,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,163,165,213,219,221,230,231,238,240,241,245,246,250,251,253,259,260,263,264,265,277,291,292,293,297,299,300,302,303,304,306,307,308,318,336,345,346,349,362,366,368,369,395,400,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,604,605,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,692,703,732,733,734,742,743,746,747,749,752,753,759,765,766,769,770,771,772,773,777,804,805,809,810,811,812,813,814,815,817,818,819,820,821,822,823,838,843,844,845,846,848,851,855,867,945,946,947,948,959,961,964,967,992,993,1020,1022,1030,1036,1037,1038,1039,1040,1041,1042,1043,1045,1046,1047,1048,1055,1057,1058,1059,1060,1061,1075,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1142,1143,1144,1145,1146,1148,1149,1155,],[-345,-347,-138,-378,-34
9,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-140,-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,370,-129,-122,-343,-117,-119,476,-385,-113,500,-100,-112,522,-123,-131,-133,-132,-81,-126,-116,-91,-118,-156,-155,-388,598,-381,-398,-392,-399,-383,-380,605,-391,-389,-134,-135,-387,-120,-109,522,-110,-121,-429,674,-131,-132,522,-133,-341,-388,-387,522,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,476,-400,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,-145,-466,-86,-84,-82,-85,-106,-95,-87,-93,-83,-102,-104,-92,-94,-105,-101,-103,-332,-331,-221,-330,-150,-393,605,-419,605,-420,605,-384,598,-402,-405,-401,-404,-380,605,-390,-338,-333,-336,-334,-151,-162,-383,605,-421,-422,476,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,605,-423,605,-424,605,598,-407,-408,-410,-411,605,-623,-326,-327,-320,-623,-160,605,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,605,-428,605,-409,605,-324,-161,-321,]),'REMAINDER_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,332,-140,-343,-141,-143,-128,-127,332,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'PRIVATE':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,6
1,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[113,113,-186,-204,-194,-187,-361,113,-594,113,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,113,-610,-608,-362,113,-618,-611,-451,-612,113,-613,-450,-447,113,-205,-167,113,-343,-354,-508,113,113,-452,113,-553,-529,113,-249,-175,-279,113,-277,113,-286,-588,113,113,113,-609,113,-619,-605,-281,-164,-168,-525,-519,-524,113,-521,-527,-526,-523,-528,113,-522,113,-479,-478,-469,113,-472,-481,113,-480,-476,-475,113,-473,-501,-474,-358,-471,-477,-562,-565,113,-567,-566,113,113,113,-283,-280,-260,113,-278,-344,113,-290,-287,-284,-623,-228,113,-614,-587,-595,-516,-503,-520,-466,-470,-502,113,-484,-483,-559,-563,-564,113,-533,-303,113,-285,113,-288,-296,-289,-616,-263,-231,-185,113,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,113,113,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,113,-272,-266,113,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'FALSE':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91
,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1154,1156,1157,1160,],[46,46,-186,-204,-194,-187,46,46,46,46,-170,46,46,-206,-262,-203,-193,46,-192,-174,-202,-189,-172,46,-171,46,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,46,46,46,46,46,-205,46,46,-167,46,46,46,-508,-452,-553,-529,-249,46,46,46,46,-175,46,46,-279,46,46,46,46,-277,46,46,46,-286,46,-17,-11,-9,-10,46,-18,-12,-15,-8,-19,-16,-14,-13,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,-281,-164,-168,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,-283,-280,46,46,-260,46,-278,-290,-287,-284,-623,46,46,-228,46,46,46,46,46,46,46,-516,-466,46,-559,-533,46,46,46,46,46,46,46,46,46,46,46,-251,-285,-288,-296,-289,46,46,46,46,46,46,-263,-231,46,-185,46,-536,-534,46,46,-307,-261,46,-308,46,46,46,46,46,46,46,-282,46,-306,-297,46,-234,46,-271,-264,46,46,46,46,46,46,46,46,-535,-309,-310,46,46,46,-276,-274,-265,46,-272,-266,46,-232,46,-313,-311,-314,-312,-291,46,-267,-273,46,46,46,46,-315,-317,-31
8,-316,-236,46,46,46,]),';':([1,4,5,6,7,8,9,11,12,16,20,21,23,30,31,34,37,38,41,42,43,44,45,46,47,48,49,50,51,53,55,56,57,59,60,61,62,63,64,67,69,72,76,78,79,82,84,85,87,88,89,90,91,92,93,95,96,98,99,101,110,112,117,121,122,124,127,133,134,135,136,137,138,139,141,142,143,144,145,146,148,150,151,152,153,156,159,160,161,162,163,164,165,167,168,169,170,171,173,174,178,179,180,181,182,183,185,186,187,189,190,195,196,197,204,207,209,210,211,212,213,216,219,221,226,227,228,229,231,232,233,234,235,236,238,239,240,241,242,245,247,248,250,256,258,260,261,262,263,264,265,266,269,271,272,277,278,279,286,287,291,292,300,302,303,308,311,318,321,324,328,330,336,345,349,362,368,369,381,382,385,386,387,388,389,391,394,405,406,410,412,415,416,419,420,421,425,426,427,429,431,433,436,438,439,440,443,446,447,448,449,450,452,453,454,455,456,458,460,462,464,465,466,467,469,470,471,474,475,476,477,479,482,484,485,486,487,488,489,490,492,497,498,533,534,535,538,549,551,559,562,568,569,570,571,572,579,585,586,588,592,593,598,602,605,606,609,611,612,614,615,619,623,625,626,628,629,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,666,667,668,670,677,679,682,684,686,688,689,690,692,700,701,702,703,704,708,709,710,711,713,716,717,720,722,724,725,726,727,728,730,731,732,733,734,735,736,737,739,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,777,778,779,781,785,788,804,805,807,809,811,813,818,819,820,821,831,833,834,835,838,839,843,844,845,846,848,849,851,858,860,862,872,874,878,881,883,895,896,897,903,904,906,907,909,913,914,916,917,918,919,920,922,925,941,945,946,948,950,953,956,959,961,962,963,964,967,968,969,970,973,975,976,984,986,987,990,992,993,997,998,1000,1003,1005,1007,1008,1011,1012,1013,1014,1015,1016,1017,1020,1022,1023,1024,1030,1031,1032,1036,1038,1040,1043,1045,1046,1047,1049,1055,1057,1058,1059,1060,1061,1063,1064,1065,1067,1068,1069,1070,1071
,1079,1080,1083,1086,1088,1089,1090,1091,1093,1094,1100,1101,1102,1103,1105,1106,1107,1108,1109,1110,1111,1112,1114,1115,1118,1119,1120,1123,1124,1125,1127,1128,1129,1130,1131,1133,1134,1135,1136,1137,1138,1139,1140,1141,1143,1145,1148,1149,1151,1153,1154,1155,1156,1160,],[47,181,190,-186,-204,-194,-187,47,-345,-623,47,-594,-212,-170,-380,-209,47,-347,-211,-138,-585,-378,-206,-349,-262,-341,-203,266,-346,-193,-142,-137,-342,-192,-174,-202,-136,271,-189,-379,-172,-144,-171,-210,-188,-200,-173,-348,286,-191,-350,-208,-201,-197,-199,-190,-586,-169,-198,-139,-185,-381,-213,-207,-196,-584,-195,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,-449,-448,181,-610,-608,181,-618,-611,-451,-612,181,-613,-450,-447,391,-205,-245,394,-244,-167,47,-176,-623,-180,-178,-343,-383,-117,-119,-508,446,462,-452,-385,181,-553,-529,488,-249,-113,-27,-68,-100,-22,-112,-37,-60,-123,-47,-42,-81,-53,-32,-126,-116,-91,-175,549,-279,551,-118,559,-623,-277,579,-156,-155,-398,-392,-399,-391,-286,-389,-381,-383,-588,47,-134,-135,-120,-109,-110,-121,181,181,668,-609,181,-619,-607,-605,-281,-164,-168,-131,-132,-577,-575,-133,-574,-576,-153,-154,-182,-177,-388,-387,-382,-525,-519,-524,-521,-527,701,-526,-523,-528,-522,446,-479,-478,-469,462,-472,-481,701,-480,-476,-475,-473,-501,-474,-471,-477,-386,-562,-565,-567,-566,181,717,488,462,-540,-537,488,-623,-124,-114,-115,-125,-146,-283,-280,-260,-239,-240,-242,-238,-241,785,-278,-343,-149,-344,-158,-157,-417,-418,-400,824,-301,-290,-287,-284,-623,-384,-335,-339,-340,-228,-7,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,181,858,-614,-606,47,-581,-587,-595,-179,-184,-183,-181,-145,-516,-503,-520,-466,-470,-502,47,-484,-483,-559,-563,-564,488,-539,-542,-533,-541,-543,-544,-33,-71,-86,-84,-82,-69,-73,-75,-28,-44,-85,-106,-39,-55,-95,-87,-29,-93,-70,-72,-83,-102,-34,-74,-49,-76,-61,-104,-57,-43,-54,-56,-48,-92,-94,-62,-38,-10
5,-101,-103,-332,-331,-221,916,917,-623,-623,925,-330,-150,-251,-393,-419,-420,-402,-405,-401,-404,-285,-288,-296,-289,-390,-384,-338,-333,-336,-334,-151,47,-162,-616,969,-263,-231,47,-185,-580,-579,-623,-623,1000,-623,-623,-536,-538,-534,1015,1016,-307,-261,-182,-623,-243,1019,-308,-282,-421,-422,-406,-302,-306,-297,-623,-337,-234,-21,-163,-159,1063,-615,1064,-271,-264,47,-623,47,47,-578,-217,-216,-623,-507,-482,-495,-494,-500,-623,-535,-545,-23,-24,-309,-310,-182,-227,-226,1101,1102,-225,1108,1109,-403,-423,-424,-407,-408,-410,-411,-304,-623,-326,-327,-320,-623,-160,-617,-276,-274,-265,47,-272,-266,47,1130,-232,-218,-504,-499,-496,-497,-571,-568,-572,-224,-313,-311,-623,-223,1137,1138,-314,-312,-222,1139,1140,-426,-425,-412,-305,-291,-323,-623,-319,-267,-273,47,-623,47,-573,47,-322,-623,-315,-317,-318,-316,-427,-428,-409,-324,-161,1157,-498,-236,-321,47,47,]),'ABSTRACT':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[129,129,-186,-204,-194,-187,-361,129,-594,129,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,
-195,-359,-358,-364,-449,-448,129,-610,-608,-362,129,-618,-611,-451,-612,129,-613,-450,-447,129,-205,-167,129,-343,-354,-508,129,129,-452,129,-553,-529,129,-249,-175,-279,129,-277,129,-286,-588,129,129,129,-609,129,-619,-605,-281,-164,-168,-525,-519,-524,129,-521,-527,-526,-523,-528,129,-522,129,-479,-478,-469,129,-472,-481,129,-480,-476,-475,129,-473,-501,-474,-358,-471,-477,-562,-565,129,-567,-566,129,129,129,-283,-280,-260,129,-278,-344,129,-290,-287,-284,-623,-228,129,-614,-587,-595,-516,-503,-520,-466,-470,-502,129,-484,-483,-559,-563,-564,129,-533,-303,129,-285,129,-288,-296,-289,-616,-263,-231,-185,129,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,129,129,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,129,-272,-266,129,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'PUBLIC':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[114,114,-186,-204,-194,-187,-361,114,-594,114,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,
114,-610,-608,-362,114,-618,-611,-451,-612,114,-613,-450,-447,114,-205,-167,114,-343,-354,-508,114,114,-452,114,-553,-529,114,-249,-175,-279,114,-277,114,-286,-588,114,114,114,-609,114,-619,-605,-281,-164,-168,-525,-519,-524,114,-521,-527,-526,-523,-528,114,-522,114,-479,-478,-469,114,-472,-481,114,-480,-476,-475,114,-473,-501,-474,-358,-471,-477,-562,-565,114,-567,-566,114,114,114,-283,-280,-260,114,-278,-344,114,-290,-287,-284,-623,-228,114,-614,-587,-595,-516,-503,-520,-466,-470,-502,114,-484,-483,-559,-563,-564,114,-533,-303,114,-285,114,-288,-296,-289,-616,-263,-231,-185,114,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,114,114,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,114,-272,-266,114,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'CHAR_LITERAL':([1,2,6,7,8,9,11,16,20,25,30,37,40,45,47,49,53,58,59,60,61,64,69,73,76,77,79,82,84,88,91,92,93,95,98,99,110,122,127,132,147,154,155,157,190,194,203,204,207,208,220,226,229,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,394,405,406,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,549,551,552,553,559,560,579,611,612,614,615,621,624,628,650,653,671,677,685,687,692,700,703,709,713,725,729,776,784,785,786,790,791,792,798,799,800,807,831,833,834,835,836,841,849,850,852,857,862,872,874,878,882,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,951,953,956,958,962,965,973,975,976,977,983,984,986,987,988,991,1011,1015,1016,1019,1026,1050,1064,1065,1067,1068,1069,1070,1071,1080,1092,1101,1102,1108,1109,1120,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1154,1156,1157,1160,],[51,51,-186,-204,-194,-187,51,51,51,51,-170,51,51,-206,-262,-203,-193,51,-192,-1
74,-202,-189,-172,51,-171,51,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,51,51,51,51,51,-205,51,51,-167,51,51,51,-508,-452,-553,-529,-249,51,51,51,51,-175,51,51,-279,51,51,51,51,-277,51,51,51,-286,51,-17,-11,-9,-10,51,-18,-12,-15,-8,-19,-16,-14,-13,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,-281,-164,-168,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,-283,-280,51,51,-260,51,-278,-290,-287,-284,-623,51,51,-228,51,51,51,51,51,51,51,-516,-466,51,-559,-533,51,51,51,51,51,51,51,51,51,51,51,-251,-285,-288,-296,-289,51,51,51,51,51,51,-263,-231,51,-185,51,-536,-534,51,51,-307,-261,51,-308,51,51,51,51,51,51,51,-282,51,-306,-297,51,-234,51,-271,-264,51,51,51,51,51,51,51,51,-535,-309,-310,51,51,51,-276,-274,-265,51,-272,-266,51,-232,51,-313,-311,-314,-312,-291,51,-267,-273,51,51,51,51,-315,-317,-318,-316,-236,51,51,51,]),'FLOAT':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699
,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,864,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[52,52,-186,-204,-194,-187,-361,52,52,52,-594,52,52,-170,52,52,-585,-206,-262,-341,-203,-193,-342,52,-192,-174,-202,-189,-172,-352,52,-363,-365,-171,52,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,52,-185,-357,-355,-366,52,-196,-584,-195,-359,-358,-364,52,52,52,52,52,52,-449,-448,-362,-451,-450,-447,-205,52,52,-167,52,52,-343,-354,52,-508,-623,-623,-452,52,-623,-553,-529,-249,52,52,52,52,-175,52,52,-279,52,52,52,52,-277,52,52,52,52,-286,-588,52,-17,-11,-9,-10,52,-18,-12,-15,-8,-19,-16,-14,-13,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,-281,-439,-430,-164,-168,52,52,-525,-519,-524,-623,-352,-521,52,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,52,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,52,-566,-623,-623,-351,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,-283,-280,52,52,-260,52,52,-278,-344,52,52,52,52,-400,52,-290,-287,-284,-623,52,52,-228,52,52,52,-441,52,52,-587,-595,52,52,52,52,-352,52,-516,-503,-520,-466,-470,52,-502,52,-484,-483,-559,52,-563,-564,-533,52,52,52,52,52,52,52,52,52,52,52,-251,-402,-405,-401,-404,-303,52,-285,-623,-288,-296,-289,52,52,52,52,52,52,-263,-440,-442,-231,52,-185,52,-6
23,-487,-536,-534,52,52,-307,-261,52,-308,52,52,52,52,52,52,52,-282,52,52,52,-421,-422,52,-406,52,-306,52,-297,52,-234,52,-271,-264,52,52,-443,52,-444,52,52,52,52,52,52,-506,-482,-486,-500,-570,-535,-309,-310,52,52,-403,-423,-424,-407,-408,-410,-411,52,-276,-274,-265,52,-272,-266,52,-446,-445,-232,-505,52,-569,-313,-311,-314,-312,52,-426,-425,52,52,-412,-291,52,52,-267,-273,52,52,52,52,-315,-317,-318,-316,-427,-428,-409,-236,52,52,52,]),'ASSERT':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[77,-186,-204,-194,-187,77,77,-170,77,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,77,-508,-452,-553,-529,-249,-175,-279,-277,-286,77,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,77,-516,-466,77,-559,-533,-251,-285,-288,-296,-289,77,-263,-231,77,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,77,77,77,-535,-309,-310,-276,-274,-265,77,-272,-266,77,-232,-313,-311,-314,-312,-291,-267,-273,77,77,77,-315,-317,-318,-316,-236,77,77,]),'@':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,208,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,411,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,61
5,628,666,668,682,684,685,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,882,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1092,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[118,118,-186,-204,-194,-187,-361,118,-594,215,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,118,-610,-608,-362,118,-618,-611,-451,-612,118,-613,-450,-447,215,-205,-167,118,422,-343,-354,-508,118,118,-452,118,-553,-529,422,-249,-175,-279,422,-277,422,-286,-588,118,118,215,-609,118,-619,-605,-281,-164,-168,422,-525,-519,-524,422,-521,-527,-526,-523,-528,215,-522,118,-479,-478,-469,118,-472,-481,422,-480,-476,-475,215,-473,-501,-474,-358,-471,-477,-562,-565,422,-567,-566,118,118,422,-283,-280,-260,422,-278,-344,422,-290,-287,-284,-623,-228,118,-614,-587,-595,422,-516,-503,-520,-466,-470,-502,118,-484,-483,-559,-563,-564,422,-533,-303,422,-285,422,-288,-296,-289,-616,-263,-231,-185,422,422,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,118,422,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,118,-272,-266,118,-232,-505,422,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'ELSE':([7,8,45,47,49,53,59,61,82,91,92,93,99,122,127,190,271,286,311,394,405,549,551,559,579,611,612,614,615,831,833,834,835,862,868,869,873,875,876,878,879,916,917,925,941,953,956,975,985,1015,1016,1064,1067,1070,1081,1101,1102,1108,1109,1120,1127,1137,1138,1139,1140,1150,1152,1158,1161,],[-204,-194,-206,-262,-203,-193,-192,-202,-200,-201,-197,-199,-198,-196,-195,-205,-279,-277,-286,-281,-164,-283,-280,-260,-278,-290,-287,-284,-623,-285,-288,-296,-289,-263,-255,-257,-256,-258,986,-254,-259,-307,-261,-308,-282,-306,-297,-264,-250,-309,-310,
-276,-265,-266,-229,-313,-311,-314,-312,-291,-267,-315,-317,-318,-316,1156,-235,-233,-237,]),'XOR_ASSIGN':([12,23,38,41,42,46,48,51,55,56,57,62,68,70,72,78,85,89,97,100,101,117,123,133,136,139,142,145,148,151,156,163,192,246,251,253,259,336,345,366,434,512,538,561,563,567,573,580,585,586,587,588,589,623,625,626,631,640,643,644,646,650,703,772,773,777,804,805,843,844,845,846,848,921,959,961,992,993,1020,1022,1030,1055,1057,1058,1059,1060,1083,1100,1103,1105,1110,1123,1124,1125,1135,1136,1148,1155,],[-345,-143,-347,-130,-138,-349,-341,-346,-142,-137,-342,-136,-343,-140,-144,-129,-348,-350,-127,-128,-139,-141,341,-140,-343,-141,-143,-128,-127,341,-130,-129,-128,-128,-127,-130,-129,-134,-135,-128,-128,-145,-146,-343,-127,-128,-329,-328,-343,-149,-147,-344,-148,-335,-339,-340,-152,-329,-147,-148,-328,-145,-466,-332,-331,-221,-330,-150,-338,-333,-336,-334,-151,-128,-623,-337,-217,-216,-227,-226,-225,-623,-326,-327,-320,-623,-218,-224,-623,-223,-222,-323,-623,-319,-322,-623,-324,-321,]),'NEQ':([12,31,38,42,44,46,48,51,55,56,57,62,67,72,85,89,101,112,133,134,136,137,138,139,141,142,143,145,146,148,151,152,156,159,162,163,164,165,213,216,219,221,231,238,240,241,245,246,248,250,251,253,256,259,260,261,263,264,265,277,291,292,300,302,303,308,318,336,345,349,362,366,368,369,410,412,416,419,423,431,433,434,476,498,533,534,535,538,585,586,588,592,593,598,602,605,619,623,625,626,631,634,635,636,637,638,639,640,641,642,643,644,646,650,656,657,658,659,661,662,663,664,692,703,731,732,733,734,735,736,737,742,743,745,746,747,749,750,751,752,753,755,756,757,758,759,760,762,763,764,765,766,767,769,770,771,772,773,777,804,805,809,811,813,818,819,820,821,838,843,844,845,846,848,851,855,945,946,948,959,961,964,967,992,993,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1083,1100,1103,1105,1110,1114,1115,1118,1123,1124,1125,1135,1136,1141,1143,1145,1148,1149,1155,],[-345,-380,-347,-138,-378,-349,-341,-346,-142,-137,-342,-136,-379,-144,-348,-350,-139,-381,-140,
-108,-343,-88,-63,-141,-77,-143,-107,-128,-96,-127,-119,-111,-130,-58,-50,-129,377,-122,-343,-383,-117,-119,-385,-113,-68,-100,-112,531,-60,-123,-131,-133,537,-132,-81,-53,-126,-116,-91,-118,-156,-155,-398,-392,-399,-391,-389,-134,-135,-120,-109,531,-110,-121,-131,-132,531,-133,-341,-388,-387,531,-386,-124,-114,-115,-125,-146,-343,-149,-344,-158,-157,-417,-418,-400,-384,-335,-339,-340,-152,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-145,-64,-67,-65,-66,377,-59,-52,-51,-145,-466,-71,-86,-84,-82,-69,-73,-75,-85,-106,-55,-95,-87,-93,-70,-72,-83,-102,-74,377,-76,-61,-104,-57,-54,-56,377,-92,-94,-62,-105,-101,-103,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-390,-338,-333,-336,-334,-151,-162,-128,-421,-422,-406,-623,-337,-163,-159,-217,-216,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-218,-224,-623,-223,-222,-426,-425,-412,-323,-623,-319,-322,-623,-427,-428,-409,-324,-161,-321,]),'THROWS':([291,292,425,426,592,593,895,901,997,998,],[-156,-155,-153,-154,-158,-157,-623,1004,1004,-507,]),'IMPLEMENTS':([17,26,31,48,54,57,83,198,213,222,224,225,231,267,300,302,303,308,321,324,396,397,401,402,435,436,476,588,598,602,605,627,674,676,691,809,811,813,818,819,820,821,839,847,864,866,892,945,946,948,978,981,1036,1038,1040,1043,1045,1046,1047,1074,1077,1114,1115,1118,1141,1143,1145,],[-455,-623,-380,-341,-623,-342,285,-454,-343,-458,-457,285,-385,285,-398,-392,-399,-391,-381,-383,-439,-430,-531,-456,-459,-382,-386,-344,-417,-418,-400,-558,-441,-532,-555,-393,-419,-420,-402,-405,-401,-404,-384,-557,-440,-442,-556,-421,-422,-406,-443,-444,-403,-423,-424,-407,-408,-410,-411,-446,-445,-426,-425,-412,-427,-428,-409,]),'PLUSPLUS':([0,1,2,6,7,8,9,11,12,16,20,23,25,30,37,38,40,41,42,45,46,47,48,49,51,53,55,56,57,58,59,60,61,62,64,68,69,70,72,73,76,77,78,79,82,84,85,88,89,91,92,93,95,97,98,99,100,101,110,117,122,123,127,132,133,136,139,142,145,147,148,151,154,155,156,157,163,190,192,194,203,204,207,208,220,221,226,229,233,234,236,2
37,246,249,251,252,253,254,259,266,268,270,271,275,276,279,281,286,290,294,295,311,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,348,350,351,352,353,354,355,356,358,359,364,365,366,370,371,372,373,374,375,377,378,380,394,405,406,409,410,411,412,416,419,423,428,434,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,538,539,540,541,543,544,545,546,549,551,552,553,559,560,561,563,567,573,579,580,585,586,587,588,589,611,612,614,615,621,623,624,625,626,628,631,640,643,644,646,650,653,671,677,685,687,692,700,703,709,713,725,729,772,773,776,777,784,785,786,790,791,792,798,799,800,804,805,807,831,833,834,835,836,841,843,844,845,846,848,849,850,855,857,862,872,874,878,882,906,909,911,912,916,917,921,924,925,930,932,933,934,938,939,940,941,951,953,956,958,959,961,962,973,975,976,977,983,984,986,987,988,991,992,993,1011,1015,1016,1019,1020,1022,1026,1030,1050,1055,1057,1058,1059,1060,1064,1065,1067,1068,1069,1070,1071,1080,1083,1092,1100,1101,1102,1103,1105,1108,1109,1110,1120,1123,1124,1125,1127,1128,1129,1130,1131,1134,1135,1136,1137,1138,1139,1140,1148,1154,1155,1156,1157,1160,],[4,25,25,-186,-204,-194,-187,25,-345,25,25,-143,25,-170,25,-347,25,-130,-138,-206,-349,-262,-341,-203,-346,-193,-142,-137,-342,25,-192,-174,-202,-136,-189,-343,-172,-140,-144,25,-171,25,-129,-188,-200,-173,-348,-191,-350,-201,-197,-199,-190,-127,-169,-198,-128,-139,-185,-141,-196,336,-195,25,-140,-343,-141,-143,-128,25,-127,336,25,25,-130,25,-129,-205,-128,25,25,-167,25,25,25,336,-508,-452,-553,-529,-249,25,-128,25,-127,25,-130,25,-129,-175,25,25,-279,25,25,25,25,-277,25,25,25,-286,25,-17,-11,-9,-10,25,-134,-18,-12,-15,-8,-19,-16,-14,-13,-135,25,25,25,25,25,25,25,25,25,25,25,25,-128,25,25,25,25,25,25,25,25,25,-281,-164,-168,336,-127,25,-129,-128,-130,-341,25,-128,25,25,25,25,25,25,25,25,25,25,25,25,25,-145,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,-146,25,25,25,25,25,25,25,-283,-2
80,25,25,-260,25,-343,-127,-128,-329,-278,-328,-343,-149,-147,-344,-148,-290,-287,-284,-623,25,-335,25,-339,-340,-228,-152,-329,-147,-148,-328,-145,25,25,25,25,25,-145,-516,-466,25,-559,-533,25,-332,-331,25,-221,25,25,25,25,25,25,25,25,25,-330,-150,-251,-285,-288,-296,-289,25,25,-338,-333,-336,-334,-151,25,25,-128,25,-263,-231,25,-185,25,-536,-534,25,25,-307,-261,-128,25,-308,25,25,25,25,25,25,25,-282,25,-306,-297,25,-623,-337,-234,-271,-264,25,25,25,25,25,25,25,25,-217,-216,-535,-309,-310,25,-227,-226,25,-225,25,-623,-326,-327,-320,-623,-276,-274,-265,25,-272,-266,25,-232,-218,25,-224,-313,-311,-623,-223,-314,-312,-222,-291,-323,-623,-319,-267,-273,25,25,25,25,-322,-623,-315,-317,-318,-316,-324,-236,-321,25,25,25,]),'DEFAULT':([6,7,8,9,30,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,226,229,233,234,236,266,271,286,291,292,311,394,405,406,425,426,549,551,559,579,592,593,611,612,614,615,628,700,703,713,725,831,833,834,835,862,863,872,878,903,906,909,916,917,925,941,953,956,962,972,973,974,975,976,998,1008,1011,1015,1016,1064,1065,1066,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[-186,-204,-194,-187,-170,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,-508,-452,-553,-529,-249,-175,-279,-277,-156,-155,-286,-281,-164,-168,-153,-154,-283,-280,-260,-278,-158,-157,-290,-287,-284,-623,-228,-516,-466,-559,-533,-285,-288,-296,-289,-263,971,-231,-185,-623,-536,-534,-307,-261,-308,-282,-306,-297,-234,971,-271,-268,-264,971,-507,1092,-535,-309,-310,-276,-274,-269,-265,971,-272,-266,-270,-232,-313,-311,-314,-312,-291,-267,-273,-315,-317,-318,-316,-236,]),'WHILE':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,191,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,9
06,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[126,-186,-204,-194,-187,126,126,-170,126,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,392,-167,126,-508,-452,-553,-529,-249,-175,-279,-277,-286,126,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,880,-516,-466,126,-559,-533,-251,-285,-288,-296,-289,126,-263,-231,880,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,126,126,880,-535,-309,-310,-276,-274,-265,126,-272,-266,126,-232,-313,-311,-314,-312,-291,-267,-273,880,880,126,-315,-317,-318,-316,-236,880,880,]),'DOUBLE':([1,2,6,7,8,9,10,11,16,20,21,24,25,30,37,40,43,45,47,48,49,53,57,58,59,60,61,64,69,71,73,74,75,76,77,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,107,110,113,114,115,116,122,124,127,129,130,131,132,147,154,155,157,158,169,170,175,181,186,187,190,194,203,204,207,208,213,218,220,226,227,228,229,230,232,233,234,236,237,249,252,254,266,268,270,271,275,276,279,281,286,290,294,295,309,311,328,330,331,332,333,334,335,337,338,339,340,341,342,343,344,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,394,396,397,405,406,411,428,438,439,440,441,442,443,444,446,448,449,450,451,452,453,454,455,456,458,459,460,461,462,463,465,466,467,468,469,470,471,473,474,475,477,478,479,480,482,483,484,485,488,491,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,549,551,552,553,559,560,566,579,588,597,599,600,604,605,608,611,612,614,615,621,624,628,650,653,671,674,675,677,682,684,685,687,692,693,697,699,700,701,702,703,704,706,708,709,710,711,713,715,716,717,725,729,776,784,785,786,790,791,792,798,799,800,807,818,819,820,821,824,826,831,832,833,834,835,836,841,849,850,852,857,862,8
64,866,872,874,878,882,894,899,906,909,911,912,916,917,924,925,930,932,933,934,938,939,940,941,942,943,944,945,946,947,948,951,953,955,956,958,962,965,973,975,976,977,978,979,981,983,984,986,987,988,991,999,1000,1002,1007,1009,1011,1015,1016,1019,1026,1036,1038,1040,1043,1045,1046,1047,1050,1064,1065,1067,1068,1069,1070,1071,1074,1077,1080,1087,1092,1095,1101,1102,1108,1109,1113,1114,1115,1116,1117,1118,1120,1122,1126,1127,1128,1129,1130,1131,1134,1137,1138,1139,1140,1141,1143,1145,1154,1156,1157,1160,],[128,128,-186,-204,-194,-187,-361,128,128,128,-594,128,128,-170,128,128,-585,-206,-262,-341,-203,-193,-342,128,-192,-174,-202,-189,-172,-352,128,-363,-365,-171,128,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,128,-185,-357,-355,-366,128,-196,-584,-195,-359,-358,-364,128,128,128,128,128,128,-449,-448,-362,-451,-450,-447,-205,128,128,-167,128,128,-343,-354,128,-508,-623,-623,-452,128,-623,-553,-529,-249,128,128,128,128,-175,128,128,-279,128,128,128,128,-277,128,128,128,128,-286,-588,128,-17,-11,-9,-10,128,-18,-12,-15,-8,-19,-16,-14,-13,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,-281,-439,-430,-164,-168,128,128,-525,-519,-524,-623,-352,-521,128,-527,-526,-523,-528,-351,-522,-623,-479,-478,-469,-623,-352,-472,128,-481,-623,-480,-476,-475,-351,-473,-501,-474,-358,-471,-477,-562,-352,-565,-623,-567,128,-566,-623,-623,-351,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,-283,-280,128,128,-260,128,128,-278,-344,128,128,128,128,-400,128,-290,-287,-284,-623,128,128,-228,128,128,128,-441,128,128,-587,-595,128,128,128,128,-352,128,-516,-503,-520,-466,-470,128,-502,128,-484,-483,-559,128,-563,-564,-533,128,128,128,128,128,128,128,128,128,128,128,-251,-402,-405,-401,-404,-303,128,-285,-623,-288,-296,-289,128,128,128,128,128,128,-263,-440,-442,-231,128,-185,128,-623,-487,-536,-534,128,128,-
307,-261,128,-308,128,128,128,128,128,128,128,-282,128,128,128,-421,-422,128,-406,128,-306,128,-297,128,-234,128,-271,-264,128,128,-443,128,-444,128,128,128,128,128,128,-506,-482,-486,-500,-570,-535,-309,-310,128,128,-403,-423,-424,-407,-408,-410,-411,128,-276,-274,-265,128,-272,-266,128,-446,-445,-232,-505,128,-569,-313,-311,-314,-312,128,-426,-425,128,128,-412,-291,128,128,-267,-273,128,128,128,128,-315,-317,-318,-316,-427,-428,-409,-236,128,128,128,]),'THROW':([1,6,7,8,9,11,20,30,37,45,47,49,53,59,60,61,64,69,76,79,82,84,88,91,92,93,95,98,99,110,122,127,190,204,207,226,229,233,234,236,266,271,286,311,330,394,405,406,549,551,559,579,611,612,614,615,628,677,700,703,709,713,725,807,831,833,834,835,849,862,872,874,878,906,909,916,917,925,941,953,956,962,973,975,976,986,987,1011,1015,1016,1064,1065,1067,1068,1069,1070,1071,1080,1101,1102,1108,1109,1120,1127,1128,1129,1131,1134,1137,1138,1139,1140,1154,1156,1160,],[58,-186,-204,-194,-187,58,58,-170,58,-206,-262,-203,-193,-192,-174,-202,-189,-172,-171,-188,-200,-173,-191,-201,-197,-199,-190,-169,-198,-185,-196,-195,-205,-167,58,-508,-452,-553,-529,-249,-175,-279,-277,-286,58,-281,-164,-168,-283,-280,-260,-278,-290,-287,-284,-623,-228,58,-516,-466,58,-559,-533,-251,-285,-288,-296,-289,58,-263,-231,58,-185,-536,-534,-307,-261,-308,-282,-306,-297,-234,-271,-264,58,58,58,-535,-309,-310,-276,-274,-265,58,-272,-266,58,-232,-313,-311,-314,-312,-291,-267,-273,58,58,58,-315,-317,-318,-316,-236,58,58,]),'$end':([3,4,6,7,8,9,12,30,31,38,39,42,44,45,46,47,48,49,51,53,55,56,57,59,60,61,62,64,67,69,72,76,79,82,84,85,88,89,91,92,93,95,98,99,101,110,112,122,127,133,134,135,136,137,138,139,141,142,143,144,145,146,148,149,150,151,152,153,156,159,160,161,162,163,164,165,167,168,169,170,171,172,173,174,176,178,179,180,181,182,183,185,186,187,190,213,216,219,221,226,229,231,233,234,236,266,271,277,286,291,292,300,302,303,308,311,318,336,345,349,362,368,369,381,382,386,387,388,391,394,405,431,433,476,538,549,551,559,579,585,586,588,592,593,5
98,602,605,611,612,614,615,619,623,625,626,628,629,631,633,634,635,636,637,638,639,640,641,642,643,644,646,649,650,656,657,658,659,660,661,662,663,664,665,666,668,692,700,703,713,725,772,773,777,804,805,809,811,813,818,819,820,821,831,833,834,835,838,843,844,845,846,848,851,858,862,872,878,906,909,916,917,925,941,945,946,948,953,956,959,961,962,963,964,967,969,975,992,993,1011,1015,1016,1020,1022,1030,1036,1038,1040,1043,1045,1046,1047,1055,1057,1058,1059,1060,1061,1063,1064,1067,1070,1080,1083,1100,1101,1102,1103,1105,1108,1109,1110,1114,1115,1118,1120,1123,1124,1125,1127,1135,1136,1137,1138,1139,1140,1141,1143,1145,1148,1149,1154,1155,],[0,-623,-186,-204,-194,-187,-345,-170,-380,-347,-622,-138,-378,-206,-349,-262,-341,-203,-346,-193,-142,-137,-342,-192,-174,-202,-136,-189,-379,-172,-144,-171,-188,-200,-173,-348,-191,-350,-201,-197,-199,-190,-169,-198,-139,-185,-381,-196,-195,-140,-108,-25,-343,-88,-63,-141,-77,-143,-107,-3,-128,-96,-127,-621,-20,-119,-111,-1,-130,-58,-30,-40,-50,-129,-45,-122,-4,-35,-449,-448,-597,-620,-610,-608,-604,-601,-618,-611,-451,-612,-602,-613,-450,-447,-205,-343,-383,-117,-119,-508,-452,-385,-553,-529,-249,-175,-279,-118,-277,-156,-155,-398,-392,-399,-391,-286,-389,-134,-135,-120,-109,-110,-121,-598,-600,-609,-603,-619,-605,-281,-164,-388,-387,-386,-146,-283,-280,-260,-278,-343,-149,-344,-158,-157,-417,-418,-400,-290,-287,-284,-623,-384,-335,-339,-340,-228,-7,-152,-31,-99,-97,-98,-80,-79,-78,-329,-90,-89,-147,-148,-328,-26,-145,-64,-67,-65,-66,-36,-46,-59,-52,-51,-41,-599,-614,-145,-516,-466,-559,-533,-332,-331,-221,-330,-150,-393,-419,-420,-402,-405,-401,-404,-285,-288,-296,-289,-390,-338,-333,-336,-334,-151,-162,-616,-263,-231,-185,-536,-534,-307,-261,-308,-282,-421,-422,-406,-306,-297,-623,-337,-234,-21,-163,-159,-615,-264,-217,-216,-535,-309,-310,-227,-226,-225,-403,-423,-424,-407,-408,-410,-411,-623,-326,-327,-320,-623,-160,-617,-276,-265,-266,-232,-218,-224,-313,-311,-623,-223,-314,-312,-222,-426,-425,-412,-291,-323,-623,-319,-267,-
322,-623,-315,-317,-318,-316,-427,-428,-409,-324,-161,-236,-321,]),'STATIC':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,177,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[130,130,-186,-204,-194,-187,-361,130,-594,130,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,130,-610,-608,-362,384,130,-618,-611,-451,-612,130,-613,-450,-447,130,-205,-167,130,-343,-354,-508,130,473,-452,130,-553,-529,130,-249,-175,-279,130,-277,130,-286,-588,130,130,130,-609,130,-619,-605,-281,-164,-168,-525,-519,-524,130,-521,-527,-526,-523,-528,130,-522,130,-479,-478,-469,473,-472,-481,130,-480,-476,-475,130,-473,-501,-474,-358,-471,-477,-562,-565,130,-567,-566,130,473,130,-283,-280,-260,130,-278,-344,130,-290,-287,-284,-623,-228,130,-614,-587,-595,-516,-503,-520,-466,-470,-502,130,-484,-483,-559,-563,-564,130,-533,-303,130,-285,130,-288,-296,-289,-616,-263,-231,-185,130,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,130,130,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,130,-272,-266,130,-232,-505,-569,-313,-311,-314,-31
2,-291,-267,-273,-315,-317,-318,-316,-236,]),'VOLATILE':([1,4,6,7,8,9,10,20,21,24,30,43,45,47,48,49,53,57,59,60,61,64,69,74,75,76,79,82,84,88,91,92,93,95,96,98,99,102,104,105,106,110,113,114,115,122,124,127,129,130,131,169,170,171,173,174,175,178,179,180,181,182,183,185,186,187,188,190,204,207,213,218,226,227,228,229,232,233,234,235,236,266,271,279,286,309,311,328,381,382,383,386,387,388,391,394,405,406,438,439,440,441,443,446,448,449,450,451,452,453,454,455,456,458,460,462,463,465,466,467,468,469,470,471,473,474,475,477,479,480,482,484,485,488,491,549,551,559,566,579,588,608,611,612,614,615,628,666,668,682,684,700,701,702,703,704,708,709,710,711,713,716,717,720,725,824,826,831,832,833,834,835,858,862,872,878,894,899,906,909,916,917,925,941,953,956,962,969,973,975,976,984,999,1000,1002,1007,1009,1011,1015,1016,1063,1064,1065,1067,1068,1069,1070,1071,1080,1087,1095,1101,1102,1108,1109,1120,1127,1128,1137,1138,1139,1140,1154,],[131,131,-186,-204,-194,-187,-361,131,-594,131,-170,-585,-206,-262,-341,-203,-193,-342,-192,-174,-202,-189,-172,-363,-365,-171,-188,-200,-173,-191,-201,-197,-199,-190,-586,-169,-198,-362,-360,-356,-353,-185,-357,-355,-366,-196,-584,-195,-359,-358,-364,-449,-448,131,-610,-608,-362,131,-618,-611,-451,-612,131,-613,-450,-447,131,-205,-167,131,-343,-354,-508,131,131,-452,131,-553,-529,131,-249,-175,-279,131,-277,131,-286,-588,131,131,131,-609,131,-619,-605,-281,-164,-168,-525,-519,-524,131,-521,-527,-526,-523,-528,131,-522,131,-479,-478,-469,131,-472,-481,131,-480,-476,-475,131,-473,-501,-474,-358,-471,-477,-562,-565,131,-567,-566,131,131,131,-283,-280,-260,131,-278,-344,131,-290,-287,-284,-623,-228,131,-614,-587,-595,-516,-503,-520,-466,-470,-502,131,-484,-483,-559,-563,-564,131,-533,-303,131,-285,131,-288,-296,-289,-616,-263,-231,-185,131,-487,-536,-534,-307,-261,-308,-282,-306,-297,-234,-615,-271,-264,131,131,-506,-482,-486,-500,-570,-535,-309,-310,-617,-276,-274,-265,131,-272,-266,131,-232,-505,-569,-313,-311,-314,-312,-291,-267,-273,-315,-317,-
318,-316,-236,]),}
# Invert the generated lookup table from symbol-major to state-major form.
# _lr_action_items maps each grammar symbol to a pair of parallel lists
# (states, actions); the parser wants _lr_action[state][symbol] = action.
_lr_action = {}
for _sym, _pair in _lr_action_items.items():
    for _state, _act in zip(_pair[0], _pair[1]):
        # setdefault creates the per-state row on first use
        _lr_action.setdefault(_state, {})[_sym] = _act
# The symbol-major table is no longer needed; free it.
del _lr_action_items
_lr_goto_items = {'member_values':([411,],[681,]),'statement_expression':([1,11,20,37,207,279,330,677,709,784,849,874,976,984,986,987,1019,1068,1071,1129,1131,1134,1156,1157,1160,],[5,5,5,5,5,569,5,5,5,920,5,5,5,569,5,5,569,5,5,5,5,5,5,569,5,]),'type_parameter1':([199,673,],[396,864,]),'for_update':([1019,1157,],[1096,1096,]),'switch_block':([672,],[862,]),'class_body_declarations':([228,488,],[458,458,]),'try_statement_with_resources':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,]),'catch_type':([955,],[1052,]),'if_then_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,]),'conditional_and_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,364,365,428,507,508,509,519,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,649,135,135,739,135,748,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,]),'additional_bound':([867,982,],[980,1076,]),'annotation_name':([1,4,20,24,171,178,183,188,207,208,227,228,232,235,279,309,381,382,383,387,411,441,451,453,458,463,468,480,485,488,491,566,608,666,685,709,720,826,832,882,894,976,984,1068,1071,1092,],[21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,]),'single_static_import_declaration':([4,171,178,381,],[182,182,182,182,]),'empty_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,]),'type_argument1':([107,230,597,604,653,942,947,1117,],[302,302,809,30
2,302,809,302,809,]),'while_statement_no_short_if':([677,874,987,1129,1131,1156,1160,],[869,869,869,869,869,869,869,]),'method_body':([464,472,],[708,710,]),'catch_clause':([310,614,615,835,],[612,833,612,833,]),'default_value':([1008,],[1091,]),'static_initializer':([228,458,488,],[460,460,460,]),'enum_header_name':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,83,]),'synchronized_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,91,]),'statement_expression_list':([279,984,1019,1157,],[568,568,1099,1099,]),'array_initializer':([428,625,626,687,951,991,1050,],[688,844,846,688,688,688,688,]),'import_declarations':([4,171,],[178,381,]),'class_header_name1':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,]),'modifiers_opt':([1,4,20,171,178,183,207,227,228,232,235,381,382,387,441,453,458,463,480,485,488,666,709,720,832,894,976,1068,1071,],[18,18,18,18,18,18,18,444,461,483,495,18,18,18,693,444,461,693,693,483,461,18,18,495,955,693,18,18,18,]),'interface_member_declarations_opt':([227,],[445,]),'constant_declaration':([227,232,453,485,],[443,479,443,479,]),'constant_expression':([977,],[1072,]),'constructor_header_name':([228,232,458,485,488,],[463,463,463,463,463,]),'try_block':([108,312,],[310,615,]),'member_value_pairs_opt':([208,],[413,]),'constructor_declaration':([228,232,458,485,488,],[469,484,469,484,469,]),'enum_body_declarations_opt':([235,487,492,720,],[496,719,723,908,]),'additive_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,354,355,356,364,365,370,371,372,373,374,375,377,378,380,428,499,500,501,502,503,504,505,506,507,508,509,510,511,515,516,518,519,521,522,523,525,526,527,528,531,532,536,537,539,543,552,553,560,62
1,624,653,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,637,638,639,141,141,141,141,141,141,141,141,141,141,141,141,141,141,732,733,734,141,141,141,141,141,141,141,742,141,141,747,141,141,141,752,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,141,]),'static_import_on_demand_declaration':([4,171,178,381,],[185,185,185,185,]),'type':([1,20,24,207,279,309,444,461,483,566,608,693,699,706,709,715,826,955,976,984,1068,1071,1122,],[22,22,214,22,565,607,698,698,714,782,829,893,898,898,22,905,607,1053,22,565,22,22,1147,]),'method_invocation':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[23,142,23,142,23,142,23,142,142,142,142,142,142,142,142,142,142,142,23,142,142,142,142,142,142,142,142,142,142,23,142,142,142,142,23,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,23,142,142,142,23,142,142,23,142,142,142,142,142,142,142,
142,142,142,23,142,142,142,23,142,142,142,142,142,142,142,142,142,142,142,142,142,142,23,142,142,23,23,23,142,142,23,142,142,23,23,142,142,23,142,23,23,23,23,23,]),'assignment_expression_not_name':([40,154,220,],[244,244,244,]),'modifiers':([1,4,20,171,178,183,207,227,228,232,235,279,309,381,382,387,441,453,458,463,480,485,488,666,709,720,826,832,894,976,984,1068,1071,],[24,188,24,383,383,383,24,451,468,468,491,566,608,383,383,383,491,451,468,491,491,468,468,383,24,491,608,491,491,24,566,24,24,]),'annotation_method_header_default_value_opt':([1008,],[1093,]),'annotation_type_declaration_header_name':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,26,]),'literal':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,10
1,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,]),'method_header_throws_clause':([901,997,],[1005,1005,]),'class_header_extends':([26,54,],[224,224,]),'class_header':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,]),'resource':([309,826,],[609,950,]),'inclusive_or_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,428,499,507,508,509,519,525,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,633,160,160,160,730,160,160,160,160,754,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,160,]),'statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[30,191,30,236,30,628,872,30,962,236,30,1080,628,30,30,872,962,1154,1080,1154,]),'switch_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,]),'wildcard3':([947,1117,],[1047,1047,]),'interface_header_name':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,]),'interface_type':([285,315,789,],[576,576,926,]),'exclusive_or_expression_not_name':([40,154,208,220,411,685,882,1092,],[247,247,247,247,247,247,247,247,]),'arguments_opt':([497,],[726,]),'wildcard_bounds2':([817,1042,],[946,946,]),'relational_expression_not_name':([40,154,208,220,411,685,882,1092,],[248,248,248,248,248,248,248,248,]),'annotation_type_declaration_header':([1,4,20,171,178,183,207,227,228,232,38
1,382,387,453,458,485,488,666,709,976,1068,1071,],[33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,]),'enum_constants':([235,],[487,]),'single_member_annotation_member_value':([208,],[418,]),'goal':([0,],[3,]),'package_declaration':([4,],[171,]),'class_body_opt':([959,1055,1060,1103,1124,1136,],[1059,1123,1125,1135,1148,1155,]),'pre_decrement_expression':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,653,671,677,685,687,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1129,1130,1131,1134,1156,1157,1160,],[34,134,34,134,34,134,34,238,134,134,134,134,134,238,134,134,134,134,34,238,238,134,134,134,134,134,134,134,134,34,134,134,134,134,34,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,238,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,34,238,134,34,134,134,34,134,134,134,134,134,134,134,134,134,134,34,134,134,34,238,134,134,134,134,134,134,134,134,134,134,134,134,34,134,134,34,34,34,134,134,34,134,134,34,34,238,34,134,34,34,34,34,34,]),'wildcard':([107,230,597,604,653,942,947,1117,],[298,298,298,298,298,298,298,298,]),'type_argument_list3':([947,],[1043,]),'conditional_or_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,364,428,507,509,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,95
8,977,983,988,991,1026,1050,1130,],[150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,150,]),'break_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,]),'enhanced_for_statement_header':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[37,37,37,37,37,37,874,37,37,874,37,37,874,37,37,874,874,37,874,874,]),'block_statement':([1,20,207,709,976,1068,1071,],[39,204,406,204,204,204,406,]),'additional_bound_list1':([867,],[978,]),'method_header_extended_dims':([895,903,],[997,1008,]),'class_body':([29,489,959,1055,1060,1103,1124,1136,],[229,722,1057,1057,1057,1057,1057,1057,]),'method_header_name':([227,228,453,458,488,],[441,441,441,441,441,]),'semi_opt':([606,],[825,]),'additive_expression_not_name':([40,154,208,220,411,685,882,1092,],[260,260,260,260,260,260,260,260,]),'interface_header':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,]),'type_declaration':([4,171,178,183,232,381,382,387,485,666,],[179,179,179,388,482,179,388,388,482,388,]),'post_decrement_expression':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,98
7,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[41,156,41,156,41,156,41,253,156,156,156,156,156,253,156,156,156,156,41,419,253,156,156,156,156,156,156,156,156,41,156,156,156,156,41,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,419,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,41,419,156,156,41,156,156,41,156,156,156,156,156,156,156,156,156,156,41,156,156,156,41,419,156,156,156,156,156,156,156,156,156,156,156,156,156,41,156,156,41,41,41,156,156,41,156,156,41,41,419,156,41,156,41,41,41,41,41,]),'one_dim_loop':([100,112,125,145,166,192,210,216,217,246,292,299,304,305,366,367,416,434,567,619,652,781,815,855,856,895,896,903,904,919,921,966,],[291,291,291,291,291,291,291,291,291,291,593,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,291,]),'class_body_declarations_opt':([228,488,],[457,721,]),'array_creation_without_array_initializer':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,4
2,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,]),'marker_annotation':([1,4,20,24,171,178,183,188,207,208,227,228,232,235,279,309,381,382,383,387,411,441,451,453,458,463,468,480,485,488,491,566,608,666,685,709,720,826,832,882,894,976,984,1068,1071,1092,],[43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,]),'class_or_interface_type':([1,20,24,107,116,158,207,223,230,279,285,309,315,317,323,376,444,461,483,529,542,566,583,597,599,600,604,608,618,653,675,693,699,706,709,715,789,794,826,854,942,943,944,947,955,976,979,984,1004,1068,1071,1113,1116,1117,1122,1132,],[44,44,44,44,326,326,44,436,44,44,578,44,578,436,436,44,44,44,44,44,44,44,436,44,44,44,44,44,436,44,44,44,44,44,44,44,578,436,44,966,44,44,44,44,44,44,44,44,436,44,44,44,44,44,44,436,]),'relational_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,374,375,377,378,380,428,499,507,508,509,510,515,516,519,525,527,531,532,536,537,539,543,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,159,]),'annotation_type_member_declarations_opt':([232,],[481,]),'member_value_pairs':([208,],[417,]),'type_argument_list':([107,230,604,653,947,],[296,296,816,296,1044,]),'enum_header':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,]
,[35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,]),'unary_expression_not_plus_minus':([2,16,25,58,73,77,132,147,155,157,194,203,237,249,252,254,268,270,275,276,281,290,294,295,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,687,692,729,776,785,786,790,791,792,798,799,800,836,841,850,852,857,911,912,924,930,932,933,934,938,939,940,951,958,965,977,983,988,991,1026,1050,1126,1130,],[152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,851,152,152,152,851,152,152,152,152,152,152,152,152,152,152,152,152,152,964,152,152,152,152,152,152,152,152,152,152,152,152,152,1061,152,152,152,152,152,152,1149,152,]),'assignment_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,364,428,507,509,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[153,153,153,153,153,153,153,153,153,153,153,153,153,153,629,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,153,]),'switch_labels':([863,972,],[976,1068,]),'explicit_constructor_invocation':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,]),'interface_header_extends':([109,],[316,]),'expression_not_name':([40,154,220,],[257,257,257,]),'instanceof_expression':(
[2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,374,375,377,378,380,428,499,507,508,509,510,515,516,519,525,527,531,532,536,537,539,543,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,663,664,162,162,162,162,162,162,162,162,745,162,162,162,760,162,762,763,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,162,]),'simple_name':([1,2,11,16,20,24,25,37,40,58,73,77,107,116,118,132,147,154,155,157,158,177,184,194,203,207,208,215,220,223,230,237,249,252,254,268,270,275,276,279,281,285,289,290,294,295,309,315,317,319,323,330,335,348,350,351,352,353,354,355,356,358,359,360,364,365,370,371,372,373,374,375,376,377,378,380,384,390,411,422,428,432,444,461,483,499,500,501,502,503,504,505,506,507,508,509,510,511,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,552,553,560,566,583,597,599,600,603,604,608,618,620,621,624,650,653,669,671,675,677,683,685,687,692,693,699,706,709,715,729,776,783,784,785,786,789,790,791,792,794,798,799,800,826,836,841,849,850,852,854,857,859,874,882,911,912,924,930,932,933,934,938,939,940,942,943,944,947,951,955,958,965,976,977,979,983,984,986,987,988,991,1004,1018,1019,1026,1050,1068,1071,1092,1113,1116,1117,1122,1126,1129,1130,1131,1132,1134,1156,1157,1160,],[48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,423,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,4
8,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,884,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,588,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,]),'try_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,]),'class_header_implements_opt':([83,225,267,],[283,437,547,]),'local_variable_declaration':([1,20,207,279,709,976,984,1068,1071,],[50,50,50,571,50,50,571,50,50,]),'variable_declarator_id':([22,214,424,565,607,698,714,782,829,893,995,1052,],[211,211,211,211,828,211,211,211,952,994,1085,1121,]),'type_declarations':([4,171,178,381,],[183,382,387,666,]),'annotation_type_body':([33,],[233,]),'postfix_expression_not_name':([40,154,208,220,411,685,882,1092,],[250,250,250,250,250,250,250,250,]),'field_declaration':([227,228,232,453,458,485,488,],[450,467,450,450,467,450,467,]),'type_argument_list2':([604,947,],[820,820,]),'type_parameter_list1':([199,],[397,]),'instanceof_expression_not_name':([40,154,208,220,411,685,882,1092,],[261,261,261,261,261,261,261,261,]),'inclusive_or_expression_not_name':([40,154,208,220,411,685,882,1092,],[262,262,262,262,262,262,262,262,]),'cast_expression':([2,16,25,40,58,73,77,132,147,154,155,157,194,203,208,220,237,249,252,254,268,270,275,276,281,290,294,295,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,685,687,692,729,776,785,786,790,791,792,798,799,800,836,841,850,852,857,882,911,912,924,930,932,933,934,938,939,940,951,958,965,977,983,988,991,1026,1050,1092,1126,1130,],[165,165,165,263,165,165,165,165,165,263,165,165,165,165,263,263,165,165,165,1
65,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,263,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,263,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,263,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,165,263,165,165,]),'wildcard_bounds1':([297,817,1042,],[602,602,602,]),'reference_type':([1,20,24,107,207,230,279,309,376,444,461,483,529,542,566,597,599,600,604,608,653,675,693,699,706,709,715,826,942,943,944,947,955,976,979,984,1068,1071,1113,1116,1117,1122,],[94,94,94,307,94,307,94,94,662,94,94,94,758,767,94,810,812,814,823,94,307,867,94,94,94,94,94,94,1037,1039,1041,1048,94,94,1075,94,94,94,1142,1144,1146,94,]),'array_creation_with_array_initializer':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,5
6,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,]),'enhanced_for_statement_header_init':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,]),'class_declaration':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[76,187,76,187,187,187,76,449,466,187,187,187,187,449,466,187,466,187,76,76,76,76,]),'do_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,92,]),'enhanced_for_statement_no_short_if':([677,874,987,1129,1131,1156,1160,],[879,879,879,879,879,879,879,]),'catch_formal_parameter':([832,],[954,]),'variable_declarator':([22,214,424,565,698,714,782,],[212,212,686,212,212,212,212,]),'method_declaration':([228,458,488,],[475,475,475,]),'assignment':([1,2,11,16,20,37,40,58,77,154,194,203,207,220,268,270,275,276,279,281,290,294,295,330,335,348,364,428,507,509,552,553,560,621,624,671,677,687,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,874,924,930,932,933,934,938,939,940,951,958,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1129,1130,1131,1134,1156,1157,1160,],[121,144,121,144,121,121,243,144,144,243,144,144,121,243,144,144,144,144,121,144,144,144,144,121,144,144,144,144,144,144,144,144,144,144,144,144,121,144,121,144,144,121,144,144,144,144,144,144,144,144,144,144,121,121,144,144,144,144,144,144,144,144,144,144,121,144,144,121,121,121,144,144,121,144,144,121,121,121,144,121,121,121,121,121,]),'type_argument_list1':([107,230,604,653,947,],[308,308,308,308,308,]),'enum_declaration':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[60,169,60,169,169,169,60,438,454,169,169,169,169,438,454,169,454,169,60,60,60,60,]),'block':([1,11,20,37,108,207,228,312,330,458,473,488,677,709,806,830,849,874,976,986,987,105
1,1068,1071,1129,1131,1134,1156,1160,],[59,59,59,59,311,59,471,311,59,471,711,471,59,59,941,953,59,59,59,59,59,1120,59,59,59,59,59,59,59,]),'additional_bound1':([867,982,],[981,1077,]),'arguments':([497,],[727,]),'class_header_extends_opt':([26,54,],[225,267,]),'member_value_pair':([208,683,],[407,885,]),'throw_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,]),'type_parameter_list':([199,],[399,]),'dims':([100,112,125,145,166,192,210,216,217,246,299,304,305,366,367,416,434,567,619,652,781,815,855,856,895,896,903,904,919,921,966,],[293,318,346,361,379,361,425,431,433,361,318,431,433,651,655,361,651,293,838,425,425,838,293,346,425,425,425,425,425,361,425,]),'primary_no_new_array':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,]),'enum_body':([35,],[234,]),'whi
le_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,]),'resource_specification':([108,],[312,]),'type_arguments':([1,11,20,31,37,116,158,207,280,288,289,306,317,330,357,360,363,366,434,514,583,677,709,780,783,822,849,874,976,986,987,1018,1068,1071,1129,1131,1134,1156,1160,],[65,65,65,231,65,323,323,65,574,584,590,231,618,65,574,645,647,652,652,645,794,65,65,647,645,231,65,65,65,65,65,645,65,65,65,65,65,65,65,]),'assignment_operator':([123,151,],[335,335,]),'array_type':([1,20,24,107,207,230,279,309,376,444,461,483,529,542,566,597,599,600,604,608,653,675,693,699,706,709,715,826,942,943,944,947,955,976,979,984,1068,1071,1113,1116,1117,1122,],[67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,67,]),'and_expression_not_name':([40,154,208,220,411,685,882,1092,],[258,258,258,258,258,258,258,258,]),'class_header_name':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,]),'enum_constant_header_name':([235,720,],[497,497,]),'dims_opt':([210,367,652,781,895,896,903,904,919,966,],[427,654,853,918,998,427,998,427,1017,1062,]),'expression_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,]),'dims_loop':([100,112,125,145,166,192,210,216,217,246,299,304,305,366,367,416,434,567,619,652,781,815,855,856,895,896,903,904,919,921,966,],[292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,292,]),'multiplicative_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,428,499,500,501,502,503,504,505,506,507,508,509,510,511,515,516,517,518,519,520,521,5
22,523,525,526,527,528,531,532,536,537,539,540,541,543,552,553,560,621,624,653,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,641,642,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,746,137,137,749,137,137,137,137,137,137,137,137,137,137,137,137,765,766,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,]),'additional_bound_list':([867,],[982,]),'conditional_and_expression_not_name':([40,154,208,220,411,685,882,1092,],[239,239,239,239,239,239,239,239,]),'empty':([1,4,16,20,26,54,83,109,171,178,183,207,208,210,225,227,228,232,235,267,275,276,279,281,310,367,381,382,387,441,453,458,463,480,485,487,488,492,497,552,553,606,615,621,652,666,687,709,720,729,781,785,786,790,791,792,798,799,800,832,836,841,894,895,896,901,903,904,919,924,930,932,933,934,938,939,940,958,959,966,976,984,997,1008,1019,1026,1055,1060,1068,1071,1103,1124,1130,1136,1157,],[71,176,195,205,222,222,282,313,71,71,71,71,408,426,282,442,459,478,494,282,554,554,562,554,611,426,71,71,71,697,71,71,697,697,71,718,459,718,728,554,554,827,611,554,426,71,887,205,494,554,426,195,554,554,554,554,554,554,554,71,554,554,71,426,426,1003,426,426,426,554,554,554,554,554,554,554,554,554,1058,426,71,562,1003,1094,1097,554,1058,1058,71,71,1058,1058,195,1058,1097,]),'interface_type_list':([285,315,],[577,616,]),'array_access':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539
,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,]),'for_update_opt':([1019,1157,],[1098,1159,]),'primary':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[97,148,97,148,97,148,97,251,148,148,148,148,148,251,148,148,148,148,97,410,251,148,148,148,148,148,148,148,148,563,148,148,148,148,97,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,410,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,97,410,148,148,97,148,148,563,148,148,148,
148,148,148,148,148,148,148,97,148,148,148,97,410,148,148,148,148,148,148,148,148,148,148,148,148,148,97,148,148,563,97,97,148,148,563,148,148,97,97,410,148,97,148,97,97,97,563,97,]),'resources':([309,],[606,]),'switch_block_statements':([863,],[972,]),'block_statements_opt':([20,709,],[206,902,]),'shift_expression_not_name':([40,154,208,220,411,685,882,1092,],[240,240,240,240,240,240,240,240,]),'formal_parameter_list':([441,463,480,],[694,694,694,]),'unary_expression_not_name':([40,154,208,220,411,685,882,1092,],[241,241,241,241,241,241,241,241,]),'interface_member_declarations':([227,],[453,]),'field_access':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,]),'if_then_else_statement_no_short_if':([677,874,987,1129,1131,1156,1160,],[873,873,873,873,873,873,873,]),'class_instance_creation_expression':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,
220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[117,139,117,139,117,139,117,139,139,139,139,139,139,139,139,139,139,139,117,139,139,139,139,139,139,139,139,139,139,117,139,139,139,139,117,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,139,117,139,139,139,117,139,139,117,139,139,139,139,139,139,139,139,139,139,117,139,139,139,117,139,139,139,139,139,139,139,139,139,139,139,139,139,139,117,139,139,117,117,117,139,139,117,139,139,117,117,139,139,117,139,117,117,117,117,117,]),'if_then_else_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,]),'wildcard_bounds3':([1042,],[1114,]),'return_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,82,]),'member_value':([208,411,685,882,1092,],[414,678,886,989,1133,]),'finally':([610,834,],[831,956,]),'expression_opt':([16,785,1130,],[196,922,1151,]),'type_parameter':([199,673,],[398,865,]),'annotation_type_declaration':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[84,186,84,186,186,186,84,448,465,186,186,
186,186,448,465,186,465,186,84,84,84,84,]),'switch_label':([863,972,976,1068,],[973,973,1069,1069,]),'enum_constant_header':([235,720,],[489,489,]),'argument_list':([275,276,281,552,553,621,729,786,790,791,792,798,799,800,836,841,924,930,932,933,934,938,939,940,958,1026,],[555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,555,]),'assert_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,]),'abstract_method_declaration':([227,228,453,458,488,],[452,470,452,470,470,]),'enhanced_for_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,]),'type_argument2':([604,942,947,1117,],[818,1036,818,1036,]),'class_type_list':([1004,],[1089,]),'enum_declarations':([235,487,492,720,],[493,493,493,493,]),'pre_increment_expression':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,653,671,677,685,687,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1129,1130,1131,1134,1156,1157,1160,],[90,143,90,143,90,143,90,245,143,143,143,143,143,245,143,143,143,143,90,245,245,143,143,143,143,143,143,143,143,90,143,143,143,143,90,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,245,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,
143,143,143,143,143,143,143,143,90,245,143,90,143,143,90,143,143,143,143,143,143,143,143,143,143,90,143,143,90,245,143,143,143,143,143,143,143,143,143,143,143,143,90,143,143,90,90,90,143,143,90,143,143,90,90,245,90,143,90,90,90,90,90,]),'package_declaration_name':([4,],[189,]),'annotation_type_member_declarations':([232,],[485,]),'name':([1,2,11,16,20,24,25,37,40,58,73,77,107,116,118,132,147,154,155,157,158,177,184,194,203,207,208,215,220,223,230,237,249,252,254,268,270,275,276,279,281,285,290,294,295,309,315,317,319,323,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,384,390,411,422,428,444,461,483,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,552,553,560,566,583,597,599,600,603,604,608,618,620,621,624,650,653,671,675,677,685,687,692,693,699,706,709,715,729,776,784,785,786,789,790,791,792,794,798,799,800,826,836,841,849,850,852,854,857,874,882,911,912,924,930,932,933,934,938,939,940,942,943,944,947,951,955,958,965,976,977,979,983,984,986,987,988,991,1004,1019,1026,1050,1068,1071,1092,1113,1116,1117,1122,1126,1129,1130,1131,1132,1134,1156,1157,1160,],[100,145,192,145,100,216,145,192,246,145,145,145,304,324,328,145,145,366,145,145,324,385,389,145,145,100,416,328,434,324,304,145,145,145,145,145,145,145,145,567,145,324,145,145,145,216,324,324,619,324,192,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,216,145,145,145,667,670,416,328,145,216,216,216,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,216,145,145,145,145,145,145,145,145,216,145,145,145,145,145,145,145,216,324,304,304,304,815,304,216,324,839,145,145,145,855,145,304,192,416,145,145,216,216,216,100,216,145,145,921,145,145,324,145,145,145,324,145,145,145,216,145,145,192,145,145,324,145,192,416,145,145,145,145,145,145,145,145,145,145,304,304,304,304,145,21
6,145,145,100,145,304,145,567,192,192,145,145,324,921,145,145,100,100,416,304,304,304,216,145,192,145,192,324,192,192,921,192,]),'class_type_elt':([1004,1132,],[1090,1153,]),'continue_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,]),'class_member_declaration':([228,458,488,],[474,474,474,]),'and_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,374,380,428,499,507,508,509,510,515,519,525,532,543,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,665,161,161,161,161,161,741,161,161,161,761,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,161,]),'dim_with_or_without_exprs':([325,326,],[625,626,]),'method_header_throws_clause_opt':([901,997,],[1006,1086,]),'interface_member_declaration':([227,453,],[439,702,]),'unary_expression':([2,16,25,58,73,77,132,147,155,157,194,203,237,249,252,254,268,270,275,276,281,290,294,295,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,653,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,857,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[146,146,219,146,277,146,349,362,368,369,146,146,498,533,534,535,146,146,146,146,146,146,146,146,146,146,146,634,635,636,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,743,146,146,146,146,146,146,146,146,146,753,146,146,146,146,759,14
6,146,146,146,146,146,146,146,769,770,771,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,967,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,]),'for_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,95,]),'trailing_semicolon':([606,],[826,]),'single_member_annotation':([1,4,20,24,171,178,183,188,207,208,227,228,232,235,279,309,381,382,383,387,411,441,451,453,458,463,468,480,485,488,491,566,608,666,685,709,720,826,832,882,894,976,984,1068,1071,1092,],[96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,]),'comma_opt':([687,],[891,]),'class_header_implements':([83,225,267,],[284,284,284,]),'local_variable_declaration_statement':([1,20,207,709,976,1068,1071,],[98,98,98,98,98,98,98,]),'formal_parameter':([441,463,480,894,],[696,696,696,996,]),'class_body_declaration':([228,458,488,],[456,704,456,]),'expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,348,364,428,507,509,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[149,197,269,278,393,404,548,550,556,556,556,591,595,596,632,648,689,738,740,556,556,779,556,842,861,689,556,915,197,556,556,556,556,556,556,556,556,556,556,556,556,556,556,556,556,556,689,556,1073,1078,1082,689,556,689,197,]),'single_type_import_declaration':([4,171,178,381,],[173,173,173,173,]),'wildcard2':([604,942,947,1117,],[819,819,819,819,]),'enum_constant':([235,720,],[490,907,]),'postfix_expression':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,51
7,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[123,151,123,151,123,221,123,123,151,221,151,221,221,123,221,221,151,151,123,409,123,221,221,221,221,151,151,151,151,123,151,151,151,151,123,151,151,221,221,221,221,221,221,221,221,221,151,221,221,221,221,221,221,221,221,221,221,409,151,221,221,221,221,221,221,221,221,151,221,151,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,221,151,151,151,151,151,221,221,151,123,409,151,221,123,151,151,123,151,151,151,151,151,151,151,151,151,151,123,221,221,221,123,409,221,221,151,151,151,151,151,151,151,151,151,151,221,123,151,151,123,123,123,151,151,123,151,151,123,123,409,221,123,151,123,123,123,123,123,]),'annotation_type_member_declaration':([232,485,],[477,716,]),'variable_declarators':([22,214,565,698,714,782,],[209,429,209,897,897,429,]),'interface_header_extends_opt':([109,],[314,]),'dim_with_or_without_expr':([325,326,625,626,],[623,623,845,845,]),'shift_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,370,371,372,373,374,375,377,378,380,428,499,500,504,505,506,507,508,509,510,515,516,519,521,522,525,526,527,528,531,532,536,537,539,543,552,553,560,621,624,653,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,656,657,658,659,138,138,138,138,138,138,138,731,735,736,737,138,138,138,138,138,138,138,750,751,138,755,138,757,138,138,138,138,138,138,138,138,138,138,138,750,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138
,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,]),'qualified_name':([1,2,11,16,20,24,25,37,40,58,73,77,107,116,118,132,147,154,155,157,158,177,184,194,203,207,208,215,220,223,230,237,249,252,254,268,270,275,276,279,281,285,290,294,295,309,315,317,319,323,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,384,390,411,422,428,444,461,483,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,552,553,560,566,583,597,599,600,603,604,608,618,620,621,624,650,653,671,675,677,685,687,692,693,699,706,709,715,729,776,784,785,786,789,790,791,792,794,798,799,800,826,836,841,849,850,852,854,857,874,882,911,912,924,930,932,933,934,938,939,940,942,943,944,947,951,955,958,965,976,977,979,983,984,986,987,988,991,1004,1019,1026,1050,1068,1071,1092,1113,1116,1117,1122,1126,1129,1130,1131,1132,1134,1156,1157,1160,],[57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,]),'for_statement_no_short_if':([677,874,987,1129,1131,1156,1160,],[875,875,875,875,875,875,875,]),'import_declaration':([4,171,178,381,],[174,174,386,386,]),'reference_type2':([604,942,943,944,947,1113,1116,1117,],[821,821,1038,1040,821,1038,1040,821,]),'for_init':([279,984,],[570,570,]),'annotation_method_header_name':([232,485,],[480,480,]),'typ
e_argument':([107,230,597,604,653,942,947,1117,],[301,301,808,301,301,808,301,808,]),'reference_type3':([947,1113,1116,1117,],[1046,1141,1143,1046,]),'conditional_expression_not_name':([40,154,208,220,411,685,882,1092,],[255,255,420,255,420,420,420,420,]),'equality_expression_not_name':([40,154,208,220,411,685,882,1092,],[256,256,256,256,256,256,256,256,]),'type_parameter_header':([199,673,],[400,400,]),'class_or_interface':([1,20,24,107,116,158,207,223,230,279,285,309,315,317,323,376,444,461,483,529,542,566,583,597,599,600,604,608,618,653,675,693,699,706,709,715,789,794,826,854,942,943,944,947,955,976,979,984,1004,1068,1071,1113,1116,1117,1122,1132,],[31,31,31,306,31,31,31,31,306,31,31,31,31,31,31,31,31,31,31,31,31,31,31,306,306,306,822,31,31,306,306,31,31,31,31,31,31,31,31,31,822,822,822,822,31,31,306,31,31,31,31,822,822,822,31,31,]),'statement_no_short_if':([677,874,987,1129,1131,1156,1160,],[876,985,1081,1150,1152,1158,1161,]),'variable_initializers':([687,],[889,]),'compilation_unit':([4,],[172,]),'statement_without_trailing_substatement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[110,110,110,110,110,110,878,110,110,878,110,110,878,110,110,878,878,110,878,878,]),'class_instance_creation_expression_name':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[111,111,111,111,111,111,111,111,111,111,111,111,111,111,11
1,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,]),'generic_type':([1,20,24,107,116,158,207,223,230,279,285,309,315,317,323,376,444,461,483,529,542,566,583,597,599,600,604,608,618,653,675,693,699,706,709,715,789,794,826,854,942,943,944,947,955,976,979,984,1004,1068,1071,1113,1116,1117,1122,1132,],[112,112,112,299,321,321,112,321,299,112,321,112,321,321,321,112,112,112,112,112,112,112,321,299,299,299,299,112,321,299,299,112,112,112,112,112,321,321,112,321,299,299,299,299,112,112,299,112,321,112,112,299,299,299,112,321,]),'class_type':([116,158,223,317,323,583,618,794,1004,1132,],[322,322,435,617,622,793,837,931,1088,1088,]),'type_import_on_demand_declaration':([4,171,178,381,],[180,180,180,180,]),'interface_body':([28,],[226,]),'labeled_statement_no_short_if':([677,874,987,1129,1131,1156,1160,],[868,868,868,868,868,868,868,]),'argument_list_opt':([275,276,281,552,553,621,729,786,790,791,792,798,799,800,836,841,924,930,932,933,934,938,939,940,958,1026,],[557,558,575,774,775,840,910,923,927,928,929,935,936,937,957,960,1021,1025,1027,1028,1029,1033,1034,1035,1056,1104,]),'type_argument3':([947,1117,],[1045,1145,]),'union_type':([955,],[1054,]),'variable_initializer':([428,687,951,991,1050,],[690,890,1049,1084,1119,]),'reference_type1':([107,230,597,599,600,604,653,675,942,943,944,947,979,1113,1116,1117,],[300,300,300,811,813,300,300,866,300,811,813,300,1074,811,813,300,]),'method_header':([227,228,453,458,488,],[447,464,44
7,464,464,]),'annotation':([1,4,20,24,171,178,183,188,207,208,227,228,232,235,279,309,381,382,383,387,411,441,451,453,458,463,468,480,485,488,491,566,608,666,685,709,720,826,832,882,894,976,984,1068,1071,1092,],[115,115,115,115,115,115,115,115,115,421,115,115,115,115,115,115,115,115,115,115,421,115,115,115,115,115,115,115,115,115,115,115,115,115,421,115,115,115,115,421,115,115,115,115,115,421,]),'catches_opt':([310,615,],[610,834,]),'switch_block_statement':([863,972,],[974,1066,]),'block_statements':([20,709,976,1068,],[207,207,1071,1071,]),'post_increment_expression':([1,2,11,16,20,25,37,40,58,73,77,132,147,154,155,157,194,203,207,208,220,237,249,252,254,268,270,275,276,279,281,290,294,295,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,377,378,380,411,428,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,530,531,532,536,537,539,540,541,543,544,545,546,552,553,560,621,624,650,653,671,677,685,687,692,709,729,776,784,785,786,790,791,792,798,799,800,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,951,958,965,976,977,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1126,1129,1130,1131,1134,1156,1157,1160,],[78,163,78,163,78,163,78,259,163,163,163,163,163,259,163,163,163,163,78,412,259,163,163,163,163,163,163,163,163,78,163,163,163,163,78,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,412,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,163,78,412,163,163,78,163,163,78,163,163,163,163,163,163,163,163,163,163,78,163,163,163,78,412,163,163,163,163,163,163,163,163,163,163,163,163,163,78,163,163,78,78,78,163,163,78,163,163,78,78,412,163,78,163,78,78,78,78,78,]),'type_parameters':([17,119,401,444,461,483,627,691,],[198,329,676,699,706,715,847,892,]),'annotation_method_head
er':([232,485,],[486,486,]),'formal_parameter_list_opt':([441,463,480,],[695,707,712,]),'interface_header_name1':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,]),'label':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[120,120,120,120,120,120,877,120,120,877,120,120,877,120,120,877,877,120,877,877,]),'equality_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,374,375,380,428,499,507,508,509,510,515,519,525,527,532,539,543,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,661,164,164,164,164,164,164,164,164,164,164,756,164,764,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,164,]),'member_value_array_initializer':([208,411,685,882,1092,],[415,415,415,415,415,]),'wildcard1':([107,230,597,604,653,942,947,1117,],[303,303,303,303,303,303,303,303,]),'wildcard_bounds':([297,817,1042,],[601,601,601,]),'labeled_statement':([1,11,20,37,207,330,677,709,849,874,976,986,987,1068,1071,1129,1131,1134,1156,1160,],[6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,]),'conditional_or_expression_not_name':([40,154,208,220,411,685,882,1092,],[242,242,242,242,242,242,242,242,]),'for_init_opt':([279,984,],[572,1079,]),'normal_annotation':([1,4,20,24,171,178,183,188,207,208,227,228,232,235,279,309,381,382,383,387,411,441,451,453,458,463,468,480,485,488,491,566,608,666,685,709,720,826,832,882,894,976,984,1068,1071,1092,],[124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,]
),'primitive_type':([1,2,11,16,20,24,25,37,40,58,73,77,107,116,132,147,154,155,157,158,194,203,207,208,220,230,237,249,252,254,268,270,275,276,279,281,290,294,295,309,330,335,348,350,351,352,353,354,355,356,358,359,364,365,370,371,372,373,374,375,376,377,378,380,411,428,444,461,483,499,500,501,502,503,504,505,506,507,508,509,510,511,513,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,536,537,539,540,541,542,543,544,545,546,552,553,560,566,597,599,600,604,608,621,624,650,653,671,675,677,685,687,692,693,699,706,709,715,729,776,784,785,786,790,791,792,798,799,800,826,836,841,849,850,852,857,874,882,911,912,924,930,932,933,934,938,939,940,942,943,944,947,951,955,958,965,976,977,979,983,984,986,987,988,991,1019,1026,1050,1068,1071,1092,1113,1116,1117,1122,1126,1129,1130,1131,1134,1156,1157,1160,],[125,166,166,166,125,217,166,166,166,166,166,166,305,325,166,166,367,166,166,325,166,166,125,166,367,305,166,166,166,166,166,166,166,166,125,166,166,166,166,217,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,305,166,166,166,166,166,217,217,217,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,305,166,166,166,166,166,166,166,166,305,166,166,166,166,166,166,166,217,305,305,305,305,217,166,166,166,856,166,305,166,166,166,166,217,217,217,125,217,166,166,166,166,166,166,166,166,166,166,166,217,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,166,305,305,305,305,166,217,166,166,125,166,305,166,125,166,166,166,166,166,166,166,125,125,166,305,305,305,217,166,166,166,166,166,166,166,166,]),'modifier':([1,4,20,24,171,178,183,188,207,227,228,232,235,279,309,381,382,383,387,441,451,453,458,463,468,480,485,488,491,566,608,666,709,720,826,832,894,976,984,1068,1071,],[106,106,106,218,106,106,106,218,106,106,106,106,106,106,106,106,106,218,106,106,218,106,106,106,218,106,106,106,218,218,218,106,106,106,106,106,106,106,106,106,106,]),'constructor_header':([228
,232,458,485,488,],[472,472,472,472,472,]),'unary_expression_not_plus_minus_not_name':([40,154,208,220,411,685,882,1092,],[264,264,264,264,264,264,264,264,]),'conditional_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,364,428,507,509,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,963,1013,1014,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,167,]),'catches':([310,615,],[614,835,]),'exclusive_or_expression':([2,16,58,77,194,203,268,270,275,276,281,290,294,295,335,348,350,364,365,374,428,499,507,508,509,515,519,525,543,552,553,560,621,624,671,687,729,776,785,786,790,791,792,798,799,800,836,841,850,911,912,924,930,932,933,934,938,939,940,951,958,977,983,988,991,1026,1050,1130,],[168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,660,168,168,168,168,168,744,168,168,768,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,168,]),'interface_declaration':([1,4,20,171,178,183,207,227,228,232,381,382,387,453,458,485,488,666,709,976,1068,1071,],[69,170,69,170,170,170,69,440,455,170,170,170,170,440,455,170,455,170,69,69,69,69,]),'multiplicative_expression_not_name':([40,154,208,220,411,685,882,1092,],[265,265,265,265,265,265,265,265,]),}
# Expand the compact goto table emitted by PLY into its runtime form:
# _lr_goto_items maps nonterminal -> (state_list, goto_state_list); the
# parser instead wants _lr_goto[state][nonterminal] -> goto_state.
_lr_goto = {}
for _nonterminal, _pair in _lr_goto_items.items():
    # _pair[0] and _pair[1] are parallel lists: source state -> goto state.
    for _state, _goto_state in zip(_pair[0], _pair[1]):
        _lr_goto.setdefault(_state, {})[_nonterminal] = _goto_state
# The compact form is no longer needed once the nested table is built.
del _lr_goto_items
_lr_productions = [
("S' -> goal","S'",1,None,None,None),
('expression -> assignment_expression','expression',1,'p_expression','parser.py',123),
('expression_not_name -> assignment_expression_not_name','expression_not_name',1,'p_expression_not_name','parser.py',127),
('assignment_expression -> assignment','assignment_expression',1,'p_assignment_expression','parser.py',131),
('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','parser.py',132),
('assignment_expression_not_name -> assignment','assignment_expression_not_name',1,'p_assignment_expression_not_name','parser.py',136),
('assignment_expression_not_name -> conditional_expression_not_name','assignment_expression_not_name',1,'p_assignment_expression_not_name','parser.py',137),
('assignment -> postfix_expression assignment_operator assignment_expression','assignment',3,'p_assignment','parser.py',141),
('assignment_operator -> =','assignment_operator',1,'p_assignment_operator','parser.py',145),
('assignment_operator -> TIMES_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',146),
('assignment_operator -> DIVIDE_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',147),
('assignment_operator -> REMAINDER_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',148),
('assignment_operator -> PLUS_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',149),
('assignment_operator -> MINUS_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',150),
('assignment_operator -> LSHIFT_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',151),
('assignment_operator -> RSHIFT_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',152),
('assignment_operator -> RRSHIFT_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',153),
('assignment_operator -> AND_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',154),
('assignment_operator -> OR_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',155),
('assignment_operator -> XOR_ASSIGN','assignment_operator',1,'p_assignment_operator','parser.py',156),
('conditional_expression -> conditional_or_expression','conditional_expression',1,'p_conditional_expression','parser.py',160),
('conditional_expression -> conditional_or_expression ? expression : conditional_expression','conditional_expression',5,'p_conditional_expression','parser.py',161),
('conditional_expression_not_name -> conditional_or_expression_not_name','conditional_expression_not_name',1,'p_conditional_expression_not_name','parser.py',168),
('conditional_expression_not_name -> conditional_or_expression_not_name ? expression : conditional_expression','conditional_expression_not_name',5,'p_conditional_expression_not_name','parser.py',169),
('conditional_expression_not_name -> name ? expression : conditional_expression','conditional_expression_not_name',5,'p_conditional_expression_not_name','parser.py',170),
('conditional_or_expression -> conditional_and_expression','conditional_or_expression',1,'p_conditional_or_expression','parser.py',183),
('conditional_or_expression -> conditional_or_expression OR conditional_and_expression','conditional_or_expression',3,'p_conditional_or_expression','parser.py',184),
('conditional_or_expression_not_name -> conditional_and_expression_not_name','conditional_or_expression_not_name',1,'p_conditional_or_expression_not_name','parser.py',188),
('conditional_or_expression_not_name -> conditional_or_expression_not_name OR conditional_and_expression','conditional_or_expression_not_name',3,'p_conditional_or_expression_not_name','parser.py',189),
('conditional_or_expression_not_name -> name OR conditional_and_expression','conditional_or_expression_not_name',3,'p_conditional_or_expression_not_name','parser.py',190),
('conditional_and_expression -> inclusive_or_expression','conditional_and_expression',1,'p_conditional_and_expression','parser.py',194),
('conditional_and_expression -> conditional_and_expression AND inclusive_or_expression','conditional_and_expression',3,'p_conditional_and_expression','parser.py',195),
('conditional_and_expression_not_name -> inclusive_or_expression_not_name','conditional_and_expression_not_name',1,'p_conditional_and_expression_not_name','parser.py',199),
('conditional_and_expression_not_name -> conditional_and_expression_not_name AND inclusive_or_expression','conditional_and_expression_not_name',3,'p_conditional_and_expression_not_name','parser.py',200),
('conditional_and_expression_not_name -> name AND inclusive_or_expression','conditional_and_expression_not_name',3,'p_conditional_and_expression_not_name','parser.py',201),
('inclusive_or_expression -> exclusive_or_expression','inclusive_or_expression',1,'p_inclusive_or_expression','parser.py',205),
('inclusive_or_expression -> inclusive_or_expression | exclusive_or_expression','inclusive_or_expression',3,'p_inclusive_or_expression','parser.py',206),
('inclusive_or_expression_not_name -> exclusive_or_expression_not_name','inclusive_or_expression_not_name',1,'p_inclusive_or_expression_not_name','parser.py',210),
('inclusive_or_expression_not_name -> inclusive_or_expression_not_name | exclusive_or_expression','inclusive_or_expression_not_name',3,'p_inclusive_or_expression_not_name','parser.py',211),
('inclusive_or_expression_not_name -> name | exclusive_or_expression','inclusive_or_expression_not_name',3,'p_inclusive_or_expression_not_name','parser.py',212),
('exclusive_or_expression -> and_expression','exclusive_or_expression',1,'p_exclusive_or_expression','parser.py',216),
('exclusive_or_expression -> exclusive_or_expression ^ and_expression','exclusive_or_expression',3,'p_exclusive_or_expression','parser.py',217),
('exclusive_or_expression_not_name -> and_expression_not_name','exclusive_or_expression_not_name',1,'p_exclusive_or_expression_not_name','parser.py',221),
('exclusive_or_expression_not_name -> exclusive_or_expression_not_name ^ and_expression','exclusive_or_expression_not_name',3,'p_exclusive_or_expression_not_name','parser.py',222),
('exclusive_or_expression_not_name -> name ^ and_expression','exclusive_or_expression_not_name',3,'p_exclusive_or_expression_not_name','parser.py',223),
('and_expression -> equality_expression','and_expression',1,'p_and_expression','parser.py',227),
('and_expression -> and_expression & equality_expression','and_expression',3,'p_and_expression','parser.py',228),
('and_expression_not_name -> equality_expression_not_name','and_expression_not_name',1,'p_and_expression_not_name','parser.py',232),
('and_expression_not_name -> and_expression_not_name & equality_expression','and_expression_not_name',3,'p_and_expression_not_name','parser.py',233),
('and_expression_not_name -> name & equality_expression','and_expression_not_name',3,'p_and_expression_not_name','parser.py',234),
('equality_expression -> instanceof_expression','equality_expression',1,'p_equality_expression','parser.py',238),
('equality_expression -> equality_expression EQ instanceof_expression','equality_expression',3,'p_equality_expression','parser.py',239),
('equality_expression -> equality_expression NEQ instanceof_expression','equality_expression',3,'p_equality_expression','parser.py',240),
('equality_expression_not_name -> instanceof_expression_not_name','equality_expression_not_name',1,'p_equality_expression_not_name','parser.py',244),
('equality_expression_not_name -> equality_expression_not_name EQ instanceof_expression','equality_expression_not_name',3,'p_equality_expression_not_name','parser.py',245),
('equality_expression_not_name -> name EQ instanceof_expression','equality_expression_not_name',3,'p_equality_expression_not_name','parser.py',246),
('equality_expression_not_name -> equality_expression_not_name NEQ instanceof_expression','equality_expression_not_name',3,'p_equality_expression_not_name','parser.py',247),
('equality_expression_not_name -> name NEQ instanceof_expression','equality_expression_not_name',3,'p_equality_expression_not_name','parser.py',248),
('instanceof_expression -> relational_expression','instanceof_expression',1,'p_instanceof_expression','parser.py',252),
('instanceof_expression -> instanceof_expression INSTANCEOF reference_type','instanceof_expression',3,'p_instanceof_expression','parser.py',253),
('instanceof_expression_not_name -> relational_expression_not_name','instanceof_expression_not_name',1,'p_instanceof_expression_not_name','parser.py',257),
('instanceof_expression_not_name -> name INSTANCEOF reference_type','instanceof_expression_not_name',3,'p_instanceof_expression_not_name','parser.py',258),
('instanceof_expression_not_name -> instanceof_expression_not_name INSTANCEOF reference_type','instanceof_expression_not_name',3,'p_instanceof_expression_not_name','parser.py',259),
('relational_expression -> shift_expression','relational_expression',1,'p_relational_expression','parser.py',263),
('relational_expression -> relational_expression > shift_expression','relational_expression',3,'p_relational_expression','parser.py',264),
('relational_expression -> relational_expression < shift_expression','relational_expression',3,'p_relational_expression','parser.py',265),
('relational_expression -> relational_expression GTEQ shift_expression','relational_expression',3,'p_relational_expression','parser.py',266),
('relational_expression -> relational_expression LTEQ shift_expression','relational_expression',3,'p_relational_expression','parser.py',267),
('relational_expression_not_name -> shift_expression_not_name','relational_expression_not_name',1,'p_relational_expression_not_name','parser.py',271),
('relational_expression_not_name -> shift_expression_not_name < shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',272),
('relational_expression_not_name -> name < shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',273),
('relational_expression_not_name -> shift_expression_not_name > shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',274),
('relational_expression_not_name -> name > shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',275),
('relational_expression_not_name -> shift_expression_not_name GTEQ shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',276),
('relational_expression_not_name -> name GTEQ shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',277),
('relational_expression_not_name -> shift_expression_not_name LTEQ shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',278),
('relational_expression_not_name -> name LTEQ shift_expression','relational_expression_not_name',3,'p_relational_expression_not_name','parser.py',279),
('shift_expression -> additive_expression','shift_expression',1,'p_shift_expression','parser.py',283),
('shift_expression -> shift_expression LSHIFT additive_expression','shift_expression',3,'p_shift_expression','parser.py',284),
('shift_expression -> shift_expression RSHIFT additive_expression','shift_expression',3,'p_shift_expression','parser.py',285),
('shift_expression -> shift_expression RRSHIFT additive_expression','shift_expression',3,'p_shift_expression','parser.py',286),
('shift_expression_not_name -> additive_expression_not_name','shift_expression_not_name',1,'p_shift_expression_not_name','parser.py',290),
('shift_expression_not_name -> shift_expression_not_name LSHIFT additive_expression','shift_expression_not_name',3,'p_shift_expression_not_name','parser.py',291),
('shift_expression_not_name -> name LSHIFT additive_expression','shift_expression_not_name',3,'p_shift_expression_not_name','parser.py',292),
('shift_expression_not_name -> shift_expression_not_name RSHIFT additive_expression','shift_expression_not_name',3,'p_shift_expression_not_name','parser.py',293),
('shift_expression_not_name -> name RSHIFT additive_expression','shift_expression_not_name',3,'p_shift_expression_not_name','parser.py',294),
('shift_expression_not_name -> shift_expression_not_name RRSHIFT additive_expression','shift_expression_not_name',3,'p_shift_expression_not_name','parser.py',295),
('shift_expression_not_name -> name RRSHIFT additive_expression','shift_expression_not_name',3,'p_shift_expression_not_name','parser.py',296),
('additive_expression -> multiplicative_expression','additive_expression',1,'p_additive_expression','parser.py',300),
('additive_expression -> additive_expression + multiplicative_expression','additive_expression',3,'p_additive_expression','parser.py',301),
('additive_expression -> additive_expression - multiplicative_expression','additive_expression',3,'p_additive_expression','parser.py',302),
('additive_expression_not_name -> multiplicative_expression_not_name','additive_expression_not_name',1,'p_additive_expression_not_name','parser.py',306),
('additive_expression_not_name -> additive_expression_not_name + multiplicative_expression','additive_expression_not_name',3,'p_additive_expression_not_name','parser.py',307),
('additive_expression_not_name -> name + multiplicative_expression','additive_expression_not_name',3,'p_additive_expression_not_name','parser.py',308),
('additive_expression_not_name -> additive_expression_not_name - multiplicative_expression','additive_expression_not_name',3,'p_additive_expression_not_name','parser.py',309),
('additive_expression_not_name -> name - multiplicative_expression','additive_expression_not_name',3,'p_additive_expression_not_name','parser.py',310),
('multiplicative_expression -> unary_expression','multiplicative_expression',1,'p_multiplicative_expression','parser.py',314),
('multiplicative_expression -> multiplicative_expression * unary_expression','multiplicative_expression',3,'p_multiplicative_expression','parser.py',315),
('multiplicative_expression -> multiplicative_expression / unary_expression','multiplicative_expression',3,'p_multiplicative_expression','parser.py',316),
('multiplicative_expression -> multiplicative_expression % unary_expression','multiplicative_expression',3,'p_multiplicative_expression','parser.py',317),
('multiplicative_expression_not_name -> unary_expression_not_name','multiplicative_expression_not_name',1,'p_multiplicative_expression_not_name','parser.py',321),
('multiplicative_expression_not_name -> multiplicative_expression_not_name * unary_expression','multiplicative_expression_not_name',3,'p_multiplicative_expression_not_name','parser.py',322),
('multiplicative_expression_not_name -> name * unary_expression','multiplicative_expression_not_name',3,'p_multiplicative_expression_not_name','parser.py',323),
('multiplicative_expression_not_name -> multiplicative_expression_not_name / unary_expression','multiplicative_expression_not_name',3,'p_multiplicative_expression_not_name','parser.py',324),
('multiplicative_expression_not_name -> name / unary_expression','multiplicative_expression_not_name',3,'p_multiplicative_expression_not_name','parser.py',325),
('multiplicative_expression_not_name -> multiplicative_expression_not_name % unary_expression','multiplicative_expression_not_name',3,'p_multiplicative_expression_not_name','parser.py',326),
('multiplicative_expression_not_name -> name % unary_expression','multiplicative_expression_not_name',3,'p_multiplicative_expression_not_name','parser.py',327),
('unary_expression -> pre_increment_expression','unary_expression',1,'p_unary_expression','parser.py',331),
('unary_expression -> pre_decrement_expression','unary_expression',1,'p_unary_expression','parser.py',332),
('unary_expression -> + unary_expression','unary_expression',2,'p_unary_expression','parser.py',333),
('unary_expression -> - unary_expression','unary_expression',2,'p_unary_expression','parser.py',334),
('unary_expression -> unary_expression_not_plus_minus','unary_expression',1,'p_unary_expression','parser.py',335),
('unary_expression_not_name -> pre_increment_expression','unary_expression_not_name',1,'p_unary_expression_not_name','parser.py',342),
('unary_expression_not_name -> pre_decrement_expression','unary_expression_not_name',1,'p_unary_expression_not_name','parser.py',343),
('unary_expression_not_name -> + unary_expression','unary_expression_not_name',2,'p_unary_expression_not_name','parser.py',344),
('unary_expression_not_name -> - unary_expression','unary_expression_not_name',2,'p_unary_expression_not_name','parser.py',345),
('unary_expression_not_name -> unary_expression_not_plus_minus_not_name','unary_expression_not_name',1,'p_unary_expression_not_name','parser.py',346),
('pre_increment_expression -> PLUSPLUS unary_expression','pre_increment_expression',2,'p_pre_increment_expression','parser.py',353),
('pre_decrement_expression -> MINUSMINUS unary_expression','pre_decrement_expression',2,'p_pre_decrement_expression','parser.py',357),
('unary_expression_not_plus_minus -> postfix_expression','unary_expression_not_plus_minus',1,'p_unary_expression_not_plus_minus','parser.py',361),
('unary_expression_not_plus_minus -> ~ unary_expression','unary_expression_not_plus_minus',2,'p_unary_expression_not_plus_minus','parser.py',362),
('unary_expression_not_plus_minus -> ! unary_expression','unary_expression_not_plus_minus',2,'p_unary_expression_not_plus_minus','parser.py',363),
('unary_expression_not_plus_minus -> cast_expression','unary_expression_not_plus_minus',1,'p_unary_expression_not_plus_minus','parser.py',364),
('unary_expression_not_plus_minus_not_name -> postfix_expression_not_name','unary_expression_not_plus_minus_not_name',1,'p_unary_expression_not_plus_minus_not_name','parser.py',371),
('unary_expression_not_plus_minus_not_name -> ~ unary_expression','unary_expression_not_plus_minus_not_name',2,'p_unary_expression_not_plus_minus_not_name','parser.py',372),
('unary_expression_not_plus_minus_not_name -> ! unary_expression','unary_expression_not_plus_minus_not_name',2,'p_unary_expression_not_plus_minus_not_name','parser.py',373),
('unary_expression_not_plus_minus_not_name -> cast_expression','unary_expression_not_plus_minus_not_name',1,'p_unary_expression_not_plus_minus_not_name','parser.py',374),
('postfix_expression -> primary','postfix_expression',1,'p_postfix_expression','parser.py',381),
('postfix_expression -> name','postfix_expression',1,'p_postfix_expression','parser.py',382),
('postfix_expression -> post_increment_expression','postfix_expression',1,'p_postfix_expression','parser.py',383),
('postfix_expression -> post_decrement_expression','postfix_expression',1,'p_postfix_expression','parser.py',384),
('postfix_expression_not_name -> primary','postfix_expression_not_name',1,'p_postfix_expression_not_name','parser.py',388),
('postfix_expression_not_name -> post_increment_expression','postfix_expression_not_name',1,'p_postfix_expression_not_name','parser.py',389),
('postfix_expression_not_name -> post_decrement_expression','postfix_expression_not_name',1,'p_postfix_expression_not_name','parser.py',390),
('post_increment_expression -> postfix_expression PLUSPLUS','post_increment_expression',2,'p_post_increment_expression','parser.py',394),
('post_decrement_expression -> postfix_expression MINUSMINUS','post_decrement_expression',2,'p_post_decrement_expression','parser.py',398),
('primary -> primary_no_new_array','primary',1,'p_primary','parser.py',402),
('primary -> array_creation_with_array_initializer','primary',1,'p_primary','parser.py',403),
('primary -> array_creation_without_array_initializer','primary',1,'p_primary','parser.py',404),
('primary_no_new_array -> literal','primary_no_new_array',1,'p_primary_no_new_array','parser.py',408),
('primary_no_new_array -> THIS','primary_no_new_array',1,'p_primary_no_new_array','parser.py',409),
('primary_no_new_array -> class_instance_creation_expression','primary_no_new_array',1,'p_primary_no_new_array','parser.py',410),
('primary_no_new_array -> field_access','primary_no_new_array',1,'p_primary_no_new_array','parser.py',411),
('primary_no_new_array -> method_invocation','primary_no_new_array',1,'p_primary_no_new_array','parser.py',412),
('primary_no_new_array -> array_access','primary_no_new_array',1,'p_primary_no_new_array','parser.py',413),
('primary_no_new_array -> ( name )','primary_no_new_array',3,'p_primary_no_new_array2','parser.py',417),
('primary_no_new_array -> ( expression_not_name )','primary_no_new_array',3,'p_primary_no_new_array2','parser.py',418),
('primary_no_new_array -> name . THIS','primary_no_new_array',3,'p_primary_no_new_array3','parser.py',422),
('primary_no_new_array -> name . SUPER','primary_no_new_array',3,'p_primary_no_new_array3','parser.py',423),
('primary_no_new_array -> name . CLASS','primary_no_new_array',3,'p_primary_no_new_array4','parser.py',428),
('primary_no_new_array -> name dims . CLASS','primary_no_new_array',4,'p_primary_no_new_array4','parser.py',429),
('primary_no_new_array -> primitive_type dims . CLASS','primary_no_new_array',4,'p_primary_no_new_array4','parser.py',430),
('primary_no_new_array -> primitive_type . CLASS','primary_no_new_array',3,'p_primary_no_new_array4','parser.py',431),
('dims_opt -> dims','dims_opt',1,'p_dims_opt','parser.py',438),
('dims_opt -> empty','dims_opt',1,'p_dims_opt2','parser.py',442),
('dims -> dims_loop','dims',1,'p_dims','parser.py',446),
('dims_loop -> one_dim_loop','dims_loop',1,'p_dims_loop','parser.py',450),
('dims_loop -> dims_loop one_dim_loop','dims_loop',2,'p_dims_loop','parser.py',451),
('one_dim_loop -> [ ]','one_dim_loop',2,'p_one_dim_loop','parser.py',458),
('cast_expression -> ( primitive_type dims_opt ) unary_expression','cast_expression',5,'p_cast_expression','parser.py',462),
('cast_expression -> ( name type_arguments dims_opt ) unary_expression_not_plus_minus','cast_expression',6,'p_cast_expression2','parser.py',466),
('cast_expression -> ( name type_arguments . class_or_interface_type dims_opt ) unary_expression_not_plus_minus','cast_expression',8,'p_cast_expression3','parser.py',470),
('cast_expression -> ( name ) unary_expression_not_plus_minus','cast_expression',4,'p_cast_expression4','parser.py',476),
('cast_expression -> ( name dims ) unary_expression_not_plus_minus','cast_expression',5,'p_cast_expression5','parser.py',481),
('block -> { block_statements_opt }','block',3,'p_block','parser.py',488),
('block_statements_opt -> block_statements','block_statements_opt',1,'p_block_statements_opt','parser.py',492),
('block_statements_opt -> empty','block_statements_opt',1,'p_block_statements_opt2','parser.py',496),
('block_statements -> block_statement','block_statements',1,'p_block_statements','parser.py',500),
('block_statements -> block_statements block_statement','block_statements',2,'p_block_statements','parser.py',501),
('block_statement -> local_variable_declaration_statement','block_statement',1,'p_block_statement','parser.py',508),
('block_statement -> statement','block_statement',1,'p_block_statement','parser.py',509),
('block_statement -> class_declaration','block_statement',1,'p_block_statement','parser.py',510),
('block_statement -> interface_declaration','block_statement',1,'p_block_statement','parser.py',511),
('block_statement -> annotation_type_declaration','block_statement',1,'p_block_statement','parser.py',512),
('block_statement -> enum_declaration','block_statement',1,'p_block_statement','parser.py',513),
('local_variable_declaration_statement -> local_variable_declaration ;','local_variable_declaration_statement',2,'p_local_variable_declaration_statement','parser.py',517),
('local_variable_declaration -> type variable_declarators','local_variable_declaration',2,'p_local_variable_declaration','parser.py',521),
('local_variable_declaration -> modifiers type variable_declarators','local_variable_declaration',3,'p_local_variable_declaration2','parser.py',525),
('variable_declarators -> variable_declarator','variable_declarators',1,'p_variable_declarators','parser.py',529),
('variable_declarators -> variable_declarators , variable_declarator','variable_declarators',3,'p_variable_declarators','parser.py',530),
('variable_declarator -> variable_declarator_id','variable_declarator',1,'p_variable_declarator','parser.py',537),
('variable_declarator -> variable_declarator_id = variable_initializer','variable_declarator',3,'p_variable_declarator','parser.py',538),
('variable_declarator_id -> NAME dims_opt','variable_declarator_id',2,'p_variable_declarator_id','parser.py',545),
('variable_initializer -> expression','variable_initializer',1,'p_variable_initializer','parser.py',549),
('variable_initializer -> array_initializer','variable_initializer',1,'p_variable_initializer','parser.py',550),
('statement -> statement_without_trailing_substatement','statement',1,'p_statement','parser.py',554),
('statement -> labeled_statement','statement',1,'p_statement','parser.py',555),
('statement -> if_then_statement','statement',1,'p_statement','parser.py',556),
('statement -> if_then_else_statement','statement',1,'p_statement','parser.py',557),
('statement -> while_statement','statement',1,'p_statement','parser.py',558),
('statement -> for_statement','statement',1,'p_statement','parser.py',559),
('statement -> enhanced_for_statement','statement',1,'p_statement','parser.py',560),
('statement_without_trailing_substatement -> block','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',564),
('statement_without_trailing_substatement -> expression_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',565),
('statement_without_trailing_substatement -> assert_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',566),
('statement_without_trailing_substatement -> empty_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',567),
('statement_without_trailing_substatement -> switch_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',568),
('statement_without_trailing_substatement -> do_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',569),
('statement_without_trailing_substatement -> break_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',570),
('statement_without_trailing_substatement -> continue_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',571),
('statement_without_trailing_substatement -> return_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',572),
('statement_without_trailing_substatement -> synchronized_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',573),
('statement_without_trailing_substatement -> throw_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',574),
('statement_without_trailing_substatement -> try_statement','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',575),
('statement_without_trailing_substatement -> try_statement_with_resources','statement_without_trailing_substatement',1,'p_statement_without_trailing_substatement','parser.py',576),
('expression_statement -> statement_expression ;','expression_statement',2,'p_expression_statement','parser.py',580),
('expression_statement -> explicit_constructor_invocation','expression_statement',1,'p_expression_statement','parser.py',581),
('statement_expression -> assignment','statement_expression',1,'p_statement_expression','parser.py',588),
('statement_expression -> pre_increment_expression','statement_expression',1,'p_statement_expression','parser.py',589),
('statement_expression -> pre_decrement_expression','statement_expression',1,'p_statement_expression','parser.py',590),
('statement_expression -> post_increment_expression','statement_expression',1,'p_statement_expression','parser.py',591),
('statement_expression -> post_decrement_expression','statement_expression',1,'p_statement_expression','parser.py',592),
('statement_expression -> method_invocation','statement_expression',1,'p_statement_expression','parser.py',593),
('statement_expression -> class_instance_creation_expression','statement_expression',1,'p_statement_expression','parser.py',594),
('comma_opt -> ,','comma_opt',1,'p_comma_opt','parser.py',598),
('comma_opt -> empty','comma_opt',1,'p_comma_opt','parser.py',599),
('array_initializer -> { comma_opt }','array_initializer',3,'p_array_initializer','parser.py',603),
('array_initializer -> { variable_initializers }','array_initializer',3,'p_array_initializer2','parser.py',607),
('array_initializer -> { variable_initializers , }','array_initializer',4,'p_array_initializer2','parser.py',608),
('variable_initializers -> variable_initializer','variable_initializers',1,'p_variable_initializers','parser.py',612),
('variable_initializers -> variable_initializers , variable_initializer','variable_initializers',3,'p_variable_initializers','parser.py',613),
('method_invocation -> NAME ( argument_list_opt )','method_invocation',4,'p_method_invocation','parser.py',620),
('method_invocation -> name . type_arguments NAME ( argument_list_opt )','method_invocation',7,'p_method_invocation2','parser.py',624),
('method_invocation -> primary . type_arguments NAME ( argument_list_opt )','method_invocation',7,'p_method_invocation2','parser.py',625),
('method_invocation -> SUPER . type_arguments NAME ( argument_list_opt )','method_invocation',7,'p_method_invocation2','parser.py',626),
('method_invocation -> name . NAME ( argument_list_opt )','method_invocation',6,'p_method_invocation3','parser.py',630),
('method_invocation -> primary . NAME ( argument_list_opt )','method_invocation',6,'p_method_invocation3','parser.py',631),
('method_invocation -> SUPER . NAME ( argument_list_opt )','method_invocation',6,'p_method_invocation3','parser.py',632),
('labeled_statement -> label : statement','labeled_statement',3,'p_labeled_statement','parser.py',636),
('labeled_statement_no_short_if -> label : statement_no_short_if','labeled_statement_no_short_if',3,'p_labeled_statement_no_short_if','parser.py',641),
('label -> NAME','label',1,'p_label','parser.py',646),
('if_then_statement -> IF ( expression ) statement','if_then_statement',5,'p_if_then_statement','parser.py',650),
('if_then_else_statement -> IF ( expression ) statement_no_short_if ELSE statement','if_then_else_statement',7,'p_if_then_else_statement','parser.py',654),
('if_then_else_statement_no_short_if -> IF ( expression ) statement_no_short_if ELSE statement_no_short_if','if_then_else_statement_no_short_if',7,'p_if_then_else_statement_no_short_if','parser.py',658),
('while_statement -> WHILE ( expression ) statement','while_statement',5,'p_while_statement','parser.py',662),
('while_statement_no_short_if -> WHILE ( expression ) statement_no_short_if','while_statement_no_short_if',5,'p_while_statement_no_short_if','parser.py',666),
('for_statement -> FOR ( for_init_opt ; expression_opt ; for_update_opt ) statement','for_statement',9,'p_for_statement','parser.py',670),
('for_statement_no_short_if -> FOR ( for_init_opt ; expression_opt ; for_update_opt ) statement_no_short_if','for_statement_no_short_if',9,'p_for_statement_no_short_if','parser.py',674),
('for_init_opt -> for_init','for_init_opt',1,'p_for_init_opt','parser.py',678),
('for_init_opt -> empty','for_init_opt',1,'p_for_init_opt','parser.py',679),
('for_init -> statement_expression_list','for_init',1,'p_for_init','parser.py',683),
('for_init -> local_variable_declaration','for_init',1,'p_for_init','parser.py',684),
('statement_expression_list -> statement_expression','statement_expression_list',1,'p_statement_expression_list','parser.py',688),
('statement_expression_list -> statement_expression_list , statement_expression','statement_expression_list',3,'p_statement_expression_list','parser.py',689),
('expression_opt -> expression','expression_opt',1,'p_expression_opt','parser.py',696),
('expression_opt -> empty','expression_opt',1,'p_expression_opt','parser.py',697),
('for_update_opt -> for_update','for_update_opt',1,'p_for_update_opt','parser.py',701),
('for_update_opt -> empty','for_update_opt',1,'p_for_update_opt','parser.py',702),
('for_update -> statement_expression_list','for_update',1,'p_for_update','parser.py',706),
('enhanced_for_statement -> enhanced_for_statement_header statement','enhanced_for_statement',2,'p_enhanced_for_statement','parser.py',710),
('enhanced_for_statement_no_short_if -> enhanced_for_statement_header statement_no_short_if','enhanced_for_statement_no_short_if',2,'p_enhanced_for_statement_no_short_if','parser.py',714),
('enhanced_for_statement_header -> enhanced_for_statement_header_init : expression )','enhanced_for_statement_header',4,'p_enhanced_for_statement_header','parser.py',718),
('enhanced_for_statement_header_init -> FOR ( type NAME dims_opt','enhanced_for_statement_header_init',5,'p_enhanced_for_statement_header_init','parser.py',723),
('enhanced_for_statement_header_init -> FOR ( modifiers type NAME dims_opt','enhanced_for_statement_header_init',6,'p_enhanced_for_statement_header_init2','parser.py',727),
('statement_no_short_if -> statement_without_trailing_substatement','statement_no_short_if',1,'p_statement_no_short_if','parser.py',731),
('statement_no_short_if -> labeled_statement_no_short_if','statement_no_short_if',1,'p_statement_no_short_if','parser.py',732),
('statement_no_short_if -> if_then_else_statement_no_short_if','statement_no_short_if',1,'p_statement_no_short_if','parser.py',733),
('statement_no_short_if -> while_statement_no_short_if','statement_no_short_if',1,'p_statement_no_short_if','parser.py',734),
('statement_no_short_if -> for_statement_no_short_if','statement_no_short_if',1,'p_statement_no_short_if','parser.py',735),
('statement_no_short_if -> enhanced_for_statement_no_short_if','statement_no_short_if',1,'p_statement_no_short_if','parser.py',736),
('assert_statement -> ASSERT expression ;','assert_statement',3,'p_assert_statement','parser.py',740),
('assert_statement -> ASSERT expression : expression ;','assert_statement',5,'p_assert_statement','parser.py',741),
('empty_statement -> ;','empty_statement',1,'p_empty_statement','parser.py',748),
('switch_statement -> SWITCH ( expression ) switch_block','switch_statement',5,'p_switch_statement','parser.py',752),
('switch_block -> { }','switch_block',2,'p_switch_block','parser.py',756),
('switch_block -> { switch_block_statements }','switch_block',3,'p_switch_block2','parser.py',760),
('switch_block -> { switch_labels }','switch_block',3,'p_switch_block3','parser.py',764),
('switch_block -> { switch_block_statements switch_labels }','switch_block',4,'p_switch_block4','parser.py',768),
('switch_block_statements -> switch_block_statement','switch_block_statements',1,'p_switch_block_statements','parser.py',772),
('switch_block_statements -> switch_block_statements switch_block_statement','switch_block_statements',2,'p_switch_block_statements','parser.py',773),
('switch_block_statement -> switch_labels block_statements','switch_block_statement',2,'p_switch_block_statement','parser.py',780),
('switch_labels -> switch_label','switch_labels',1,'p_switch_labels','parser.py',784),
('switch_labels -> switch_labels switch_label','switch_labels',2,'p_switch_labels','parser.py',785),
('switch_label -> CASE constant_expression :','switch_label',3,'p_switch_label','parser.py',792),
('switch_label -> DEFAULT :','switch_label',2,'p_switch_label','parser.py',793),
('constant_expression -> expression','constant_expression',1,'p_constant_expression','parser.py',800),
('do_statement -> DO statement WHILE ( expression ) ;','do_statement',7,'p_do_statement','parser.py',804),
('break_statement -> BREAK ;','break_statement',2,'p_break_statement','parser.py',808),
('break_statement -> BREAK NAME ;','break_statement',3,'p_break_statement','parser.py',809),
('continue_statement -> CONTINUE ;','continue_statement',2,'p_continue_statement','parser.py',816),
('continue_statement -> CONTINUE NAME ;','continue_statement',3,'p_continue_statement','parser.py',817),
('return_statement -> RETURN expression_opt ;','return_statement',3,'p_return_statement','parser.py',824),
('synchronized_statement -> SYNCHRONIZED ( expression ) block','synchronized_statement',5,'p_synchronized_statement','parser.py',828),
('throw_statement -> THROW expression ;','throw_statement',3,'p_throw_statement','parser.py',832),
('try_statement -> TRY try_block catches','try_statement',3,'p_try_statement','parser.py',836),
('try_statement -> TRY try_block catches_opt finally','try_statement',4,'p_try_statement','parser.py',837),
('try_block -> block','try_block',1,'p_try_block','parser.py',844),
('catches -> catch_clause','catches',1,'p_catches','parser.py',848),
('catches -> catches catch_clause','catches',2,'p_catches','parser.py',849),
('catches_opt -> catches','catches_opt',1,'p_catches_opt','parser.py',856),
('catches_opt -> empty','catches_opt',1,'p_catches_opt2','parser.py',860),
('catch_clause -> CATCH ( catch_formal_parameter ) block','catch_clause',5,'p_catch_clause','parser.py',864),
('catch_formal_parameter -> modifiers_opt catch_type variable_declarator_id','catch_formal_parameter',3,'p_catch_formal_parameter','parser.py',868),
('catch_type -> union_type','catch_type',1,'p_catch_type','parser.py',872),
('union_type -> type','union_type',1,'p_union_type','parser.py',876),
('union_type -> union_type | type','union_type',3,'p_union_type','parser.py',877),
('try_statement_with_resources -> TRY resource_specification try_block catches_opt','try_statement_with_resources',4,'p_try_statement_with_resources','parser.py',884),
('try_statement_with_resources -> TRY resource_specification try_block catches_opt finally','try_statement_with_resources',5,'p_try_statement_with_resources','parser.py',885),
('resource_specification -> ( resources semi_opt )','resource_specification',4,'p_resource_specification','parser.py',892),
('semi_opt -> ;','semi_opt',1,'p_semi_opt','parser.py',896),
('semi_opt -> empty','semi_opt',1,'p_semi_opt','parser.py',897),
('resources -> resource','resources',1,'p_resources','parser.py',901),
('resources -> resources trailing_semicolon resource','resources',3,'p_resources','parser.py',902),
('trailing_semicolon -> ;','trailing_semicolon',1,'p_trailing_semicolon','parser.py',909),
('resource -> type variable_declarator_id = variable_initializer','resource',4,'p_resource','parser.py',913),
('resource -> modifiers type variable_declarator_id = variable_initializer','resource',5,'p_resource2','parser.py',917),
('finally -> FINALLY block','finally',2,'p_finally','parser.py',921),
('explicit_constructor_invocation -> THIS ( argument_list_opt ) ;','explicit_constructor_invocation',5,'p_explicit_constructor_invocation','parser.py',925),
('explicit_constructor_invocation -> SUPER ( argument_list_opt ) ;','explicit_constructor_invocation',5,'p_explicit_constructor_invocation','parser.py',926),
('explicit_constructor_invocation -> type_arguments SUPER ( argument_list_opt ) ;','explicit_constructor_invocation',6,'p_explicit_constructor_invocation2','parser.py',930),
('explicit_constructor_invocation -> type_arguments THIS ( argument_list_opt ) ;','explicit_constructor_invocation',6,'p_explicit_constructor_invocation2','parser.py',931),
('explicit_constructor_invocation -> primary . SUPER ( argument_list_opt ) ;','explicit_constructor_invocation',7,'p_explicit_constructor_invocation3','parser.py',935),
('explicit_constructor_invocation -> name . SUPER ( argument_list_opt ) ;','explicit_constructor_invocation',7,'p_explicit_constructor_invocation3','parser.py',936),
('explicit_constructor_invocation -> primary . THIS ( argument_list_opt ) ;','explicit_constructor_invocation',7,'p_explicit_constructor_invocation3','parser.py',937),
('explicit_constructor_invocation -> name . THIS ( argument_list_opt ) ;','explicit_constructor_invocation',7,'p_explicit_constructor_invocation3','parser.py',938),
('explicit_constructor_invocation -> primary . type_arguments SUPER ( argument_list_opt ) ;','explicit_constructor_invocation',8,'p_explicit_constructor_invocation4','parser.py',942),
('explicit_constructor_invocation -> name . type_arguments SUPER ( argument_list_opt ) ;','explicit_constructor_invocation',8,'p_explicit_constructor_invocation4','parser.py',943),
('explicit_constructor_invocation -> primary . type_arguments THIS ( argument_list_opt ) ;','explicit_constructor_invocation',8,'p_explicit_constructor_invocation4','parser.py',944),
('explicit_constructor_invocation -> name . type_arguments THIS ( argument_list_opt ) ;','explicit_constructor_invocation',8,'p_explicit_constructor_invocation4','parser.py',945),
('class_instance_creation_expression -> NEW type_arguments class_type ( argument_list_opt ) class_body_opt','class_instance_creation_expression',7,'p_class_instance_creation_expression','parser.py',949),
('class_instance_creation_expression -> NEW class_type ( argument_list_opt ) class_body_opt','class_instance_creation_expression',6,'p_class_instance_creation_expression2','parser.py',953),
('class_instance_creation_expression -> primary . NEW type_arguments class_type ( argument_list_opt ) class_body_opt','class_instance_creation_expression',9,'p_class_instance_creation_expression3','parser.py',957),
('class_instance_creation_expression -> primary . NEW class_type ( argument_list_opt ) class_body_opt','class_instance_creation_expression',8,'p_class_instance_creation_expression4','parser.py',961),
('class_instance_creation_expression -> class_instance_creation_expression_name NEW class_type ( argument_list_opt ) class_body_opt','class_instance_creation_expression',7,'p_class_instance_creation_expression5','parser.py',965),
('class_instance_creation_expression -> class_instance_creation_expression_name NEW type_arguments class_type ( argument_list_opt ) class_body_opt','class_instance_creation_expression',8,'p_class_instance_creation_expression6','parser.py',969),
('class_instance_creation_expression_name -> name .','class_instance_creation_expression_name',2,'p_class_instance_creation_expression_name','parser.py',973),
('class_body_opt -> class_body','class_body_opt',1,'p_class_body_opt','parser.py',977),
('class_body_opt -> empty','class_body_opt',1,'p_class_body_opt','parser.py',978),
('field_access -> primary . NAME','field_access',3,'p_field_access','parser.py',982),
('field_access -> SUPER . NAME','field_access',3,'p_field_access','parser.py',983),
('array_access -> name [ expression ]','array_access',4,'p_array_access','parser.py',987),
('array_access -> primary_no_new_array [ expression ]','array_access',4,'p_array_access','parser.py',988),
('array_access -> array_creation_with_array_initializer [ expression ]','array_access',4,'p_array_access','parser.py',989),
('array_creation_with_array_initializer -> NEW primitive_type dim_with_or_without_exprs array_initializer','array_creation_with_array_initializer',4,'p_array_creation_with_array_initializer','parser.py',993),
('array_creation_with_array_initializer -> NEW class_or_interface_type dim_with_or_without_exprs array_initializer','array_creation_with_array_initializer',4,'p_array_creation_with_array_initializer','parser.py',994),
('dim_with_or_without_exprs -> dim_with_or_without_expr','dim_with_or_without_exprs',1,'p_dim_with_or_without_exprs','parser.py',998),
('dim_with_or_without_exprs -> dim_with_or_without_exprs dim_with_or_without_expr','dim_with_or_without_exprs',2,'p_dim_with_or_without_exprs','parser.py',999),
('dim_with_or_without_expr -> [ expression ]','dim_with_or_without_expr',3,'p_dim_with_or_without_expr','parser.py',1006),
('dim_with_or_without_expr -> [ ]','dim_with_or_without_expr',2,'p_dim_with_or_without_expr','parser.py',1007),
('array_creation_without_array_initializer -> NEW primitive_type dim_with_or_without_exprs','array_creation_without_array_initializer',3,'p_array_creation_without_array_initializer','parser.py',1014),
('array_creation_without_array_initializer -> NEW class_or_interface_type dim_with_or_without_exprs','array_creation_without_array_initializer',3,'p_array_creation_without_array_initializer','parser.py',1015),
('name -> simple_name','name',1,'p_name','parser.py',1021),
('name -> qualified_name','name',1,'p_name','parser.py',1022),
('simple_name -> NAME','simple_name',1,'p_simple_name','parser.py',1026),
('qualified_name -> name . simple_name','qualified_name',3,'p_qualified_name','parser.py',1030),
('literal -> NUM','literal',1,'p_literal','parser.py',1037),
('literal -> CHAR_LITERAL','literal',1,'p_literal','parser.py',1038),
('literal -> STRING_LITERAL','literal',1,'p_literal','parser.py',1039),
('literal -> TRUE','literal',1,'p_literal','parser.py',1040),
('literal -> FALSE','literal',1,'p_literal','parser.py',1041),
('literal -> NULL','literal',1,'p_literal','parser.py',1042),
('modifiers_opt -> modifiers','modifiers_opt',1,'p_modifiers_opt','parser.py',1048),
('modifiers_opt -> empty','modifiers_opt',1,'p_modifiers_opt2','parser.py',1052),
('modifiers -> modifier','modifiers',1,'p_modifiers','parser.py',1056),
('modifiers -> modifiers modifier','modifiers',2,'p_modifiers','parser.py',1057),
('modifier -> PUBLIC','modifier',1,'p_modifier','parser.py',1064),
('modifier -> PROTECTED','modifier',1,'p_modifier','parser.py',1065),
('modifier -> PRIVATE','modifier',1,'p_modifier','parser.py',1066),
('modifier -> STATIC','modifier',1,'p_modifier','parser.py',1067),
('modifier -> ABSTRACT','modifier',1,'p_modifier','parser.py',1068),
('modifier -> FINAL','modifier',1,'p_modifier','parser.py',1069),
('modifier -> NATIVE','modifier',1,'p_modifier','parser.py',1070),
('modifier -> SYNCHRONIZED','modifier',1,'p_modifier','parser.py',1071),
('modifier -> TRANSIENT','modifier',1,'p_modifier','parser.py',1072),
('modifier -> VOLATILE','modifier',1,'p_modifier','parser.py',1073),
('modifier -> STRICTFP','modifier',1,'p_modifier','parser.py',1074),
('modifier -> annotation','modifier',1,'p_modifier','parser.py',1075),
('type -> primitive_type','type',1,'p_type','parser.py',1079),
('type -> reference_type','type',1,'p_type','parser.py',1080),
('primitive_type -> BOOLEAN','primitive_type',1,'p_primitive_type','parser.py',1084),
('primitive_type -> VOID','primitive_type',1,'p_primitive_type','parser.py',1085),
('primitive_type -> BYTE','primitive_type',1,'p_primitive_type','parser.py',1086),
('primitive_type -> SHORT','primitive_type',1,'p_primitive_type','parser.py',1087),
('primitive_type -> INT','primitive_type',1,'p_primitive_type','parser.py',1088),
('primitive_type -> LONG','primitive_type',1,'p_primitive_type','parser.py',1089),
('primitive_type -> CHAR','primitive_type',1,'p_primitive_type','parser.py',1090),
('primitive_type -> FLOAT','primitive_type',1,'p_primitive_type','parser.py',1091),
('primitive_type -> DOUBLE','primitive_type',1,'p_primitive_type','parser.py',1092),
('reference_type -> class_or_interface_type','reference_type',1,'p_reference_type','parser.py',1096),
('reference_type -> array_type','reference_type',1,'p_reference_type','parser.py',1097),
('class_or_interface_type -> class_or_interface','class_or_interface_type',1,'p_class_or_interface_type','parser.py',1101),
('class_or_interface_type -> generic_type','class_or_interface_type',1,'p_class_or_interface_type','parser.py',1102),
('class_type -> class_or_interface_type','class_type',1,'p_class_type','parser.py',1106),
('class_or_interface -> name','class_or_interface',1,'p_class_or_interface','parser.py',1110),
('class_or_interface -> generic_type . name','class_or_interface',3,'p_class_or_interface','parser.py',1111),
('generic_type -> class_or_interface type_arguments','generic_type',2,'p_generic_type','parser.py',1118),
('generic_type -> class_or_interface < >','generic_type',3,'p_generic_type2','parser.py',1123),
('array_type -> primitive_type dims','array_type',2,'p_array_type','parser.py',1138),
('array_type -> name dims','array_type',2,'p_array_type','parser.py',1139),
('array_type -> generic_type dims','array_type',2,'p_array_type2','parser.py',1143),
('array_type -> generic_type . name dims','array_type',4,'p_array_type3','parser.py',1148),
('type_arguments -> < type_argument_list1','type_arguments',2,'p_type_arguments','parser.py',1152),
('type_argument_list1 -> type_argument1','type_argument_list1',1,'p_type_argument_list1','parser.py',1156),
('type_argument_list1 -> type_argument_list , type_argument1','type_argument_list1',3,'p_type_argument_list1','parser.py',1157),
('type_argument_list -> type_argument','type_argument_list',1,'p_type_argument_list','parser.py',1164),
('type_argument_list -> type_argument_list , type_argument','type_argument_list',3,'p_type_argument_list','parser.py',1165),
('type_argument -> reference_type','type_argument',1,'p_type_argument','parser.py',1172),
('type_argument -> wildcard','type_argument',1,'p_type_argument','parser.py',1173),
('type_argument1 -> reference_type1','type_argument1',1,'p_type_argument1','parser.py',1177),
('type_argument1 -> wildcard1','type_argument1',1,'p_type_argument1','parser.py',1178),
('reference_type1 -> reference_type >','reference_type1',2,'p_reference_type1','parser.py',1182),
('reference_type1 -> class_or_interface < type_argument_list2','reference_type1',3,'p_reference_type1','parser.py',1183),
('type_argument_list2 -> type_argument2','type_argument_list2',1,'p_type_argument_list2','parser.py',1191),
('type_argument_list2 -> type_argument_list , type_argument2','type_argument_list2',3,'p_type_argument_list2','parser.py',1192),
('type_argument2 -> reference_type2','type_argument2',1,'p_type_argument2','parser.py',1199),
('type_argument2 -> wildcard2','type_argument2',1,'p_type_argument2','parser.py',1200),
('reference_type2 -> reference_type RSHIFT','reference_type2',2,'p_reference_type2','parser.py',1204),
('reference_type2 -> class_or_interface < type_argument_list3','reference_type2',3,'p_reference_type2','parser.py',1205),
('type_argument_list3 -> type_argument3','type_argument_list3',1,'p_type_argument_list3','parser.py',1213),
('type_argument_list3 -> type_argument_list , type_argument3','type_argument_list3',3,'p_type_argument_list3','parser.py',1214),
('type_argument3 -> reference_type3','type_argument3',1,'p_type_argument3','parser.py',1221),
('type_argument3 -> wildcard3','type_argument3',1,'p_type_argument3','parser.py',1222),
('reference_type3 -> reference_type RRSHIFT','reference_type3',2,'p_reference_type3','parser.py',1226),
('wildcard -> ?','wildcard',1,'p_wildcard','parser.py',1230),
('wildcard -> ? wildcard_bounds','wildcard',2,'p_wildcard','parser.py',1231),
('wildcard_bounds -> EXTENDS reference_type','wildcard_bounds',2,'p_wildcard_bounds','parser.py',1238),
('wildcard_bounds -> SUPER reference_type','wildcard_bounds',2,'p_wildcard_bounds','parser.py',1239),
('wildcard1 -> ? >','wildcard1',2,'p_wildcard1','parser.py',1246),
('wildcard1 -> ? wildcard_bounds1','wildcard1',2,'p_wildcard1','parser.py',1247),
('wildcard_bounds1 -> EXTENDS reference_type1','wildcard_bounds1',2,'p_wildcard_bounds1','parser.py',1254),
('wildcard_bounds1 -> SUPER reference_type1','wildcard_bounds1',2,'p_wildcard_bounds1','parser.py',1255),
('wildcard2 -> ? RSHIFT','wildcard2',2,'p_wildcard2','parser.py',1262),
('wildcard2 -> ? wildcard_bounds2','wildcard2',2,'p_wildcard2','parser.py',1263),
('wildcard_bounds2 -> EXTENDS reference_type2','wildcard_bounds2',2,'p_wildcard_bounds2','parser.py',1270),
('wildcard_bounds2 -> SUPER reference_type2','wildcard_bounds2',2,'p_wildcard_bounds2','parser.py',1271),
('wildcard3 -> ? RRSHIFT','wildcard3',2,'p_wildcard3','parser.py',1278),
('wildcard3 -> ? wildcard_bounds3','wildcard3',2,'p_wildcard3','parser.py',1279),
('wildcard_bounds3 -> EXTENDS reference_type3','wildcard_bounds3',2,'p_wildcard_bounds3','parser.py',1286),
('wildcard_bounds3 -> SUPER reference_type3','wildcard_bounds3',2,'p_wildcard_bounds3','parser.py',1287),
('type_parameter_header -> NAME','type_parameter_header',1,'p_type_parameter_header','parser.py',1294),
('type_parameters -> < type_parameter_list1','type_parameters',2,'p_type_parameters','parser.py',1298),
('type_parameter_list -> type_parameter','type_parameter_list',1,'p_type_parameter_list','parser.py',1302),
('type_parameter_list -> type_parameter_list , type_parameter','type_parameter_list',3,'p_type_parameter_list','parser.py',1303),
('type_parameter -> type_parameter_header','type_parameter',1,'p_type_parameter','parser.py',1310),
('type_parameter -> type_parameter_header EXTENDS reference_type','type_parameter',3,'p_type_parameter','parser.py',1311),
('type_parameter -> type_parameter_header EXTENDS reference_type additional_bound_list','type_parameter',4,'p_type_parameter','parser.py',1312),
('additional_bound_list -> additional_bound','additional_bound_list',1,'p_additional_bound_list','parser.py',1321),
('additional_bound_list -> additional_bound_list additional_bound','additional_bound_list',2,'p_additional_bound_list','parser.py',1322),
('additional_bound -> & reference_type','additional_bound',2,'p_additional_bound','parser.py',1329),
('type_parameter_list1 -> type_parameter1','type_parameter_list1',1,'p_type_parameter_list1','parser.py',1333),
('type_parameter_list1 -> type_parameter_list , type_parameter1','type_parameter_list1',3,'p_type_parameter_list1','parser.py',1334),
('type_parameter1 -> type_parameter_header >','type_parameter1',2,'p_type_parameter1','parser.py',1341),
('type_parameter1 -> type_parameter_header EXTENDS reference_type1','type_parameter1',3,'p_type_parameter1','parser.py',1342),
('type_parameter1 -> type_parameter_header EXTENDS reference_type additional_bound_list1','type_parameter1',4,'p_type_parameter1','parser.py',1343),
('additional_bound_list1 -> additional_bound1','additional_bound_list1',1,'p_additional_bound_list1','parser.py',1352),
('additional_bound_list1 -> additional_bound_list additional_bound1','additional_bound_list1',2,'p_additional_bound_list1','parser.py',1353),
('additional_bound1 -> & reference_type1','additional_bound1',2,'p_additional_bound1','parser.py',1360),
('type_declaration -> class_declaration','type_declaration',1,'p_type_declaration','parser.py',1366),
('type_declaration -> interface_declaration','type_declaration',1,'p_type_declaration','parser.py',1367),
('type_declaration -> enum_declaration','type_declaration',1,'p_type_declaration','parser.py',1368),
('type_declaration -> annotation_type_declaration','type_declaration',1,'p_type_declaration','parser.py',1369),
('type_declaration -> ;','type_declaration',1,'p_type_declaration2','parser.py',1373),
('class_declaration -> class_header class_body','class_declaration',2,'p_class_declaration','parser.py',1377),
('class_header -> class_header_name class_header_extends_opt class_header_implements_opt','class_header',3,'p_class_header','parser.py',1383),
('class_header_name -> class_header_name1 type_parameters','class_header_name',2,'p_class_header_name','parser.py',1389),
('class_header_name -> class_header_name1','class_header_name',1,'p_class_header_name','parser.py',1390),
('class_header_name1 -> modifiers_opt CLASS NAME','class_header_name1',3,'p_class_header_name1','parser.py',1398),
('class_header_extends_opt -> class_header_extends','class_header_extends_opt',1,'p_class_header_extends_opt','parser.py',1402),
('class_header_extends_opt -> empty','class_header_extends_opt',1,'p_class_header_extends_opt','parser.py',1403),
('class_header_extends -> EXTENDS class_type','class_header_extends',2,'p_class_header_extends','parser.py',1407),
('class_header_implements_opt -> class_header_implements','class_header_implements_opt',1,'p_class_header_implements_opt','parser.py',1411),
('class_header_implements_opt -> empty','class_header_implements_opt',1,'p_class_header_implements_opt','parser.py',1412),
('class_header_implements -> IMPLEMENTS interface_type_list','class_header_implements',2,'p_class_header_implements','parser.py',1416),
('interface_type_list -> interface_type','interface_type_list',1,'p_interface_type_list','parser.py',1420),
('interface_type_list -> interface_type_list , interface_type','interface_type_list',3,'p_interface_type_list','parser.py',1421),
('interface_type -> class_or_interface_type','interface_type',1,'p_interface_type','parser.py',1428),
('class_body -> { class_body_declarations_opt }','class_body',3,'p_class_body','parser.py',1432),
('class_body_declarations_opt -> class_body_declarations','class_body_declarations_opt',1,'p_class_body_declarations_opt','parser.py',1436),
('class_body_declarations_opt -> empty','class_body_declarations_opt',1,'p_class_body_declarations_opt2','parser.py',1440),
('class_body_declarations -> class_body_declaration','class_body_declarations',1,'p_class_body_declarations','parser.py',1444),
('class_body_declarations -> class_body_declarations class_body_declaration','class_body_declarations',2,'p_class_body_declarations','parser.py',1445),
('class_body_declaration -> class_member_declaration','class_body_declaration',1,'p_class_body_declaration','parser.py',1452),
('class_body_declaration -> static_initializer','class_body_declaration',1,'p_class_body_declaration','parser.py',1453),
('class_body_declaration -> constructor_declaration','class_body_declaration',1,'p_class_body_declaration','parser.py',1454),
('class_body_declaration -> block','class_body_declaration',1,'p_class_body_declaration2','parser.py',1458),
('class_member_declaration -> field_declaration','class_member_declaration',1,'p_class_member_declaration','parser.py',1462),
('class_member_declaration -> class_declaration','class_member_declaration',1,'p_class_member_declaration','parser.py',1463),
('class_member_declaration -> method_declaration','class_member_declaration',1,'p_class_member_declaration','parser.py',1464),
('class_member_declaration -> interface_declaration','class_member_declaration',1,'p_class_member_declaration','parser.py',1465),
('class_member_declaration -> enum_declaration','class_member_declaration',1,'p_class_member_declaration','parser.py',1466),
('class_member_declaration -> annotation_type_declaration','class_member_declaration',1,'p_class_member_declaration','parser.py',1467),
('class_member_declaration -> ;','class_member_declaration',1,'p_class_member_declaration2','parser.py',1471),
('field_declaration -> modifiers_opt type variable_declarators ;','field_declaration',4,'p_field_declaration','parser.py',1475),
('static_initializer -> STATIC block','static_initializer',2,'p_static_initializer','parser.py',1479),
('constructor_declaration -> constructor_header method_body','constructor_declaration',2,'p_constructor_declaration','parser.py',1483),
('constructor_header -> constructor_header_name formal_parameter_list_opt ) method_header_throws_clause_opt','constructor_header',4,'p_constructor_header','parser.py',1489),
('constructor_header_name -> modifiers_opt type_parameters NAME (','constructor_header_name',4,'p_constructor_header_name','parser.py',1495),
('constructor_header_name -> modifiers_opt NAME (','constructor_header_name',3,'p_constructor_header_name','parser.py',1496),
('formal_parameter_list_opt -> formal_parameter_list','formal_parameter_list_opt',1,'p_formal_parameter_list_opt','parser.py',1503),
('formal_parameter_list_opt -> empty','formal_parameter_list_opt',1,'p_formal_parameter_list_opt2','parser.py',1507),
('formal_parameter_list -> formal_parameter','formal_parameter_list',1,'p_formal_parameter_list','parser.py',1511),
('formal_parameter_list -> formal_parameter_list , formal_parameter','formal_parameter_list',3,'p_formal_parameter_list','parser.py',1512),
('formal_parameter -> modifiers_opt type variable_declarator_id','formal_parameter',3,'p_formal_parameter','parser.py',1519),
('formal_parameter -> modifiers_opt type ELLIPSIS variable_declarator_id','formal_parameter',4,'p_formal_parameter','parser.py',1520),
('method_header_throws_clause_opt -> method_header_throws_clause','method_header_throws_clause_opt',1,'p_method_header_throws_clause_opt','parser.py',1527),
('method_header_throws_clause_opt -> empty','method_header_throws_clause_opt',1,'p_method_header_throws_clause_opt','parser.py',1528),
('method_header_throws_clause -> THROWS class_type_list','method_header_throws_clause',2,'p_method_header_throws_clause','parser.py',1532),
('class_type_list -> class_type_elt','class_type_list',1,'p_class_type_list','parser.py',1536),
('class_type_list -> class_type_list , class_type_elt','class_type_list',3,'p_class_type_list','parser.py',1537),
('class_type_elt -> class_type','class_type_elt',1,'p_class_type_elt','parser.py',1544),
('method_body -> { block_statements_opt }','method_body',3,'p_method_body','parser.py',1548),
('method_declaration -> abstract_method_declaration','method_declaration',1,'p_method_declaration','parser.py',1553),
('method_declaration -> method_header method_body','method_declaration',2,'p_method_declaration','parser.py',1554),
('abstract_method_declaration -> method_header ;','abstract_method_declaration',2,'p_abstract_method_declaration','parser.py',1564),
('method_header -> method_header_name formal_parameter_list_opt ) method_header_extended_dims method_header_throws_clause_opt','method_header',5,'p_method_header','parser.py',1571),
('method_header_name -> modifiers_opt type_parameters type NAME (','method_header_name',5,'p_method_header_name','parser.py',1578),
('method_header_name -> modifiers_opt type NAME (','method_header_name',4,'p_method_header_name','parser.py',1579),
('method_header_extended_dims -> dims_opt','method_header_extended_dims',1,'p_method_header_extended_dims','parser.py',1586),
('interface_declaration -> interface_header interface_body','interface_declaration',2,'p_interface_declaration','parser.py',1590),
('interface_header -> interface_header_name interface_header_extends_opt','interface_header',2,'p_interface_header','parser.py',1597),
('interface_header_name -> interface_header_name1 type_parameters','interface_header_name',2,'p_interface_header_name','parser.py',1602),
('interface_header_name -> interface_header_name1','interface_header_name',1,'p_interface_header_name','parser.py',1603),
('interface_header_name1 -> modifiers_opt INTERFACE NAME','interface_header_name1',3,'p_interface_header_name1','parser.py',1611),
('interface_header_extends_opt -> interface_header_extends','interface_header_extends_opt',1,'p_interface_header_extends_opt','parser.py',1615),
('interface_header_extends_opt -> empty','interface_header_extends_opt',1,'p_interface_header_extends_opt2','parser.py',1619),
('interface_header_extends -> EXTENDS interface_type_list','interface_header_extends',2,'p_interface_header_extends','parser.py',1623),
('interface_body -> { interface_member_declarations_opt }','interface_body',3,'p_interface_body','parser.py',1627),
('interface_member_declarations_opt -> interface_member_declarations','interface_member_declarations_opt',1,'p_interface_member_declarations_opt','parser.py',1631),
('interface_member_declarations_opt -> empty','interface_member_declarations_opt',1,'p_interface_member_declarations_opt2','parser.py',1635),
('interface_member_declarations -> interface_member_declaration','interface_member_declarations',1,'p_interface_member_declarations','parser.py',1639),
('interface_member_declarations -> interface_member_declarations interface_member_declaration','interface_member_declarations',2,'p_interface_member_declarations','parser.py',1640),
('interface_member_declaration -> constant_declaration','interface_member_declaration',1,'p_interface_member_declaration','parser.py',1647),
('interface_member_declaration -> abstract_method_declaration','interface_member_declaration',1,'p_interface_member_declaration','parser.py',1648),
('interface_member_declaration -> class_declaration','interface_member_declaration',1,'p_interface_member_declaration','parser.py',1649),
('interface_member_declaration -> interface_declaration','interface_member_declaration',1,'p_interface_member_declaration','parser.py',1650),
('interface_member_declaration -> enum_declaration','interface_member_declaration',1,'p_interface_member_declaration','parser.py',1651),
('interface_member_declaration -> annotation_type_declaration','interface_member_declaration',1,'p_interface_member_declaration','parser.py',1652),
('interface_member_declaration -> ;','interface_member_declaration',1,'p_interface_member_declaration2','parser.py',1656),
('constant_declaration -> field_declaration','constant_declaration',1,'p_constant_declaration','parser.py',1660),
('enum_declaration -> enum_header enum_body','enum_declaration',2,'p_enum_declaration','parser.py',1664),
('enum_header -> enum_header_name class_header_implements_opt','enum_header',2,'p_enum_header','parser.py',1670),
('enum_header_name -> modifiers_opt ENUM NAME','enum_header_name',3,'p_enum_header_name','parser.py',1675),
('enum_header_name -> modifiers_opt ENUM NAME type_parameters','enum_header_name',4,'p_enum_header_name','parser.py',1676),
('enum_body -> { enum_body_declarations_opt }','enum_body',3,'p_enum_body','parser.py',1683),
('enum_body -> { , enum_body_declarations_opt }','enum_body',4,'p_enum_body2','parser.py',1687),
('enum_body -> { enum_constants , enum_body_declarations_opt }','enum_body',5,'p_enum_body3','parser.py',1691),
('enum_body -> { enum_constants enum_body_declarations_opt }','enum_body',4,'p_enum_body4','parser.py',1695),
('enum_constants -> enum_constant','enum_constants',1,'p_enum_constants','parser.py',1699),
('enum_constants -> enum_constants , enum_constant','enum_constants',3,'p_enum_constants','parser.py',1700),
('enum_constant -> enum_constant_header class_body','enum_constant',2,'p_enum_constant','parser.py',1707),
('enum_constant -> enum_constant_header','enum_constant',1,'p_enum_constant','parser.py',1708),
('enum_constant_header -> enum_constant_header_name arguments_opt','enum_constant_header',2,'p_enum_constant_header','parser.py',1715),
('enum_constant_header_name -> modifiers_opt NAME','enum_constant_header_name',2,'p_enum_constant_header_name','parser.py',1720),
('arguments_opt -> arguments','arguments_opt',1,'p_arguments_opt','parser.py',1724),
('arguments_opt -> empty','arguments_opt',1,'p_arguments_opt2','parser.py',1728),
('arguments -> ( argument_list_opt )','arguments',3,'p_arguments','parser.py',1732),
('argument_list_opt -> argument_list','argument_list_opt',1,'p_argument_list_opt','parser.py',1736),
('argument_list_opt -> empty','argument_list_opt',1,'p_argument_list_opt2','parser.py',1740),
('argument_list -> expression','argument_list',1,'p_argument_list','parser.py',1744),
('argument_list -> argument_list , expression','argument_list',3,'p_argument_list','parser.py',1745),
('enum_body_declarations_opt -> enum_declarations','enum_body_declarations_opt',1,'p_enum_body_declarations_opt','parser.py',1752),
('enum_body_declarations_opt -> empty','enum_body_declarations_opt',1,'p_enum_body_declarations_opt2','parser.py',1756),
('enum_declarations -> ; class_body_declarations_opt','enum_declarations',2,'p_enum_body_declarations','parser.py',1760),
('annotation_type_declaration -> annotation_type_declaration_header annotation_type_body','annotation_type_declaration',2,'p_annotation_type_declaration','parser.py',1764),
('annotation_type_declaration_header -> annotation_type_declaration_header_name class_header_extends_opt class_header_implements_opt','annotation_type_declaration_header',3,'p_annotation_type_declaration_header','parser.py',1771),
('annotation_type_declaration_header_name -> modifiers @ INTERFACE NAME','annotation_type_declaration_header_name',4,'p_annotation_type_declaration_header_name','parser.py',1777),
('annotation_type_declaration_header_name -> modifiers @ INTERFACE NAME type_parameters','annotation_type_declaration_header_name',5,'p_annotation_type_declaration_header_name2','parser.py',1781),
('annotation_type_declaration_header_name -> @ INTERFACE NAME type_parameters','annotation_type_declaration_header_name',4,'p_annotation_type_declaration_header_name3','parser.py',1785),
('annotation_type_declaration_header_name -> @ INTERFACE NAME','annotation_type_declaration_header_name',3,'p_annotation_type_declaration_header_name4','parser.py',1789),
('annotation_type_body -> { annotation_type_member_declarations_opt }','annotation_type_body',3,'p_annotation_type_body','parser.py',1793),
('annotation_type_member_declarations_opt -> annotation_type_member_declarations','annotation_type_member_declarations_opt',1,'p_annotation_type_member_declarations_opt','parser.py',1797),
('annotation_type_member_declarations_opt -> empty','annotation_type_member_declarations_opt',1,'p_annotation_type_member_declarations_opt2','parser.py',1801),
('annotation_type_member_declarations -> annotation_type_member_declaration','annotation_type_member_declarations',1,'p_annotation_type_member_declarations','parser.py',1805),
('annotation_type_member_declarations -> annotation_type_member_declarations annotation_type_member_declaration','annotation_type_member_declarations',2,'p_annotation_type_member_declarations','parser.py',1806),
('annotation_type_member_declaration -> annotation_method_header ;','annotation_type_member_declaration',2,'p_annotation_type_member_declaration','parser.py',1813),
('annotation_type_member_declaration -> constant_declaration','annotation_type_member_declaration',1,'p_annotation_type_member_declaration','parser.py',1814),
('annotation_type_member_declaration -> constructor_declaration','annotation_type_member_declaration',1,'p_annotation_type_member_declaration','parser.py',1815),
('annotation_type_member_declaration -> type_declaration','annotation_type_member_declaration',1,'p_annotation_type_member_declaration','parser.py',1816),
('annotation_method_header -> annotation_method_header_name formal_parameter_list_opt ) method_header_extended_dims annotation_method_header_default_value_opt','annotation_method_header',5,'p_annotation_method_header','parser.py',1820),
('annotation_method_header_name -> modifiers_opt type_parameters type NAME (','annotation_method_header_name',5,'p_annotation_method_header_name','parser.py',1827),
('annotation_method_header_name -> modifiers_opt type NAME (','annotation_method_header_name',4,'p_annotation_method_header_name','parser.py',1828),
('annotation_method_header_default_value_opt -> default_value','annotation_method_header_default_value_opt',1,'p_annotation_method_header_default_value_opt','parser.py',1835),
('annotation_method_header_default_value_opt -> empty','annotation_method_header_default_value_opt',1,'p_annotation_method_header_default_value_opt','parser.py',1836),
('default_value -> DEFAULT member_value','default_value',2,'p_default_value','parser.py',1840),
('member_value -> conditional_expression_not_name','member_value',1,'p_member_value','parser.py',1844),
('member_value -> name','member_value',1,'p_member_value','parser.py',1845),
('member_value -> annotation','member_value',1,'p_member_value','parser.py',1846),
('member_value -> member_value_array_initializer','member_value',1,'p_member_value','parser.py',1847),
('member_value_array_initializer -> { member_values , }','member_value_array_initializer',4,'p_member_value_array_initializer','parser.py',1851),
('member_value_array_initializer -> { member_values }','member_value_array_initializer',3,'p_member_value_array_initializer','parser.py',1852),
('member_value_array_initializer -> { , }','member_value_array_initializer',3,'p_member_value_array_initializer2','parser.py',1856),
('member_value_array_initializer -> { }','member_value_array_initializer',2,'p_member_value_array_initializer2','parser.py',1857),
('member_values -> member_value','member_values',1,'p_member_values','parser.py',1861),
('member_values -> member_values , member_value','member_values',3,'p_member_values','parser.py',1862),
('annotation -> normal_annotation','annotation',1,'p_annotation','parser.py',1869),
('annotation -> marker_annotation','annotation',1,'p_annotation','parser.py',1870),
('annotation -> single_member_annotation','annotation',1,'p_annotation','parser.py',1871),
('normal_annotation -> annotation_name ( member_value_pairs_opt )','normal_annotation',4,'p_normal_annotation','parser.py',1875),
('annotation_name -> @ name','annotation_name',2,'p_annotation_name','parser.py',1879),
('member_value_pairs_opt -> member_value_pairs','member_value_pairs_opt',1,'p_member_value_pairs_opt','parser.py',1883),
('member_value_pairs_opt -> empty','member_value_pairs_opt',1,'p_member_value_pairs_opt2','parser.py',1887),
('member_value_pairs -> member_value_pair','member_value_pairs',1,'p_member_value_pairs','parser.py',1891),
('member_value_pairs -> member_value_pairs , member_value_pair','member_value_pairs',3,'p_member_value_pairs','parser.py',1892),
('member_value_pair -> simple_name = member_value','member_value_pair',3,'p_member_value_pair','parser.py',1899),
('marker_annotation -> annotation_name','marker_annotation',1,'p_marker_annotation','parser.py',1903),
('single_member_annotation -> annotation_name ( single_member_annotation_member_value )','single_member_annotation',4,'p_single_member_annotation','parser.py',1907),
('single_member_annotation_member_value -> member_value','single_member_annotation_member_value',1,'p_single_member_annotation_member_value','parser.py',1911),
('compilation_unit -> package_declaration','compilation_unit',1,'p_compilation_unit','parser.py',1917),
('compilation_unit -> package_declaration import_declarations','compilation_unit',2,'p_compilation_unit2','parser.py',1921),
('compilation_unit -> package_declaration import_declarations type_declarations','compilation_unit',3,'p_compilation_unit3','parser.py',1925),
('compilation_unit -> package_declaration type_declarations','compilation_unit',2,'p_compilation_unit4','parser.py',1929),
('compilation_unit -> import_declarations','compilation_unit',1,'p_compilation_unit5','parser.py',1933),
('compilation_unit -> type_declarations','compilation_unit',1,'p_compilation_unit6','parser.py',1937),
('compilation_unit -> import_declarations type_declarations','compilation_unit',2,'p_compilation_unit7','parser.py',1941),
('compilation_unit -> empty','compilation_unit',1,'p_compilation_unit8','parser.py',1945),
('package_declaration -> package_declaration_name ;','package_declaration',2,'p_package_declaration','parser.py',1949),
('package_declaration_name -> modifiers PACKAGE name','package_declaration_name',3,'p_package_declaration_name','parser.py',1956),
('package_declaration_name -> PACKAGE name','package_declaration_name',2,'p_package_declaration_name','parser.py',1957),
('import_declarations -> import_declaration','import_declarations',1,'p_import_declarations','parser.py',1964),
('import_declarations -> import_declarations import_declaration','import_declarations',2,'p_import_declarations','parser.py',1965),
('import_declaration -> single_type_import_declaration','import_declaration',1,'p_import_declaration','parser.py',1972),
('import_declaration -> type_import_on_demand_declaration','import_declaration',1,'p_import_declaration','parser.py',1973),
('import_declaration -> single_static_import_declaration','import_declaration',1,'p_import_declaration','parser.py',1974),
('import_declaration -> static_import_on_demand_declaration','import_declaration',1,'p_import_declaration','parser.py',1975),
('single_type_import_declaration -> IMPORT name ;','single_type_import_declaration',3,'p_single_type_import_declaration','parser.py',1979),
('type_import_on_demand_declaration -> IMPORT name . * ;','type_import_on_demand_declaration',5,'p_type_import_on_demand_declaration','parser.py',1983),
('single_static_import_declaration -> IMPORT STATIC name ;','single_static_import_declaration',4,'p_single_static_import_declaration','parser.py',1987),
('static_import_on_demand_declaration -> IMPORT STATIC name . * ;','static_import_on_demand_declaration',6,'p_static_import_on_demand_declaration','parser.py',1991),
('type_declarations -> type_declaration','type_declarations',1,'p_type_declarations','parser.py',1995),
('type_declarations -> type_declarations type_declaration','type_declarations',2,'p_type_declarations','parser.py',1996),
('goal -> PLUSPLUS compilation_unit','goal',2,'p_goal_compilation_unit','parser.py',2007),
('goal -> MINUSMINUS expression','goal',2,'p_goal_expression','parser.py',2011),
('goal -> * block_statement','goal',2,'p_goal_statement','parser.py',2015),
('empty -> <empty>','empty',0,'p_empty','parser.py',2022),
]
| 511.845329
| 192,018
| 0.713683
|
4a0335d5bcd9901ccf1a56748962745c2a09cb02
| 570
|
py
|
Python
|
project/jamo.py
|
lani009/Naver-Ai-Burning-warmup
|
beea88b3215d2a00deab3679902aa28918bde1f1
|
[
"MIT"
] | 1
|
2020-09-03T02:42:38.000Z
|
2020-09-03T02:42:38.000Z
|
project/jamo.py
|
lani009/Naver-Ai-Burning-warmup
|
beea88b3215d2a00deab3679902aa28918bde1f1
|
[
"MIT"
] | 17
|
2020-07-07T13:15:39.000Z
|
2020-09-03T02:43:26.000Z
|
project/jamo.py
|
lani009/Naver-Ai-Burning-warmup
|
beea88b3215d2a00deab3679902aa28918bde1f1
|
[
"MIT"
] | 1
|
2020-07-07T14:10:49.000Z
|
2020-07-07T14:10:49.000Z
|
from soynlp.hangle import decompose
import re
# Split Hangul syllables into their consonant/vowel (jamo) units.
doublespace_pattern = re.compile('\s+')  # collapses runs of whitespace to one space
def jamo_sentence(sent):
    """Decompose each Hangul syllable in *sent* into its jamo letters.

    Each composed syllable is expanded into its (initial, medial, final)
    jamo; an absent final consonant is rendered as '-'. Characters that
    decompose() reports as length 1 (e.g. lone jamo, Latin letters) pass
    through unchanged, and runs of whitespace are collapsed to one space.
    """
    # BUG FIX: removed the dead `cjj = ()` assignment -- it was never read;
    # the inner function binds its own local `cjj`.
    def transform(char):
        # Keep spaces as-is; doublespace_pattern collapses them afterwards.
        if char == ' ':
            return char
        cjj = decompose(char)
        # Non-decomposable characters come back as a single unit -- pass
        # them through untouched.
        if len(cjj) == 1:
            return cjj
        # Replace the "empty final consonant" slot (a space) with '-'.
        return ''.join(c if c != ' ' else '-' for c in cjj)

    sent_ = ''.join(transform(char) for char in sent)
    sent_ = doublespace_pattern.sub(' ', sent_)
    return sent_
# Example invocation (runs at import time; the return value is discarded):
jamo_sentence('어이고ㅋaaf 켁켁 아이고오aaaaa')
# 'ㅇㅓ-ㅇㅣ-ㄱㅗ- ㅋㅔㄱㅋㅔㄱ ㅇㅏ-ㅇㅣ-ㄱㅗ-ㅇㅗ-'
| 25.909091
| 59
| 0.587719
|
4a03378aca1965817895c7bbba141a94a3f00c0a
| 180
|
py
|
Python
|
reverse the number.py
|
Jeevananthamcse/Python-programs
|
b7847e25854b3ae95933edffcb141ef71185960a
|
[
"Unlicense"
] | 2
|
2021-08-30T08:04:15.000Z
|
2022-02-27T12:47:25.000Z
|
reverse the number.py
|
Jeevananthamcse/Python-programs
|
b7847e25854b3ae95933edffcb141ef71185960a
|
[
"Unlicense"
] | null | null | null |
reverse the number.py
|
Jeevananthamcse/Python-programs
|
b7847e25854b3ae95933edffcb141ef71185960a
|
[
"Unlicense"
] | null | null | null |
# Read an integer and print its decimal digits in reverse order.
n = int(input("Enter the integer number: "))
# BUG FIX (idiom): the accumulator was named `sum`, shadowing the builtin.
reversed_number = 0
while n > 0:
    # Peel off the last digit and append it to the reversed value.
    n, digit = divmod(n, 10)
    reversed_number = (reversed_number * 10) + digit
print("The reverse number is : {}".format(reversed_number))
| 22.5
| 51
| 0.5
|
4a033804e7bb6efbc79d0900a97b110d4fc642f7
| 3,465
|
py
|
Python
|
claims_hosp/tests/test_load_data.py
|
jingjtang/covidcast-indicators
|
34cb8786f78fbea2710b810a9500ee02c2379241
|
[
"MIT"
] | 8
|
2020-10-12T04:27:04.000Z
|
2022-03-08T16:56:57.000Z
|
claims_hosp/tests/test_load_data.py
|
jingjtang/covidcast-indicators
|
34cb8786f78fbea2710b810a9500ee02c2379241
|
[
"MIT"
] | 666
|
2020-09-30T21:18:41.000Z
|
2022-03-31T22:37:12.000Z
|
claims_hosp/tests/test_load_data.py
|
jingjtang/covidcast-indicators
|
34cb8786f78fbea2710b810a9500ee02c2379241
|
[
"MIT"
] | 13
|
2020-10-01T14:25:06.000Z
|
2022-02-12T08:31:19.000Z
|
# third party
import pandas as pd
import pytest
# first party
from delphi_claims_hosp.config import Config, GeoConstants
from delphi_claims_hosp.load_data import load_data, load_claims_data
# Shared fixtures: indicator configuration and geographic constants.
CONFIG = Config()
CONSTANTS = GeoConstants()
# Mirrors the indicator's runtime parameter structure; points at the
# synthetic test input file and its drop date.
PARAMS = {
    "indicator": {
        "input_file": "test_data/SYNEDI_AGG_INPATIENT_11062020_1451CDT.csv.gz",
        "drop_date": "2020-06-11",
    }
}
DATA_FILEPATH = PARAMS["indicator"]["input_file"]
DROP_DATE = pd.to_datetime(PARAMS["indicator"]["drop_date"])  # pandas Timestamp
class TestLoadData:
    """Tests for load_data/load_claims_data on the synthetic input file.

    NOTE(review): the four frames below are loaded once at class-definition
    time, so importing this module immediately reads the test CSV from disk.
    """
    fips_claims_data = load_claims_data(DATA_FILEPATH, DROP_DATE, "fips")
    hrr_claims_data = load_claims_data(DATA_FILEPATH, DROP_DATE, "hrr")
    fips_data = load_data(DATA_FILEPATH, DROP_DATE, "fips")
    hrr_data = load_data(DATA_FILEPATH, DROP_DATE, "hrr")
    def test_base_unit(self):
        """An unsupported geo level must raise an AssertionError."""
        with pytest.raises(AssertionError):
            load_claims_data(DATA_FILEPATH, DROP_DATE, "foo")
        with pytest.raises(AssertionError):
            load_data(DATA_FILEPATH, DROP_DATE, "foo")
    def test_claims_columns(self):
        """Claims frames are indexed by (geo, timestamp) and carry exactly
        the Denominator/Covid_like columns."""
        assert "hrr" in self.hrr_claims_data.index.names
        assert "fips" in self.fips_claims_data.index.names
        assert "timestamp" in self.hrr_claims_data.index.names
        assert "timestamp" in self.fips_claims_data.index.names
        expected_claims_columns = ["Denominator", "Covid_like"]
        for col in expected_claims_columns:
            assert col in self.fips_claims_data.columns
            assert col in self.hrr_claims_data.columns
        # No extra columns beyond the expected set.
        assert len(set(self.fips_claims_data.columns) - set(expected_claims_columns)) == 0
        assert len(set(self.hrr_claims_data.columns) - set(expected_claims_columns)) == 0
    def test_data_columns(self):
        """Aggregated frames expose the geo id, timestamp, num and den columns."""
        assert "hrr" in self.hrr_data.columns
        assert "fips" in self.fips_data.columns
        assert "timestamp" in self.hrr_data.columns
        assert "timestamp" in self.fips_data.columns
        expected_columns = ["num", "den"]
        for col in expected_columns:
            assert col in self.fips_data.columns
            assert col in self.hrr_data.columns
    def test_edge_values(self):
        """Timestamps must lie in [FIRST_DATA_DATE, DROP_DATE)."""
        for data in [self.hrr_claims_data, self.fips_claims_data]:
            assert data.index.get_level_values("timestamp").max() >= Config.FIRST_DATA_DATE
            assert data.index.get_level_values("timestamp").min() < DROP_DATE
        for data in [self.hrr_data, self.fips_data]:
            assert data["timestamp"].max() >= Config.FIRST_DATA_DATE
            assert data["timestamp"].min() < DROP_DATE
    def test_hrrs_values(self):
        """HRR ids stay within known bounds and totals match the raw claims."""
        assert len(self.hrr_data.hrr.unique()) <= CONSTANTS.NUM_HRRS
        assert len(self.hrr_claims_data.index.get_level_values(
            'hrr').unique()) <= CONSTANTS.NUM_HRRS
        assert self.hrr_data.isna().sum().sum() == 0
        # Aggregation must conserve counts from the claims frame.
        assert self.hrr_data["num"].sum() == self.hrr_claims_data["Covid_like"].sum()
        assert self.hrr_data["den"].sum() == self.hrr_claims_data["Denominator"].sum()
    def test_fips_values(self):
        """County ids stay within known bounds and totals match the raw claims."""
        assert len(self.fips_data.fips.unique()) <= CONSTANTS.NUM_COUNTIES
        assert len(self.fips_claims_data.index.get_level_values(
            'fips').unique()) <= CONSTANTS.NUM_COUNTIES
        assert self.fips_data.isna().sum().sum() == 0
        # Aggregation must conserve counts from the claims frame.
        assert self.fips_data["num"].sum() == self.fips_claims_data["Covid_like"].sum()
        assert self.fips_data["den"].sum() == self.fips_claims_data["Denominator"].sum()
| 42.256098
| 91
| 0.68658
|
4a03381a31c870c0181c960a9378f6db866ded5e
| 6,155
|
py
|
Python
|
tests/functional/test_yaml.py
|
NeilBotelho/pip
|
d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f
|
[
"MIT"
] | 1
|
2021-01-26T12:46:40.000Z
|
2021-01-26T12:46:40.000Z
|
tests/functional/test_yaml.py
|
NeilBotelho/pip
|
d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f
|
[
"MIT"
] | 1
|
2021-10-04T12:25:25.000Z
|
2021-10-05T07:30:54.000Z
|
tests/functional/test_yaml.py
|
NeilBotelho/pip
|
d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f
|
[
"MIT"
] | 1
|
2021-09-03T11:41:21.000Z
|
2021-09-03T11:41:21.000Z
|
"""
Tests for the resolver
"""
import os
import re
import pytest
import yaml
from tests.lib import DATA_DIR, create_basic_wheel_for_package, path_to_url
_conflict_finder_pat = re.compile(
# Conflicting Requirements: \
# A 1.0.0 requires B == 2.0.0, C 1.0.0 requires B == 1.0.0.
r"""
(?P<package>[\w\-_]+?)
[ ]
(?P<version>\S+?)
[ ]requires[ ]
(?P<selector>.+?)
(?=,|\.$)
""",
re.X
)
def generate_yaml_tests(directory):
    """Yield one pytest parameter per (case, resolver) combination found in
    the ``*.yml`` fixture files under *directory*."""
    for yml_file in directory.glob("*.yml"):
        data = yaml.safe_load(yml_file.read_text())
        assert "cases" in data, "A fixture needs cases to be used in testing"

        # Fixture name: path relative to the directory, minus ".yml".
        base_name = str(yml_file)[len(str(directory)) + 1:-4]

        base = data.get("base", {})
        cases = data["cases"]

        for resolver in ("old", "new"):
            for index, template in enumerate(cases):
                # Merge the shared base settings with this case's overrides.
                case = {**base, **template}
                case[":name:"] = base_name
                if len(cases) > 1:
                    case[":name:"] += "-" + str(index)
                case[":name:"] += "*" + resolver
                case[":resolver:"] = resolver

                # "skip" may be a bool or name the resolver it applies to.
                skip = case.pop("skip", False)
                assert skip in [False, True, "old", "new"]
                if skip is True or skip == resolver:
                    case = pytest.param(case, marks=pytest.mark.xfail)

                yield case
def id_func(param):
    """Derive a readable pytest id from a generated test parameter."""
    # Generated case dicts carry their own display name.
    if isinstance(param, dict) and ":name:" in param:
        return param[":name:"]
    text = str(param)
    # Abbreviate long reprs: first 20 chars, ellipsis, last 2 chars.
    if len(text) > 25:
        return "{}...{}".format(text[:20], text[-2:])
    return text
def convert_to_dict(string):
    """Parse ``"name version; verb args, ..."`` into a package-spec dict.

    The result always has ``name``, ``version``, ``depends`` (list) and
    ``extras`` (dict) keys; only the ``depends`` verb is recognized.
    """

    def split_clean(text, sep, maxsplit=-1):
        # Split, then strip surrounding whitespace from every piece.
        return [piece.strip() for piece in text.strip().split(sep, maxsplit)]

    clauses = split_clean(string, ";")
    result = {"depends": [], "extras": {}}
    result["name"], result["version"] = split_clean(clauses[0], " ")
    for clause in clauses[1:]:
        verb, args_str = split_clean(clause, " ", 1)
        assert verb in ["depends"], "Unknown verb {!r}".format(verb)
        result[verb] = split_clean(args_str, ",")
    return result
def handle_request(script, action, requirement, options, new_resolver=False):
    """Run ``pip <action>`` for *requirement* and report the resulting state.

    Returns a dict with the pip invocation ``result`` and ``state``: the
    sorted ``"name version"`` strings found in site-packages afterwards.
    """
    if action == 'install':
        args = ['install']
        if new_resolver:
            args.append("--unstable-feature=resolver")
        args.extend(["--no-index", "--find-links",
                     path_to_url(script.scratch_path)])
    elif action == 'uninstall':
        args = ['uninstall', '--yes']
    else:
        # BUG FIX: `raise "..."` is a TypeError in Python 3 (exceptions must
        # derive from BaseException). Raise a real exception carrying the
        # intended message, and fix the "excpet" typo.
        raise ValueError("Did not expect action: {!r}".format(action))

    if isinstance(requirement, str):
        args.append(requirement)
    elif isinstance(requirement, list):
        args.extend(requirement)
    else:
        # BUG FIX: same string-raise defect as above.
        raise TypeError(
            "requirement neither str nor list {!r}".format(requirement))

    args.extend(options)
    args.append("--verbose")
    result = script.pip(*args,
                        allow_stderr_error=True,
                        allow_stderr_warning=True,
                        allow_error=True)

    # Check which packages got installed by scanning .dist-info directories.
    state = []
    for path in os.listdir(script.site_packages_path):
        if path.endswith(".dist-info"):
            name, version = (
                os.path.basename(path)[:-len(".dist-info")]
            ).rsplit("-", 1)
            # TODO: information about extras.
            state.append(" ".join((name, version)))

    return {"result": result, "state": sorted(state)}
@pytest.mark.yaml
@pytest.mark.parametrize(
    "case", generate_yaml_tests(DATA_DIR.parent / "yaml"), ids=id_func
)
def test_yaml_based(script, case):
    """Run one yaml-defined resolver scenario and check its expectations."""
    available = case.get("available", [])
    requests = case.get("request", [])
    responses = case.get("response", [])

    assert len(requests) == len(responses), (
        "Expected requests and responses counts to be same"
    )

    # Create a custom index of all the packages that are supposed to be
    # available
    # XXX: This doesn't work because this isn't making an index of files.
    for package in available:
        if isinstance(package, str):
            package = convert_to_dict(package)
        assert isinstance(package, dict), "Needs to be a dictionary"
        create_basic_wheel_for_package(script, **package)

    # use scratch path for index
    for request, response in zip(requests, responses):
        for action in 'install', 'uninstall':
            if action in request:
                break
        else:
            # BUG FIX: `raise "..."` is a TypeError in Python 3; raise a real
            # exception carrying the intended message instead.
            raise ValueError("Unsupported request {!r}".format(request))

        # Perform the requested action
        effect = handle_request(script, action,
                                request[action],
                                request.get('options', '').split(),
                                case[':resolver:'] == 'new')

        if 0:  # deliberately disabled; flip to 1 to dump output for analysis
            with open(DATA_DIR.parent / "yaml" /
                      case[':name:'].replace('*', '-'), 'w') as fo:
                result = effect['result']
                fo.write("=== RETURNCODE = %d\n" % result.returncode)
                fo.write("=== STDERR ===:\n%s\n" % result.stderr)

        if 'state' in response:
            assert effect['state'] == (response['state'] or []), \
                str(effect["result"])

        error = False
        if 'conflicting' in response:
            error = True

        if error:
            # The legacy resolver succeeds despite conflicts; the new one errors.
            if case[":resolver:"] == 'old':
                assert effect["result"].returncode == 0, str(effect["result"])
            elif case[":resolver:"] == 'new':
                assert effect["result"].returncode == 1, str(effect["result"])
|
4a03388f222362d76f479a8f8ece2fb35a8679cd
| 169
|
py
|
Python
|
Aula 02/ListaDeExerciciosExtra/Lista10.py
|
diegorafaelvieira/Programacao-1
|
657a974f1215cec4aed68603e738d9a135131545
|
[
"MIT"
] | null | null | null |
Aula 02/ListaDeExerciciosExtra/Lista10.py
|
diegorafaelvieira/Programacao-1
|
657a974f1215cec4aed68603e738d9a135131545
|
[
"MIT"
] | null | null | null |
Aula 02/ListaDeExerciciosExtra/Lista10.py
|
diegorafaelvieira/Programacao-1
|
657a974f1215cec4aed68603e738d9a135131545
|
[
"MIT"
] | null | null | null |
# Read a temperature in Celsius and print its Fahrenheit equivalent.
# Generalized: parse with float() instead of int() so decimal inputs like
# "36.6" work; whole-number inputs behave exactly as before (the arithmetic
# already produced a float).
ValorCelsius = float(input("Digite a temperatura em Celsius:"))
ValorFahrenheit = (ValorCelsius * 1.8) + 32
print("O valor da temperatura em Fahrenheit é:", ValorFahrenheit)
| 42.25
| 65
| 0.769231
|
4a03389e7d6ee5ae83fab90ea32632228a50dd91
| 5,955
|
py
|
Python
|
mistletoe/parse_context.py
|
executablebooks/mistletoe-ebp
|
229812436726fd9b1af85c6e66ff8c81b415758d
|
[
"MIT"
] | 2
|
2020-05-19T02:06:47.000Z
|
2020-06-27T10:01:59.000Z
|
mistletoe/parse_context.py
|
executablebooks/mistletoe-ebp
|
229812436726fd9b1af85c6e66ff8c81b415758d
|
[
"MIT"
] | 5
|
2020-03-10T22:43:16.000Z
|
2020-03-21T22:09:09.000Z
|
mistletoe/parse_context.py
|
ExecutableBookProject/mistletoe-ebp
|
229812436726fd9b1af85c6e66ff8c81b415758d
|
[
"MIT"
] | null | null | null |
"""This module provides a container for global variables of a single parse.
It uses the `threading.local` object to ensure that global variables
are not changed by different threads.
"""
from collections import OrderedDict
from collections.abc import MutableSet
from copy import deepcopy
from importlib import import_module
import logging
from threading import local
from typing import Optional
# Per-thread storage for the active ParseContext (see get_parse_context).
THREAD = local()
# Module-level fallback logger used when a context is given none.
LOGGER = logging.getLogger(__name__)
class OrderedSet(MutableSet):
    """A set that remembers insertion order, optimized for ``x in s`` tests.

    Backed by an OrderedDict whose keys are the members (values are None).
    """

    def __init__(self, iterable=()):
        self._items = OrderedDict((member, None) for member in iterable)

    def __repr__(self):
        return repr(list(self._items))

    def __contains__(self, item):
        return item in self._items

    def __iter__(self):
        return iter(self._items)

    def __len__(self):
        return len(self._items)

    def add(self, item):
        # setdefault leaves an existing member (and its position) untouched.
        self._items.setdefault(item, None)

    def discard(self, item):
        self._items.pop(item, None)

    def _splice(self, index, item):
        # Rebuild the backing dict with `item` inserted at `index`.
        pairs = list(self._items.items())
        pairs.insert(index, (item, None))
        self._items = OrderedDict(pairs)

    def insert(self, index, item):
        self._splice(index, item)

    def insert_after(self, item, after_item):
        assert after_item in self._items, after_item
        self._splice(list(self._items).index(after_item) + 1, item)

    def insert_before(self, item, before_item):
        assert before_item in self._items
        self._splice(list(self._items).index(before_item), item)
class ParseContext:
    """A class to contain context for a single parse.

    :param find_blocks: a list of block tokens to use during the parse. If None,
        the standard blocks will be used from `BaseRenderer.default_block_tokens`.
    :param find_spans: a list of span tokens to use during the parse. If None,
        the standard spans will be used from `BaseRenderer.default_span_tokens`.
    :param link_definitions: a dict of link definitions, obtained from `[def]: link`
    :param foot_definitions: a dict of footnote definitions,
        obtained from `[^def]: link` (if Footnote token active)
    :param logger: logger used during the parse (defaults to the module LOGGER)
    """

    def __init__(
        self,
        find_blocks=None,
        find_spans=None,
        link_definitions=None,
        foot_definitions=None,
        logger: Optional[logging.Logger] = None,
    ):
        # Tokens used for matching. BaseRenderer is imported lazily to avoid
        # a circular import at module load time.
        if find_blocks is not None:
            self.block_tokens = OrderedSet(tokens_from_classes(find_blocks))
        else:
            from mistletoe.renderers.base import BaseRenderer

            self.block_tokens = OrderedSet(BaseRenderer.default_block_tokens)
        if find_spans is not None:
            self.span_tokens = OrderedSet(tokens_from_classes(find_spans))
        else:
            from mistletoe.renderers.base import BaseRenderer

            self.span_tokens = OrderedSet(BaseRenderer.default_span_tokens)

        # Definition references, collected during parsing.
        if link_definitions is None:
            self._link_definitions = {}
        else:
            self._link_definitions = link_definitions
        if foot_definitions is None:
            self._foot_definitions = OrderedDict()
        else:
            self._foot_definitions = foot_definitions

        # Matches recorded by the nested tokenizer during a parse.
        self.nesting_matches = {}
        self._foot_references = OrderedSet()

        if logger is None:
            logger = LOGGER
        self._logger = logger

    def __repr__(self):
        return "{0}(block_cls={1},span_cls={2},link_defs={3},footnotes={4})".format(
            self.__class__.__name__,
            len(self.block_tokens),
            len(self.span_tokens),
            len(self.link_definitions),
            len(self.foot_definitions),
        )

    @property
    def link_definitions(self) -> dict:
        """Mapping of link labels to definitions, filled in during parsing."""
        return self._link_definitions

    @property
    def foot_definitions(self) -> dict:
        """Ordered mapping of footnote labels to definitions."""
        return self._foot_definitions

    @property
    def foot_references(self) -> OrderedSet:
        """Footnote labels referenced so far, in first-seen order."""
        return self._foot_references

    @property
    def logger(self) -> logging.Logger:
        return self._logger

    @logger.setter
    def logger(self, logger: logging.Logger):
        self._logger = logger

    def reset_definitions(self):
        """Clear all collected definitions and footnote references."""
        self._link_definitions = {}
        # BUG FIX (consistency): previously reset to a plain dict while
        # __init__ uses an OrderedDict -- keep the container type stable
        # across resets.
        self._foot_definitions = OrderedDict()
        self._foot_references = OrderedSet()

    def copy(self):
        """Return a deep copy of this context."""
        return deepcopy(self)
def get_parse_context(reset=False) -> ParseContext:
    """Return the current ``ParseContext`` (one per thread).

    A fresh context is created and stored on the thread-local when *reset*
    is true or when this thread has no context yet.
    """
    if reset or not hasattr(THREAD, "context"):
        THREAD.context = ParseContext()
    return THREAD.context
def set_parse_context(parse_context):
    """Install *parse_context* as this thread's current ``ParseContext``."""
    THREAD.context = parse_context
def tokens_from_module(module):
    """Return the token classes listed in ``module.__all__``.

    Useful when custom tokens are defined in a single module.
    """
    return [getattr(module, public_name) for public_name in module.__all__]
def tokens_from_classes(classes):
    """Resolve a mix of classes and dotted class paths to class objects.

    Strings such as ``"mistletoe.span_tokens.Math"`` are imported from
    their module; anything else is passed through unchanged.
    """

    def resolve(spec):
        if not isinstance(spec, str):
            return spec
        module_path, _, class_name = spec.rpartition(".")
        return getattr(import_module(module_path), class_name)

    return [resolve(spec) for spec in classes]
| 30.695876
| 84
| 0.654912
|
4a033a10755670557bc5f34c8e8f1f7eab1ec475
| 48
|
py
|
Python
|
Livro Nilo Ney (Python)/Cap.05/Exe 5.2.py
|
EduardoOliver25/Python
|
626f0f05641ce52ebe5e350d380ac21c3af53aa8
|
[
"MIT"
] | null | null | null |
Livro Nilo Ney (Python)/Cap.05/Exe 5.2.py
|
EduardoOliver25/Python
|
626f0f05641ce52ebe5e350d380ac21c3af53aa8
|
[
"MIT"
] | null | null | null |
Livro Nilo Ney (Python)/Cap.05/Exe 5.2.py
|
EduardoOliver25/Python
|
626f0f05641ce52ebe5e350d380ac21c3af53aa8
|
[
"MIT"
] | null | null | null |
# Print the integers from 50 through 100, one per line.
for x in range(50, 101):
    print(x)
| 12
| 14
| 0.416667
|
4a033a301bd242cec8202aed252780ed44ae5d6a
| 2,341
|
py
|
Python
|
models/malliva_accounts.py
|
olubiyiontheweb/malliva
|
b212e6b359eed54c92533f0a02afe3c0042150e2
|
[
"MIT"
] | null | null | null |
models/malliva_accounts.py
|
olubiyiontheweb/malliva
|
b212e6b359eed54c92533f0a02afe3c0042150e2
|
[
"MIT"
] | null | null | null |
models/malliva_accounts.py
|
olubiyiontheweb/malliva
|
b212e6b359eed54c92533f0a02afe3c0042150e2
|
[
"MIT"
] | 1
|
2021-07-19T12:15:52.000Z
|
2021-07-19T12:15:52.000Z
|
# Marketplace accounts created on the platform, they have users and settings and databases
from datetime import timedelta, datetime
from enum import Enum
from mongoengine.queryset.base import DO_NOTHING
from .malliva_users import User as UserModel
from mongoengine import Document, EmbeddedDocument, fields
class Plan(Document):
    """A subscription plan that marketplace accounts can subscribe to."""

    id = fields.SequenceField(primary_key=True)
    plan_name = fields.StringField(max_length=50, required=True)
    # BUG FIX: `default={}` is a single shared mutable dict; pass the
    # callable so each document gets its own empty mapping.
    features = fields.DynamicField(default=dict)
    # Plan length; unit not stated here -- presumably days (TODO confirm).
    duration = fields.IntField(required=True)
    # BUG FIX: the default was the *string* "00.0" on a FloatField; use a
    # real float so validation and serialization stay type-consistent.
    price = fields.FloatField(default=0.0)
    meta = {'db_alias': 'default'}
class Subscription(Document):
    """Links an owner to their current Plan and tracks renewal dates."""

    id = fields.SequenceField(primary_key=True)
    current_plan = fields.ReferenceField(
        Plan, required=True, reverse_delete_rule=DO_NOTHING)
    owner = fields.ReferenceField(
        UserModel, required=True, reverse_delete_rule=DO_NOTHING)
    # BUG FIX: `default=datetime.now()` was evaluated ONCE at import time,
    # stamping every new document with the server start time. Pass the
    # callable so the default is computed per document.
    first_subscription_date = fields.DateTimeField(
        required=True, default=datetime.now)
    last_subscription_date = fields.DateTimeField(null=True, default=None)
    # Same fix: compute "now + 30 days" lazily, per document.
    next_expiration_date = fields.DateTimeField(
        default=lambda: datetime.now() + timedelta(days=30))
    is_active = fields.BooleanField(default=False)
    meta = {'db_alias': 'default'}
class MallivaAccount(Document):
    """A marketplace account (tenant) created on the platform."""

    class MARKETPLACE_MODE(Enum):
        DEVELOPMENT = "DEVELOPMENT"
        PRODUCTION = "PRODUCTION"

    id = fields.SequenceField(primary_key=True)
    marketplace_name = fields.StringField(max_length=200)
    owner = fields.ReferenceField(
        UserModel, reverse_delete_rule=DO_NOTHING, default="1")
    database_name = fields.StringField(max_length=200, default="", unique=True)
    subdomain = fields.StringField(max_length=200, unique=True)
    domain = fields.StringField(max_length=200, unique=True, default="")
    use_domain = fields.BooleanField(default=False)
    # configuration = models.OneToOneField(Configuration, on_delete=models.SET_DEFAULT, default="1")
    # NOTE(review): "curent_mode" typo kept for backward compatibility --
    # renaming it would change the stored field name.
    curent_mode = fields.EnumField(
        MARKETPLACE_MODE, default=MARKETPLACE_MODE.DEVELOPMENT)
    subscription = fields.ReferenceField(
        Subscription, reverse_delete_rule=DO_NOTHING, default="1")
    # BUG FIX: `auto_now_add` is a Django ORM argument; mongoengine's
    # DateTimeField does not implement it, so these fields previously got no
    # timestamp. Use a per-document callable default instead.
    # NOTE(review): `updated_at` is only set at creation here; refreshing it
    # on save would need a custom save()/signal -- confirm desired behavior.
    created_at = fields.DateTimeField(default=datetime.now)
    updated_at = fields.DateTimeField(default=datetime.now)
    meta = {'db_alias': 'default'}
| 39.677966
| 100
| 0.747117
|
4a033a3de420194bf1c9827d0b07c6be8a593ebf
| 988
|
py
|
Python
|
pynet/vision/data/iris.py
|
deep-learning-algorithm/PyNet
|
354c7ee88a712a1f5069d58a0be4a6cbfaeab861
|
[
"Apache-2.0"
] | 8
|
2020-11-22T02:22:55.000Z
|
2022-03-16T12:18:03.000Z
|
pynet/vision/data/iris.py
|
zjZSTU/PyNet
|
354c7ee88a712a1f5069d58a0be4a6cbfaeab861
|
[
"Apache-2.0"
] | null | null | null |
pynet/vision/data/iris.py
|
zjZSTU/PyNet
|
354c7ee88a712a1f5069d58a0be4a6cbfaeab861
|
[
"Apache-2.0"
] | 4
|
2020-12-10T09:21:56.000Z
|
2021-04-19T02:25:01.000Z
|
# -*- coding: utf-8 -*-
# @Time : 19-6-20 下午4:25
# @Author : zj
import pandas as pd
import numpy as np
from sklearn import utils
from sklearn.model_selection import train_test_split
# iris_path = '/home/zj/data/iris-species/Iris.csv'
def load_iris(iris_path, shuffle=True, tsize=0.8):
    """Load the Iris CSV, encode species labels as ints and split train/test.

    Args:
        iris_path: path to the Iris.csv file (Kaggle iris-species layout).
        shuffle: shuffle the rows before splitting.
        tsize: fraction of the data used for training.

    Returns:
        (x_train, x_test, y_train, y_test); features are the four measurement
        columns as a float array, labels are 0/1/2.
    """
    frame = pd.read_csv(iris_path, header=0, delimiter=',')
    if shuffle:
        frame = utils.shuffle(frame)
    label_map = {
        'Iris-setosa': 0,
        'Iris-versicolor': 1,
        'Iris-virginica': 2
    }
    frame['Species'] = frame['Species'].map(label_map)
    feature_columns = ['SepalLengthCm', 'SepalWidthCm', 'PetalLengthCm', 'PetalWidthCm']
    data_x = np.array([frame[col] for col in feature_columns]).T
    data_y = frame['Species']
    # Rows were already shuffled above (if requested), so the split itself
    # stays deterministic.
    x_train, x_test, y_train, y_test = train_test_split(data_x, data_y, train_size=tsize, test_size=(1 - tsize),
                                                        shuffle=False)
    return x_train, x_test, y_train, y_test
| 25.333333
| 112
| 0.606275
|
4a033aa51d6858913aa1c250b1cb7768a4ddebc5
| 374
|
py
|
Python
|
decorator_timer_test.py
|
pieteradejong/temp
|
2dfd4cee58a37c33f5611d274ab9d12534f0c383
|
[
"MIT"
] | null | null | null |
decorator_timer_test.py
|
pieteradejong/temp
|
2dfd4cee58a37c33f5611d274ab9d12534f0c383
|
[
"MIT"
] | null | null | null |
decorator_timer_test.py
|
pieteradejong/temp
|
2dfd4cee58a37c33f5611d274ab9d12534f0c383
|
[
"MIT"
] | null | null | null |
import time
def timer_decorator(fnc):
    """Wrap *fnc* so every call prints its wall-clock execution time.

    Fixes over the original: the wrapper now forwards positional/keyword
    arguments and returns fnc's result (both were silently dropped), and
    uses Python 3 print-call syntax.
    """
    def wrapper(*args, **kwargs):
        start = time.time()
        result = fnc(*args, **kwargs)
        elapsed = time.time() - start
        print("Execution time: ", elapsed)
        return result
    return wrapper
class Solution:
    """Example target whose method is timed by @timer_decorator."""
    @timer_decorator
    def my_fnc(self):
        # Sum of 0..999. range() replaces the Python-2-only xrange, and the
        # print statement is converted to the Python 3 function call.
        print(sum(range(1000)))
def main():
    """Entry point: run the timed demo method once."""
    # Python 2 "print" statements elsewhere aside, this block is valid in
    # both Python 2 and 3.
    Solution().my_fnc()
if __name__ == "__main__":
    main()
| 14.96
| 39
| 0.593583
|
4a033aaf869bc22e8b1ef83d835b31677a6cbcfb
| 4,977
|
py
|
Python
|
openstack_dashboard/test/integration_tests/decorators.py
|
Mirantis/mos-horizon
|
d2444220d959c8b921436bd75459c2face0e71d2
|
[
"Apache-2.0"
] | 9
|
2016-06-03T03:53:24.000Z
|
2017-05-20T16:53:23.000Z
|
openstack_dashboard/test/integration_tests/decorators.py
|
Mirantis/mos-horizon
|
d2444220d959c8b921436bd75459c2face0e71d2
|
[
"Apache-2.0"
] | 1
|
2016-09-08T10:57:46.000Z
|
2016-09-08T10:59:06.000Z
|
openstack_dashboard/test/integration_tests/decorators.py
|
Mirantis/mos-horizon
|
d2444220d959c8b921436bd75459c2face0e71d2
|
[
"Apache-2.0"
] | 4
|
2016-08-01T10:50:15.000Z
|
2017-02-22T12:11:19.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import functools
import inspect
import os
from openstack_dashboard.test.integration_tests import config
import testtools
def _is_test_method_name(method):
return method.startswith('test_')
def _is_test_fixture(method):
return method in ['setUp', 'tearDown']
def _is_test_cls(cls):
return cls.__name__.startswith('Test')
def _mark_method_skipped(meth, reason):
"""Mark method as skipped by replacing the actual method with wrapper
that raises the testtools.testcase.TestSkipped exception.
"""
@functools.wraps(meth)
def wrapper(*args, **kwargs):
raise testtools.testcase.TestSkipped(reason)
return wrapper
def _mark_class_skipped(cls, reason):
    """Replace every test method and fixture on *cls* with a skipping stub."""
    for attr_name in dir(cls):
        if not (_is_test_method_name(attr_name) or _is_test_fixture(attr_name)):
            continue
        attr = getattr(cls, attr_name)
        if callable(attr):
            setattr(cls, attr_name, _mark_method_skipped(attr, reason))
    return cls
# Shared error text raised when one of the decorators below is applied to
# something other than a Test* class or a test_* method.
NOT_TEST_OBJECT_ERROR_MSG = "Decorator can be applied only on test" \
                            " classes and test methods."
def services_required(*req_services):
    """Mark a test's service requirements; skip the test when unmet.

    May decorate a test class or an individual test method.  Availability is
    read from the integration-test configuration's ``service_available``
    section.  Usage::

        @decorators.services_required("sahara")
        class TestLogin(helpers.BaseTestCase):
            ...

    or::

        class TestLogin(helpers.BaseTestCase):
            @decorators.services_required("sahara")
            def test_login(self):
                ...
    """
    def actual_decoration(obj):
        # Accept test classes and test methods; reject anything else.
        if inspect.isclass(obj):
            if not _is_test_cls(obj):
                raise ValueError(NOT_TEST_OBJECT_ERROR_MSG)
            skip_method = _mark_class_skipped
        else:
            if not _is_test_method_name(obj.__name__):
                raise ValueError(NOT_TEST_OBJECT_ERROR_MSG)
            skip_method = _mark_method_skipped
        available = config.get_config().service_available
        # Skip on the first missing service; one reason is enough.
        for service in req_services:
            if not getattr(available, service, False):
                return skip_method(obj, "%s service is required for this test"
                                        " to work properly." % service)
        return obj
    return actual_decoration
def skip_because(**kwargs):
    """Decorator for skipping tests hitting known bugs.

    Usage::

        class TestDashboardHelp(helpers.TestCase):
            @decorators.skip_because(bugs=["1234567"])
            def test_dashboard_help_redirection(self):
                ...
    """
    # collections.Iterable was a deprecated alias removed in Python 3.10;
    # the ABCs live in collections.abc (Python 3.3+).  Fall back for py2.
    try:
        from collections.abc import Iterable
    except ImportError:  # Python 2
        from collections import Iterable
    def actual_decoration(obj):
        # Accept test classes and test methods; reject anything else.
        if inspect.isclass(obj):
            if not _is_test_cls(obj):
                raise ValueError(NOT_TEST_OBJECT_ERROR_MSG)
            skip_method = _mark_class_skipped
        else:
            if not _is_test_method_name(obj.__name__):
                raise ValueError(NOT_TEST_OBJECT_ERROR_MSG)
            skip_method = _mark_method_skipped
        bugs = kwargs.get("bugs")
        if bugs and isinstance(bugs, Iterable):
            for bug in bugs:
                if not bug.isdigit():
                    raise ValueError("bug must be a valid bug number")
            obj = skip_method(obj, "Skipped until Bugs: %s are resolved." %
                              ", ".join(bugs))
        return obj
    return actual_decoration
def skip_new_design(obj):
    """Skip the decorated test class/method when SKIP_NEW_DESIGN is set in
    the environment; otherwise return it unchanged."""
    if not os.environ.get('SKIP_NEW_DESIGN'):
        return obj
    if inspect.isclass(obj):
        if not _is_test_cls(obj):
            raise ValueError(NOT_TEST_OBJECT_ERROR_MSG)
        marker = _mark_class_skipped
    else:
        if not _is_test_method_name(obj.__name__):
            raise ValueError(NOT_TEST_OBJECT_ERROR_MSG)
        marker = _mark_method_skipped
    return marker(obj, "New design isn't supported")
| 31.5
| 78
| 0.658429
|
4a033ae7b28f42170a8d5e6de7b85be353395626
| 43,550
|
py
|
Python
|
src/transformers/modeling_t5.py
|
yuvalpinter/transformers
|
9c67196b83a824df577742d32d38e9121d8a9285
|
[
"Apache-2.0"
] | null | null | null |
src/transformers/modeling_t5.py
|
yuvalpinter/transformers
|
9c67196b83a824df577742d32d38e9121d8a9285
|
[
"Apache-2.0"
] | null | null | null |
src/transformers/modeling_t5.py
|
yuvalpinter/transformers
|
9c67196b83a824df577742d32d38e9121d8a9285
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2018 Mesh TensorFlow authors, T5 Authors and HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch T5 model. """
import copy
import itertools
import logging
import math
import os
import torch
import torch.nn.functional as F
from torch import nn
from torch.nn import CrossEntropyLoss
from .configuration_t5 import T5Config
from .file_utils import DUMMY_INPUTS, DUMMY_MASK, add_start_docstrings
from .modeling_utils import PreTrainedModel, prune_linear_layer
logger = logging.getLogger(__name__)
####################################################
# This dict contrains shortcut names and associated url
# for the pretrained weights provided with the models
####################################################
# Shortcut-name -> S3 URL map for the pretrained T5 checkpoints; exposed to
# from_pretrained() through T5PreTrainedModel.pretrained_model_archive_map.
T5_PRETRAINED_MODEL_ARCHIVE_MAP = {
    "t5-small": "https://s3.amazonaws.com/models.huggingface.co/bert/t5-small-pytorch_model.bin",
    "t5-base": "https://s3.amazonaws.com/models.huggingface.co/bert/t5-base-pytorch_model.bin",
    "t5-large": "https://s3.amazonaws.com/models.huggingface.co/bert/t5-large-pytorch_model.bin",
    "t5-3b": "https://s3.amazonaws.com/models.huggingface.co/bert/t5-3b-pytorch_model.bin",
    "t5-11b": "https://s3.amazonaws.com/models.huggingface.co/bert/t5-11b-pytorch_model.bin",
}
####################################################
# This is a conversion method from TF 1.0 to PyTorch
# More details: https://medium.com/huggingface/from-tensorflow-to-pytorch-265f40ef2a28
####################################################
def load_tf_weights_in_t5(model, config, tf_checkpoint_path):
    """ Load tf checkpoints in a pytorch model.

    Walks every variable in the TF checkpoint, maps its slash-separated scope
    path onto attributes of *model*, transposes kernels where needed, and
    copies the values in-place.  Optimizer slots (adam_v/adam_m/global_step,
    "_slot_" variables) are skipped.  Returns *model*.
    """
    try:
        import re
        import numpy as np
        import tensorflow as tf
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_path = os.path.abspath(tf_checkpoint_path)
    logger.info("Converting TensorFlow checkpoint from {}".format(tf_path))
    # Load weights from TF model
    init_vars = tf.train.list_variables(tf_path)
    names = []
    tf_weights = {}
    for name, shape in init_vars:
        logger.info("Loading TF weight {} with shape {}".format(name, shape))
        array = tf.train.load_variable(tf_path, name)
        names.append(name)
        tf_weights[name] = array
    for txt_name in names:
        name = txt_name.split("/")
        # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v
        # which are not required for using pretrained model
        if any(n in ["adam_v", "adam_m", "global_step"] for n in name):
            logger.info("Skipping {}".format("/".join(name)))
            tf_weights.pop(txt_name, None)
            continue
        if "_slot_" in name[-1]:
            logger.info("Skipping {}".format("/".join(name)))
            tf_weights.pop(txt_name, None)
            continue
        pointer = model
        array = tf_weights[txt_name]
        for m_name in name:
            # Scopes like "block_0" are split into ("block", "0") so the
            # numeric part can index into a ModuleList below.
            if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
                scope_names = re.split(r"_(\d+)", m_name)
            else:
                scope_names = [m_name]
            if scope_names[0] in ["kernel", "scale", "embedding"]:
                pointer = getattr(pointer, "weight")
            # elif scope_names[0] == 'scale':
            #     pointer = getattr(pointer, 'weight')
            # elif scope_names[0] == 'output_bias' or scope_names[0] == 'beta':
            #     pointer = getattr(pointer, 'bias')
            # elif scope_names[0] == 'squad':
            #     pointer = getattr(pointer, 'classifier')
            else:
                try:
                    pointer = getattr(pointer, scope_names[0])
                except AttributeError:
                    # NOTE(review): this `continue` skips only the current
                    # scope component and keeps walking with `pointer`
                    # unchanged -- presumably intended to ignore unknown
                    # scopes; verify against upstream before changing.
                    logger.info("Skipping {}".format("/".join(name)))
                    continue
            if len(scope_names) >= 2:
                num = int(scope_names[1])
                pointer = pointer[num]
        if scope_names[0] not in ["kernel", "scale", "embedding"]:
            pointer = getattr(pointer, "weight")
        if scope_names[0] != "embedding":
            logger.info("Transposing numpy weight of shape {} for {}".format(array.shape, name))
            array = np.transpose(array)
        try:
            assert pointer.shape == array.shape
        except AssertionError as e:
            e.args += (pointer.shape, array.shape)
            raise
        logger.info("Initialize PyTorch weight {}".format(name))
        pointer.data = torch.from_numpy(array.astype(np.float32))
        tf_weights.pop(txt_name, None)
    logger.info("Weights not copied to PyTorch model: {}".format(", ".join(tf_weights.keys())))
    # logger.info("Weights not copied to PyTorch model: {}".format(', '.join(tf_weights.keys())))
    return model
####################################################
# PyTorch Models are constructed by sub-classing
# - torch.nn.Module for the layers and
# - PreTrainedModel for the models (it-self a sub-class of torch.nn.Module)
####################################################
class T5LayerNorm(nn.Module):
    """T5-style layer norm: RMS rescaling only -- no mean subtraction, no bias."""
    def __init__(self, hidden_size, eps=1e-6):
        super().__init__()
        # Learned per-channel scale, initialised to 1 so the layer starts as
        # (nearly) the identity for unit-RMS inputs.
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps
    def forward(self, x):
        # Mean of squares over the last (hidden) dimension.
        mean_square = x.pow(2).mean(-1, keepdim=True)
        normed = x / torch.sqrt(mean_square + self.variance_epsilon)
        return self.weight * normed
class T5DenseReluDense(nn.Module):
    """Position-wise feed-forward: Linear -> ReLU -> Dropout -> Linear, no biases.

    Attribute names (wi/wo) match the pretrained checkpoints and must not change.
    """
    def __init__(self, config):
        super().__init__()
        self.wi = nn.Linear(config.d_model, config.d_ff, bias=False)
        self.wo = nn.Linear(config.d_ff, config.d_model, bias=False)
        self.dropout = nn.Dropout(config.dropout_rate)
    def forward(self, hidden_states):
        activated = F.relu(self.wi(hidden_states))
        return self.wo(self.dropout(activated))
class T5LayerFF(nn.Module):
    """Feed-forward sub-block: pre-layer-norm, FF transform, residual add."""
    def __init__(self, config):
        super().__init__()
        self.DenseReluDense = T5DenseReluDense(config)
        self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
        self.dropout = nn.Dropout(config.dropout_rate)
    def forward(self, hidden_states):
        # Pre-norm: normalise first, transform, then add back the residual.
        ff_out = self.DenseReluDense(self.layer_norm(hidden_states))
        return hidden_states + self.dropout(ff_out)
class T5Attention(nn.Module):
    """Multi-head attention with T5's optional learned relative position bias.

    Acts as self-attention when ``kv`` is None at call time, and as
    encoder-decoder cross-attention when ``kv`` is given.  Queries are NOT
    scaled by 1/sqrt(d_kv); T5 folds that into the weight initialization.
    """
    # Class-wide counter giving each instance a unique layer_id, used as the
    # key into the optional cross-call `cache` dict in forward().
    NEW_ID = itertools.count()
    def __init__(self, config, has_relative_attention_bias=False):
        super().__init__()
        self.layer_id = next(T5Attention.NEW_ID)
        self.is_decoder = config.is_decoder
        self.has_relative_attention_bias = has_relative_attention_bias
        self.output_attentions = config.output_attentions
        self.relative_attention_num_buckets = config.relative_attention_num_buckets
        self.d_model = config.d_model
        self.d_kv = config.d_kv
        self.n_heads = config.num_heads
        self.dropout = config.dropout_rate
        self.inner_dim = self.n_heads * self.d_kv
        # Mesh TensorFlow initialization to avoid scaling before softmax
        self.q = nn.Linear(self.d_model, self.inner_dim, bias=False)
        self.k = nn.Linear(self.d_model, self.inner_dim, bias=False)
        self.v = nn.Linear(self.d_model, self.inner_dim, bias=False)
        self.o = nn.Linear(self.inner_dim, self.d_model, bias=False)
        if self.has_relative_attention_bias:
            self.relative_attention_bias = nn.Embedding(self.relative_attention_num_buckets, self.n_heads)
        self.pruned_heads = set()
    def prune_heads(self, heads):
        # Remove the given head indices from q/k/v/o and shrink the head count.
        if len(heads) == 0:
            return
        mask = torch.ones(self.n_heads, self.d_kv)
        heads = set(heads) - self.pruned_heads
        for head in heads:
            # Shift the index down for heads pruned in earlier calls.
            head -= sum(1 if h < head else 0 for h in self.pruned_heads)
            mask[head] = 0
        mask = mask.view(-1).contiguous().eq(1)
        index = torch.arange(len(mask))[mask].long()
        # Prune linear layers
        self.q = prune_linear_layer(self.q, index)
        self.k = prune_linear_layer(self.k, index)
        self.v = prune_linear_layer(self.v, index)
        self.o = prune_linear_layer(self.o, index, dim=1)
        # Update hyper params
        self.n_heads = self.n_heads - len(heads)
        self.inner_dim = self.d_kv * self.n_heads
        self.pruned_heads = self.pruned_heads.union(heads)
    @staticmethod
    def _relative_position_bucket(relative_position, bidirectional=True, num_buckets=32, max_distance=128):
        """
        Adapted from Mesh Tensorflow:
        https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593
        Translate relative position to a bucket number for relative attention.
        The relative position is defined as memory_position - query_position, i.e.
        the distance in tokens from the attending position to the attended-to
        position.  If bidirectional=False, then positive relative positions are
        invalid.
        We use smaller buckets for small absolute relative_position and larger buckets
        for larger absolute relative_positions.  All relative positions >=max_distance
        map to the same bucket.  All relative positions <=-max_distance map to the
        same bucket.  This should allow for more graceful generalization to longer
        sequences than the model has been trained on.
        Args:
            relative_position: an int32 Tensor
            bidirectional: a boolean - whether the attention is bidirectional
            num_buckets: an integer
            max_distance: an integer
        Returns:
            a Tensor with the same shape as relative_position, containing int32
            values in the range [0, num_buckets)
        """
        ret = 0
        n = -relative_position
        if bidirectional:
            num_buckets //= 2
            ret += (n < 0).to(torch.long) * num_buckets  # mtf.to_int32(mtf.less(n, 0)) * num_buckets
            n = torch.abs(n)
        else:
            n = torch.max(n, torch.zeros_like(n))
        # now n is in the range [0, inf)
        # half of the buckets are for exact increments in positions
        max_exact = num_buckets // 2
        is_small = n < max_exact
        # The other half of the buckets are for logarithmically bigger bins in positions up to max_distance
        val_if_large = max_exact + (
            torch.log(n.float() / max_exact) / math.log(max_distance / max_exact) * (num_buckets - max_exact)
        ).to(torch.long)
        val_if_large = torch.min(val_if_large, torch.full_like(val_if_large, num_buckets - 1))
        ret += torch.where(is_small, n, val_if_large)
        return ret
    def compute_bias(self, qlen, klen):
        """ Compute binned relative position bias """
        context_position = torch.arange(qlen, dtype=torch.long)[:, None]
        memory_position = torch.arange(klen, dtype=torch.long)[None, :]
        relative_position = memory_position - context_position  # shape (qlen, klen)
        rp_bucket = self._relative_position_bucket(
            relative_position,  # shape (qlen, klen)
            bidirectional=not self.is_decoder,
            num_buckets=self.relative_attention_num_buckets,
        )
        rp_bucket = rp_bucket.to(self.relative_attention_bias.weight.device)
        values = self.relative_attention_bias(rp_bucket)  # shape (qlen, klen, num_heads)
        values = values.permute([2, 0, 1]).unsqueeze(0)  # shape (1, num_heads, qlen, klen)
        return values
    def forward(self, input, mask=None, kv=None, position_bias=None, cache=None, head_mask=None):
        """
        Self-attention (if kv is None) or attention over source sentence (provided by kv).
        """
        # Input is (bs, qlen, dim)
        # Mask is (bs, klen) (non-causal) or (bs, klen, klen)
        bs, qlen, dim = input.size()
        if kv is None:
            # Incremental decoding: the cache tracks how many key positions
            # were already processed ("slen").
            klen = qlen if cache is None else cache["slen"] + qlen
        else:
            klen = kv.size(1)
        def shape(x):
            """ projection """
            return x.view(bs, -1, self.n_heads, self.d_kv).transpose(1, 2)
        def unshape(x):
            """ compute context """
            return x.transpose(1, 2).contiguous().view(bs, -1, self.inner_dim)
        q = shape(self.q(input))  # (bs, n_heads, qlen, dim_per_head)
        if kv is None:
            k = shape(self.k(input))  # (bs, n_heads, qlen, dim_per_head)
            v = shape(self.v(input))  # (bs, n_heads, qlen, dim_per_head)
        elif cache is None or self.layer_id not in cache:
            k = v = kv
            k = shape(self.k(k))  # (bs, n_heads, qlen, dim_per_head)
            v = shape(self.v(v))  # (bs, n_heads, qlen, dim_per_head)
        if cache is not None:
            if self.layer_id in cache:
                if kv is None:
                    # Self-attention: append the new keys/values to the cache.
                    k_, v_ = cache[self.layer_id]
                    k = torch.cat([k_, k], dim=2)  # (bs, n_heads, klen, dim_per_head)
                    v = torch.cat([v_, v], dim=2)  # (bs, n_heads, klen, dim_per_head)
                else:
                    # Cross-attention: encoder keys/values never change, reuse them.
                    k, v = cache[self.layer_id]
            cache[self.layer_id] = (k, v)
        # q = q / math.sqrt(dim_per_head)  # No scaling in T5
        scores = torch.einsum("bnqd,bnkd->bnqk", q, k)  # (bs, n_heads, qlen, klen)
        if position_bias is None:
            if not self.has_relative_attention_bias:
                raise ValueError("No position_bias provided and no weights to compute position_bias")
            position_bias = self.compute_bias(qlen, klen)
            # Fold the (additive, -1e9-style) attention mask into the bias so
            # callers reusing this bias in later layers get the mask for free.
            if mask is not None:
                position_bias = position_bias + mask  # (bs, n_heads, qlen, klen)
        scores += position_bias
        weights = F.softmax(scores.float(), dim=-1).type_as(scores)  # (bs, n_heads, qlen, klen)
        weights = F.dropout(weights, p=self.dropout, training=self.training)  # (bs, n_heads, qlen, klen)
        # Mask heads if we want to
        if head_mask is not None:
            weights = weights * head_mask
        context = torch.matmul(weights, v)  # (bs, n_heads, qlen, dim_per_head)
        context = unshape(context)  # (bs, qlen, dim)
        context = self.o(context)
        outputs = (context,)
        if self.output_attentions:
            outputs = outputs + (weights,)
        if self.has_relative_attention_bias:
            outputs = outputs + (position_bias,)
        return outputs
class T5LayerSelfAttention(nn.Module):
    """Self-attention sub-block: pre-layer-norm, attention, residual dropout."""
    def __init__(self, config, has_relative_attention_bias=False):
        super().__init__()
        self.SelfAttention = T5Attention(config, has_relative_attention_bias=has_relative_attention_bias)
        self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
        self.dropout = nn.Dropout(config.dropout_rate)
    def forward(self, hidden_states, attention_mask=None, position_bias=None, head_mask=None):
        attn_out = self.SelfAttention(
            self.layer_norm(hidden_states),
            mask=attention_mask,
            position_bias=position_bias,
            head_mask=head_mask,
        )
        residual = hidden_states + self.dropout(attn_out[0])
        # Pass through any attention weights / position bias that were returned.
        return (residual,) + attn_out[1:]
class T5LayerCrossAttention(nn.Module):
    """Encoder-decoder attention sub-block: pre-layer-norm, attention over kv, residual dropout."""
    def __init__(self, config, has_relative_attention_bias=False):
        super().__init__()
        self.EncDecAttention = T5Attention(config, has_relative_attention_bias=has_relative_attention_bias)
        self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
        self.dropout = nn.Dropout(config.dropout_rate)
    def forward(self, hidden_states, kv, attention_mask=None, position_bias=None, head_mask=None):
        attn_out = self.EncDecAttention(
            self.layer_norm(hidden_states),
            mask=attention_mask,
            kv=kv,
            position_bias=position_bias,
            head_mask=head_mask,
        )
        residual = hidden_states + self.dropout(attn_out[0])
        # Pass through any attention weights / position bias that were returned.
        return (residual,) + attn_out[1:]
class T5Block(nn.Module):
    """One transformer layer: self-attention, cross-attention (decoder only), feed-forward."""
    def __init__(self, config, has_relative_attention_bias=False):
        super().__init__()
        self.is_decoder = config.is_decoder
        self.layer = nn.ModuleList()
        self.layer.append(T5LayerSelfAttention(config, has_relative_attention_bias=has_relative_attention_bias))
        if self.is_decoder:
            self.layer.append(T5LayerCrossAttention(config, has_relative_attention_bias=has_relative_attention_bias))
        # Feed-forward is always the last sub-layer (index 1 for encoders,
        # index 2 for decoders).
        self.layer.append(T5LayerFF(config))
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        position_bias=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        encoder_decoder_position_bias=None,
        head_mask=None,
    ):
        self_attn = self.layer[0](
            hidden_states, attention_mask=attention_mask, position_bias=position_bias, head_mask=head_mask
        )
        hidden_states = self_attn[0]
        # Collect self-attention weights / relative position bias, if emitted.
        extra_outputs = self_attn[1:]
        if self.is_decoder:
            cross_attn = self.layer[1](
                hidden_states,
                kv=encoder_hidden_states,
                attention_mask=encoder_attention_mask,
                position_bias=encoder_decoder_position_bias,
                head_mask=head_mask,
            )
            hidden_states = cross_attn[0]
            # Also keep cross-attention weights / position bias.
            extra_outputs = extra_outputs + cross_attn[1:]
            hidden_states = self.layer[2](hidden_states)
        else:
            hidden_states = self.layer[1](hidden_states)
        # hidden-states, (self-attn weights), (self-attn position bias),
        # (cross-attn weights), (cross-attn position bias)
        return (hidden_states,) + extra_outputs
class T5PreTrainedModel(PreTrainedModel):
    """ An abstract class to handle weights initialization and
    a simple interface for downloading and loading pretrained models.
    """
    config_class = T5Config
    pretrained_model_archive_map = T5_PRETRAINED_MODEL_ARCHIVE_MAP
    load_tf_weights = load_tf_weights_in_t5
    base_model_prefix = "transformer"
    @property
    def dummy_inputs(self):
        # Tiny fixed inputs (shared encoder/decoder ids plus a decoder mask)
        # used for tracing / smoke tests.
        input_ids = torch.tensor(DUMMY_INPUTS)
        input_mask = torch.tensor(DUMMY_MASK)
        dummy_inputs = {
            "decoder_input_ids": input_ids,
            "encoder_input_ids": input_ids,
            "decoder_attention_mask": input_mask,
        }
        return dummy_inputs
    def _init_weights(self, module):
        """ Initialize the weights """
        factor = self.config.initializer_factor  # Used for testing weights initialization
        if isinstance(module, T5LayerNorm):
            module.weight.data.fill_(factor * 1.0)
        elif isinstance(module, (T5Model, T5WithLMHeadModel)):
            # Mesh TensorFlow embeddings initialization
            # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L1624
            module.shared.weight.data.normal_(mean=0.0, std=factor * 1.0)
        elif isinstance(module, T5DenseReluDense):
            # Mesh TensorFlow FF initialization
            # See https://github.com/tensorflow/mesh/blob/master/mesh_tensorflow/transformer/transformer_layers.py#L56
            # and https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L89
            module.wi.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_model) ** -0.5))
            if hasattr(module.wi, "bias") and module.wi.bias is not None:
                module.wi.bias.data.zero_()
            module.wo.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_ff) ** -0.5))
            if hasattr(module.wo, "bias") and module.wo.bias is not None:
                module.wo.bias.data.zero_()
        elif isinstance(module, T5Attention):
            # Mesh TensorFlow attention initialization to avoid scaling before softmax
            # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/attention.py#L136
            d_model = self.config.d_model
            d_kv = self.config.d_kv
            n_heads = self.config.num_heads
            module.q.weight.data.normal_(mean=0.0, std=factor * ((d_model * d_kv) ** -0.5))
            module.k.weight.data.normal_(mean=0.0, std=factor * (d_model ** -0.5))
            module.v.weight.data.normal_(mean=0.0, std=factor * (d_model ** -0.5))
            module.o.weight.data.normal_(mean=0.0, std=factor * ((n_heads * d_kv) ** -0.5))
            if module.has_relative_attention_bias:
                module.relative_attention_bias.weight.data.normal_(mean=0.0, std=factor * ((d_model) ** -0.5))
class T5Stack(T5PreTrainedModel):
    """A stack of T5Blocks (used both as encoder and, with is_decoder=True,
    as decoder).  Only the first block learns relative position biases; the
    later blocks reuse the bias tensors it returns.
    """
    def __init__(self, config):
        super().__init__(config)
        self.output_attentions = config.output_attentions
        self.output_hidden_states = config.output_hidden_states
        self.is_decoder = config.is_decoder
        self.block = nn.ModuleList(
            [T5Block(config, has_relative_attention_bias=bool(i == 0)) for i in range(config.num_layers)]
        )
        self.final_layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
        self.dropout = nn.Dropout(config.dropout_rate)
        self.init_weights()
    def forward(
        self,
        hidden_states,
        attention_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        head_mask=None,
    ):
        # `hidden_states` here are already-embedded inputs of shape
        # (batch, seq, d_model) -- embedding lookup happens in the caller.
        batch_size, seq_length = hidden_states.shape[0], hidden_states.shape[1]
        if attention_mask is None:
            attention_mask = torch.ones(batch_size, seq_length).to(hidden_states.device)
        if self.is_decoder and encoder_attention_mask is None:
            encoder_seq_length = encoder_hidden_states.shape[1]
            encoder_attention_mask = torch.ones(batch_size, encoder_seq_length).to(hidden_states.device)
        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        # NOTE(review): only dim()==2 and dim()==3 are handled; any other rank
        # leaves extended_attention_mask unassigned (NameError) -- confirm
        # callers never pass other shapes.
        if attention_mask.dim() == 3:
            extended_attention_mask = attention_mask[:, None, :, :]
        elif attention_mask.dim() == 2:
            # Provided a padding mask of dimensions [batch_size, seq_length]
            # - if the model is a decoder, apply a causal mask in addition to the padding mask
            # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length]
            if self.config.is_decoder:
                seq_ids = torch.arange(seq_length, device=hidden_states.device)
                causal_mask = seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= seq_ids[None, :, None]
                causal_mask = causal_mask.to(attention_mask)
                extended_attention_mask = causal_mask[:, None, :, :] * attention_mask[:, None, None, :]
            else:
                extended_attention_mask = attention_mask[:, None, None, :]
        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
        # masked positions, this operation will create a tensor which is 0.0 for
        # positions we want to attend and -1e9 for masked positions.
        # Since we are adding it to the raw scores before the softmax, this is
        # effectively the same as removing these entirely.
        # T5 has a mask that can compare sequence ids, we can simulate this here with this transposition
        # Cf. https://github.com/tensorflow/mesh/blob/8d2465e9bc93129b913b5ccc6a59aa97abd96ec6/mesh_tensorflow/transformer/transformer_layers.py#L270
        # extended_attention_mask = (extended_attention_mask == extended_attention_mask.transpose(-1, -2))
        extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype)  # fp16 compatibility
        extended_attention_mask = (1.0 - extended_attention_mask) * -1e9
        if self.is_decoder:
            # If a 2D ou 3D attention mask is provided for the cross-attention
            # we need to make broadcastabe to [batch_size, num_heads, seq_length, seq_length]
            if encoder_attention_mask.dim() == 3:
                encoder_extended_attention_mask = encoder_attention_mask[:, None, :, :]
            if encoder_attention_mask.dim() == 2:
                encoder_extended_attention_mask = encoder_attention_mask[:, None, None, :]
            # T5 has a mask that can compare sequence ids, we can simulate this here with this transposition
            # Cf. https://github.com/tensorflow/mesh/blob/8d2465e9bc93129b913b5ccc6a59aa97abd96ec6/mesh_tensorflow/transformer/transformer_layers.py#L270
            # encoder_extended_attention_mask = (encoder_extended_attention_mask == encoder_extended_attention_mask.transpose(-1, -2))
            encoder_extended_attention_mask = encoder_extended_attention_mask.to(
                dtype=next(self.parameters()).dtype
            )  # fp16 compatibility
            encoder_extended_attention_mask = (1.0 - encoder_extended_attention_mask) * -1e9
        else:
            encoder_extended_attention_mask = None
        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        if head_mask is not None:
            if head_mask.dim() == 1:
                head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
                head_mask = head_mask.expand(self.config.num_layers, -1, -1, -1, -1)
            elif head_mask.dim() == 2:
                head_mask = (
                    head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1)
                )  # We can specify head_mask for each layer
            head_mask = head_mask.to(
                dtype=next(self.parameters()).dtype
            )  # switch to fload if need + fp16 compatibility
        else:
            head_mask = [None] * self.config.num_layers
        all_hidden_states = ()
        all_attentions = ()
        position_bias = None
        encoder_decoder_position_bias = None
        hidden_states = self.dropout(hidden_states)
        for i, layer_module in enumerate(self.block):
            if self.output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)
            layer_outputs = layer_module(
                hidden_states,
                attention_mask=extended_attention_mask,
                position_bias=position_bias,
                encoder_hidden_states=encoder_hidden_states,
                encoder_attention_mask=encoder_extended_attention_mask,
                encoder_decoder_position_bias=encoder_decoder_position_bias,
                head_mask=head_mask[i],
            )
            # layer_outputs is a tuple with:
            # hidden-states, (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
            hidden_states = layer_outputs[0]
            if i == 0:
                # We share the position biases between the layers - the first layer store them
                # layer_outputs = hidden-states, (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
                position_bias = layer_outputs[2 if self.output_attentions else 1]
                if self.is_decoder:
                    encoder_decoder_position_bias = layer_outputs[4 if self.output_attentions else 2]
            if self.output_attentions:
                all_attentions = all_attentions + (layer_outputs[1],)  # We keep only self-attention weights for now
        hidden_states = self.final_layer_norm(hidden_states)
        hidden_states = self.dropout(hidden_states)
        # Add last layer
        if self.output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)
        outputs = (hidden_states,)
        if self.output_hidden_states:
            outputs = outputs + (all_hidden_states,)
        if self.output_attentions:
            outputs = outputs + (all_attentions,)
        return outputs  # last-layer hidden state, (all hidden states), (all attentions)
T5_START_DOCSTRING = r""" The T5 model was proposed in
`Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer`_
by Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu.
It's an encoder decoder transformer pre-trained in a text-to-text denoising generative setting.
This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and
refer to the PyTorch documentation for all matter related to general usage and behavior.
.. _`Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer`:
https://arxiv.org/abs/1910.10683
.. _`torch.nn.Module`:
https://pytorch.org/docs/stable/nn.html#module
Parameters:
config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the configuration.
Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
"""
# Shared inputs docstring injected via @add_start_docstrings.  The previous
# text was copied from BERT and wrongly claimed T5 inputs are formatted with
# [CLS]/[SEP] tokens; T5's tokenizer uses no such special-token scheme.
T5_INPUTS_DOCSTRING = r"""
    Inputs:
        **input_ids**: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
            Indices of input sequence tokens in the vocabulary.
            T5 is a model with relative position embeddings so you should be able to pad the inputs on
            the right or the left.
            Indices can be obtained using :class:`transformers.T5Tokenizer`.
            See :func:`transformers.PreTrainedTokenizer.encode` and
            :func:`transformers.PreTrainedTokenizer.convert_tokens_to_ids` for details.
        **attention_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, sequence_length)``:
            Mask to avoid performing attention on padding token indices.
            Mask values selected in ``[0, 1]``:
            ``1`` for tokens that are NOT MASKED, ``0`` for MASKED tokens.
        **head_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(num_heads,)`` or ``(num_layers, num_heads)``:
            Mask to nullify selected heads of the self-attention modules.
            Mask values selected in ``[0, 1]``:
            ``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**.
"""
@add_start_docstrings(
    # Fix: the two adjacent literals are implicitly concatenated; without the
    # leading space the rendered doc read "raw hidden-stateswithout any ...".
    "The bare T5 Model transformer outputting raw hidden-states" " without any specific head on top.",
    T5_START_DOCSTRING,
    T5_INPUTS_DOCSTRING,
)
class T5Model(T5PreTrainedModel):
    r"""
    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
        **last_hidden_state**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, hidden_size)``
            Sequence of hidden-states at the output of the last layer of the model.
        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
            of shape ``(batch_size, sequence_length, hidden_size)``:
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
    Examples::
        tokenizer = T5Tokenizer.from_pretrained('t5-small')
        model = T5Model.from_pretrained('t5-small')
        input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0)  # Batch size 1
        outputs = model(input_ids=input_ids)
        last_hidden_states = outputs[0]  # The last hidden-state is the first element of the output tuple
    """

    def __init__(self, config):
        """Build the shared token embedding and the encoder/decoder T5 stacks."""
        super().__init__(config)
        # One embedding table shared by encoder and decoder inputs.
        self.shared = nn.Embedding(config.vocab_size, config.d_model)
        encoder_config = copy.deepcopy(config)
        self.encoder = T5Stack(encoder_config)
        # The decoder runs from its own (deep-copied) config with
        # ``is_decoder=True`` so the stack behaves as a decoder.
        decoder_config = copy.deepcopy(config)
        decoder_config.is_decoder = True
        self.decoder = T5Stack(decoder_config)
        self.init_weights()

    def get_input_embeddings(self):
        """Return the embedding module shared by encoder and decoder."""
        return self.shared

    def set_input_embeddings(self, new_embeddings):
        """Replace the embedding module shared by encoder and decoder."""
        self.shared = new_embeddings

    def _prune_heads(self, heads_to_prune):
        """Prunes heads of the model.

        heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
        See base class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    def forward(self, **kwargs):
        """Run the encoder (if needed) then the decoder.

        Keyword arguments come in 3 flavors: encoder-specific (prefixed by
        ``encoder_``), decoder-specific (prefixed by ``decoder_``) and those
        that apply to the model as a whole. The specific kwargs override the
        common ones in case of conflict.

        Returns:
            ``decoder_outputs + encoder_outputs`` — decoder outputs first;
            encoder outputs are ``()`` when precomputed hidden states were
            supplied via ``encoder_hidden_states``.
        """
        kwargs_common = dict(
            (k, v) for k, v in kwargs.items() if not k.startswith("encoder_") and not k.startswith("decoder_")
        )
        kwargs_encoder = kwargs_common.copy()
        kwargs_decoder = kwargs_common.copy()
        kwargs_encoder.update(dict((k[len("encoder_") :], v) for k, v in kwargs.items() if k.startswith("encoder_")))
        kwargs_decoder.update(dict((k[len("decoder_") :], v) for k, v in kwargs.items() if k.startswith("decoder_")))
        # Encode if needed (training, first prediction pass).
        encoder_hidden_states = kwargs_encoder.pop("hidden_states", None)
        encoder_attention_mask = kwargs_encoder.get("attention_mask", None)
        if encoder_hidden_states is None:
            # Convert encoder inputs into embeddings if needed.
            hidden_states = kwargs_encoder.pop("inputs_embeds", None)
            if hidden_states is None:
                encoder_inputs_ids = kwargs_encoder.pop("input_ids")
                hidden_states = self.shared(encoder_inputs_ids)  # Convert inputs in embeddings
            if encoder_attention_mask is not None:
                # Zero the embeddings at masked (padding) positions before
                # encoding; the mask is also float-cast here and that float
                # version is what gets forwarded to the decoder below.
                # NOTE(review): T5WithLMHeadModel.forward does not perform
                # this multiply — confirm the two classes are meant to differ.
                encoder_attention_mask = (encoder_attention_mask != 0).to(hidden_states)
                hidden_states = hidden_states * encoder_attention_mask.unsqueeze(-1)
            encoder_outputs = self.encoder(hidden_states, **kwargs_encoder)
            encoder_hidden_states = encoder_outputs[0]
        else:
            encoder_outputs = ()
        # Decode: convert decoder inputs into embeddings if needed.
        hidden_states = kwargs_decoder.pop("inputs_embeds", None)
        if hidden_states is None:
            decoder_inputs_ids = kwargs_decoder.pop("input_ids")
            hidden_states = self.shared(decoder_inputs_ids)
        kwargs_decoder["encoder_hidden_states"] = encoder_hidden_states
        kwargs_decoder["encoder_attention_mask"] = encoder_attention_mask
        decoder_outputs = self.decoder(hidden_states, **kwargs_decoder)
        return decoder_outputs + encoder_outputs
@add_start_docstrings("""T5 Model with a `language modeling` head on top. """, T5_START_DOCSTRING, T5_INPUTS_DOCSTRING)
class T5WithLMHeadModel(T5PreTrainedModel):
    r"""
        **lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
            Labels for computing the masked language modeling loss.
            Indices should either be in ``[0, ..., config.vocab_size]`` or -100 (see ``input_ids`` docstring).
            Tokens with indices set to ``-100`` are ignored (masked), the loss is only computed for the tokens with labels
            in ``[0, ..., config.vocab_size]``.
    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
        **loss**: (`optional`, returned when ``lm_labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
            Masked language modeling loss.
        **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
            of shape ``(batch_size, sequence_length, hidden_size)``:
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
    Examples::
        tokenizer = T5Tokenizer.from_pretrained('t5-small')
        model = T5WithLMHeadModel.from_pretrained('t5-small')
        input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0)  # Batch size 1
        outputs = model(input_ids=input_ids, lm_labels=input_ids)
        loss, prediction_scores = outputs[:2]
    """
    def __init__(self, config):
        """Build shared embeddings, encoder/decoder stacks and the LM head."""
        super().__init__(config)
        # Kept for the logits rescaling in forward() below.
        self.model_dim = config.d_model
        # One embedding table shared by encoder and decoder inputs.
        self.shared = nn.Embedding(config.vocab_size, config.d_model)
        encoder_config = copy.deepcopy(config)
        self.encoder = T5Stack(encoder_config)
        # Decoder gets its own (deep-copied) config with ``is_decoder=True``.
        decoder_config = copy.deepcopy(config)
        decoder_config.is_decoder = True
        self.decoder = T5Stack(decoder_config)
        # Projection from decoder hidden states to vocabulary logits (no bias,
        # mirroring the original mesh-tensorflow implementation).
        self.lm_head = nn.Linear(config.d_model, config.vocab_size, bias=False)
        self.init_weights()
    def get_input_embeddings(self):
        """Return the embedding module shared by encoder and decoder."""
        return self.shared
    def set_input_embeddings(self, new_embeddings):
        """Replace the embedding module shared by encoder and decoder."""
        self.shared = new_embeddings
    def get_output_embeddings(self):
        """Return the LM head projecting hidden states to vocabulary logits."""
        return self.lm_head
    def forward(self, **kwargs):
        """Run the seq2seq model and project decoder states to vocab logits.

        Keyword arguments come in 3 flavors: encoder-specific (prefixed by
        ``encoder_``), decoder-specific (prefixed by ``decoder_``) and those
        that apply to the model as a whole; the prefixed kwargs override the
        common ones in case of conflict. When ``decoder_lm_labels`` is given,
        a cross-entropy loss is prepended to the outputs.
        """
        # Pull labels out first so they are not forwarded into the decoder stack.
        lm_labels = kwargs.pop("decoder_lm_labels", None)
        kwargs_common = dict(
            (k, v) for k, v in kwargs.items() if not k.startswith("encoder_") and not k.startswith("decoder_")
        )
        kwargs_encoder = kwargs_common.copy()
        kwargs_decoder = kwargs_common.copy()
        kwargs_encoder.update(dict((k[len("encoder_") :], v) for k, v in kwargs.items() if k.startswith("encoder_")))
        kwargs_decoder.update(dict((k[len("decoder_") :], v) for k, v in kwargs.items() if k.startswith("decoder_")))
        # Encode if needed (training, first prediction pass)
        encoder_hidden_states = kwargs_encoder.pop("hidden_states", None)
        if encoder_hidden_states is None:
            # Convert encoder inputs in embeddings if needed
            hidden_states = kwargs_encoder.pop("inputs_embeds", None)
            if hidden_states is None:
                encoder_inputs_ids = kwargs_encoder.pop("input_ids")
                hidden_states = self.shared(encoder_inputs_ids)  # Convert inputs in embeddings
            encoder_outputs = self.encoder(hidden_states, **kwargs_encoder)
            encoder_hidden_states = encoder_outputs[0]
        else:
            encoder_outputs = ()
        # Decode
        # Convert decoder inputs in embeddings if needed
        hidden_states = kwargs_decoder.pop("inputs_embeds", None)
        if hidden_states is None:
            decoder_inputs_ids = kwargs_decoder.pop("input_ids")
            hidden_states = self.shared(decoder_inputs_ids)
        # NOTE(review): unlike T5Model.forward, the raw attention mask is
        # forwarded unchanged and the encoder embeddings are NOT zeroed at
        # padding positions here — confirm the two classes are meant to differ.
        kwargs_decoder["encoder_hidden_states"] = encoder_hidden_states
        kwargs_decoder["encoder_attention_mask"] = kwargs_encoder.get("attention_mask", None)
        decoder_outputs = self.decoder(hidden_states, **kwargs_decoder)
        sequence_output = decoder_outputs[0]
        # Rescale output before projecting on vocab
        # See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/transformer.py#L586
        sequence_output = sequence_output * (self.model_dim ** -0.5)
        lm_logits = self.lm_head(sequence_output)
        decoder_outputs = (lm_logits,) + decoder_outputs[1:]  # Add hidden states and attention if they are here
        if lm_labels is not None:
            # Teacher forcing: logits at position i are scored against the
            # label at position i+1, so drop the last logit / first label.
            shift_logits = lm_logits[..., :-1, :].contiguous()
            shift_labels = lm_labels[..., 1:].contiguous()
            # NOTE(review): CrossEntropyLoss() ignores label -100 by default,
            # not -1 — labels must use -100 for masked positions.
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
            decoder_outputs = (
                loss,
            ) + decoder_outputs  # TODO(thom): Add z_loss https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L666
        return decoder_outputs + encoder_outputs
| 47.543668
| 169
| 0.650425
|
4a033b4b054ab8c566a8b132d775e3801c9d6346
| 63,365
|
py
|
Python
|
google/cloud/vmmigration_v1/services/vm_migration/transports/grpc_asyncio.py
|
LaudateCorpus1/python-vm-migration
|
bf6760ce5ead26b352a5a89e079fa2ca20c0c3c6
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/vmmigration_v1/services/vm_migration/transports/grpc_asyncio.py
|
LaudateCorpus1/python-vm-migration
|
bf6760ce5ead26b352a5a89e079fa2ca20c0c3c6
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/vmmigration_v1/services/vm_migration/transports/grpc_asyncio.py
|
LaudateCorpus1/python-vm-migration
|
bf6760ce5ead26b352a5a89e079fa2ca20c0c3c6
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.vmmigration_v1.types import vmmigration
from google.longrunning import operations_pb2 # type: ignore
from .base import VmMigrationTransport, DEFAULT_CLIENT_INFO
from .grpc import VmMigrationGrpcTransport
class VmMigrationGrpcAsyncIOTransport(VmMigrationTransport):
"""gRPC AsyncIO backend transport for VmMigration.
VM Migration Service
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "vmmigration.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
    def __init__(
        self,
        *,
        host: str = "vmmigration.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: aio.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
    ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        # Instance state: channel/SSL credentials are resolved below; stubs and
        # the operations client are created lazily by the property accessors.
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
        if channel:
            # Ignore credentials if a channel was passed.
            # NOTE(review): ``False`` appears to act as a sentinel telling the
            # base transport not to resolve default credentials — confirm
            # against the base class before relying on it.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            # No channel supplied: work out which SSL credentials to use.
            if api_mtls_endpoint:
                # Deprecated mTLS path: the endpoint replaces ``host``.
                host = api_mtls_endpoint
                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
            else:
                # Current mTLS path: explicit ssl_channel_credentials wins over
                # the certificate-source callback.
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )
        if not self._grpc_channel:
            # Build our own channel using the credentials/scopes resolved by
            # the base class; message size limits are lifted (-1 = unlimited).
            self._grpc_channel = type(self).create_channel(
                self._host,
                credentials=self._credentials,
                credentials_file=credentials_file,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def list_sources(
self,
) -> Callable[
[vmmigration.ListSourcesRequest], Awaitable[vmmigration.ListSourcesResponse]
]:
r"""Return a callable for the list sources method over gRPC.
Lists Sources in a given project and location.
Returns:
Callable[[~.ListSourcesRequest],
Awaitable[~.ListSourcesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_sources" not in self._stubs:
self._stubs["list_sources"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListSources",
request_serializer=vmmigration.ListSourcesRequest.serialize,
response_deserializer=vmmigration.ListSourcesResponse.deserialize,
)
return self._stubs["list_sources"]
@property
def get_source(
self,
) -> Callable[[vmmigration.GetSourceRequest], Awaitable[vmmigration.Source]]:
r"""Return a callable for the get source method over gRPC.
Gets details of a single Source.
Returns:
Callable[[~.GetSourceRequest],
Awaitable[~.Source]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_source" not in self._stubs:
self._stubs["get_source"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetSource",
request_serializer=vmmigration.GetSourceRequest.serialize,
response_deserializer=vmmigration.Source.deserialize,
)
return self._stubs["get_source"]
@property
def create_source(
self,
) -> Callable[
[vmmigration.CreateSourceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create source method over gRPC.
Creates a new Source in a given project and location.
Returns:
Callable[[~.CreateSourceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_source" not in self._stubs:
self._stubs["create_source"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateSource",
request_serializer=vmmigration.CreateSourceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_source"]
@property
def update_source(
self,
) -> Callable[
[vmmigration.UpdateSourceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the update source method over gRPC.
Updates the parameters of a single Source.
Returns:
Callable[[~.UpdateSourceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_source" not in self._stubs:
self._stubs["update_source"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/UpdateSource",
request_serializer=vmmigration.UpdateSourceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_source"]
@property
def delete_source(
self,
) -> Callable[
[vmmigration.DeleteSourceRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the delete source method over gRPC.
Deletes a single Source.
Returns:
Callable[[~.DeleteSourceRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_source" not in self._stubs:
self._stubs["delete_source"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/DeleteSource",
request_serializer=vmmigration.DeleteSourceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_source"]
@property
def fetch_inventory(
self,
) -> Callable[
[vmmigration.FetchInventoryRequest],
Awaitable[vmmigration.FetchInventoryResponse],
]:
r"""Return a callable for the fetch inventory method over gRPC.
List remote source's inventory of VMs.
The remote source is the onprem vCenter (remote in the
sense it's not in Compute Engine). The inventory
describes the list of existing VMs in that source. Note
that this operation lists the VMs on the remote source,
as opposed to listing the MigratingVms resources in the
vmmigration service.
Returns:
Callable[[~.FetchInventoryRequest],
Awaitable[~.FetchInventoryResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "fetch_inventory" not in self._stubs:
self._stubs["fetch_inventory"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/FetchInventory",
request_serializer=vmmigration.FetchInventoryRequest.serialize,
response_deserializer=vmmigration.FetchInventoryResponse.deserialize,
)
return self._stubs["fetch_inventory"]
@property
def list_utilization_reports(
self,
) -> Callable[
[vmmigration.ListUtilizationReportsRequest],
Awaitable[vmmigration.ListUtilizationReportsResponse],
]:
r"""Return a callable for the list utilization reports method over gRPC.
Lists Utilization Reports of the given Source.
Returns:
Callable[[~.ListUtilizationReportsRequest],
Awaitable[~.ListUtilizationReportsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_utilization_reports" not in self._stubs:
self._stubs["list_utilization_reports"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListUtilizationReports",
request_serializer=vmmigration.ListUtilizationReportsRequest.serialize,
response_deserializer=vmmigration.ListUtilizationReportsResponse.deserialize,
)
return self._stubs["list_utilization_reports"]
@property
def get_utilization_report(
self,
) -> Callable[
[vmmigration.GetUtilizationReportRequest],
Awaitable[vmmigration.UtilizationReport],
]:
r"""Return a callable for the get utilization report method over gRPC.
Gets a single Utilization Report.
Returns:
Callable[[~.GetUtilizationReportRequest],
Awaitable[~.UtilizationReport]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_utilization_report" not in self._stubs:
self._stubs["get_utilization_report"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetUtilizationReport",
request_serializer=vmmigration.GetUtilizationReportRequest.serialize,
response_deserializer=vmmigration.UtilizationReport.deserialize,
)
return self._stubs["get_utilization_report"]
@property
def create_utilization_report(
self,
) -> Callable[
[vmmigration.CreateUtilizationReportRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the create utilization report method over gRPC.
Creates a new UtilizationReport.
Returns:
Callable[[~.CreateUtilizationReportRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_utilization_report" not in self._stubs:
self._stubs["create_utilization_report"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateUtilizationReport",
request_serializer=vmmigration.CreateUtilizationReportRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_utilization_report"]
@property
def delete_utilization_report(
self,
) -> Callable[
[vmmigration.DeleteUtilizationReportRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the delete utilization report method over gRPC.
Deletes a single Utilization Report.
Returns:
Callable[[~.DeleteUtilizationReportRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_utilization_report" not in self._stubs:
self._stubs["delete_utilization_report"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/DeleteUtilizationReport",
request_serializer=vmmigration.DeleteUtilizationReportRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_utilization_report"]
@property
def list_datacenter_connectors(
self,
) -> Callable[
[vmmigration.ListDatacenterConnectorsRequest],
Awaitable[vmmigration.ListDatacenterConnectorsResponse],
]:
r"""Return a callable for the list datacenter connectors method over gRPC.
Lists DatacenterConnectors in a given Source.
Returns:
Callable[[~.ListDatacenterConnectorsRequest],
Awaitable[~.ListDatacenterConnectorsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_datacenter_connectors" not in self._stubs:
self._stubs["list_datacenter_connectors"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListDatacenterConnectors",
request_serializer=vmmigration.ListDatacenterConnectorsRequest.serialize,
response_deserializer=vmmigration.ListDatacenterConnectorsResponse.deserialize,
)
return self._stubs["list_datacenter_connectors"]
@property
def get_datacenter_connector(
self,
) -> Callable[
[vmmigration.GetDatacenterConnectorRequest],
Awaitable[vmmigration.DatacenterConnector],
]:
r"""Return a callable for the get datacenter connector method over gRPC.
Gets details of a single DatacenterConnector.
Returns:
Callable[[~.GetDatacenterConnectorRequest],
Awaitable[~.DatacenterConnector]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_datacenter_connector" not in self._stubs:
self._stubs["get_datacenter_connector"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetDatacenterConnector",
request_serializer=vmmigration.GetDatacenterConnectorRequest.serialize,
response_deserializer=vmmigration.DatacenterConnector.deserialize,
)
return self._stubs["get_datacenter_connector"]
@property
def create_datacenter_connector(
self,
) -> Callable[
[vmmigration.CreateDatacenterConnectorRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the create datacenter connector method over gRPC.
Creates a new DatacenterConnector in a given Source.
Returns:
Callable[[~.CreateDatacenterConnectorRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_datacenter_connector" not in self._stubs:
self._stubs["create_datacenter_connector"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateDatacenterConnector",
request_serializer=vmmigration.CreateDatacenterConnectorRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_datacenter_connector"]
@property
def delete_datacenter_connector(
self,
) -> Callable[
[vmmigration.DeleteDatacenterConnectorRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the delete datacenter connector method over gRPC.
Deletes a single DatacenterConnector.
Returns:
Callable[[~.DeleteDatacenterConnectorRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_datacenter_connector" not in self._stubs:
self._stubs["delete_datacenter_connector"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/DeleteDatacenterConnector",
request_serializer=vmmigration.DeleteDatacenterConnectorRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_datacenter_connector"]
@property
def create_migrating_vm(
self,
) -> Callable[
[vmmigration.CreateMigratingVmRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create migrating vm method over gRPC.
Creates a new MigratingVm in a given Source.
Returns:
Callable[[~.CreateMigratingVmRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_migrating_vm" not in self._stubs:
self._stubs["create_migrating_vm"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateMigratingVm",
request_serializer=vmmigration.CreateMigratingVmRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_migrating_vm"]
@property
def list_migrating_vms(
self,
) -> Callable[
[vmmigration.ListMigratingVmsRequest],
Awaitable[vmmigration.ListMigratingVmsResponse],
]:
r"""Return a callable for the list migrating vms method over gRPC.
Lists MigratingVms in a given Source.
Returns:
Callable[[~.ListMigratingVmsRequest],
Awaitable[~.ListMigratingVmsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_migrating_vms" not in self._stubs:
self._stubs["list_migrating_vms"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListMigratingVms",
request_serializer=vmmigration.ListMigratingVmsRequest.serialize,
response_deserializer=vmmigration.ListMigratingVmsResponse.deserialize,
)
return self._stubs["list_migrating_vms"]
@property
def get_migrating_vm(
self,
) -> Callable[
[vmmigration.GetMigratingVmRequest], Awaitable[vmmigration.MigratingVm]
]:
r"""Return a callable for the get migrating vm method over gRPC.
Gets details of a single MigratingVm.
Returns:
Callable[[~.GetMigratingVmRequest],
Awaitable[~.MigratingVm]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_migrating_vm" not in self._stubs:
self._stubs["get_migrating_vm"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetMigratingVm",
request_serializer=vmmigration.GetMigratingVmRequest.serialize,
response_deserializer=vmmigration.MigratingVm.deserialize,
)
return self._stubs["get_migrating_vm"]
@property
def update_migrating_vm(
self,
) -> Callable[
[vmmigration.UpdateMigratingVmRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the update migrating vm method over gRPC.
Updates the parameters of a single MigratingVm.
Returns:
Callable[[~.UpdateMigratingVmRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_migrating_vm" not in self._stubs:
self._stubs["update_migrating_vm"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/UpdateMigratingVm",
request_serializer=vmmigration.UpdateMigratingVmRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_migrating_vm"]
@property
def delete_migrating_vm(
self,
) -> Callable[
[vmmigration.DeleteMigratingVmRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the delete migrating vm method over gRPC.
Deletes a single MigratingVm.
Returns:
Callable[[~.DeleteMigratingVmRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_migrating_vm" not in self._stubs:
self._stubs["delete_migrating_vm"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/DeleteMigratingVm",
request_serializer=vmmigration.DeleteMigratingVmRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_migrating_vm"]
@property
def start_migration(
self,
) -> Callable[
[vmmigration.StartMigrationRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the start migration method over gRPC.
Starts migration for a VM. Starts the process of
uploading data and creating snapshots, in replication
cycles scheduled by the policy.
Returns:
Callable[[~.StartMigrationRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "start_migration" not in self._stubs:
self._stubs["start_migration"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/StartMigration",
request_serializer=vmmigration.StartMigrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["start_migration"]
@property
def resume_migration(
self,
) -> Callable[
[vmmigration.ResumeMigrationRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the resume migration method over gRPC.
Resumes a migration for a VM. When called on a paused
migration, will start the process of uploading data and
creating snapshots; when called on a completed cut-over
migration, will update the migration to active state and
start the process of uploading data and creating
snapshots.
Returns:
Callable[[~.ResumeMigrationRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "resume_migration" not in self._stubs:
self._stubs["resume_migration"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ResumeMigration",
request_serializer=vmmigration.ResumeMigrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["resume_migration"]
@property
def pause_migration(
self,
) -> Callable[
[vmmigration.PauseMigrationRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the pause migration method over gRPC.
Pauses a migration for a VM. If cycle tasks are
running they will be cancelled, preserving source task
data. Further replication cycles will not be triggered
while the VM is paused.
Returns:
Callable[[~.PauseMigrationRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "pause_migration" not in self._stubs:
self._stubs["pause_migration"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/PauseMigration",
request_serializer=vmmigration.PauseMigrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["pause_migration"]
@property
def finalize_migration(
self,
) -> Callable[
[vmmigration.FinalizeMigrationRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the finalize migration method over gRPC.
Marks a migration as completed, deleting migration
resources that are no longer being used. Only applicable
after cutover is done.
Returns:
Callable[[~.FinalizeMigrationRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "finalize_migration" not in self._stubs:
self._stubs["finalize_migration"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/FinalizeMigration",
request_serializer=vmmigration.FinalizeMigrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["finalize_migration"]
@property
def create_clone_job(
self,
) -> Callable[
[vmmigration.CreateCloneJobRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create clone job method over gRPC.
Initiates a Clone of a specific migrating VM.
Returns:
Callable[[~.CreateCloneJobRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_clone_job" not in self._stubs:
self._stubs["create_clone_job"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateCloneJob",
request_serializer=vmmigration.CreateCloneJobRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_clone_job"]
@property
def cancel_clone_job(
self,
) -> Callable[
[vmmigration.CancelCloneJobRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the cancel clone job method over gRPC.
Initiates the cancellation of a running clone job.
Returns:
Callable[[~.CancelCloneJobRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_clone_job" not in self._stubs:
self._stubs["cancel_clone_job"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CancelCloneJob",
request_serializer=vmmigration.CancelCloneJobRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["cancel_clone_job"]
@property
def list_clone_jobs(
self,
) -> Callable[
[vmmigration.ListCloneJobsRequest], Awaitable[vmmigration.ListCloneJobsResponse]
]:
r"""Return a callable for the list clone jobs method over gRPC.
Lists CloneJobs of a given migrating VM.
Returns:
Callable[[~.ListCloneJobsRequest],
Awaitable[~.ListCloneJobsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_clone_jobs" not in self._stubs:
self._stubs["list_clone_jobs"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListCloneJobs",
request_serializer=vmmigration.ListCloneJobsRequest.serialize,
response_deserializer=vmmigration.ListCloneJobsResponse.deserialize,
)
return self._stubs["list_clone_jobs"]
@property
def get_clone_job(
self,
) -> Callable[[vmmigration.GetCloneJobRequest], Awaitable[vmmigration.CloneJob]]:
r"""Return a callable for the get clone job method over gRPC.
Gets details of a single CloneJob.
Returns:
Callable[[~.GetCloneJobRequest],
Awaitable[~.CloneJob]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_clone_job" not in self._stubs:
self._stubs["get_clone_job"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetCloneJob",
request_serializer=vmmigration.GetCloneJobRequest.serialize,
response_deserializer=vmmigration.CloneJob.deserialize,
)
return self._stubs["get_clone_job"]
@property
def create_cutover_job(
self,
) -> Callable[
[vmmigration.CreateCutoverJobRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create cutover job method over gRPC.
Initiates a Cutover of a specific migrating VM.
The returned LRO is completed when the cutover job
resource is created and the job is initiated.
Returns:
Callable[[~.CreateCutoverJobRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_cutover_job" not in self._stubs:
self._stubs["create_cutover_job"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateCutoverJob",
request_serializer=vmmigration.CreateCutoverJobRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_cutover_job"]
@property
def cancel_cutover_job(
self,
) -> Callable[
[vmmigration.CancelCutoverJobRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the cancel cutover job method over gRPC.
Initiates the cancellation of a running cutover job.
Returns:
Callable[[~.CancelCutoverJobRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_cutover_job" not in self._stubs:
self._stubs["cancel_cutover_job"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CancelCutoverJob",
request_serializer=vmmigration.CancelCutoverJobRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["cancel_cutover_job"]
@property
def list_cutover_jobs(
self,
) -> Callable[
[vmmigration.ListCutoverJobsRequest],
Awaitable[vmmigration.ListCutoverJobsResponse],
]:
r"""Return a callable for the list cutover jobs method over gRPC.
Lists CutoverJobs of a given migrating VM.
Returns:
Callable[[~.ListCutoverJobsRequest],
Awaitable[~.ListCutoverJobsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_cutover_jobs" not in self._stubs:
self._stubs["list_cutover_jobs"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListCutoverJobs",
request_serializer=vmmigration.ListCutoverJobsRequest.serialize,
response_deserializer=vmmigration.ListCutoverJobsResponse.deserialize,
)
return self._stubs["list_cutover_jobs"]
@property
def get_cutover_job(
self,
) -> Callable[
[vmmigration.GetCutoverJobRequest], Awaitable[vmmigration.CutoverJob]
]:
r"""Return a callable for the get cutover job method over gRPC.
Gets details of a single CutoverJob.
Returns:
Callable[[~.GetCutoverJobRequest],
Awaitable[~.CutoverJob]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_cutover_job" not in self._stubs:
self._stubs["get_cutover_job"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetCutoverJob",
request_serializer=vmmigration.GetCutoverJobRequest.serialize,
response_deserializer=vmmigration.CutoverJob.deserialize,
)
return self._stubs["get_cutover_job"]
@property
def list_groups(
self,
) -> Callable[
[vmmigration.ListGroupsRequest], Awaitable[vmmigration.ListGroupsResponse]
]:
r"""Return a callable for the list groups method over gRPC.
Lists Groups in a given project and location.
Returns:
Callable[[~.ListGroupsRequest],
Awaitable[~.ListGroupsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_groups" not in self._stubs:
self._stubs["list_groups"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListGroups",
request_serializer=vmmigration.ListGroupsRequest.serialize,
response_deserializer=vmmigration.ListGroupsResponse.deserialize,
)
return self._stubs["list_groups"]
@property
def get_group(
self,
) -> Callable[[vmmigration.GetGroupRequest], Awaitable[vmmigration.Group]]:
r"""Return a callable for the get group method over gRPC.
Gets details of a single Group.
Returns:
Callable[[~.GetGroupRequest],
Awaitable[~.Group]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_group" not in self._stubs:
self._stubs["get_group"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetGroup",
request_serializer=vmmigration.GetGroupRequest.serialize,
response_deserializer=vmmigration.Group.deserialize,
)
return self._stubs["get_group"]
@property
def create_group(
self,
) -> Callable[
[vmmigration.CreateGroupRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create group method over gRPC.
Creates a new Group in a given project and location.
Returns:
Callable[[~.CreateGroupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_group" not in self._stubs:
self._stubs["create_group"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateGroup",
request_serializer=vmmigration.CreateGroupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_group"]
@property
def update_group(
self,
) -> Callable[
[vmmigration.UpdateGroupRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the update group method over gRPC.
Updates the parameters of a single Group.
Returns:
Callable[[~.UpdateGroupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_group" not in self._stubs:
self._stubs["update_group"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/UpdateGroup",
request_serializer=vmmigration.UpdateGroupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_group"]
@property
def delete_group(
self,
) -> Callable[
[vmmigration.DeleteGroupRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the delete group method over gRPC.
Deletes a single Group.
Returns:
Callable[[~.DeleteGroupRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_group" not in self._stubs:
self._stubs["delete_group"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/DeleteGroup",
request_serializer=vmmigration.DeleteGroupRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_group"]
@property
def add_group_migration(
self,
) -> Callable[
[vmmigration.AddGroupMigrationRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the add group migration method over gRPC.
Adds a MigratingVm to a Group.
Returns:
Callable[[~.AddGroupMigrationRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "add_group_migration" not in self._stubs:
self._stubs["add_group_migration"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/AddGroupMigration",
request_serializer=vmmigration.AddGroupMigrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["add_group_migration"]
@property
def remove_group_migration(
self,
) -> Callable[
[vmmigration.RemoveGroupMigrationRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the remove group migration method over gRPC.
Removes a MigratingVm from a Group.
Returns:
Callable[[~.RemoveGroupMigrationRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "remove_group_migration" not in self._stubs:
self._stubs["remove_group_migration"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/RemoveGroupMigration",
request_serializer=vmmigration.RemoveGroupMigrationRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["remove_group_migration"]
@property
def list_target_projects(
self,
) -> Callable[
[vmmigration.ListTargetProjectsRequest],
Awaitable[vmmigration.ListTargetProjectsResponse],
]:
r"""Return a callable for the list target projects method over gRPC.
Lists TargetProjects in a given project.
NOTE: TargetProject is a global resource; hence the only
supported value for location is ``global``.
Returns:
Callable[[~.ListTargetProjectsRequest],
Awaitable[~.ListTargetProjectsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_target_projects" not in self._stubs:
self._stubs["list_target_projects"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/ListTargetProjects",
request_serializer=vmmigration.ListTargetProjectsRequest.serialize,
response_deserializer=vmmigration.ListTargetProjectsResponse.deserialize,
)
return self._stubs["list_target_projects"]
@property
def get_target_project(
self,
) -> Callable[
[vmmigration.GetTargetProjectRequest], Awaitable[vmmigration.TargetProject]
]:
r"""Return a callable for the get target project method over gRPC.
Gets details of a single TargetProject.
NOTE: TargetProject is a global resource; hence the only
supported value for location is ``global``.
Returns:
Callable[[~.GetTargetProjectRequest],
Awaitable[~.TargetProject]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_target_project" not in self._stubs:
self._stubs["get_target_project"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/GetTargetProject",
request_serializer=vmmigration.GetTargetProjectRequest.serialize,
response_deserializer=vmmigration.TargetProject.deserialize,
)
return self._stubs["get_target_project"]
@property
def create_target_project(
self,
) -> Callable[
[vmmigration.CreateTargetProjectRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the create target project method over gRPC.
Creates a new TargetProject in a given project.
NOTE: TargetProject is a global resource; hence the only
supported value for location is ``global``.
Returns:
Callable[[~.CreateTargetProjectRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_target_project" not in self._stubs:
self._stubs["create_target_project"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/CreateTargetProject",
request_serializer=vmmigration.CreateTargetProjectRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_target_project"]
@property
def update_target_project(
self,
) -> Callable[
[vmmigration.UpdateTargetProjectRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the update target project method over gRPC.
Updates the parameters of a single TargetProject.
NOTE: TargetProject is a global resource; hence the only
supported value for location is ``global``.
Returns:
Callable[[~.UpdateTargetProjectRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_target_project" not in self._stubs:
self._stubs["update_target_project"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/UpdateTargetProject",
request_serializer=vmmigration.UpdateTargetProjectRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["update_target_project"]
@property
def delete_target_project(
self,
) -> Callable[
[vmmigration.DeleteTargetProjectRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the delete target project method over gRPC.
Deletes a single TargetProject.
NOTE: TargetProject is a global resource; hence the only
supported value for location is ``global``.
Returns:
Callable[[~.DeleteTargetProjectRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_target_project" not in self._stubs:
self._stubs["delete_target_project"] = self.grpc_channel.unary_unary(
"/google.cloud.vmmigration.v1.VmMigration/DeleteTargetProject",
request_serializer=vmmigration.DeleteTargetProjectRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_target_project"]
    def close(self):
        """Close the underlying gRPC channel and release its resources."""
        return self.grpc_channel.close()
# Explicit public API of this module.
__all__ = ("VmMigrationGrpcAsyncIOTransport",)
| 42.271514
| 95
| 0.637323
|
4a033b8ce8450acf542173778b7b17a1e21e4348
| 10,574
|
py
|
Python
|
src/gt4sd/algorithms/generation/molgx/implementation.py
|
christofid/gt4sd-core
|
ea4257e8ff24ee7f766d7010ea5955d823eb9ad7
|
[
"MIT"
] | 57
|
2022-02-11T22:32:58.000Z
|
2022-03-31T23:17:06.000Z
|
src/gt4sd/algorithms/generation/molgx/implementation.py
|
christofid/gt4sd-core
|
ea4257e8ff24ee7f766d7010ea5955d823eb9ad7
|
[
"MIT"
] | 31
|
2022-02-11T22:43:22.000Z
|
2022-03-31T12:04:00.000Z
|
src/gt4sd/algorithms/generation/molgx/implementation.py
|
christofid/gt4sd-core
|
ea4257e8ff24ee7f766d7010ea5955d823eb9ad7
|
[
"MIT"
] | 8
|
2022-02-15T11:13:54.000Z
|
2022-03-22T13:56:13.000Z
|
#
# MIT License
#
# Copyright (c) 2022 GT4SD team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""Implementation of MolGX conditional generators."""
import logging
import os
from typing import Any, Dict, List
from ....extras import EXTRAS_ENABLED
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
if EXTRAS_ENABLED:
from AMD_Analytics.amdsdk import AMDsdk
class MolGXGenerator:
"""Interface for MolGX generator."""
        def __init__(
            self,
            resources_path: str,
            tag_name: str,
            homo_energy_value: float = -0.25,
            lumo_energy_value: float = 0.08,
            use_linear_model: bool = True,
            number_of_candidates: int = 2,
            maximum_number_of_candidates: int = 3,
            maximum_number_of_solutions: int = 3,
            maximum_number_of_nodes: int = 50000,
            beam_size: int = 2000,
            without_estimate: bool = True,
            use_specific_rings: bool = True,
            use_fragment_const: bool = False,
        ) -> None:
            """Instantiate a MolGX generator.

            Args:
                resources_path: path to the resources for model loading.
                tag_name: tag for the pretrained model.
                homo_energy_value: target HOMO energy value. Defaults to -0.25.
                lumo_energy_value: target LUMO energy value. Defaults to 0.08.
                use_linear_model: linear model usage. Defaults to True.
                number_of_candidates: number of candidates to consider. Defaults to 2.
                maximum_number_of_candidates: maximum number of candidates to consider. Defaults to 3.
                maximum_number_of_solutions: maximum number of solutions. Defaults to 3.
                maximum_number_of_nodes: maximum number of nodes in the graph exploration. Defaults to 50000.
                beam_size: size of the beam during search. Defaults to 2000.
                without_estimate: disable estimates. Defaults to True.
                use_specific_rings: flag to indicate whether specific rings are used. Defaults to True.
                use_fragment_const: using constant fragments. Defaults to False.

            Raises:
                RuntimeError: in the case extras are disabled.
            """
            # Guard again at construction time even though the class is only
            # defined when EXTRAS_ENABLED is true.
            if not EXTRAS_ENABLED:
                raise RuntimeError("Can't instantiate MolGXGenerator, extras disabled!")
            # loading artifacts (the pickle presumably holds the trained model
            # data and its associated target-property spec -- TODO confirm).
            self.resources_path = resources_path
            self.tag_name = tag_name
            self.amd = self.load_molgx(self.resources_path, self.tag_name)
            self.molecules_data, self.target_property = self.amd.LoadPickle("model")
            # algorithm parameters: stored as private attributes; the public
            # property setters rebuild the parameters dictionary on change.
            self._homo_energy_value = homo_energy_value
            self._lumo_energy_value = lumo_energy_value
            self._use_linear_model = use_linear_model
            self._number_of_candidates = number_of_candidates
            self._maximum_number_of_candidates = maximum_number_of_candidates
            self._maximum_number_of_solutions = maximum_number_of_solutions
            self._maximum_number_of_nodes = maximum_number_of_nodes
            self._beam_size = beam_size
            self._without_estimate = without_estimate
            self._use_specific_rings = use_specific_rings
            self._use_fragment_const = use_fragment_const
            # Must run after all private attributes above are assigned, since
            # it reads them through the public properties.
            self._parameters = self._create_parameters_dictionary()
@staticmethod
def load_molgx(resource_path: str, tag_name: str) -> AMDsdk:
"""Load MolGX model.
Args:
resource_path: path to the resources for model loading.
tag_name: tag for the pretrained model.
Returns:
MolGX model SDK.
"""
return AMDsdk(
dir_pickle=os.path.join(resource_path, "pickle"),
dir_data=os.path.join(resource_path, "data"),
tag_data=tag_name,
)
def _create_parameters_dictionary(self) -> Dict[str, Any]:
"""Create parameters dictionary.
Returns:
the parameters to run MolGX.
"""
self.target_property["homo"] = (self.homo_energy_value,) * 2
self.target_property["lumo"] = (self.lumo_energy_value,) * 2
parameters: Dict[str, Any] = {}
parameters["target_property"] = self.target_property
parameters["use_linear_model"] = self.use_linear_model
parameters["num_candidate"] = self.number_of_candidates
parameters["max_candidate"] = self.maximum_number_of_candidates
parameters["max_solution"] = self.maximum_number_of_solutions
parameters["max_node"] = self.maximum_number_of_nodes
parameters["beam_size"] = self.beam_size
parameters["without_estimate"] = self.without_estimate
parameters["use_specific_rings"] = self.use_specific_rings
parameters["use_fragment_const"] = self.use_fragment_const
return parameters
    # --- Algorithm-parameter properties -------------------------------------
    # Each setter rebuilds the cached parameters dictionary so that reading
    # `self.parameters` always reflects the latest values.
    @property
    def homo_energy_value(self) -> float:
        return self._homo_energy_value
    @homo_energy_value.setter
    def homo_energy_value(self, value: float) -> None:
        self._homo_energy_value = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def lumo_energy_value(self) -> float:
        return self._lumo_energy_value
    @lumo_energy_value.setter
    def lumo_energy_value(self, value: float) -> None:
        self._lumo_energy_value = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def use_linear_model(self) -> bool:
        return self._use_linear_model
    @use_linear_model.setter
    def use_linear_model(self, value: bool) -> None:
        self._use_linear_model = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def number_of_candidates(self) -> int:
        return self._number_of_candidates
    @number_of_candidates.setter
    def number_of_candidates(self, value: int) -> None:
        self._number_of_candidates = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def maximum_number_of_candidates(self) -> int:
        return self._maximum_number_of_candidates
    @maximum_number_of_candidates.setter
    def maximum_number_of_candidates(self, value: int) -> None:
        self._maximum_number_of_candidates = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def maximum_number_of_solutions(self) -> int:
        return self._maximum_number_of_solutions
    @maximum_number_of_solutions.setter
    def maximum_number_of_solutions(self, value: int) -> None:
        self._maximum_number_of_solutions = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def maximum_number_of_nodes(self) -> int:
        return self._maximum_number_of_nodes
    @maximum_number_of_nodes.setter
    def maximum_number_of_nodes(self, value: int) -> None:
        self._maximum_number_of_nodes = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def beam_size(self) -> int:
        return self._beam_size
    @beam_size.setter
    def beam_size(self, value: int) -> None:
        self._beam_size = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def without_estimate(self) -> bool:
        return self._without_estimate
    @without_estimate.setter
    def without_estimate(self, value: bool) -> None:
        self._without_estimate = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def use_specific_rings(self) -> bool:
        return self._use_specific_rings
    @use_specific_rings.setter
    def use_specific_rings(self, value: bool) -> None:
        self._use_specific_rings = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def use_fragment_const(self) -> bool:
        return self._use_fragment_const
    @use_fragment_const.setter
    def use_fragment_const(self, value: bool) -> None:
        self._use_fragment_const = value
        self.parameters = self._create_parameters_dictionary()
    @property
    def parameters(self) -> Dict[str, Any]:
        return self._parameters
    @parameters.setter
    def parameters(self, value: Dict[str, Any]) -> None:
        # Merge semantics: start from freshly derived defaults, then overlay
        # the caller-supplied overrides.
        parameters = self._create_parameters_dictionary()
        parameters.update(value)
        self._parameters = parameters
def generate(self) -> List[str]:
"""Sample random molecules.
Returns:
sampled molecule (SMILES).
"""
# generate molecules
logger.info(
f"running MolGX with the following parameters: {self.parameters}"
)
molecules_df = self.amd.GenMols(self.molecules_data, self.parameters)
logger.info("MolGX run completed")
return molecules_df["SMILES"].tolist()
else:
logger.warning("install AMD_analytcs extras to use MolGX")
| 40.358779
| 109
| 0.646302
|
4a033c56fecac4ff5580a0b5a3e39f9c4334098c
| 2,287
|
py
|
Python
|
frontera/utils/managers.py
|
bomquote/transistor-frontera
|
29174a9a04e6ea76cec13890f89fb5fca598ef2d
|
[
"BSD-3-Clause"
] | null | null | null |
frontera/utils/managers.py
|
bomquote/transistor-frontera
|
29174a9a04e6ea76cec13890f89fb5fca598ef2d
|
[
"BSD-3-Clause"
] | null | null | null |
frontera/utils/managers.py
|
bomquote/transistor-frontera
|
29174a9a04e6ea76cec13890f89fb5fca598ef2d
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from frontera.core.manager import LocalFrontierManager, SpiderFrontierManager
from .converters import BaseRequestConverter, BaseResponseConverter
class FrontierManagerWrapper:
    """Adapter between a crawler framework and a Frontera frontier manager.

    Subclasses must assign ``request_converter`` and ``response_converter``
    (instances of BaseRequestConverter / BaseResponseConverter) before
    ``start()`` is called.
    """

    def __init__(self, settings, manager=None):
        if manager is None:
            # LOCAL_MODE selects the single-process manager; otherwise a
            # distributed spider-side manager is used.
            manager = LocalFrontierManager if settings.get("LOCAL_MODE") is True else SpiderFrontierManager
        self.manager = manager.from_settings(settings)
        self.request_converter = None
        self.response_converter = None

    def start(self):
        """Validate converters are set up and start the frontier manager.

        Raises:
            NotImplementedError: if a subclass did not assign the converters.
        """
        # BUG FIX: __init__ always sets both attributes (to None), so the
        # previous hasattr() checks could never fail and the
        # NotImplementedError was unreachable. Check for None instead.
        if self.request_converter is None:
            raise NotImplementedError("Request converter should be instantiated in subclass")
        if self.response_converter is None:
            raise NotImplementedError("Response converter should be instantiated in subclass")
        assert isinstance(self.request_converter, BaseRequestConverter), 'request_converter ' \
                                                                         'must be instance of BaseRequestConverter'
        assert isinstance(self.response_converter, BaseResponseConverter), 'response_converter ' \
                                                                           'must be instance of BaseResponseConverter'
        self.manager.start()

    def stop(self):
        """Shut down the underlying frontier manager."""
        self.manager.stop()

    def get_next_requests(self, max_next_requests=0, **kwargs):
        """Fetch next frontier requests, converted to framework requests."""
        frontier_requests = self.manager.get_next_requests(max_next_requests=max_next_requests, **kwargs)
        return [self.request_converter.from_frontier(frontier_request) for frontier_request in frontier_requests]

    def page_crawled(self, response):
        """Notify the frontier that *response* was successfully crawled."""
        self.manager.page_crawled(self.response_converter.to_frontier(response))

    def links_extracted(self, request, links):
        """Report links extracted from *request*'s page to the frontier."""
        frontier_links = [self.request_converter.to_frontier(link) for link in links]
        self.manager.links_extracted(request=self.request_converter.to_frontier(request),
                                     links=frontier_links)

    def request_error(self, request, error):
        """Report a download/processing error for *request* to the frontier."""
        self.manager.request_error(request=self.request_converter.to_frontier(request),
                                   error=error)

    def finished(self):
        """Return True when the frontier has no more work to do."""
        return self.manager.finished
| 49.717391
| 118
| 0.681242
|
4a033cbcf1d32a55eaacbe9c0f6704e304e127b3
| 3,467
|
py
|
Python
|
python/paddle_serving_server/web_service.py
|
wangxicoding/Serving
|
508997bbbe88849d5272950639dc7ad62ee35467
|
[
"Apache-2.0"
] | null | null | null |
python/paddle_serving_server/web_service.py
|
wangxicoding/Serving
|
508997bbbe88849d5272950639dc7ad62ee35467
|
[
"Apache-2.0"
] | null | null | null |
python/paddle_serving_server/web_service.py
|
wangxicoding/Serving
|
508997bbbe88849d5272950639dc7ad62ee35467
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!flask/bin/python
# pylint: disable=doc-string-missing
from flask import Flask, request, abort
from multiprocessing import Pool, Process
from paddle_serving_server import OpMaker, OpSeqMaker, Server
from paddle_serving_client import Client
class WebService(object):
    """Flask-fronted wrapper around a Paddle Serving RPC server.

    Typical flow: load_model_config() -> prepare_server() -> run_server();
    subclasses may override preprocess()/postprocess() to customize the
    request/response handling.
    """

    def __init__(self, name="default_service"):
        # name is used as the URL segment of the prediction endpoint.
        self.name = name

    def load_model_config(self, model_config):
        """Remember the path of the serving model configuration directory."""
        self.model_config = model_config

    def _launch_rpc_service(self):
        """Build the reader->infer->response op pipeline and run the RPC server.

        The RPC server listens on self.port + 1 so the web front end can use
        self.port itself.
        """
        op_maker = OpMaker()
        read_op = op_maker.create('general_reader')
        general_infer_op = op_maker.create('general_infer')
        general_response_op = op_maker.create('general_response')
        op_seq_maker = OpSeqMaker()
        op_seq_maker.add_op(read_op)
        op_seq_maker.add_op(general_infer_op)
        op_seq_maker.add_op(general_response_op)
        server = Server()
        server.set_op_sequence(op_seq_maker.get_op_sequence())
        server.set_num_threads(16)
        server.load_model_config(self.model_config)
        server.prepare_server(
            workdir=self.workdir, port=self.port + 1, device=self.device)
        server.run_server()

    def prepare_server(self, workdir="", port=9393, device="cpu"):
        """Store server runtime settings; call before run_server()."""
        self.workdir = workdir
        self.port = port
        self.device = device

    def _launch_web_service(self):
        """Create an RPC client connected to the local RPC server."""
        self.client_service = Client()
        self.client_service.load_client_config(
            "{}/serving_server_conf.prototxt".format(self.model_config))
        self.client_service.connect(["0.0.0.0:{}".format(self.port + 1)])

    def get_prediction(self, request):
        """Handle a Flask prediction request and return a result dict.

        Aborts with HTTP 400 when the body is not JSON or lacks "fetch".
        """
        if not request.json:
            abort(400)
        if "fetch" not in request.json:
            abort(400)
        try:
            feed, fetch = self.preprocess(request.json, request.json["fetch"])
            if isinstance(feed, dict) and "fetch" in feed:
                del feed["fetch"]
            fetch_map = self.client_service.predict(feed=feed, fetch=fetch)
            for key in fetch_map:
                # Tensors -> plain lists so the result is JSON-serializable.
                fetch_map[key] = fetch_map[key][0].tolist()
            result = self.postprocess(
                feed=request.json, fetch=fetch, fetch_map=fetch_map)
            result = {"result": result}
        except ValueError:
            result = {"result": "Request Value Error"}
        return result

    def run_server(self):
        """Print the endpoint address and launch the RPC server process."""
        import socket
        localIP = socket.gethostbyname(socket.gethostname())
        print("web service address:")
        print("http://{}:{}/{}/prediction".format(localIP, self.port,
                                                  self.name))
        p_rpc = Process(target=self._launch_rpc_service)
        p_rpc.start()

    def preprocess(self, feed=None, fetch=None):
        """Hook for subclasses: transform the incoming JSON into feed/fetch.

        BUG FIX: the previous defaults were mutable ({} and []), which is a
        shared-state hazard; None sentinels preserve the same behavior.
        """
        if feed is None:
            feed = {}
        if fetch is None:
            fetch = []
        return feed, fetch

    def postprocess(self, feed=None, fetch=None, fetch_map=None):
        """Hook for subclasses: transform the raw fetch_map into the response."""
        if feed is None:
            feed = {}
        if fetch is None:
            fetch = []
        return fetch_map
| 38.098901
| 78
| 0.654745
|
4a033d0d8f26f0b2f7019b1d491896846c28fd98
| 5,906
|
py
|
Python
|
xenavalkyrie/samples/xena_line_test.py
|
xenadevel/PyXenaValkyrie
|
9bb1d0b058c45dc94a778fd674a679b53f03a34c
|
[
"Apache-2.0"
] | 4
|
2018-07-13T08:09:38.000Z
|
2022-02-09T01:36:13.000Z
|
xenavalkyrie/samples/xena_line_test.py
|
xenadevel/PyXenaValkyrie
|
9bb1d0b058c45dc94a778fd674a679b53f03a34c
|
[
"Apache-2.0"
] | 1
|
2019-07-31T04:56:43.000Z
|
2019-08-01T07:11:21.000Z
|
xenavalkyrie/samples/xena_line_test.py
|
xenadevel/PyXenaValkyrie
|
9bb1d0b058c45dc94a778fd674a679b53f03a34c
|
[
"Apache-2.0"
] | 3
|
2019-05-30T23:47:02.000Z
|
2022-02-04T12:32:14.000Z
|
#!/usr/bin/env python
# encoding: utf-8
""""
@author: yoram@ignissoft.com
"""
import sys
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, SUPPRESS
import logging
import time
import json
from trafficgenerator.tgn_utils import ApiType
from xenavalkyrie.xena_app import init_xena
from xenavalkyrie.xena_port import XenaPort
from xenavalkyrie.xena_stream import XenaStreamState
from xenavalkyrie.xena_statistics_view import XenaPortsStats
version = 0.3
def xena_line_test(args=None):
    """ Xena line test script. """
    # Entry point: builds a CLI with three sub-commands (save/load/run) and
    # dispatches to the handler bound via set_defaults(func=...).
    program_version = "v%s" % version
    program_version_message = '%%(prog)s %s' % (program_version)
    description = '''Run xena line test.'''
    # Setup argument parser
    parser = ArgumentParser(description=description,
                            formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument('-V', '--version', action='version', version=program_version_message)
    parser.add_argument('-l', '--log', required=False, default='xena_line_test_log.txt', metavar='file',
                        help='Log file')
    parser.add_argument('-c', '--chassis', required=True, metavar='chassis',
                        help='Xena line chassis')
    subparsers = parser.add_subparsers(help='type "xena_line_test [subcommand] -h" for help.')
    # save sub-parser
    save_convert = subparsers.add_parser('save', formatter_class=ArgumentDefaultsHelpFormatter)
    save_convert.set_defaults(func=save_config)
    # NOTE: default=SUPPRESS means the 'ports' attribute is absent from the
    # namespace when -p is not given; handlers must use getattr/hasattr.
    save_convert.add_argument('-p', '--ports', default=SUPPRESS, required=False, nargs='+', metavar='port',
                              help='Ports to save configuration from. (default: all)')
    save_convert.add_argument('-o', '--output', required=True, metavar='file',
                              help='Configuration output file.')
    # load sub-parser
    load_analyze = subparsers.add_parser('load', formatter_class=ArgumentDefaultsHelpFormatter)
    load_analyze.set_defaults(func=load_config)
    load_analyze.add_argument('-i', '--input', required=True, metavar='file',
                              help='Configuration input file.')
    # run sub-parser
    run_analyze = subparsers.add_parser('run', formatter_class=ArgumentDefaultsHelpFormatter)
    run_analyze.set_defaults(func=run_test)
    run_analyze.add_argument('-p', '--ports', required=True, nargs='+', metavar='port',
                             help='Ports to start traffic on.')
    run_analyze.add_argument('-t', '--time', required=True, type=int, metavar='int',
                             help='Run duration in seconds')
    run_analyze.add_argument('-r', '--results', required=True, metavar='file',
                             help='Results output file')
    # 'counters' also uses SUPPRESS - see note above.
    run_analyze.add_argument('-c', '--counters', required=False, default=SUPPRESS, nargs='+', metavar='counter',
                             help='List of counters to save in output file. (default: all)')
    # Process arguments
    parsed_args = parser.parse_args(args)
    parsed_args.func(parsed_args)
def save_config(parsed_args):
    """Save the configuration of the requested (or all) ports to one file.

    Args:
        parsed_args: argparse namespace with log, chassis, output and
            optionally ports (absent when -p was not given, see SUPPRESS).
    """
    chassis = connect(parsed_args.log, parsed_args.chassis)
    chassis.inventory(modules_inventory=True)
    ports_per_module = [m.ports.values() for m in chassis.modules.values()]
    inventory_ports = {p.index: p for m in ports_per_module for p in m}
    # BUG FIX: '-p' uses default=SUPPRESS, so 'ports' is missing from the
    # namespace when the flag is omitted and parsed_args.ports raised
    # AttributeError; getattr handles the absent attribute.
    ports = getattr(parsed_args, 'ports', None)
    if not ports:
        ports = inventory_ports.keys()
    # Truncate the output file once, then append each port's configuration.
    with open(parsed_args.output, 'w+') as _:
        pass
    for port in ports:
        inventory_ports[port].save_config(parsed_args.output, 'a+')
    chassis.api.disconnect()
def load_config(parsed_args):
    """Load a previously saved configuration file onto the chassis.

    Lines starting with ';' open a new port section (the port id follows the
    colon, e.g. "; Port: 0/1"); every other line is sent verbatim to the most
    recent port, except P_LPTXMODE commands which are skipped.
    """
    chassis = connect(parsed_args.log, parsed_args.chassis)
    with open(parsed_args.input) as f:
        commands = f.read().splitlines()
    # NOTE(review): assumes the file begins with a ';' port header; otherwise
    # 'port' is still None when the first command is sent - confirm format.
    port = None
    for command in commands:
        if command.startswith(';'):
            port = XenaPort(chassis, command.split(':')[1].strip())
            port.reserve(force=True)
        elif command.startswith('P_LPTXMODE'):
            # P_LPTXMODE is not settable here; skip it.
            pass
        else:
            # Dead-code fix: the previous inner "if not startswith('P_LPTXMODE')"
            # check was unreachable (already excluded by the elif above).
            port.send_command(command)
    for port in chassis.ports.values():
        port.release()
def run_test(parsed_args):
    """Run traffic on the requested ports for a fixed time and save statistics."""
    chassis = connect(parsed_args.log, parsed_args.chassis)
    for port in parsed_args.ports:
        XenaPort(chassis, port).reserve(force=True)
    for port in chassis.ports.values():
        port.clear_stats()
        for stream in port.streams.values():
            stream.set_state(XenaStreamState.enabled)
    chassis.start_traffic()
    time.sleep(parsed_args.time)
    chassis.stop_traffic()
    # Grace period so final counters settle before reading statistics.
    time.sleep(2)
    # 'counters' uses default=SUPPRESS in the parser, hence the hasattr check.
    counters = parsed_args.counters if hasattr(parsed_args, 'counters') else None
    with open(parsed_args.results, 'w+') as f:
        ports_stats = XenaPortsStats(chassis.parent)
        ports_stats.read_stats()
        if counters:
            # CSV output: one header row, then one row per port.
            f.write('port,{}\n'.format(','.join(counters)))
            for port in chassis.ports:
                line = port
                # NOTE(review): get_flat_stats() is called once per counter;
                # if it is a pure accessor it could be hoisted - confirm.
                for counter in counters:
                    line += ','
                    line += str(ports_stats.get_flat_stats()[port][counter])
                f.write('{}\n'.format(line))
        else:
            # No counter filter: dump everything as JSON.
            f.write(json.dumps(ports_stats.get_flat_stats(), indent=2))
    for port in chassis.ports.values():
        port.release()
def connect(log_file, chassis):
    """Configure DEBUG logging to stdout and *log_file*, then attach the chassis.

    Returns the chassis object added to the Xena session.
    """
    # Xena manager requires standard logger. To log all low level CLI commands set DEBUG level.
    log = logging.getLogger('log')
    log.setLevel(logging.DEBUG)
    for handler in (logging.StreamHandler(sys.stdout), logging.FileHandler(log_file)):
        log.addHandler(handler)
    # Create XenaApp object and connect to chassis.
    xm = init_xena(ApiType.socket, log, 'xena_line_test', chassis)
    return xm.session.add_chassis(chassis)
if __name__ == "__main__":
    # Forward CLI args (minus the program name); argparse handles dispatch.
    sys.exit(xena_line_test((sys.argv[1:])))
| 35.578313
| 112
| 0.657806
|
4a033d5ff3bd360fa4536dac93ce6f2aead9bc9e
| 3,292
|
py
|
Python
|
distribution_metrics/patch_coherence_loss.py
|
ariel415el/GPDM
|
50e0a3c3897eb5bbcec81c44a5668d230cdfd26c
|
[
"Apache-2.0"
] | 18
|
2021-11-16T19:09:09.000Z
|
2022-03-31T23:29:39.000Z
|
distribution_metrics/patch_coherence_loss.py
|
ariel415el/GPDM
|
50e0a3c3897eb5bbcec81c44a5668d230cdfd26c
|
[
"Apache-2.0"
] | 1
|
2022-03-30T16:36:36.000Z
|
2022-03-30T16:42:22.000Z
|
distribution_metrics/patch_coherence_loss.py
|
ariel415el/GPDM
|
50e0a3c3897eb5bbcec81c44a5668d230cdfd26c
|
[
"Apache-2.0"
] | 2
|
2022-03-30T15:17:09.000Z
|
2022-03-31T23:29:45.000Z
|
from random import randint
import torch
from distribution_metrics.patch_swd import extract_patches
def efficient_compute_distances(x, y):
    """Pairwise squared Euclidean distances via the expanded-norm identity.

    Computes ||x_i||^2 + ||y_j||^2 - 2<x_i, y_j> for every pair, which avoids
    materializing the full (n, m, d) difference tensor.
    """
    x_sq_norms = (x * x).sum(1)
    y_sq_norms = (y * y).sum(1)
    cross_terms = torch.mm(x, torch.transpose(y, 0, 1))
    return x_sq_norms[:, None] + y_sq_norms[None, :] - 2.0 * cross_terms
def compute_dists(x, y):
    """Pairwise squared Euclidean distances by direct broadcasting.

    Exact but memory-hungry: builds the full (n, m, d) difference tensor.
    """
    pairwise_diff = x[:, None] - y[None, :]
    return (pairwise_diff ** 2).sum(-1)
def dist_mat(input_patches, target_patches):
    """Squared-distance matrix computed in row batches to bound peak memory.

    Returns a float16 (n_input, n_target) matrix on the inputs' device.
    """
    n_rows = len(input_patches)
    batch = 64
    out = torch.zeros((n_rows, len(target_patches)), dtype=torch.float16).to(input_patches.device)
    full_rows = (n_rows // batch) * batch
    for start in range(0, full_rows, batch):
        out[start:start + batch] = efficient_compute_distances(input_patches[start:start + batch], target_patches)
    if full_rows != n_rows:
        # Leftover rows that did not fill a whole batch.
        out[full_rows:] = efficient_compute_distances(input_patches[full_rows:], target_patches)
    return out
def compute_patch_coherence(input_patches, target_patches, mode='detached'):
    """Coherence loss pulling each input patch toward its nearest target patch."""
    # Rows index target patches, columns index input patches.
    dist_matrix = torch.cdist(target_patches, input_patches)
    nearest_target = torch.min(dist_matrix, dim=0)[1]
    if mode == 'detached':
        # Plain MSE between every input patch and its nearest target patch.
        residual = input_patches - target_patches[nearest_target]
        return (residual ** 2).mean()
    alpha = 0.05
    # Normalize rows so target patches with no similar input patch count less.
    dist_matrix /= (torch.min(dist_matrix, dim=1)[0] + alpha)[:, None]
    return torch.min(dist_matrix, dim=0)[0].mean()
class PatchCoherentLoss(torch.nn.Module):
    """For each patch of the input image x, find its nearest-neighbor patch in
    the target y and aggregate their distances."""
    def __init__(self, patch_size=7, stride=1, mode='detached', batch_reduction='mean'):
        super(PatchCoherentLoss, self).__init__()
        self.name = f"PatchCoheren(p-{patch_size}:{stride}_M-{mode})"
        self.patch_size = patch_size
        self.stride = stride
        self.batch_reduction = batch_reduction
        self.mode = mode

    def forward(self, x, y):
        batch_size = x.shape[0]
        if self.stride > 1:
            # Random grid offset so different strided samplings are seen.
            row_shift = randint(0, self.stride - 1)
            col_shift = randint(0, self.stride - 1)
            x = x[:, :, row_shift:, col_shift:]
            y = y[:, :, row_shift:, col_shift:]
        patches_x = extract_patches(x, self.patch_size, self.stride)
        patches_y = extract_patches(y, self.patch_size, self.stride)
        per_image = [
            compute_patch_coherence(patches_x[i], patches_y[i], self.mode)
            for i in range(batch_size)
        ]
        stacked = torch.stack(per_image)
        if self.batch_reduction == 'mean':
            return stacked.mean()
        return stacked
if __name__ == '__main__':
    # Smoke/benchmark script: times 10 forward passes on random CUDA images.
    input_image = torch.randn((1, 3,250,250)).cuda()
    target_image = torch.randn((1, 3,250,250)).cuda() * 2
    from time import time
    start = time()
    # NOTE(review): mode 'batched_detached-l2' does not match the 'detached'
    # branch in compute_patch_coherence, so this exercises the non-detached
    # path - confirm that is intended.
    loss = PatchCoherentLoss(5, 3, 'batched_detached-l2').cuda()
    for i in range(10):
        loss(input_image, target_image)
    print(f"Time: {(time() - start) / 10}")
| 38.729412
| 141
| 0.639429
|
4a033d87c0993500072b9e5e9c80adf692d0b877
| 14,766
|
py
|
Python
|
pymatgen/symmetry/bandstructure.py
|
Crivella/pymatgen
|
dd3737011e76520da1347d5db75db3a3f87e520f
|
[
"MIT"
] | 1
|
2021-11-02T21:10:11.000Z
|
2021-11-02T21:10:11.000Z
|
pymatgen/symmetry/bandstructure.py
|
Crivella/pymatgen
|
dd3737011e76520da1347d5db75db3a3f87e520f
|
[
"MIT"
] | 5
|
2018-08-07T23:00:23.000Z
|
2021-01-05T22:46:23.000Z
|
pymatgen/symmetry/bandstructure.py
|
Crivella/pymatgen
|
dd3737011e76520da1347d5db75db3a3f87e520f
|
[
"MIT"
] | 6
|
2019-04-26T18:50:41.000Z
|
2020-03-29T17:58:34.000Z
|
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Provides a class for interacting with KPath classes to
generate high-symmetry k-paths using different conventions.
"""
import itertools
from warnings import warn
import networkx as nx
import numpy as np
from pymatgen.symmetry.kpath import (
KPathBase,
KPathLatimerMunro,
KPathSeek,
KPathSetyawanCurtarolo,
)
from pymatgen.electronic_structure.bandstructure import BandStructureSymmLine
from pymatgen.electronic_structure.core import Spin
__author__ = "Jason Munro"
__copyright__ = "Copyright 2020, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Jason Munro"
__email__ = "jmunro@lbl.gov"
__status__ = "Development"
__date__ = "March 2020"
class HighSymmKpath(KPathBase):
    """
    This class generates path along high symmetry lines in the
    Brillouin zone according to different conventions.
    The class is designed to be used with a specific primitive
    cell setting. The definitions for the primitive cell
    used can be found in: Computational Materials Science,
    49(2), 299-312. doi:10.1016/j.commatsci.2010.05.010.
    The space group analyzer can be used to produce the correct
    primitive structure
    (method get_primitive_standard_structure(international_monoclinic=False)).
    Ensure input structure is correct before 'get_kpoints()' method is used.
    See individual KPath classes for details on specific conventions.
    """
    def __init__(
        self,
        structure,
        has_magmoms=False,
        magmom_axis=None,
        path_type="setyawan_curtarolo",
        symprec=0.01,
        angle_tolerance=5,
        atol=1e-5,
    ):
        """
        Args:
            structure (Structure): Structure object
            has_magmoms (boolean): Whether the input structure contains
                magnetic moments as site properties with the key 'magmom.'
                Values may be in the form of 3-component vectors given in
                the basis of the input lattice vectors, in
                which case the spin axis will default to a_3, the third
                real-space lattice vector (this triggers a warning).
            magmom_axis (list or numpy array): 3-component vector specifying
                direction along which magnetic moments given as scalars
                should point. If all magnetic moments are provided as
                vectors then this argument is not used.
            path_type (string): Chooses which convention to use to generate
                the high symmetry path. Options are: 'setyawan_curtarolo', 'hinuma',
                'latimer_munro' for the Setyawan & Curtarolo, Hinuma et al., and
                Latimer & Munro conventions. Choosing 'all' will generate one path
                with points from all three conventions. Equivalent labels between
                each will also be generated. Order will always be Latimer & Munro,
                Setyawan & Curtarolo, and Hinuma et al. Lengths for each of the paths
                will also be generated and output as a list. Note for 'all' the user
                will have to alter the labels on their own for plotting.
            symprec (float): Tolerance for symmetry finding
            angle_tolerance (float): Angle tolerance for symmetry finding.
            atol (float): Absolute tolerance used to determine symmetric
                equivalence of points and lines on the BZ.
        """
        super().__init__(structure, symprec=symprec, angle_tolerance=angle_tolerance, atol=atol)
        self._path_type = path_type
        self._equiv_labels = None
        self._path_lengths = None
        self._label_index = None
        if path_type != "all":
            # Single-convention path: delegate to the matching KPath class.
            if path_type == "latimer_munro":
                self._kpath = self._get_lm_kpath(has_magmoms, magmom_axis, symprec, angle_tolerance, atol).kpath
            elif path_type == "setyawan_curtarolo":
                self._kpath = self._get_sc_kpath(symprec, angle_tolerance, atol).kpath
            elif path_type == "hinuma":
                hin_dat = self._get_hin_kpath(symprec, angle_tolerance, atol, not has_magmoms)
                self._kpath = hin_dat.kpath
                self._hin_tmat = hin_dat._tmat
        else:
            if has_magmoms:
                raise ValueError("Cannot select 'all' with non-zero magmoms.")
            lm_bs = self._get_lm_kpath(has_magmoms, magmom_axis, symprec, angle_tolerance, atol)
            rpg = lm_bs._rpg
            sc_bs = self._get_sc_kpath(symprec, angle_tolerance, atol)
            hin_bs = self._get_hin_kpath(symprec, angle_tolerance, atol, not has_magmoms)
            # Concatenate the three conventions into one numbered point set;
            # path labels become integer indices into label_index.
            index = 0
            cat_points = {}
            label_index = {}
            num_path = []
            self._path_lengths = []
            for bs in [lm_bs, sc_bs, hin_bs]:
                for key, value in enumerate(bs.kpath["kpoints"]):
                    cat_points[index] = bs.kpath["kpoints"][value]
                    label_index[index] = value
                    index += 1
                total_points_path = 0
                for seg in bs.kpath["path"]:
                    total_points_path += len(seg)
                for block in bs.kpath["path"]:
                    new_block = []
                    for label in block:
                        # Only search the indices added for this convention.
                        for ind in range(
                            len(label_index) - len(bs.kpath["kpoints"]),
                            len(label_index),
                        ):
                            if label_index[ind] == label:
                                new_block.append(ind)
                    num_path.append(new_block)
                self._path_lengths.append(total_points_path)
            self._label_index = label_index
            self._kpath = {"kpoints": cat_points, "path": num_path}
            self._equiv_labels = self._get_klabels(lm_bs, sc_bs, hin_bs, rpg)
    @property
    def path_type(self):
        """
        Returns:
            The type of kpath chosen
        """
        return self._path_type
    @property
    def label_index(self):
        """
        Returns:
            The correspondance between numbers and kpoint symbols for the
            combined kpath generated when path_type = 'all'. None otherwise.
        """
        return self._label_index
    @property
    def equiv_labels(self):
        """
        Returns:
            The correspondance between the kpoint symbols in the Latimer and
            Munro convention, Setyawan and Curtarolo, and Hinuma
            conventions respectively. Only generated when path_type = 'all'.
        """
        return self._equiv_labels
    @property
    def path_lengths(self):
        """
        Returns:
            List of lengths of the Latimer and Munro, Setyawan and Curtarolo, and Hinuma
            conventions in the combined HighSymmKpath object when path_type = 'all' respectively.
            None otherwise.
        """
        return self._path_lengths
    def _get_lm_kpath(self, has_magmoms, magmom_axis, symprec, angle_tolerance, atol):
        """
        Returns:
            Latimer and Munro k-path with labels.
        """
        return KPathLatimerMunro(self._structure, has_magmoms, magmom_axis, symprec, angle_tolerance, atol)
    def _get_sc_kpath(self, symprec, angle_tolerance, atol):
        """
        Returns:
            Setyawan and Curtarolo k-path with labels.
        """
        kpath = KPathSetyawanCurtarolo(self._structure, symprec, angle_tolerance, atol)
        # Expose the cell settings used by this convention on the instance.
        self.prim = kpath.prim
        self.conventional = kpath.conventional
        self.prim_rec = kpath.prim_rec
        self._rec_lattice = self.prim_rec
        return kpath
    def _get_hin_kpath(self, symprec, angle_tolerance, atol, tri):
        """
        Returns:
            Hinuma et al. k-path with labels.
        """
        bs = KPathSeek(self._structure, symprec, angle_tolerance, atol, tri)
        kpoints = bs.kpath["kpoints"]
        tmat = bs._tmat
        # Transform each k-point from the Hinuma basis into the basis of the
        # input structure's reciprocal lattice.
        for key in kpoints:
            kpoints[key] = np.dot(np.transpose(np.linalg.inv(tmat)), kpoints[key])
        bs.kpath["kpoints"] = kpoints
        self._rec_lattice = self._structure.lattice.reciprocal_lattice
        warn(
            "K-path from the Hinuma et al. convention has been transformed to the basis of the reciprocal lattice \
            of the input structure. Use `KPathSeek` for the path in the original author-intended basis."
        )
        return bs
    def _get_klabels(self, lm_bs, sc_bs, hin_bs, rpg):
        """
        Returns:
            labels (dict): Dictionary of equivalent labels for paths if 'all' is chosen.
                If an exact kpoint match cannot be found, symmetric equivalency will be
                searched for and indicated with an asterisk in the equivalent label.
                If an equivalent label can still not be found, or the point is not in
                the explicit kpath, its equivalent label will be set to itself in the output.
        """
        lm_path = lm_bs.kpath
        sc_path = sc_bs.kpath
        hin_path = hin_bs.kpath
        n_op = len(rpg)
        # Compare every ordered pair of conventions.
        pairs = itertools.permutations(
            [{"setyawan_curtarolo": sc_path}, {"latimer_munro": lm_path}, {"hinuma": hin_path}], r=2
        )
        labels = {"setyawan_curtarolo": {}, "latimer_munro": {}, "hinuma": {}}
        for (a, b) in pairs:
            [(a_type, a_path)] = list(a.items())
            [(b_type, b_path)] = list(b.items())
            # Count, for each point-group operation, how many of a's k-points
            # it maps onto k-points of b; the best operation aligns the sets.
            sc_count = np.zeros(n_op)
            for o_num in range(0, n_op):
                a_tr_coord = []
                for (label_a, coord_a) in a_path["kpoints"].items():
                    a_tr_coord.append(np.dot(rpg[o_num], coord_a))
                for coord_a in a_tr_coord:
                    for key, value in b_path["kpoints"].items():
                        if np.allclose(value, coord_a, atol=self._atol):
                            sc_count[o_num] += 1
                            break
            # First pass: exact matches under the best-aligning operation.
            a_to_b_labels = {}
            unlabeled = {}
            for (label_a, coord_a) in a_path["kpoints"].items():
                coord_a_t = np.dot(rpg[np.argmax(sc_count)], coord_a)
                assigned = False
                for (label_b, coord_b) in b_path["kpoints"].items():
                    if np.allclose(coord_b, coord_a_t, atol=self._atol):
                        a_to_b_labels[label_a] = label_b
                        assigned = True
                        break
                if not assigned:
                    unlabeled[label_a] = coord_a
            # Second pass: try every operation; mark such matches with ^{*},
            # and fall back to the point's own label if nothing matches.
            for (label_a, coord_a) in unlabeled.items():
                for op in rpg:
                    coord_a_t = np.dot(op, coord_a)
                    key = [
                        key
                        for key, value in b_path["kpoints"].items()
                        if np.allclose(value, coord_a_t, atol=self._atol)
                    ]
                    if key != []:
                        a_to_b_labels[label_a] = key[0][0] + "^{*}"
                        break
                if key == []:
                    a_to_b_labels[label_a] = label_a
            labels[a_type][b_type] = a_to_b_labels
        return labels
    @staticmethod
    def get_continuous_path(bandstructure):
        """
        Obtain a continous version of an inputted path using graph theory.
        This routine will attempt to add connections between nodes of
        odd-degree to ensure a Eulerian path can be formed. Initial
        k-path must be able to be converted to a connected graph. See
        npj Comput Mater 6, 112 (2020). 10.1038/s41524-020-00383-7
        for more details.
        Args:
            bandstructure (BandstructureSymmLine): BandstructureSymmLine object.
        Returns:
            bandstructure (BandstructureSymmLine): New BandstructureSymmLine object with continous path.
        """
        # Build a graph whose nodes are labeled k-points and whose edges are
        # the existing path segments (labels come in start/end pairs).
        G = nx.Graph()
        labels = []
        for point in bandstructure.kpoints:
            if point.label is not None:
                labels.append(point.label)
        plot_axis = []
        for i in range(int(len(labels) / 2)):
            G.add_edges_from([(labels[2 * i], labels[(2 * i) + 1])])
            plot_axis.append((labels[2 * i], labels[(2 * i) + 1]))
        # Eulerize duplicates edges so an Eulerian circuit exists.
        G_euler = nx.algorithms.euler.eulerize(G)
        G_euler_circuit = nx.algorithms.euler.eulerian_circuit(G_euler)
        # Map every circuit edge back to its original branch index, recording
        # whether the branch must be traversed in reverse.
        distances_map = []
        kpath_euler = []
        for edge_euler in G_euler_circuit:
            kpath_euler.append(edge_euler)
            for edge_reg in plot_axis:
                if edge_euler == edge_reg:
                    distances_map.append((plot_axis.index(edge_reg), False))
                elif edge_euler[::-1] == edge_reg:
                    distances_map.append((plot_axis.index(edge_reg), True))
        if bandstructure.is_spin_polarized:
            spins = [Spin.up, Spin.down]
        else:
            spins = [Spin.up]
        # Re-assemble kpoints, eigenvalues and projections branch by branch
        # following the Eulerian circuit (reversing slices where needed).
        new_kpoints = []
        new_bands = {spin: [np.array([]) for _ in range(bandstructure.nb_bands)] for spin in spins}
        new_projections = {spin: [[] for _ in range(bandstructure.nb_bands)] for spin in spins}
        for entry in distances_map:
            if not entry[1]:
                branch = bandstructure.branches[entry[0]]
                start = branch["start_index"]
                stop = branch["end_index"] + 1
                step = 1
            else:
                branch = bandstructure.branches[entry[0]]
                start = branch["end_index"]
                stop = branch["start_index"] - 1
                step = -1
            # kpoints
            new_kpoints += [point.frac_coords for point in bandstructure.kpoints[start:stop:step]]
            # eigenvals
            for spin in spins:
                for n, band in enumerate(bandstructure.bands[spin]):
                    new_bands[spin][n] = np.concatenate((new_bands[spin][n], band[start:stop:step]))
            # projections
            for spin in spins:
                for n, band in enumerate(bandstructure.projections[spin]):
                    new_projections[spin][n] += band[start:stop:step].tolist()
        for spin in spins:
            new_projections[spin] = np.array(new_projections[spin])
        new_labels_dict = {label: point.frac_coords for label, point in bandstructure.labels_dict.items()}
        new_bandstructure = BandStructureSymmLine(
            kpoints=new_kpoints,
            eigenvals=new_bands,
            lattice=bandstructure.lattice_rec,
            efermi=bandstructure.efermi,
            labels_dict=new_labels_dict,
            structure=bandstructure.structure,
            projections=new_projections,
        )
        return new_bandstructure
| 36.369458
| 115
| 0.587227
|
4a033dc8f079518a4f0638b47ddcdba51c025bb7
| 3,638
|
py
|
Python
|
experiment/raug/raug/utils/telegram_bot.py
|
enkiwang/Portable-Skin-Lesion-Diagnosis
|
cfd69ba5f32adb946db8c0366b1032055418e0a4
|
[
"MIT"
] | 4
|
2021-04-28T08:38:33.000Z
|
2022-02-15T19:43:25.000Z
|
raug/utils/telegram_bot.py
|
paaatcha/jedy
|
da733d3e71243c477f243e604e1c2d7bb62462d2
|
[
"MIT"
] | null | null | null |
raug/utils/telegram_bot.py
|
paaatcha/jedy
|
da733d3e71243c477f243e604e1c2d7bb62462d2
|
[
"MIT"
] | 2
|
2021-04-20T13:14:03.000Z
|
2022-01-22T18:31:26.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Author: André Pacheco
E-mail: pacheco.comp@gmail.com
This file implements a telegram bot to delivery some pieces of information about the training phase.
If you find any bug or have some suggestion, please, email me.
"""
from datetime import date
from telegram.ext import Updater, CommandHandler
import datetime
class TelegramBot:
    """Telegram messenger that reports training progress.

    Wraps a python-telegram-bot ``Updater`` so a training loop can push
    status messages to a chat and react to the slash-commands /info,
    /stop, /best, /epoch and /goodbot.
    To know more about how the telegram bot works go to:
    https://github.com/python-telegram-bot/python-telegram-bot
    """
    def __init__(self, chat_id, token="821262177:AAFwwfIc7tkJuwYipyD89hGyF-qyJmeX6a4", model_name="CNN"):
        """
        Class constructor.
        :param chat_id (string): the id of the chat the bot sends messages to
        :param token (string, optional): the bot token. The default is the Jedy-Bot.
            NOTE(review): a hard-coded token is a leaked credential; consider
            loading it from the environment instead — confirm before changing.
        :param model_name (string, optional): the model's name, ex: ResNet. Default is CNN
        """
        self.token = token
        self.chat_id = chat_id
        self.model_name = model_name
        # Flags/messages the training loop polls to decide what to send.
        self.info = False
        self.epoch_info = "Hey, it's running the 1st epoch yet!"
        self.best_info = "Calm down! Wait to finish the 1st epoch to get the best performance so far."
    def start_bot (self):
        """
        Start polling, register the command handlers and announce that the
        training is about to begin. The handlers only flip attributes /
        send stored messages; the training loop reads ``self.info`` etc.
        """
        self.updater = Updater(token=self.token)
        self.updater.start_polling()
        # Register every chat command with its callback on the dispatcher.
        dispatcher = self.updater.dispatcher
        for command, callback in (
            ("info", self.get_info),
            ("stop", self.stop_info),
            ("best", self.get_best_info),
            ("epoch", self.get_epoch_info),
            ("goodbot", self.get_good_bot),
        ):
            dispatcher.add_handler(CommandHandler(command, callback))
        now = datetime.datetime.now().strftime("%d/%m/%Y -- %H:%M")
        self.updater.bot.send_message(chat_id=self.chat_id,
                                      text="--------\nHello, the training phase of your {} model is about to start!\nDate and time: {}\n\nSend /info to check the status every epoch. By default, I won't send it except you ask.\n\nSend /stop to stop to check the status.\n\nSend /best to get the best performance so far.\n\nSend /epoch to get the current epoch so far.\n--------\n".format(self.model_name, now))
    def send_msg (self, msg):
        """Send *msg* to the configured chat."""
        self.updater.bot.send_message(chat_id=self.chat_id, text=msg)
    def get_info (self, update, context):
        """Handler for /info: enable per-epoch status messages."""
        self.info = True
    def stop_info (self, update, context):
        """Handler for /stop: disable per-epoch status messages."""
        self.info = False
    def get_best_info (self, update, context):
        """Handler for /best: send the best performance recorded so far."""
        self.send_msg(self.best_info)
    def get_epoch_info (self, update, context):
        """Handler for /epoch: send the current epoch status."""
        self.send_msg(self.epoch_info)
    def get_good_bot (self, update, context):
        """Handler for /goodbot: reply with a joke."""
        self.send_msg("Uhuuuul! Now can you pay me a coffee?")
    def stop_bot (self):
        """Stop the updater's polling loop."""
        self.updater.stop()
| 37.895833
| 409
| 0.669874
|
4a033f61b66b57915b211444a8e6764f277b6cea
| 107
|
py
|
Python
|
djongo/dynamic_formsets/apps.py
|
tanguy-s/djongo
|
f64c313628de52b836a979aab3a4c2d8638552ab
|
[
"BSD-3-Clause"
] | null | null | null |
djongo/dynamic_formsets/apps.py
|
tanguy-s/djongo
|
f64c313628de52b836a979aab3a4c2d8638552ab
|
[
"BSD-3-Clause"
] | null | null | null |
djongo/dynamic_formsets/apps.py
|
tanguy-s/djongo
|
f64c313628de52b836a979aab3a4c2d8638552ab
|
[
"BSD-3-Clause"
] | null | null | null |
from django.apps import AppConfig
class DynamicFormsetsConfig(AppConfig):
    """Django application configuration for the ``dynamic_formsets`` app."""
    name = 'dynamic_formsets'
| 15.285714
| 39
| 0.785047
|
4a033fa4fdfaba2d308fb34a9f0e7cf6d1b7925d
| 7,069
|
py
|
Python
|
test/test_prettyxml.py
|
tonyfast/rdflib
|
e4fe0fdbd4de7e1183418f302315b51a14602e03
|
[
"BSD-3-Clause"
] | 2
|
2021-02-06T17:36:05.000Z
|
2021-04-21T07:33:39.000Z
|
test/test_prettyxml.py
|
pragya16067/rdflib
|
6b5bd37ccc67bdec62d2e36d174eb7933b5020b2
|
[
"BSD-3-Clause"
] | 2
|
2020-05-09T15:03:57.000Z
|
2020-05-30T10:51:40.000Z
|
test/test_prettyxml.py
|
pragya16067/rdflib
|
6b5bd37ccc67bdec62d2e36d174eb7933b5020b2
|
[
"BSD-3-Clause"
] | 4
|
2020-05-08T08:36:19.000Z
|
2020-05-28T07:23:23.000Z
|
# -*- coding: UTF-8 -*-
from rdflib.term import URIRef, BNode, Literal
from rdflib.namespace import RDF, RDFS
from io import BytesIO
from rdflib.plugins.serializers.rdfxml import PrettyXMLSerializer
from rdflib.graph import ConjunctiveGraph
class SerializerTestBase(object):
    """Base class: parse ``testContent`` and verify it survives a
    round-trip through ``self.serializer`` (both set by subclasses)."""

    # Number of round-trips performed by test_multiple.
    repeats = 8

    def setup(self):
        """Parse the fixture content into ``self.sourceGraph``."""
        parsed = ConjunctiveGraph()
        parsed.parse(data=self.testContent, format=self.testContentFormat)
        self.sourceGraph = parsed

    def test_serialize_and_reparse(self):
        """Serialize the source graph, parse the result back and compare."""
        roundtripped = serialize_and_load(self.sourceGraph, self.serializer)
        _assert_equal_graphs(self.sourceGraph, roundtripped)

    def test_multiple(self):
        """Repeats ``test_serialize`` ``self.repeats`` times, to reduce sucess based on in-memory ordering."""
        for _ in range(self.repeats):
            self.test_serialize_and_reparse()
    # test_multiple.slowtest=True  # not really slow?
def _assert_equal_graphs(g1, g2):
    """Assert *g1* and *g2* hold the same triples, treating all bnodes as equal."""
    assert len(g1) == len(g2), "Serialized graph not same size as source graph."
    # Subtract bnode-normalized copies from each other; both leftovers must be empty.
    only_in_first = _mangled_copy(g1)
    only_in_first -= _mangled_copy(g2)
    only_in_second = _mangled_copy(g2)
    only_in_second -= _mangled_copy(g1)
    assert len(only_in_first) == 0, "Source graph larger than serialized graph."
    assert len(only_in_second) == 0, "Serialized graph larger than source graph."
# Shared placeholder bnode used by _mangled_copy so all bnodes compare equal.
_blank = BNode()
def _mangled_copy(g):
    "Makes a copy of the graph, replacing all bnodes with the bnode ``_blank``."
    mangled = ConjunctiveGraph()
    for triple in g:
        # Substitute the shared placeholder for every BNode term.
        mangled.add(
            tuple(_blank if isinstance(term, BNode) else term for term in triple)
        )
    return mangled
def serialize(sourceGraph, makeSerializer, getValue=True, extra_args=None):
    """Serialize *sourceGraph* with a serializer built by *makeSerializer*.

    :param sourceGraph: graph to serialize.
    :param makeSerializer: callable returning a serializer for the graph.
    :param getValue: when True return the serialized bytes, otherwise the
        raw BytesIO stream (positioned at the end).
    :param extra_args: optional dict of keyword args forwarded to
        ``serializer.serialize`` (was a mutable default ``{}``; now None).
    """
    serializer = makeSerializer(sourceGraph)
    stream = BytesIO()
    serializer.serialize(stream, **(extra_args or {}))
    # The original ``getValue and stream.getvalue() or stream`` returned the
    # stream object whenever the serialized output was empty bytes (falsy);
    # a real conditional expression fixes that edge case.
    return stream.getvalue() if getValue else stream
def serialize_and_load(sourceGraph, makeSerializer):
    """Round-trip *sourceGraph*: serialize it, then parse the bytes back."""
    stream = serialize(sourceGraph, makeSerializer, False)
    stream.seek(0)
    roundtripped = ConjunctiveGraph()
    roundtripped.load(stream)
    return roundtripped
class TestPrettyXmlSerializer(SerializerTestBase):
    """Round-trip and output-fragment tests for PrettyXMLSerializer."""
    serializer = PrettyXMLSerializer
    # N3 fixture covering OWL restrictions, language-tagged and typed
    # literals, and a cycle of blank nodes.
    testContent = """
        @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
        @prefix owl: <http://www.w3.org/2002/07/owl#> .
        @prefix : <http://example.org/model/test#> .
        :value rdfs:domain :Test .
        :Test rdfs:subClassOf
            [ a owl:Restriction;
                owl:onProperty :value ],
            [ a owl:Restriction;
                owl:onProperty :name ] .
        <http://example.org/data/a> a :Test;
            rdfs:seeAlso <http://example.org/data/b>;
            :value "A" .
        <http://example.org/data/b>
            :name "Bee"@en, "Be"@sv;
            :value "B" .
        <http://example.org/data/c> a rdfs:Resource;
            rdfs:seeAlso <http://example.org/data/c>;
            :value 3 .
        <http://example.org/data/d> a rdfs:Resource;
            rdfs:seeAlso <http://example.org/data/c> ;
            rdfs:seeAlso <http://example.org/data/b> ;
            rdfs:seeAlso <http://example.org/data/a> .
        _:bnode1 a :BNode;
            rdfs:seeAlso _:bnode2 .
        _:bnode2 a :BNode ;
            rdfs:seeAlso _:bnode3 .
        _:bnode3 a :BNode ;
            rdfs:seeAlso _:bnode2 .
        """
    testContentFormat = "n3"
    def test_result_fragments(self):
        """Check that expected XML fragments appear in the serialized output."""
        rdfXml = serialize(self.sourceGraph, self.serializer)
        assert (
            '<Test rdf:about="http://example.org/data/a">'.encode("latin-1") in rdfXml
        )
        assert (
            '<rdf:Description rdf:about="http://example.org/data/b">'.encode("latin-1")
            in rdfXml
        )
        assert '<name xml:lang="en">Bee</name>'.encode("latin-1") in rdfXml
        assert (
            '<value rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">3</value>'.encode(
                "latin-1"
            )
            in rdfXml
        )
        assert (
            '<BNode rdf:nodeID="'.encode("latin-1") in rdfXml
        ), "expected one identified bnode in serialized graph"
        # onlyBNodesMsg = "expected only inlined subClassOf-bnodes in serialized graph"
        # assert '<rdfs:subClassOf>' in rdfXml, onlyBNodesMsg
        # assert not '<rdfs:subClassOf ' in rdfXml, onlyBNodesMsg
    def test_result_fragments_with_base(self):
        """Same fragments, but URIs must be relativized against the xml base."""
        rdfXml = serialize(
            self.sourceGraph,
            self.serializer,
            extra_args={
                "base": "http://example.org/",
                "xml_base": "http://example.org/",
            },
        )
        assert 'xml:base="http://example.org/"'.encode("latin-1") in rdfXml
        assert '<Test rdf:about="data/a">'.encode("latin-1") in rdfXml
        assert '<rdf:Description rdf:about="data/b">'.encode("latin-1") in rdfXml
        assert (
            '<value rdf:datatype="http://www.w3.org/2001/XMLSchema#integer">3</value>'.encode(
                "latin-1"
            )
            in rdfXml
        )
        assert (
            '<BNode rdf:nodeID="'.encode("latin-1") in rdfXml
        ), "expected one identified bnode in serialized graph"
    def test_subClassOf_objects(self):
        """seeAlso/subClassOf objects must round-trip as URIRefs or BNodes."""
        reparsedGraph = serialize_and_load(self.sourceGraph, self.serializer)
        _assert_expected_object_types_for_predicates(
            reparsedGraph, [RDFS.seeAlso, RDFS.subClassOf], [URIRef, BNode]
        )
    def test_pretty_xmlliteral(self):
        """Well-formed XML literals serialize with rdf:parseType="Literal"."""
        # given:
        g = ConjunctiveGraph()
        g.add(
            (
                BNode(),
                RDF.value,
                Literal(
                    u"""<p xmlns="http://www.w3.org/1999/xhtml">See also <a href="#aring">Å</a></p>""",
                    datatype=RDF.XMLLiteral,
                ),
            )
        )
        # when:
        xmlrepr = g.serialize(format="pretty-xml")
        # then:
        assert (
            u"""<rdf:value rdf:parseType="Literal"><p xmlns="http://www.w3.org/1999/xhtml">See also <a href="#aring">Å</a></p></rdf:value>""".encode(
                "utf-8"
            )
            in xmlrepr
        )
    def test_pretty_broken_xmlliteral(self):
        """Malformed XML literals fall back to a plain datatyped literal."""
        # given:
        g = ConjunctiveGraph()
        g.add((BNode(), RDF.value, Literal(u"""<p """, datatype=RDF.XMLLiteral)))
        # when:
        xmlrepr = g.serialize(format="pretty-xml")
        # then:
        assert (
            u"""<rdf:value rdf:datatype="http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral"><p """.encode(
                "utf-8"
            )
            in xmlrepr
        )
def _assert_expected_object_types_for_predicates(graph, predicates, types):
for s, p, o in graph:
if p in predicates:
someTrue = [isinstance(o, t) for t in types]
assert (
True in someTrue
), "Bad type %s for object when predicate is <%s>." % (type(o), p)
| 32.278539
| 149
| 0.577309
|
4a03418eaa208cdff1026590d19e51c2c88eb367
| 6,483
|
py
|
Python
|
pysteps/verification/plots.py
|
AFansGH/pysteps
|
ee5cd10ed9058808f934cb1992913055fbcbb3d2
|
[
"BSD-3-Clause"
] | null | null | null |
pysteps/verification/plots.py
|
AFansGH/pysteps
|
ee5cd10ed9058808f934cb1992913055fbcbb3d2
|
[
"BSD-3-Clause"
] | null | null | null |
pysteps/verification/plots.py
|
AFansGH/pysteps
|
ee5cd10ed9058808f934cb1992913055fbcbb3d2
|
[
"BSD-3-Clause"
] | null | null | null |
"""
pysteps.verification.plots
==========================
Methods for plotting verification results.
.. autosummary::
:toctree: ../generated/
plot_intensityscale
plot_rankhist
plot_reldiag
plot_ROC
"""
from matplotlib import cm
import matplotlib.pylab as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import numpy as np
from . import ensscores, probscores, spatialscores
def plot_intensityscale(intscale, fig=None, vminmax=None, kmperpixel=None, unit=None):
    """Plot an intensity-scale verification table with a color bar and axis
    labels.
    Parameters
    ----------
    intscale : dict
        The intensity-scale object initialized with
        :py:func:`pysteps.verification.spatialscores.intensity_scale_init`
        and accumulated with
        :py:func:`pysteps.verification.spatialscores.intensity_scale_accum`.
    fig : matplotlib.figure.Figure, optional
        The figure object to use for plotting. If not supplied, a new
        figure is created.
    vminmax : tuple of floats, optional
        The minimum and maximum values for the intensity-scale skill score
        in the plot.
        Defaults to the data extent.
    kmperpixel : float, optional
        The conversion factor from pixels to kilometers. If supplied,
        the unit of the shown spatial scales is km instead of pixels.
    unit : string, optional
        The unit of the intensity thresholds.
    """
    if fig is None:
        fig = plt.figure()
    ax = fig.gca()
    # Skill-score matrix; rows correspond to scales, columns to thresholds.
    SS = spatialscores.intensity_scale_compute(intscale)
    vmin = vmax = None
    if vminmax is not None:
        vmin = np.min(vminmax)
        vmax = np.max(vminmax)
    im = ax.imshow(SS, vmin=vmin, vmax=vmax, interpolation="nearest", cmap=cm.jet)
    cb = fig.colorbar(im)
    cb.set_label(intscale["label"])
    if unit is None:
        ax.set_xlabel("Intensity threshold")
    else:
        ax.set_xlabel("Intensity threshold [%s]" % unit)
    if kmperpixel is None:
        ax.set_ylabel("Spatial scale [pixels]")
    else:
        ax.set_ylabel("Spatial scale [km]")
    # One tick per threshold column and per scale row.
    ax.set_xticks(np.arange(SS.shape[1]))
    ax.set_xticklabels(intscale["thrs"])
    ax.set_yticks(np.arange(SS.shape[0]))
    if kmperpixel is None:
        scales = intscale["scales"]
    else:
        # Convert the displayed scales from pixels to kilometers.
        scales = np.array(intscale["scales"]) * kmperpixel
    ax.set_yticklabels(scales)
def plot_rankhist(rankhist, ax=None):
    """Plot a rank histogram.
    Parameters
    ----------
    rankhist : dict
        A rank histogram object created by ensscores.rankhist_init.
    ax : axis handle, optional
        Axis handle for the figure. If set to None, the handle is taken from
        the current figure (matplotlib.pylab.gca()).
    """
    ax = plt.gca() if ax is None else ax
    freqs = ensscores.rankhist_compute(rankhist)
    edges = np.linspace(0, 1, rankhist["num_ens_members"] + 1)
    n_bars = len(edges)
    bar_width = 1.0 / n_bars
    ax.bar(edges, freqs, width=bar_width, align="edge", color="gray", edgecolor="black")
    # Label every third bar with its (1-based) rank.
    ax.set_xticks(edges[::3] + (edges[1] - edges[0]))
    ax.set_xticklabels(np.arange(1, n_bars)[::3])
    ax.set_xlim(0, 1 + bar_width)
    ax.set_ylim(0, np.max(freqs) * 1.25)
    ax.set_xlabel("Rank of observation (among ensemble members)")
    ax.set_ylabel("Relative frequency")
    ax.grid(True, axis="y", ls=":")
def plot_reldiag(reldiag, ax=None):
    """Plot a reliability diagram with an inset sharpness diagram.
    Parameters
    ----------
    reldiag : dict
        A reldiag object created by probscores.reldiag_init.
    ax : axis handle, optional
        Axis handle for the figure. If set to None, the handle is taken from
        the current figure (matplotlib.pylab.gca()).
    """
    if ax is None:
        ax = plt.gca()
    # Plot the reliability diagram: observed frequency vs. forecast probability.
    f = 1.0 * reldiag["Y_sum"] / reldiag["num_idx"]
    r = 1.0 * reldiag["X_sum"] / reldiag["num_idx"]
    # Bins with zero samples yield non-finite values; drop them from the curve.
    mask = np.logical_and(np.isfinite(r), np.isfinite(f))
    ax.plot(r[mask], f[mask], "kD-")
    ax.plot([0, 1], [0, 1], "k--")
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    ax.grid(True, ls=":")
    ax.set_xlabel("Forecast probability")
    ax.set_ylabel("Observed relative frequency")
    # Plot sharpness diagram into an inset figure.
    iax = inset_axes(ax, width="35%", height="20%", loc=4, borderpad=3.5)
    bw = reldiag["bin_edges"][2] - reldiag["bin_edges"][1]
    iax.bar(
        reldiag["bin_edges"][:-1],
        reldiag["sample_size"],
        width=bw,
        align="edge",
        color="gray",
        edgecolor="black",
    )
    # Matplotlib 3.3 renamed the ``basey`` keyword to ``base`` and removed
    # ``basey`` entirely in 3.5; try the modern spelling first and fall back
    # for older matplotlib versions.
    try:
        iax.set_yscale("log", base=10)
    except (TypeError, ValueError):
        iax.set_yscale("log", basey=10)
    iax.set_xticks(reldiag["bin_edges"])
    iax.set_xticklabels(["%.1f" % max(v, 1e-6) for v in reldiag["bin_edges"]])
    yt_min = int(max(np.floor(np.log10(min(reldiag["sample_size"][:-1]))), 1))
    yt_max = int(np.ceil(np.log10(max(reldiag["sample_size"][:-1]))))
    t = [pow(10.0, k) for k in range(yt_min, yt_max)]
    iax.set_yticks([int(t_) for t_ in t])
    iax.set_xlim(0.0, 1.0)
    iax.set_ylim(t[0], 5 * t[-1])
    iax.set_ylabel("log10(samples)")
    iax.yaxis.tick_right()
    iax.yaxis.set_label_position("right")
    iax.tick_params(axis="both", which="major", labelsize=6)
def plot_ROC(ROC, ax=None, opt_prob_thr=False):
    """Plot a ROC curve.
    Parameters
    ----------
    ROC : dict
        A ROC curve object created by probscores.ROC_curve_init.
    ax : axis handle, optional
        Axis handle for the figure. If set to None, the handle is taken from
        the current figure (matplotlib.pylab.gca()).
    opt_prob_thr : bool, optional
        If set to True, plot the optimal probability threshold that maximizes
        the difference between the hit rate (POD) and false alarm rate (POFD).
    """
    ax = plt.gca() if ax is None else ax
    pofd, pod, _area = probscores.ROC_curve_compute(ROC, compute_area=True)
    prob_thrs = ROC["prob_thrs"]
    # Diagonal reference line (no-skill forecast).
    ax.plot([0, 1], [0, 1], "k--")
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    ax.set_xlabel("False alarm rate (POFD)")
    ax.set_ylabel("Probability of detection (POD)")
    ax.grid(True, ls=":")
    ax.plot(pofd, pod, "kD-")
    if opt_prob_thr:
        # Index of the threshold maximizing POD - POFD (Peirce skill score).
        best_idx = np.argmax(np.array(pod) - np.array(pofd))
        # NOTE(review): ``facecolors=None`` means "default facecolor" in
        # matplotlib; the string 'none' would make a hollow marker — confirm
        # which was intended before changing.
        ax.scatter(
            [pofd[best_idx]],
            [pod[best_idx]],
            c="r",
            s=150,
            facecolors=None,
            edgecolors="r",
        )
    # Annotate intermediate thresholds next to their curve points.
    for thr, x, y in zip(prob_thrs, pofd, pod):
        if 0.05 < thr < 0.95:
            ax.text(x + 0.02, y - 0.02, "%.2f" % thr, fontsize=7)
| 29.60274
| 86
| 0.622089
|
4a03446f7d00d65d3f64cdae69d1c8264fd1ec63
| 1,136
|
py
|
Python
|
symcon/querysets.py
|
lociii/symcon-index
|
cf5882778bab9f32a1eeccb14e9a79db30a3d1d7
|
[
"MIT"
] | null | null | null |
symcon/querysets.py
|
lociii/symcon-index
|
cf5882778bab9f32a1eeccb14e9a79db30a3d1d7
|
[
"MIT"
] | 6
|
2016-12-30T19:52:31.000Z
|
2018-08-23T18:48:09.000Z
|
symcon/querysets.py
|
lociii/symcon-index
|
cf5882778bab9f32a1eeccb14e9a79db30a3d1d7
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
from django.db import models
from django.db.models.query_utils import Q
class LibraryQuerySet(models.QuerySet):
    """Query helpers for the Library model."""

    def last_updated(self):
        """Libraries ordered by most recently updated repository first."""
        return self.order_by('-repository__last_update')

    def search(self, search_term):
        """Case-insensitive search across branch/module names, titles,
        descriptions and branch tags, with related rows prefetched."""
        related = (
            'repository', 'librarybranch_set__branch',
            'librarybranch_set__module_set__modulealias_set',
            'librarybranch_set__module_set__moduleparentrequirement_set',
            'librarybranch_set__module_set__modulechildrequirement_set',
            'librarybranch_set__module_set__moduleimplementedrequirement_set',
        )
        condition = (
            Q(librarybranch__name__icontains=search_term) |
            Q(librarybranch__module__name__icontains=search_term) |
            Q(librarybranch__title__icontains=search_term) |
            Q(librarybranch__module__title__icontains=search_term) |
            Q(librarybranch__description__icontains=search_term) |
            Q(librarybranch__module__description__icontains=search_term) |
            Q(librarybranch__librarybranchtag__name__icontains=search_term)
        )
        # distinct() is required because the tag/branch joins can multiply rows.
        return self.prefetch_related(*related).filter(condition).distinct()
| 47.333333
| 87
| 0.738556
|
4a03451cb78532bdee875ac3887fc267e8597c6a
| 7,471
|
py
|
Python
|
server/server/organizations/models.py
|
omert-visiblerisk/connective
|
c6b81700b35e2d8355ad1535b182093595fff8b7
|
[
"MIT"
] | null | null | null |
server/server/organizations/models.py
|
omert-visiblerisk/connective
|
c6b81700b35e2d8355ad1535b182093595fff8b7
|
[
"MIT"
] | null | null | null |
server/server/organizations/models.py
|
omert-visiblerisk/connective
|
c6b81700b35e2d8355ad1535b182093595fff8b7
|
[
"MIT"
] | null | null | null |
from django.core.validators import RegexValidator
from django.db import models
from django.utils.translation import gettext_lazy as _
from taggit.managers import TaggableManager
from server.schools.models import School
from server.users.models import Consumer, Instructor, User
from server.utils.model_fields import random_slug
class SchoolActivityGroupManager(models.Manager):
    """Custom manager for :class:`SchoolActivityGroup`."""

    def get_activity_container_only_group(self, activity_group):
        """Return the CONTAINER_ONLY group belonging to the same activity
        order as *activity_group*, or ``None`` if there is none.

        Uses ``first()`` so only one query is issued; the original
        ``exists()`` + indexing pattern hit the database twice and returned
        ``None`` only implicitly.
        """
        return self.filter(
            activity_order=activity_group.activity_order,
            group_type=SchoolActivityGroup.GroupTypes.CONTAINER_ONLY,
        ).first()
class Organization(models.Model):
    """An organization that offers activities; holds contact, address and
    descriptive statistics fields."""
    # Random public identifier used instead of the numeric PK.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    organization_number = models.CharField(max_length=10, unique=True, null=True)
    email = models.EmailField()
    description = models.CharField(max_length=250)
    website_url = models.URLField(null=True, blank=True)
    name = models.CharField(max_length=50)
    goal = models.CharField(max_length=250, null=True, blank=True)
    year_founded = models.CharField(max_length=4, null=True, blank=True)
    status = models.CharField(max_length=50, null=True, blank=True)
    target_audience = models.JSONField(null=True, blank=True)
    number_of_employees = models.PositiveIntegerField(null=True, blank=True)
    number_of_members = models.PositiveIntegerField(null=True, blank=True)
    number_of_volunteers = models.PositiveIntegerField(null=True, blank=True)
    # Geographic coordinates (longitude / latitude).
    location_lon = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        null=True,
        blank=True,
    )
    location_lat = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        null=True,
        blank=True,
    )
    address_city = models.CharField(max_length=150, null=True, blank=True)
    address_street = models.CharField(max_length=150, null=True, blank=True)
    address_house_num = models.CharField(max_length=4, null=True, blank=True)
    address_zipcode = models.CharField(max_length=9, null=True, blank=True)
    cities = models.JSONField(null=True, blank=True)
    districts = models.JSONField(null=True, blank=True)
    union_type = models.CharField(max_length=50, null=True, blank=True)
    def __str__(self):
        return f"{self.name} | {self.slug}"
class Activity(models.Model):
    """An activity/program offered to schools, optionally linked to the
    organization running it."""
    class Domain(models.TextChoices):
        # Closed set of activity domains exposed via ``choices``.
        SCIENCE_AND_TECH = "SCIENCE_AND_TECH", "Science And Tech"
        EXTREME_SPORTS = "EXTREME_SPORTS", "Extreme Sports"
        FIELD = "FIELD", "Field"
    tags = TaggableManager(blank=True)
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    name = models.CharField(max_length=35)
    target_audience = models.JSONField()
    domain = models.CharField(
        max_length=55, null=True, blank=True, choices=Domain.choices
    )
    # NOTE(review): "originization" is a typo for "organization"; renaming it
    # would change the DB column and serialized API field, so it is kept.
    originization = models.ForeignKey(
        Organization, on_delete=models.SET_NULL, null=True, blank=True
    )
    activity_website_url = models.URLField(null=True, blank=True)
    activity_email = models.EmailField(null=True, blank=True)
    description = models.CharField(max_length=550, default="")
    contact_name = models.CharField(max_length=60, default="")
    logo = models.ImageField(blank=True, null=True)
    phone_number = models.CharField(
        blank=True,
        max_length=15,
        validators=[
            RegexValidator(
                regex=r"^\d{9,15}$",
                message=_("phone number must be between 9-15 digits"),
            )
        ],
    )
    def __str__(self):
        try:
            return f"{self.name} | {self.slug} | {self.originization.name}"
        except AttributeError:
            # originization can be NULL, making ``.name`` raise AttributeError.
            return f"{self.name} | {self.slug}"
class ActivityMedia(models.Model):
    """Image or video attachment belonging to an Activity."""
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    name = models.CharField(max_length=40, null=True, blank=True)
    image_url = models.ImageField(blank=True, null=True)
    video_url = models.URLField(blank=True, null=True)
    activity = models.ForeignKey(
        Activity,
        on_delete=models.CASCADE,
        related_name="rich_media",
    )
    def __str__(self):
        return f"{self.name} | {self.slug} | {self.activity.name}"
class OrganizationMember(models.Model):
    """Links a user account to the single organization it belongs to."""
    user = models.OneToOneField(
        User, on_delete=models.CASCADE, related_name="organization_member"
    )
    organization = models.ForeignKey(
        Organization,
        on_delete=models.CASCADE,
        related_name="organization_member",
    )
    def __str__(self):
        return f"{self.user.email} | {self.organization.name}"
class SchoolActivityOrder(models.Model):
    """A school's request to run a given Activity; tracks its approval
    lifecycle. Unique per (school, activity) pair."""
    class Meta:
        constraints = [
            models.UniqueConstraint(fields=["school", "activity"], name="unique_order")
        ]
    class Status(models.TextChoices):
        CANCELLED = "CANCELLED", "Cancelled"
        PENDING_ADMIN_APPROVAL = "PENDING_ADMIN_APPROVAL", "Pending Admin Approval"
        APPROVED = "APPROVED", "Approved"
    # New orders start out waiting for an administrator's approval.
    base_status = Status.PENDING_ADMIN_APPROVAL
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    requested_by = models.ForeignKey(
        User,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="requested_orders",
    )
    last_updated_by = models.ForeignKey(
        User,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="last_updated_by_me_orders",
    )
    school = models.ForeignKey(
        School, on_delete=models.CASCADE, related_name="school_activity_orders"
    )
    activity = models.ForeignKey(
        Activity, on_delete=models.CASCADE, related_name="school_activity_orders"
    )
    status = models.CharField(
        _("status"), max_length=50, choices=Status.choices, default=base_status
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        return f"{self.activity} | {self.school} | {self.status} | {self.pk}"
class SchoolActivityGroup(models.Model):
    """A group of consumers attending an activity order, optionally managed
    by an instructor."""
    class GroupTypes(models.TextChoices):
        CONTAINER_ONLY = "CONTAINER_ONLY", "Container Only"
        DISABLED_CONSUMERS = "DISABLED_CONSUMERS", "Disabled Consumers"
        DEFAULT = "DEFAULT", "Default"
    # Custom manager adding container-only lookup helpers.
    objects = SchoolActivityGroupManager()
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    activity_order = models.ForeignKey(
        SchoolActivityOrder, on_delete=models.CASCADE, related_name="activity_groups"
    )
    name = models.CharField(_("name"), max_length=50)
    description = models.CharField(_("description"), max_length=550)
    consumers = models.ManyToManyField(
        Consumer,
        related_name="activity_groups",
        blank=True,
    )
    group_type = models.CharField(
        _("group type"),
        max_length=50,
        choices=GroupTypes.choices,
        default=GroupTypes.DEFAULT,
    )
    instructor = models.ForeignKey(
        Instructor,
        on_delete=models.SET_NULL,
        related_name="managed_activity_groups",
        null=True,
        blank=True,
    )
    def __str__(self):
        # NOTE(review): this multi-line f-string keeps its surrounding
        # newlines/indentation in the rendered value — presumably intentional
        # for display; confirm before "fixing".
        return f"""
        {self.name} : {self.group_type} : {self.slug} :
        {self.activity_order.activity.name} : {self.activity_order.school.name}
        """
| 35.407583
| 87
| 0.683576
|
4a03463fffa2f8fcb56821b93766fc637909c3fa
| 3,365
|
py
|
Python
|
tests/aiohttp/schema.py
|
TheVinhLuong102/Strawberry
|
3c442dc19d17bc55c4e26de1db7a9eedc0a228f5
|
[
"MIT"
] | 2,062
|
2019-04-07T17:47:30.000Z
|
2022-03-31T01:54:16.000Z
|
tests/aiohttp/schema.py
|
TheVinhLuong102/Strawberry
|
3c442dc19d17bc55c4e26de1db7a9eedc0a228f5
|
[
"MIT"
] | 1,582
|
2019-04-07T18:31:33.000Z
|
2022-03-31T18:32:13.000Z
|
tests/aiohttp/schema.py
|
TheVinhLuong102/Strawberry
|
3c442dc19d17bc55c4e26de1db7a9eedc0a228f5
|
[
"MIT"
] | 303
|
2019-04-13T08:44:40.000Z
|
2022-03-29T09:54:41.000Z
|
import asyncio
import typing
from enum import Enum
from graphql import GraphQLError
import strawberry
from strawberry.file_uploads import Upload
from strawberry.subscriptions.protocols.graphql_transport_ws.types import PingMessage
@strawberry.enum
class Flavor(Enum):
    """Ice-cream flavors exposed as a GraphQL enum for the test schema."""
    VANILLA = "vanilla"
    STRAWBERRY = "strawberry"
    CHOCOLATE = "chocolate"
@strawberry.input
class FolderInput:
    """GraphQL input type bundling multiple uploaded files."""
    files: typing.List[Upload]
@strawberry.type
class DebugInfo:
    """Snapshot of the subscription server's internal task bookkeeping."""
    num_active_result_handlers: int
    is_connection_init_timeout_task_done: typing.Optional[bool]
@strawberry.type
class Query:
    """Minimal root query type: a single static string field."""
    hello: str = "strawberry"
@strawberry.type
class Mutation:
    """File-upload mutations used by the aiohttp transport tests."""

    @strawberry.mutation
    def read_text(self, text_file: Upload) -> str:
        """Decode a single uploaded file as text."""
        return text_file.read().decode()

    @strawberry.mutation
    def read_files(self, files: typing.List[Upload]) -> typing.List[str]:
        """Decode every uploaded file, preserving order."""
        return [uploaded.read().decode() for uploaded in files]

    @strawberry.mutation
    def read_folder(self, folder: FolderInput) -> typing.List[str]:
        """Decode every file contained in *folder*, preserving order."""
        return [uploaded.read().decode() for uploaded in folder.files]
@strawberry.type
class Subscription:
    """GraphQL subscriptions exercised by the aiohttp transport tests."""
    @strawberry.subscription
    async def echo(
        self, message: str, delay: float = 0
    ) -> typing.AsyncGenerator[str, None]:
        """Yield *message* once after an optional *delay* in seconds."""
        await asyncio.sleep(delay)
        yield message
    @strawberry.subscription
    async def request_ping(self, info) -> typing.AsyncGenerator[bool, None]:
        """Send a graphql-transport-ws Ping frame to the client, then yield True."""
        ws = info.context["ws"]
        await ws.send_json(PingMessage().as_dict())
        yield True
    @strawberry.subscription
    async def infinity(self, message: str) -> typing.AsyncGenerator[str, None]:
        """Yield *message* once per second, forever (never terminates)."""
        while True:
            yield message
            await asyncio.sleep(1)
    @strawberry.subscription
    async def context(self, info) -> typing.AsyncGenerator[str, None]:
        """Yield a value the test server injected into the request context."""
        yield info.context["custom_value"]
    @strawberry.subscription
    async def error(self, message: str) -> typing.AsyncGenerator[str, None]:
        """Yield a GraphQLError as a value (tests error propagation in results)."""
        yield GraphQLError(message)  # type: ignore
    @strawberry.subscription
    async def exception(self, message: str) -> typing.AsyncGenerator[str, None]:
        """Raise ValueError immediately (tests unhandled-exception behavior)."""
        raise ValueError(message)
        # Without this yield, the method is not recognised as an async generator
        yield "Hi"  # noqa
    @strawberry.subscription
    async def flavors(self) -> typing.AsyncGenerator[Flavor, None]:
        """Yield each Flavor enum member in declaration order."""
        yield Flavor.VANILLA
        yield Flavor.STRAWBERRY
        yield Flavor.CHOCOLATE
    @strawberry.subscription
    async def debug(self, info) -> typing.AsyncGenerator[DebugInfo, None]:
        """Yield a DebugInfo snapshot of the server's task bookkeeping."""
        # Result-handler tasks still running for this connection.
        active_result_handlers = [
            task for task in info.context["tasks"].values() if not task.done()
        ]
        connection_init_timeout_task = info.context["connectionInitTimeoutTask"]
        # None when no timeout task was scheduled at all.
        is_connection_init_timeout_task_done = (
            connection_init_timeout_task.done()
            if connection_init_timeout_task
            else None
        )
        yield DebugInfo(
            num_active_result_handlers=len(active_result_handlers),
            is_connection_init_timeout_task_done=is_connection_init_timeout_task_done,
        )
# Executable schema used by the aiohttp transport test suite.
schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription)
| 28.516949
| 86
| 0.684695
|
4a0346c1dacc5fdacd11a45f262e16ef3e57ce5e
| 31,861
|
py
|
Python
|
kubernetes_asyncio/client/models/v1_volume.py
|
dineshsonachalam/kubernetes_asyncio
|
d57e9e9be11f6789e1ce8d5b161acb64d29acf35
|
[
"Apache-2.0"
] | 1
|
2021-02-25T04:36:18.000Z
|
2021-02-25T04:36:18.000Z
|
kubernetes_asyncio/client/models/v1_volume.py
|
hubo1016/kubernetes_asyncio
|
d57e9e9be11f6789e1ce8d5b161acb64d29acf35
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/models/v1_volume.py
|
hubo1016/kubernetes_asyncio
|
d57e9e9be11f6789e1ce8d5b161acb64d29acf35
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.12.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class V1Volume(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'aws_elastic_block_store': 'V1AWSElasticBlockStoreVolumeSource',
'azure_disk': 'V1AzureDiskVolumeSource',
'azure_file': 'V1AzureFileVolumeSource',
'cephfs': 'V1CephFSVolumeSource',
'cinder': 'V1CinderVolumeSource',
'config_map': 'V1ConfigMapVolumeSource',
'downward_api': 'V1DownwardAPIVolumeSource',
'empty_dir': 'V1EmptyDirVolumeSource',
'fc': 'V1FCVolumeSource',
'flex_volume': 'V1FlexVolumeSource',
'flocker': 'V1FlockerVolumeSource',
'gce_persistent_disk': 'V1GCEPersistentDiskVolumeSource',
'git_repo': 'V1GitRepoVolumeSource',
'glusterfs': 'V1GlusterfsVolumeSource',
'host_path': 'V1HostPathVolumeSource',
'iscsi': 'V1ISCSIVolumeSource',
'name': 'str',
'nfs': 'V1NFSVolumeSource',
'persistent_volume_claim': 'V1PersistentVolumeClaimVolumeSource',
'photon_persistent_disk': 'V1PhotonPersistentDiskVolumeSource',
'portworx_volume': 'V1PortworxVolumeSource',
'projected': 'V1ProjectedVolumeSource',
'quobyte': 'V1QuobyteVolumeSource',
'rbd': 'V1RBDVolumeSource',
'scale_io': 'V1ScaleIOVolumeSource',
'secret': 'V1SecretVolumeSource',
'storageos': 'V1StorageOSVolumeSource',
'vsphere_volume': 'V1VsphereVirtualDiskVolumeSource'
}
attribute_map = {
'aws_elastic_block_store': 'awsElasticBlockStore',
'azure_disk': 'azureDisk',
'azure_file': 'azureFile',
'cephfs': 'cephfs',
'cinder': 'cinder',
'config_map': 'configMap',
'downward_api': 'downwardAPI',
'empty_dir': 'emptyDir',
'fc': 'fc',
'flex_volume': 'flexVolume',
'flocker': 'flocker',
'gce_persistent_disk': 'gcePersistentDisk',
'git_repo': 'gitRepo',
'glusterfs': 'glusterfs',
'host_path': 'hostPath',
'iscsi': 'iscsi',
'name': 'name',
'nfs': 'nfs',
'persistent_volume_claim': 'persistentVolumeClaim',
'photon_persistent_disk': 'photonPersistentDisk',
'portworx_volume': 'portworxVolume',
'projected': 'projected',
'quobyte': 'quobyte',
'rbd': 'rbd',
'scale_io': 'scaleIO',
'secret': 'secret',
'storageos': 'storageos',
'vsphere_volume': 'vsphereVolume'
}
def __init__(self, aws_elastic_block_store=None, azure_disk=None, azure_file=None, cephfs=None, cinder=None, config_map=None, downward_api=None, empty_dir=None, fc=None, flex_volume=None, flocker=None, gce_persistent_disk=None, git_repo=None, glusterfs=None, host_path=None, iscsi=None, name=None, nfs=None, persistent_volume_claim=None, photon_persistent_disk=None, portworx_volume=None, projected=None, quobyte=None, rbd=None, scale_io=None, secret=None, storageos=None, vsphere_volume=None): # noqa: E501
"""V1Volume - a model defined in Swagger""" # noqa: E501
self._aws_elastic_block_store = None
self._azure_disk = None
self._azure_file = None
self._cephfs = None
self._cinder = None
self._config_map = None
self._downward_api = None
self._empty_dir = None
self._fc = None
self._flex_volume = None
self._flocker = None
self._gce_persistent_disk = None
self._git_repo = None
self._glusterfs = None
self._host_path = None
self._iscsi = None
self._name = None
self._nfs = None
self._persistent_volume_claim = None
self._photon_persistent_disk = None
self._portworx_volume = None
self._projected = None
self._quobyte = None
self._rbd = None
self._scale_io = None
self._secret = None
self._storageos = None
self._vsphere_volume = None
self.discriminator = None
if aws_elastic_block_store is not None:
self.aws_elastic_block_store = aws_elastic_block_store
if azure_disk is not None:
self.azure_disk = azure_disk
if azure_file is not None:
self.azure_file = azure_file
if cephfs is not None:
self.cephfs = cephfs
if cinder is not None:
self.cinder = cinder
if config_map is not None:
self.config_map = config_map
if downward_api is not None:
self.downward_api = downward_api
if empty_dir is not None:
self.empty_dir = empty_dir
if fc is not None:
self.fc = fc
if flex_volume is not None:
self.flex_volume = flex_volume
if flocker is not None:
self.flocker = flocker
if gce_persistent_disk is not None:
self.gce_persistent_disk = gce_persistent_disk
if git_repo is not None:
self.git_repo = git_repo
if glusterfs is not None:
self.glusterfs = glusterfs
if host_path is not None:
self.host_path = host_path
if iscsi is not None:
self.iscsi = iscsi
self.name = name
if nfs is not None:
self.nfs = nfs
if persistent_volume_claim is not None:
self.persistent_volume_claim = persistent_volume_claim
if photon_persistent_disk is not None:
self.photon_persistent_disk = photon_persistent_disk
if portworx_volume is not None:
self.portworx_volume = portworx_volume
if projected is not None:
self.projected = projected
if quobyte is not None:
self.quobyte = quobyte
if rbd is not None:
self.rbd = rbd
if scale_io is not None:
self.scale_io = scale_io
if secret is not None:
self.secret = secret
if storageos is not None:
self.storageos = storageos
if vsphere_volume is not None:
self.vsphere_volume = vsphere_volume
@property
def aws_elastic_block_store(self):
"""Gets the aws_elastic_block_store of this V1Volume. # noqa: E501
AWSElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:return: The aws_elastic_block_store of this V1Volume. # noqa: E501
:rtype: V1AWSElasticBlockStoreVolumeSource
"""
return self._aws_elastic_block_store
@aws_elastic_block_store.setter
def aws_elastic_block_store(self, aws_elastic_block_store):
"""Sets the aws_elastic_block_store of this V1Volume.
AWSElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:param aws_elastic_block_store: The aws_elastic_block_store of this V1Volume. # noqa: E501
:type: V1AWSElasticBlockStoreVolumeSource
"""
self._aws_elastic_block_store = aws_elastic_block_store
@property
def azure_disk(self):
"""Gets the azure_disk of this V1Volume. # noqa: E501
AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. # noqa: E501
:return: The azure_disk of this V1Volume. # noqa: E501
:rtype: V1AzureDiskVolumeSource
"""
return self._azure_disk
@azure_disk.setter
def azure_disk(self, azure_disk):
"""Sets the azure_disk of this V1Volume.
AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. # noqa: E501
:param azure_disk: The azure_disk of this V1Volume. # noqa: E501
:type: V1AzureDiskVolumeSource
"""
self._azure_disk = azure_disk
@property
def azure_file(self):
"""Gets the azure_file of this V1Volume. # noqa: E501
AzureFile represents an Azure File Service mount on the host and bind mount to the pod. # noqa: E501
:return: The azure_file of this V1Volume. # noqa: E501
:rtype: V1AzureFileVolumeSource
"""
return self._azure_file
@azure_file.setter
def azure_file(self, azure_file):
"""Sets the azure_file of this V1Volume.
AzureFile represents an Azure File Service mount on the host and bind mount to the pod. # noqa: E501
:param azure_file: The azure_file of this V1Volume. # noqa: E501
:type: V1AzureFileVolumeSource
"""
self._azure_file = azure_file
@property
def cephfs(self):
"""Gets the cephfs of this V1Volume. # noqa: E501
CephFS represents a Ceph FS mount on the host that shares a pod's lifetime # noqa: E501
:return: The cephfs of this V1Volume. # noqa: E501
:rtype: V1CephFSVolumeSource
"""
return self._cephfs
@cephfs.setter
def cephfs(self, cephfs):
"""Sets the cephfs of this V1Volume.
CephFS represents a Ceph FS mount on the host that shares a pod's lifetime # noqa: E501
:param cephfs: The cephfs of this V1Volume. # noqa: E501
:type: V1CephFSVolumeSource
"""
self._cephfs = cephfs
@property
def cinder(self):
"""Gets the cinder of this V1Volume. # noqa: E501
Cinder represents a cinder volume attached and mounted on kubelets host machine More info: https://releases.k8s.io/HEAD/examples/mysql-cinder-pd/README.md # noqa: E501
:return: The cinder of this V1Volume. # noqa: E501
:rtype: V1CinderVolumeSource
"""
return self._cinder
@cinder.setter
def cinder(self, cinder):
"""Sets the cinder of this V1Volume.
Cinder represents a cinder volume attached and mounted on kubelets host machine More info: https://releases.k8s.io/HEAD/examples/mysql-cinder-pd/README.md # noqa: E501
:param cinder: The cinder of this V1Volume. # noqa: E501
:type: V1CinderVolumeSource
"""
self._cinder = cinder
@property
def config_map(self):
"""Gets the config_map of this V1Volume. # noqa: E501
ConfigMap represents a configMap that should populate this volume # noqa: E501
:return: The config_map of this V1Volume. # noqa: E501
:rtype: V1ConfigMapVolumeSource
"""
return self._config_map
@config_map.setter
def config_map(self, config_map):
"""Sets the config_map of this V1Volume.
ConfigMap represents a configMap that should populate this volume # noqa: E501
:param config_map: The config_map of this V1Volume. # noqa: E501
:type: V1ConfigMapVolumeSource
"""
self._config_map = config_map
@property
def downward_api(self):
"""Gets the downward_api of this V1Volume. # noqa: E501
DownwardAPI represents downward API about the pod that should populate this volume # noqa: E501
:return: The downward_api of this V1Volume. # noqa: E501
:rtype: V1DownwardAPIVolumeSource
"""
return self._downward_api
@downward_api.setter
def downward_api(self, downward_api):
"""Sets the downward_api of this V1Volume.
DownwardAPI represents downward API about the pod that should populate this volume # noqa: E501
:param downward_api: The downward_api of this V1Volume. # noqa: E501
:type: V1DownwardAPIVolumeSource
"""
self._downward_api = downward_api
@property
def empty_dir(self):
"""Gets the empty_dir of this V1Volume. # noqa: E501
EmptyDir represents a temporary directory that shares a pod's lifetime. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir # noqa: E501
:return: The empty_dir of this V1Volume. # noqa: E501
:rtype: V1EmptyDirVolumeSource
"""
return self._empty_dir
@empty_dir.setter
def empty_dir(self, empty_dir):
"""Sets the empty_dir of this V1Volume.
EmptyDir represents a temporary directory that shares a pod's lifetime. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir # noqa: E501
:param empty_dir: The empty_dir of this V1Volume. # noqa: E501
:type: V1EmptyDirVolumeSource
"""
self._empty_dir = empty_dir
@property
def fc(self):
"""Gets the fc of this V1Volume. # noqa: E501
FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod. # noqa: E501
:return: The fc of this V1Volume. # noqa: E501
:rtype: V1FCVolumeSource
"""
return self._fc
@fc.setter
def fc(self, fc):
"""Sets the fc of this V1Volume.
FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod. # noqa: E501
:param fc: The fc of this V1Volume. # noqa: E501
:type: V1FCVolumeSource
"""
self._fc = fc
@property
def flex_volume(self):
"""Gets the flex_volume of this V1Volume. # noqa: E501
FlexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. # noqa: E501
:return: The flex_volume of this V1Volume. # noqa: E501
:rtype: V1FlexVolumeSource
"""
return self._flex_volume
@flex_volume.setter
def flex_volume(self, flex_volume):
"""Sets the flex_volume of this V1Volume.
FlexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. # noqa: E501
:param flex_volume: The flex_volume of this V1Volume. # noqa: E501
:type: V1FlexVolumeSource
"""
self._flex_volume = flex_volume
@property
def flocker(self):
"""Gets the flocker of this V1Volume. # noqa: E501
Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running # noqa: E501
:return: The flocker of this V1Volume. # noqa: E501
:rtype: V1FlockerVolumeSource
"""
return self._flocker
@flocker.setter
def flocker(self, flocker):
"""Sets the flocker of this V1Volume.
Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running # noqa: E501
:param flocker: The flocker of this V1Volume. # noqa: E501
:type: V1FlockerVolumeSource
"""
self._flocker = flocker
@property
def gce_persistent_disk(self):
"""Gets the gce_persistent_disk of this V1Volume. # noqa: E501
GCEPersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk # noqa: E501
:return: The gce_persistent_disk of this V1Volume. # noqa: E501
:rtype: V1GCEPersistentDiskVolumeSource
"""
return self._gce_persistent_disk
@gce_persistent_disk.setter
def gce_persistent_disk(self, gce_persistent_disk):
"""Sets the gce_persistent_disk of this V1Volume.
GCEPersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk # noqa: E501
:param gce_persistent_disk: The gce_persistent_disk of this V1Volume. # noqa: E501
:type: V1GCEPersistentDiskVolumeSource
"""
self._gce_persistent_disk = gce_persistent_disk
@property
def git_repo(self):
"""Gets the git_repo of this V1Volume. # noqa: E501
GitRepo represents a git repository at a particular revision. DEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container. # noqa: E501
:return: The git_repo of this V1Volume. # noqa: E501
:rtype: V1GitRepoVolumeSource
"""
return self._git_repo
@git_repo.setter
def git_repo(self, git_repo):
"""Sets the git_repo of this V1Volume.
GitRepo represents a git repository at a particular revision. DEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an EmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir into the Pod's container. # noqa: E501
:param git_repo: The git_repo of this V1Volume. # noqa: E501
:type: V1GitRepoVolumeSource
"""
self._git_repo = git_repo
@property
def glusterfs(self):
"""Gets the glusterfs of this V1Volume. # noqa: E501
Glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. More info: https://releases.k8s.io/HEAD/examples/volumes/glusterfs/README.md # noqa: E501
:return: The glusterfs of this V1Volume. # noqa: E501
:rtype: V1GlusterfsVolumeSource
"""
return self._glusterfs
@glusterfs.setter
def glusterfs(self, glusterfs):
"""Sets the glusterfs of this V1Volume.
Glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. More info: https://releases.k8s.io/HEAD/examples/volumes/glusterfs/README.md # noqa: E501
:param glusterfs: The glusterfs of this V1Volume. # noqa: E501
:type: V1GlusterfsVolumeSource
"""
self._glusterfs = glusterfs
@property
def host_path(self):
"""Gets the host_path of this V1Volume. # noqa: E501
HostPath represents a pre-existing file or directory on the host machine that is directly exposed to the container. This is generally used for system agents or other privileged things that are allowed to see the host machine. Most containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath # noqa: E501
:return: The host_path of this V1Volume. # noqa: E501
:rtype: V1HostPathVolumeSource
"""
return self._host_path
@host_path.setter
def host_path(self, host_path):
"""Sets the host_path of this V1Volume.
HostPath represents a pre-existing file or directory on the host machine that is directly exposed to the container. This is generally used for system agents or other privileged things that are allowed to see the host machine. Most containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath # noqa: E501
:param host_path: The host_path of this V1Volume. # noqa: E501
:type: V1HostPathVolumeSource
"""
self._host_path = host_path
@property
def iscsi(self):
"""Gets the iscsi of this V1Volume. # noqa: E501
ISCSI represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://releases.k8s.io/HEAD/examples/volumes/iscsi/README.md # noqa: E501
:return: The iscsi of this V1Volume. # noqa: E501
:rtype: V1ISCSIVolumeSource
"""
return self._iscsi
@iscsi.setter
def iscsi(self, iscsi):
"""Sets the iscsi of this V1Volume.
ISCSI represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://releases.k8s.io/HEAD/examples/volumes/iscsi/README.md # noqa: E501
:param iscsi: The iscsi of this V1Volume. # noqa: E501
:type: V1ISCSIVolumeSource
"""
self._iscsi = iscsi
    @property
    def name(self):
        """Gets the name of this V1Volume.  # noqa: E501

        Volume's name. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names  # noqa: E501

        :return: The name of this V1Volume.  # noqa: E501
        :rtype: str
        """
        # "name" is the only required attribute of this model; the setter
        # rejects None.
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this V1Volume.

        Volume's name. Must be a DNS_LABEL and unique within the pod. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names  # noqa: E501

        :param name: The name of this V1Volume.  # noqa: E501
        :type: str
        :raises ValueError: if ``name`` is ``None`` (the field is required)
        """
        # Unlike the other attributes, "name" is mandatory in the swagger
        # definition, hence the explicit None check.
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501
        self._name = name
@property
def nfs(self):
"""Gets the nfs of this V1Volume. # noqa: E501
NFS represents an NFS mount on the host that shares a pod's lifetime More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs # noqa: E501
:return: The nfs of this V1Volume. # noqa: E501
:rtype: V1NFSVolumeSource
"""
return self._nfs
@nfs.setter
def nfs(self, nfs):
"""Sets the nfs of this V1Volume.
NFS represents an NFS mount on the host that shares a pod's lifetime More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs # noqa: E501
:param nfs: The nfs of this V1Volume. # noqa: E501
:type: V1NFSVolumeSource
"""
self._nfs = nfs
@property
def persistent_volume_claim(self):
"""Gets the persistent_volume_claim of this V1Volume. # noqa: E501
PersistentVolumeClaimVolumeSource represents a reference to a PersistentVolumeClaim in the same namespace. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims # noqa: E501
:return: The persistent_volume_claim of this V1Volume. # noqa: E501
:rtype: V1PersistentVolumeClaimVolumeSource
"""
return self._persistent_volume_claim
@persistent_volume_claim.setter
def persistent_volume_claim(self, persistent_volume_claim):
"""Sets the persistent_volume_claim of this V1Volume.
PersistentVolumeClaimVolumeSource represents a reference to a PersistentVolumeClaim in the same namespace. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims # noqa: E501
:param persistent_volume_claim: The persistent_volume_claim of this V1Volume. # noqa: E501
:type: V1PersistentVolumeClaimVolumeSource
"""
self._persistent_volume_claim = persistent_volume_claim
@property
def photon_persistent_disk(self):
"""Gets the photon_persistent_disk of this V1Volume. # noqa: E501
PhotonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine # noqa: E501
:return: The photon_persistent_disk of this V1Volume. # noqa: E501
:rtype: V1PhotonPersistentDiskVolumeSource
"""
return self._photon_persistent_disk
@photon_persistent_disk.setter
def photon_persistent_disk(self, photon_persistent_disk):
"""Sets the photon_persistent_disk of this V1Volume.
PhotonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine # noqa: E501
:param photon_persistent_disk: The photon_persistent_disk of this V1Volume. # noqa: E501
:type: V1PhotonPersistentDiskVolumeSource
"""
self._photon_persistent_disk = photon_persistent_disk
@property
def portworx_volume(self):
"""Gets the portworx_volume of this V1Volume. # noqa: E501
PortworxVolume represents a portworx volume attached and mounted on kubelets host machine # noqa: E501
:return: The portworx_volume of this V1Volume. # noqa: E501
:rtype: V1PortworxVolumeSource
"""
return self._portworx_volume
@portworx_volume.setter
def portworx_volume(self, portworx_volume):
"""Sets the portworx_volume of this V1Volume.
PortworxVolume represents a portworx volume attached and mounted on kubelets host machine # noqa: E501
:param portworx_volume: The portworx_volume of this V1Volume. # noqa: E501
:type: V1PortworxVolumeSource
"""
self._portworx_volume = portworx_volume
@property
def projected(self):
"""Gets the projected of this V1Volume. # noqa: E501
Items for all in one resources secrets, configmaps, and downward API # noqa: E501
:return: The projected of this V1Volume. # noqa: E501
:rtype: V1ProjectedVolumeSource
"""
return self._projected
@projected.setter
def projected(self, projected):
"""Sets the projected of this V1Volume.
Items for all in one resources secrets, configmaps, and downward API # noqa: E501
:param projected: The projected of this V1Volume. # noqa: E501
:type: V1ProjectedVolumeSource
"""
self._projected = projected
@property
def quobyte(self):
"""Gets the quobyte of this V1Volume. # noqa: E501
Quobyte represents a Quobyte mount on the host that shares a pod's lifetime # noqa: E501
:return: The quobyte of this V1Volume. # noqa: E501
:rtype: V1QuobyteVolumeSource
"""
return self._quobyte
@quobyte.setter
def quobyte(self, quobyte):
"""Sets the quobyte of this V1Volume.
Quobyte represents a Quobyte mount on the host that shares a pod's lifetime # noqa: E501
:param quobyte: The quobyte of this V1Volume. # noqa: E501
:type: V1QuobyteVolumeSource
"""
self._quobyte = quobyte
@property
def rbd(self):
"""Gets the rbd of this V1Volume. # noqa: E501
RBD represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://releases.k8s.io/HEAD/examples/volumes/rbd/README.md # noqa: E501
:return: The rbd of this V1Volume. # noqa: E501
:rtype: V1RBDVolumeSource
"""
return self._rbd
@rbd.setter
def rbd(self, rbd):
"""Sets the rbd of this V1Volume.
RBD represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://releases.k8s.io/HEAD/examples/volumes/rbd/README.md # noqa: E501
:param rbd: The rbd of this V1Volume. # noqa: E501
:type: V1RBDVolumeSource
"""
self._rbd = rbd
@property
def scale_io(self):
"""Gets the scale_io of this V1Volume. # noqa: E501
ScaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. # noqa: E501
:return: The scale_io of this V1Volume. # noqa: E501
:rtype: V1ScaleIOVolumeSource
"""
return self._scale_io
@scale_io.setter
def scale_io(self, scale_io):
"""Sets the scale_io of this V1Volume.
ScaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. # noqa: E501
:param scale_io: The scale_io of this V1Volume. # noqa: E501
:type: V1ScaleIOVolumeSource
"""
self._scale_io = scale_io
@property
def secret(self):
"""Gets the secret of this V1Volume. # noqa: E501
Secret represents a secret that should populate this volume. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret # noqa: E501
:return: The secret of this V1Volume. # noqa: E501
:rtype: V1SecretVolumeSource
"""
return self._secret
@secret.setter
def secret(self, secret):
"""Sets the secret of this V1Volume.
Secret represents a secret that should populate this volume. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret # noqa: E501
:param secret: The secret of this V1Volume. # noqa: E501
:type: V1SecretVolumeSource
"""
self._secret = secret
@property
def storageos(self):
"""Gets the storageos of this V1Volume. # noqa: E501
StorageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. # noqa: E501
:return: The storageos of this V1Volume. # noqa: E501
:rtype: V1StorageOSVolumeSource
"""
return self._storageos
@storageos.setter
def storageos(self, storageos):
"""Sets the storageos of this V1Volume.
StorageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. # noqa: E501
:param storageos: The storageos of this V1Volume. # noqa: E501
:type: V1StorageOSVolumeSource
"""
self._storageos = storageos
@property
def vsphere_volume(self):
"""Gets the vsphere_volume of this V1Volume. # noqa: E501
VsphereVolume represents a vSphere volume attached and mounted on kubelets host machine # noqa: E501
:return: The vsphere_volume of this V1Volume. # noqa: E501
:rtype: V1VsphereVirtualDiskVolumeSource
"""
return self._vsphere_volume
@vsphere_volume.setter
def vsphere_volume(self, vsphere_volume):
"""Sets the vsphere_volume of this V1Volume.
VsphereVolume represents a vSphere volume attached and mounted on kubelets host machine # noqa: E501
:param vsphere_volume: The vsphere_volume of this V1Volume. # noqa: E501
:type: V1VsphereVirtualDiskVolumeSource
"""
self._vsphere_volume = vsphere_volume
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        # Reuse to_str() so repr() and str() stay consistent.
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1Volume):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 36.495991
| 512
| 0.654656
|
4a0346f5527dccec476ae29905b0c28f57507abc
| 6,443
|
py
|
Python
|
Python/walk_the_folders.py
|
Apop85/Tools
|
9f8b8a3d229d2acbede5693a74b75b28620b5f20
|
[
"MIT"
] | null | null | null |
Python/walk_the_folders.py
|
Apop85/Tools
|
9f8b8a3d229d2acbede5693a74b75b28620b5f20
|
[
"MIT"
] | null | null | null |
Python/walk_the_folders.py
|
Apop85/Tools
|
9f8b8a3d229d2acbede5693a74b75b28620b5f20
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
####
# File: walk_the_folders.py
# Project: sonstige_uebungen
#-----
# Created Date: Monday 20.04.2020, 12:17
# Author: Apop85
#-----
# Last Modified: Tuesday 21.04.2020, 23:00
#-----
# Copyright (c) 2020 Apop85
# This software is published under the MIT license.
# Check http://www.opensource.org/licenses/MIT for further informations
#-----
# Description: This script analyzes every Folder and Subfolder and lists Folder, Subfolder and Files as well as filesize for each extension.
####
import os, re
def loading():
    """Draw one frame of a console loading animation.

    A fish symbol bounces back and forth inside a fixed-width bar on a
    single console line.  The animation state (position and direction)
    lives in the module-level globals ``current_direction`` and
    ``current_position`` so successive calls continue the movement.
    """
    global current_direction
    global current_position
    # Appearance of the loading bar and of the moving symbol
    # (one symbol per travel direction).
    loading_bar = "[          ]"
    loading_symbol_fw = ">++('>"
    loading_symbol_bw = "<')++<"
    # Initialise the animation state on the very first call.  The
    # original probed the globals with a bare try/except around a dummy
    # statement; an explicit globals() check is clearer and cannot
    # swallow unrelated errors.
    if "current_direction" not in globals():
        current_direction = 0
        current_position = 1
    # Advance the symbol one step in the current direction.
    if current_direction == 0:
        current_position += 1
        symbol = loading_symbol_fw
    else:
        current_position -= 1
        symbol = loading_symbol_bw
    # Reverse direction when either end of the bar is reached.
    if current_position >= len(loading_bar) - 1:
        current_direction = 1
    elif current_position == 1:
        current_direction = 0
    # One carriage return rewinds the cursor to the start of the line;
    # repeating it per character (as before) was redundant.
    print("\r" + loading_bar[0:current_position] + symbol
          + loading_bar[current_position:], end="")
# Ask for the directory that should be analysed.
while True:
    target_dir = input("Bitte Zielpfad angeben: ")
    # Accept the input only if the path exists and is a directory.
    if os.path.exists(target_dir) and os.path.isdir(target_dir):
        break
    else:
        print("Pfad konnte nicht gefunden werden")
# Ask for the path of the output file.
while True:
    target_file = input("Bitte Pfad für Ausgabedatei angeben: ")
    # Split the path into its components (Windows-style separators).
    rel_path = target_file.split("\\")
    del rel_path[-1]
    # Re-join everything except the last component: the parent folder.
    rel_path = "\\".join(rel_path)
    if os.path.exists(rel_path) and not os.path.isdir(target_file):
        break
    elif os.path.isdir(target_file):
        print(target_file + " ist keine Datei. Beispiel: C:\\output.txt")
    else:
        print("Pfad " + rel_path + " konnte nicht gefunden werden.")
# Ask whether to pause after each folder in the final report.
pause = "na"
while not pause in ["0","1"]:
    pause = input("Nach jedem Ordner eine Pause? (0 = nein, 1 = ja): ")
# Filler characters used to draw separator lines in the report.
filler = "█"
filler2 = "░"
# Start with an empty result table keyed by folder path.
result_table = {target_dir : {}}
# Create the os.walk generator over the target tree.
folder_table = os.walk(target_dir)
# Pattern used to detect a file extension.
# NOTE(review): only 2-3 character extensions match; names like
# "data.json" fall into the "None" bucket below — confirm intended.
file_pattern = re.compile(".*\..{2,3}")
# Examine every (folder, subfolders, files) triple produced by os.walk.
for folder, subfolder, filename in folder_table:
    loading()
    # Create an entry for the current folder if none exists yet.
    if not folder in result_table.keys():
        # Derive the parent folder path.
        # NOTE(review): last_folder is computed but never used afterwards.
        last_folder = folder.split("\\")
        del last_folder[-1]
        last_folder = "\\".join(last_folder)
        # Add the entry to the result table.
        result_table.setdefault(folder, {})
    # If the folder contains files ...
    if filename != []:
        # ... store the file list under the key "FILE".
        result_table[folder].setdefault("FILE", filename)
        for file in filename:
            # Extract the file extension (last dot-separated part).
            if file_pattern.match(file):
                file_extension = (file.split("."))[-1]
            else:
                file_extension = "None"
            # Create a per-extension byte counter for this folder.
            result_table[folder].setdefault(file_extension, 0)
            try:
                # Try to read the file size ...
                file_size = (os.stat(folder + "\\" + file)).st_size
                # ... and add it to the byte counter.
                result_table[folder][file_extension] += file_size
            except:
                # Unreadable files (permissions, races) are skipped.
                pass
    # If sub folders exist, store them under the key "SUB".
    if subfolder != []:
        result_table[folder].setdefault("SUB", subfolder)
print()
def print_n_save(content):
    """Print *content* to the console and append it to the output file.

    Relies on the module-level ``target_file`` path chosen by the user.

    :param content: one line of report text (written with a trailing newline)
    """
    print(content)
    # A context manager guarantees the handle is closed even if the
    # write fails (the original open/close pair could leak on error).
    with open(target_file, "a", encoding="utf-8") as file_writer:
        file_writer.write(content + "\n")
def choose_size_format(byte_amount):
    """Format a byte count using the largest fitting decimal unit.

    Scales *byte_amount* by 1000-based units and returns the first
    representation whose value stays below 1000, falling back to "gb"
    for anything larger.  Plain bytes are shown as an integer, every
    other unit with two decimals.

    :param byte_amount: size in bytes (non-negative number)
    :return: e.g. ``"999 b"``, ``"1.50 kb"``, ``"2.50 mb"``, ``"1000.00 gb"``
    """
    # Tuple of (unit label, divisor), checked smallest-first.  The direct
    # numeric comparison below replaces the original digit-count test
    # len(str(int(...))) < 4, which is equivalent for non-negative input.
    for unit, factor in (("b", 1), ("kb", 1000), ("mb", 1000000), ("gb", 1000000000)):
        scaled = byte_amount / factor
        if scaled < 1000 or unit == "gb":
            if unit == "b":
                return str(int(scaled)) + " b"
            return "%.2f %s" % (scaled, unit)
# Create (truncate) the output file so the report starts empty;
# print_n_save() will then append to it line by line.
file_writer = open(target_file, "w", encoding="utf-8")
file_writer.close()
# Walk over all recorded folders and emit the report.
for key in result_table.keys():
    print_n_save(filler*100)
    print_n_save("Ordner: " + key)
    # Walk over all entries collected for this folder.
    for subkey in result_table[key].keys():
        # "SUB" holds the list of sub folders.
        if subkey == "SUB":
            print_n_save(filler2*100)
            print_n_save("Unterordner:")
            # Print every sub folder name.
            for foldername in result_table[key][subkey]:
                print_n_save("--> " + foldername)
            print_n_save(filler2*100)
        # "FILE" holds the list of file names.
        elif subkey == "FILE":
            print_n_save(filler2*100)
            print_n_save("Dateien:")
            # Print every file name.
            for filename in result_table[key][subkey]:
                print_n_save("--> " + filename)
            print_n_save(filler2*100)
        else:
            # Anything that is neither FILE nor SUB is a file extension
            # with its accumulated byte count.
            print_n_save("Dateityp: " + subkey + " - Totalgrösse: " + choose_size_format(result_table[key][subkey]))
    if pause == "1":
        input("Enter zum Fortfahren")
| 34.089947
| 140
| 0.629055
|
4a03472beee6237b566ff1b8d0fb564ea4fdef9e
| 9,938
|
py
|
Python
|
train.py
|
lexical-kenobi/Face-Vision-3D_Pose
|
07eee33d09018c99251051a983d3842212177e5a
|
[
"MIT"
] | 3,276
|
2018-06-30T00:51:46.000Z
|
2022-03-31T13:25:50.000Z
|
train.py
|
lexical-kenobi/Face-Vision-3D_Pose
|
07eee33d09018c99251051a983d3842212177e5a
|
[
"MIT"
] | 704
|
2020-09-30T10:44:13.000Z
|
2022-03-30T07:18:28.000Z
|
train.py
|
lexical-kenobi/Face-Vision-3D_Pose
|
07eee33d09018c99251051a983d3842212177e5a
|
[
"MIT"
] | 650
|
2018-07-03T13:44:05.000Z
|
2022-03-23T23:30:42.000Z
|
#!/usr/bin/env python3
# coding: utf-8
import os.path as osp
from pathlib import Path
import numpy as np
import argparse
import time
import logging
import torch
import torch.nn as nn
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
import mobilenet_v1
import torch.backends.cudnn as cudnn
from utils.ddfa import DDFADataset, ToTensorGjz, NormalizeGjz
from utils.ddfa import str2bool, AverageMeter
from utils.io import mkdir
from vdc_loss import VDCLoss
from wpdc_loss import WPDCLoss
# global args (configuration)
args = None  # parsed command-line arguments; populated by parse_args()
lr = None  # learning rate; presumably adjusted during training -- TODO confirm
# Valid values for the --arch command-line flag.
arch_choices = ['mobilenet_2', 'mobilenet_1', 'mobilenet_075', 'mobilenet_05', 'mobilenet_025']
def parse_args(argv=None):
    """Parse command-line options for 3DMM fitting training.

    Sets the module-global ``args`` (as before, so existing callers keep
    working) and additionally returns the parsed namespace so callers and
    tests can use the value directly.

    Args:
        argv: optional explicit argument list. ``None`` (the default, and the
            previous behavior) parses ``sys.argv[1:]``.

    Returns:
        argparse.Namespace: the parsed (and post-processed) options.

    Side effects:
        - ``args.devices_id`` / ``args.milestones`` are split into int lists.
        - The snapshot directory is created via the project's ``mkdir`` helper.
    """
    parser = argparse.ArgumentParser(description='3DMM Fitting')
    parser.add_argument('-j', '--workers', default=6, type=int)
    parser.add_argument('--epochs', default=40, type=int)
    parser.add_argument('--start-epoch', default=1, type=int)
    parser.add_argument('-b', '--batch-size', default=128, type=int)
    parser.add_argument('-vb', '--val-batch-size', default=32, type=int)
    parser.add_argument('--base-lr', '--learning-rate', default=0.001, type=float)
    parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
                        help='momentum')
    parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float)
    parser.add_argument('--print-freq', '-p', default=20, type=int)
    parser.add_argument('--resume', default='', type=str, metavar='PATH')
    parser.add_argument('--devices-id', default='0,1', type=str)
    parser.add_argument('--filelists-train',
                        default='', type=str)
    parser.add_argument('--filelists-val',
                        default='', type=str)
    parser.add_argument('--root', default='')
    parser.add_argument('--snapshot', default='', type=str)
    parser.add_argument('--log-file', default='output.log', type=str)
    parser.add_argument('--log-mode', default='w', type=str)
    parser.add_argument('--size-average', default='true', type=str2bool)
    parser.add_argument('--num-classes', default=62, type=int)
    parser.add_argument('--arch', default='mobilenet_1', type=str,
                        choices=arch_choices)
    parser.add_argument('--frozen', default='false', type=str2bool)
    parser.add_argument('--milestones', default='15,25,30', type=str)
    parser.add_argument('--task', default='all', type=str)
    parser.add_argument('--test_initial', default='false', type=str2bool)
    parser.add_argument('--warmup', default=-1, type=int)
    parser.add_argument('--param-fp-train',
                        default='',
                        type=str)
    parser.add_argument('--param-fp-val',
                        default='')
    parser.add_argument('--opt-style', default='resample', type=str)  # resample
    parser.add_argument('--resample-num', default=132, type=int)
    parser.add_argument('--loss', default='vdc', type=str)
    global args
    args = parser.parse_args(argv)
    # some other operations: turn comma-separated CLI strings into int lists
    args.devices_id = [int(d) for d in args.devices_id.split(',')]
    args.milestones = [int(m) for m in args.milestones.split(',')]
    # ensure the directory that will hold snapshots exists before training
    snapshot_dir = osp.split(args.snapshot)[0]
    mkdir(snapshot_dir)
    return args
def print_args(args):
    """Log every parsed CLI option as a 'name: value' line via logging.info."""
    for name, value in vars(args).items():
        logging.info(f"{name}: {value}")
def adjust_learning_rate(optimizer, epoch, milestones=None):
    """Sets the learning rate: milestone is a list/tuple.

    The lr is args.base_lr * 0.2**k, where k counts how many milestones have
    been passed (k=1 during warmup, k=0 between warmup and the first
    milestone). The chosen lr is also stored in the module-global ``lr``.
    """
    def _decay_steps(ep):
        # During warmup the rate is decayed once (0.2 * base_lr).
        if ep <= args.warmup:
            return 1
        # Between warmup and the first milestone: full base_lr.
        if args.warmup < ep <= milestones[0]:
            return 0
        # Otherwise: number of milestone boundaries already crossed.
        for idx in range(1, len(milestones)):
            if milestones[idx - 1] < ep <= milestones[idx]:
                return idx
        return len(milestones)

    global lr
    lr = args.base_lr * (0.2 ** _decay_steps(epoch))
    for group in optimizer.param_groups:
        group['lr'] = lr
def save_checkpoint(state, filename='checkpoint.pth.tar'):
    """Serialize a checkpoint object to disk and log the destination.

    Args:
        state: picklable object to persist — here a dict with 'epoch' and
            'state_dict' (see main's training loop).
        filename: target path passed straight to torch.save.
    """
    torch.save(state, filename)
    # Bug fix: the log line previously contained no placeholder, so the
    # actual destination path was never reported.
    logging.info(f'Save checkpoint to {filename}')
def train(train_loader, model, criterion, optimizer, epoch):
    """Run one training epoch and log timing/loss every args.print_freq batches.

    Args:
        train_loader: DataLoader yielding (input, target) batches.
        model: network in training mode; targets are moved to GPU via .cuda().
        criterion: loss module already chosen per args.loss (see main()).
        optimizer: optimizer whose lr was set by adjust_learning_rate.
        epoch: current epoch index, used only for the log line.
    """
    batch_time = AverageMeter()  # wall time per batch
    data_time = AverageMeter()   # see NOTE below — not pure data-loading time
    losses = AverageMeter()      # running loss, weighted by batch size
    model.train()
    end = time.time()
    # loader is batch style
    # for i, (input, target) in enumerate(train_loader):
    for i, (input, target) in enumerate(train_loader):
        target.requires_grad = False
        target = target.cuda(non_blocking=True)
        output = model(input)
        # NOTE(review): updated *after* the forward pass, so this measures
        # data loading + forward, not data loading alone (its log line below
        # is commented out anyway).
        data_time.update(time.time() - end)
        # All three branches are identical: the actual loss choice is baked
        # into `criterion` at construction time in main(); this chain only
        # rejects unknown args.loss values.
        if args.loss.lower() == 'vdc':
            loss = criterion(output, target)
        elif args.loss.lower() == 'wpdc':
            loss = criterion(output, target)
        elif args.loss.lower() == 'pdc':
            loss = criterion(output, target)
        else:
            raise Exception(f'Unknown loss {args.loss}')
        losses.update(loss.item(), input.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        # log
        if i % args.print_freq == 0:
            logging.info(f'Epoch: [{epoch}][{i}/{len(train_loader)}]\t'
                         f'LR: {lr:8f}\t'
                         f'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                         # f'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                         f'Loss {losses.val:.4f} ({losses.avg:.4f})')
def validate(val_loader, model, criterion, epoch):
    """Evaluate the model on val_loader and log the mean loss.

    Computes the unweighted mean of per-batch losses under torch.no_grad().
    NOTE(review): puts the model into eval mode but never restores train
    mode — the caller's next train() call does that via model.train().

    Args:
        val_loader: DataLoader yielding (input, target) batches.
        model: network to evaluate; targets are moved to GPU via .cuda().
        criterion: loss module.
        epoch: epoch index, used only for the log line.
    """
    model.eval()
    end = time.time()
    with torch.no_grad():
        losses = []
        for i, (input, target) in enumerate(val_loader):
            # compute output
            target.requires_grad = False
            target = target.cuda(non_blocking=True)
            output = model(input)
            loss = criterion(output, target)
            losses.append(loss.item())
        elapse = time.time() - end
        # mean over batches (not over samples); fine when batches are equal-sized
        loss = np.mean(losses)
        logging.info(f'Val: [{epoch}][{len(val_loader)}]\t'
                     f'Loss {loss:.4f}\t'
                     f'Time {elapse:.3f}')
def main():
    """Entry point: parse args, build model/loss/data, then train + validate.

    Pipeline:
      1. parse CLI args into the module-global ``args`` and set up logging;
      2. build the chosen MobileNet variant and wrap it in DataParallel;
      3. pick the loss (WPDC/VDC/PDC) and an SGD optimizer, optionally
         resuming weights from a checkpoint;
      4. build train/val DDFA datasets and loaders;
      5. loop over epochs: adjust lr, train, snapshot, validate.
    """
    parse_args()  # parse global args
    # logging setup
    logging.basicConfig(
        format='[%(asctime)s] [p%(process)s] [%(pathname)s:%(lineno)d] [%(levelname)s] %(message)s',
        level=logging.INFO,
        handlers=[
            logging.FileHandler(args.log_file, mode=args.log_mode),
            logging.StreamHandler()
        ]
    )
    print_args(args)  # print args
    # step1: define the model structure
    model = getattr(mobilenet_v1, args.arch)(num_classes=args.num_classes)
    torch.cuda.set_device(args.devices_id[0])  # fix bug for `ERROR: all tensors must be on devices[0]`
    model = nn.DataParallel(model, device_ids=args.devices_id).cuda()  # -> GPU
    # step2: optimization: loss and optimization method
    # criterion = nn.MSELoss(size_average=args.size_average).cuda()
    if args.loss.lower() == 'wpdc':
        print(args.opt_style)
        criterion = WPDCLoss(opt_style=args.opt_style).cuda()
        logging.info('Use WPDC Loss')
    elif args.loss.lower() == 'vdc':
        criterion = VDCLoss(opt_style=args.opt_style).cuda()
        logging.info('Use VDC Loss')
    elif args.loss.lower() == 'pdc':
        criterion = nn.MSELoss(size_average=args.size_average).cuda()
        logging.info('Use PDC loss')
    else:
        raise Exception(f'Unknown Loss {args.loss}')
    optimizer = torch.optim.SGD(model.parameters(),
                                lr=args.base_lr,
                                momentum=args.momentum,
                                weight_decay=args.weight_decay,
                                nesterov=True)
    # step 2.1 resume — load weights only (CPU map), optimizer state is not restored
    if args.resume:
        if Path(args.resume).is_file():
            logging.info(f'=> loading checkpoint {args.resume}')
            checkpoint = torch.load(args.resume, map_location=lambda storage, loc: storage)['state_dict']
            # checkpoint = torch.load(args.resume)['state_dict']
            model.load_state_dict(checkpoint)
        else:
            logging.info(f'=> no checkpoint found at {args.resume}')
    # step3: data
    normalize = NormalizeGjz(mean=127.5, std=128)  # may need optimization
    train_dataset = DDFADataset(
        root=args.root,
        filelists=args.filelists_train,
        param_fp=args.param_fp_train,
        transform=transforms.Compose([ToTensorGjz(), normalize])
    )
    val_dataset = DDFADataset(
        root=args.root,
        filelists=args.filelists_val,
        param_fp=args.param_fp_val,
        transform=transforms.Compose([ToTensorGjz(), normalize])
    )
    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.workers,
                              shuffle=True, pin_memory=True, drop_last=True)
    val_loader = DataLoader(val_dataset, batch_size=args.val_batch_size, num_workers=args.workers,
                            shuffle=False, pin_memory=True)
    # step4: run
    cudnn.benchmark = True
    if args.test_initial:
        logging.info('Testing from initial')
        validate(val_loader, model, criterion, args.start_epoch)
    for epoch in range(args.start_epoch, args.epochs + 1):
        # adjust learning rate
        adjust_learning_rate(optimizer, epoch, args.milestones)
        # train for one epoch
        train(train_loader, model, criterion, optimizer, epoch)
        # snapshot after every epoch (weights only; optimizer state commented out)
        filename = f'{args.snapshot}_checkpoint_epoch_{epoch}.pth.tar'
        save_checkpoint(
            {
                'epoch': epoch,
                'state_dict': model.state_dict(),
                # 'optimizer': optimizer.state_dict()
            },
            filename
        )
        validate(val_loader, model, criterion, epoch)
if __name__ == '__main__':
main()
| 35.241135
| 105
| 0.613604
|
4a03492146f23a55ab09937fdb1fac06d3deac2c
| 1,627
|
py
|
Python
|
core/data/collates/contrib/__init__.py
|
cjy97/LibFewShot
|
cffd0f6d9cb9a13cb4eaf0fb69c13f317508591f
|
[
"MIT"
] | 471
|
2021-09-13T11:28:34.000Z
|
2022-03-30T07:26:54.000Z
|
core/data/collates/contrib/__init__.py
|
cjy97/LibFewShot
|
cffd0f6d9cb9a13cb4eaf0fb69c13f317508591f
|
[
"MIT"
] | 24
|
2021-09-22T02:34:05.000Z
|
2022-02-19T07:26:39.000Z
|
core/data/collates/contrib/__init__.py
|
cjy97/LibFewShot
|
cffd0f6d9cb9a13cb4eaf0fb69c13f317508591f
|
[
"MIT"
] | 82
|
2021-09-16T12:48:01.000Z
|
2022-03-28T06:57:47.000Z
|
# -*- coding: utf-8 -*-
from .autoaugment import ImageNetPolicy
from .cutout import Cutout
from .randaugment import RandAugment
from torchvision import transforms
CJ_DICT = {"brightness": 0.4, "contrast": 0.4, "saturation": 0.4}
def get_augment_method(
    config,
):
    """Return the corresponding augmentation method according to the setting.

    + Use `ColorJitter` and `RandomHorizontalFlip` when not setting `augment_method` or using `NormalAug`.
    + Use `ImageNetPolicy()` when using `AutoAugment`.
    + Use `Cutout()` when using `Cutout`.
    + Use `RandAugment()` when using `RandAugment`.
    + Use `CenterCrop` and `RandomHorizontalFlip` when using `MTLAugment`.
    + Users can add their own augment method in this function.

    Args:
        config (dict): A LFS setting dict

    Returns:
        list: A list of specific transforms.

    Raises:
        ValueError: if ``config["augment_method"]`` names an unknown method.
    """
    if "augment_method" not in config or config["augment_method"] == "NormalAug":
        trfms = [
            transforms.ColorJitter(**CJ_DICT),
            transforms.RandomHorizontalFlip(),
        ]
    elif config["augment_method"] == "AutoAugment":
        trfms = [ImageNetPolicy()]
    elif config["augment_method"] == "Cutout":
        trfms = [Cutout()]
    elif config["augment_method"] == "RandAugment":
        trfms = [RandAugment()]
    elif (
        config["augment_method"] == "MTLAugment"
    ):  # refer to https://github.com/yaoyao-liu/meta-transfer-learning/blob/fe189c96797446b54a0ae1c908f8d92a6d3cb831/pytorch/dataloader/dataset_loader.py#L60
        trfms = [transforms.CenterCrop(80), transforms.RandomHorizontalFlip()]
    else:
        # Previously an unknown name fell through and crashed with an opaque
        # UnboundLocalError at `return trfms`; fail fast with a clear message.
        raise ValueError(f"Unknown augment_method: {config['augment_method']}")
    return trfms
| 36.155556
| 158
| 0.676706
|
4a034b9106ec2f00f4c78c5b4a3c286ed87c9dd4
| 9,060
|
py
|
Python
|
docs/conf.py
|
ghofranehr/foobar
|
0c0baaea8c161d62584298a63f74fb40d867342b
|
[
"BSD-3-Clause"
] | null | null | null |
docs/conf.py
|
ghofranehr/foobar
|
0c0baaea8c161d62584298a63f74fb40d867342b
|
[
"BSD-3-Clause"
] | null | null | null |
docs/conf.py
|
ghofranehr/foobar
|
0c0baaea8c161d62584298a63f74fb40d867342b
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# foobar documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 16 15:13:53 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../foobar'))
sys.path.insert(1, os.path.abspath('/home/env/lib/python2.7/site-packages'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'foobar'
copyright = u'2015, ghofrane'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'foobardoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'foobar.tex', u'foobar Documentation',
u'ghofrane', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'foobar', u'foobar Documentation',
[u'ghofrane'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'foobar', u'foobar Documentation',
u'ghofrane', 'foobar', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 32.12766
| 79
| 0.715784
|
4a034bb146e6efb727ee9b741e3d6ccee5a800da
| 2,006
|
py
|
Python
|
create_dataframe.py
|
MW55/amplicon_snakemake_pipeline
|
feb30960dca294490bcdff666af885a4b768a429
|
[
"MIT"
] | 27
|
2019-10-10T09:21:23.000Z
|
2021-12-28T19:10:28.000Z
|
create_dataframe.py
|
MW55/amplicon_snakemake_pipeline
|
feb30960dca294490bcdff666af885a4b768a429
|
[
"MIT"
] | 15
|
2019-11-11T11:47:15.000Z
|
2021-11-16T13:34:19.000Z
|
create_dataframe.py
|
MW55/amplicon_snakemake_pipeline
|
feb30960dca294490bcdff666af885a4b768a429
|
[
"MIT"
] | 7
|
2020-02-16T17:38:55.000Z
|
2022-02-08T23:44:48.000Z
|
import yaml
import pandas as pd
import numpy as np
from glob import glob
import sys
# Create the datatable containing the samples, units and paths of all
# fastq files formatted correctly. This is vital for the snakemake
# pipeline, without it, the wildcards can't be created.
with open(sys.argv[1]) as f_:
config = yaml.load(f_, Loader=yaml.FullLoader)
def create_dataframe(fl, fpl, config, slice):
    """Build the samples/units table the snakemake pipeline needs.

    File names are expected to look like '<sample>_<unit>_...', so the first
    two underscore-separated fields become the 'sample' and 'unit' columns.

    Args:
        fl: sorted list of bare file names.
        fpl: matching list of file paths (same order as fl).
        config: loaded pipeline config dict; paired-end mode pairs
            consecutive entries of fl/fpl as fq1/fq2.
        slice: end index applied to each path (e.g. -3 to strip '.gz',
            None to keep the full path). Name kept for backward
            compatibility although it shadows the builtin.

    Returns:
        pandas.DataFrame with columns ['sample', 'unit', 'fq1', 'fq2'];
        fq2 is NaN in single-end / already-assembled mode.
    """
    paired = config['merge']['paired_End'] and not config['general']['already_assembled']
    if paired:
        n_rows = int(len(fl) / 2)
        df = pd.DataFrame(columns=['sample', 'unit', 'fq1', 'fq2'],
                          index=range(n_rows), dtype=str)
        for i in range(n_rows):
            j = 2 * i  # consecutive files form one (fq1, fq2) pair
            # Bug fix: use .loc[row, col] instead of chained indexing
            # (df.loc[i]['col'] = ...), which pandas does not guarantee
            # to write back to the frame.
            df.loc[i, 'sample'] = fl[j].split('_')[0]
            df.loc[i, 'unit'] = fl[j].split('_')[1]
            df.loc[i, 'fq1'] = fpl[j][:slice]
            df.loc[i, 'fq2'] = fpl[j + 1][:slice]
    else:
        df = pd.DataFrame(columns=['sample', 'unit', 'fq1', 'fq2'],
                          index=range(int(len(fl))), dtype=str)
        for i in range(len(fl)):
            df.loc[i, 'sample'] = fl[i].split('_')[0]
            df.loc[i, 'unit'] = fl[i].split('_')[1]
            df.loc[i, 'fq1'] = fpl[i][:slice]
            df.loc[i, 'fq2'] = np.nan
    return df
if __name__ == '__main__':
    # Collect the fastq files to tabulate. Before assembly we read the
    # gzipped demultiplexed files; afterwards we read the assembled fastqs.
    if not config['general']['already_assembled']:
        # Rebase every file name onto the demultiplexed/ directory.
        file_path_list = ['demultiplexed/' + name.split('/')[-1] for name in
                          sorted(glob(config['general']['filename'] + '/*.gz'))]
        file_list = sorted([file_.split('/')[-1] for file_
                            in file_path_list])
        slice = -3 # Remove the .gz extension from the file paths.
    else:
        file_path_list = sorted(glob('results/assembly/*/*.fastq'))
        file_list = sorted([file_.split('/')[-1] for file_
                            in file_path_list])
        slice = None  # keep paths unchanged
    # Write the sample/unit table consumed by the snakemake wildcards.
    df = create_dataframe(file_list, file_path_list, config, slice)
    df.to_csv('units.tsv', sep='\t')
| 37.148148
| 84
| 0.547358
|
4a034bbe9e282d968346f3bc09415a5dc008430c
| 6,956
|
py
|
Python
|
train.py
|
kerengaiger/mnist_autoencoder
|
19d7c347897a7f1ced684a04146b052940884e5f
|
[
"MIT"
] | null | null | null |
train.py
|
kerengaiger/mnist_autoencoder
|
19d7c347897a7f1ced684a04146b052940884e5f
|
[
"MIT"
] | null | null | null |
train.py
|
kerengaiger/mnist_autoencoder
|
19d7c347897a7f1ced684a04146b052940884e5f
|
[
"MIT"
] | null | null | null |
import argparse
import os
import pathlib
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn as nn
import torchvision.transforms as transforms
from torch.utils.data.sampler import SubsetRandomSampler
from torchvision import datasets
from tqdm import tqdm
from model import DeNoiser
from utils import add_noise, plot_imgs
def parse_args(argv=None):
    """Parse command-line options for training the denoising autoencoder.

    Args:
        argv: optional explicit argument list. ``None`` (the default, and the
            previous behavior) parses ``sys.argv[1:]``; passing a list makes
            the parser testable without touching the real CLI.

    Returns:
        argparse.Namespace: the parsed options.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--lr', type=float, default=0.001, help="learning rate")
    parser.add_argument('--alpha', type=float, default=0.5, help="fraction of original image size to use as latent dim")
    parser.add_argument('--batch_size', type=int, default=20, help="batch size")
    parser.add_argument('--epochs', type=int, default=10, help="number of epochs to run on training")
    parser.add_argument('--noise_var', type=float, default=0.5, help="variance of gausian noise")
    parser.add_argument('--valid_split', type=float, default=0.2, help="part of dataset to use as validation set")
    parser.add_argument('--loss', type=str, default='mse', help="loss function to use for training: BCE or MSE")
    parser.add_argument('--plot_imgs', action='store_true', help="plots the first epoch images in each epoch")
    parser.add_argument('--plot_kernels', action='store_true', help="plots the conv1 kernels in each epoch")
    parser.add_argument('--save_dir', type=str, default='./figures/', help="directory to store figures in case "
                                                                           "plot_imgs is configured")
    parser.add_argument('--model_file', type=str, default='mnist_autoencoder.pt', help="trained model path")
    return parser.parse_args(argv)
def split_train_valid(dataset, batch_size, valid_split, shuffle_dataset=True, random_seed=42):
    """Split a dataset into train/validation DataLoaders by index sampling.

    The first ``floor(valid_split * len(dataset))`` (optionally shuffled)
    indices become the validation set; the remainder form the training set.
    Both loaders draw their subset via SubsetRandomSampler.
    """
    n_total = len(dataset)
    idx = list(range(n_total))
    n_valid = int(np.floor(valid_split * n_total))
    if shuffle_dataset:
        # Seeded shuffle keeps the split reproducible across runs.
        np.random.seed(random_seed)
        np.random.shuffle(idx)
    valid_idx, train_idx = idx[:n_valid], idx[n_valid:]

    def _loader(selection):
        return torch.utils.data.DataLoader(dataset, batch_size=batch_size,
                                           sampler=SubsetRandomSampler(selection))

    return _loader(train_idx), _loader(valid_idx)
def induce_latent_dim(h, w, alpha):
    """Return the autoencoder latent dimension as floor(h * w * alpha).

    Args:
        h: input image height in pixels.
        w: input image width in pixels.
        alpha: fraction of the flattened image size to keep as latent dim.

    Returns:
        int: the latent dimension.
    """
    # Debug print removed (original carried a '# TODO - remove print').
    return int(np.floor(h * w * alpha))
def plot_batch(noisy_imgs, outputs, save_dir, fig_name):
    """Save the noisy inputs and the model outputs as '<fig_name>_noisy' /
    '<fig_name>_clean' figures under save_dir (via the plot_imgs util)."""
    plot_imgs(noisy_imgs, save_dir, f'{fig_name}_noisy')
    plot_imgs(outputs, save_dir, f'{fig_name}_clean')
def plot_kernel_map(model, input, e, save_dir):
    """Save a 4x8 grayscale grid of the model's conv1 kernels for epoch e.

    Assumes model.conv1 has (at least) 32 output channels so the 4x8 grid is
    fully populated. Note: `input` shadows the builtin of the same name.
    """
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    # Add a batch dimension and run a forward pass; the output itself is
    # unused — only the (already trained) conv1 weights are plotted.
    input = torch.unsqueeze(input, 0)
    output = model(input)
    kernels = model.conv1.weight.detach()
    fig, axarr = plt.subplots(4, 8)
    i = 0
    for row in range(4):
        for ax in range(8):
            axarr[row][ax].imshow(kernels[i].squeeze(), cmap='gray')
            axarr[row][ax].get_xaxis().set_visible(False)
            axarr[row][ax].get_yaxis().set_visible(False)
            i += 1
    # 'kernal' is a typo in the output file name, kept as-is for
    # compatibility with anything consuming these files.
    fig.savefig(pathlib.Path(save_dir, f'kernal_conv1_epoch_{e}.png'))
def run_epoch(model, optimizer, criterion, train_loader, cnfg, e, plot_imgs):
    """Train the denoiser for one epoch; optionally plot images/kernels.

    Args:
        model: the DeNoiser network.
        optimizer: its optimizer.
        criterion: reconstruction loss comparing output to the clean image.
        train_loader: DataLoader of (images, labels); labels are ignored.
        cnfg: parsed CLI namespace (noise_var, plot_imgs, plot_kernels, save_dir).
        e: epoch index, used to name the saved figures.
        plot_imgs: fixed image batch used for the per-epoch plots
            (NOTE: this parameter shadows the imported plot_imgs util).

    Returns:
        float: the epoch's accumulated training loss (see NOTE below).
    """
    train_loss = 0.0
    pbar = tqdm(train_loader)
    for data in pbar:
        images, _ = data
        # Train on noisy inputs, regress toward the clean originals.
        noisy_imgs = add_noise(images, cnfg.noise_var)
        optimizer.zero_grad()
        outputs = model(noisy_imgs)
        loss = criterion(outputs, images)
        loss.backward()
        optimizer.step()
        train_loss += loss.item() * images.size(0)
    # NOTE(review): the sum is weighted by batch size but divided by the
    # number of batches, so this is batch_size * mean-loss rather than a
    # plain mean — consistent with validate() below, but worth confirming.
    train_loss = train_loss / len(train_loader)
    if cnfg.plot_imgs:
        noisy_imgs_plot = add_noise(plot_imgs, cnfg.noise_var)
        outputs_plot = model(noisy_imgs_plot)
        plot_batch(noisy_imgs_plot, outputs_plot, cnfg.save_dir, f'epoch_{e}')
    if cnfg.plot_kernels:
        plot_kernel_map(model, plot_imgs[0], e, cnfg.save_dir)
    return train_loss
def validate(model, eval_loader, cnfg):
    """Evaluate reconstruction MSE on eval_loader (noisy in, clean target).

    Always uses MSELoss regardless of the training criterion, and leaves the
    model in eval mode (run_epoch does not switch it back to train mode).

    Returns:
        float: accumulated loss divided by the number of batches — same
        weighting quirk as run_epoch (sum weighted by batch size).
    """
    criterion = nn.MSELoss()
    eval_loss = 0.0
    with torch.no_grad():
        model.eval()
        pbar = tqdm(eval_loader)
        for data in pbar:
            images, _ = data
            noisy_imgs = add_noise(images, cnfg.noise_var)
            outputs = model(noisy_imgs)
            loss = criterion(outputs, images)
            eval_loss += loss.item() * images.size(0)
        eval_loss = eval_loss / len(eval_loader)
    return eval_loss
def plot_epochs_loss(train_losses, valid_losses):
    """Plot train/valid loss per epoch and save it to 'plot_epochs.png'
    in the current working directory."""
    fig, ax = plt.subplots(constrained_layout=True)
    ax.plot(range(len(train_losses)), train_losses, label="train_loss")
    ax.plot(range(len(valid_losses)), valid_losses, label="valid_loss")
    ax.set_xlabel('epochs')
    ax.set_ylabel(r'MSE loss')
    plt.title('Train / Valid Loss per epoch')
    plt.legend()
    fig.savefig(f'plot_epochs.png')
def train(cnfg):
    """Train the denoising autoencoder on MNIST and return the trained model.

    Downloads MNIST if needed, splits it into train/validation loaders,
    derives the latent dimension from the image size and cnfg.alpha, trains
    for cnfg.epochs epochs, plots the loss curves, and saves the whole model
    object to cnfg.model_file.
    """
    train_data = datasets.MNIST(root='data', train=True, download=True, transform=transforms.ToTensor())
    train_loader, valid_loader = split_train_valid(train_data, cnfg.batch_size, valid_split=cnfg.valid_split,
                                                   shuffle_dataset=True, random_seed=42)
    # Infer H and W from the first batch: shape is (batch, channels, H, W).
    orig_h, orig_w = next(iter(train_loader))[0].shape[2], next(iter(train_loader))[0].shape[3]
    model = DeNoiser(induce_latent_dim(orig_h, orig_w, cnfg.alpha))
    # Any value other than 'mse' falls back to BCE.
    if cnfg.loss == 'mse':
        criterion = nn.MSELoss()
    else:
        criterion = nn.BCELoss()
    optimizer = torch.optim.Adam(model.parameters(), cnfg.lr)
    # A fixed batch reused every epoch for the optional progress plots.
    batch_imgs_plot = next(iter(train_loader))[0]
    train_losses, valid_losses = list(), list()
    for e in range(1, cnfg.epochs + 1):
        train_loss = run_epoch(model, optimizer, criterion, train_loader, cnfg, e, batch_imgs_plot)
        train_losses.append(train_loss)
        print('Epoch: {}'.format(e),
              '\tTraining Loss: {:.4f}'.format(train_loss))
        valid_loss = validate(model, valid_loader, cnfg)
        print('Epoch: {}'.format(e),
              '\tValidation Loss: {:.4f}'.format(valid_loss))
        valid_losses.append(valid_loss)
    plot_epochs_loss(train_losses, valid_losses)
    # Saves the full model object (not just the state_dict).
    torch.save(model, cnfg.model_file)
    return model
def main():
    """Entry point: parse CLI args, train on MNIST, then report test loss."""
    args = parse_args()
    model = train(args)
    test_data = datasets.MNIST(root='data', train=False, download=True, transform=transforms.ToTensor())
    test_loader = torch.utils.data.DataLoader(test_data, batch_size=args.batch_size)
    test_loss = validate(model, test_loader, args)
    print(f'Test reconstruction Loss: {test_loss}')
if __name__ == '__main__':
main()
| 38.010929
| 120
| 0.670357
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.