code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from django.test import TestCase
from billing.utils.credit_card import CreditCard
from billing import get_gateway, GatewayNotConfigured, get_integration, IntegrationNotConfigured
from django.conf import settings
from django.template import Template, Context, TemplateSyntaxError
class MerchantTestCase(TestCase):
def testCorrectClassLoading(self):
gateway = get_gateway("authorize_net")
self.assertEquals(gateway.display_name, "Authorize.Net")
def testSettingAttributes(self):
self.assertTrue(getattr(settings, "MERCHANT_SETTINGS", None) != None)
self.assertTrue(isinstance(settings.MERCHANT_SETTINGS, dict))
def testRaiseExceptionNotConfigured(self):
original_settings = settings.MERCHANT_SETTINGS
settings.MERCHANT_SETTINGS = {
"google_checkout": {
"MERCHANT_ID": '',
"MERCHANT_KEY": ''
}
}
# Test if we can import any other gateway or integration
self.assertRaises(IntegrationNotConfigured, lambda: get_integration("stripe"))
self.assertRaises(GatewayNotConfigured, lambda: get_gateway("authorize_net"))
settings.MERCHANT_SETTINGS = original_settings
def testTemplateTagLoad(self):
original_settings = settings.MERCHANT_SETTINGS
settings.MERCHANT_SETTINGS = {
"google_checkout": {
"MERCHANT_ID": '',
"MERCHANT_KEY": ''
}
}
# Raises TemplateSyntaxError: Invalid Block Tag
self.assertRaises(TemplateSyntaxError, lambda: Template("{% load render_integration from billing_tags %}{% stripe obj %}"))
tmpl = Template("{% load render_integration from billing_tags %}{% render_integration obj %}")
gc = get_integration("google_checkout")
fields = {"items": [{
"name": "name of the item",
"description": "Item description",
"amount": 1,
"id": "999AXZ",
"currency": "USD",
"quantity": 1,
}],
"return_url": "http://127.0.0.1:8000/offsite/google-checkout/",
}
gc.add_fields(fields)
self.assertTrue(len(tmpl.render(Context({"obj": gc}))) > 0)
settings.MERCHANT_SETTINGS = original_settings
| [
"billing.get_gateway",
"django.template.Template",
"billing.get_integration",
"django.template.Context"
] | [((372, 400), 'billing.get_gateway', 'get_gateway', (['"""authorize_net"""'], {}), "('authorize_net')\n", (383, 400), False, 'from billing import get_gateway, GatewayNotConfigured, get_integration, IntegrationNotConfigured\n'), ((1693, 1790), 'django.template.Template', 'Template', (['"""{% load render_integration from billing_tags %}{% render_integration obj %}"""'], {}), "(\n '{% load render_integration from billing_tags %}{% render_integration obj %}'\n )\n", (1701, 1790), False, 'from django.template import Template, Context, TemplateSyntaxError\n'), ((1794, 1828), 'billing.get_integration', 'get_integration', (['"""google_checkout"""'], {}), "('google_checkout')\n", (1809, 1828), False, 'from billing import get_gateway, GatewayNotConfigured, get_integration, IntegrationNotConfigured\n'), ((1055, 1080), 'billing.get_integration', 'get_integration', (['"""stripe"""'], {}), "('stripe')\n", (1070, 1080), False, 'from billing import get_gateway, GatewayNotConfigured, get_integration, IntegrationNotConfigured\n'), ((1138, 1166), 'billing.get_gateway', 'get_gateway', (['"""authorize_net"""'], {}), "('authorize_net')\n", (1149, 1166), False, 'from billing import get_gateway, GatewayNotConfigured, get_integration, IntegrationNotConfigured\n'), ((1600, 1675), 'django.template.Template', 'Template', (['"""{% load render_integration from billing_tags %}{% stripe obj %}"""'], {}), "('{% load render_integration from billing_tags %}{% stripe obj %}')\n", (1608, 1675), False, 'from django.template import Template, Context, TemplateSyntaxError\n'), ((2301, 2321), 'django.template.Context', 'Context', (["{'obj': gc}"], {}), "({'obj': gc})\n", (2308, 2321), False, 'from django.template import Template, Context, TemplateSyntaxError\n')] |
#!/usr/bin/env python
# -*- encode: utf-8 -*-
#Copyright 2015 RAPP
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# Authors: <NAME>
# contact: <EMAIL>, g<EMAIL>}
import rospy
import sys
import time
import hashlib
import threading
import os.path
import rospkg
from speech_recognition_sphinx4 import *
from rapp_utilities import RappUtilities
from rapp_platform_ros_communications.srv import (
SpeechRecognitionSphinx4TotalSrv,
SpeechRecognitionSphinx4TotalSrvResponse
)
## @class SpeechRecognitionSphinx4HandlerNode
# @brief Maintains Sphinx instances to perform speech recognition
#
# Maintains a number of child processes to perform speech recognition utilizing
# Sphinx4
# (rapp_speech_detection_sphinx4.speech_recognition_sphinx4.SpeechRecognitionSphinx4).
# Provides ros services and handles the requests according to the child
# processes' status.
class SpeechRecognitionSphinx4HandlerNode():
## @brief Initializes the subprocesses and the services (constructor)
def __init__(self):
## The number of child subprocesses.
self._threads = \
rospy.get_param("rapp_speech_detection_sphinx4_threads")
if rospy.get_param("rapp_speech_detection_sphinx4_preconfigure"):
configurations = self._getPreconfigurationNames()
## The subprocesses structure that contains information used for the
# subprocess handling
self._availableProcesses = [{
'sphinx': SpeechRecognitionSphinx4(configurations[i]), \
'running': False, \
'configuration_hash': 0\
} for i in range(self._threads)]
for proc in self._availableProcesses:
proc['configuration_hash'] = proc['sphinx'].getConfigurationHash()
else:
self._availableProcesses = [{
'sphinx': SpeechRecognitionSphinx4(), \
'running': False, \
'configuration_hash': 0\
} for i in range(self._threads)]
## Thread conditional variable used for the subprocess scheduling
self._lock = threading.Condition()
## Total service callback threads waiting to execute
self._threadCounter = 0
serv_batch_topic = \
rospy.get_param("rapp_speech_detection_sphinx4_total_topic")
if(not serv_batch_topic):
rospy.logerror("Sphinx4 Speech detection batch topic param not found")
## Ros service server for sphinx speech recognition
self._speech_recognition_batch_service = rospy.Service( \
serv_batch_topic, SpeechRecognitionSphinx4TotalSrv, \
self.handleSpeechRecognitionCallback)
## @brief Specifies the requested preconfiguration names
#
# Reads and creates a matrix with the configuration name requested from
# rapp_speech_detection_sphinx4::cfg::sphinx4_wrapper_params.yaml
#
# @return preconf [ list<string> ] The preconfiguration names for all subprocesses
def _getPreconfigurationNames(self):
preconf = []
RappUtilities.rapp_print( 'Fetcing preconfiguration names' )
# Get number of requested preconfigurations
confNumber = \
rospy.get_param("rapp_speech_detection_sphinx4_preconfigure_number")
# Too many configurations
if confNumber > self._threads:
RappUtilities.rapp_print("Sphinx preconfigurations requested exceed " + \
"Sphinx processes. Truncating", 'WARN')
confNumber = self._threads
# Check actual unique configurations provided
if rospy.has_param("rapp_speech_detection_sphinx4_preconfiguration"):
confDict = \
rospy.get_param("rapp_speech_detection_sphinx4_preconfiguration")
uniqueConfigurations = len( confDict )
if uniqueConfigurations > confNumber:
uniqueConfigurations = confNumber
else:
RappUtilities.rapp_print("Preconfigurations requested, but none was " + \
"provided", 'ERROR')
for it in range(self._threads):
preconf.append(None)
return preconf
for confIter in range(confNumber):
preconf.append(confDict[ str(confIter % uniqueConfigurations) ])
for it in range(self._threads - confNumber):
preconf.append(None)
RappUtilities.rapp_print(str(preconf), 'DEBUG')
return preconf
## @brief The callback to perform speech recognition
#
# @param req [rapp_platform_ros_communications::SpeechDetectionSphinx4Wrapper::SpeechRecognitionSphinx4TotalSrvRequest] The service request
# @return res [rapp_platform_ros_communications::SpeechDetectionSphinx4Wrapper::SpeechRecognitionSphinx4TotalSrvResponse] The service response
def handleSpeechRecognitionCallback(self, req):
RappUtilities.rapp_print("Received service request", 'DEBUG')
res = SpeechRecognitionSphinx4TotalSrvResponse()
request_hash = self._calculateRequestHash( req )
self._lock.acquire()
self._threadCounter += 1
if self._threadCounter > self._threads:
self._lock.wait()
# Search for available Sphinx with similar configuration
for proc in self._availableProcesses:
if proc['running'] == False and \
proc['configuration_hash'] == request_hash:
RappUtilities.rapp_print("Found Sphinx process with same configuration",\
'DEBUG')
proc['running'] = True
self._lock.release()
res = proc['sphinx'].speechRecognitionBatch( req )
self._lock.acquire()
proc['running'] = False
self._threadCounter -= 1
if self._threadCounter >= self._threads:
self._lock.notify()
self._lock.release()
return res
# Search for available Sphinx
for proc in self._availableProcesses:
if proc['running'] == False:
proc['configuration_hash'] = request_hash
proc['running'] = True
RappUtilities.rapp_print("Found Sphinx process", 'DEBUG')
self._lock.release()
res = proc['sphinx'].speechRecognitionBatch( req )
self._lock.acquire()
proc['running'] = False
self._threadCounter -= 1
if self._threadCounter >= self._threads:
self._lock.notify()
self._lock.release()
return res
## @brief Calculates the service request sha1 hash for process handling purposes
#
# Hash is used to identify common request configurations for proper subprocess selection.
# (Requests with common requests do not require reconfiguration reducing computation time)
#
# @param req [rapp_platform_ros_communications::SpeechDetectionSphinx4Wrapper::SpeechRecognitionSphinx4TotalSrvRequest] The service request
#
# @return hexdigest [string] The hash digest containing only hexadecimal digits
def _calculateRequestHash(self, req):
hash_object = hashlib.sha1()
hash_object.update( req.language )
for word in req.words:
hash_object.update( word )
for gram in req.grammar:
hash_object.update( gram )
for sent in req.sentences:
hash_object.update( sent )
return hash_object.hexdigest()
if __name__ == "__main__":
rospy.init_node('SpeechRecognitionSphinx4')
rospack = rospkg.RosPack()
sphinx_class = rospack.get_path('rapp_speech_detection_sphinx4') + \
"/src/Sphinx4.class"
if not os.path.isfile(sphinx_class):
rospy.logerr("speech_recognition_sphinx4_handler_node: Sphinx.class file is missing. You can execute 'buildJava.sh'")
else:
SpeechRecognitionSphinx4HandlerNode = SpeechRecognitionSphinx4HandlerNode()
RappUtilities.rapp_print("Sphinx4 Handler node initialized", 'DEBUG')
rospy.spin()
| [
"rospy.logerr",
"rapp_utilities.RappUtilities.rapp_print",
"rospy.logerror",
"rospy.init_node",
"rospy.get_param",
"rospy.Service",
"rospy.has_param",
"rospkg.RosPack",
"rapp_platform_ros_communications.srv.SpeechRecognitionSphinx4TotalSrvResponse",
"rospy.spin",
"threading.Condition",
"hashli... | [((7395, 7438), 'rospy.init_node', 'rospy.init_node', (['"""SpeechRecognitionSphinx4"""'], {}), "('SpeechRecognitionSphinx4')\n", (7410, 7438), False, 'import rospy\n'), ((7454, 7470), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (7468, 7470), False, 'import rospkg\n'), ((1566, 1622), 'rospy.get_param', 'rospy.get_param', (['"""rapp_speech_detection_sphinx4_threads"""'], {}), "('rapp_speech_detection_sphinx4_threads')\n", (1581, 1622), False, 'import rospy\n'), ((1631, 1692), 'rospy.get_param', 'rospy.get_param', (['"""rapp_speech_detection_sphinx4_preconfigure"""'], {}), "('rapp_speech_detection_sphinx4_preconfigure')\n", (1646, 1692), False, 'import rospy\n'), ((2459, 2480), 'threading.Condition', 'threading.Condition', ([], {}), '()\n', (2478, 2480), False, 'import threading\n'), ((2600, 2660), 'rospy.get_param', 'rospy.get_param', (['"""rapp_speech_detection_sphinx4_total_topic"""'], {}), "('rapp_speech_detection_sphinx4_total_topic')\n", (2615, 2660), False, 'import rospy\n'), ((2870, 2978), 'rospy.Service', 'rospy.Service', (['serv_batch_topic', 'SpeechRecognitionSphinx4TotalSrv', 'self.handleSpeechRecognitionCallback'], {}), '(serv_batch_topic, SpeechRecognitionSphinx4TotalSrv, self.\n handleSpeechRecognitionCallback)\n', (2883, 2978), False, 'import rospy\n'), ((3352, 3410), 'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (['"""Fetcing preconfiguration names"""'], {}), "('Fetcing preconfiguration names')\n", (3376, 3410), False, 'from rapp_utilities import RappUtilities\n'), ((3487, 3555), 'rospy.get_param', 'rospy.get_param', (['"""rapp_speech_detection_sphinx4_preconfigure_number"""'], {}), "('rapp_speech_detection_sphinx4_preconfigure_number')\n", (3502, 3555), False, 'import rospy\n'), ((3843, 3908), 'rospy.has_param', 'rospy.has_param', (['"""rapp_speech_detection_sphinx4_preconfiguration"""'], {}), "('rapp_speech_detection_sphinx4_preconfiguration')\n", (3858, 3908), False, 'import rospy\n'), ((5019, 5080), 
'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (['"""Received service request"""', '"""DEBUG"""'], {}), "('Received service request', 'DEBUG')\n", (5043, 5080), False, 'from rapp_utilities import RappUtilities\n'), ((5091, 5133), 'rapp_platform_ros_communications.srv.SpeechRecognitionSphinx4TotalSrvResponse', 'SpeechRecognitionSphinx4TotalSrvResponse', ([], {}), '()\n', (5131, 5133), False, 'from rapp_platform_ros_communications.srv import SpeechRecognitionSphinx4TotalSrv, SpeechRecognitionSphinx4TotalSrvResponse\n'), ((7089, 7103), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (7101, 7103), False, 'import hashlib\n'), ((7619, 7746), 'rospy.logerr', 'rospy.logerr', (['"""speech_recognition_sphinx4_handler_node: Sphinx.class file is missing. You can execute \'buildJava.sh\'"""'], {}), '(\n "speech_recognition_sphinx4_handler_node: Sphinx.class file is missing. You can execute \'buildJava.sh\'"\n )\n', (7631, 7746), False, 'import rospy\n'), ((7829, 7898), 'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (['"""Sphinx4 Handler node initialized"""', '"""DEBUG"""'], {}), "('Sphinx4 Handler node initialized', 'DEBUG')\n", (7853, 7898), False, 'from rapp_utilities import RappUtilities\n'), ((7903, 7915), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (7913, 7915), False, 'import rospy\n'), ((2697, 2767), 'rospy.logerror', 'rospy.logerror', (['"""Sphinx4 Speech detection batch topic param not found"""'], {}), "('Sphinx4 Speech detection batch topic param not found')\n", (2711, 2767), False, 'import rospy\n'), ((3628, 3743), 'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (["('Sphinx preconfigurations requested exceed ' + 'Sphinx processes. Truncating')", '"""WARN"""'], {}), "('Sphinx preconfigurations requested exceed ' +\n 'Sphinx processes. 
Truncating', 'WARN')\n", (3652, 3743), False, 'from rapp_utilities import RappUtilities\n'), ((3943, 4008), 'rospy.get_param', 'rospy.get_param', (['"""rapp_speech_detection_sphinx4_preconfiguration"""'], {}), "('rapp_speech_detection_sphinx4_preconfiguration')\n", (3958, 4008), False, 'import rospy\n'), ((4162, 4258), 'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (["('Preconfigurations requested, but none was ' + 'provided')", '"""ERROR"""'], {}), "('Preconfigurations requested, but none was ' +\n 'provided', 'ERROR')\n", (4186, 4258), False, 'from rapp_utilities import RappUtilities\n'), ((5517, 5602), 'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (['"""Found Sphinx process with same configuration"""', '"""DEBUG"""'], {}), "('Found Sphinx process with same configuration',\n 'DEBUG')\n", (5541, 5602), False, 'from rapp_utilities import RappUtilities\n'), ((6157, 6214), 'rapp_utilities.RappUtilities.rapp_print', 'RappUtilities.rapp_print', (['"""Found Sphinx process"""', '"""DEBUG"""'], {}), "('Found Sphinx process', 'DEBUG')\n", (6181, 6214), False, 'from rapp_utilities import RappUtilities\n')] |
# coding=utf-8
# Copyright 2020 Microsoft and the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fast Tokenization class for model DeBERTa."""
import os
from shutil import copyfile
from typing import Optional, Tuple
from ...file_utils import is_sentencepiece_available
from ...tokenization_utils_fast import PreTrainedTokenizerFast
from ...utils import logging
if is_sentencepiece_available():
from .tokenization_deberta_v2 import DebertaV2Tokenizer
else:
DebertaV2Tokenizer = None
logger = logging.get_logger(__name__)
VOCAB_FILES_NAMES = {"vocab_file": "spm.model", "tokenizer_file": "tokenizer.json"}
PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"microsoft/deberta-v2-xlarge": "https://huggingface.co/microsoft/deberta-v2-xlarge/resolve/main/spm.model",
"microsoft/deberta-v2-xxlarge": "https://huggingface.co/microsoft/deberta-v2-xxlarge/resolve/main/spm.model",
"microsoft/deberta-v2-xlarge-mnli": "https://huggingface.co/microsoft/deberta-v2-xlarge-mnli/resolve/main/spm.model",
"microsoft/deberta-v2-xxlarge-mnli": "https://huggingface.co/microsoft/deberta-v2-xxlarge-mnli/resolve/main/spm.model",
}
}
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"microsoft/deberta-v2-xlarge": 512,
"microsoft/deberta-v2-xxlarge": 512,
"microsoft/deberta-v2-xlarge-mnli": 512,
"microsoft/deberta-v2-xxlarge-mnli": 512,
}
PRETRAINED_INIT_CONFIGURATION = {
"microsoft/deberta-v2-xlarge": {"do_lower_case": False},
"microsoft/deberta-v2-xxlarge": {"do_lower_case": False},
"microsoft/deberta-v2-xlarge-mnli": {"do_lower_case": False},
"microsoft/deberta-v2-xxlarge-mnli": {"do_lower_case": False},
}
class DebertaV2TokenizerFast(PreTrainedTokenizerFast):
r"""
Constructs a DeBERTa-v2 fast tokenizer. Based on [SentencePiece](https://github.com/google/sentencepiece).
Args:
vocab_file (`str`):
[SentencePiece](https://github.com/google/sentencepiece) file (generally has a *.spm* extension) that
contains the vocabulary necessary to instantiate a tokenizer.
do_lower_case (`bool`, *optional*, defaults to `False`):
Whether or not to lowercase the input when tokenizing.
bos_token (`string`, *optional*, defaults to `"[CLS]"`):
The beginning of sequence token that was used during pre-training. Can be used a sequence classifier token.
When building a sequence using special tokens, this is not the token that is used for the beginning of
sequence. The token used is the `cls_token`.
eos_token (`string`, *optional*, defaults to `"[SEP]"`):
The end of sequence token. When building a sequence using special tokens, this is not the token that is
used for the end of sequence. The token used is the `sep_token`.
unk_token (`str`, *optional*, defaults to `"[UNK]"`):
The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
token instead.
sep_token (`str`, *optional*, defaults to `"[SEP]"`):
The separator token, which is used when building a sequence from multiple sequences, e.g. two sequences for
sequence classification or for a text and a question for question answering. It is also used as the last
token of a sequence built with special tokens.
pad_token (`str`, *optional*, defaults to `"[PAD]"`):
The token used for padding, for example when batching sequences of different lengths.
cls_token (`str`, *optional*, defaults to `"[CLS]"`):
The classifier token which is used when doing sequence classification (classification of the whole sequence
instead of per-token classification). It is the first token of the sequence when built with special tokens.
mask_token (`str`, *optional*, defaults to `"[MASK]"`):
The token used for masking values. This is the token used when training this model with masked language
modeling. This is the token which the model will try to predict.
sp_model_kwargs (`dict`, *optional*):
Will be passed to the `SentencePieceProcessor.__init__()` method. The [Python wrapper for
SentencePiece](https://github.com/google/sentencepiece/tree/master/python) can be used, among other things,
to set:
- `enable_sampling`: Enable subword regularization.
- `nbest_size`: Sampling parameters for unigram. Invalid for BPE-Dropout.
- `nbest_size = {0,1}`: No sampling is performed.
- `nbest_size > 1`: samples from the nbest_size results.
- `nbest_size < 0`: assuming that nbest_size is infinite and samples from the all hypothesis (lattice)
using forward-filtering-and-backward-sampling algorithm.
- `alpha`: Smoothing parameter for unigram sampling, and dropout probability of merge operations for
BPE-dropout.
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
slow_tokenizer_class = DebertaV2Tokenizer
def __init__(
self,
vocab_file=None,
tokenizer_file=None,
do_lower_case=False,
split_by_punct=False,
bos_token="[CLS]",
eos_token="[SEP]",
unk_token="[UNK]",
sep_token="[SEP]",
pad_token="[PAD]",
cls_token="[CLS]",
mask_token="[MASK]",
**kwargs
) -> None:
super().__init__(
vocab_file,
tokenizer_file=tokenizer_file,
do_lower_case=do_lower_case,
bos_token=bos_token,
eos_token=eos_token,
unk_token=unk_token,
sep_token=sep_token,
pad_token=pad_token,
cls_token=cls_token,
mask_token=mask_token,
split_by_punct=split_by_punct,
**kwargs,
)
self.do_lower_case = do_lower_case
self.split_by_punct = split_by_punct
self.vocab_file = vocab_file
self.can_save_slow_tokenizer = False if not self.vocab_file else True
def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
"""
Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
adding special tokens. A DeBERTa sequence has the following format:
- single sequence: [CLS] X [SEP]
- pair of sequences: [CLS] A [SEP] B [SEP]
Args:
token_ids_0 (`List[int]`):
List of IDs to which the special tokens will be added.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
Returns:
`List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
"""
if token_ids_1 is None:
return [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
cls = [self.cls_token_id]
sep = [self.sep_token_id]
return cls + token_ids_0 + sep + token_ids_1 + sep
def get_special_tokens_mask(self, token_ids_0, token_ids_1=None, already_has_special_tokens=False):
"""
Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding
special tokens using the tokenizer `prepare_for_model` or `encode_plus` methods.
Args:
token_ids_0 (`List[int]`):
List of IDs.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
already_has_special_tokens (`bool`, *optional*, defaults to `False`):
Whether or not the token list is already formatted with special tokens for the model.
Returns:
`List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
"""
if already_has_special_tokens:
return super().get_special_tokens_mask(
token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True
)
if token_ids_1 is not None:
return [1] + ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1]
return [1] + ([0] * len(token_ids_0)) + [1]
def create_token_type_ids_from_sequences(self, token_ids_0, token_ids_1=None):
"""
Create a mask from the two sequences passed to be used in a sequence-pair classification task. A DeBERTa
sequence pair mask has the following format:
```
0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
| first sequence | second sequence |
```
If `token_ids_1` is `None`, this method only returns the first portion of the mask (0s).
Args:
token_ids_0 (`List[int]`):
List of IDs.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
Returns:
`List[int]`: List of [token type IDs](../glossary#token-type-ids) according to the given sequence(s).
"""
sep = [self.sep_token_id]
cls = [self.cls_token_id]
if token_ids_1 is None:
return len(cls + token_ids_0 + sep) * [0]
return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1]
def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
if not self.can_save_slow_tokenizer:
raise ValueError(
"Your fast tokenizer does not have the necessary information to save the vocabulary for a slow "
"tokenizer."
)
if not os.path.isdir(save_directory):
logger.error(f"Vocabulary path ({save_directory}) should be a directory")
return
out_vocab_file = os.path.join(
save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
)
if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file):
copyfile(self.vocab_file, out_vocab_file)
return (out_vocab_file,)
| [
"os.path.abspath",
"shutil.copyfile",
"os.path.isdir",
"os.path.join"
] | [((10642, 10760), 'os.path.join', 'os.path.join', (['save_directory', "((filename_prefix + '-' if filename_prefix else '') + VOCAB_FILES_NAMES[\n 'vocab_file'])"], {}), "(save_directory, (filename_prefix + '-' if filename_prefix else\n '') + VOCAB_FILES_NAMES['vocab_file'])\n", (10654, 10760), False, 'import os\n'), ((10481, 10510), 'os.path.isdir', 'os.path.isdir', (['save_directory'], {}), '(save_directory)\n', (10494, 10510), False, 'import os\n'), ((10791, 10823), 'os.path.abspath', 'os.path.abspath', (['self.vocab_file'], {}), '(self.vocab_file)\n', (10806, 10823), False, 'import os\n'), ((10827, 10858), 'os.path.abspath', 'os.path.abspath', (['out_vocab_file'], {}), '(out_vocab_file)\n', (10842, 10858), False, 'import os\n'), ((10872, 10913), 'shutil.copyfile', 'copyfile', (['self.vocab_file', 'out_vocab_file'], {}), '(self.vocab_file, out_vocab_file)\n', (10880, 10913), False, 'from shutil import copyfile\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-07-03 13:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('users', '0005_auto_20170703_1614'),
]
operations = [
migrations.AlterModelOptions(
name='position',
options={'ordering': ['name'], 'verbose_name': 'Job Positions', 'verbose_name_plural': 'Job Positions'},
),
migrations.AlterField(
model_name='department',
name='parent',
field=mptt.fields.TreeForeignKey(blank=True, help_text='The parent department', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='children', to='users.Department', verbose_name='Parent'),
),
migrations.AlterField(
model_name='position',
name='department',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='users.Department', verbose_name='Department'),
),
migrations.AlterField(
model_name='position',
name='parent',
field=mptt.fields.TreeForeignKey(blank=True, help_text='The parent Job Position', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='children', to='users.Position', verbose_name='Reports To'),
),
]
| [
"django.db.migrations.AlterModelOptions",
"django.db.models.ForeignKey"
] | [((350, 504), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""position"""', 'options': "{'ordering': ['name'], 'verbose_name': 'Job Positions',\n 'verbose_name_plural': 'Job Positions'}"}), "(name='position', options={'ordering': ['name'],\n 'verbose_name': 'Job Positions', 'verbose_name_plural': 'Job Positions'})\n", (378, 504), False, 'from django.db import migrations, models\n'), ((978, 1095), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""users.Department"""', 'verbose_name': '"""Department"""'}), "(on_delete=django.db.models.deletion.PROTECT, to=\n 'users.Department', verbose_name='Department')\n", (995, 1095), False, 'from django.db import migrations, models\n')] |
from PPPForgivenessSDK.client import Client
# to run file 'list_forgiveness_messages.py', use valid token (page parameter can be changed )
client = Client(
access_token='{{YOUR_TOKEN_HERE}}',
vendor_key='{{YOUR_VENDOR_KEY}}',
environment='sandbox'
)
lookup_api = client.validations
result = lookup_api.list(sba_number="{{SBA_NUMBER}}")
if result['status'] == 200:
print(result['data'])
else:
print("An error occurred." + str(result['status']))
print(result['data'])
| [
"PPPForgivenessSDK.client.Client"
] | [((149, 252), 'PPPForgivenessSDK.client.Client', 'Client', ([], {'access_token': '"""{{YOUR_TOKEN_HERE}}"""', 'vendor_key': '"""{{YOUR_VENDOR_KEY}}"""', 'environment': '"""sandbox"""'}), "(access_token='{{YOUR_TOKEN_HERE}}', vendor_key='{{YOUR_VENDOR_KEY}}',\n environment='sandbox')\n", (155, 252), False, 'from PPPForgivenessSDK.client import Client\n')] |
"""Test cases for STEREO Map subclasses.
This particular test file pertains to EUVIMap.
@Author: <NAME>. (VaticanCameos)
"""
import os
import glob
from sunpy.map.sources.stereo import EUVIMap
from sunpy.map import Map
from sunpy.sun import sun
import sunpy.data.test
path = sunpy.data.test.rootdir
fitspath = glob.glob(os.path.join(path, "euvi_20090615_000900_n4euA_s.fts"))
euvi = Map(fitspath)
# EUVI Tests
def test_fitstoEIT():
"""Tests the creation of EUVIMap using FITS."""
assert isinstance(euvi, EUVIMap)
def test_is_datasource_for():
"""Test the is_datasource_for method of EUVIMap.
Note that header data to be provided as an argument
can be a MetaDict object."""
assert euvi.is_datasource_for(euvi.data, euvi.meta)
def test_measurement():
"""Tests the measurement property of the EUVIMap object."""
assert euvi.measurement.value == 171
def test_observatory():
"""Tests the observatory property of the EUVIMap object."""
assert euvi.observatory == "STEREO A"
def test_rsun_obs():
"""Tests the rsun_obs property"""
assert euvi.rsun_obs.value == euvi.meta['rsun']
def test_rsun_missing():
"""Tests output if 'rsun' is missing"""
euvi_no_rsun = Map(fitspath)
euvi_no_rsun.meta['rsun'] = None
assert euvi_no_rsun.rsun_obs.value == sun.solar_semidiameter_angular_size(euvi.date).to('arcsec').value
| [
"sunpy.sun.sun.solar_semidiameter_angular_size",
"os.path.join",
"sunpy.map.Map"
] | [((385, 398), 'sunpy.map.Map', 'Map', (['fitspath'], {}), '(fitspath)\n', (388, 398), False, 'from sunpy.map import Map\n'), ((322, 376), 'os.path.join', 'os.path.join', (['path', '"""euvi_20090615_000900_n4euA_s.fts"""'], {}), "(path, 'euvi_20090615_000900_n4euA_s.fts')\n", (334, 376), False, 'import os\n'), ((1215, 1228), 'sunpy.map.Map', 'Map', (['fitspath'], {}), '(fitspath)\n', (1218, 1228), False, 'from sunpy.map import Map\n'), ((1308, 1354), 'sunpy.sun.sun.solar_semidiameter_angular_size', 'sun.solar_semidiameter_angular_size', (['euvi.date'], {}), '(euvi.date)\n', (1343, 1354), False, 'from sunpy.sun import sun\n')] |
"""show_spanning_tree.py
supported commands:
* show spanning-tree detail
* show spanning-tree mst detail
* show spanning-tree summary
* show errdisable recovery
* show spanning-tree
* show spanning-tree mst <WORD>
* show spanning-tree vlan <WORD>
* show spanning-tree mst configuration
"""
import re
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, \
Any, \
Optional, \
Or, \
And, \
Default, \
Use
# import parser utils
from genie.libs.parser.utils.common import Common
class ShowSpanningTreeSummarySchema(MetaParser):
    """Schema for show spanning-tree summary"""

    schema = {
        # Global STP feature flags reported in the summary header.  Lines
        # absent from a platform's output are simply not populated, hence
        # the Optional() wrappers.
        Optional('etherchannel_misconfig_guard'): bool,
        Optional('extended_system_id'): bool,
        Optional('portfast_default'): bool,
        'bpdu_guard': bool,
        Optional('bpdu_filter'): bool,
        Optional('bridge_assurance'): bool,
        Optional('loop_guard'): bool,
        'uplink_fast': bool,
        'backbone_fast': bool,
        # comma-separated list of instances/vlans this switch is root for
        Optional('root_bridge_for'): str,
        Optional('pvst_simulation'): bool,
        Optional('pvst_simulation_status'): str,
        Optional('platform_pvst_simulation'): bool,
        Optional("configured_pathcost"): {
            'method': str,
            Optional('operational_value'): str,
        },
        # per-instance state counters, keyed by STP mode then instance id
        Optional('mode'): {
            Any(): {  # mstp, pvst, rapid_pvst
                Any(): {  # <mst_domain>, <pvst_id>
                    'blocking': int,
                    'listening': int,
                    'learning': int,
                    'forwarding': int,
                    'stp_active': int,
                }
            }
        },
        # aggregate counters from the "Total" / "N msts|vlans" rows
        'total_statistics': {
            'blockings': int,
            'listenings': int,
            'learnings': int,
            'forwardings': int,
            'stp_actives': int,
            Optional('num_of_msts'): int,
            Optional('num_of_vlans'): int,
        }
    }
class ShowSpanningTreeSummary(ShowSpanningTreeSummarySchema):
    """Parser for show spanning-tree summary"""

    cli_command = 'show spanning-tree summary'

    def cli(self, output=None):
        """Parse ``show spanning-tree summary``.

        Args:
            output: pre-collected command output; when ``None`` the command
                is executed on the connected device.

        Returns:
            dict conforming to ``ShowSpanningTreeSummarySchema`` (empty when
            nothing matched).
        """
        if output is None:
            # get output from device
            out = self.device.execute(self.cli_command)
        else:
            out = output

        # parsed result
        ret_dict = {}

        # Switch is in mst mode (IEEE Standard)
        p1 = re.compile(r'^Switch +is +in +(?P<mode>[\w\-]+) +mode( *\(IEEE +Standard\))?$')
        # Root bridge for: MST0, MST100
        p2 = re.compile(r'^Root +bridge +for: +(?P<root_bridge_for>[\w\-\,\s]+).?$')
        # <feature> is enabled|disabled [but active|inactive in rapid-pvst mode]
        p3 = re.compile(r'^(?P<name>\w+(?: \S+){,5}?) +is +(?P<value>disable|disabled|enabled)'
                        r'(?: +but (?P<simulation_value>active|inactive) +in +rapid-pvst +mode)?$')
        # per-instance statistics row, e.g. "VLAN0100  0  1  0  0  1"
        p4 = re.compile(r'^(?P<id>(?!Total)\w+) +(?P<blocking>\d+) +(?P<listening>\d+)'
                        r' +(?P<learning>\d+) +(?P<forwarding>\d+) +(?P<stp_active>\d+)$')
        # totals row carrying the instance/vlan count, e.g. "5 vlans 0 5 0 0 5"
        p5 = re.compile(r'^(?P<num>\d+) +(msts?|vlans?) +(?P<blockings>\d+) +(?P<listenings>\d+)'
                        r' +(?P<learnings>\d+) +(?P<forwardings>\d+) +(?P<stp_actives>\d+)$')
        # [Configured ]Pathcost method used is short [(Operational value is long)]
        p6 = re.compile(r'^(?:Configured +)?Pathcost +method +used +is '
                        r'+(?P<method>\w+)(?: +\(Operational +value +is +(?P<operational_value>\w+)\))?$')
        # "Total  <blk> <lis> <lrn> <fwd> <act>" row without a leading count
        p7 = re.compile(r'Total +(?P<blockings>\d+) +(?P<listenings>\d+)'
                        r' +(?P<learnings>\d+) +(?P<forwardings>\d+) +(?P<stp_actives>\d+)$')
        # wrapped continuation of the "Root bridge for:" list
        p8 = re.compile(r'^(?P<root_bridge_for>(?:(?:[\w-]+, +)+)?[\w-]+)$')

        # device output label -> schema key
        key_map = {'EtherChannel misconfig guard': 'etherchannel_misconfig_guard',
                   'Extended system ID': 'extended_system_id',
                   'Portfast Default': 'portfast_default',
                   'PortFast BPDU Guard': 'bpdu_guard',
                   'PortFast BPDU Guard Default': 'bpdu_guard',
                   'Portfast Edge BPDU Guard Default': 'bpdu_guard',
                   'Portfast BPDU Filter Default': 'bpdu_filter',
                   'Portfast Edge BPDU Filter Default': 'bpdu_filter',
                   'Loopguard Default': 'loop_guard',
                   'UplinkFast': 'uplink_fast',
                   'Bridge Assurance': 'bridge_assurance',
                   'BackboneFast': 'backbone_fast',
                   'PVST Simulation': 'pvst_simulation',
                   'Platform PVST Simulation': 'platform_pvst_simulation'}

        for line in out.splitlines():
            line = line.strip()

            # Switch is in mst mode (IEEE Standard)
            m = p1.match(line)
            if m:
                # remembered for the per-instance statistics rows (p4)
                mode = m.groupdict()['mode'].replace('-', '_')
                continue

            # Root bridge for: MST0, MST100
            m = p2.match(line)
            if m:
                ret_dict['root_bridge_for'] = m.groupdict()['root_bridge_for']
                continue

            # VLAN0780, VLAN0801-VLAN0803, VLAN0806, VLAN0808-VLAN0818
            m = p8.match(line)
            if m:
                # Continuation of the "Root bridge for:" list.  Only consume
                # the line once p2 has seeded the key: p8 is a catch-all that
                # would otherwise raise KeyError on a stray one-word line.
                if 'root_bridge_for' in ret_dict:
                    ret_dict['root_bridge_for'] += ', {}'.format(
                        m.groupdict()['root_bridge_for'])
                    continue

            # EtherChannel misconfig guard is disabled
            # PortFast BPDU Guard Default is disabled
            # PVST Simulation Default is enabled but inactive in rapid-pvst mode
            # Platform PVST Simulation is enabled
            m = p3.match(line)
            if m:
                group = m.groupdict()
                if 'PVST Simulation Default' in group['name']:
                    group['name'] = 'PVST Simulation'
                enabled = 'enabled' in group['value'].lower()
                ret_dict[key_map[group['name'].strip()]] = enabled
                if enabled and group['simulation_value']:
                    ret_dict['pvst_simulation_status'] = group['simulation_value']
                continue

            # VLAN0100                     0         1        0          0          1
            m = p4.match(line)
            if m:
                group = m.groupdict()
                mode_id = group.pop('id')
                mode_dict = ret_dict.setdefault('mode', {})\
                    .setdefault(mode, {}).setdefault(mode_id, {})
                mode_dict.update({k: int(v) for k, v in group.items()})
                continue

            # 5 vlans                      0         5        0          0          5
            # 2 msts                       6         0        0         10         16
            m = p5.match(line)
            if m:
                group = m.groupdict()
                # the regex guarantees exactly one of "msts"/"vlans" matched
                if 'mst' in line:
                    key = 'num_of_msts'
                elif 'vlan' in line:
                    key = 'num_of_vlans'
                ret_dict.setdefault('total_statistics', {})\
                    .setdefault(key, int(group.pop('num')))
                ret_dict.setdefault('total_statistics', {})\
                    .update({k: int(v) for k, v in group.items()})
                continue

            # Configured Pathcost method used is short (Operational value is long)
            # Pathcost method used is long
            m = p6.match(line)
            if m:
                group = m.groupdict()
                ret_dict.setdefault('configured_pathcost', {})\
                    .update({k: v for k, v in group.items() if v})
                continue

            # Total                        0         5        0          0          5
            m = p7.match(line)
            if m:
                group = m.groupdict()
                ret_dict.setdefault('total_statistics', {}) \
                    .update({k: int(v) for k, v in group.items()})
                continue

        return ret_dict
class ShowSpanningTreeDetailSchema(MetaParser):
    """Schema for show spanning-tree detail"""

    schema = {
        Any(): {  # mstp, pvst, rapid_pvst
            Optional('domain'): str,
            Optional('pvst_id'): str,
            Optional('name'): str,
            Optional('revision'): int,
            Optional('max_hop'): int,
            'hello_time': int,
            'max_age': int,
            'forwarding_delay': int,
            Optional('hold_count'): int,
            Any(): {  # mst_instances, vlans
                Any(): {  # keyed by mst/vlan id
                    Optional('mst_id'): int,
                    Optional('vlan'): str,
                    Optional('vlan_id'): int,
                    # NOTE(review): several timer keys appear below both as
                    # Optional(...) and as plain strings — presumably
                    # historical duplication in the schema; confirm before
                    # cleaning up.
                    Optional('hello_time'): int,
                    Optional('max_age'): int,
                    Optional('forwarding_delay'): int,
                    Optional('hold_count'): int,
                    'bridge_priority': int,
                    'bridge_sysid': int,
                    'bridge_address': str,
                    Optional('root_of_spanning_tree'): bool,
                    'topology_change_flag': bool,
                    'topology_detected_flag': bool,
                    'hold_time': int,
                    'topology_changes': int,
                    'time_since_topology_change': str,
                    Optional('topology_from_port'): str,
                    'hello_time': int,
                    'max_age': int,
                    'forwarding_delay': int,
                    'hold_time': int,
                    'topology_change_times': int,
                    'notification_times': int,
                    'hello_timer': int,
                    'topology_change_timer': int,
                    'notification_timer': int,
                    Optional('aging_timer'): int,
                    'interfaces': {
                        Any(): {  # keyed by interface name
                            'status': str,
                            'name': str,
                            'cost': int,
                            'port_priority': int,
                            'port_num': int,
                            'port_identifier': str,
                            'designated_root_priority': int,
                            'designated_root_address': str,
                            'designated_path_cost': int,
                            'designated_port_id': str,
                            'designated_bridge_priority': int,
                            'designated_bridge_address': str,
                            'number_of_forward_transitions': int,
                            'message_age': int,
                            'forward_delay': int,
                            'hold': int,
                            'link_type': str,
                            Optional('boundary'): str,
                            Optional('peer'): str,
                            Optional('loop_guard'): bool,
                            'counters': {
                                'bpdu_sent': int,
                                'bpdu_received': int,
                            }
                        }
                    }
                },
            },
        }
    }
class ShowSpanningTreeDetail(ShowSpanningTreeDetailSchema):
    """Parser for show spanning-tree detail"""

    # device mode keyword -> top-level schema key
    MODE_NAME_MAP = {'mstp': 'mstp',
                     'ieee': 'pvst',
                     'rstp': 'rapid_pvst'}
    # device mode keyword -> second-level container key
    MODE_INST_MAP = {'mstp': 'mst_instances',
                     'ieee': 'vlans',
                     'rstp': 'vlans'}
    # device mode keyword -> per-instance id key
    MODE_KEY_MAP = {'mstp': 'mst_id',
                    'ieee': 'vlan_id',
                    'rstp': 'vlan_id'}

    cli_command = 'show spanning-tree detail'

    def cli(self, output=None):
        """Parse ``show spanning-tree detail`` into the schema dict.

        The loop below is a line-oriented state machine: p1 opens an
        instance (binding ``mode_dict``/``inst_dict``) and p11 opens an
        interface (binding ``intf_dict``); every later branch writes into
        whichever of those was bound most recently, so it relies on the
        device emitting lines in that order.
        """
        if output is None:
            # get output from device
            out = self.device.execute(self.cli_command)
        else:
            out = output

        # initial return dictionary
        ret_dict = {}

        # initial regexp pattern
        p1 = re.compile(r'^(MST|VLAN)?(?P<inst>\w+) +is +executing +the +(?P<mode>[\w\-]+) +'
                        'compatible +Spanning +Tree +protocol$')
        p2 = re.compile(r'^Bridge +Identifier +has +priority +(?P<bridge_priority>\d+), +'
                        'sysid +(?P<bridge_sysid>\d+), +'
                        'address +(?P<bridge_address>[\w\.]+)$')
        p3 = re.compile(r'^Configured +hello +time +(?P<hello_time>\d+), +'
                        'max +age +(?P<max_age>\d+), +forward +delay +(?P<forwarding_delay>\d+)(, +'
                        '(transmit|tranmsit) +hold\-count +(?P<hold_count>\d+))?$')
        p4 = re.compile(r'^We +are +the +root +of +the +spanning +tree$')
        p5 = re.compile(r'^Topology +change +flag +(?P<topology_change_flag>[\w\s]+), +'
                        'detected +flag +(?P<topology_detected_flag>[\w\s]+)$')
        p6 = re.compile(r'^Number +of +topology +changes +(?P<topology_changes>\d+) +'
                        'last +change +occurred +(?P<time_since_topology_change>[\w\.\:]+)( +ago)?$')
        p7 = re.compile(r'^from +(?P<topology_from_port>[\w\.\/\-]+)$')
        p8 = re.compile(r'^Times: +hold +(?P<hold_time>\d+), +'
                        'topology +change +(?P<topology_change_times>\d+), +'
                        'notification +(?P<notification_times>\d+)$')
        p9 = re.compile(r'^hello +(?P<hello_time>\d+), '
                        'max +age +(?P<max_age>\d+), '
                        '+forward +delay +(?P<forwarding_delay>\d+)$')
        p10 = re.compile(r'^Timers: +hello +(?P<hello_timer>\d+), +'
                        'topology +change +(?P<topology_change_timer>\d+), +'
                        'notification +(?P<notification_timer>\d+)'
                        '(, +aging +(?P<aging_timer>\d+))?$')
        p11 = re.compile(r'^Port +(?P<port_num>\d+) *\((?P<name>[\w\/\-\.]+)\) +'
                        'of +(?P<inst>\w+) +is +(?P<status>.*)$')
        p12 = re.compile(r'^Port +path +cost +(?P<cost>\d+), +'
                        'Port +priority +(?P<port_priority>\d+), +'
                        'Port +Identifier +(?P<port_identifier>[\w\.]+)$')
        p13 = re.compile(r'^Designated +root +has +priority +(?P<designated_root_priority>\d+), +'
                        'address +(?P<designated_root_address>[\w\.]+)$')
        p14 = re.compile(r'^Designated +bridge +has +priority +(?P<designated_bridge_priority>\d+), +'
                        'address +(?P<designated_bridge_address>[\w\.]+)$')
        p15 = re.compile(r'^Designated +port +id +is +(?P<designated_port_id>[\w\.]+), +'
                        'designated +path +cost +(?P<designated_path_cost>\d+)'
                        '( +[\w\s\,]+)?$')
        p16 = re.compile(r'^Timers: +message +age +(?P<message_age>\d+), +'
                        'forward +delay +(?P<forward_delay>\d+), +hold +(?P<hold>\d+)$')
        p17 = re.compile(r'^Number +of +transitions +to +forwarding +'
                        'state: +(?P<number_of_forward_transitions>\d+)$')
        p18 = re.compile(r'^Link +type +is +(?P<link_type>[\w\-]+) +by +default'
                        '(, *(Boundary +(?P<boundary>\w+)|Peer +is +(?P<peer>\w+)))?$')
        p19 = re.compile(r'^Loop +guard +is +(?P<loop_guard>\w+) +by +default +on +the +port$')
        p20 = re.compile(r'^BPDU: +sent +(?P<bpdu_sent>\d+), +'
                        'received +(?P<bpdu_received>\d+)$')

        for line in out.splitlines():
            line = line.strip()

            # MST0 is executing the mstp compatible Spanning Tree protocol
            m = p1.match(line)
            if m:
                group = m.groupdict()
                mode = group['mode']
                mode_dict = ret_dict.setdefault(self.MODE_NAME_MAP[mode], {})
                inst_dict = mode_dict.setdefault(self.MODE_INST_MAP[mode], {}).\
                    setdefault(int(group['inst']), {})
                inst_dict[self.MODE_KEY_MAP[mode]] = int(group['inst'])
                continue

            # Bridge Identifier has priority 32768, sysid 0, address d8b1.90ff.c889
            m = p2.match(line)
            if m:
                group = m.groupdict()
                inst_dict['bridge_address'] = group.pop('bridge_address')
                inst_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Configured hello time 10, max age 40, forward delay 30, transmit hold-count 20
            # Configured hello time 2, max age 20, forward delay 15, tranmsit hold-count 6
            m = p3.match(line)
            if m:
                group = m.groupdict()
                # timers are stored at both the mode and the instance level
                update_dict = {k: int(v) for k, v in group.items() if v}
                mode_dict.update(update_dict)
                inst_dict.update(update_dict)
                continue

            # We are the root of the spanning tree
            m = p4.match(line)
            if m:
                inst_dict['root_of_spanning_tree'] = True
                continue

            # Topology change flag not set, detected flag not set
            m = p5.match(line)
            if m:
                group = m.groupdict()
                inst_dict['topology_change_flag'] = False if 'not' in group['topology_change_flag'] else True
                inst_dict['topology_detected_flag'] = False if 'not' in group['topology_detected_flag'] else True
                continue

            # Number of topology changes 3 last change occurred 03:09:48 ago
            m = p6.match(line)
            if m:
                group = m.groupdict()
                inst_dict['topology_changes'] = int(group['topology_changes'])
                inst_dict['time_since_topology_change'] = group['time_since_topology_change']
                continue

            # from Port-channel24
            m = p7.match(line)
            if m:
                inst_dict['topology_from_port'] = m.groupdict()['topology_from_port']
                continue

            # Times:  hold 1, topology change 70, notification 10
            m = p8.match(line)
            if m:
                group = m.groupdict()
                inst_dict.update({k: int(v) for k, v in group.items()})
                continue

            # hello 10, max age 40, forward delay 30
            m = p9.match(line)
            if m:
                group = m.groupdict()
                inst_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Timers: hello 0, topology change 0, notification 0
            #  hello 0, topology change 0, notification 0, aging 300
            m = p10.match(line)
            if m:
                group = m.groupdict()
                inst_dict.update({k: int(v) for k, v in group.items() if v})
                continue

            # Port 2390 (Port-channel14) of MST0 is broken (PVST Sim. Inconsistent)
            # Port 2400 (Port-channel24) of MST0 is designated forwarding
            m = p11.match(line)
            if m:
                group = m.groupdict()
                # opens a new interface context for the p12-p20 branches
                intf_dict = inst_dict.setdefault('interfaces', {}).setdefault(group['name'], {})
                intf_dict['port_num'] = int(group['port_num'])
                intf_dict['name'] = group['name']
                intf_dict['status'] = group['status']
                continue

            # Port path cost 6660, Port priority 128, Port Identifier 128.2390.
            m = p12.match(line)
            if m:
                group = m.groupdict()
                intf_dict['port_identifier'] = group.pop('port_identifier')
                intf_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Designated root has priority 32768, address d8b1.90ff.c889
            m = p13.match(line)
            if m:
                group = m.groupdict()
                intf_dict['designated_root_priority'] = int(group['designated_root_priority'])
                intf_dict['designated_root_address'] = group['designated_root_address']
                continue

            # Designated bridge has priority 32768, address d8b1.90ff.c889
            m = p14.match(line)
            if m:
                group = m.groupdict()
                intf_dict['designated_bridge_priority'] = int(group['designated_bridge_priority'])
                intf_dict['designated_bridge_address'] = group['designated_bridge_address']
                continue

            # Designated port id is 128.2390, designated path cost 0
            m = p15.match(line)
            if m:
                group = m.groupdict()
                intf_dict['designated_path_cost'] = int(group['designated_path_cost'])
                intf_dict['designated_port_id'] = group['designated_port_id']
                continue

            # Timers: message age 0, forward delay 0, hold 0
            m = p16.match(line)
            if m:
                group = m.groupdict()
                intf_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Number of transitions to forwarding state: 0
            m = p17.match(line)
            if m:
                intf_dict['number_of_forward_transitions'] = \
                    int(m.groupdict()['number_of_forward_transitions'])
                continue

            # Link type is point-to-point by default, Boundary PVST
            m = p18.match(line)
            if m:
                group = m.groupdict()
                intf_dict.update({k: v for k, v in group.items() if v})
                continue

            # Loop guard is enabled by default on the port
            m = p19.match(line)
            if m:
                group = m.groupdict()
                intf_dict['loop_guard'] = True if 'enabled' in m.groupdict()['loop_guard'].lower() \
                    else False
                continue

            # BPDU: sent 138231, received 167393
            m = p20.match(line)
            if m:
                group = m.groupdict()
                intf_dict.setdefault('counters', {}).update({k: int(v) for k, v in group.items()})
                continue

        return ret_dict
class ShowSpanningTreeMstDetailSchema(MetaParser):
    """Schema for show spanning-tree mst detail"""

    schema = {
        'mst_instances': {
            Any(): {  # keyed by MST instance number
                'mst_id': int,
                Optional('vlan'): str,
                'bridge_address': str,
                'bridge_priority': int,
                'sysid': int,
                # "Root this switch for ..." vs. remote-root variants
                Optional('root'): str,
                Optional('root_address'): str,
                Optional('root_priority'): int,
                Optional('operational'): {
                    'hello_time': int,
                    'forward_delay': int,
                    'max_age': int,
                    'tx_hold_count': int
                },
                Optional('configured'): {
                    'hello_time': int,
                    'forward_delay': int,
                    'max_age': int,
                    'max_hops': int
                },
                'interfaces': {
                    Any(): {  # keyed by interface name
                        'status': str,
                        Optional('broken_reason'): str,
                        'name': str,
                        'port_id': str,
                        'cost': int,
                        'port_priority': int,
                        'designated_root_priority': int,
                        'designated_root_address': str,
                        'designated_root_cost': int,
                        Optional('designated_regional_root_cost'): int,
                        Optional('designated_regional_root_priority'): int,
                        Optional('designated_regional_root_address'): str,
                        'designated_bridge_priority': int,
                        'designated_bridge_address': str,
                        'designated_bridge_port_id': str,
                        'forward_transitions': int,
                        'message_expires': int,
                        'forward_delay': int,
                        'counters': {
                            'bpdu_sent': int,
                            'bpdu_received': int,
                        }
                    }
                }
            },
        }
    }
class ShowSpanningTreeMstDetail(ShowSpanningTreeMstDetailSchema):
    """Parser for show spanning-tree mst detail"""

    cli_command = 'show spanning-tree mst detail'

    def cli(self, output=None):
        """Parse ``show spanning-tree mst detail`` into the schema dict.

        Line-oriented state machine: p1 opens an instance (binding
        ``inst_dict``) and p6 opens an interface (binding ``intf_dict``);
        later branches write into the most recently bound context, so the
        device's line ordering is relied upon.
        """
        if output is None:
            # get output from device
            out = self.device.execute(self.cli_command)
        else:
            out = output

        # initial return dictionary
        ret_dict = {}

        # initial regexp pattern
        p1 = re.compile(r'^\#+ +MST(?P<inst>\d+) +'
                        r'vlans +mapped: +(?P<vlan>[\d\-\,\s]+)$')
        p2 = re.compile(r'^Bridge +address +(?P<bridge_address>[\w\.]+) +'
                        r'priority +(?P<bridge_priority>\d+) +'
                        r'\((\d+) +sysid +(?P<sysid>\d+)\)$')
        p3 = re.compile(r'^Root +this +switch +for +(the +)?(?P<root>[\w\.\s]+)$')
        # Root address 58ac.78ff.c3f5 priority 8198 (8192 sysid 6)
        p3_1 = re.compile(r'^Root +address +(?P<root_address>[\w\.]+) +'
                          r'priority +(?P<root_priority>\d+) +'
                          r'\((\d+) +sysid +(?P<sysid>\d+)\)$')
        p4 = re.compile(r'^Operational +hello +time +(?P<hello_time>\d+), +'
                        r'forward +delay +(?P<forward_delay>\d+), +'
                        r'max +age +(?P<max_age>\d+), +'
                        r'txholdcount +(?P<tx_hold_count>\d+)$')
        p5 = re.compile(r'^Configured +hello +time +(?P<hello_time>\d+), +'
                        r'forward +delay +(?P<forward_delay>\d+), +'
                        r'max +age +(?P<max_age>\d+), +'
                        r'max +hops +(?P<max_hops>\d+)$')
        p6 = re.compile(r'^(?P<name>[\w\-\.\/]+) +of +'
                        r'MST(\d+) +is +(?P<status>[\w\s]+)'
                        r'( +\((?P<broken_reason>.*)\))?$')
        p7 = re.compile(r'^Port +info +port +id +'
                        r'(?P<port_id>[\d\.]+) +'
                        r'priority +(?P<port_priority>\d+) +'
                        r'cost +(?P<cost>\d+)$')
        p8 = re.compile(r'^Designated +root +address +'
                        r'(?P<designated_root_address>[\w\.]+) +'
                        r'priority +(?P<designated_root_priority>\d+) +'
                        r'cost +(?P<designated_root_cost>\d+)$')
        p9 = re.compile(r'^Design\. +regional +root +address +'
                        r'(?P<designated_regional_root_address>[\w\.]+) +'
                        r'priority +(?P<designated_regional_root_priority>\d+) +'
                        r'cost +(?P<designated_regional_root_cost>\d+)$')
        p10 = re.compile(r'^Designated +bridge +address +'
                         r'(?P<designated_bridge_address>[\w\.]+) +'
                         r'priority +(?P<designated_bridge_priority>\d+) +'
                         r'port +id +(?P<designated_bridge_port_id>[\d\.]+)$')
        p11 = re.compile(r'^Timers: +message +expires +in +(?P<message_expires>\d+) +sec, +'
                         r'forward +delay +(?P<forward_delay>\d+), '
                         r'forward +transitions +(?P<forward_transitions>\d+)$')
        p12 = re.compile(r'^Bpdus +(\(\w+\) *)?'
                         r'sent +(?P<bpdu_sent>\d+), +'
                         r'received +(?P<bpdu_received>\d+)')

        for line in out.splitlines():
            line = line.strip()

            # ##### MST0    vlans mapped:   1-9,11-99,101-4094
            m = p1.match(line)
            if m:
                group = m.groupdict()
                inst = int(group['inst'])
                # opens a new instance context for the following branches
                inst_dict = ret_dict.setdefault('mst_instances', {}).setdefault(inst, {})
                inst_dict['mst_id'] = inst
                inst_dict['vlan'] = group['vlan']
                continue

            # Bridge        address d8b1.90ff.c889  priority      32768 (32768 sysid 0)
            m = p2.match(line)
            if m:
                group = m.groupdict()
                inst_dict['bridge_address'] = group.pop('bridge_address')
                inst_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Root          this switch for the CIST
            m = p3.match(line)
            if m:
                inst_dict['root'] = m.groupdict()['root']
                continue

            # Root address 58ac.78ff.c3f5 priority 8198 (8192 sysid 6)
            m = p3_1.match(line)
            if m:
                group = m.groupdict()
                inst_dict['root_address'] = group.pop('root_address')
                inst_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Operational   hello time 10, forward delay 30, max age 40, txholdcount 20
            m = p4.match(line)
            if m:
                inst_dict.setdefault('operational', {}).update(
                    {k: int(v) for k, v in m.groupdict().items()})
                continue

            # Configured    hello time 10, forward delay 30, max age 40, max hops 255
            m = p5.match(line)
            if m:
                inst_dict.setdefault('configured', {}).update(
                    {k: int(v) for k, v in m.groupdict().items()})
                continue

            # Port-channel14 of MST0 is broken (PVST Sim. Inconsistent)
            m = p6.match(line)
            if m:
                group = m.groupdict()
                intf = group['name']
                # opens a new interface context for the p7-p12 branches
                intf_dict = inst_dict.setdefault('interfaces', {}).setdefault(intf, {})
                intf_dict.update({k: v for k, v in group.items() if v})
                continue

            # Port info     port id         128.23  priority    128  cost       20000
            m = p7.match(line)
            if m:
                group = m.groupdict()
                intf_dict['port_id'] = group.pop('port_id')
                intf_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Designated root       address 3820.56ff.e15b  priority  32768  cost 0
            m = p8.match(line)
            if m:
                group = m.groupdict()
                intf_dict['designated_root_address'] = group.pop('designated_root_address')
                intf_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Design. regional root address 3820.56ff.e15b  priority  32768  cost 0
            m = p9.match(line)
            if m:
                group = m.groupdict()
                intf_dict['designated_regional_root_address'] = \
                    group.pop('designated_regional_root_address')
                intf_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Designated bridge     address 3820.56ff.e15b  priority  32768  port id 128.23
            m = p10.match(line)
            if m:
                group = m.groupdict()
                intf_dict['designated_bridge_priority'] = \
                    int(group.pop('designated_bridge_priority'))
                intf_dict.update({k: v for k, v in group.items()})
                continue

            # Timers: message expires in 0 sec, forward delay 0, forward transitions 1
            m = p11.match(line)
            if m:
                group = m.groupdict()
                intf_dict.update({k: int(v) for k, v in group.items()})
                continue

            # Bpdus (MRecords) sent 493, received 0
            # Bpdus sent 493, received 0
            m = p12.match(line)
            if m:
                group = m.groupdict()
                intf_dict.setdefault('counters', {}).update({k: int(v) for k, v in group.items()})
                continue

        return ret_dict
class ShowErrdisableRecoverySchema(MetaParser):
    """Schema for show errdisable recovery"""

    schema = {
        # recovery-mechanism name -> enabled (True) / disabled (False)
        'timer_status': {
            Any(): bool,
        },
        # "Timer interval" in seconds
        'bpduguard_timeout_recovery': int,
        # interfaces currently waiting for errdisable recovery
        Optional('interfaces'): {
            Any(): {  # keyed by interface name
                'interface': str,
                'errdisable_reason': str,
                'time_left': int,
            },
        }
    }
class ShowErrdisableRecovery(ShowErrdisableRecoverySchema):
    """Parser for show errdisable recovery"""

    cli_command = 'show errdisable recovery'

    def cli(self, output=None):
        """Parse ``show errdisable recovery`` into the schema dict."""
        if output is None:
            # get output from device
            out = self.device.execute(self.cli_command)
        else:
            out = output

        # initial return dictionary
        ret_dict = {}

        # initial regexp pattern
        p1 = re.compile(r'^Timer +interval: +(?P<interval>\d+) +seconds$')
        p2 = re.compile(r'^(?P<name>[\w\-\s\(\)\"\:"]+) +'
                        '(?P<status>(Disabled|Enabled)+)$')
        p3 = re.compile(r'^(?P<interface>[\w\-\/\.]+) +'
                        '(?P<errdisable_reason>\w+) +'
                        '(?P<time_left>\d+)$')

        for line in out.splitlines():
            line = line.strip()

            # Timer interval: 333 seconds
            m = p1.match(line)
            if m:
                ret_dict['bpduguard_timeout_recovery'] = int(m.groupdict()['interval'])
                continue

            # channel-misconfig (STP)      Disabled
            m = p2.match(line)
            if m:
                group = m.groupdict()
                status_dict = ret_dict.setdefault('timer_status', {})
                status_dict[group['name'].strip()] = False if 'disabled' in group['status'].lower() else True
                continue

            # Fa2/4                bpduguard          273
            m = p3.match(line)
            if m:
                group = m.groupdict()
                intf = Common.convert_intf_name(group.pop('interface'))
                intf_dict = ret_dict.setdefault('interfaces', {}).setdefault(intf, {})
                intf_dict['interface'] = intf
                intf_dict['time_left'] = int(group.pop('time_left'))
                intf_dict.update({k: v for k, v in group.items()})
                continue

        return ret_dict
class ShowSpanningTreeSchema(MetaParser):
    """Schema for show spanning-tree [mst|vlan <WORD>]"""

    schema = {
        Any(): {  # mstp, pvst, rapid_pvst
            Any(): {  # mst_instances, vlans
                Any(): {  # keyed by mst/vlan id
                    Any(): {  # root, bridge
                        'priority': int,
                        'address': str,
                        Optional('cost'): int,
                        Optional('port'): int,
                        Optional('interface'): str,
                        Optional('configured_bridge_priority'): int,
                        Optional('sys_id_ext'): int,
                        'hello_time': int,
                        'max_age': int,
                        'forward_delay': int,
                        Optional('aging_time'): int,
                    },
                    'interfaces': {
                        Any(): {  # keyed by interface name
                            'role': str,
                            'port_state': str,
                            'cost': int,
                            'port_priority': int,
                            'port_num': int,
                            'type': str,
                            Optional('peer'): str,
                            Optional('bound'): str,
                        }
                    }
                }
            }
        }
    }
class ShowSpanningTree(ShowSpanningTreeSchema):
    """Parser for show spanning-tree [mst|vlan <WORD>]"""

    # device mode keyword -> top-level schema key
    MODE_NAME_MAP = {'mstp': 'mstp',
                     'ieee': 'pvst',
                     'rstp': 'rapid_pvst'}
    # device mode keyword -> second-level container key
    MODE_INST_MAP = {'mstp': 'mst_instances',
                     'ieee': 'vlans',
                     'rstp': 'vlans'}
    # abbreviated port state column -> long form
    # NOTE(review): 'listensing' looks like a typo for 'listening', but the
    # string is part of this parser's public output — confirm no downstream
    # consumer keys on it before changing.
    PORT_STATE_MAP = {'FWD': 'forwarding',
                      'BLK': 'blocking',
                      'DIS': 'disabled',
                      'LRN': 'learning',
                      'LIS': 'listensing',
                      'BKN*': 'broken'}
    # abbreviated role column -> long form
    ROLE_MAP = {'Mstr': 'master ',
                'Desg': 'designated',
                'Root': 'root',
                'BLK': 'blocking',
                'Altn': 'alternate',
                'Back': 'backup'}

    cli_command = ['show spanning-tree vlan {vlan}', 'show spanning-tree mst {mst}', 'show spanning-tree']

    def cli(self, mst='', vlan='', output=None):
        """Parse ``show spanning-tree`` (optionally scoped to one mst/vlan).

        Args:
            mst: MST instance to query (ignored when ``vlan`` is given).
            vlan: VLAN id to query.
            output: pre-collected output; when ``None`` the chosen command
                is executed on the device.
        """
        if output is None:
            # get output from device
            if vlan:
                cmd = self.cli_command[0].format(vlan=vlan)
            elif mst:
                cmd = self.cli_command[1].format(mst=mst)
            else:
                cmd = self.cli_command[2]
            out = self.device.execute(cmd)
        else:
            out = output

        # initial return dictionary
        ret_dict = {}

        # initial regexp pattern
        p1 = re.compile(r'^(MST|VLAN)(?P<inst>\d+)$')
        p2 = re.compile(r'^Spanning +tree +enabled p+rotocol +(?P<mode>\w+)$')
        p3 = re.compile(r'^Root +ID +Priority +(?P<priority>\d+)$')
        p4 = re.compile(r'^Bridge +ID +Priority +(?P<priority>\d+)'
                        '( *\(priority +(?P<configured_bridge_priority>\d+) +'
                        'sys\-id\-ext +(?P<sys_id_ext>\d+)\))?$')
        p5 = re.compile(r'^Address +(?P<address>[\w\.]+)$')
        p6 = re.compile(r'^Cost +(?P<cost>\d+)$')
        p7 = re.compile(r'^Port +(?P<port>\d+) +\((?P<interface>[\w\-\/\.]+)\)$')
        p8 = re.compile(r'Hello +Time +(?P<hello_time>\d+) +sec +'
                        'Max +Age +(?P<max_age>\d+) +sec +'
                        'Forward +Delay +(?P<forward_delay>\d+) +sec$')
        p9 = re.compile(r'^Aging +Time +(?P<aging_time>\d+) +sec$')
        p10 = re.compile(r'^(?P<interface>[\w\-\/\.]+) +'
                         '(?P<role>[\w\*]+) +(?P<port_state>[A-Z\*]+) *'
                         '(?P<cost>\d+) +(?P<port_priority>\d+)\.'
                         '(?P<port_num>\d+) +(?P<type>\w+)'
                         '( +(Bound\((?P<bound>\w+)\)|Peer\((?P<peer>\w+)\)))?'
                         '( +\*\S+)?$')

        for line in out.splitlines():
            line = line.strip()

            # VLAN0200
            # MST10
            m = p1.match(line)
            if m:
                # remembered until the protocol line (p2) creates inst_dict
                inst = int(m.groupdict()['inst'])
                continue

            # Spanning tree enabled protocol rstp
            m = p2.match(line)
            if m:
                mode_dict = ret_dict.setdefault(self.MODE_NAME_MAP[m.groupdict()['mode']], {})
                inst_dict = mode_dict.setdefault(self.MODE_INST_MAP[m.groupdict()['mode']], {}).\
                    setdefault(inst, {})
                continue

            # Root ID    Priority    24776
            m = p3.match(line)
            if m:
                # role_dict tracks whichever of root/bridge was seen last;
                # p5/p6/p7/p8/p9 write into it
                role_dict = inst_dict.setdefault('root', {})
                role_dict['priority'] = int(m.groupdict()['priority'])
                continue

            # Address     58bf.eaff.e5b6
            m = p5.match(line)
            if m:
                role_dict['address'] = m.groupdict()['address']
                continue

            # Cost        3
            m = p6.match(line)
            if m:
                role_dict['cost'] = int(m.groupdict()['cost'])
                continue

            # Port        2390 (Port-channel14)
            m = p7.match(line)
            if m:
                group = m.groupdict()
                role_dict['port'] = int(group['port'])
                role_dict['interface'] = group['interface']
                continue

            # Hello Time   2 sec  Max Age 20 sec  Forward Delay 15 sec
            m = p8.match(line)
            if m:
                role_dict.update({k: int(v) for k, v in m.groupdict().items()})
                continue

            # Bridge ID  Priority    28872  (priority 28672 sys-id-ext 200)
            m = p4.match(line)
            if m:
                role_dict = inst_dict.setdefault('bridge', {})
                role_dict.update({k: int(v) for k, v in m.groupdict().items() if v})
                continue

            # Aging Time  300 sec
            m = p9.match(line)
            if m:
                role_dict['aging_time'] = int(m.groupdict()['aging_time'])
                continue

            # Gi1/0/5             Desg FWD 4         128.5    P2p Peer(STP)
            # Gi1/0/5             Mstr FWD 20000     128.5    P2p Bound(RSTP)
            # Po14                Desg BKN*6660      128.2390 P2p Bound(PVST) *PVST_Inc
            m = p10.match(line)
            if m:
                group = m.groupdict()
                intf = Common.convert_intf_name(group.pop('interface'))
                intf_dict = inst_dict.setdefault('interfaces', {}).setdefault(intf, {})
                intf_dict['cost'] = int(group.pop('cost'))
                intf_dict['port_priority'] = int(group.pop('port_priority'))
                intf_dict['port_num'] = int(group.pop('port_num'))
                intf_dict['role'] = self.ROLE_MAP[group.pop('role')]
                intf_dict['port_state'] = self.PORT_STATE_MAP[group.pop('port_state')]
                intf_dict.update({k: v for k, v in group.items() if v})
                continue

        return ret_dict
class ShowSpanningTreeMstConfigurationSchema(MetaParser):
    """Schema for show spanning-tree mst configuration"""

    schema = {
        'mstp': {
            # MST region name, revision number, and instance count
            'name': str,
            'revision': int,
            'instances_configured': int,
            'mst_instances': {
                Any(): {  # keyed by MST instance number
                    # vlan range string, e.g. '1-99,201-4094'
                    'vlan_mapped': str,
                }
            }
        }
    }
class ShowSpanningTreeMstConfiguration(ShowSpanningTreeMstConfigurationSchema):
    """Parser for show spanning-tree mst configuration"""

    cli_command = 'show spanning-tree mst configuration'

    def cli(self, output=None):
        """Parse ``show spanning-tree mst configuration``.

        Returns a dict rooted at ``'mstp'`` when anything was parsed,
        otherwise an empty dict.
        """
        out = self.device.execute(self.cli_command) if output is None else output

        parsed = {}

        # Name      [mst]
        name_re = re.compile(r'^Name +\[(?P<name>.*)\]$')
        # Revision  111   Instances configured 2
        revision_re = re.compile(r'^Revision +(?P<revision>\d+) +'
                                 'Instances +configured +(?P<instances_configured>\d+)$')
        # 0              1-99,201-4094
        instance_re = re.compile(r'^(?P<inst>\d+) +(?P<vlan_mapped>[\d\,\s\-]+)$')

        for raw_line in out.splitlines():
            text = raw_line.strip()

            match = name_re.match(text)
            if match:
                parsed['name'] = match.group('name')
                continue

            match = revision_re.match(text)
            if match:
                for key, value in match.groupdict().items():
                    parsed[key] = int(value)
                continue

            match = instance_re.match(text)
            if match:
                instance_entry = parsed.setdefault('mst_instances', {})\
                    .setdefault(int(match.group('inst')), {})
                instance_entry['vlan_mapped'] = match.group('vlan_mapped')
                continue

        return {'mstp': parsed} if parsed else parsed
"genie.metaparser.util.schemaengine.Any",
"genie.metaparser.util.schemaengine.Optional",
"re.compile"
] | [((938, 978), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""etherchannel_misconfig_guard"""'], {}), "('etherchannel_misconfig_guard')\n", (946, 978), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((994, 1024), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""extended_system_id"""'], {}), "('extended_system_id')\n", (1002, 1024), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1040, 1068), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""portfast_default"""'], {}), "('portfast_default')\n", (1048, 1068), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1112, 1135), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""bpdu_filter"""'], {}), "('bpdu_filter')\n", (1120, 1135), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1151, 1179), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""bridge_assurance"""'], {}), "('bridge_assurance')\n", (1159, 1179), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1195, 1217), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""loop_guard"""'], {}), "('loop_guard')\n", (1203, 1217), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1293, 1320), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""root_bridge_for"""'], {}), "('root_bridge_for')\n", (1301, 1320), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1335, 1362), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""pvst_simulation"""'], {}), "('pvst_simulation')\n", (1343, 1362), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, 
Or, And, Default, Use\n'), ((1378, 1412), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""pvst_simulation_status"""'], {}), "('pvst_simulation_status')\n", (1386, 1412), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1427, 1463), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""platform_pvst_simulation"""'], {}), "('platform_pvst_simulation')\n", (1435, 1463), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1479, 1510), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""configured_pathcost"""'], {}), "('configured_pathcost')\n", (1487, 1510), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1608, 1624), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""mode"""'], {}), "('mode')\n", (1616, 1624), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((2707, 2794), 're.compile', 're.compile', (['"""^Switch +is +in +(?P<mode>[\\\\w\\\\-]+) +mode( *\\\\(IEEE +Standard\\\\))?$"""'], {}), "(\n '^Switch +is +in +(?P<mode>[\\\\w\\\\-]+) +mode( *\\\\(IEEE +Standard\\\\))?$')\n", (2717, 2794), False, 'import re\n'), ((2800, 2874), 're.compile', 're.compile', (['"""^Root +bridge +for: +(?P<root_bridge_for>[\\\\w\\\\-\\\\,\\\\s]+).?$"""'], {}), "('^Root +bridge +for: +(?P<root_bridge_for>[\\\\w\\\\-\\\\,\\\\s]+).?$')\n", (2810, 2874), False, 'import re\n'), ((3061, 3226), 're.compile', 're.compile', (['"""^(?P<name>\\\\w+(?: \\\\S+){,5}?) +is +(?P<value>disable|disabled|enabled)(?: +but (?P<simulation_value>active|inactive) +in +rapid-pvst +mode)?$"""'], {}), "(\n '^(?P<name>\\\\w+(?: \\\\S+){,5}?) 
+is +(?P<value>disable|disabled|enabled)(?: +but (?P<simulation_value>active|inactive) +in +rapid-pvst +mode)?$'\n )\n", (3071, 3226), False, 'import re\n'), ((3258, 3410), 're.compile', 're.compile', (['"""^(?P<id>(?!Total)\\\\w+) +(?P<blocking>\\\\d+) +(?P<listening>\\\\d+) +(?P<learning>\\\\d+) +(?P<forwarding>\\\\d+) +(?P<stp_active>\\\\d+)$"""'], {}), "(\n '^(?P<id>(?!Total)\\\\w+) +(?P<blocking>\\\\d+) +(?P<listening>\\\\d+) +(?P<learning>\\\\d+) +(?P<forwarding>\\\\d+) +(?P<stp_active>\\\\d+)$'\n )\n", (3268, 3410), False, 'import re\n'), ((3437, 3602), 're.compile', 're.compile', (['"""^(?P<num>\\\\d+) +(msts?|vlans?) +(?P<blockings>\\\\d+) +(?P<listenings>\\\\d+) +(?P<learnings>\\\\d+) +(?P<forwardings>\\\\d+) +(?P<stp_actives>\\\\d+)$"""'], {}), "(\n '^(?P<num>\\\\d+) +(msts?|vlans?) +(?P<blockings>\\\\d+) +(?P<listenings>\\\\d+) +(?P<learnings>\\\\d+) +(?P<forwardings>\\\\d+) +(?P<stp_actives>\\\\d+)$'\n )\n", (3447, 3602), False, 'import re\n'), ((3630, 3781), 're.compile', 're.compile', (['"""^(?:Configured +)?Pathcost +method +used +is +(?P<method>\\\\w+)(?: +\\\\(Operational +value +is +(?P<operational_value>\\\\w+)\\\\))?$"""'], {}), "(\n '^(?:Configured +)?Pathcost +method +used +is +(?P<method>\\\\w+)(?: +\\\\(Operational +value +is +(?P<operational_value>\\\\w+)\\\\))?$'\n )\n", (3640, 3781), False, 'import re\n'), ((3811, 3951), 're.compile', 're.compile', (['"""Total +(?P<blockings>\\\\d+) +(?P<listenings>\\\\d+) +(?P<learnings>\\\\d+) +(?P<forwardings>\\\\d+) +(?P<stp_actives>\\\\d+)$"""'], {}), "(\n 'Total +(?P<blockings>\\\\d+) +(?P<listenings>\\\\d+) +(?P<learnings>\\\\d+) +(?P<forwardings>\\\\d+) +(?P<stp_actives>\\\\d+)$'\n )\n", (3821, 3951), False, 'import re\n'), ((3980, 4044), 're.compile', 're.compile', (['"""^(?P<root_bridge_for>(?:(?:[\\\\w-]+, +)+)?[\\\\w-]+)$"""'], {}), "('^(?P<root_bridge_for>(?:(?:[\\\\w-]+, +)+)?[\\\\w-]+)$')\n", (3990, 4044), False, 'import re\n'), ((9031, 9036), 'genie.metaparser.util.schemaengine.Any', 
'Any', ([], {}), '()\n', (9034, 9036), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((12922, 13052), 're.compile', 're.compile', (['"""^(MST|VLAN)?(?P<inst>\\\\w+) +is +executing +the +(?P<mode>[\\\\w\\\\-]+) +compatible +Spanning +Tree +protocol$"""'], {}), "(\n '^(MST|VLAN)?(?P<inst>\\\\w+) +is +executing +the +(?P<mode>[\\\\w\\\\-]+) +compatible +Spanning +Tree +protocol$'\n )\n", (12932, 13052), False, 'import re\n'), ((13083, 13242), 're.compile', 're.compile', (['"""^Bridge +Identifier +has +priority +(?P<bridge_priority>\\\\d+), +sysid +(?P<bridge_sysid>\\\\d+), +address +(?P<bridge_address>[\\\\w\\\\.]+)$"""'], {}), "(\n '^Bridge +Identifier +has +priority +(?P<bridge_priority>\\\\d+), +sysid +(?P<bridge_sysid>\\\\d+), +address +(?P<bridge_address>[\\\\w\\\\.]+)$'\n )\n", (13093, 13242), False, 'import re\n'), ((13300, 13507), 're.compile', 're.compile', (['"""^Configured +hello +time +(?P<hello_time>\\\\d+), +max +age +(?P<max_age>\\\\d+), +forward +delay +(?P<forwarding_delay>\\\\d+)(, +(transmit|tranmsit) +hold\\\\-count +(?P<hold_count>\\\\d+))?$"""'], {}), "(\n '^Configured +hello +time +(?P<hello_time>\\\\d+), +max +age +(?P<max_age>\\\\d+), +forward +delay +(?P<forwarding_delay>\\\\d+)(, +(transmit|tranmsit) +hold\\\\-count +(?P<hold_count>\\\\d+))?$'\n )\n", (13310, 13507), False, 'import re\n'), ((13564, 13623), 're.compile', 're.compile', (['"""^We +are +the +root +of +the +spanning +tree$"""'], {}), "('^We +are +the +root +of +the +spanning +tree$')\n", (13574, 13623), False, 'import re\n'), ((13639, 13780), 're.compile', 're.compile', (['"""^Topology +change +flag +(?P<topology_change_flag>[\\\\w\\\\s]+), +detected +flag +(?P<topology_detected_flag>[\\\\w\\\\s]+)$"""'], {}), "(\n '^Topology +change +flag +(?P<topology_change_flag>[\\\\w\\\\s]+), +detected +flag +(?P<topology_detected_flag>[\\\\w\\\\s]+)$'\n )\n", (13649, 13780), False, 'import re\n'), ((13810, 13971), 're.compile', 
're.compile', (['"""^Number +of +topology +changes +(?P<topology_changes>\\\\d+) +last +change +occurred +(?P<time_since_topology_change>[\\\\w\\\\.\\\\:]+)( +ago)?$"""'], {}), "(\n '^Number +of +topology +changes +(?P<topology_changes>\\\\d+) +last +change +occurred +(?P<time_since_topology_change>[\\\\w\\\\.\\\\:]+)( +ago)?$'\n )\n", (13820, 13971), False, 'import re\n'), ((14001, 14062), 're.compile', 're.compile', (['"""^from +(?P<topology_from_port>[\\\\w\\\\.\\\\/\\\\-]+)$"""'], {}), "('^from +(?P<topology_from_port>[\\\\w\\\\.\\\\/\\\\-]+)$')\n", (14011, 14062), False, 'import re\n'), ((14074, 14230), 're.compile', 're.compile', (['"""^Times: +hold +(?P<hold_time>\\\\d+), +topology +change +(?P<topology_change_times>\\\\d+), +notification +(?P<notification_times>\\\\d+)$"""'], {}), "(\n '^Times: +hold +(?P<hold_time>\\\\d+), +topology +change +(?P<topology_change_times>\\\\d+), +notification +(?P<notification_times>\\\\d+)$'\n )\n", (14084, 14230), False, 'import re\n'), ((14289, 14416), 're.compile', 're.compile', (['"""^hello +(?P<hello_time>\\\\d+), max +age +(?P<max_age>\\\\d+), +forward +delay +(?P<forwarding_delay>\\\\d+)$"""'], {}), "(\n '^hello +(?P<hello_time>\\\\d+), max +age +(?P<max_age>\\\\d+), +forward +delay +(?P<forwarding_delay>\\\\d+)$'\n )\n", (14299, 14416), False, 'import re\n'), ((14476, 14670), 're.compile', 're.compile', (['"""^Timers: +hello +(?P<hello_timer>\\\\d+), +topology +change +(?P<topology_change_timer>\\\\d+), +notification +(?P<notification_timer>\\\\d+)(, +aging +(?P<aging_timer>\\\\d+))?$"""'], {}), "(\n '^Timers: +hello +(?P<hello_timer>\\\\d+), +topology +change +(?P<topology_change_timer>\\\\d+), +notification +(?P<notification_timer>\\\\d+)(, +aging +(?P<aging_timer>\\\\d+))?$'\n )\n", (14486, 14670), False, 'import re\n'), ((14760, 14883), 're.compile', 're.compile', (['"""^Port +(?P<port_num>\\\\d+) *\\\\((?P<name>[\\\\w\\\\/\\\\-\\\\.]+)\\\\) +of +(?P<inst>\\\\w+) +is +(?P<status>.*)$"""'], {}), "(\n '^Port 
+(?P<port_num>\\\\d+) *\\\\((?P<name>[\\\\w\\\\/\\\\-\\\\.]+)\\\\) +of +(?P<inst>\\\\w+) +is +(?P<status>.*)$'\n )\n", (14770, 14883), False, 'import re\n'), ((14911, 15062), 're.compile', 're.compile', (['"""^Port +path +cost +(?P<cost>\\\\d+), +Port +priority +(?P<port_priority>\\\\d+), +Port +Identifier +(?P<port_identifier>[\\\\w\\\\.]+)$"""'], {}), "(\n '^Port +path +cost +(?P<cost>\\\\d+), +Port +priority +(?P<port_priority>\\\\d+), +Port +Identifier +(?P<port_identifier>[\\\\w\\\\.]+)$'\n )\n", (14921, 15062), False, 'import re\n'), ((15121, 15264), 're.compile', 're.compile', (['"""^Designated +root +has +priority +(?P<designated_root_priority>\\\\d+), +address +(?P<designated_root_address>[\\\\w\\\\.]+)$"""'], {}), "(\n '^Designated +root +has +priority +(?P<designated_root_priority>\\\\d+), +address +(?P<designated_root_address>[\\\\w\\\\.]+)$'\n )\n", (15131, 15264), False, 'import re\n'), ((15297, 15446), 're.compile', 're.compile', (['"""^Designated +bridge +has +priority +(?P<designated_bridge_priority>\\\\d+), +address +(?P<designated_bridge_address>[\\\\w\\\\.]+)$"""'], {}), "(\n '^Designated +bridge +has +priority +(?P<designated_bridge_priority>\\\\d+), +address +(?P<designated_bridge_address>[\\\\w\\\\.]+)$'\n )\n", (15307, 15446), False, 'import re\n'), ((15479, 15638), 're.compile', 're.compile', (['"""^Designated +port +id +is +(?P<designated_port_id>[\\\\w\\\\.]+), +designated +path +cost +(?P<designated_path_cost>\\\\d+)( +[\\\\w\\\\s\\\\,]+)?$"""'], {}), "(\n '^Designated +port +id +is +(?P<designated_port_id>[\\\\w\\\\.]+), +designated +path +cost +(?P<designated_path_cost>\\\\d+)( +[\\\\w\\\\s\\\\,]+)?$'\n )\n", (15489, 15638), False, 'import re\n'), ((15697, 15832), 're.compile', 're.compile', (['"""^Timers: +message +age +(?P<message_age>\\\\d+), +forward +delay +(?P<forward_delay>\\\\d+), +hold +(?P<hold>\\\\d+)$"""'], {}), "(\n '^Timers: +message +age +(?P<message_age>\\\\d+), +forward +delay +(?P<forward_delay>\\\\d+), +hold 
+(?P<hold>\\\\d+)$'\n )\n", (15707, 15832), False, 'import re\n'), ((15865, 15979), 're.compile', 're.compile', (['"""^Number +of +transitions +to +forwarding +state: +(?P<number_of_forward_transitions>\\\\d+)$"""'], {}), "(\n '^Number +of +transitions +to +forwarding +state: +(?P<number_of_forward_transitions>\\\\d+)$'\n )\n", (15875, 15979), False, 'import re\n'), ((16014, 16154), 're.compile', 're.compile', (['"""^Link +type +is +(?P<link_type>[\\\\w\\\\-]+) +by +default(, *(Boundary +(?P<boundary>\\\\w+)|Peer +is +(?P<peer>\\\\w+)))?$"""'], {}), "(\n '^Link +type +is +(?P<link_type>[\\\\w\\\\-]+) +by +default(, *(Boundary +(?P<boundary>\\\\w+)|Peer +is +(?P<peer>\\\\w+)))?$'\n )\n", (16024, 16154), False, 'import re\n'), ((16186, 16272), 're.compile', 're.compile', (['"""^Loop +guard +is +(?P<loop_guard>\\\\w+) +by +default +on +the +port$"""'], {}), "(\n '^Loop +guard +is +(?P<loop_guard>\\\\w+) +by +default +on +the +port$')\n", (16196, 16272), False, 'import re\n'), ((16283, 16372), 're.compile', 're.compile', (['"""^BPDU: +sent +(?P<bpdu_sent>\\\\d+), +received +(?P<bpdu_received>\\\\d+)$"""'], {}), "(\n '^BPDU: +sent +(?P<bpdu_sent>\\\\d+), +received +(?P<bpdu_received>\\\\d+)$')\n", (16293, 16372), False, 'import re\n'), ((25737, 25824), 're.compile', 're.compile', (['"""^\\\\#+ +MST(?P<inst>\\\\d+) +vlans +mapped: +(?P<vlan>[\\\\d\\\\-\\\\,\\\\s]+)$"""'], {}), "(\n '^\\\\#+ +MST(?P<inst>\\\\d+) +vlans +mapped: +(?P<vlan>[\\\\d\\\\-\\\\,\\\\s]+)$')\n", (25747, 25824), False, 'import re\n'), ((25857, 26004), 're.compile', 're.compile', (['"""^Bridge +address +(?P<bridge_address>[\\\\w\\\\.]+) +priority +(?P<bridge_priority>\\\\d+) +\\\\((\\\\d+) +sysid +(?P<sysid>\\\\d+)\\\\)$"""'], {}), "(\n '^Bridge +address +(?P<bridge_address>[\\\\w\\\\.]+) +priority +(?P<bridge_priority>\\\\d+) +\\\\((\\\\d+) +sysid +(?P<sysid>\\\\d+)\\\\)$'\n )\n", (25867, 26004), False, 'import re\n'), ((26067, 26138), 're.compile', 're.compile', (['"""^Root +this +switch +for +(the 
+)?(?P<root>[\\\\w\\\\.\\\\s]+)$"""'], {}), "('^Root +this +switch +for +(the +)?(?P<root>[\\\\w\\\\.\\\\s]+)$')\n", (26077, 26138), False, 'import re\n'), ((26236, 26377), 're.compile', 're.compile', (['"""^Root +address +(?P<root_address>[\\\\w\\\\.]+) +priority +(?P<root_priority>\\\\d+) +\\\\((\\\\d+) +sysid +(?P<sysid>\\\\d+)\\\\)$"""'], {}), "(\n '^Root +address +(?P<root_address>[\\\\w\\\\.]+) +priority +(?P<root_priority>\\\\d+) +\\\\((\\\\d+) +sysid +(?P<sysid>\\\\d+)\\\\)$'\n )\n", (26246, 26377), False, 'import re\n'), ((26444, 26627), 're.compile', 're.compile', (['"""^Operational +hello +time +(?P<hello_time>\\\\d+), +forward +delay +(?P<forward_delay>\\\\d+), +max +age +(?P<max_age>\\\\d+), +txholdcount +(?P<tx_hold_count>\\\\d+)$"""'], {}), "(\n '^Operational +hello +time +(?P<hello_time>\\\\d+), +forward +delay +(?P<forward_delay>\\\\d+), +max +age +(?P<max_age>\\\\d+), +txholdcount +(?P<tx_hold_count>\\\\d+)$'\n )\n", (26454, 26627), False, 'import re\n'), ((26721, 26896), 're.compile', 're.compile', (['"""^Configured +hello +time +(?P<hello_time>\\\\d+), +forward +delay +(?P<forward_delay>\\\\d+), +max +age +(?P<max_age>\\\\d+), +max +hops +(?P<max_hops>\\\\d+)$"""'], {}), "(\n '^Configured +hello +time +(?P<hello_time>\\\\d+), +forward +delay +(?P<forward_delay>\\\\d+), +max +age +(?P<max_age>\\\\d+), +max +hops +(?P<max_hops>\\\\d+)$'\n )\n", (26731, 26896), False, 'import re\n'), ((26990, 27115), 're.compile', 're.compile', (['"""^(?P<name>[\\\\w\\\\-\\\\.\\\\/]+) +of +MST(\\\\d+) +is +(?P<status>[\\\\w\\\\s]+)( +\\\\((?P<broken_reason>.*)\\\\))?$"""'], {}), "(\n '^(?P<name>[\\\\w\\\\-\\\\.\\\\/]+) +of +MST(\\\\d+) +is +(?P<status>[\\\\w\\\\s]+)( +\\\\((?P<broken_reason>.*)\\\\))?$'\n )\n", (27000, 27115), False, 'import re\n'), ((27176, 27303), 're.compile', 're.compile', (['"""^Port +info +port +id +(?P<port_id>[\\\\d\\\\.]+) +priority +(?P<port_priority>\\\\d+) +cost +(?P<cost>\\\\d+)$"""'], {}), "(\n '^Port +info +port +id 
+(?P<port_id>[\\\\d\\\\.]+) +priority +(?P<port_priority>\\\\d+) +cost +(?P<cost>\\\\d+)$'\n )\n", (27186, 27303), False, 'import re\n'), ((27397, 27572), 're.compile', 're.compile', (['"""^Designated +root +address +(?P<designated_root_address>[\\\\w\\\\.]+) +priority +(?P<designated_root_priority>\\\\d+) +cost +(?P<designated_root_cost>\\\\d+)$"""'], {}), "(\n '^Designated +root +address +(?P<designated_root_address>[\\\\w\\\\.]+) +priority +(?P<designated_root_priority>\\\\d+) +cost +(?P<designated_root_cost>\\\\d+)$'\n )\n", (27407, 27572), False, 'import re\n'), ((27666, 27877), 're.compile', 're.compile', (['"""^Design\\\\. +regional +root +address +(?P<designated_regional_root_address>[\\\\w\\\\.]+) +priority +(?P<designated_regional_root_priority>\\\\d+) +cost +(?P<designated_regional_root_cost>\\\\d+)$"""'], {}), "(\n '^Design\\\\. +regional +root +address +(?P<designated_regional_root_address>[\\\\w\\\\.]+) +priority +(?P<designated_regional_root_priority>\\\\d+) +cost +(?P<designated_regional_root_cost>\\\\d+)$'\n )\n", (27676, 27877), False, 'import re\n'), ((27971, 28166), 're.compile', 're.compile', (['"""^Designated +bridge +address +(?P<designated_bridge_address>[\\\\w\\\\.]+) +priority +(?P<designated_bridge_priority>\\\\d+) +port +id +(?P<designated_bridge_port_id>[\\\\d\\\\.]+)$"""'], {}), "(\n '^Designated +bridge +address +(?P<designated_bridge_address>[\\\\w\\\\.]+) +priority +(?P<designated_bridge_priority>\\\\d+) +port +id +(?P<designated_bridge_port_id>[\\\\d\\\\.]+)$'\n )\n", (27981, 28166), False, 'import re\n'), ((28263, 28445), 're.compile', 're.compile', (['"""^Timers: +message +expires +in +(?P<message_expires>\\\\d+) +sec, +forward +delay +(?P<forward_delay>\\\\d+), forward +transitions +(?P<forward_transitions>\\\\d+)$"""'], {}), "(\n '^Timers: +message +expires +in +(?P<message_expires>\\\\d+) +sec, +forward +delay +(?P<forward_delay>\\\\d+), forward +transitions +(?P<forward_transitions>\\\\d+)$'\n )\n", (28273, 28445), False, 
'import re\n'), ((28515, 28623), 're.compile', 're.compile', (['"""^Bpdus +(\\\\(\\\\w+\\\\) *)?sent +(?P<bpdu_sent>\\\\d+), +received +(?P<bpdu_received>\\\\d+)"""'], {}), "(\n '^Bpdus +(\\\\(\\\\w+\\\\) *)?sent +(?P<bpdu_sent>\\\\d+), +received +(?P<bpdu_received>\\\\d+)'\n )\n", (28525, 28623), False, 'import re\n'), ((33451, 33473), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""interfaces"""'], {}), "('interfaces')\n", (33459, 33473), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((34091, 34152), 're.compile', 're.compile', (['"""^Timer +interval: +(?P<interval>\\\\d+) +seconds$"""'], {}), "('^Timer +interval: +(?P<interval>\\\\d+) +seconds$')\n", (34101, 34152), False, 'import re\n'), ((34166, 34255), 're.compile', 're.compile', (['"""^(?P<name>[\\\\w\\\\-\\\\s\\\\(\\\\)\\\\"\\\\:"]+) +(?P<status>(Disabled|Enabled)+)$"""'], {}), '(\n \'^(?P<name>[\\\\w\\\\-\\\\s\\\\(\\\\)\\\\"\\\\:"]+) +(?P<status>(Disabled|Enabled)+)$\')\n', (34176, 34255), False, 'import re\n'), ((34286, 34392), 're.compile', 're.compile', (['"""^(?P<interface>[\\\\w\\\\-\\\\/\\\\.]+) +(?P<errdisable_reason>\\\\w+) +(?P<time_left>\\\\d+)$"""'], {}), "(\n '^(?P<interface>[\\\\w\\\\-\\\\/\\\\.]+) +(?P<errdisable_reason>\\\\w+) +(?P<time_left>\\\\d+)$'\n )\n", (34296, 34392), False, 'import re\n'), ((35727, 35732), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (35730, 35732), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((38408, 38448), 're.compile', 're.compile', (['"""^(MST|VLAN)(?P<inst>\\\\d+)$"""'], {}), "('^(MST|VLAN)(?P<inst>\\\\d+)$')\n", (38418, 38448), False, 'import re\n'), ((38462, 38527), 're.compile', 're.compile', (['"""^Spanning +tree +enabled p+rotocol +(?P<mode>\\\\w+)$"""'], {}), "('^Spanning +tree +enabled p+rotocol +(?P<mode>\\\\w+)$')\n", (38472, 38527), False, 'import re\n'), ((38541, 38595), 're.compile', 
're.compile', (['"""^Root +ID +Priority +(?P<priority>\\\\d+)$"""'], {}), "('^Root +ID +Priority +(?P<priority>\\\\d+)$')\n", (38551, 38595), False, 'import re\n'), ((38609, 38770), 're.compile', 're.compile', (['"""^Bridge +ID +Priority +(?P<priority>\\\\d+)( *\\\\(priority +(?P<configured_bridge_priority>\\\\d+) +sys\\\\-id\\\\-ext +(?P<sys_id_ext>\\\\d+)\\\\))?$"""'], {}), "(\n '^Bridge +ID +Priority +(?P<priority>\\\\d+)( *\\\\(priority +(?P<configured_bridge_priority>\\\\d+) +sys\\\\-id\\\\-ext +(?P<sys_id_ext>\\\\d+)\\\\))?$'\n )\n", (38619, 38770), False, 'import re\n'), ((38824, 38871), 're.compile', 're.compile', (['"""^Address +(?P<address>[\\\\w\\\\.]+)$"""'], {}), "('^Address +(?P<address>[\\\\w\\\\.]+)$')\n", (38834, 38871), False, 'import re\n'), ((38884, 38920), 're.compile', 're.compile', (['"""^Cost +(?P<cost>\\\\d+)$"""'], {}), "('^Cost +(?P<cost>\\\\d+)$')\n", (38894, 38920), False, 'import re\n'), ((38934, 39008), 're.compile', 're.compile', (['"""^Port +(?P<port>\\\\d+) +\\\\((?P<interface>[\\\\w\\\\-\\\\/\\\\.]+)\\\\)$"""'], {}), "('^Port +(?P<port>\\\\d+) +\\\\((?P<interface>[\\\\w\\\\-\\\\/\\\\.]+)\\\\)$')\n", (38944, 39008), False, 'import re\n'), ((39016, 39159), 're.compile', 're.compile', (['"""Hello +Time +(?P<hello_time>\\\\d+) +sec +Max +Age +(?P<max_age>\\\\d+) +sec +Forward +Delay +(?P<forward_delay>\\\\d+) +sec$"""'], {}), "(\n 'Hello +Time +(?P<hello_time>\\\\d+) +sec +Max +Age +(?P<max_age>\\\\d+) +sec +Forward +Delay +(?P<forward_delay>\\\\d+) +sec$'\n )\n", (39026, 39159), False, 'import re\n'), ((39217, 39271), 're.compile', 're.compile', (['"""^Aging +Time +(?P<aging_time>\\\\d+) +sec$"""'], {}), "('^Aging +Time +(?P<aging_time>\\\\d+) +sec$')\n", (39227, 39271), False, 'import re\n'), ((39286, 39538), 're.compile', 're.compile', (['"""^(?P<interface>[\\\\w\\\\-\\\\/\\\\.]+) +(?P<role>[\\\\w\\\\*]+) +(?P<port_state>[A-Z\\\\*]+) *(?P<cost>\\\\d+) +(?P<port_priority>\\\\d+)\\\\.(?P<port_num>\\\\d+) +(?P<type>\\\\w+)( 
+(Bound\\\\((?P<bound>\\\\w+)\\\\)|Peer\\\\((?P<peer>\\\\w+)\\\\)))?( +\\\\*\\\\S+)?$"""'], {}), "(\n '^(?P<interface>[\\\\w\\\\-\\\\/\\\\.]+) +(?P<role>[\\\\w\\\\*]+) +(?P<port_state>[A-Z\\\\*]+) *(?P<cost>\\\\d+) +(?P<port_priority>\\\\d+)\\\\.(?P<port_num>\\\\d+) +(?P<type>\\\\w+)( +(Bound\\\\((?P<bound>\\\\w+)\\\\)|Peer\\\\((?P<peer>\\\\w+)\\\\)))?( +\\\\*\\\\S+)?$'\n )\n", (39296, 39538), False, 'import re\n'), ((43671, 43711), 're.compile', 're.compile', (['"""^Name +\\\\[(?P<name>.*)\\\\]$"""'], {}), "('^Name +\\\\[(?P<name>.*)\\\\]$')\n", (43681, 43711), False, 'import re\n'), ((43724, 43833), 're.compile', 're.compile', (['"""^Revision +(?P<revision>\\\\d+) +Instances +configured +(?P<instances_configured>\\\\d+)$"""'], {}), "(\n '^Revision +(?P<revision>\\\\d+) +Instances +configured +(?P<instances_configured>\\\\d+)$'\n )\n", (43734, 43833), False, 'import re\n'), ((43864, 43928), 're.compile', 're.compile', (['"""^(?P<inst>\\\\d+) +(?P<vlan_mapped>[\\\\d\\\\,\\\\s\\\\-]+)$"""'], {}), "('^(?P<inst>\\\\d+) +(?P<vlan_mapped>[\\\\d\\\\,\\\\s\\\\-]+)$')\n", (43874, 43928), False, 'import re\n'), ((1553, 1582), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""operational_value"""'], {}), "('operational_value')\n", (1561, 1582), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1640, 1645), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (1643, 1645), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((2158, 2181), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""num_of_msts"""'], {}), "('num_of_msts')\n", (2166, 2181), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((2200, 2224), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""num_of_vlans"""'], {}), "('num_of_vlans')\n", (2208, 2224), False, 'from 
genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9081, 9099), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""domain"""'], {}), "('domain')\n", (9089, 9099), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9118, 9137), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""pvst_id"""'], {}), "('pvst_id')\n", (9126, 9137), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9156, 9172), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""name"""'], {}), "('name')\n", (9164, 9172), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9191, 9211), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""revision"""'], {}), "('revision')\n", (9199, 9211), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9230, 9249), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""max_hop"""'], {}), "('max_hop')\n", (9238, 9249), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9364, 9386), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""hold_count"""'], {}), "('hold_count')\n", (9372, 9386), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9405, 9410), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (9408, 9410), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23256, 23261), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (23259, 23261), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((33376, 33381), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (33379, 
33381), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((33489, 33494), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (33492, 33494), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((35777, 35782), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (35780, 35782), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((1691, 1696), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (1694, 1696), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9455, 9460), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (9458, 9460), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23312, 23328), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""vlan"""'], {}), "('vlan')\n", (23320, 23328), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23460, 23476), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""root"""'], {}), "('root')\n", (23468, 23476), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23499, 23523), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""root_address"""'], {}), "('root_address')\n", (23507, 23523), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23546, 23571), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""root_priority"""'], {}), "('root_priority')\n", (23554, 23571), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23594, 23617), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""operational"""'], {}), 
"('operational')\n", (23602, 23617), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((23814, 23836), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""configured"""'], {}), "('configured')\n", (23822, 23836), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((35827, 35832), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (35830, 35832), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((43117, 43122), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (43120, 43122), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9484, 9502), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""mst_id"""'], {}), "('mst_id')\n", (9492, 9502), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9529, 9545), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""vlan"""'], {}), "('vlan')\n", (9537, 9545), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9572, 9591), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""vlan_id"""'], {}), "('vlan_id')\n", (9580, 9591), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9618, 9640), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""hello_time"""'], {}), "('hello_time')\n", (9626, 9640), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9667, 9686), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""max_age"""'], {}), "('max_age')\n", (9675, 9686), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9713, 9741), 
'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""forwarding_delay"""'], {}), "('forwarding_delay')\n", (9721, 9741), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9768, 9790), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""hold_count"""'], {}), "('hold_count')\n", (9776, 9790), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((9945, 9978), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""root_of_spanning_tree"""'], {}), "('root_of_spanning_tree')\n", (9953, 9978), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((10246, 10276), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""topology_from_port"""'], {}), "('topology_from_port')\n", (10254, 10276), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((10695, 10718), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""aging_timer"""'], {}), "('aging_timer')\n", (10703, 10718), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((24064, 24069), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (24067, 24069), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((35856, 35861), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (35859, 35861), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((10785, 10790), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (10788, 10790), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((24136, 24161), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""broken_reason"""'], {}), "('broken_reason')\n", 
(24144, 24161), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((24518, 24559), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""designated_regional_root_cost"""'], {}), "('designated_regional_root_cost')\n", (24526, 24559), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((24590, 24635), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""designated_regional_root_priority"""'], {}), "('designated_regional_root_priority')\n", (24598, 24635), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((24666, 24710), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""designated_regional_root_address"""'], {}), "('designated_regional_root_address')\n", (24674, 24710), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((35986, 36002), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""cost"""'], {}), "('cost')\n", (35994, 36002), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36033, 36049), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""port"""'], {}), "('port')\n", (36041, 36049), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36080, 36101), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""interface"""'], {}), "('interface')\n", (36088, 36101), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36132, 36170), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""configured_bridge_priority"""'], {}), "('configured_bridge_priority')\n", (36140, 36170), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36201, 36223), 
'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""sys_id_ext"""'], {}), "('sys_id_ext')\n", (36209, 36223), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36383, 36405), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""aging_time"""'], {}), "('aging_time')\n", (36391, 36405), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36496, 36501), 'genie.metaparser.util.schemaengine.Any', 'Any', ([], {}), '()\n', (36499, 36501), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((11703, 11723), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""boundary"""'], {}), "('boundary')\n", (11711, 11723), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((11758, 11774), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""peer"""'], {}), "('peer')\n", (11766, 11774), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((11809, 11831), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""loop_guard"""'], {}), "('loop_guard')\n", (11817, 11831), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36798, 36814), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""peer"""'], {}), "('peer')\n", (36806, 36814), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n'), ((36849, 36866), 'genie.metaparser.util.schemaengine.Optional', 'Optional', (['"""bound"""'], {}), "('bound')\n", (36857, 36866), False, 'from genie.metaparser.util.schemaengine import Schema, Any, Optional, Or, And, Default, Use\n')] |
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Dict, Any, Sequence, Tuple, Optional
import cirq
class EmptyActOnArgs(cirq.ActOnArgs):
    """Minimal `cirq.ActOnArgs` stand-in that tracks qubits but no state.

    Every operation is accepted without simulation, so the class can be used
    to exercise `ActOnArgsContainer` split/join bookkeeping in isolation.
    """

    def __init__(self, qubits, logs):
        super().__init__(
            qubits=qubits,
            log_of_measurement_results=logs,
        )

    def _perform_measurement(self, qubits: Sequence[cirq.Qid]) -> List[int]:
        # There is no state to measure; report an empty outcome.
        return []

    def copy(self) -> 'EmptyActOnArgs':
        return EmptyActOnArgs(
            qubits=self.qubits,
            logs=dict(self.log_of_measurement_results),
        )

    def _act_on_fallback_(self, action: Any, qubits: Sequence[cirq.Qid], allow_decompose: bool):
        # Accept every operation unconditionally.
        return True

    def kronecker_product(self, other: 'EmptyActOnArgs') -> 'EmptyActOnArgs':
        return EmptyActOnArgs(
            qubits=self.qubits + other.qubits,
            logs=self.log_of_measurement_results,
        )

    def factor(
        self,
        qubits: Sequence['cirq.Qid'],
        *,
        validate=True,
        atol=1e-07,
    ) -> Tuple['EmptyActOnArgs', 'EmptyActOnArgs']:
        # Split the tracked qubits into the extracted set and the remainder,
        # preserving the original ordering of the leftover qubits.
        extracted = EmptyActOnArgs(
            qubits=qubits,
            logs=self.log_of_measurement_results,
        )
        leftover = tuple(q for q in self.qubits if q not in qubits)
        remainder = EmptyActOnArgs(
            qubits=leftover,
            logs=self.log_of_measurement_results,
        )
        return extracted, remainder

    def transpose_to_qubit_order(self, qubits: Sequence['cirq.Qid']) -> 'EmptyActOnArgs':
        return EmptyActOnArgs(
            qubits=qubits,
            logs=self.log_of_measurement_results,
        )

    def sample(self, qubits, repetitions=1, seed=None):
        pass
q0, q1 = qs2 = cirq.LineQubit.range(2)


def create_container(
    qubits: Sequence['cirq.Qid'],
    split_untangled_states=True,
) -> cirq.ActOnArgsContainer[EmptyActOnArgs]:
    """Build an `ActOnArgsContainer` of `EmptyActOnArgs` over ``qubits``.

    With ``split_untangled_states`` each qubit gets its own state object plus
    a separate object under the ``None`` key; otherwise a single shared state
    backs every qubit and the ``None`` key.
    """
    log: Dict[str, Any] = {}
    args_map: Dict[Optional['cirq.Qid'], EmptyActOnArgs] = {}
    if split_untangled_states:
        for q in reversed(qubits):
            args_map[q] = EmptyActOnArgs([q], log)
        args_map[None] = EmptyActOnArgs((), log)
    else:
        shared = EmptyActOnArgs(qubits, log)
        for q in qubits:
            args_map[q] = shared
        args_map[None] = shared
    return cirq.ActOnArgsContainer(args_map, qubits, split_untangled_states, log)
def test_entanglement_causes_join():
    """An entangling gate merges the two per-qubit states into one."""
    state = create_container(qs2)
    assert len(set(state.values())) == 3
    state.apply_operation(cirq.CNOT(q0, q1))
    assert len(set(state.values())) == 2
    assert state[q0] is state[q1]
    assert state[None] is not state[q0]
def test_identity_does_not_join():
    """A two-qubit identity leaves the per-qubit states separate."""
    state = create_container(qs2)
    assert len(set(state.values())) == 3
    state.apply_operation(cirq.IdentityGate(2)(q0, q1))
    assert len(set(state.values())) == 3
    assert state[q0] is not state[q1]
    assert state[q0] is not state[None]
def test_measurement_causes_split():
    """Measuring one qubit of an entangled pair splits the joint state."""
    state = create_container(qs2)
    state.apply_operation(cirq.CNOT(q0, q1))
    assert len(set(state.values())) == 2
    state.apply_operation(cirq.measure(q0))
    assert len(set(state.values())) == 3
    assert state[q0] is not state[q1]
    assert state[q0] is not state[None]
def test_reset_causes_split():
    """Resetting one qubit of an entangled pair splits the joint state."""
    state = create_container(qs2)
    state.apply_operation(cirq.CNOT(q0, q1))
    assert len(set(state.values())) == 2
    state.apply_operation(cirq.reset(q0))
    assert len(set(state.values())) == 3
    assert state[q0] is not state[q1]
    assert state[q0] is not state[None]
def test_measurement_does_not_split_if_disabled():
    """With splitting disabled, measurement keeps one shared state object."""
    state = create_container(qs2, False)
    state.apply_operation(cirq.CNOT(q0, q1))
    assert len(set(state.values())) == 1
    state.apply_operation(cirq.measure(q0))
    assert len(set(state.values())) == 1
    assert state[q1] is state[q0]
    assert state[None] is state[q0]
def test_reset_does_not_split_if_disabled():
    """With splitting disabled, reset keeps one shared state object."""
    state = create_container(qs2, False)
    state.apply_operation(cirq.CNOT(q0, q1))
    assert len(set(state.values())) == 1
    state.apply_operation(cirq.reset(q0))
    assert len(set(state.values())) == 1
    assert state[q1] is state[q0]
    assert state[None] is state[q0]
def test_measurement_of_all_qubits_causes_split():
    """Measuring every qubit of an entangled pair splits all factors."""
    state = create_container(qs2)
    state.apply_operation(cirq.CNOT(q0, q1))
    assert len(set(state.values())) == 2
    state.apply_operation(cirq.measure(q0, q1))
    assert len(set(state.values())) == 3
    assert state[q0] is not state[q1]
    assert state[q0] is not state[None]
def test_measurement_in_single_qubit_circuit_passes():
    """Measurement works when the container holds a single qubit."""
    state = create_container([q0])
    assert len(set(state.values())) == 2
    state.apply_operation(cirq.measure(q0))
    assert len(set(state.values())) == 2
    assert state[q0] is not state[None]
def test_reorder_succeeds():
    """`transpose_to_qubit_order` returns a state with the requested order."""
    state = create_container(qs2, False)
    reordered = state[q0].transpose_to_qubit_order([q1, q0])
    assert reordered.qubits == (q1, q0)
def test_copy_succeeds():
    """Copying a state preserves its qubit tuple."""
    state = create_container(qs2, False)
    duplicate = state[q0].copy()
    assert duplicate.qubits == (q0, q1)
def test_merge_succeeds():
    """`create_merged_state` yields a single state over all qubits."""
    state = create_container(qs2, False)
    merged = state.create_merged_state()
    assert merged.qubits == (q0, q1)
def test_swap_does_not_merge():
    """A full SWAP exchanges the per-qubit states instead of joining them."""
    state = create_container(qs2)
    before_q0 = state[q0]
    before_q1 = state[q1]
    state.apply_operation(cirq.SWAP(q0, q1))
    assert len(set(state.values())) == 3
    assert state[q0] is not before_q0
    assert state[q1] is before_q0
    assert state[q1] is not before_q1
    assert state[q0] is before_q1
    assert state[q0].qubits == (q0,)
    assert state[q1].qubits == (q1,)
def test_half_swap_does_merge():
    """A sqrt-SWAP entangles, so the two per-qubit states must join."""
    state = create_container(qs2)
    state.apply_operation(cirq.SWAP(q0, q1) ** 0.5)
    assert len(set(state.values())) == 2
    assert state[q0] is state[q1]
def test_swap_after_entangle_reorders():
    """SWAP on an already-joined state reorders its qubits in place."""
    state = create_container(qs2)
    state.apply_operation(cirq.CX(q0, q1))
    assert len(set(state.values())) == 2
    assert state[q0].qubits == (q0, q1)
    state.apply_operation(cirq.SWAP(q0, q1))
    assert len(set(state.values())) == 2
    assert state[q0] is state[q1]
    assert state[q0].qubits == (q1, q0)
| [
"cirq.SWAP",
"cirq.CNOT",
"cirq.IdentityGate",
"cirq.LineQubit.range",
"cirq.reset",
"cirq.measure",
"cirq.ActOnArgsContainer",
"cirq.CX"
] | [((2262, 2285), 'cirq.LineQubit.range', 'cirq.LineQubit.range', (['(2)'], {}), '(2)\n', (2282, 2285), False, 'import cirq\n'), ((2889, 2959), 'cirq.ActOnArgsContainer', 'cirq.ActOnArgsContainer', (['args_map', 'qubits', 'split_untangled_states', 'log'], {}), '(args_map, qubits, split_untangled_states, log)\n', (2912, 2959), False, 'import cirq\n'), ((3097, 3114), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (3106, 3114), False, 'import cirq\n'), ((3602, 3619), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (3611, 3619), False, 'import cirq\n'), ((3686, 3702), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (3698, 3702), False, 'import cirq\n'), ((3909, 3926), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (3918, 3926), False, 'import cirq\n'), ((3993, 4007), 'cirq.reset', 'cirq.reset', (['q0'], {}), '(q0)\n', (4003, 4007), False, 'import cirq\n'), ((4241, 4258), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (4250, 4258), False, 'import cirq\n'), ((4325, 4341), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (4337, 4341), False, 'import cirq\n'), ((4561, 4578), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (4570, 4578), False, 'import cirq\n'), ((4645, 4659), 'cirq.reset', 'cirq.reset', (['q0'], {}), '(q0)\n', (4655, 4659), False, 'import cirq\n'), ((4878, 4895), 'cirq.CNOT', 'cirq.CNOT', (['q0', 'q1'], {}), '(q0, q1)\n', (4887, 4895), False, 'import cirq\n'), ((4962, 4982), 'cirq.measure', 'cirq.measure', (['q0', 'q1'], {}), '(q0, q1)\n', (4974, 4982), False, 'import cirq\n'), ((5254, 5270), 'cirq.measure', 'cirq.measure', (['q0'], {}), '(q0)\n', (5266, 5270), False, 'import cirq\n'), ((5937, 5954), 'cirq.SWAP', 'cirq.SWAP', (['q0', 'q1'], {}), '(q0, q1)\n', (5946, 5954), False, 'import cirq\n'), ((6488, 6503), 'cirq.CX', 'cirq.CX', (['q0', 'q1'], {}), '(q0, q1)\n', (6495, 6503), False, 'import cirq\n'), ((6609, 6626), 'cirq.SWAP', 'cirq.SWAP', (['q0', 'q1'], {}), '(q0, q1)\n', 
(6618, 6626), False, 'import cirq\n'), ((3361, 3381), 'cirq.IdentityGate', 'cirq.IdentityGate', (['(2)'], {}), '(2)\n', (3378, 3381), False, 'import cirq\n'), ((6289, 6306), 'cirq.SWAP', 'cirq.SWAP', (['q0', 'q1'], {}), '(q0, q1)\n', (6298, 6306), False, 'import cirq\n')] |
# PRIVATE/ADMIN file!!!
# DO NOT share to participants
import base64, string, random
from typing import Dict

FLAG = 'sdctf{OBscUr1ty_a1nt_s3CURITy}'
SEED = 0x1337face

# Standard base64 alphabet in table order; '=' padding is handled separately.
B64_ALPHABET = string.ascii_uppercase + string.ascii_lowercase + string.digits + '+/'
assert len(B64_ALPHABET) == 64
PAD_CHAR = '='

# Reverse lookup: base64 character -> its index in the alphabet.
letter2index: Dict[str, int] = {letter: i for i, letter in enumerate(B64_ALPHABET)}

# Deterministic shift so the challenge output is reproducible from SEED.
random.seed(SEED)
shift = random.randint(1, 63)
print('Shift: {}'.format(shift))

s_b64 = base64.b64encode(FLAG.encode()).decode()


def char_shift(c: str):
    """Caesar-shift one base64 character; padding passes through unchanged."""
    if c == PAD_CHAR:
        return PAD_CHAR
    return B64_ALPHABET[(letter2index[c] + shift) % 64]


print(''.join(map(char_shift, s_b64)))
| [
"random.randint",
"random.seed"
] | [((436, 453), 'random.seed', 'random.seed', (['SEED'], {}), '(SEED)\n', (447, 453), False, 'import base64, string, random\n'), ((463, 484), 'random.randint', 'random.randint', (['(1)', '(63)'], {}), '(1, 63)\n', (477, 484), False, 'import base64, string, random\n')] |
# -*- coding: utf-8 -*-
"""
:math:`IC_TC_P` Colour Encoding
===============================
Defines the :math:`IC_TC_P` colour encoding related transformations:
- :func:`colour.RGB_to_ICtCp`
- :func:`colour.ICtCp_to_RGB`
- :func:`colour.XYZ_to_ICtCp`
- :func:`colour.ICtCp_to_XYZ`
References
----------
- :cite:`Dolby2016a` : Dolby. (2016). WHAT IS ICtCp? - INTRODUCTION.
https://www.dolby.com/us/en/technologies/dolby-vision/ICtCp-white-paper.pdf
- :cite:`InternationalTelecommunicationUnion2018` : International
Telecommunication Union. (2018). Recommendation ITU-R BT.2100-2 - Image
parameter values for high dynamic range television for use in production
and international programme exchange.
https://www.itu.int/dms_pubrec/itu-r/rec/bt/\
R-REC-BT.2100-2-201807-I!!PDF-E.pdf
- :cite:`Lu2016c` : <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., Pytlarz,
J., <NAME>., <NAME>., & <NAME>. (2016). ITP Colour Space and Its
Compression Performance for High Dynamic Range and Wide Colour Gamut Video
Distribution. ZTE Communications, 14(1), 32-38.
"""
import numpy as np
from colour.algebra import vector_dot
from colour.colorimetry import CCS_ILLUMINANTS
from colour.models.rgb import RGB_COLOURSPACES, RGB_to_XYZ, XYZ_to_RGB
from colour.models.rgb.transfer_functions import (
eotf_ST2084, eotf_inverse_ST2084, oetf_HLG_BT2100, oetf_inverse_HLG_BT2100)
from colour.utilities import (domain_range_scale, from_range_1, to_domain_1,
validate_method)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '<EMAIL>'
__status__ = 'Production'
__all__ = [
'MATRIX_ICTCP_RGB_TO_LMS', 'MATRIX_ICTCP_LMS_TO_RGB',
'MATRIX_ICTCP_LMS_P_TO_ICTCP', 'MATRIX_ICTCP_ICTCP_TO_LMS_P',
'MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2',
'MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2', 'RGB_to_ICtCp', 'ICtCp_to_RGB',
'XYZ_to_ICtCp', 'ICtCp_to_XYZ'
]
# All fixed matrices below are specified with integer coefficients divided by
# 4096 (2**12), matching the 12-bit fixed-point values published by Dolby and
# in Recommendation ITU-R BT.2100.
MATRIX_ICTCP_RGB_TO_LMS = np.array([
    [1688, 2146, 262],
    [683, 2951, 462],
    [99, 309, 3688],
]) / 4096
"""
*ITU-R BT.2020* colourspace to normalised cone responses matrix.
MATRIX_ICTCP_RGB_TO_LMS : array_like, (3, 3)
"""
MATRIX_ICTCP_LMS_TO_RGB = np.linalg.inv(MATRIX_ICTCP_RGB_TO_LMS)
"""
:math:`IC_TC_P` colourspace normalised cone responses to *ITU-R BT.2020*
colourspace matrix.
MATRIX_ICTCP_LMS_TO_RGB : array_like, (3, 3)
"""
MATRIX_ICTCP_LMS_P_TO_ICTCP = np.array([
    [2048, 2048, 0],
    [6610, -13613, 7003],
    [17933, -17390, -543],
]) / 4096
"""
:math:`LMS_p` *SMPTE ST 2084:2014* encoded normalised cone responses to
:math:`IC_TC_P` colour encoding matrix.
MATRIX_ICTCP_LMS_P_TO_ICTCP : array_like, (3, 3)
"""
MATRIX_ICTCP_ICTCP_TO_LMS_P = np.linalg.inv(MATRIX_ICTCP_LMS_P_TO_ICTCP)
"""
:math:`IC_TC_P` colour encoding to :math:`LMS_p` *SMPTE ST 2084:2014* encoded
normalised cone responses matrix.
MATRIX_ICTCP_ICTCP_TO_LMS_P : array_like, (3, 3)
"""
# ITU-R BT.2100-2 defines a distinct ICtCp matrix for its HLG variant; the PQ
# variants share the Dolby 2016 matrices above.
MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2 = np.array([
    [2048, 2048, 0],
    [3625, -7465, 3840],
    [9500, -9212, -288],
]) / 4096
"""
:math:`LMS_p` *SMPTE ST 2084:2014* encoded normalised cone responses to
:math:`IC_TC_P` colour encoding matrix as given in *ITU-R BT.2100-2*.
MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2 : array_like, (3, 3)
"""
MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2 = np.linalg.inv(
    MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2)
"""
:math:`IC_TC_P` colour encoding to :math:`LMS_p` *SMPTE ST 2084:2014* encoded
normalised cone responses matrix as given in *ITU-R BT.2100-2*.
MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2 : array_like, (3, 3)
"""
def RGB_to_ICtCp(RGB, method='Dolby 2016', L_p=10000):
    """
    Convert from *ITU-R BT.2020* colourspace to the :math:`IC_TC_P` colour
    encoding.

    Parameters
    ----------
    RGB : array_like
        *ITU-R BT.2020* colourspace array.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method. The *PQ* variants are aliases for *Dolby 2016*
        (*SMPTE ST 2084:2014* inverse EOTF); the *HLG* variants use the
        *Reference HLG* OETF, with *ITU-R BT.2100-2 HLG* additionally using
        the :math:`IC_TC_P` matrix specific to *ITU-R BT.2100-2*.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for the *SMPTE ST 2084:2014*
        non-linear encoding; keep at the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        :math:`IC_TC_P` colour encoding array.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function; domain/range scaling does not apply to it.

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> RGB = np.array([0.45620519, 0.03081071, 0.04091952])
    >>> RGB_to_ICtCp(RGB)  # doctest: +ELLIPSIS
    array([ 0.0735136...,  0.0047525...,  0.0935159...])
    >>> RGB_to_ICtCp(RGB, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.6256789..., -0.0198449...,  0.3591125...])
    """
    RGB = to_domain_1(RGB)
    method = validate_method(method, [
        'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'
    ])

    uses_hlg = 'hlg' in method
    uses_BT2100_2 = '2100-2' in method

    LMS = vector_dot(MATRIX_ICTCP_RGB_TO_LMS, RGB)

    # Transfer functions are absolute; bypass domain/range scaling.
    with domain_range_scale('ignore'):
        if uses_hlg:
            LMS_p = oetf_HLG_BT2100(LMS)
        else:
            LMS_p = eotf_inverse_ST2084(LMS, L_p)

    if uses_hlg and uses_BT2100_2:
        matrix = MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2
    else:
        matrix = MATRIX_ICTCP_LMS_P_TO_ICTCP

    return from_range_1(vector_dot(matrix, LMS_p))
def ICtCp_to_RGB(ICtCp, method='Dolby 2016', L_p=10000):
    """
    Convert from the :math:`IC_TC_P` colour encoding to *ITU-R BT.2020*
    colourspace.

    Parameters
    ----------
    ICtCp : array_like
        :math:`IC_TC_P` colour encoding array.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method. The *PQ* variants are aliases for *Dolby 2016*
        (*SMPTE ST 2084:2014* EOTF); the *HLG* variants use the *Reference
        HLG* inverse OETF, with *ITU-R BT.2100-2 HLG* additionally using the
        :math:`IC_TC_P` matrix specific to *ITU-R BT.2100-2*.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for the *SMPTE ST 2084:2014*
        non-linear encoding; keep at the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        *ITU-R BT.2020* colourspace array.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function; domain/range scaling does not apply to it.

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> ICtCp = np.array([0.07351364, 0.00475253, 0.09351596])
    >>> ICtCp_to_RGB(ICtCp)  # doctest: +ELLIPSIS
    array([ 0.4562052...,  0.0308107...,  0.0409195...])
    >>> ICtCp = np.array([0.62567899, -0.01984490, 0.35911259])
    >>> ICtCp_to_RGB(ICtCp, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.4562052...,  0.0308107...,  0.0409195...])
    """
    ICtCp = to_domain_1(ICtCp)
    method = validate_method(method, [
        'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'
    ])

    uses_hlg = 'hlg' in method
    uses_BT2100_2 = '2100-2' in method

    if uses_hlg and uses_BT2100_2:
        LMS_p = vector_dot(MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2, ICtCp)
    else:
        LMS_p = vector_dot(MATRIX_ICTCP_ICTCP_TO_LMS_P, ICtCp)

    # Transfer functions are absolute; bypass domain/range scaling.
    with domain_range_scale('ignore'):
        if uses_hlg:
            LMS = oetf_inverse_HLG_BT2100(LMS_p)
        else:
            LMS = eotf_ST2084(LMS_p, L_p)

    return from_range_1(vector_dot(MATRIX_ICTCP_LMS_TO_RGB, LMS))
def XYZ_to_ICtCp(XYZ,
                 illuminant=CCS_ILLUMINANTS[
                     'CIE 1931 2 Degree Standard Observer']['D65'],
                 chromatic_adaptation_transform='CAT02',
                 method='Dolby 2016',
                 L_p=10000):
    """
    Convert from *CIE XYZ* tristimulus values to the :math:`IC_TC_P` colour
    encoding, via *ITU-R BT.2020* colourspace.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values.
    illuminant : array_like, optional
        Source illuminant chromaticity coordinates.
    chromatic_adaptation_transform : unicode, optional
        *Chromatic adaptation* transform, e.g. **{'CAT02', 'XYZ Scaling',
        'Bradford', 'Sharp', 'Fairchild', 'CMCCAT97', 'CMCCAT2000',
        'CAT02 Brill 2008', 'CAT16', 'Bianco 2010', 'Bianco PC 2010'}**.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method; the *PQ* variants are aliases for *Dolby 2016*.
        See :func:`RGB_to_ICtCp` for the variant details.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for the *SMPTE ST 2084:2014*
        non-linear encoding; keep at the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        :math:`IC_TC_P` colour encoding array.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function; domain/range scaling does not apply to it.

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> XYZ = np.array([0.20654008, 0.12197225, 0.05136952])
    >>> XYZ_to_ICtCp(XYZ)  # doctest: +ELLIPSIS
    array([ 0.0685809..., -0.0028384...,  0.0602098...])
    >>> XYZ_to_ICtCp(XYZ, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.5924279..., -0.0374073...,  0.2512267...])
    """
    colourspace = RGB_COLOURSPACES['ITU-R BT.2020']

    RGB = XYZ_to_RGB(
        XYZ,
        illuminant,
        colourspace.whitepoint,
        colourspace.matrix_XYZ_to_RGB,
        chromatic_adaptation_transform,
    )

    return RGB_to_ICtCp(RGB, method, L_p)
def ICtCp_to_XYZ(ICtCp,
                 illuminant=CCS_ILLUMINANTS[
                     'CIE 1931 2 Degree Standard Observer']['D65'],
                 chromatic_adaptation_transform='CAT02',
                 method='Dolby 2016',
                 L_p=10000):
    """
    Convert from the :math:`IC_TC_P` colour encoding to *CIE XYZ* tristimulus
    values, via *ITU-R BT.2020* colourspace.

    Parameters
    ----------
    ICtCp : array_like
        :math:`IC_TC_P` colour encoding array.
    illuminant : array_like, optional
        Target illuminant chromaticity coordinates.
    chromatic_adaptation_transform : unicode, optional
        *Chromatic adaptation* transform, e.g. **{'CAT02', 'XYZ Scaling',
        'Bradford', 'Sharp', 'Fairchild', 'CMCCAT97', 'CMCCAT2000',
        'CAT02 Brill 2008', 'CAT16', 'Bianco 2010', 'Bianco PC 2010'}**.
    method : unicode, optional
        **{'Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',
        'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'}**,
        Computation method; the *PQ* variants are aliases for *Dolby 2016*.
        See :func:`ICtCp_to_RGB` for the variant details.
    L_p : numeric, optional
        Display peak luminance :math:`cd/m^2` for the *SMPTE ST 2084:2014*
        non-linear encoding; keep at the default :math:`10000 cd/m^2` for
        practical applications.

    Returns
    -------
    ndarray
        *CIE XYZ* tristimulus values.

    Warnings
    --------
    The underlying *SMPTE ST 2084:2014* transfer function is an absolute
    transfer function; domain/range scaling does not apply to it.

    References
    ----------
    :cite:`Dolby2016a`, :cite:`Lu2016c`

    Examples
    --------
    >>> ICtCp = np.array([0.06858097, -0.00283842, 0.06020983])
    >>> ICtCp_to_XYZ(ICtCp)  # doctest: +ELLIPSIS
    array([ 0.2065400...,  0.1219722...,  0.0513695...])
    >>> ICtCp = np.array([0.59242792, -0.03740730, 0.25122675])
    >>> ICtCp_to_XYZ(ICtCp, method='ITU-R BT.2100-2 HLG')  # doctest: +ELLIPSIS
    array([ 0.2065400...,  0.1219722...,  0.0513695...])
    """
    RGB = ICtCp_to_RGB(ICtCp, method, L_p)

    colourspace = RGB_COLOURSPACES['ITU-R BT.2020']

    return RGB_to_XYZ(
        RGB,
        colourspace.whitepoint,
        illuminant,
        colourspace.matrix_RGB_to_XYZ,
        chromatic_adaptation_transform,
    )
| [
"colour.algebra.vector_dot",
"colour.utilities.to_domain_1",
"colour.utilities.from_range_1",
"colour.models.rgb.RGB_to_XYZ",
"colour.models.rgb.transfer_functions.oetf_HLG_BT2100",
"colour.models.rgb.XYZ_to_RGB",
"colour.utilities.domain_range_scale",
"colour.models.rgb.transfer_functions.eotf_invers... | [((2348, 2386), 'numpy.linalg.inv', 'np.linalg.inv', (['MATRIX_ICTCP_RGB_TO_LMS'], {}), '(MATRIX_ICTCP_RGB_TO_LMS)\n', (2361, 2386), True, 'import numpy as np\n'), ((2861, 2903), 'numpy.linalg.inv', 'np.linalg.inv', (['MATRIX_ICTCP_LMS_P_TO_ICTCP'], {}), '(MATRIX_ICTCP_LMS_P_TO_ICTCP)\n', (2874, 2903), True, 'import numpy as np\n'), ((3467, 3522), 'numpy.linalg.inv', 'np.linalg.inv', (['MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2'], {}), '(MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2)\n', (3480, 3522), True, 'import numpy as np\n'), ((2115, 2179), 'numpy.array', 'np.array', (['[[1688, 2146, 262], [683, 2951, 462], [99, 309, 3688]]'], {}), '([[1688, 2146, 262], [683, 2951, 462], [99, 309, 3688]])\n', (2123, 2179), True, 'import numpy as np\n'), ((2565, 2637), 'numpy.array', 'np.array', (['[[2048, 2048, 0], [6610, -13613, 7003], [17933, -17390, -543]]'], {}), '([[2048, 2048, 0], [6610, -13613, 7003], [17933, -17390, -543]])\n', (2573, 2637), True, 'import numpy as np\n'), ((3118, 3187), 'numpy.array', 'np.array', (['[[2048, 2048, 0], [3625, -7465, 3840], [9500, -9212, -288]]'], {}), '([[2048, 2048, 0], [3625, -7465, 3840], [9500, -9212, -288]])\n', (3126, 3187), True, 'import numpy as np\n'), ((7721, 7737), 'colour.utilities.to_domain_1', 'to_domain_1', (['RGB'], {}), '(RGB)\n', (7732, 7737), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((7751, 7884), 'colour.utilities.validate_method', 'validate_method', (['method', "['Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',\n 'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ']"], {}), "(method, ['Dolby 2016', 'ITU-R BT.2100-1 HLG',\n 'ITU-R BT.2100-1 PQ', 'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'])\n", (7766, 7884), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((7995, 8035), 'colour.algebra.vector_dot', 'vector_dot', (['MATRIX_ICTCP_RGB_TO_LMS', 'RGB'], 
{}), '(MATRIX_ICTCP_RGB_TO_LMS, RGB)\n', (8005, 8035), False, 'from colour.algebra import vector_dot\n'), ((8396, 8415), 'colour.utilities.from_range_1', 'from_range_1', (['ICtCp'], {}), '(ICtCp)\n', (8408, 8415), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((12336, 12354), 'colour.utilities.to_domain_1', 'to_domain_1', (['ICtCp'], {}), '(ICtCp)\n', (12347, 12354), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((12368, 12501), 'colour.utilities.validate_method', 'validate_method', (['method', "['Dolby 2016', 'ITU-R BT.2100-1 HLG', 'ITU-R BT.2100-1 PQ',\n 'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ']"], {}), "(method, ['Dolby 2016', 'ITU-R BT.2100-1 HLG',\n 'ITU-R BT.2100-1 PQ', 'ITU-R BT.2100-2 HLG', 'ITU-R BT.2100-2 PQ'])\n", (12383, 12501), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((12960, 13000), 'colour.algebra.vector_dot', 'vector_dot', (['MATRIX_ICTCP_LMS_TO_RGB', 'LMS'], {}), '(MATRIX_ICTCP_LMS_TO_RGB, LMS)\n', (12970, 13000), False, 'from colour.algebra import vector_dot\n'), ((13013, 13030), 'colour.utilities.from_range_1', 'from_range_1', (['RGB'], {}), '(RGB)\n', (13025, 13030), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((17633, 17741), 'colour.models.rgb.XYZ_to_RGB', 'XYZ_to_RGB', (['XYZ', 'illuminant', 'BT2020.whitepoint', 'BT2020.matrix_XYZ_to_RGB', 'chromatic_adaptation_transform'], {}), '(XYZ, illuminant, BT2020.whitepoint, BT2020.matrix_XYZ_to_RGB,\n chromatic_adaptation_transform)\n', (17643, 17741), False, 'from colour.models.rgb import RGB_COLOURSPACES, RGB_to_XYZ, XYZ_to_RGB\n'), ((22412, 22520), 'colour.models.rgb.RGB_to_XYZ', 'RGB_to_XYZ', (['RGB', 'BT2020.whitepoint', 'illuminant', 'BT2020.matrix_RGB_to_XYZ', 'chromatic_adaptation_transform'], {}), '(RGB, BT2020.whitepoint, illuminant, 
BT2020.matrix_RGB_to_XYZ,\n chromatic_adaptation_transform)\n', (22422, 22520), False, 'from colour.models.rgb import RGB_COLOURSPACES, RGB_to_XYZ, XYZ_to_RGB\n'), ((8046, 8074), 'colour.utilities.domain_range_scale', 'domain_range_scale', (['"""ignore"""'], {}), "('ignore')\n", (8064, 8074), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((8198, 8257), 'colour.algebra.vector_dot', 'vector_dot', (['MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2', 'LMS_p'], {}), '(MATRIX_ICTCP_LMS_P_TO_ICTCP_HLG_BT2100_2, LMS_p)\n', (8208, 8257), False, 'from colour.algebra import vector_dot\n'), ((8318, 8364), 'colour.algebra.vector_dot', 'vector_dot', (['MATRIX_ICTCP_LMS_P_TO_ICTCP', 'LMS_p'], {}), '(MATRIX_ICTCP_LMS_P_TO_ICTCP, LMS_p)\n', (8328, 8364), False, 'from colour.algebra import vector_dot\n'), ((12615, 12674), 'colour.algebra.vector_dot', 'vector_dot', (['MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2', 'ICtCp'], {}), '(MATRIX_ICTCP_ICTCP_TO_LMS_P_HLG_BT2100_2, ICtCp)\n', (12625, 12674), False, 'from colour.algebra import vector_dot\n'), ((12735, 12781), 'colour.algebra.vector_dot', 'vector_dot', (['MATRIX_ICTCP_ICTCP_TO_LMS_P', 'ICtCp'], {}), '(MATRIX_ICTCP_ICTCP_TO_LMS_P, ICtCp)\n', (12745, 12781), False, 'from colour.algebra import vector_dot\n'), ((12811, 12839), 'colour.utilities.domain_range_scale', 'domain_range_scale', (['"""ignore"""'], {}), "('ignore')\n", (12829, 12839), False, 'from colour.utilities import domain_range_scale, from_range_1, to_domain_1, validate_method\n'), ((8093, 8113), 'colour.models.rgb.transfer_functions.oetf_HLG_BT2100', 'oetf_HLG_BT2100', (['LMS'], {}), '(LMS)\n', (8108, 8113), False, 'from colour.models.rgb.transfer_functions import eotf_ST2084, eotf_inverse_ST2084, oetf_HLG_BT2100, oetf_inverse_HLG_BT2100\n'), ((8153, 8182), 'colour.models.rgb.transfer_functions.eotf_inverse_ST2084', 'eotf_inverse_ST2084', (['LMS', 'L_p'], {}), '(LMS, L_p)\n', (8172, 8182), False, 'from 
colour.models.rgb.transfer_functions import eotf_ST2084, eotf_inverse_ST2084, oetf_HLG_BT2100, oetf_inverse_HLG_BT2100\n'), ((12856, 12886), 'colour.models.rgb.transfer_functions.oetf_inverse_HLG_BT2100', 'oetf_inverse_HLG_BT2100', (['LMS_p'], {}), '(LMS_p)\n', (12879, 12886), False, 'from colour.models.rgb.transfer_functions import eotf_ST2084, eotf_inverse_ST2084, oetf_HLG_BT2100, oetf_inverse_HLG_BT2100\n'), ((12924, 12947), 'colour.models.rgb.transfer_functions.eotf_ST2084', 'eotf_ST2084', (['LMS_p', 'L_p'], {}), '(LMS_p, L_p)\n', (12935, 12947), False, 'from colour.models.rgb.transfer_functions import eotf_ST2084, eotf_inverse_ST2084, oetf_HLG_BT2100, oetf_inverse_HLG_BT2100\n')] |
from django.shortcuts import render, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from .forms import ContactForms
from django.contrib.auth import authenticate
from .models import BlogPage
from .forms import BlogForm, BlogModelForm
from django.contrib.admin.views.decorators import staff_member_required
# from django.http import HttpResponse
# Create your views here.
def home_page(request):
    """Render the landing page; logged-in users get an extra VIP notice."""
    context = {'title': 'Home page', 'text': 'Hello'}
    if request.user.is_authenticated:
        context['vip'] = 'Your is a VIP member'
    return render(request, 'blog/index.html', context)
def about_page(request):
    """Render the about page using the shared index template."""
    return render(request, 'blog/index.html', {'title': 'About page', 'text': ''})
def contact_page(request):
    """Render the contact page and validate a submitted contact form.

    The form is bound to POST data when present; on GET an unbound form is
    rendered. Removed leftover debug ``print`` calls, the commented-out
    request dumps, and the dead ``if request.method == 'POST': pass`` branch.
    """
    contact_form = ContactForms(request.POST or None)
    context = {
        'title': 'Contact page',
        'text': 'hi',
        'forms': contact_form,
    }
    if contact_form.is_valid():
        # NOTE(review): the submission is only validated here — nothing is
        # persisted or emailed yet. TODO: handle contact_form.cleaned_data.
        pass
    return render(request, 'blog/contact.html', context)
def blog_list_view(request):
    """List every BlogPage entry."""
    context = {
        'title': 'List Page',
        'queryset': BlogPage.objects.all(),
        'range': range(2),
    }
    return render(request, 'blog/list.html', context)
@staff_member_required
def blog_create_view(request):
    """Staff-only view: create a BlogPage owned by the current user."""
    form = BlogModelForm(request.POST or None)
    if form.is_valid():
        post = form.save(commit=False)
        post.user = request.user
        post.save()
        form = BlogModelForm()  # present a fresh, empty form after saving
    context = {'title': 'Create Page', 'form': form}
    return render(request, 'blog/create.html', context)
@staff_member_required
def blog_update_view(request, slug):
    """Staff-only view: edit the BlogPage identified by *slug*."""
    page = get_object_or_404(BlogPage, slug=slug)
    form = BlogModelForm(request.POST or None, instance=page)
    if form.is_valid():
        form.save()
        return redirect('../')
    context = {
        'title': f'update {page.title}',
        'form': form,
        'queryset': page,
    }
    return render(request, 'blog/update.html', context)
@staff_member_required
def blog_delete_view(request, slug):
    """Staff-only view: confirm (GET) then delete (POST) a BlogPage."""
    page = get_object_or_404(BlogPage, slug=slug)
    if request.method == 'POST':
        page.delete()
        return redirect('../../')
    context = {'title': 'Delete Page', 'queryset': page}
    return render(request, 'blog/delete.html', context)
def blog_detail_view(request, slug):
    """Show the single BlogPage identified by *slug* (404 if absent)."""
    page = get_object_or_404(BlogPage, slug=slug)
    context = {'title': 'Detail Page', 'queryset': page}
    return render(request, 'blog/detail.html', context)
def test_page(request):
    """Render the first scratch/test template."""
    return render(request, 'blog/test.html', {})
def test2_page(request):
    """Render the second scratch/test template."""
    return render(request, 'blog/test2.html', {})
def test3_page(request):
    """Render the project-level index template."""
    return render(request, 'index.html', {})
| [
"django.shortcuts.render",
"django.shortcuts.redirect",
"django.shortcuts.get_object_or_404"
] | [((613, 656), 'django.shortcuts.render', 'render', (['request', '"""blog/index.html"""', 'context'], {}), "(request, 'blog/index.html', context)\n", (619, 656), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((769, 812), 'django.shortcuts.render', 'render', (['request', '"""blog/index.html"""', 'context'], {}), "(request, 'blog/index.html', context)\n", (775, 812), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1428, 1473), 'django.shortcuts.render', 'render', (['request', '"""blog/contact.html"""', 'context'], {}), "(request, 'blog/contact.html', context)\n", (1434, 1473), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((1700, 1739), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (1706, 1739), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2113, 2152), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (2119, 2152), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2230, 2268), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['BlogPage'], {'slug': 'slug'}), '(BlogPage, slug=slug)\n', (2247, 2268), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2579, 2618), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (2585, 2618), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2696, 2734), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['BlogPage'], {'slug': 'slug'}), '(BlogPage, slug=slug)\n', (2713, 2734), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2962, 3001), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, 
context)\n', (2968, 3001), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3056, 3094), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['BlogPage'], {'slug': 'slug'}), '(BlogPage, slug=slug)\n', (3073, 3094), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3229, 3268), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (3235, 3268), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3342, 3376), 'django.shortcuts.render', 'render', (['request', 'template_name', '{}'], {}), '(request, template_name, {})\n', (3348, 3376), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3452, 3486), 'django.shortcuts.render', 'render', (['request', 'template_name', '{}'], {}), '(request, template_name, {})\n', (3458, 3486), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((3557, 3591), 'django.shortcuts.render', 'render', (['request', 'template_name', '{}'], {}), '(request, template_name, {})\n', (3563, 3591), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2394, 2409), 'django.shortcuts.redirect', 'redirect', (['"""../"""'], {}), "('../')\n", (2402, 2409), False, 'from django.shortcuts import render, get_object_or_404, redirect\n'), ((2809, 2827), 'django.shortcuts.redirect', 'redirect', (['"""../../"""'], {}), "('../../')\n", (2817, 2827), False, 'from django.shortcuts import render, get_object_or_404, redirect\n')] |
import dataclasses
@dataclasses.dataclass(frozen=True)
class Sd:
    """SD (standard definition) resolution: 640x480."""
    height: int = 480
    width: int = 640
@dataclasses.dataclass(frozen=True)
class Hd:
    """HD (720p) resolution: 1280x720."""
    height: int = 720
    width: int = 1280
@dataclasses.dataclass(frozen=True)
class Fhd:
    """Full HD (1080p) resolution: 1920x1080."""
    height: int = 1080
    width: int = 1920
@dataclasses.dataclass(frozen=True)
class Qhd:
    """Quad HD (1440p) resolution: 2560x1440."""
    height: int = 1440
    width: int = 2560
@dataclasses.dataclass(frozen=True)
class Uhd:
    """UHD / 4K resolution: 3840x2160."""
    height: int = 2160
    width: int = 3840
@dataclasses.dataclass(frozen=True)
class Fuhd:
    """Full UHD / 8K resolution: 7680x4320."""
    height: int = 4320
    width: int = 7680
| [
"dataclasses.dataclass"
] | [((22, 56), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (43, 56), False, 'import dataclasses\n'), ((113, 147), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (134, 147), False, 'import dataclasses\n'), ((205, 239), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (226, 239), False, 'import dataclasses\n'), ((299, 333), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (320, 333), False, 'import dataclasses\n'), ((393, 427), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (414, 427), False, 'import dataclasses\n'), ((487, 521), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (508, 521), False, 'import dataclasses\n')] |
from __future__ import print_function
from flask import Flask, Response
from pyzbar import pyzbar
from picamera.array import PiRGBArray
from picamera import PiCamera
from datetime import datetime
import numpy as np
import cv2
import time
# Module-level hardware setup: one shared PiCamera and one reusable
# capture buffer matching the 640x480 capture resolution.
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))
# Give the camera sensor a moment to warm up before the first capture.
time.sleep(0.1)
app = Flask(__name__)
@app.route('/stream')
def stream():
    """HTTP endpoint that serves an MJPEG stream of camera frames."""
    mime = 'multipart/x-mixed-replace; boundary=frame'
    return Response(gen(), mimetype=mime)
def gen():
    """Yield camera frames forever as multipart MJPEG chunks."""
    header = b'--frame\r\nContent-Type: image/jpeg\r\n\r\n'
    while True:
        yield header + get_frame() + b'\r\n\r\n'
def get_frame():
    """Capture one frame, run barcode processing, and return it JPEG-encoded."""
    camera.capture(rawCapture, format="bgr", use_video_port=True)
    frame = rawCapture.array
    process_frame(frame)
    _, encoded = cv2.imencode('.jpg', frame)
    rawCapture.truncate(0)  # reset the shared buffer for the next capture
    return encoded.tobytes()
def process_frame(frame):
    """Decode any barcodes in *frame* and draw their bounding boxes in place."""
    draw_positions(frame, decode(frame))
def decode(frame):
    """Return all barcodes/QR codes found in *frame*, logging each hit."""
    found = pyzbar.decode(frame, scan_locations=True)
    for obj in found:
        print(datetime.now().strftime('%H:%M:%S.%f'))
        print('Type: ', obj.type)
        print('Data: ', obj.data)
    return found
def draw_positions(frame, decoded_objs):
    """Draw a thick red rectangle around every decoded object (in place)."""
    for obj in decoded_objs:
        left, top, width, height = obj.rect
        top_left = (left, top)
        bottom_right = (left + width, height + top)
        frame = cv2.rectangle(frame, top_left, bottom_right, (0, 0, 168), 20)
if __name__ == '__main__':
    # Listen on all interfaces; threaded so the MJPEG stream does not block
    # other requests.
    app.run(host="0.0.0.0", debug=False, threaded=True)
| [
"cv2.rectangle",
"cv2.imencode",
"flask.Flask",
"picamera.PiCamera",
"time.sleep",
"pyzbar.pyzbar.decode",
"datetime.datetime.now",
"picamera.array.PiRGBArray"
] | [((250, 260), 'picamera.PiCamera', 'PiCamera', ([], {}), '()\n', (258, 260), False, 'from picamera import PiCamera\n'), ((327, 362), 'picamera.array.PiRGBArray', 'PiRGBArray', (['camera'], {'size': '(640, 480)'}), '(camera, size=(640, 480))\n', (337, 362), False, 'from picamera.array import PiRGBArray\n'), ((363, 378), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (373, 378), False, 'import time\n'), ((386, 401), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (391, 401), False, 'from flask import Flask, Response\n'), ((855, 882), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame'], {}), "('.jpg', frame)\n", (867, 882), False, 'import cv2\n'), ((1080, 1121), 'pyzbar.pyzbar.decode', 'pyzbar.decode', (['frame'], {'scan_locations': '(True)'}), '(frame, scan_locations=True)\n', (1093, 1121), False, 'from pyzbar import pyzbar\n'), ((1481, 1566), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(left, top)', '(left + width, height + top)', '(0, 0, 168)', '(20)'], {}), '(frame, (left, top), (left + width, height + top), (0, 0, 168), 20\n )\n', (1494, 1566), False, 'import cv2\n'), ((1173, 1187), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1185, 1187), False, 'from datetime import datetime\n')] |
from godity.engine import getMonitorSize
# Physical monitor size in pixels — presumably (width, height); confirm
# against godity.engine.getMonitorSize.
MONITOR_SIZE = getMonitorSize()
# Each logical pixel covers PIXEL_SIZE x PIXEL_SIZE physical pixels.
PIXEL_SIZE = 5
# Logical screen dimensions derived from the monitor size.
SCREEN_WIDTH = int(MONITOR_SIZE[0] / PIXEL_SIZE)
SCREEN_HEIGHT = int(MONITOR_SIZE[1] / PIXEL_SIZE)
"godity.engine.getMonitorSize"
] | [((59, 75), 'godity.engine.getMonitorSize', 'getMonitorSize', ([], {}), '()\n', (73, 75), False, 'from godity.engine import getMonitorSize\n')] |
# XXX TypeErrors on calling handlers, or on bad return values from a
# handler, are obscure and unhelpful.
from io import BytesIO
import os
import platform
import sys
import sysconfig
import unittest
import traceback
from xml.parsers import expat
from xml.parsers.expat import errors
from test.support import sortdict
class SetAttributeTest(unittest.TestCase):
    """Writable parser flags accept truthy ints and read back as bools."""

    def setUp(self):
        self.parser = expat.ParserCreate(namespace_separator='!')

    def _check_bool_attribute(self, name):
        # Each flag starts out False and coerces assigned ints to bool.
        self.assertIs(getattr(self.parser, name), False)
        for value in (0, 1, 2, 0):
            setattr(self.parser, name, value)
            self.assertIs(getattr(self.parser, name), bool(value))

    def test_buffer_text(self):
        self._check_bool_attribute('buffer_text')

    def test_namespace_prefixes(self):
        self._check_bool_attribute('namespace_prefixes')

    def test_ordered_attributes(self):
        self._check_bool_attribute('ordered_attributes')

    def test_specified_attributes(self):
        self._check_bool_attribute('specified_attributes')

    def test_invalid_attributes(self):
        # Python-3 parsers no longer have returns_unicode at all.
        with self.assertRaises(AttributeError):
            self.parser.returns_unicode = 1
        with self.assertRaises(AttributeError):
            self.parser.returns_unicode

        # Issue #25019: non-string attribute names must raise TypeError.
        self.assertRaises(TypeError, setattr, self.parser, range(0xF), 0)
        self.assertRaises(TypeError, self.parser.__setattr__, range(0xF), 0)
        self.assertRaises(TypeError, getattr, self.parser, range(0xF))
# Sample document exercising most parser events: XML declaration, PI,
# comment, a DOCTYPE with element/attlist/notation/entity declarations,
# namespaces, CDATA, external and skipped entities, and Latin-1 text.
data = b'''\
<?xml version="1.0" encoding="iso-8859-1" standalone="no"?>
<?xml-stylesheet href="stylesheet.css"?>
<!-- comment data -->
<!DOCTYPE quotations SYSTEM "quotations.dtd" [
<!ELEMENT root ANY>
<!ATTLIST root attr1 CDATA #REQUIRED attr2 CDATA #IMPLIED>
<!NOTATION notation SYSTEM "notation.jpeg">
<!ENTITY acirc "â">
<!ENTITY external_entity SYSTEM "entity.file">
<!ENTITY unparsed_entity SYSTEM "entity.file" NDATA notation>
%unparsed_entity;
]>
<root attr1="value1" attr2="value2ὀ">
<myns:subelement xmlns:myns="http://www.python.org/namespace">
     Contents of subelements
</myns:subelement>
<sub2><![CDATA[contents of CDATA section]]></sub2>
&external_entity;
&skipped_entity;
\xb5
</root>
'''
# Produce UTF-8 output
class ParseTest(unittest.TestCase):
    """End-to-end parse of the module-level `data` document, checking that
    every handler fires and that the recorded event trace matches the
    expected sequence exactly."""

    class Outputter:
        # Records a human-readable trace of every callback invocation in
        # self.out; handlers that must keep the parse going return 1.
        def __init__(self):
            self.out = []

        def StartElementHandler(self, name, attrs):
            self.out.append('Start element: ' + repr(name) + ' ' +
                            sortdict(attrs))

        def EndElementHandler(self, name):
            self.out.append('End element: ' + repr(name))

        def CharacterDataHandler(self, data):
            # Only record non-whitespace character data.
            data = data.strip()
            if data:
                self.out.append('Character data: ' + repr(data))

        def ProcessingInstructionHandler(self, target, data):
            self.out.append('PI: ' + repr(target) + ' ' + repr(data))

        def StartNamespaceDeclHandler(self, prefix, uri):
            self.out.append('NS decl: ' + repr(prefix) + ' ' + repr(uri))

        def EndNamespaceDeclHandler(self, prefix):
            self.out.append('End of NS decl: ' + repr(prefix))

        def StartCdataSectionHandler(self):
            self.out.append('Start of CDATA section')

        def EndCdataSectionHandler(self):
            self.out.append('End of CDATA section')

        def CommentHandler(self, text):
            self.out.append('Comment: ' + repr(text))

        def NotationDeclHandler(self, *args):
            name, base, sysid, pubid = args
            self.out.append('Notation declared: %s' %(args,))

        def UnparsedEntityDeclHandler(self, *args):
            entityName, base, systemId, publicId, notationName = args
            self.out.append('Unparsed entity decl: %s' %(args,))

        def NotStandaloneHandler(self):
            self.out.append('Not standalone')
            return 1

        def ExternalEntityRefHandler(self, *args):
            context, base, sysId, pubId = args
            self.out.append('External entity ref: %s' %(args[1:],))
            return 1

        def StartDoctypeDeclHandler(self, *args):
            self.out.append(('Start doctype', args))
            return 1

        def EndDoctypeDeclHandler(self):
            self.out.append("End doctype")
            return 1

        def EntityDeclHandler(self, *args):
            self.out.append(('Entity declaration', args))
            return 1

        def XmlDeclHandler(self, *args):
            self.out.append(('XML declaration', args))
            return 1

        def ElementDeclHandler(self, *args):
            self.out.append(('Element declaration', args))
            return 1

        def AttlistDeclHandler(self, *args):
            self.out.append(('Attribute list declaration', args))
            return 1

        def SkippedEntityHandler(self, *args):
            self.out.append(("Skipped entity", args))
            return 1

        def DefaultHandler(self, userData):
            pass

        def DefaultHandlerExpand(self, userData):
            pass

    # All handler attribute names that _hookup_callbacks wires up.
    handler_names = [
        'StartElementHandler', 'EndElementHandler', 'CharacterDataHandler',
        'ProcessingInstructionHandler', 'UnparsedEntityDeclHandler',
        'NotationDeclHandler', 'StartNamespaceDeclHandler',
        'EndNamespaceDeclHandler', 'CommentHandler',
        'StartCdataSectionHandler', 'EndCdataSectionHandler', 'DefaultHandler',
        'DefaultHandlerExpand', 'NotStandaloneHandler',
        'ExternalEntityRefHandler', 'StartDoctypeDeclHandler',
        'EndDoctypeDeclHandler', 'EntityDeclHandler', 'XmlDeclHandler',
        'ElementDeclHandler', 'AttlistDeclHandler', 'SkippedEntityHandler',
        ]

    def _hookup_callbacks(self, parser, handler):
        """
        Set each of the callbacks defined on handler and named in
        self.handler_names on the given parser.
        """
        for name in self.handler_names:
            setattr(parser, name, getattr(handler, name))

    def _verify_parse_output(self, operations):
        # The exact event trace expected when parsing the module-level
        # `data` document with every handler installed.
        expected_operations = [
            ('XML declaration', ('1.0', 'iso-8859-1', 0)),
            'PI: \'xml-stylesheet\' \'href="stylesheet.css"\'',
            "Comment: ' comment data '",
            "Not standalone",
            ("Start doctype", ('quotations', 'quotations.dtd', None, 1)),
            ('Element declaration', ('root', (2, 0, None, ()))),
            ('Attribute list declaration', ('root', 'attr1', 'CDATA', None,
                1)),
            ('Attribute list declaration', ('root', 'attr2', 'CDATA', None,
                0)),
            "Notation declared: ('notation', None, 'notation.jpeg', None)",
            ('Entity declaration', ('acirc', 0, '\xe2', None, None, None, None)),
            ('Entity declaration', ('external_entity', 0, None, None,
                'entity.file', None, None)),
            "Unparsed entity decl: ('unparsed_entity', None, 'entity.file', None, 'notation')",
            "Not standalone",
            "End doctype",
            "Start element: 'root' {'attr1': 'value1', 'attr2': 'value2\u1f40'}",
            "NS decl: 'myns' 'http://www.python.org/namespace'",
            "Start element: 'http://www.python.org/namespace!subelement' {}",
            "Character data: 'Contents of subelements'",
            "End element: 'http://www.python.org/namespace!subelement'",
            "End of NS decl: 'myns'",
            "Start element: 'sub2' {}",
            'Start of CDATA section',
            "Character data: 'contents of CDATA section'",
            'End of CDATA section',
            "End element: 'sub2'",
            "External entity ref: (None, 'entity.file', None)",
            ('Skipped entity', ('skipped_entity', 0)),
            "Character data: '\xb5'",
            "End element: 'root'",
        ]
        # NOTE(review): zip() truncates at the shorter sequence, so a parse
        # producing too few events would pass silently here.
        for operation, expected_operation in zip(operations, expected_operations):
            self.assertEqual(operation, expected_operation)

    def test_parse_bytes(self):
        out = self.Outputter()
        parser = expat.ParserCreate(namespace_separator='!')
        self._hookup_callbacks(parser, out)

        parser.Parse(data, True)

        operations = out.out
        self._verify_parse_output(operations)
        # Issue #6697.
        self.assertRaises(AttributeError, getattr, parser, '\uD800')

    def test_parse_str(self):
        out = self.Outputter()
        parser = expat.ParserCreate(namespace_separator='!')
        self._hookup_callbacks(parser, out)

        parser.Parse(data.decode('iso-8859-1'), True)

        operations = out.out
        self._verify_parse_output(operations)

    def test_parse_file(self):
        # Try parsing a file
        out = self.Outputter()
        parser = expat.ParserCreate(namespace_separator='!')
        self._hookup_callbacks(parser, out)
        file = BytesIO(data)

        parser.ParseFile(file)

        operations = out.out
        self._verify_parse_output(operations)

    def test_parse_again(self):
        parser = expat.ParserCreate()
        file = BytesIO(data)
        parser.ParseFile(file)
        # Issue 6676: ensure a meaningful exception is raised when attempting
        # to parse more than one XML document per xmlparser instance,
        # a limitation of the Expat library.
        with self.assertRaises(expat.error) as cm:
            parser.ParseFile(file)
        self.assertEqual(expat.ErrorString(cm.exception.code),
                         expat.errors.XML_ERROR_FINISHED)
class NamespaceSeparatorTest(unittest.TestCase):
    """Validation of ParserCreate()'s namespace_separator argument."""

    def test_legal(self):
        # Absent, None, and a single character are all accepted.
        expat.ParserCreate()
        expat.ParserCreate(namespace_separator=None)
        expat.ParserCreate(namespace_separator=' ')

    def test_illegal(self):
        with self.assertRaises(TypeError) as cm:
            expat.ParserCreate(namespace_separator=42)
        self.assertEqual(str(cm.exception),
            "ParserCreate() argument 'namespace_separator' must be str or None, not int")

        with self.assertRaises(ValueError) as cm:
            expat.ParserCreate(namespace_separator='too long')
        self.assertEqual(str(cm.exception),
            'namespace_separator must be at most one character, omitted, or None')

    def test_zero_length(self):
        # ParserCreate() needs to accept a namespace_separator of zero length
        # to satisfy the requirements of RDF applications that are required
        # to simply glue together the namespace URI and the localname. Though
        # considered a wart of the RDF specifications, it needs to be supported.
        #
        # See XML-SIG mailing list thread starting with
        # http://mail.python.org/pipermail/xml-sig/2001-April/005202.html
        #
        expat.ParserCreate(namespace_separator='')
class InterningTest(unittest.TestCase):
    """Checks on pyexpat's tag-name interning machinery."""

    def test(self):
        # Test the interning machinery: every occurrence of the same tag
        # name should be reported as the very same string object.
        p = expat.ParserCreate()
        L = []
        def collector(name, *args):
            L.append(name)
        p.StartElementHandler = collector
        p.EndElementHandler = collector
        p.Parse(b"<e> <e/> <e></e> </e>", True)
        tag = L[0]
        self.assertEqual(len(L), 6)
        for entry in L:
            # L should have the same string repeated over and over.
            self.assertTrue(tag is entry)

    def test_issue9402(self):
        # create an ExternalEntityParserCreate with buffer text
        class ExternalOutputter:
            def __init__(self, parser):
                self.parser = parser
                self.parser_result = None

            def ExternalEntityRefHandler(self, context, base, sysId, pubId):
                external_parser = self.parser.ExternalEntityParserCreate("")
                self.parser_result = external_parser.Parse(b"", True)
                return 1

        parser = expat.ParserCreate(namespace_separator='!')
        parser.buffer_text = 1
        out = ExternalOutputter(parser)
        parser.ExternalEntityRefHandler = out.ExternalEntityRefHandler
        # Parses the module-level `data` document, which triggers the
        # external entity reference; the sub-parse must finish cleanly.
        parser.Parse(data, True)
        self.assertEqual(out.parser_result, 1)
class BufferTextTest(unittest.TestCase):
    """Behaviour of the buffer_text option, which coalesces adjacent
    character-data callbacks; the tests encode Expat's exact chunking."""

    def setUp(self):
        self.stuff = []
        self.parser = expat.ParserCreate()
        self.parser.buffer_text = 1
        self.parser.CharacterDataHandler = self.CharacterDataHandler

    def check(self, expected, label):
        # Assert the collected event list matches; `label` names the case.
        self.assertEqual(self.stuff, expected,
                         "%s\nstuff    = %r\nexpected = %r"
                         % (label, self.stuff, map(str, expected)))

    def CharacterDataHandler(self, text):
        self.stuff.append(text)

    def StartElementHandler(self, name, attrs):
        self.stuff.append("<%s>" % name)
        bt = attrs.get("buffer-text")
        # A buffer-text attribute in the document toggles buffering mid-parse.
        if bt == "yes":
            self.parser.buffer_text = 1
        elif bt == "no":
            self.parser.buffer_text = 0

    def EndElementHandler(self, name):
        self.stuff.append("</%s>" % name)

    def CommentHandler(self, data):
        self.stuff.append("<!--%s-->" % data)

    def setHandlers(self, handlers=[]):
        # NOTE: the mutable default is harmless here -- the list is only read.
        for name in handlers:
            setattr(self.parser, name, getattr(self, name))

    def test_default_to_disabled(self):
        parser = expat.ParserCreate()
        self.assertFalse(parser.buffer_text)

    def test_buffering_enabled(self):
        # Make sure buffering is turned on
        self.assertTrue(self.parser.buffer_text)
        self.parser.Parse(b"<a>1<b/>2<c/>3</a>", True)
        self.assertEqual(self.stuff, ['123'],
                         "buffered text not properly collapsed")

    def test1(self):
        # XXX This test exposes more detail of Expat's text chunking than we
        # XXX like, but it tests what we need to concisely.
        self.setHandlers(["StartElementHandler"])
        self.parser.Parse(b"<a>1<b buffer-text='no'/>2\n3<c buffer-text='yes'/>4\n5</a>", True)
        self.assertEqual(self.stuff,
                         ["<a>", "1", "<b>", "2", "\n", "3", "<c>", "4\n5"],
                         "buffering control not reacting as expected")

    def test2(self):
        self.parser.Parse(b"<a>1<b/><2><c/> \n 3</a>", True)
        self.assertEqual(self.stuff, ["1<2> \n 3"],
                         "buffered text not properly collapsed")

    def test3(self):
        self.setHandlers(["StartElementHandler"])
        self.parser.Parse(b"<a>1<b/>2<c/>3</a>", True)
        self.assertEqual(self.stuff, ["<a>", "1", "<b>", "2", "<c>", "3"],
                         "buffered text not properly split")

    def test4(self):
        self.setHandlers(["StartElementHandler", "EndElementHandler"])
        self.parser.CharacterDataHandler = None
        self.parser.Parse(b"<a>1<b/>2<c/>3</a>", True)
        self.assertEqual(self.stuff,
                         ["<a>", "<b>", "</b>", "<c>", "</c>", "</a>"])

    def test5(self):
        self.setHandlers(["StartElementHandler", "EndElementHandler"])
        self.parser.Parse(b"<a>1<b></b>2<c/>3</a>", True)
        self.assertEqual(self.stuff,
            ["<a>", "1", "<b>", "</b>", "2", "<c>", "</c>", "3", "</a>"])

    def test6(self):
        self.setHandlers(["CommentHandler", "EndElementHandler",
                          "StartElementHandler"])
        self.parser.Parse(b"<a>1<b/>2<c></c>345</a> ", True)
        self.assertEqual(self.stuff,
            ["<a>", "1", "<b>", "</b>", "2", "<c>", "</c>", "345", "</a>"],
            "buffered text not properly split")

    def test7(self):
        self.setHandlers(["CommentHandler", "EndElementHandler",
                          "StartElementHandler"])
        self.parser.Parse(b"<a>1<b/>2<c></c>3<!--abc-->4<!--def-->5</a> ", True)
        self.assertEqual(self.stuff,
                         ["<a>", "1", "<b>", "</b>", "2", "<c>", "</c>", "3",
                          "<!--abc-->", "4", "<!--def-->", "5", "</a>"],
                         "buffered text not properly split")
# Test handling of exception from callback:
class HandlerExceptionTest(unittest.TestCase):
    """An exception raised inside a handler must propagate out of Parse()
    with a traceback that includes the pyexpat.c C-level frame."""

    def StartElementHandler(self, name, attrs):
        raise RuntimeError(name)

    def check_traceback_entry(self, entry, filename, funcname):
        # `entry` is a traceback FrameSummary: (filename, lineno, name, line).
        self.assertEqual(os.path.basename(entry[0]), filename)
        self.assertEqual(entry[2], funcname)

    def test_exception(self):
        parser = expat.ParserCreate()
        parser.StartElementHandler = self.StartElementHandler
        try:
            parser.Parse(b"<a><b><c/></b></a>", True)
            self.fail()
        except RuntimeError as e:
            self.assertEqual(e.args[0], 'a',
                             "Expected RuntimeError for element 'a', but" + \
                             " found %r" % e.args[0])
            # Check that the traceback contains the relevant line in pyexpat.c
            entries = traceback.extract_tb(e.__traceback__)
            self.assertEqual(len(entries), 3)
            self.check_traceback_entry(entries[0],
                                       "test_pyexpat.py", "test_exception")
            self.check_traceback_entry(entries[1],
                                       "pyexpat.c", "StartElement")
            self.check_traceback_entry(entries[2],
                                       "test_pyexpat.py", "StartElementHandler")
            # C source text is only available in a Python build tree
            # (and not in Windows-on-ARM builds).
            if sysconfig.is_python_build() and not (sys.platform == 'win32' and platform.machine() == 'ARM'):
                self.assertIn('call_with_frame("StartElement"', entries[1][3])
# Test Current* members:
class PositionTest(unittest.TestCase):
    """CurrentByteIndex / CurrentLineNumber / CurrentColumnNumber tracking
    across start- and end-element events."""

    def StartElementHandler(self, name, attrs):
        self.check_pos('s')

    def EndElementHandler(self, name):
        self.check_pos('e')

    def check_pos(self, event):
        # Compare the parser's reported position with the next expected
        # ('event', byte_index, line, column) tuple.
        pos = (event,
               self.parser.CurrentByteIndex,
               self.parser.CurrentLineNumber,
               self.parser.CurrentColumnNumber)
        self.assertTrue(self.upto < len(self.expected_list),
                        'too many parser events')
        expected = self.expected_list[self.upto]
        self.assertEqual(pos, expected,
                'Expected position %s, got position %s' %(pos, expected))
        self.upto += 1

    def test(self):
        self.parser = expat.ParserCreate()
        self.parser.StartElementHandler = self.StartElementHandler
        self.parser.EndElementHandler = self.EndElementHandler
        self.upto = 0
        # NOTE(review): the byte indices below must agree exactly with the
        # whitespace in the xml literal -- verify if the literal is edited.
        self.expected_list = [('s', 0, 1, 0), ('s', 5, 2, 1), ('s', 11, 3, 2),
                              ('e', 15, 3, 6), ('e', 17, 4, 1), ('e', 22, 5, 0)]

        xml = b'<a>\n <b>\n <c/>\n </b>\n</a>'
        self.parser.Parse(xml, True)
class sf1296433Test(unittest.TestCase):
    """Regression test for http://python.org/sf/1296433 (parser crash)."""

    def test_parse_only_xml_data(self):
        # A document slightly longer than the internal buffer used to crash
        # the interpreter when the character-data handler raised.
        payload = 'a' * 1025
        xml = "<?xml version='1.0' encoding='iso8859'?><s>%s</s>" % payload
        # this one doesn't crash
        #xml = "<?xml version='1.0'?><s>%s</s>" % ('a' * 10000)

        class SpecificException(Exception):
            pass

        def raising_handler(text):
            raise SpecificException

        parser = expat.ParserCreate()
        parser.CharacterDataHandler = raising_handler

        self.assertRaises(Exception, parser.Parse, xml.encode('iso8859'))
class ChardataBufferTest(unittest.TestCase):
    """
    test setting of chardata buffer size
    """

    def test_1025_bytes(self):
        # One byte over the 1024-byte buffer -> two handler calls.
        self.assertEqual(self.small_buffer_test(1025), 2)

    def test_1000_bytes(self):
        # Fits inside the buffer -> a single handler call.
        self.assertEqual(self.small_buffer_test(1000), 1)

    def test_wrong_size(self):
        # buffer_size must be a positive int that fits the C type.
        parser = expat.ParserCreate()
        parser.buffer_text = 1
        with self.assertRaises(ValueError):
            parser.buffer_size = -1
        with self.assertRaises(ValueError):
            parser.buffer_size = 0
        with self.assertRaises((ValueError, OverflowError)):
            parser.buffer_size = sys.maxsize + 1
        with self.assertRaises(TypeError):
            parser.buffer_size = 512.0

    def test_unchanged_size(self):
        xml1 = b"<?xml version='1.0' encoding='iso8859'?><s>" + b'a' * 512
        xml2 = b'a'*512 + b'</s>'
        parser = expat.ParserCreate()
        parser.CharacterDataHandler = self.counting_handler
        parser.buffer_size = 512
        parser.buffer_text = 1

        # Feed 512 bytes of character data: the handler should be called
        # once.
        self.n = 0
        parser.Parse(xml1)
        self.assertEqual(self.n, 1)

        # Reassign to buffer_size, but assign the same size.
        parser.buffer_size = parser.buffer_size
        self.assertEqual(self.n, 1)

        # Try parsing rest of the document
        parser.Parse(xml2)
        self.assertEqual(self.n, 2)

    def test_disabling_buffer(self):
        xml1 = b"<?xml version='1.0' encoding='iso8859'?><a>" + b'a' * 512
        xml2 = b'b' * 1024
        xml3 = b'c' * 1024 + b'</a>';
        parser = expat.ParserCreate()
        parser.CharacterDataHandler = self.counting_handler
        parser.buffer_text = 1
        parser.buffer_size = 1024
        self.assertEqual(parser.buffer_size, 1024)

        # Parse one chunk of XML
        self.n = 0
        parser.Parse(xml1, False)
        self.assertEqual(parser.buffer_size, 1024)
        self.assertEqual(self.n, 1)

        # Turn off buffering and parse the next chunk.
        parser.buffer_text = 0
        self.assertFalse(parser.buffer_text)
        self.assertEqual(parser.buffer_size, 1024)
        # Unbuffered: each 1024-byte chunk fires the handler directly.
        for i in range(10):
            parser.Parse(xml2, False)
        self.assertEqual(self.n, 11)

        parser.buffer_text = 1
        self.assertTrue(parser.buffer_text)
        self.assertEqual(parser.buffer_size, 1024)
        parser.Parse(xml3, True)
        self.assertEqual(self.n, 12)

    def counting_handler(self, text):
        # Counts how many times the character-data handler fires.
        self.n += 1

    def small_buffer_test(self, buffer_len):
        # Parse buffer_len bytes of character data through a 1024-byte
        # buffer and return the number of handler invocations.
        xml = b"<?xml version='1.0' encoding='iso8859'?><s>" + b'a' * buffer_len + b'</s>'
        parser = expat.ParserCreate()
        parser.CharacterDataHandler = self.counting_handler
        parser.buffer_size = 1024
        parser.buffer_text = 1
        self.n = 0
        parser.Parse(xml)
        return self.n

    def test_change_size_1(self):
        # Growing the buffer mid-parse must be allowed.
        xml1 = b"<?xml version='1.0' encoding='iso8859'?><a><s>" + b'a' * 1024
        xml2 = b'aaa</s><s>' + b'a' * 1025 + b'</s></a>'
        parser = expat.ParserCreate()
        parser.CharacterDataHandler = self.counting_handler
        parser.buffer_text = 1
        parser.buffer_size = 1024
        self.assertEqual(parser.buffer_size, 1024)

        self.n = 0
        parser.Parse(xml1, False)
        parser.buffer_size *= 2
        self.assertEqual(parser.buffer_size, 2048)
        parser.Parse(xml2, True)
        self.assertEqual(self.n, 2)

    def test_change_size_2(self):
        # Shrinking the buffer mid-parse must also be allowed.
        xml1 = b"<?xml version='1.0' encoding='iso8859'?><a>a<s>" + b'a' * 1023
        xml2 = b'aaa</s><s>' + b'a' * 1025 + b'</s></a>'
        parser = expat.ParserCreate()
        parser.CharacterDataHandler = self.counting_handler
        parser.buffer_text = 1
        parser.buffer_size = 2048
        self.assertEqual(parser.buffer_size, 2048)

        self.n=0
        parser.Parse(xml1, False)
        parser.buffer_size = parser.buffer_size // 2
        self.assertEqual(parser.buffer_size, 1024)
        parser.Parse(xml2, True)
        self.assertEqual(self.n, 4)
class MalformedInputTest(unittest.TestCase):
    """Error reporting for malformed input byte streams."""

    def test1(self):
        # A NUL byte leaves the parser inside an unfinished token.
        parser = expat.ParserCreate()
        with self.assertRaises(expat.ExpatError) as cm:
            parser.Parse(b"\0\r\n", True)
        self.assertEqual(str(cm.exception),
                         'unclosed token: line 2, column 0')

    def test2(self):
        # \xc2\x85 is UTF-8 encoded U+0085 (NEXT LINE)
        xml = b"<?xml version\xc2\x85='1.0'?>\r\n"
        parser = expat.ParserCreate()
        err_pattern = r'XML declaration not well-formed: line 1, column \d+'
        with self.assertRaisesRegex(expat.ExpatError, err_pattern):
            parser.Parse(xml, True)
class ErrorMessageTest(unittest.TestCase):
    """Consistency of the errors.codes / errors.messages mappings."""

    def test_codes(self):
        # Round-trip: message -> code -> message.
        message = errors.XML_ERROR_SYNTAX
        self.assertEqual(message, errors.messages[errors.codes[message]])

    def test_expaterror(self):
        parser = expat.ParserCreate()
        with self.assertRaises(expat.ExpatError) as cm:
            parser.Parse(b'<', True)
        self.assertEqual(cm.exception.code,
                         errors.codes[errors.XML_ERROR_UNCLOSED_TOKEN])
class ForeignDTDTests(unittest.TestCase):
    """
    Tests for the UseForeignDTD method of expat parser objects.
    """

    def _make_parser(self, calls, *dtd_args):
        # Build a parser with UseForeignDTD(*dtd_args) active that records
        # every (public_id, system_id) pair seen by ExternalEntityRefHandler.
        def resolve_entity(context, base, system_id, public_id):
            calls.append((public_id, system_id))
            return 1
        parser = expat.ParserCreate()
        parser.UseForeignDTD(*dtd_args)
        parser.SetParamEntityParsing(expat.XML_PARAM_ENTITY_PARSING_ALWAYS)
        parser.ExternalEntityRefHandler = resolve_entity
        return parser

    def test_use_foreign_dtd(self):
        """
        If UseForeignDTD is passed True and a document without an external
        entity reference is parsed, ExternalEntityRefHandler is first called
        with None for the public and system ids.
        """
        handler_call_args = []
        parser = self._make_parser(handler_call_args, True)
        parser.Parse(b"<?xml version='1.0'?><element/>")
        self.assertEqual(handler_call_args, [(None, None)])

        # test UseForeignDTD() is equal to UseForeignDTD(True)
        handler_call_args[:] = []
        parser = self._make_parser(handler_call_args)
        parser.Parse(b"<?xml version='1.0'?><element/>")
        self.assertEqual(handler_call_args, [(None, None)])

    def test_ignore_use_foreign_dtd(self):
        """
        If UseForeignDTD is passed True and a document with an external
        entity reference is parsed, ExternalEntityRefHandler is called with
        the public and system ids from the document.
        """
        handler_call_args = []
        parser = self._make_parser(handler_call_args, True)
        parser.Parse(
            b"<?xml version='1.0'?><!DOCTYPE foo PUBLIC 'bar' 'baz'><element/>")
        self.assertEqual(handler_call_args, [("bar", "baz")])
if __name__ == "__main__":
    # Discover and run every TestCase in this module.
    unittest.main()
| [
"xml.parsers.expat.ParserCreate",
"sysconfig.is_python_build",
"xml.parsers.expat.ErrorString",
"io.BytesIO",
"test.support.sortdict",
"os.path.basename",
"unittest.main",
"platform.machine",
"traceback.extract_tb"
] | [((28048, 28063), 'unittest.main', 'unittest.main', ([], {}), '()\n', (28061, 28063), False, 'import unittest\n'), ((428, 471), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '"""!"""'}), "(namespace_separator='!')\n", (446, 471), False, 'from xml.parsers import expat\n'), ((8673, 8716), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '"""!"""'}), "(namespace_separator='!')\n", (8691, 8716), False, 'from xml.parsers import expat\n'), ((9054, 9097), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '"""!"""'}), "(namespace_separator='!')\n", (9072, 9097), False, 'from xml.parsers import expat\n'), ((9393, 9436), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '"""!"""'}), "(namespace_separator='!')\n", (9411, 9436), False, 'from xml.parsers import expat\n'), ((9498, 9511), 'io.BytesIO', 'BytesIO', (['data'], {}), '(data)\n', (9505, 9511), False, 'from io import BytesIO\n'), ((9678, 9698), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (9696, 9698), False, 'from xml.parsers import expat\n'), ((9715, 9728), 'io.BytesIO', 'BytesIO', (['data'], {}), '(data)\n', (9722, 9728), False, 'from io import BytesIO\n'), ((10396, 10416), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (10414, 10416), False, 'from xml.parsers import expat\n'), ((10426, 10470), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': 'None'}), '(namespace_separator=None)\n', (10444, 10470), False, 'from xml.parsers import expat\n'), ((10480, 10523), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '""" """'}), "(namespace_separator=' ')\n", (10498, 10523), False, 'from xml.parsers import expat\n'), ((11595, 11637), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '""""""'}), 
"(namespace_separator='')\n", (11613, 11637), False, 'from xml.parsers import expat\n'), ((11770, 11790), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (11788, 11790), False, 'from xml.parsers import expat\n'), ((12728, 12771), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '"""!"""'}), "(namespace_separator='!')\n", (12746, 12771), False, 'from xml.parsers import expat\n'), ((13115, 13135), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (13133, 13135), False, 'from xml.parsers import expat\n'), ((14154, 14174), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (14172, 14174), False, 'from xml.parsers import expat\n'), ((17332, 17352), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (17350, 17352), False, 'from xml.parsers import expat\n'), ((19256, 19276), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (19274, 19276), False, 'from xml.parsers import expat\n'), ((20153, 20173), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (20171, 20173), False, 'from xml.parsers import expat\n'), ((20644, 20664), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (20662, 20664), False, 'from xml.parsers import expat\n'), ((21223, 21243), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (21241, 21243), False, 'from xml.parsers import expat\n'), ((22013, 22033), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (22031, 22033), False, 'from xml.parsers import expat\n'), ((23111, 23131), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (23129, 23131), False, 'from xml.parsers import expat\n'), ((23525, 23545), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (23543, 23545), False, 'from xml.parsers import expat\n'), ((24133, 24153), 'xml.parsers.expat.ParserCreate', 
'expat.ParserCreate', ([], {}), '()\n', (24151, 24153), False, 'from xml.parsers import expat\n'), ((24679, 24699), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (24697, 24699), False, 'from xml.parsers import expat\n'), ((25039, 25059), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (25057, 25059), False, 'from xml.parsers import expat\n'), ((25584, 25604), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (25602, 25604), False, 'from xml.parsers import expat\n'), ((26434, 26454), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (26452, 26454), False, 'from xml.parsers import expat\n'), ((26866, 26886), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (26884, 26886), False, 'from xml.parsers import expat\n'), ((27651, 27671), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {}), '()\n', (27669, 27671), False, 'from xml.parsers import expat\n'), ((10071, 10107), 'xml.parsers.expat.ErrorString', 'expat.ErrorString', (['cm.exception.code'], {}), '(cm.exception.code)\n', (10088, 10107), False, 'from xml.parsers import expat\n'), ((10582, 10624), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '(42)'}), '(namespace_separator=42)\n', (10600, 10624), False, 'from xml.parsers import expat\n'), ((10844, 10894), 'xml.parsers.expat.ParserCreate', 'expat.ParserCreate', ([], {'namespace_separator': '"""too long"""'}), "(namespace_separator='too long')\n", (10862, 10894), False, 'from xml.parsers import expat\n'), ((17197, 17223), 'os.path.basename', 'os.path.basename', (['entry[0]'], {}), '(entry[0])\n', (17213, 17223), False, 'import os\n'), ((17828, 17865), 'traceback.extract_tb', 'traceback.extract_tb', (['e.__traceback__'], {}), '(e.__traceback__)\n', (17848, 17865), False, 'import traceback\n'), ((2972, 2987), 'test.support.sortdict', 'sortdict', (['attrs'], {}), '(attrs)\n', (2980, 2987), False, 
'from test.support import sortdict\n'), ((18313, 18340), 'sysconfig.is_python_build', 'sysconfig.is_python_build', ([], {}), '()\n', (18338, 18340), False, 'import sysconfig\n'), ((18378, 18396), 'platform.machine', 'platform.machine', ([], {}), '()\n', (18394, 18396), False, 'import platform\n')] |
from concurrent import futures
import logging
import os
import grpc
from PIL import Image, ImageOps
import helloworld_pb2
import helloworld_pb2_grpc
from minio import Minio
# Name of the environment variable holding the MinIO server address.
minioEnvKey = "MINIO_ADDRESS"
# Object names expected to exist in the 'mybucket' bucket.
image_name = 'img2.jpeg'
image2_name = 'img3.jpeg'
# Local paths the images are downloaded to before processing.
image_path = '/pulled_' + image_name
image_path2 = '/pulled_' +image2_name
# Greeting payloads for the record/replay request modes.
responses = ["record_response", "replay_response"]
# Resolved once at import time; None when the variable is unset.
minioAddress = os.getenv(minioEnvKey)
class Greeter(helloworld_pb2_grpc.GreeterServicer):
    """gRPC Greeter that pulls an image from MinIO and rotates it per request."""

    def _fetch_and_rotate(self, client, object_name, local_path):
        """Download `object_name` from 'mybucket' to `local_path` and rotate it.

        The rotated result is discarded, matching the original behavior:
        the image work only exercises I/O + CPU.  The context manager
        closes the image file handle, which the original leaked.
        """
        client.fget_object('mybucket', object_name, local_path)
        with Image.open(local_path) as image:
            image.transpose(Image.ROTATE_90)

    def SayHello(self, request, context):
        """Handle a HelloRequest; request.name selects record/replay mode.

        Returns a HelloReply, or None when MINIO_ADDRESS is not configured
        (preserved from the original, even though returning None from a
        gRPC handler is itself questionable).
        """
        if minioAddress is None:  # was `== None`; identity check is correct
            return None
        minio_client = Minio(minioAddress,
                             access_key='minioadmin',
                             secret_key='minioadmin',
                             secure=False)
        # The three branches previously duplicated the download/rotate code.
        if request.name == "record":
            msg = 'Hello, %s!' % responses[0]
            self._fetch_and_rotate(minio_client, image_name, image_path)
        elif request.name == "replay":
            msg = 'Hello, %s!' % responses[1]
            self._fetch_and_rotate(minio_client, image2_name, image_path2)
        else:
            msg = 'Hello, %s!' % request.name
            self._fetch_and_rotate(minio_client, image_name, image_path)
        return helloworld_pb2.HelloReply(message=msg)
def serve():
    """Start the gRPC server on port 50051 and block until termination."""
    executor = futures.ThreadPoolExecutor(max_workers=1)
    grpc_server = grpc.server(executor)
    helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), grpc_server)
    grpc_server.add_insecure_port('[::]:50051')
    grpc_server.start()
    grpc_server.wait_for_termination()
if __name__ == '__main__':
    # Configure default logging, then serve forever.
    logging.basicConfig()
    serve()
| [
"logging.basicConfig",
"PIL.Image.open",
"os.getenv",
"minio.Minio",
"concurrent.futures.ThreadPoolExecutor",
"helloworld_pb2.HelloReply"
] | [((401, 423), 'os.getenv', 'os.getenv', (['minioEnvKey'], {}), '(minioEnvKey)\n', (410, 423), False, 'import os\n'), ((1803, 1824), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (1822, 1824), False, 'import logging\n'), ((600, 687), 'minio.Minio', 'Minio', (['minioAddress'], {'access_key': '"""minioadmin"""', 'secret_key': '"""minioadmin"""', 'secure': '(False)'}), "(minioAddress, access_key='minioadmin', secret_key='minioadmin',\n secure=False)\n", (605, 687), False, 'from minio import Minio\n'), ((1479, 1517), 'helloworld_pb2.HelloReply', 'helloworld_pb2.HelloReply', ([], {'message': 'msg'}), '(message=msg)\n', (1504, 1517), False, 'import helloworld_pb2\n'), ((1558, 1599), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(1)'}), '(max_workers=1)\n', (1584, 1599), False, 'from concurrent import futures\n'), ((907, 929), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (917, 929), False, 'from PIL import Image, ImageOps\n'), ((1161, 1184), 'PIL.Image.open', 'Image.open', (['image_path2'], {}), '(image_path2)\n', (1171, 1184), False, 'from PIL import Image, ImageOps\n'), ((1389, 1411), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (1399, 1411), False, 'from PIL import Image, ImageOps\n')] |
import sys
from PyQt5.QtWidgets import QApplication
from client.mainWindow import MainWindow
def main():
    """Create the Qt application, show the main window, run the event loop."""
    application = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    # exec_() returns the event-loop exit code; propagate it to the OS.
    sys.exit(application.exec_())
if __name__ == "__main__":
    # Launch the GUI when run as a script.
    main()
| [
"client.mainWindow.MainWindow",
"PyQt5.QtWidgets.QApplication"
] | [((117, 139), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (129, 139), False, 'from PyQt5.QtWidgets import QApplication\n'), ((150, 162), 'client.mainWindow.MainWindow', 'MainWindow', ([], {}), '()\n', (160, 162), False, 'from client.mainWindow import MainWindow\n')] |
'''
@Date: 2019-11-26 19:58:17
@Author: ywyz
@LastModifiedBy: ywyz
@Github: https://github.com/ywyz
@LastEditors: ywyz
@LastEditTime: 2019-11-26 19:59:27
'''
import turtle
from math import pi, sin, cos
from UsefulTurtleFunctions import drawLine, writeText
# Axes: x axis with arrow head, then y axis with arrow head.
axis_segments = [
    (-200, 0, 200, 0),
    (175, 15, 200, 0),
    (200, 0, 175, -15),
    (0, 200, 0, -200),
    (-15, 175, 0, 200),
    (0, 200, 15, 175),
]
for x1, y1, x2, y2 in axis_segments:
    drawLine(x1, y1, x2, y2)


def plot_wave(func, color):
    # Trace 50*func(x/100 * 2*pi) over x in [-175, 175].  The pen is up
    # before the first goto, so the move onto the curve is not drawn;
    # pendown after each goto draws every subsequent segment.
    turtle.penup()
    turtle.color(color)
    for x in range(-175, 176):
        turtle.goto(x, 50 * func((x / 100) * 2 * pi))
        turtle.pendown()


plot_wave(sin, "blue")  # sine curve
plot_wave(cos, "red")   # cosine curve

# Label -2*pi and 2*pi on the x axis.
writeText("-2\u03c0", -100, -15)
writeText("2\u03c0", 100, -15)
turtle.done()
| [
"turtle.pendown",
"turtle.penup",
"turtle.done",
"UsefulTurtleFunctions.writeText",
"turtle.color",
"math.cos",
"math.sin",
"UsefulTurtleFunctions.drawLine"
] | [((258, 283), 'UsefulTurtleFunctions.drawLine', 'drawLine', (['(-200)', '(0)', '(200)', '(0)'], {}), '(-200, 0, 200, 0)\n', (266, 283), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((284, 309), 'UsefulTurtleFunctions.drawLine', 'drawLine', (['(175)', '(15)', '(200)', '(0)'], {}), '(175, 15, 200, 0)\n', (292, 309), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((310, 336), 'UsefulTurtleFunctions.drawLine', 'drawLine', (['(200)', '(0)', '(175)', '(-15)'], {}), '(200, 0, 175, -15)\n', (318, 336), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((337, 362), 'UsefulTurtleFunctions.drawLine', 'drawLine', (['(0)', '(200)', '(0)', '(-200)'], {}), '(0, 200, 0, -200)\n', (345, 362), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((363, 389), 'UsefulTurtleFunctions.drawLine', 'drawLine', (['(-15)', '(175)', '(0)', '(200)'], {}), '(-15, 175, 0, 200)\n', (371, 389), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((390, 415), 'UsefulTurtleFunctions.drawLine', 'drawLine', (['(0)', '(200)', '(15)', '(175)'], {}), '(0, 200, 15, 175)\n', (398, 415), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((416, 430), 'turtle.penup', 'turtle.penup', ([], {}), '()\n', (428, 430), False, 'import turtle\n'), ((432, 452), 'turtle.color', 'turtle.color', (['"""blue"""'], {}), "('blue')\n", (444, 452), False, 'import turtle\n'), ((558, 572), 'turtle.penup', 'turtle.penup', ([], {}), '()\n', (570, 572), False, 'import turtle\n'), ((573, 592), 'turtle.color', 'turtle.color', (['"""red"""'], {}), "('red')\n", (585, 592), False, 'import turtle\n'), ((691, 718), 'UsefulTurtleFunctions.writeText', 'writeText', (['"""-2π"""', '(-100)', '(-15)'], {}), "('-2π', -100, -15)\n", (700, 718), False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((724, 749), 'UsefulTurtleFunctions.writeText', 'writeText', (['"""2π"""', '(100)', '(-15)'], {}), "('2π', 100, -15)\n", (733, 749), 
False, 'from UsefulTurtleFunctions import drawLine, writeText\n'), ((756, 769), 'turtle.done', 'turtle.done', ([], {}), '()\n', (767, 769), False, 'import turtle\n'), ((533, 549), 'turtle.pendown', 'turtle.pendown', ([], {}), '()\n', (547, 549), False, 'import turtle\n'), ((673, 689), 'turtle.pendown', 'turtle.pendown', ([], {}), '()\n', (687, 689), False, 'import turtle\n'), ((504, 525), 'math.sin', 'sin', (['(x / 100 * 2 * pi)'], {}), '(x / 100 * 2 * pi)\n', (507, 525), False, 'from math import pi, sin, cos\n'), ((644, 665), 'math.cos', 'cos', (['(x / 100 * 2 * pi)'], {}), '(x / 100 * 2 * pi)\n', (647, 665), False, 'from math import pi, sin, cos\n')] |
from django.shortcuts import render
from models import Camp, Resource, AuditLog, Reservation
from datetime import datetime, timedelta
def index(request):
    """Render the home page: all camps and resources plus today's reservations.

    Reservations are restricted to those that start and end within today's
    calendar date and are grouped per resource for display.
    """
    today = datetime.now().date()
    tomorrow = today + timedelta(1)
    # Reservations fully inside [today, tomorrow).
    reservations = Reservation.objects.filter(start_time__gte=today) \
                                      .filter(end_time__lt=tomorrow) \
                                      .order_by('start_time')
    data = {
        'camps': Camp.objects.all().order_by('name'),
        'resources': Resource.objects.all().order_by('name'),
        'today': group_reservations_by_resource(reservations),
        # Reuse `today` rather than recomputing datetime.now().date(): the
        # original second call could disagree with the query window if the
        # request straddled midnight.
        'date': today,
    }
    # check for an easter egg, show it now and clear it out
    if 'easter' in request.session:
        data['easter'] = request.session['easter']
        del request.session['easter']
    return render(request, 'index.html', data)
def log(request, year=None, month=None, day=None):
    """Render audit-log entries, optionally restricted to a year/month/day.

    With no arguments every entry is shown; otherwise the window narrows
    to the given day, month, or year.
    """
    if year:
        y = int(year)
        m = int(month) if month else 1
        d = int(day) if day else 1
        window_start = datetime(y, m, d)
        # The window end depends on how specific the caller was.
        if day:
            window_end = window_start + timedelta(1)
        elif month:
            # Advance one calendar month, rolling December into January.
            if m == 12:
                window_end = datetime(y + 1, 1, 1)
            else:
                window_end = datetime(y, m + 1, 1)
        else:
            window_end = datetime(y + 1, 1, 1)
        logs = AuditLog.objects.filter(timestamp__gte=window_start) \
                               .filter(timestamp__lt=window_end) \
                               .order_by('timestamp')
    else:
        logs = AuditLog.objects.all().order_by('timestamp')
    data = {
        'logs': logs
    }
    return render(request, 'logs.html', data)
def group_reservations_by_resource(reservations):
    """Group reservations by the resources they use.

    This method takes in a list of reservation objects that are assumed
    to be sorted by time. It returns a list of tuples, each representing
    one resource's reservations. The first item in the tuple is the
    resource id; the second item is the resource name; the third item is
    a list of reservations that use that resource. Because reservations
    may use more than one resource, the final output will likely
    duplicate reservations in multiple lists.
    """
    reservation_resources = {}
    resource_names = {}
    for reservation in reservations:
        for resource in reservation.resources.all():
            if resource.id not in reservation_resources:
                resource_names[resource.id] = resource.name
                reservation_resources[resource.id] = []
            reservation_resources[resource.id].append(reservation)
    # dict.iteritems() exists only on Python 2; items() works on 2 and 3.
    return [(key, resource_names[key], value)
            for key, value in reservation_resources.items()]
| [
"django.shortcuts.render",
"datetime.datetime",
"models.Reservation.objects.filter",
"datetime.datetime.now",
"models.Camp.objects.all",
"models.AuditLog.objects.filter",
"models.Resource.objects.all",
"models.AuditLog.objects.all",
"datetime.timedelta"
] | [((869, 904), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', 'data'], {}), "(request, 'index.html', data)\n", (875, 904), False, 'from django.shortcuts import render\n'), ((1884, 1918), 'django.shortcuts.render', 'render', (['request', '"""logs.html"""', 'data'], {}), "(request, 'logs.html', data)\n", (1890, 1918), False, 'from django.shortcuts import render\n'), ((213, 225), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (222, 225), False, 'from datetime import datetime, timedelta\n'), ((1115, 1159), 'datetime.datetime', 'datetime', (['start_year', 'start_month', 'start_day'], {}), '(start_year, start_month, start_day)\n', (1123, 1159), False, 'from datetime import datetime, timedelta\n'), ((168, 182), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (180, 182), False, 'from datetime import datetime, timedelta\n'), ((460, 478), 'models.Camp.objects.all', 'Camp.objects.all', ([], {}), '()\n', (476, 478), False, 'from models import Camp, Resource, AuditLog, Reservation\n'), ((518, 540), 'models.Resource.objects.all', 'Resource.objects.all', ([], {}), '()\n', (538, 540), False, 'from models import Camp, Resource, AuditLog, Reservation\n'), ((638, 652), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (650, 652), False, 'from datetime import datetime, timedelta\n'), ((1212, 1224), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (1221, 1224), False, 'from datetime import datetime, timedelta\n'), ((1474, 1506), 'datetime.datetime', 'datetime', (['end_year', 'end_month', '(1)'], {}), '(end_year, end_month, 1)\n', (1482, 1506), False, 'from datetime import datetime, timedelta\n'), ((1544, 1574), 'datetime.datetime', 'datetime', (['(start_year + 1)', '(1)', '(1)'], {}), '(start_year + 1, 1, 1)\n', (1552, 1574), False, 'from datetime import datetime, timedelta\n'), ((1788, 1810), 'models.AuditLog.objects.all', 'AuditLog.objects.all', ([], {}), '()\n', (1808, 1810), False, 'from models import Camp, 
Resource, AuditLog, Reservation\n'), ((245, 294), 'models.Reservation.objects.filter', 'Reservation.objects.filter', ([], {'start_time__gte': 'today'}), '(start_time__gte=today)\n', (271, 294), False, 'from models import Camp, Resource, AuditLog, Reservation\n'), ((1591, 1641), 'models.AuditLog.objects.filter', 'AuditLog.objects.filter', ([], {'timestamp__gte': 'start_date'}), '(timestamp__gte=start_date)\n', (1614, 1641), False, 'from models import Camp, Resource, AuditLog, Reservation\n')] |
import os
# Telegram bot configuration, resolved from environment variables at import.
BOT_TOKEN = os.getenv('BOT_TOKEN')
if not BOT_TOKEN:
    # The original did print(...) then quit(): quit() is injected by the
    # `site` module and is not guaranteed to exist (e.g. under `python -S`),
    # and it exits with status 0 on what is a fatal misconfiguration.
    # SystemExit with a message prints it to stderr and exits non-zero.
    raise SystemExit('Environment variable BOT_TOKEN must be set')
# WEBHOOK_ENABLED=1 turns webhook mode on; anything else (or unset) is off.
WEBHOOK_ENABLED = os.getenv('WEBHOOK_ENABLED')
WEBHOOK_ENABLED = WEBHOOK_ENABLED and int(WEBHOOK_ENABLED) == 1
if WEBHOOK_ENABLED:
    # Heroku deployment: derive the public webhook URL from the app name,
    # and bind the web app to the platform-assigned $PORT.
    HEROKU_APP_NAME = os.getenv('HEROKU_APP_NAME')
    WEBHOOK_HOST = f'https://{HEROKU_APP_NAME}.herokuapp.com'
    WEBHOOK_PATH = f'/webhook/{BOT_TOKEN}'
    WEBHOOK_URL = f'{WEBHOOK_HOST}{WEBHOOK_PATH}'
    WEBAPP_HOST = '0.0.0.0'
    WEBAPP_PORT = int(os.getenv('PORT'))
| [
"os.getenv"
] | [((23, 45), 'os.getenv', 'os.getenv', (['"""BOT_TOKEN"""'], {}), "('BOT_TOKEN')\n", (32, 45), False, 'import os\n'), ((150, 178), 'os.getenv', 'os.getenv', (['"""WEBHOOK_ENABLED"""'], {}), "('WEBHOOK_ENABLED')\n", (159, 178), False, 'import os\n'), ((286, 314), 'os.getenv', 'os.getenv', (['"""HEROKU_APP_NAME"""'], {}), "('HEROKU_APP_NAME')\n", (295, 314), False, 'import os\n'), ((520, 537), 'os.getenv', 'os.getenv', (['"""PORT"""'], {}), "('PORT')\n", (529, 537), False, 'import os\n')] |
from PIL import Image
import resources
def readlevel(file):
    """Load a level from a gamesize x gamesize bitmap file.

    Each pixel is translated from its editor colour
    (resources.constants.editordef) to the matching in-game tile
    (resources.constants.leveldef); unrecognised pixels are left as
    the 'nothing' tile.

    :param file: file name relative to resources.paths.levels
    :return: 2D list of tiles, indexed as level[col][row]
    """
    size = resources.constants.gamesize
    level = [[resources.constants.leveldef['nothing'] for _ in range(size)]
             for _ in range(size)]
    # One lookup table replaces the original seven-branch if/elif chain.
    # Assumes editordef colours are distinct — TODO confirm.
    pixel_to_tile = {
        resources.constants.editordef[name]: resources.constants.leveldef[name]
        for name in ('coin', 'heart', 'wall', 'player',
                     'red_ghost', 'green_ghost', 'blue_ghost')
    }
    # Context manager closes the image file handle, which the original leaked.
    with Image.open(resources.paths.levels + file) as im:
        im.load()
        for col in range(size):
            for row in range(size):
                pixel = im.getpixel((col, row))
                if pixel in pixel_to_tile:
                    level[col][row] = pixel_to_tile[pixel]
    return level
| [
"PIL.Image.open"
] | [((290, 331), 'PIL.Image.open', 'Image.open', (['(resources.paths.levels + file)'], {}), '(resources.paths.levels + file)\n', (300, 331), False, 'from PIL import Image\n')] |
# Generated by Django 1.11.20 on 2019-06-12 06:41
from django.db import migrations
class Migration(migrations.Migration):
    # Renames UserProfile.enable_stream_sounds to
    # enable_stream_audible_notifications.  A pure column rename; no data
    # migration is required.

    dependencies = [
        ("zerver", "0229_stream_message_retention_days"),
    ]

    operations = [
        migrations.RenameField(
            model_name="userprofile",
            old_name="enable_stream_sounds",
            new_name="enable_stream_audible_notifications",
        ),
    ]
| [
"django.db.migrations.RenameField"
] | [((239, 373), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""userprofile"""', 'old_name': '"""enable_stream_sounds"""', 'new_name': '"""enable_stream_audible_notifications"""'}), "(model_name='userprofile', old_name=\n 'enable_stream_sounds', new_name='enable_stream_audible_notifications')\n", (261, 373), False, 'from django.db import migrations\n')] |
import csv
from flask_wtf import FlaskForm as Form
from flask_wtf.file import FileField, FileRequired, FileAllowed
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
from wtforms import ValidationError
# noinspection PyMethodMayBeStatic
class HostForm(Form):
    """Form for adding/editing a host: FQDN or IP, optional TCP port, nickname."""
    fqdn = StringField('FQDN or IP', validators=[DataRequired()])
    port = StringField('TCP Port')
    friendly_name = StringField('Friendly Name')
    submit = SubmitField('Submit')

    def validate_port(self, field):
        """Reject a non-empty port that is not an integer in 1-65535.

        The original used len(field.data) (crashes when data is None) and
        accepted any integer, e.g. 0 or 99999, which are not valid TCP
        port numbers.
        """
        if field.data:  # empty string and None both mean "no port given"
            try:
                port = int(field.data)
            except ValueError:
                raise ValidationError('Port provided is not valid')
            if not 1 <= port <= 65535:
                raise ValidationError('Port provided is not valid')
class ImportForm(Form):
    # Bulk-import form: accepts only a CSV file of hosts; other extensions
    # are rejected by the FileAllowed validator.
    file = FileField('Hosts', validators=[FileRequired(), FileAllowed(['csv'], 'Only CSV is supported!')])
    submit = SubmitField('Submit')
| [
"flask_wtf.file.FileAllowed",
"wtforms.ValidationError",
"wtforms.SubmitField",
"wtforms.StringField",
"flask_wtf.file.FileRequired",
"wtforms.validators.DataRequired"
] | [((376, 399), 'wtforms.StringField', 'StringField', (['"""TCP Port"""'], {}), "('TCP Port')\n", (387, 399), False, 'from wtforms import StringField, SubmitField\n'), ((420, 448), 'wtforms.StringField', 'StringField', (['"""Friendly Name"""'], {}), "('Friendly Name')\n", (431, 448), False, 'from wtforms import StringField, SubmitField\n'), ((462, 483), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (473, 483), False, 'from wtforms import StringField, SubmitField\n'), ((851, 872), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (862, 872), False, 'from wtforms import StringField, SubmitField\n'), ((348, 362), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (360, 362), False, 'from wtforms.validators import DataRequired\n'), ((773, 787), 'flask_wtf.file.FileRequired', 'FileRequired', ([], {}), '()\n', (785, 787), False, 'from flask_wtf.file import FileField, FileRequired, FileAllowed\n'), ((789, 835), 'flask_wtf.file.FileAllowed', 'FileAllowed', (["['csv']", '"""Only CSV is supported!"""'], {}), "(['csv'], 'Only CSV is supported!')\n", (800, 835), False, 'from flask_wtf.file import FileField, FileRequired, FileAllowed\n'), ((659, 704), 'wtforms.ValidationError', 'ValidationError', (['"""Port provided is not valid"""'], {}), "('Port provided is not valid')\n", (674, 704), False, 'from wtforms import ValidationError\n')] |
# -*- coding: utf-8 -*-
"""Test driver for the outcomes package."""
import sys, os
path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, path)
import pandas as pd
import outcomes.outcomes as outcomes
def test_chg_analysis(src_file=None, src_sheet='Sheet1', src_col_dt='date',
                      src_col_topic='topic', src_col_rfr='rfr', src_col_bmk='benchmark',
                      tgt_file=None, tgt_sheet='Sheet1'):
    """Test the change_analysis function.

    Reads the returns workbook next to this file, runs
    outcomes.change_analysis over it, and writes the result (columns
    reordered to a measure/period/srs_type hierarchy) to the target
    workbook.

    Bug fixed: the original unconditionally overwrote ``src_file`` and
    ``tgt_file`` right after the signature, so both parameters were dead.
    They now act as overridable defaults.
    """
    if src_file is None:
        src_file = 'src_returns.xlsx'
    if tgt_file is None:
        tgt_file = 'out_returns.xlsx'
    # read source data from the directory containing this file
    path = os.path.dirname(os.path.abspath(__file__)) + '/'
    xlsx = pd.ExcelFile(path + src_file)
    src_df = pd.read_excel(xlsx, src_sheet)
    src_df.index = src_df[src_col_dt]
    # measures and periods under test
    measures_labels = outcomes.Measures()
    measures = [measures_labels.level, measures_labels.level_ln,
                measures_labels.chg_rel, measures_labels.chg_ln,
                measures_labels.vol_ln]
    measures_exclude_flag = False
    periods = [outcomes.TP_1M, outcomes.TP_3M, outcomes.TP_6M, outcomes.TP_1Y,
               outcomes.TP_CUM]
    df = outcomes.change_analysis(src_df, src_col_topic, src_col_rfr,
                                  src_col_bmk, measures=measures,
                                  m_lbls=measures_labels,
                                  measures_exclude_flag=measures_exclude_flag,
                                  periods=periods)
    # write the result with a (measure, period, srs_type) column hierarchy
    xlsx_writer = pd.ExcelWriter(path + tgt_file)
    df.reorder_levels(('measure', 'period', 'srs_type'), 1).sort_index(
        axis=1, level=('measure', 'period', 'srs_type')).to_excel(
        xlsx_writer, tgt_sheet)
    xlsx_writer.save()
if __name__ == '__main__':
test_chg_analysis() | [
"sys.path.insert",
"outcomes.outcomes.Measures",
"pandas.ExcelFile",
"pandas.read_excel",
"os.path.abspath",
"pandas.ExcelWriter",
"outcomes.outcomes.change_analysis"
] | [((152, 176), 'sys.path.insert', 'sys.path.insert', (['(0)', 'path'], {}), '(0, path)\n', (167, 176), False, 'import sys, os\n'), ((642, 671), 'pandas.ExcelFile', 'pd.ExcelFile', (['(path + src_file)'], {}), '(path + src_file)\n', (654, 671), True, 'import pandas as pd\n'), ((685, 715), 'pandas.read_excel', 'pd.read_excel', (['xlsx', 'src_sheet'], {}), '(xlsx, src_sheet)\n', (698, 715), True, 'import pandas as pd\n'), ((801, 820), 'outcomes.outcomes.Measures', 'outcomes.Measures', ([], {}), '()\n', (818, 820), True, 'import outcomes.outcomes as outcomes\n'), ((1148, 1335), 'outcomes.outcomes.change_analysis', 'outcomes.change_analysis', (['src_df', 'src_col_topic', 'src_col_rfr', 'src_col_bmk'], {'measures': 'measures', 'm_lbls': 'measures_labels', 'measures_exclude_flag': 'measures_exclude_flag', 'periods': 'periods'}), '(src_df, src_col_topic, src_col_rfr, src_col_bmk,\n measures=measures, m_lbls=measures_labels, measures_exclude_flag=\n measures_exclude_flag, periods=periods)\n', (1172, 1335), True, 'import outcomes.outcomes as outcomes\n'), ((1485, 1516), 'pandas.ExcelWriter', 'pd.ExcelWriter', (['(path + tgt_file)'], {}), '(path + tgt_file)\n', (1499, 1516), True, 'import pandas as pd\n'), ((124, 149), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (139, 149), False, 'import sys, os\n'), ((530, 555), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (545, 555), False, 'import sys, os\n')] |
import re
import logging
import tarfile
import tempfile
import zipfile
from pathlib import Path
from typing import Callable, Dict, List, Optional, Tuple, Union
import json
from farm.data_handler.utils import http_get
from haystack.file_converter.base import BaseConverter
from haystack.file_converter.docx import DocxToTextConverter
from haystack.file_converter.pdf import PDFToTextConverter
from haystack.file_converter.tika import TikaConverter
from haystack import Document, Label
from haystack.file_converter.txt import TextConverter
logger = logging.getLogger(__name__)
def eval_data_from_file(filename: str) -> Tuple[List[Document], List[Label]]:
    """
    Read Documents + Labels from a SQuAD-style file.
    Document and Labels can then be indexed to the DocumentStore and be used for evaluation.

    :param filename: Path to file in SQuAD format
    :return: (List of Documents, List of Labels)
    """
    docs = []
    labels = []
    with open(filename, "r") as file:
        data = json.load(file)
        for document in data["data"]:
            # get all extra fields from document level (e.g. title)
            meta_doc = {k: v for k, v in document.items() if k not in ("paragraphs", "title")}
            for paragraph in document["paragraphs"]:
                cur_meta = {"name": document["title"]}
                # all other fields from paragraph level
                meta_paragraph = {k: v for k, v in paragraph.items() if k not in ("qas", "context")}
                cur_meta.update(meta_paragraph)
                # meta from parent document; this update runs last, so
                # document-level keys win over paragraph-level keys on clash
                cur_meta.update(meta_doc)
                # Create Document (one per paragraph context)
                cur_doc = Document(text=paragraph["context"], meta=cur_meta)
                docs.append(cur_doc)
                # Get Labels
                for qa in paragraph["qas"]:
                    # NOTE(review): qa["is_impossible"] is read unconditionally,
                    # so this assumes SQuAD v2-style files — confirm for v1 data.
                    if len(qa["answers"]) > 0:
                        # one gold label per annotated answer span
                        for answer in qa["answers"]:
                            label = Label(
                                question=qa["question"],
                                answer=answer["text"],
                                is_correct_answer=True,
                                is_correct_document=True,
                                document_id=cur_doc.id,
                                offset_start_in_doc=answer["answer_start"],
                                no_answer=qa["is_impossible"],
                                origin="gold_label",
                            )
                            labels.append(label)
                    else:
                        # unanswerable question: record an empty-answer label
                        label = Label(
                            question=qa["question"],
                            answer="",
                            is_correct_answer=True,
                            is_correct_document=True,
                            document_id=cur_doc.id,
                            offset_start_in_doc=0,
                            no_answer=qa["is_impossible"],
                            origin="gold_label",
                        )
                        labels.append(label)
    return docs, labels
def convert_files_to_dicts(dir_path: str, clean_func: Optional[Callable] = None, split_paragraphs: bool = False) -> List[dict]:
    """
    Convert all files(.txt, .pdf, .docx) in the sub-directories of the given path to Python dicts that can be written to a
    Document Store.

    :param dir_path: path for the documents to be written to the DocumentStore
    :param clean_func: a custom cleaning function that gets applied to each doc (input: str, output:str)
    :param split_paragraphs: split text in paragraphs.
    :return: list of dicts of shape {"text": ..., "meta": {"name": ...}}
    """
    # one converter class per supported file suffix
    converter_classes = {
        ".pdf": PDFToTextConverter,
        ".txt": TextConverter,
        ".docx": DocxToTextConverter,
    }

    # group candidate files by lower-cased suffix; warn about unsupported regular files
    suffix2paths: Dict[str, List[Path]] = {}
    for candidate in Path(dir_path).glob("**/*"):
        suffix = candidate.suffix.lower()
        if suffix in converter_classes:
            suffix2paths.setdefault(suffix, []).append(candidate)
        elif not candidate.is_dir():
            logger.warning('Skipped file {0} as type {1} is not supported here. '
                           'See haystack.file_converter for support of more file types'.format(candidate, suffix))

    # instantiate a converter only for the suffixes that actually occur
    suffix2converter: Dict[str, BaseConverter] = {suffix: converter_classes[suffix]() for suffix in suffix2paths}

    documents = []
    for suffix, paths in suffix2paths.items():
        for file_path in paths:
            logger.info('Converting {}'.format(file_path))
            converted = suffix2converter[suffix].convert(file_path=file_path, meta=None)
            text = converted["text"]
            if clean_func:
                text = clean_func(text)
            if split_paragraphs:
                # one dict per non-empty paragraph
                documents.extend({"text": para, "meta": {"name": file_path.name}}
                                 for para in text.split("\n\n") if para.strip())
            else:
                documents.append({"text": text, "meta": {"name": file_path.name}})
    return documents
def tika_convert_files_to_dicts(
    dir_path: str,
    clean_func: Optional[Callable] = None,
    split_paragraphs: bool = False,
    merge_short: bool = True,
    merge_lowercase: bool = True
) -> List[dict]:
    """
    Convert all files(.txt, .pdf) in the sub-directories of the given path to Python dicts that can be written to a
    Document Store, using Apache Tika for the text extraction.

    :param merge_lowercase: merge a paragraph into the previous one when it starts with a lower-case
                            character and the previous paragraph does not end with punctuation
    :param merge_short: allow merging of short paragraphs (fewer than 10 characters)
    :param dir_path: path for the documents to be written to the DocumentStore
    :param clean_func: a custom cleaning function that gets applied to each doc (input: str, output:str)
    :param split_paragraphs: split text in paragraphs.
    :return: list of dicts of shape {"text": ..., "meta": ...}
    """
    converter = TikaConverter(remove_header_footer=True)
    paths = [p for p in Path(dir_path).glob("**/*")]
    allowed_suffixes = [".pdf", ".txt"]
    file_paths: List[Path] = []
    # keep only supported files; warn about any other regular file
    for path in paths:
        file_suffix = path.suffix.lower()
        if file_suffix in allowed_suffixes:
            file_paths.append(path)
        elif not path.is_dir():
            logger.warning('Skipped file {0} as type {1} is not supported here. '
                           'See haystack.file_converter for support of more file types'.format(path, file_suffix))
    documents = []
    for path in file_paths:
        logger.info('Converting {}'.format(path))
        document = converter.convert(path)
        meta = document["meta"] or {}
        meta["name"] = path.name
        text = document["text"]
        # "\f" is the form-feed page separator emitted by Tika
        pages = text.split("\f")
        if split_paragraphs:
            if pages:
                paras = pages[0].split("\n\n")
                # pop the last paragraph from the first page
                last_para = paras.pop(-1) if paras else ''
                for page in pages[1:]:
                    page_paras = page.split("\n\n")
                    # merge the last paragraph in previous page to the first paragraph in this page
                    if page_paras:
                        page_paras[0] = last_para + ' ' + page_paras[0]
                        last_para = page_paras.pop(-1)
                        paras += page_paras
                if last_para:
                    paras.append(last_para)
            # NOTE(review): `paras` is unbound when `pages` is empty; str.split always
            # returns at least one element, so this appears unreachable — confirm.
            if paras:
                last_para = ''
                for para in paras:
                    para = para.strip()
                    if not para: continue
                    # merge paragraphs to improve qa
                    # merge this paragraph if less than 10 characters or 2 words
                    # or this paragraph starts with a lower case and last paragraph does not end with a punctuation
                    # NOTE(review): due to and/or precedence the "< 2 words" merge applies even
                    # when merge_short is False — confirm that is intended.
                    if merge_short and len(para) < 10 or len(re.findall('\s+', para)) < 2 \
                            or merge_lowercase and para and para[0].islower() and last_para and last_para[-1] not in '.?!"\'\]\)':
                        last_para += ' ' + para
                    else:
                        if last_para:
                            documents.append({"text": last_para, "meta": meta})
                        last_para = para
                # don't forget the last one
                if last_para:
                    documents.append({"text": last_para, "meta": meta})
        else:
            # no paragraph splitting: emit one dict per file (optionally cleaned)
            if clean_func:
                text = clean_func(text)
            documents.append({"text": text, "meta": meta})
    return documents
def fetch_archive_from_http(url: str, output_dir: str, proxies: Optional[dict] = None):
    """
    Fetch an archive (zip or tar.gz) from a url via http and extract content to an output directory.

    :param url: http address
    :type url: str
    :param output_dir: local path
    :type output_dir: str
    :param proxies: proxies details as required by requests library
    :type proxies: dict
    :return: bool if anything got fetched
    """
    # verify & prepare local directory
    target_dir = Path(output_dir)
    if not target_dir.exists():
        target_dir.mkdir(parents=True)

    # never overwrite data that is already present
    if any(target_dir.rglob("*")):
        logger.info(
            f"Found data stored in `{output_dir}`. Delete this first if you really want to fetch new data."
        )
        return False

    logger.info(f"Fetching from {url} to `{output_dir}`")
    # download into a temp file, then extract depending on the archive type
    with tempfile.NamedTemporaryFile() as temp_file:
        http_get(url, temp_file, proxies=proxies)
        temp_file.flush()
        temp_file.seek(0)  # making tempfile accessible
        if url.endswith(".zip"):
            zipfile.ZipFile(temp_file.name).extractall(output_dir)
        elif url.endswith(".tar.gz"):
            tarfile.open(temp_file.name).extractall(output_dir)
        else:
            logger.warning('Skipped url {0} as file type is not supported here. '
                           'See haystack documentation for support of more file types'.format(url))
    # temp_file gets deleted when the context manager exits
    return True
| [
"logging.getLogger",
"haystack.file_converter.tika.TikaConverter",
"tarfile.open",
"haystack.Label",
"zipfile.ZipFile",
"pathlib.Path",
"haystack.file_converter.txt.TextConverter",
"farm.data_handler.utils.http_get",
"haystack.file_converter.pdf.PDFToTextConverter",
"haystack.Document",
"tempfil... | [((550, 577), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (567, 577), False, 'import logging\n'), ((6278, 6318), 'haystack.file_converter.tika.TikaConverter', 'TikaConverter', ([], {'remove_header_footer': '(True)'}), '(remove_header_footer=True)\n', (6291, 6318), False, 'from haystack.file_converter.tika import TikaConverter\n'), ((9538, 9554), 'pathlib.Path', 'Path', (['output_dir'], {}), '(output_dir)\n', (9542, 9554), False, 'from pathlib import Path\n'), ((1004, 1019), 'json.load', 'json.load', (['file'], {}), '(file)\n', (1013, 1019), False, 'import json\n'), ((4550, 4570), 'haystack.file_converter.pdf.PDFToTextConverter', 'PDFToTextConverter', ([], {}), '()\n', (4568, 4570), False, 'from haystack.file_converter.pdf import PDFToTextConverter\n'), ((4649, 4664), 'haystack.file_converter.txt.TextConverter', 'TextConverter', ([], {}), '()\n', (4662, 4664), False, 'from haystack.file_converter.txt import TextConverter\n'), ((4744, 4765), 'haystack.file_converter.docx.DocxToTextConverter', 'DocxToTextConverter', ([], {}), '()\n', (4763, 4765), False, 'from haystack.file_converter.docx import DocxToTextConverter\n'), ((9967, 9996), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (9994, 9996), False, 'import tempfile\n'), ((10023, 10064), 'farm.data_handler.utils.http_get', 'http_get', (['url', 'temp_file'], {'proxies': 'proxies'}), '(url, temp_file, proxies=proxies)\n', (10031, 10064), False, 'from farm.data_handler.utils import http_get\n'), ((1680, 1730), 'haystack.Document', 'Document', ([], {'text': "paragraph['context']", 'meta': 'cur_meta'}), "(text=paragraph['context'], meta=cur_meta)\n", (1688, 1730), False, 'from haystack import Document, Label\n'), ((10242, 10273), 'zipfile.ZipFile', 'zipfile.ZipFile', (['temp_file.name'], {}), '(temp_file.name)\n', (10257, 10273), False, 'import zipfile\n'), ((3699, 3713), 'pathlib.Path', 'Path', (['dir_path'], {}), '(dir_path)\n', (3703, 
3713), False, 'from pathlib import Path\n'), ((6343, 6357), 'pathlib.Path', 'Path', (['dir_path'], {}), '(dir_path)\n', (6347, 6357), False, 'from pathlib import Path\n'), ((10395, 10423), 'tarfile.open', 'tarfile.open', (['temp_file.name'], {}), '(temp_file.name)\n', (10407, 10423), False, 'import tarfile\n'), ((2596, 2794), 'haystack.Label', 'Label', ([], {'question': "qa['question']", 'answer': '""""""', 'is_correct_answer': '(True)', 'is_correct_document': '(True)', 'document_id': 'cur_doc.id', 'offset_start_in_doc': '(0)', 'no_answer': "qa['is_impossible']", 'origin': '"""gold_label"""'}), "(question=qa['question'], answer='', is_correct_answer=True,\n is_correct_document=True, document_id=cur_doc.id, offset_start_in_doc=0,\n no_answer=qa['is_impossible'], origin='gold_label')\n", (2601, 2794), False, 'from haystack import Document, Label\n'), ((9643, 9653), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (9647, 9653), False, 'from pathlib import Path\n'), ((1978, 2215), 'haystack.Label', 'Label', ([], {'question': "qa['question']", 'answer': "answer['text']", 'is_correct_answer': '(True)', 'is_correct_document': '(True)', 'document_id': 'cur_doc.id', 'offset_start_in_doc': "answer['answer_start']", 'no_answer': "qa['is_impossible']", 'origin': '"""gold_label"""'}), "(question=qa['question'], answer=answer['text'], is_correct_answer=\n True, is_correct_document=True, document_id=cur_doc.id,\n offset_start_in_doc=answer['answer_start'], no_answer=qa[\n 'is_impossible'], origin='gold_label')\n", (1983, 2215), False, 'from haystack import Document, Label\n'), ((8303, 8327), 're.findall', 're.findall', (['"""\\\\s+"""', 'para'], {}), "('\\\\s+', para)\n", (8313, 8327), False, 'import re\n')] |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import collections
import re
import os
import ast
import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
from resource_management.libraries.script import Script
from resource_management.libraries.functions import default
from resource_management.libraries.functions import format
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format_jvm_option
from resource_management.libraries.functions.is_empty import is_empty
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.expect import expect
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.stack_features import get_stack_feature_version
from resource_management.libraries.functions.get_architecture import get_architecture
from ambari_commons.constants import AMBARI_SUDO_BINARY
# Command JSON parsed by the agent plus scratch/stack directories resolved by Script helpers.
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
stack_root = Script.get_stack_root()
architecture = get_architecture()
# DFS type from command params; empty string when not supplied
dfs_type = default("/commandParams/dfs_type", "")
artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
# JDK details pushed down from the Ambari server
jdk_name = default("/hostLevelParams/jdk_name", None)
java_home = config['hostLevelParams']['java_home']
java_version = expect("/hostLevelParams/java_version", int)
jdk_location = config['hostLevelParams']['jdk_location']
hadoop_custom_extensions_enabled = default("/configurations/core-site/hadoop.custom-extensions.enabled", False)
sudo = AMBARI_SUDO_BINARY
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
upgrade_type = Script.get_upgrade_type(default("/commandParams/upgrade_type", ""))
version = default("/commandParams/version", None)
# Handle upgrade and downgrade: the target version from the command overrides
# the stack version computed above.
if (upgrade_type is not None) and version:
  stack_version_formatted = format_stack_version(version)
security_enabled = config['configurations']['cluster-env']['security_enabled']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
# Some datanode settings (all optional; None when the property is absent)
dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
dfs_dn_http_addr = default('/configurations/hdfs-site/dfs.datanode.http.address', None)
dfs_dn_https_addr = default('/configurations/hdfs-site/dfs.datanode.https.address', None)
dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
# recomputed below once the datanode ports are known
secure_dn_ports_are_in_use = False
def get_port(address):
  """
  Extracts port from the address like 0.0.0.0:1019

  Accepts a bare host:port pair or one prefixed with http:// or https://.
  Returns the port as an int, or None when address is None or no port is found.
  """
  if address is None:
    return None
  match = re.search(r'(?:http(?:s)?://)?([\w\d.]*):(\d{1,5})', address)
  return int(match.group(2)) if match is not None else None
def is_secure_port(port):
  """
  Returns True if port is root-owned at *nix systems

  Ports below 1024 require root privileges to bind; None yields False.
  """
  return port < 1024 if port is not None else False
# upgrades would cause these directories to have a version instead of "current"
# which would cause a lot of problems when writing out hadoop-env.sh; instead
# force the use of "current" in the hook
hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
hadoop_home = stack_select.get_hadoop_dir("home")
hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
hadoop_lib_home = stack_select.get_hadoop_dir("lib")
hadoop_dir = "/etc/hadoop"
hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])
mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
# Decide whether the datanode must be started via the secure (jsvc) launcher.
# NOTE(review): the literal '""' (quotes included) appears to be substituted
# verbatim into hadoop-env.sh as an empty value — confirm against the template.
if not security_enabled:
  hadoop_secure_dn_user = '""'
else:
  dfs_dn_port = get_port(dfs_dn_addr)
  dfs_dn_http_port = get_port(dfs_dn_http_addr)
  dfs_dn_https_port = get_port(dfs_dn_https_addr)
  # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
  if dfs_http_policy == "HTTPS_ONLY":
    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
  elif dfs_http_policy == "HTTP_AND_HTTPS":
    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
  else: # params.dfs_http_policy == "HTTP_ONLY" or not defined:
    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
  # only run the datanode as a privileged user when a root-owned port is in use
  if secure_dn_ports_are_in_use:
    hadoop_secure_dn_user = hdfs_user
  else:
    hadoop_secure_dn_user = '""'
# hadoop params: log/pid locations and JVM sizing taken from hadoop-env / mapred-env
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
jsvc_path = "/usr/lib/bigtop-utils"
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
# fixed JVM sizes for jobtracker/tasktracker-era daemons
jtnode_opt_newsize = "200m"
jtnode_opt_maxnewsize = "200m"
jtnode_heapsize = "1024m"
ttnode_heapsize = "1024m"
dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
nfsgateway_heapsize = config['configurations']['hadoop-env']['nfsgateway_heapsize']
mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
# users and groups: service accounts as configured per service *-env
hbase_user = config['configurations']['hbase-env']['hbase_user']
smoke_user = config['configurations']['cluster-env']['smokeuser']
gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
gmond_user = config['configurations']['ganglia-env']["gmond_user"]
tez_user = config['configurations']['tez-env']["tez_user"]
oozie_user = config['configurations']['oozie-env']["oozie_user"]
falcon_user = config['configurations']['falcon-env']["falcon_user"]
ranger_user = config['configurations']['ranger-env']["ranger_user"]
zeppelin_user = config['configurations']['zeppelin-env']["zeppelin_user"]
zeppelin_group = config['configurations']['zeppelin-env']["zeppelin_group"]
user_group = config['configurations']['cluster-env']['user_group']
# host lists for the services deployed in this cluster (empty when absent)
ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
namenode_host = default("/clusterHostInfo/namenode_host", [])
hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
oozie_servers = default("/clusterHostInfo/oozie_server", [])
falcon_server_hosts = default("/clusterHostInfo/falcon_server_hosts", [])
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
# get the correct version to use for checking stack features
version_for_stack_feature_checks = get_stack_feature_version(config)
# presence flags derived from the host lists above
has_namenode = not len(namenode_host) == 0
has_ganglia_server = not len(ganglia_server_hosts) == 0
has_tez = 'tez-site' in config['configurations']
has_hbase_masters = not len(hbase_master_hosts) == 0
has_oozie_server = not len(oozie_servers) == 0
has_falcon_server_hosts = not len(falcon_server_hosts) == 0
has_ranger_admin = not len(ranger_admin_hosts) == 0
has_zeppelin_master = not len(zeppelin_master_hosts) == 0
stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
# HDFS High Availability properties: HA is assumed when a nameservice declares
# more than one namenode id.
dfs_ha_enabled = False
dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
if dfs_ha_nameservices is None:
  dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.nameservices', None)
dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
if dfs_ha_namenode_ids:
  dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
  dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
  if dfs_ha_namenode_ids_array_len > 1:
    dfs_ha_enabled = True
if has_namenode or dfs_type == 'HCFS':
  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
  hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
hbase_tmp_dir = "/tmp/hbase-hbase"
proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
ranger_group = config['configurations']['ranger-env']['ranger_group']
dfs_cluster_administrators_group = config['configurations']['hdfs-site']["dfs.cluster.administrators"]
sysprep_skip_create_users_and_groups = default("/configurations/cluster-env/sysprep_skip_create_users_and_groups", False)
ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
fetch_nonlocal_groups = config['configurations']['cluster-env']["fetch_nonlocal_groups"]
# per-user scratch directories (used elsewhere for cleanup/creation)
smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
if has_hbase_masters:
  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
# repo params
repo_info = config['hostLevelParams']['repo_info']
service_repo_info = default("/hostLevelParams/service_repo_info",None)
# map each service user to its secondary groups; default is the cluster user_group
user_to_groups_dict = collections.defaultdict(lambda:[user_group])
user_to_groups_dict[smoke_user] = [proxyuser_group]
if has_ganglia_server:
  user_to_groups_dict[gmond_user] = [gmond_user]
  user_to_groups_dict[gmetad_user] = [gmetad_user]
if has_tez:
  user_to_groups_dict[tez_user] = [proxyuser_group]
if has_oozie_server:
  user_to_groups_dict[oozie_user] = [proxyuser_group]
if has_falcon_server_hosts:
  user_to_groups_dict[falcon_user] = [proxyuser_group]
if has_ranger_admin:
  user_to_groups_dict[ranger_user] = [ranger_group]
if has_zeppelin_master:
  user_to_groups_dict[zeppelin_user] = [zeppelin_group, user_group]
# Append new user-group mapping to the dict
# (user_groups is a Python-literal string; iterkeys() implies Python 2 runtime)
try:
  user_group_map = ast.literal_eval(config['hostLevelParams']['user_groups'])
  for key in user_group_map.iterkeys():
    user_to_groups_dict[key] = user_group_map[key]
except ValueError:
  print('User Group mapping (user_group) is missing in the hostLevelParams')
user_to_gid_dict = collections.defaultdict(lambda:user_group)
user_list = json.loads(config['hostLevelParams']['user_list'])
group_list = json.loads(config['hostLevelParams']['group_list'])
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
# if NN HA on secure cluster, access ZooKeeper securely via SASL JAAS config
if stack_supports_zk_security and dfs_ha_enabled and security_enabled:
  hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")
| [
"resource_management.libraries.functions.format_jvm_option",
"resource_management.libraries.script.Script.get_config",
"resource_management.libraries.functions.stack_features.check_stack_feature",
"re.search",
"resource_management.libraries.functions.conf_select.get_hadoop_conf_dir",
"resource_management.... | [((1936, 1955), 'resource_management.libraries.script.Script.get_config', 'Script.get_config', ([], {}), '()\n', (1953, 1955), False, 'from resource_management.libraries.script import Script\n'), ((1966, 1986), 'resource_management.libraries.script.Script.get_tmp_dir', 'Script.get_tmp_dir', ([], {}), '()\n', (1984, 1986), False, 'from resource_management.libraries.script import Script\n'), ((2001, 2024), 'resource_management.libraries.script.Script.get_stack_root', 'Script.get_stack_root', ([], {}), '()\n', (2022, 2024), False, 'from resource_management.libraries.script import Script\n'), ((2041, 2059), 'resource_management.libraries.functions.get_architecture.get_architecture', 'get_architecture', ([], {}), '()\n', (2057, 2059), False, 'from resource_management.libraries.functions.get_architecture import get_architecture\n'), ((2072, 2110), 'resource_management.libraries.functions.default', 'default', (['"""/commandParams/dfs_type"""', '""""""'], {}), "('/commandParams/dfs_type', '')\n", (2079, 2110), False, 'from resource_management.libraries.functions import default\n'), ((2127, 2164), 'resource_management.libraries.functions.format', 'format', (['"""{tmp_dir}/AMBARI-artifacts/"""'], {}), "('{tmp_dir}/AMBARI-artifacts/')\n", (2133, 2164), False, 'from resource_management.libraries.functions import format\n'), ((2176, 2218), 'resource_management.libraries.functions.default', 'default', (['"""/hostLevelParams/jdk_name"""', 'None'], {}), "('/hostLevelParams/jdk_name', None)\n", (2183, 2218), False, 'from resource_management.libraries.functions import default\n'), ((2285, 2329), 'resource_management.libraries.functions.expect.expect', 'expect', (['"""/hostLevelParams/java_version"""', 'int'], {}), "('/hostLevelParams/java_version', int)\n", (2291, 2329), False, 'from resource_management.libraries.functions.expect import expect\n'), ((2423, 2499), 'resource_management.libraries.functions.default', 'default', 
(['"""/configurations/core-site/hadoop.custom-extensions.enabled"""', '(False)'], {}), "('/configurations/core-site/hadoop.custom-extensions.enabled', False)\n", (2430, 2499), False, 'from resource_management.libraries.functions import default\n'), ((2702, 2749), 'resource_management.libraries.functions.version.format_stack_version', 'format_stack_version', (['stack_version_unformatted'], {}), '(stack_version_unformatted)\n', (2722, 2749), False, 'from resource_management.libraries.functions.version import format_stack_version\n'), ((2844, 2883), 'resource_management.libraries.functions.default', 'default', (['"""/commandParams/version"""', 'None'], {}), "('/commandParams/version', None)\n", (2851, 2883), False, 'from resource_management.libraries.functions import default\n'), ((3200, 3263), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hdfs-site/dfs.datanode.address"""', 'None'], {}), "('/configurations/hdfs-site/dfs.datanode.address', None)\n", (3207, 3263), False, 'from resource_management.libraries.functions import default\n'), ((3283, 3351), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hdfs-site/dfs.datanode.http.address"""', 'None'], {}), "('/configurations/hdfs-site/dfs.datanode.http.address', None)\n", (3290, 3351), False, 'from resource_management.libraries.functions import default\n'), ((3372, 3441), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hdfs-site/dfs.datanode.https.address"""', 'None'], {}), "('/configurations/hdfs-site/dfs.datanode.https.address', None)\n", (3379, 3441), False, 'from resource_management.libraries.functions import default\n'), ((3460, 3518), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hdfs-site/dfs.http.policy"""', 'None'], {}), "('/configurations/hdfs-site/dfs.http.policy', None)\n", (3467, 3518), False, 'from resource_management.libraries.functions import default\n'), ((4206, 
4276), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hadoop-env/hdfs_user_nofile_limit"""', '"""128000"""'], {}), "('/configurations/hadoop-env/hdfs_user_nofile_limit', '128000')\n", (4213, 4276), False, 'from resource_management.libraries.functions import default\n'), ((4291, 4326), 'resource_management.libraries.functions.stack_select.get_hadoop_dir', 'stack_select.get_hadoop_dir', (['"""home"""'], {}), "('home')\n", (4318, 4326), False, 'from resource_management.libraries.functions import stack_select\n'), ((4348, 4386), 'resource_management.libraries.functions.stack_select.get_hadoop_dir', 'stack_select.get_hadoop_dir', (['"""libexec"""'], {}), "('libexec')\n", (4375, 4386), False, 'from resource_management.libraries.functions import stack_select\n'), ((4405, 4439), 'resource_management.libraries.functions.stack_select.get_hadoop_dir', 'stack_select.get_hadoop_dir', (['"""lib"""'], {}), "('lib')\n", (4432, 4439), False, 'from resource_management.libraries.functions import stack_select\n'), ((4492, 4538), 'os.path.join', 'os.path.join', (['tmp_dir', '"""hadoop_java_io_tmpdir"""'], {}), "(tmp_dir, 'hadoop_java_io_tmpdir')\n", (4504, 4538), False, 'import os\n'), ((6381, 6458), 'resource_management.libraries.functions.format_jvm_option', 'format_jvm_option', (['"""/configurations/hadoop-env/namenode_opt_permsize"""', '"""128m"""'], {}), "('/configurations/hadoop-env/namenode_opt_permsize', '128m')\n", (6398, 6458), False, 'from resource_management.libraries.functions import format_jvm_option\n'), ((6485, 6570), 'resource_management.libraries.functions.format_jvm_option', 'format_jvm_option', (['"""/configurations/hadoop-env/namenode_opt_maxpermsize"""', '"""256m"""'], {}), "('/configurations/hadoop-env/namenode_opt_maxpermsize', '256m'\n )\n", (6502, 6570), False, 'from resource_management.libraries.functions import format_jvm_option\n'), ((6863, 6955), 'resource_management.libraries.functions.default', 'default', 
(['"""/configurations/mapred-env/mapred_pid_dir_prefix"""', '"""/var/run/hadoop-mapreduce"""'], {}), "('/configurations/mapred-env/mapred_pid_dir_prefix',\n '/var/run/hadoop-mapreduce')\n", (6870, 6955), False, 'from resource_management.libraries.functions import default\n'), ((6975, 7067), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/mapred-env/mapred_log_dir_prefix"""', '"""/var/log/hadoop-mapreduce"""'], {}), "('/configurations/mapred-env/mapred_log_dir_prefix',\n '/var/log/hadoop-mapreduce')\n", (6982, 7067), False, 'from resource_management.libraries.functions import default\n'), ((7927, 7978), 'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/ganglia_server_host"""', '[]'], {}), "('/clusterHostInfo/ganglia_server_host', [])\n", (7934, 7978), False, 'from resource_management.libraries.functions import default\n'), ((7995, 8040), 'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/namenode_host"""', '[]'], {}), "('/clusterHostInfo/namenode_host', [])\n", (8002, 8040), False, 'from resource_management.libraries.functions import default\n'), ((8062, 8112), 'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/hbase_master_hosts"""', '[]'], {}), "('/clusterHostInfo/hbase_master_hosts', [])\n", (8069, 8112), False, 'from resource_management.libraries.functions import default\n'), ((8129, 8173), 'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/oozie_server"""', '[]'], {}), "('/clusterHostInfo/oozie_server', [])\n", (8136, 8173), False, 'from resource_management.libraries.functions import default\n'), ((8196, 8247), 'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/falcon_server_hosts"""', '[]'], {}), "('/clusterHostInfo/falcon_server_hosts', [])\n", (8203, 8247), False, 'from resource_management.libraries.functions import default\n'), ((8269, 8319), 
'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/ranger_admin_hosts"""', '[]'], {}), "('/clusterHostInfo/ranger_admin_hosts', [])\n", (8276, 8319), False, 'from resource_management.libraries.functions import default\n'), ((8344, 8397), 'resource_management.libraries.functions.default', 'default', (['"""/clusterHostInfo/zeppelin_master_hosts"""', '[]'], {}), "('/clusterHostInfo/zeppelin_master_hosts', [])\n", (8351, 8397), False, 'from resource_management.libraries.functions import default\n'), ((8495, 8528), 'resource_management.libraries.functions.stack_features.get_stack_feature_version', 'get_stack_feature_version', (['config'], {}), '(config)\n', (8520, 8528), False, 'from resource_management.libraries.functions.stack_features import get_stack_feature_version\n'), ((8978, 9066), 'resource_management.libraries.functions.stack_features.check_stack_feature', 'check_stack_feature', (['StackFeature.SECURE_ZOOKEEPER', 'version_for_stack_feature_checks'], {}), '(StackFeature.SECURE_ZOOKEEPER,\n version_for_stack_feature_checks)\n', (8997, 9066), False, 'from resource_management.libraries.functions.stack_features import check_stack_feature\n'), ((9145, 9213), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hdfs-site/dfs.internal.nameservices"""', 'None'], {}), "('/configurations/hdfs-site/dfs.internal.nameservices', None)\n", (9152, 9213), False, 'from resource_management.libraries.functions import default\n'), ((9878, 9940), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hadoop-env/proxyuser_group"""', '"""users"""'], {}), "('/configurations/hadoop-env/proxyuser_group', 'users')\n", (9885, 9940), False, 'from resource_management.libraries.functions import default\n'), ((10153, 10239), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/cluster-env/sysprep_skip_create_users_and_groups"""', '(False)'], {}), 
"('/configurations/cluster-env/sysprep_skip_create_users_and_groups',\n False)\n", (10160, 10239), False, 'from resource_management.libraries.functions import default\n'), ((10264, 10335), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/cluster-env/ignore_groupsusers_create"""', '(False)'], {}), "('/configurations/cluster-env/ignore_groupsusers_create', False)\n", (10271, 10335), False, 'from resource_management.libraries.functions import default\n'), ((10444, 10578), 'resource_management.libraries.functions.format', 'format', (['"""/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}"""'], {}), "(\n '/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}'\n )\n", (10450, 10578), False, 'from resource_management.libraries.functions import format\n'), ((10802, 10853), 'resource_management.libraries.functions.default', 'default', (['"""/hostLevelParams/service_repo_info"""', 'None'], {}), "('/hostLevelParams/service_repo_info', None)\n", (10809, 10853), False, 'from resource_management.libraries.functions import default\n'), ((10876, 10922), 'collections.defaultdict', 'collections.defaultdict', (['(lambda : [user_group])'], {}), '(lambda : [user_group])\n', (10899, 10922), False, 'import collections\n'), ((11815, 11859), 'collections.defaultdict', 'collections.defaultdict', (['(lambda : user_group)'], {}), '(lambda : user_group)\n', (11838, 11859), False, 'import collections\n'), ((11871, 11921), 'ambari_simplejson.loads', 'json.loads', (["config['hostLevelParams']['user_list']"], {}), "(config['hostLevelParams']['user_list'])\n", (11881, 11921), True, 'import ambari_simplejson as json\n'), ((11935, 11986), 'ambari_simplejson.loads', 'json.loads', (["config['hostLevelParams']['group_list']"], {}), "(config['hostLevelParams']['group_list'])\n", (11945, 11986), True, 'import ambari_simplejson as json\n'), ((12006, 
12057), 'resource_management.libraries.functions.default', 'default', (['"""/hostLevelParams/host_sys_prepped"""', '(False)'], {}), "('/hostLevelParams/host_sys_prepped', False)\n", (12013, 12057), False, 'from resource_management.libraries.functions import default\n'), ((2790, 2832), 'resource_management.libraries.functions.default', 'default', (['"""/commandParams/upgrade_type"""', '""""""'], {}), "('/commandParams/upgrade_type', '')\n", (2797, 2832), False, 'from resource_management.libraries.functions import default\n'), ((2986, 3015), 'resource_management.libraries.functions.version.format_stack_version', 'format_stack_version', (['version'], {}), '(version)\n', (3006, 3015), False, 'from resource_management.libraries.functions.version import format_stack_version\n'), ((3685, 3748), 're.search', 're.search', (['"""(?:http(?:s)?://)?([\\\\w\\\\d.]*):(\\\\d{1,5})"""', 'address'], {}), "('(?:http(?:s)?://)?([\\\\w\\\\d.]*):(\\\\d{1,5})', address)\n", (3694, 3748), False, 'import re\n'), ((4680, 4766), 'resource_management.libraries.functions.is_empty.is_empty', 'is_empty', (["config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']"], {}), "(config['configurations']['hdfs-site'][\n 'dfs.datanode.max.locked.memory'])\n", (4688, 4766), False, 'from resource_management.libraries.functions.is_empty import is_empty\n'), ((9270, 9329), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/hdfs-site/dfs.nameservices"""', 'None'], {}), "('/configurations/hdfs-site/dfs.nameservices', None)\n", (9277, 9329), False, 'from resource_management.libraries.functions import default\n'), ((9360, 9434), 'resource_management.libraries.functions.format', 'format', (['"""/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"""'], {}), "('/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}')\n", (9366, 9434), False, 'from resource_management.libraries.functions import format\n'), ((9720, 9753), 
'resource_management.libraries.functions.conf_select.get_hadoop_conf_dir', 'conf_select.get_hadoop_conf_dir', ([], {}), '()\n', (9751, 9753), False, 'from resource_management.libraries.functions import conf_select\n'), ((9783, 9822), 'os.path.join', 'os.path.join', (['hadoop_conf_dir', '"""secure"""'], {}), "(hadoop_conf_dir, 'secure')\n", (9795, 9822), False, 'import os\n'), ((10611, 10727), 'resource_management.libraries.functions.format', 'format', (['"""/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}"""'], {}), "(\n '/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}'\n )\n", (10617, 10727), False, 'from resource_management.libraries.functions import format\n'), ((11550, 11608), 'ast.literal_eval', 'ast.literal_eval', (["config['hostLevelParams']['user_groups']"], {}), "(config['hostLevelParams']['user_groups'])\n", (11566, 11608), False, 'import ast\n'), ((12371, 12573), 'resource_management.libraries.functions.format', 'format', (['"""-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client"""'], {}), "(\n '-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client'\n )\n", (12377, 12573), False, 'from resource_management.libraries.functions import format\n'), ((12154, 12213), 'resource_management.libraries.functions.default', 'default', (['"""/configurations/cluster-env/override_uid"""', '"""true"""'], {}), "('/configurations/cluster-env/override_uid', 'true')\n", (12161, 12213), False, 'from resource_management.libraries.functions import default\n')] |
#!/usr/bin/env python3
import csv
import numpy
# Midpoint of the sigmoid activation function.
thr_sig=5.0

def sigmoid(x):
    """Logistic function shifted so that sigmoid(thr_sig) == 0.5."""
    shifted = x - thr_sig
    return 1.0 / (1.0 + numpy.exp(-shifted))
# Main entry point: numerically integrates a rate-model unit driven by two
# plastic input pathways (named "som" and "dnd" — presumably somatic and
# dendritic; TODO confirm), and streams activity, inputs, and weights to CSVs.
if __name__=="__main__":
    #parameters
    time_pitch=1.0 #ms
    save_pitch=10
    save_pitch_weight=1000
    simlen_sec=900.0
    simlen=int(simlen_sec*1000.0/time_pitch) # total number of integration steps
    tauL=10.0 #ms
    phi=80.0/1000.0
    phi_input=80.0/1000.0
    alpha_som=0.5
    alpha_dnd=0.5
    beta_som=0.0
    beta_dnd=0.0
    gamma=1.0
    c0=70.0
    eta_som=0.2
    eta_dnd=0.2
    taudeltaW=1.0*1000.0 #ms
    tau_mean=60.0*1000.0
    eta_Wdecay=1e-7
    # Noise amplitudes are scaled by 1/sqrt(dt) so the injected variance per
    # unit time does not depend on the integration step.
    Wnoise_amp=5e-3/numpy.sqrt(time_pitch)
    som_input_num=50
    dnd_input_num=som_input_num+0
    group1_num=10
    input_src_num=4
    tau_input=10.0 #ms
    input_amp=0.1/numpy.sqrt(time_pitch)
    noise_amp=0.1/numpy.sqrt(time_pitch)
    Winit=5.0
    Wmin=0.0
    E0=0.05
    #variables
    x=0.0
    y=0.0
    Ex=E0
    Ey=E0
    input_src=numpy.zeros(input_src_num)
    som_input_current=numpy.zeros(som_input_num)
    dnd_input_current=numpy.zeros(dnd_input_num)
    som_inputPSC=numpy.zeros(som_input_num)
    dnd_inputPSC=numpy.zeros(dnd_input_num)
    deltaWsom=numpy.zeros(som_input_num)
    deltaWdnd=numpy.zeros(dnd_input_num)
    # Random initial input weights, uniform in [0, Winit).
    Wsom=Winit*(numpy.random.rand(som_input_num))
    Wdnd=Winit*(numpy.random.rand(dnd_input_num))
    #save: CSV writers for activity, inputs, and weight trajectories
    f_activity=open("activity.csv", "w")
    csv_activity=csv.writer(f_activity, delimiter=",")
    f_Wsom=open("Wsom.csv", "w")
    csv_Wsom=csv.writer(f_Wsom, delimiter=",")
    f_Wdnd=open("Wdnd.csv", "w")
    csv_Wdnd=csv.writer(f_Wdnd, delimiter=",")
    f_som_input=open("som_input.csv", "w")
    csv_som_input=csv.writer(f_som_input, delimiter=",")
    f_dnd_input=open("dnd_input.csv", "w")
    csv_dnd_input=csv.writer(f_dnd_input, delimiter=",")
    # Mixing matrices: the first group1_num inputs listen to source 0 (som)
    # and source 1 (dnd); the remaining inputs to source 2 (som) / 3 (dnd).
    som_src=numpy.zeros([som_input_num, input_src_num])
    som_src[:group1_num, 0]=1.0
    som_src[group1_num:, 2]=1.0
    dnd_src=numpy.zeros([dnd_input_num, input_src_num])
    dnd_src[:group1_num,1]=1.0
    dnd_src[group1_num:,3]=1.0
    #simulation
    for t in range(simlen):
        time_sec=float(t)*time_pitch/1000.0
        # Print progress once per simulated second.
        if time_sec==int(time_sec):
            print(time_sec,"sec")
        #source signal: Ornstein-Uhlenbeck-like noisy drive
        input_src=input_src+time_pitch*(-input_src/tau_input+input_amp*numpy.random.randn(input_src_num))
        #inputs: leaky currents mixed from the sources, plus private noise
        som_input_current+=time_pitch*(-som_input_current/tauL+som_src@input_src+noise_amp*numpy.random.randn(som_input_num))
        dnd_input_current+=time_pitch*(-dnd_input_current/tauL+dnd_src@input_src+noise_amp*numpy.random.randn(dnd_input_num))
        som_input=phi_input*sigmoid(som_input_current)
        dnd_input=phi_input*sigmoid(dnd_input_current)
        som_inputPSC+=time_pitch*(-som_inputPSC/tauL+som_input)
        dnd_inputPSC+=time_pitch*(-dnd_inputPSC/tauL+dnd_input)
        #dynamics: x and y are the two compartment activations; z combines
        # them, with y multiplicatively boosting the output of x.
        xprev=x+0.0
        yprev=y+0.0
        Isom=Wsom@som_inputPSC
        Idnd=Wdnd@dnd_inputPSC
        x=sigmoid(Isom+beta_som*yprev)
        y=sigmoid(Idnd+beta_dnd*xprev)
        z=(1.0+gamma*y)*phi*x
        #plasticity
        #som: low-pass-filtered weight change plus noise and slow decay
        Wsom+=time_pitch*(eta_som*deltaWsom+Wnoise_amp*numpy.random.randn(som_input_num)-eta_Wdecay*Wsom)
        Wsom[Wsom<Wmin]=Wmin
        # theta_som is a sliding threshold built from the running average Ex.
        theta_som=c0*Ex*Ex
        deltaWsom+=time_pitch*(-deltaWsom+((1.0-alpha_som)*x*(x-theta_som)+alpha_som*x*y)*(1.0-x)*som_inputPSC)/taudeltaW
        #dnd
        Wdnd+=time_pitch*(eta_dnd*deltaWdnd+Wnoise_amp*numpy.random.randn(dnd_input_num)-eta_Wdecay*Wdnd)
        Wdnd[Wdnd<Wmin]=Wmin
        theta_dnd=c0*Ey*Ey
        deltaWdnd+=time_pitch*(-deltaWdnd+((1.0-alpha_dnd)*y*(y-theta_dnd)+alpha_dnd*x*y)*(1.0-y)*dnd_inputPSC)/taudeltaW
        # Ex/Ey are slow running averages (low-pass filters) of x and y.
        Ex+=time_pitch*(-Ex+x)/tau_mean
        Ey+=time_pitch*(-Ey+y)/tau_mean
        if t%save_pitch==0:
            csv_activity.writerow([time_sec, x, y, z]); f_activity.flush();
            csv_som_input.writerow(numpy.hstack([time_sec, som_input])); f_som_input.flush();
            csv_dnd_input.writerow(numpy.hstack([time_sec, dnd_input])); f_dnd_input.flush();
        if t%save_pitch_weight==0:
            csv_Wsom.writerow(numpy.hstack([time_sec, Wsom])); f_Wsom.flush();
            csv_Wdnd.writerow(numpy.hstack([time_sec, Wdnd])); f_Wdnd.flush();
| [
"numpy.sqrt",
"numpy.random.rand",
"numpy.hstack",
"csv.writer",
"numpy.exp",
"numpy.zeros",
"numpy.random.randn"
] | [((917, 943), 'numpy.zeros', 'numpy.zeros', (['input_src_num'], {}), '(input_src_num)\n', (928, 943), False, 'import numpy\n'), ((966, 992), 'numpy.zeros', 'numpy.zeros', (['som_input_num'], {}), '(som_input_num)\n', (977, 992), False, 'import numpy\n'), ((1015, 1041), 'numpy.zeros', 'numpy.zeros', (['dnd_input_num'], {}), '(dnd_input_num)\n', (1026, 1041), False, 'import numpy\n'), ((1059, 1085), 'numpy.zeros', 'numpy.zeros', (['som_input_num'], {}), '(som_input_num)\n', (1070, 1085), False, 'import numpy\n'), ((1103, 1129), 'numpy.zeros', 'numpy.zeros', (['dnd_input_num'], {}), '(dnd_input_num)\n', (1114, 1129), False, 'import numpy\n'), ((1144, 1170), 'numpy.zeros', 'numpy.zeros', (['som_input_num'], {}), '(som_input_num)\n', (1155, 1170), False, 'import numpy\n'), ((1185, 1211), 'numpy.zeros', 'numpy.zeros', (['dnd_input_num'], {}), '(dnd_input_num)\n', (1196, 1211), False, 'import numpy\n'), ((1381, 1418), 'csv.writer', 'csv.writer', (['f_activity'], {'delimiter': '""","""'}), "(f_activity, delimiter=',')\n", (1391, 1418), False, 'import csv\n'), ((1466, 1499), 'csv.writer', 'csv.writer', (['f_Wsom'], {'delimiter': '""","""'}), "(f_Wsom, delimiter=',')\n", (1476, 1499), False, 'import csv\n'), ((1546, 1579), 'csv.writer', 'csv.writer', (['f_Wdnd'], {'delimiter': '""","""'}), "(f_Wdnd, delimiter=',')\n", (1556, 1579), False, 'import csv\n'), ((1642, 1680), 'csv.writer', 'csv.writer', (['f_som_input'], {'delimiter': '""","""'}), "(f_som_input, delimiter=',')\n", (1652, 1680), False, 'import csv\n'), ((1742, 1780), 'csv.writer', 'csv.writer', (['f_dnd_input'], {'delimiter': '""","""'}), "(f_dnd_input, delimiter=',')\n", (1752, 1780), False, 'import csv\n'), ((1794, 1837), 'numpy.zeros', 'numpy.zeros', (['[som_input_num, input_src_num]'], {}), '([som_input_num, input_src_num])\n', (1805, 1837), False, 'import numpy\n'), ((1915, 1958), 'numpy.zeros', 'numpy.zeros', (['[dnd_input_num, input_src_num]'], {}), '([dnd_input_num, input_src_num])\n', (1926, 1958), 
False, 'import numpy\n'), ((585, 607), 'numpy.sqrt', 'numpy.sqrt', (['time_pitch'], {}), '(time_pitch)\n', (595, 607), False, 'import numpy\n'), ((743, 765), 'numpy.sqrt', 'numpy.sqrt', (['time_pitch'], {}), '(time_pitch)\n', (753, 765), False, 'import numpy\n'), ((784, 806), 'numpy.sqrt', 'numpy.sqrt', (['time_pitch'], {}), '(time_pitch)\n', (794, 806), False, 'import numpy\n'), ((1228, 1260), 'numpy.random.rand', 'numpy.random.rand', (['som_input_num'], {}), '(som_input_num)\n', (1245, 1260), False, 'import numpy\n'), ((1278, 1310), 'numpy.random.rand', 'numpy.random.rand', (['dnd_input_num'], {}), '(dnd_input_num)\n', (1295, 1310), False, 'import numpy\n'), ((97, 122), 'numpy.exp', 'numpy.exp', (['(-(x - thr_sig))'], {}), '(-(x - thr_sig))\n', (106, 122), False, 'import numpy\n'), ((3919, 3954), 'numpy.hstack', 'numpy.hstack', (['[time_sec, som_input]'], {}), '([time_sec, som_input])\n', (3931, 3954), False, 'import numpy\n'), ((4013, 4048), 'numpy.hstack', 'numpy.hstack', (['[time_sec, dnd_input]'], {}), '([time_sec, dnd_input])\n', (4025, 4048), False, 'import numpy\n'), ((4137, 4167), 'numpy.hstack', 'numpy.hstack', (['[time_sec, Wsom]'], {}), '([time_sec, Wsom])\n', (4149, 4167), False, 'import numpy\n'), ((4216, 4246), 'numpy.hstack', 'numpy.hstack', (['[time_sec, Wdnd]'], {}), '([time_sec, Wdnd])\n', (4228, 4246), False, 'import numpy\n'), ((2426, 2459), 'numpy.random.randn', 'numpy.random.randn', (['som_input_num'], {}), '(som_input_num)\n', (2444, 2459), False, 'import numpy\n'), ((2552, 2585), 'numpy.random.randn', 'numpy.random.randn', (['dnd_input_num'], {}), '(dnd_input_num)\n', (2570, 2585), False, 'import numpy\n'), ((2275, 2308), 'numpy.random.randn', 'numpy.random.randn', (['input_src_num'], {}), '(input_src_num)\n', (2293, 2308), False, 'import numpy\n'), ((3169, 3202), 'numpy.random.randn', 'numpy.random.randn', (['som_input_num'], {}), '(som_input_num)\n', (3187, 3202), False, 'import numpy\n'), ((3468, 3501), 'numpy.random.randn', 
'numpy.random.randn', (['dnd_input_num'], {}), '(dnd_input_num)\n', (3486, 3501), False, 'import numpy\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
No need to create a list of fishes and add new one to the end of the list
"""
from collections import defaultdict
def main():
    """Count lanternfish after 256 days (AoC 2021 day 6, part 2).

    Tracks only the number of fish per timer value, so the simulation is
    O(days * 9) regardless of population size.
    """
    input_file = './input.txt'
    counts = defaultdict(int)
    with open(input_file, 'r') as ftr:
        # defaultdict makes the old explicit "not in X.keys()" check redundant.
        for n in map(int, ftr.read().strip().split(',')):
            counts[n] += 1
    for _ in range(256):
        next_counts = defaultdict(int)
        for timer, cnt in counts.items():
            if timer == 0:
                # A fish at 0 resets to 6 and spawns a new fish at 8.
                next_counts[6] += cnt
                next_counts[8] += cnt
            else:
                next_counts[timer - 1] += cnt
        counts = next_counts
    print(sum(counts.values()))


if __name__ == '__main__':
    main()
| [
"collections.defaultdict"
] | [((236, 252), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (247, 252), False, 'from collections import defaultdict\n'), ((442, 458), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (453, 458), False, 'from collections import defaultdict\n')] |
# Copyright 2021 EnICS Labs, Bar-Ilan University.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import sys, os
sys.path.append(os.path.abspath('..'))
from salamandra import *
def main():
    # Standalone-script entry point: run the test and print the Verilog.
    test(is_metatest=False)
def test(is_metatest):
    """Build a wrapper around an AND gate, swap its input nets, and emit Verilog.

    Args:
        is_metatest: When True, skip printing the generated Verilog.

    Returns:
        True on success.
    """
    # Inner 2-input AND gate.
    and_gate = Component('and')
    and_gate.add_pin(Input('A1'))
    and_gate.add_pin(Input('A2'))
    and_gate.add_pin(Output('Z'))

    # Wrapper component that instantiates the AND gate as 'i_and'.
    nand = Component('nand')
    nand.add_pin(Input('A1'))
    nand.add_pin(Input('A2'))
    nand.add_pin(Output('Z'))
    nand.add_subcomponent(and_gate, 'i_and')
    for net, pin in (('A1', 'i_and.A1'), ('A2', 'i_and.A2'), ('Z', 'i_and.Z')):
        nand.connect(net, pin)

    # Swap the nets driving the two inputs of the inner gate.
    net_a1 = nand.get_connected('i_and.A1')
    net_a2 = nand.get_connected('i_and.A2')
    nand.disconnect('i_and.A1')
    nand.disconnect('i_and.A2')
    nand.connect(net_a1, 'i_and.A2')
    nand.connect(net_a2, 'i_and.A1')

    if not is_metatest:
        nand.print_verilog()
    return True
# Allow running this test module directly as a script.
if __name__ == '__main__':
    main()
| [
"os.path.abspath"
] | [((195, 216), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (210, 216), False, 'import sys, os\n')] |
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""View functions to interact with web clients."""
import atexit
import json
import logging
import os
import re
import string
import time
from django import http
from django import shortcuts
from django import template
from django.core import urlresolvers
from makani.analysis.checks import log_util
from makani.avionics.network import message_type as aio_message_type
from makani.avionics.network import network_config
from makani.gs.monitor2.apps.layout import autogen
from makani.gs.monitor2.apps.layout import base as layout_base
from makani.gs.monitor2.apps.layout import layout_util
from makani.gs.monitor2.apps.layout import loader
from makani.gs.monitor2.apps.layout import memory as layout_memory
from makani.gs.monitor2.apps.layout import stoplights
from makani.gs.monitor2.apps.layout import widgets
from makani.gs.monitor2.apps.receiver import receiver_manager
from makani.gs.monitor2.apps.receiver import views as receiver_views
from makani.gs.monitor2.project import settings
from makani.lib.bazel import bazel_util
from makani.lib.python import c_helpers
from makani.lib.python import debug_util
from makani.lib.python import struct_tree
from makani.lib.python.h5_utils import h5_io
import numpy
# EnumHelper wrapping the MessageType enum from the avionics network module.
MESSAGE_TYPE_HELPER = c_helpers.EnumHelper('MessageType', aio_message_type)
# JSON configuration files read by _LoadConfigs, keyed by config name.
CONFIG_FILES = {
    'plot_defs': os.path.join(settings.MONITOR_PATH, 'configs/plot_defs.json'),
}
def Home(request):
  """Render the home page listing all available monitor layouts."""
  names = sorted(loader.LayoutLoader().Names())
  all_layouts = []
  for name in names:
    module_name = loader.LayoutLoader().ModuleName(name)
    all_layouts.append({
        'name': name,
        'url': urlresolvers.reverse('view_aio_layout', args=[module_name]),
    })
  context = {
      'layouts': all_layouts,
      'canvas_cols': settings.CSS_GRID_COLUMNS,
  }
  _CreateAndAddClientIdToContext(context)
  return shortcuts.render(request, 'home.html', context,
                          context_instance=template.RequestContext(request))
def _ListFiles(path_arg):
"""List files under a local path."""
path_template = string.Template(path_arg)
prefix_path = path_template.substitute(os.environ)
sub_paths = os.listdir(prefix_path)
return prefix_path, sub_paths
def _GetFullFilePath(prefix_path, sub_path):
return os.path.join(prefix_path, sub_path)
def SelectAllLogs(request):
  """Select all HDF5 logs in the last visited directory.

  Returns an HttpResponse whose body lists the full path of every
  non-directory '*.h5' file (excluding 'format*' files) under the directory
  stored in the session, separated by semicolons and newlines.
  """
  current_path = request.session['current_path']
  try:
    prefix_path, sub_paths = _ListFiles(current_path)
  except OSError:
    return http.HttpResponse('Cannot list directory "%s"!' % current_path)
  selected = []
  for name in sorted(sub_paths):
    if not name.endswith('.h5') or name.startswith('format'):
      continue
    # Construct the full path and keep only regular files.
    full_path = _GetFullFilePath(prefix_path, name)
    if not os.path.isdir(full_path):
      selected.append(full_path)
  return http.HttpResponse(';\n'.join(selected))
def Console(request, command, args):
  """Run a whitelisted console command for the client.

  Only the 'ls' command is supported.  Directory entries are rendered as
  HTML: sub-directories link to further listing, HDF5 logs link to the log
  visualizer, and everything else is shown as plain text.

  Args:
    request: The HTTP request object.
    command: The command to be run. Only 'ls' is permitted for now.
    args: The string of arguments to the command.

  Returns:
    The HttpResponse telling the output of the command.
  """
  if command != 'ls':
    return http.HttpResponse('Command "%s" is not allowed.' % command)
  arg_path = string.Template(args).safe_substitute(
      {'MAKANI_HOME': bazel_util.GetWorkspaceRoot()})
  try:
    prefix_path, sub_paths = _ListFiles(arg_path)
    request.session['current_path'] = arg_path
  except OSError:
    return http.HttpResponse('Cannot list directory "%s"!' % arg_path)
  entries = []
  for sub_path in sorted(sub_paths):
    # Construct the full path.
    full_path = _GetFullFilePath(prefix_path, sub_path)
    if os.path.isdir(full_path):
      # Directories are clickable to list their contents.
      entries.append(
          '<a href="javascript:void(0)" onclick="onListFiles(\'%s\')">%s</a>'
          % (full_path, sub_path))
    elif sub_path.endswith('.h5') and not sub_path.startswith('format'):
      # HDF5 logs are clickable to visualize them.
      entries.append(
          '<a href="javascript:void(0)" onclick="onAddLog(\'%s\')">%s</a>'
          % (full_path, sub_path))
    else:
      entries.append(sub_path)
  return http.HttpResponse('<br>'.join(entries))
def _GetMinMessageFrequency():
  """Return the lowest positive message frequency (Hz) on the AIO network."""
  config = network_config.NetworkConfig(settings.NETWORK_YAML)
  positive_frequencies = [m.frequency_hz for m in config.all_messages
                          if m.frequency_hz > 0]
  return min(positive_frequencies)
def _TryToEnforceAioReceiver(client_id):
  """Ensure that the client is subscribed to the AioReceiver.

  Args:
    client_id: The ID of the client's browser tab.

  Returns:
    The message receiver for the client, or None if one cannot be obtained.
  """
  # TODO: Investigate always running the AioReceiver.
  receiver = receiver_manager.ReceiverManager.GetReceiver(client_id)
  if receiver:
    return receiver
  started = receiver_manager.ReceiverManager.CheckAndStartAioReceiver(
      client_id, receiver_views.CreateAioReceiver)
  if started:
    # A new AioReceiver was just started.  Wait for twice the longest
    # message period so that no message type is missed.
    time.sleep(2.0 / _GetMinMessageFrequency())
  return receiver_manager.ReceiverManager.GetReceiver(client_id)
def ViewMessageType(request, client_id, message_type,
                    template_name='monitor.html'):
  """View information within a message by automatically generating a layout.

  Args:
    request: An HttpRequest from the client.
    client_id: The ID of the client's browser tab.
    message_type: The Enum name of a message type.
    template_name: The HTML template used to render the layout.

  Returns:
    An HttpResponse rendering the auto-generated layout.
  """
  configs = _LoadConfigs()
  _TryToEnforceAioReceiver(client_id)
  message = _GetMessage(request, client_id, message_type)
  if message:
    message_data = message.Data(convert_to_basic_types=True)
  else:
    message_data = {}
  configs['scenarios'] = autogen.GenerateScenario(message_data, message_type)
  context = _PrepareContext(configs)
  new_client_id = _CreateAndAddClientIdToContext(context)
  context['periodic_url'] = '/dashboard/periodic/msg_enum/%s/%s' % (
      new_client_id, message_type)
  context['content_width'] = settings.CSS_GRID_COLUMNS
  context['order_horizontally'] = True
  return shortcuts.render(request, template_name, context,
                          context_instance=template.RequestContext(request))
def UpdateMessageOptions(unused_request, client_id):
  """Detect what messages have been received and update the client.

  Args:
    unused_request: An HttpRequest from the client (unused).
    client_id: The ID of the client's browser tab.

  Returns:
    An HttpResponse about a dictionary of {message_enum: message_short_name}.
  """
  receiver = _TryToEnforceAioReceiver(client_id)
  if receiver:
    info = receiver.GetReceivedMessageTypes()
  else:
    info = []
  return http.HttpResponse(json.dumps(info))
def ViewAioLayout(request, layout_name):
  """Open a monitor layout that gets its data from AIO.

  Args:
    request: An HttpRequest from the client.
    layout_name: Name of the layout associated with the client.

  Returns:
    An HttpResponse in the format of a serialized JSON object.
  """
  aio_context = {'receiver_type': 'aio'}
  return _ViewLayout(request, layout_name, aio_context)
def BrowseLog(request, path):
  """Browse the log by expanding the field at `path`.

  Args:
    request: An HttpRequest from the client.
    path: A path pointing to one field in the log.

  Returns:
    An HttpResponse serializing a list of names for child fields.
  """
  # The log structure may differ across logs; the first log is always used
  # to construct the structure.
  first_log_path = request.session['log_paths'][0]
  log_data = struct_tree.StructTree(first_log_path, fail_silently=True,
                                 readonly=True)
  try:
    skeleton = log_data.Skeleton(path, depth=1)
  except h5_io.H5IndexError:
    return http.HttpResponse('{}')
  d3_data = struct_tree.DictToD3Tree(skeleton, '.', path)
  if 'children' in d3_data:
    # The first layer is a placeholder; start from the second layer.
    return http.HttpResponse(json.dumps(d3_data['children']))
  return http.HttpResponse('{}')
def ViewLogStructure(request, paths, template_name='log_structure.html'):
  """View structure of an HDF5 log at given log path.

  Args:
    request: An HttpRequest from the client.
    paths: Semicolon-separated paths to the local log files.  $VAR
      references are expanded from the environment, and a basename written
      as "(regex)" selects every file in that directory whose name matches
      the regular expression.
    template_name: The HTML template used to render the layout.

  Returns:
    An HttpResponse that renders the log structure.
  """
  # `context` includes variables used to render the HTML.
  context = {
      'graph_width': 6000,
      'graph_height': 6000,
      'frame_width': 200,
      'frame_height': 540,
      'canvas_cols': 12,
  }
  log_paths = []
  for path in paths.split(';'):
    path = path.strip()
    if not path:
      continue
    path_template = string.Template(path)
    log_path = path_template.substitute(os.environ)
    basename = os.path.basename(log_path)
    if basename.startswith('(') and basename.endswith(')'):
      # A parenthesized basename is a regex matched (anchored at the end)
      # against every file in the directory.
      dirname = os.path.dirname(log_path)
      regex_pattern = re.compile(basename[1:-1]+'$')
      filenames = os.listdir(dirname)
      matched_files = [f for f in filenames if regex_pattern.match(f)]
      log_paths += [os.path.join(dirname, f) for f in matched_files]
    else:
      log_paths.append(log_path)
  if not log_paths:
    context['errors'] = 'Cannot find log data'
  else:
    # Use the first log to index fields.
    log_data = struct_tree.StructTree(
        log_paths[0], fail_silently=True, readonly=True)
    log_skeleton = log_data.Skeleton(depth=1)
    d3_data = struct_tree.DictToD3Tree(log_skeleton, '/')
    # Children are expanded lazily via the browse_log view.
    d3_data['expand_url'] = urlresolvers.reverse('browse_log', args=[''])
    request.session['log_paths'] = log_paths
    context['skeleton'] = json.dumps(d3_data)
  order_horizontally = True
  configs = _LoadConfigs()
  scenarios = layout_base.AssembleLayout([
      ('Signals', [
          widgets.DictLinesWidget('series', None, interactive=True,
                                  use_markers=True),
      ]),
  ], desired_view_cols=1, order_horizontally=order_horizontally)
  layout_names = loader.LayoutLoader().ModuleNames()
  layout_names.sort()
  configs['scenarios'] = scenarios
  context.update(_PrepareContext(configs))
  context['layout_names'] = layout_names
  context['content_width'] = settings.CSS_GRID_COLUMNS - 2
  context['order_horizontally'] = order_horizontally
  _CreateAndAddClientIdToContext(context)
  return shortcuts.render(request, template_name, context,
                          context_instance=template.RequestContext(request))
def PeriodicDataPoll(request, client_id, layout_name):
  """Compute realtime data and respond to periodic polling from a client layout.

  Args:
    request: An HttpRequest from the client.
    client_id: The ID of the client's browser tab.
    layout_name: Name of the layout associated with the client.

  Returns:
    An HttpResponse in the format of a serialized JSON object.
  """
  aggregated_message = _GetMessage(request, client_id)
  if not aggregated_message:
    # Fall back to an empty tree so layout.Filter always gets a StructTree.
    aggregated_message = struct_tree.StructTree(
        {}, fail_silently=True, readonly=True)
  layout = loader.LayoutLoader().GetLayoutByModuleName(layout_name)
  tab_memory = layout_memory.GetMemory(client_id, False)
  if tab_memory is not None:
    # Load the persistent memory.
    layout.Import(tab_memory)
  else:
    layout.Initialize()
    tab_memory = layout_memory.GetMemory(client_id, True)
  # Start the AIO receiver in case the server has restarted.
  _TryToEnforceAioReceiver(client_id)
  try:
    data = layout.Filter(aggregated_message)
  except Exception: # pylint: disable=broad-except
    # layout.Filter may introduce any kind of exception.
    logging.error('PeriodicDataPoll encountered an error:\n%s',
                  debug_util.FormatTraceback())
    # Still persist the layout state before bailing out with an empty reply.
    layout.Export(tab_memory)
    return http.HttpResponse('{}')
  # Save the persistent memory.
  layout.Export(tab_memory)
  resp = data.Json()
  if settings.DEBUG:
    # Attach per-indicator error reports to the response in debug builds.
    resp['__message__'] = '\n-----------------------------\n'.join(
        'Error in indicator "%s":\n%s' % (k, v)
        for k, v in layout.ErrorReport())
  resp_str = json.dumps(resp)
  layout.ClearErrors()
  return http.HttpResponse(resp_str)
def _DownSample(data, length):
window_size = max(1, len(data)/length)
if window_size > 1:
data = data[:len(data) / window_size * window_size]
return numpy.mean(data.reshape(-1, window_size), 1), window_size
else:
return data, 1
def GetLogData(request, mode, fields):
  """Get values of data fields within a log file.

  Args:
    request: An HttpRequest from the client; the session stores the
      selected log paths.
    mode: 'merge' concatenates all logs onto one timeline; anything else
      compares logs against each other (the default behavior).
    fields: Newline-separated field paths to extract.

  Returns:
    An HttpResponse with a JSON object of the form {'series': {...}}.
  """
  log_paths = request.session['log_paths']
  requested_fields = [f.strip() for f in fields.split('\n') if f.strip()]
  field_labels = layout_util.GetDistinguishableNames(
      requested_fields, '.', ['kAioNode', 'kMessageType'])
  if mode == 'merge':
    series = ConcatenateLogData(log_paths, field_labels)
  else:
    # By default, mode = 'compare'.
    series = CompareLogData(log_paths, field_labels)
  return http.HttpResponse(json.dumps({'series': series}))
def _StringReplace(subject, translate):
for s, t in translate:
subject = subject.replace(s, t)
return subject
def GetMessageSnapshot(request, client_id, title):
  """Download the client's current aggregated message as a JSON attachment.

  Args:
    request: An HttpRequest from the client.
    client_id: The ID of the client's browser tab.
    title: A label embedded in the downloaded file's name.

  Returns:
    An HttpResponse serving the snapshot as a plain-text attachment.
  """
  snapshot = _GetMessage(request, client_id).Data(True)
  response = http.HttpResponse(content_type='text/plain')
  response['Content-Disposition'] = (
      'attachment; filename=snapshot_%s.json' % title)
  response.write(json.dumps(snapshot, indent=2))
  return response
def GetRawLogData(request, fields):
  """Get values of data fields within a log file.

  Serves the requested fields from every selected log as a downloadable
  JSON attachment, with log and field names sanitized so they can be used
  as Matlab variable names.

  Args:
    request: An HttpRequest from the client; the session stores the
      selected log paths.
    fields: Newline-separated field paths to extract.

  Returns:
    An HttpResponse serving the JSON data as a plain-text attachment.
  """
  log_paths = request.session['log_paths']
  fields = [f.strip() for f in fields.split('\n') if f.strip()]
  field_labels = layout_util.GetDistinguishableNames(
      fields, '.', ['kAioNode', 'kMessageType'])
  result = {}
  # Remove special characters so variables can be parsed and loaded into Matlab.
  bad_chars = ['.', ',', '-', '+', '(', ')', '[', ']', '{', '}', ':',
               'kMessageType', 'kAioNode', 'messages', 'message']
  replacement = list(zip(bad_chars, ['_'] * len(bad_chars)))
  # Slice markers are removed entirely; everything else becomes '_'.
  replacement = [('[:]', ''), (':,', ''), (' ', '')] + replacement
  for log_path in log_paths:
    base_name = os.path.basename(log_path)
    log_name = 'log_' + _StringReplace(base_name[:base_name.find('.')],
                                       replacement)
    log_data = struct_tree.StructTree(
        log_path, fail_silently=True, readonly=True)
    result[log_name] = {}
    for field, legend_label in field_labels.iteritems():
      data, timestamps = log_util.GetOrderedDedupDataAndTimeByField(
          log_data, field, rebase=False)
      # Fields missing from a log are reported with status 'missing'.
      result[log_name][_StringReplace(legend_label, replacement)] = {
          'values': data.tolist() if data is not None else None,
          'timestamps': timestamps.tolist() if timestamps is not None else None,
          'status': 'success' if data is not None else 'missing',
      }
  response = http.HttpResponse(content_type='text/plain')
  response['Content-Disposition'] = 'attachment; filename=makani_log_data.json'
  response.write(json.dumps(result, indent=2))
  return response
def ConcatenateLogData(log_paths, field_labels):
  """Get series of data, each corresponding to field values in all logs.

  Data for each field is concatenated across all logs onto a single
  timeline rebased to the earliest timestamp seen.

  Args:
    log_paths: Paths of the HDF5 logs to read, in order.
    field_labels: A dict mapping field paths to legend labels.

  Returns:
    A dict mapping legend labels (suffixed with '(/N)' when downsampled by
    a factor of N) to {'x': [...], 'y': [...]} series.
  """
  series = {}
  base_timeline = float('inf')
  for log_path in log_paths:
    log_data = struct_tree.StructTree(
        log_path, fail_silently=True, readonly=True)
    for field, legend_label in field_labels.iteritems():
      data, timestamps = log_util.GetOrderedDedupDataAndTimeByField(
          log_data, field, rebase=False)
      if data is None or timestamps is None:
        continue
      base_timeline = min(base_timeline, float(timestamps[0]))
      if legend_label not in series:
        series[legend_label] = {'x': timestamps, 'y': data}
      else:
        series[legend_label]['x'] = numpy.concatenate(
            (series[legend_label]['x'], timestamps))
        series[legend_label]['y'] = numpy.concatenate(
            (series[legend_label]['y'], data))
  result = {}
  for field, legend_label in field_labels.iteritems():
    # BUG FIX: a field may be missing from every log and thus never enter
    # `series`; indexing it unconditionally raised KeyError.
    if legend_label not in series:
      continue
    timestamps, _ = _DownSample(
        series[legend_label]['x'], settings.MAX_DATA_POINTS_PER_LOG_FIELD)
    data, downsample_rate = _DownSample(
        series[legend_label]['y'], settings.MAX_DATA_POINTS_PER_LOG_FIELD)
    if downsample_rate > 1:
      legend_label += '(/%d)' % downsample_rate
    result[legend_label] = {'x': (timestamps - base_timeline).tolist(),
                            'y': data.tolist()}
  return result
def CompareLogData(log_paths, field_labels):
  """Get series of data, each corresponding to field values within a log.

  Unlike ConcatenateLogData, each (log, field) pair becomes its own series so
  the logs can be compared side by side on a shared, rebased timeline.
  """
  all_series = {}
  earliest_start = float('inf')
  for path in log_paths:
    tree = struct_tree.StructTree(path, fail_silently=True, readonly=True)
    # Strip the file extension (if any) from the log's base name.
    name = os.path.basename(path)
    if '.' in name:
      name = name[:name.rfind('.')]
    for field, label in field_labels.iteritems():
      values, times = log_util.GetOrderedDedupDataAndTimeByField(
          tree, field, rebase=True)
      if values is None or times is None:
        continue
      values, _ = _DownSample(values, settings.MAX_DATA_POINTS_PER_LOG_FIELD)
      times, rate = _DownSample(times, settings.MAX_DATA_POINTS_PER_LOG_FIELD)
      earliest_start = min(earliest_start, float(times[0]))
      key = '%s.%s' % (name, label)
      if rate > 1:
        key += '(/%d)' % rate
      all_series[key] = {'x': times, 'y': values.tolist()}
  # Rebase every timeline to the earliest start and convert to plain lists.
  for key in all_series:
    all_series[key]['x'] = (all_series[key]['x'] - earliest_start).tolist()
  return all_series
def PeriodicMessagePoll(request, client_id, message_type=None):
  """Retrieve realtime data and respond to periodic polling from a message view.

  Args:
    request: An HttpRequest from the client.
    client_id: The ID of the client's browser tab.
    message_type: The Enum name of a message type.

  Returns:
    An HttpResponse in the format of a serialized JSON object.
  """
  message = _GetMessage(request, client_id, message_type)
  # An empty/falsy result becomes an empty JSON object.
  payload = message.Data(convert_to_basic_types=True) if message else {}
  return http.HttpResponse(json.dumps(payload))
def _LoadConfigs():
  """Load default layout configuration parameters.

  Returns:
    A dict mapping each config name in CONFIG_FILES to its parsed JSON content.
  """
  configs = {}
  for cf, filename in CONFIG_FILES.iteritems():
    with open(filename, 'r') as fp:
      configs[cf] = json.load(fp)
  if 'plot_defs' not in configs:
    # Bug fix: the stdlib `logging` module has no `Error` callable, so
    # `logging.Error(...)` raised AttributeError instead of logging.
    logging.error('Missing definitions for plotting javascripts.')
  return configs
def _PrepareContext(configs):
  """Build the template context used to render the monitor layout."""
  scenario = configs['scenarios']
  grid_cols = scenario['canvas']['grid_width']
  row_px = scenario['canvas']['row_height_px']
  templates = set()
  indicators = []
  for stripe in scenario['views']:
    for view in stripe['stripe']:
      # Scale the view's width from stripe units to canvas grid units,
      # rounding to the nearest integer.
      view['canvas_cols'] = int(
          float(view['grid_width']) / stripe['grid_width'] * grid_cols + 0.5)
      for ind in view['indicators']:
        # Height is explicit when 'rows' is given, otherwise auto-sized.
        if 'rows' in ind:
          ind['height'] = str(ind['rows'] * row_px) + 'px'
        else:
          ind['height'] = 'auto'
        if 'cols' not in ind:
          ind['cols'] = grid_cols
        # TODO: Change `id` to 'indicator_id', and 'selector'
        # to 'dom_selector'.
        ind['id'] = 'ui_obj_%s' % len(indicators)
        ind['selector'] = '#%s' % ind['id']
        indicators.append(ind)
        templates.add(ind['template'])
  context = {
      'canvas_cols': grid_cols,
      'fig_templates': templates,
      'plot_defs': configs['plot_defs'],
      'views': scenario['views'],
      'ui_objs_str': json.dumps(indicators),
      'stoplight_error': stoplights.STOPLIGHT_ERROR,
      'stoplight_warning': stoplights.STOPLIGHT_WARNING,
      'stoplight_normal': stoplights.STOPLIGHT_NORMAL,
      'stoplight_unavailable': stoplights.STOPLIGHT_UNAVAILABLE,
      'stoplight_any': stoplights.STOPLIGHT_ANY,
  }
  return context
def _GetMessage(unused_request, client_id, message_type=None):
  """Get a message from the receiver."""
  receiver = receiver_manager.ReceiverManager.GetReceiver(client_id)
  if not receiver:
    # No receiver registered for this client: return an empty tree.
    return struct_tree.StructTree({}, fail_silently=True, readonly=True)
  if message_type is None:
    enum_value = None
  else:
    enum_value = MESSAGE_TYPE_HELPER.Value(message_type)
  return receiver.GetLatest(enum_value)
def _CreateAndAddClientIdToContext(context):
  """Allocate a new client id, record it in `context`, and return it."""
  new_id = receiver_manager.ReceiverManager.GetNewClientId()
  context['client_id'] = new_id
  return new_id
def _ViewLayout(request, layout_name, extra_context=None):
  """Get a monitor layout according to `layout_name`."""
  layout = loader.LayoutLoader().GetLayoutByModuleName(layout_name)
  if layout is None:
    # Unknown layout: bounce the browser back to the home page.
    return http.HttpResponseRedirect(urlresolvers.reverse('home'))
  layout.Initialize()
  configs = _LoadConfigs()
  configs['scenarios'] = layout.Scenario()
  context = _PrepareContext(configs)
  client_id = _CreateAndAddClientIdToContext(context)
  # Persist the layout's state for this client before the first poll.
  layout.Export(layout_memory.GetMemory(client_id, True))
  context.update({
      # URL polled periodically by the rendered page.
      'periodic_url': '/dashboard/periodic/layout/%s/%s' % (client_id,
                                                            layout_name),
      'layout_name': layout_name,
      'content_width': settings.CSS_GRID_COLUMNS,
      'order_horizontally': layout.OrderHorizontally(),
      'default_font_size': layout.DefaultFontSize(),
      'sim_mode': settings.POPULATE_MESSAGES_FROM_SIM,
  })
  if extra_context:
    context.update(extra_context)
  return shortcuts.render(request, 'monitor.html', context,
                          context_instance=template.RequestContext(request))
| [
"logging.Error",
"re.compile",
"makani.lib.python.debug_util.FormatTraceback",
"makani.gs.monitor2.apps.layout.autogen.GenerateScenario",
"django.core.urlresolvers.reverse",
"makani.analysis.checks.log_util.GetOrderedDedupDataAndTimeByField",
"makani.lib.python.c_helpers.EnumHelper",
"os.listdir",
"... | [((1826, 1879), 'makani.lib.python.c_helpers.EnumHelper', 'c_helpers.EnumHelper', (['"""MessageType"""', 'aio_message_type'], {}), "('MessageType', aio_message_type)\n", (1846, 1879), False, 'from makani.lib.python import c_helpers\n'), ((1915, 1976), 'os.path.join', 'os.path.join', (['settings.MONITOR_PATH', '"""configs/plot_defs.json"""'], {}), "(settings.MONITOR_PATH, 'configs/plot_defs.json')\n", (1927, 1976), False, 'import os\n'), ((2696, 2721), 'string.Template', 'string.Template', (['path_arg'], {}), '(path_arg)\n', (2711, 2721), False, 'import string\n'), ((2789, 2812), 'os.listdir', 'os.listdir', (['prefix_path'], {}), '(prefix_path)\n', (2799, 2812), False, 'import os\n'), ((2901, 2936), 'os.path.join', 'os.path.join', (['prefix_path', 'sub_path'], {}), '(prefix_path, sub_path)\n', (2913, 2936), False, 'import os\n'), ((4051, 4072), 'string.Template', 'string.Template', (['args'], {}), '(args)\n', (4066, 4072), False, 'import string\n'), ((5151, 5174), 'django.http.HttpResponse', 'http.HttpResponse', (['text'], {}), '(text)\n', (5168, 5174), False, 'from django import http\n'), ((5279, 5330), 'makani.avionics.network.network_config.NetworkConfig', 'network_config.NetworkConfig', (['settings.NETWORK_YAML'], {}), '(settings.NETWORK_YAML)\n', (5307, 5330), False, 'from makani.avionics.network import network_config\n'), ((5594, 5649), 'makani.gs.monitor2.apps.receiver.receiver_manager.ReceiverManager.GetReceiver', 'receiver_manager.ReceiverManager.GetReceiver', (['client_id'], {}), '(client_id)\n', (5638, 5649), False, 'from makani.gs.monitor2.apps.receiver import receiver_manager\n'), ((6804, 6848), 'makani.gs.monitor2.apps.layout.autogen.GenerateScenario', 'autogen.GenerateScenario', (['resp', 'message_type'], {}), '(resp, message_type)\n', (6828, 6848), False, 'from makani.gs.monitor2.apps.layout import autogen\n'), ((8616, 8683), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['log_path'], {'fail_silently': '(True)', 
'readonly': '(True)'}), '(log_path, fail_silently=True, readonly=True)\n', (8638, 8683), False, 'from makani.lib.python import struct_tree\n'), ((8836, 8888), 'makani.lib.python.struct_tree.DictToD3Tree', 'struct_tree.DictToD3Tree', (['skeleton', '"""."""', 'parent_path'], {}), "(skeleton, '.', parent_path)\n", (8860, 8888), False, 'from makani.lib.python import struct_tree\n'), ((12211, 12252), 'makani.gs.monitor2.apps.layout.memory.GetMemory', 'layout_memory.GetMemory', (['client_id', '(False)'], {}), '(client_id, False)\n', (12234, 12252), True, 'from makani.gs.monitor2.apps.layout import memory as layout_memory\n'), ((13150, 13166), 'json.dumps', 'json.dumps', (['resp'], {}), '(resp)\n', (13160, 13166), False, 'import json\n'), ((13199, 13226), 'django.http.HttpResponse', 'http.HttpResponse', (['resp_str'], {}), '(resp_str)\n', (13216, 13226), False, 'from django import http\n'), ((13694, 13772), 'makani.gs.monitor2.apps.layout.layout_util.GetDistinguishableNames', 'layout_util.GetDistinguishableNames', (['fields', '"""."""', "['kAioNode', 'kMessageType']"], {}), "(fields, '.', ['kAioNode', 'kMessageType'])\n", (13729, 13772), False, 'from makani.gs.monitor2.apps.layout import layout_util\n'), ((14308, 14352), 'django.http.HttpResponse', 'http.HttpResponse', ([], {'content_type': '"""text/plain"""'}), "(content_type='text/plain')\n", (14325, 14352), False, 'from django import http\n'), ((14726, 14804), 'makani.gs.monitor2.apps.layout.layout_util.GetDistinguishableNames', 'layout_util.GetDistinguishableNames', (['fields', '"""."""', "['kAioNode', 'kMessageType']"], {}), "(fields, '.', ['kAioNode', 'kMessageType'])\n", (14761, 14804), False, 'from makani.gs.monitor2.apps.layout import layout_util\n'), ((15957, 16001), 'django.http.HttpResponse', 'http.HttpResponse', ([], {'content_type': '"""text/plain"""'}), "(content_type='text/plain')\n", (15974, 16001), False, 'from django import http\n'), ((19326, 19342), 'json.dumps', 'json.dumps', (['resp'], {}), 
'(resp)\n', (19336, 19342), False, 'import json\n'), ((19352, 19379), 'django.http.HttpResponse', 'http.HttpResponse', (['resp_str'], {}), '(resp_str)\n', (19369, 19379), False, 'from django import http\n'), ((20984, 21003), 'json.dumps', 'json.dumps', (['ui_objs'], {}), '(ui_objs)\n', (20994, 21003), False, 'import json\n'), ((21452, 21507), 'makani.gs.monitor2.apps.receiver.receiver_manager.ReceiverManager.GetReceiver', 'receiver_manager.ReceiverManager.GetReceiver', (['client_id'], {}), '(client_id)\n', (21496, 21507), False, 'from makani.gs.monitor2.apps.receiver import receiver_manager\n'), ((21517, 21578), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['{}'], {'fail_silently': '(True)', 'readonly': '(True)'}), '({}, fail_silently=True, readonly=True)\n', (21539, 21578), False, 'from makani.lib.python import struct_tree\n'), ((21859, 21908), 'makani.gs.monitor2.apps.receiver.receiver_manager.ReceiverManager.GetNewClientId', 'receiver_manager.ReceiverManager.GetNewClientId', ([], {}), '()\n', (21906, 21908), False, 'from makani.gs.monitor2.apps.receiver import receiver_manager\n'), ((4006, 4032), 'django.http.HttpResponse', 'http.HttpResponse', (['message'], {}), '(message)\n', (4023, 4032), False, 'from django import http\n'), ((4513, 4537), 'os.path.isdir', 'os.path.isdir', (['full_path'], {}), '(full_path)\n', (4526, 4537), False, 'import os\n'), ((5684, 5790), 'makani.gs.monitor2.apps.receiver.receiver_manager.ReceiverManager.CheckAndStartAioReceiver', 'receiver_manager.ReceiverManager.CheckAndStartAioReceiver', (['client_id', 'receiver_views.CreateAioReceiver'], {}), '(client_id,\n receiver_views.CreateAioReceiver)\n', (5741, 5790), False, 'from makani.gs.monitor2.apps.receiver import receiver_manager\n'), ((6017, 6072), 'makani.gs.monitor2.apps.receiver.receiver_manager.ReceiverManager.GetReceiver', 'receiver_manager.ReceiverManager.GetReceiver', (['client_id'], {}), '(client_id)\n', (6061, 6072), False, 'from 
makani.gs.monitor2.apps.receiver import receiver_manager\n'), ((7772, 7788), 'json.dumps', 'json.dumps', (['info'], {}), '(info)\n', (7782, 7788), False, 'import json\n'), ((9068, 9091), 'django.http.HttpResponse', 'http.HttpResponse', (['"""{}"""'], {}), "('{}')\n", (9085, 9091), False, 'from django import http\n'), ((9787, 9808), 'string.Template', 'string.Template', (['path'], {}), '(path)\n', (9802, 9808), False, 'import string\n'), ((9876, 9902), 'os.path.basename', 'os.path.basename', (['log_path'], {}), '(log_path)\n', (9892, 9902), False, 'import os\n'), ((10411, 10482), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['log_paths[0]'], {'fail_silently': '(True)', 'readonly': '(True)'}), '(log_paths[0], fail_silently=True, readonly=True)\n', (10433, 10482), False, 'from makani.lib.python import struct_tree\n'), ((10552, 10595), 'makani.lib.python.struct_tree.DictToD3Tree', 'struct_tree.DictToD3Tree', (['log_skeleton', '"""/"""'], {}), "(log_skeleton, '/')\n", (10576, 10595), False, 'from makani.lib.python import struct_tree\n'), ((10624, 10669), 'django.core.urlresolvers.reverse', 'urlresolvers.reverse', (['"""browse_log"""'], {'args': "['']"}), "('browse_log', args=[''])\n", (10644, 10669), False, 'from django.core import urlresolvers\n'), ((10741, 10760), 'json.dumps', 'json.dumps', (['d3_data'], {}), '(d3_data)\n', (10751, 10760), False, 'import json\n'), ((12057, 12118), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['{}'], {'fail_silently': '(True)', 'readonly': '(True)'}), '({}, fail_silently=True, readonly=True)\n', (12079, 12118), False, 'from makani.lib.python import struct_tree\n'), ((12395, 12435), 'makani.gs.monitor2.apps.layout.memory.GetMemory', 'layout_memory.GetMemory', (['client_id', '(True)'], {}), '(client_id, True)\n', (12418, 12435), True, 'from makani.gs.monitor2.apps.layout import memory as layout_memory\n'), ((14008, 14024), 'json.dumps', 'json.dumps', (['resp'], {}), '(resp)\n', (14018, 
14024), False, 'import json\n'), ((14463, 14491), 'json.dumps', 'json.dumps', (['result'], {'indent': '(2)'}), '(result, indent=2)\n', (14473, 14491), False, 'import json\n'), ((15217, 15243), 'os.path.basename', 'os.path.basename', (['log_path'], {}), '(log_path)\n', (15233, 15243), False, 'import os\n'), ((15383, 15450), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['log_path'], {'fail_silently': '(True)', 'readonly': '(True)'}), '(log_path, fail_silently=True, readonly=True)\n', (15405, 15450), False, 'from makani.lib.python import struct_tree\n'), ((16099, 16127), 'json.dumps', 'json.dumps', (['result'], {'indent': '(2)'}), '(result, indent=2)\n', (16109, 16127), False, 'import json\n'), ((16363, 16430), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['log_path'], {'fail_silently': '(True)', 'readonly': '(True)'}), '(log_path, fail_silently=True, readonly=True)\n', (16385, 16430), False, 'from makani.lib.python import struct_tree\n'), ((17770, 17837), 'makani.lib.python.struct_tree.StructTree', 'struct_tree.StructTree', (['log_path'], {'fail_silently': '(True)', 'readonly': '(True)'}), '(log_path, fail_silently=True, readonly=True)\n', (17792, 17837), False, 'from makani.lib.python import struct_tree\n'), ((17862, 17888), 'os.path.basename', 'os.path.basename', (['log_path'], {}), '(log_path)\n', (17878, 17888), False, 'import os\n'), ((19626, 19688), 'logging.Error', 'logging.Error', (['"""Missing definitions for plotting javascripts."""'], {}), "('Missing definitions for plotting javascripts.')\n", (19639, 19688), False, 'import logging\n'), ((22463, 22503), 'makani.gs.monitor2.apps.layout.memory.GetMemory', 'layout_memory.GetMemory', (['client_id', '(True)'], {}), '(client_id, True)\n', (22486, 22503), True, 'from makani.gs.monitor2.apps.layout import memory as layout_memory\n'), ((2063, 2084), 'makani.gs.monitor2.apps.layout.loader.LayoutLoader', 'loader.LayoutLoader', ([], {}), '()\n', (2082, 2084), False, 
'from makani.gs.monitor2.apps.layout import loader\n'), ((2577, 2609), 'django.template.RequestContext', 'template.RequestContext', (['request'], {}), '(request)\n', (2600, 2609), False, 'from django import template\n'), ((3161, 3224), 'django.http.HttpResponse', 'http.HttpResponse', (['(\'Cannot list directory "%s"!\' % current_path)'], {}), '(\'Cannot list directory "%s"!\' % current_path)\n', (3178, 3224), False, 'from django import http\n'), ((4138, 4167), 'makani.lib.bazel.bazel_util.GetWorkspaceRoot', 'bazel_util.GetWorkspaceRoot', ([], {}), '()\n', (4165, 4167), False, 'from makani.lib.bazel import bazel_util\n'), ((4304, 4363), 'django.http.HttpResponse', 'http.HttpResponse', (['(\'Cannot list directory "%s"!\' % arg_path)'], {}), '(\'Cannot list directory "%s"!\' % arg_path)\n', (4321, 4363), False, 'from django import http\n'), ((7244, 7276), 'django.template.RequestContext', 'template.RequestContext', (['request'], {}), '(request)\n', (7267, 7276), False, 'from django import template\n'), ((8779, 8802), 'django.http.HttpResponse', 'http.HttpResponse', (['"""{}"""'], {}), "('{}')\n", (8796, 8802), False, 'from django import http\n'), ((9016, 9047), 'json.dumps', 'json.dumps', (["d3_data['children']"], {}), "(d3_data['children'])\n", (9026, 9047), False, 'import json\n'), ((9979, 10004), 'os.path.dirname', 'os.path.dirname', (['log_path'], {}), '(log_path)\n', (9994, 10004), False, 'import os\n'), ((10027, 10059), 're.compile', 're.compile', (["(basename[1:-1] + '$')"], {}), "(basename[1:-1] + '$')\n", (10037, 10059), False, 'import re\n'), ((10076, 10095), 'os.listdir', 'os.listdir', (['dirname'], {}), '(dirname)\n', (10086, 10095), False, 'import os\n'), ((11093, 11114), 'makani.gs.monitor2.apps.layout.loader.LayoutLoader', 'loader.LayoutLoader', ([], {}), '()\n', (11112, 11114), False, 'from makani.gs.monitor2.apps.layout import loader\n'), ((11526, 11558), 'django.template.RequestContext', 'template.RequestContext', (['request'], {}), '(request)\n', 
(11549, 11558), False, 'from django import template\n'), ((12139, 12160), 'makani.gs.monitor2.apps.layout.loader.LayoutLoader', 'loader.LayoutLoader', ([], {}), '()\n', (12158, 12160), False, 'from makani.gs.monitor2.apps.layout import loader\n'), ((12852, 12875), 'django.http.HttpResponse', 'http.HttpResponse', (['"""{}"""'], {}), "('{}')\n", (12869, 12875), False, 'from django import http\n'), ((15568, 15641), 'makani.analysis.checks.log_util.GetOrderedDedupDataAndTimeByField', 'log_util.GetOrderedDedupDataAndTimeByField', (['log_data', 'field'], {'rebase': '(False)'}), '(log_data, field, rebase=False)\n', (15610, 15641), False, 'from makani.analysis.checks import log_util\n'), ((16522, 16595), 'makani.analysis.checks.log_util.GetOrderedDedupDataAndTimeByField', 'log_util.GetOrderedDedupDataAndTimeByField', (['log_data', 'field'], {'rebase': '(False)'}), '(log_data, field, rebase=False)\n', (16564, 16595), False, 'from makani.analysis.checks import log_util\n'), ((18043, 18115), 'makani.analysis.checks.log_util.GetOrderedDedupDataAndTimeByField', 'log_util.GetOrderedDedupDataAndTimeByField', (['log_data', 'field'], {'rebase': '(True)'}), '(log_data, field, rebase=True)\n', (18085, 18115), False, 'from makani.analysis.checks import log_util\n'), ((19575, 19588), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (19584, 19588), False, 'import json\n'), ((22092, 22113), 'makani.gs.monitor2.apps.layout.loader.LayoutLoader', 'loader.LayoutLoader', ([], {}), '()\n', (22111, 22113), False, 'from makani.gs.monitor2.apps.layout import loader\n'), ((22207, 22235), 'django.core.urlresolvers.reverse', 'urlresolvers.reverse', (['"""home"""'], {}), "('home')\n", (22227, 22235), False, 'from django.core import urlresolvers\n'), ((23146, 23178), 'django.template.RequestContext', 'template.RequestContext', (['request'], {}), '(request)\n', (23169, 23178), False, 'from django import template\n'), ((3453, 3477), 'os.path.isdir', 'os.path.isdir', (['full_path'], {}), 
'(full_path)\n', (3466, 3477), False, 'import os\n'), ((10187, 10211), 'os.path.join', 'os.path.join', (['dirname', 'f'], {}), '(dirname, f)\n', (10199, 10211), False, 'import os\n'), ((12780, 12808), 'makani.lib.python.debug_util.FormatTraceback', 'debug_util.FormatTraceback', ([], {}), '()\n', (12806, 12808), False, 'from makani.lib.python import debug_util\n'), ((16877, 16935), 'numpy.concatenate', 'numpy.concatenate', (["(series[legend_label]['x'], timestamps)"], {}), "((series[legend_label]['x'], timestamps))\n", (16894, 16935), False, 'import numpy\n'), ((16985, 17037), 'numpy.concatenate', 'numpy.concatenate', (["(series[legend_label]['y'], data)"], {}), "((series[legend_label]['y'], data))\n", (17002, 17037), False, 'import numpy\n'), ((10890, 10965), 'makani.gs.monitor2.apps.layout.widgets.DictLinesWidget', 'widgets.DictLinesWidget', (['"""series"""', 'None'], {'interactive': '(True)', 'use_markers': '(True)'}), "('series', None, interactive=True, use_markers=True)\n", (10913, 10965), False, 'from makani.gs.monitor2.apps.layout import widgets\n'), ((2228, 2249), 'makani.gs.monitor2.apps.layout.loader.LayoutLoader', 'loader.LayoutLoader', ([], {}), '()\n', (2247, 2249), False, 'from makani.gs.monitor2.apps.layout import loader\n')] |
"""
This module contains pure functions to parse and craft packets.
"""
from ryu.ofproto import ofproto_v1_4 as ofp
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import arp
from ryu.lib.packet import lldp
from ryu.ofproto import ether as ethertypes
import fabric.flows as flows
def create_lldp(dpid, port_no=ofp.OFPP_FLOOD):
    '''
    Build a single-hop LLDP multicast packet announcing this switch.

    :param dpid: 64bit switch id
    :type dpid: int
    :param port_no: port number
    :type port_no: int
    :returns: binary representation of LLDP packet
    :rtype: `bytearray`
    '''
    eth_header = ethernet.ethernet(
        lldp.LLDP_MAC_NEAREST_BRIDGE,  # single-hop LLDP multicast address
        flows.int_to_mac(dpid),
        ethertypes.ETH_TYPE_LLDP)
    # Chassis and port ids are carried as hex strings of the numeric values.
    lldp_payload = lldp.lldp((
        lldp.ChassisID(subtype=lldp.ChassisID.SUB_LOCALLY_ASSIGNED,
                       chassis_id=hex(dpid)),
        lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                    port_id=hex(port_no)),
        lldp.TTL(ttl=1),
        lldp.End()))
    pkt = packet.Packet()
    pkt.add_protocol(eth_header)
    pkt.add_protocol(lldp_payload)
    pkt.serialize()
    return pkt.data
def create_arp(dl_src, dl_dst, nl_src, nl_dst):
    '''
    Create an ARP reply packet.

    :param dl_src: 48bit MAC address
    :type dl_src: str
    :param dl_dst: 48bit MAC address
    :type dl_dst: str
    :param nl_src: 32bit IP address
    :type nl_src: str
    :param nl_dst: 32bit IP address
    :type nl_dst: str
    :returns: binary representation of ARP packet
    :rtype: `bytearray`
    '''
    pkt = packet.Packet()
    pkt.add_protocol(ethernet.ethernet(ethertype=ethertypes.ETH_TYPE_ARP,
                                      dst=dl_dst,
                                      src=dl_src))
    pkt.add_protocol(arp.arp(opcode=arp.ARP_REPLY,
                             src_mac=dl_src,
                             src_ip=nl_src,
                             dst_mac=dl_dst,
                             dst_ip=nl_dst))
    # Bug fix: Packet.serialize() returns None; the serialized bytes are
    # stored in pkt.data (same pattern as create_lldp above). Returning
    # pkt.serialize() handed callers None instead of the packet bytes.
    pkt.serialize()
    return pkt.data
def parse_lldp(data):
    '''
    Extract peer identity fields from an LLDP packet.

    :param data: binary of a packet to parse
    :type data: `bytearray`
    :returns: dict with "peer_id" and "peer_port" decoded from the
        chassis-id and port-id TLVs (hex strings)
    :rtype: dict
    '''
    lldp_part = packet.Packet(data).get_protocol(lldp.lldp)
    chassis_tlv = lldp_part.tlvs[0]
    port_tlv = lldp_part.tlvs[1]
    return {"peer_id": int(chassis_tlv.chassis_id, 16),
            "peer_port": int(port_tlv.port_id, 16)}
def parse_arp(data):
    '''
    Extract ARP headers from a packet.

    :param data: binary of a packet to parse
    :type data: `bytearray`
    :returns: dict with "opcode", "nl_src" and "nl_dst" from ARP
    :rtype: dict
    '''
    arp_part = packet.Packet(data).get_protocol(arp.arp)
    return {"opcode": arp_part.opcode,
            "nl_src": arp_part.src_ip,
            "nl_dst": arp_part.dst_ip}
def parse(data):
    '''
    Parse Ethernet headers, delegating to ARP/LLDP parsers by ethertype.

    :param data: binary of a packet to parse
    :type data: `bytearray`
    :returns: dict of parsed headers with "dl_src", "dl_dst" and
        "ethertype" at minimum
    :rtype: dict
    '''
    eth = packet.Packet(data).get_protocol(ethernet.ethernet)
    headers = {"dl_src": eth.src,
               "dl_dst": eth.dst,
               "ethertype": eth.ethertype}
    # Higher-layer headers are added only for the protocols we understand.
    if eth.ethertype == ethertypes.ETH_TYPE_ARP:
        headers.update(parse_arp(data))
    elif eth.ethertype == ethertypes.ETH_TYPE_LLDP:
        headers.update(parse_lldp(data))
    return headers
| [
"ryu.lib.packet.packet.Packet",
"ryu.lib.packet.lldp.lldp",
"ryu.lib.packet.lldp.End",
"ryu.lib.packet.lldp.TTL",
"fabric.flows.int_to_mac",
"ryu.lib.packet.arp.arp",
"ryu.lib.packet.ethernet.ethernet"
] | [((622, 637), 'ryu.lib.packet.packet.Packet', 'packet.Packet', ([], {}), '()\n', (635, 637), False, 'from ryu.lib.packet import packet\n'), ((737, 759), 'fabric.flows.int_to_mac', 'flows.int_to_mac', (['dpid'], {}), '(dpid)\n', (753, 759), True, 'import fabric.flows as flows\n'), ((815, 853), 'ryu.lib.packet.ethernet.ethernet', 'ethernet.ethernet', (['dst', 'src', 'ethertype'], {}), '(dst, src, ethertype)\n', (832, 853), False, 'from ryu.lib.packet import ethernet\n'), ((1204, 1219), 'ryu.lib.packet.lldp.lldp', 'lldp.lldp', (['tlvs'], {}), '(tlvs)\n', (1213, 1219), False, 'from ryu.lib.packet import lldp\n'), ((1727, 1742), 'ryu.lib.packet.packet.Packet', 'packet.Packet', ([], {}), '()\n', (1740, 1742), False, 'from ryu.lib.packet import packet\n'), ((2476, 2495), 'ryu.lib.packet.packet.Packet', 'packet.Packet', (['data'], {}), '(data)\n', (2489, 2495), False, 'from ryu.lib.packet import packet\n'), ((2977, 2996), 'ryu.lib.packet.packet.Packet', 'packet.Packet', (['data'], {}), '(data)\n', (2990, 2996), False, 'from ryu.lib.packet import packet\n'), ((3533, 3552), 'ryu.lib.packet.packet.Packet', 'packet.Packet', (['data'], {}), '(data)\n', (3546, 3552), False, 'from ryu.lib.packet import packet\n'), ((1148, 1163), 'ryu.lib.packet.lldp.TTL', 'lldp.TTL', ([], {'ttl': '(1)'}), '(ttl=1)\n', (1156, 1163), False, 'from ryu.lib.packet import lldp\n'), ((1177, 1187), 'ryu.lib.packet.lldp.End', 'lldp.End', ([], {}), '()\n', (1185, 1187), False, 'from ryu.lib.packet import lldp\n'), ((1764, 1840), 'ryu.lib.packet.ethernet.ethernet', 'ethernet.ethernet', ([], {'ethertype': 'ethertypes.ETH_TYPE_ARP', 'dst': 'dl_dst', 'src': 'dl_src'}), '(ethertype=ethertypes.ETH_TYPE_ARP, dst=dl_dst, src=dl_src)\n', (1781, 1840), False, 'from ryu.lib.packet import ethernet\n'), ((1941, 2036), 'ryu.lib.packet.arp.arp', 'arp.arp', ([], {'opcode': 'arp.ARP_REPLY', 'src_mac': 'dl_src', 'src_ip': 'nl_src', 'dst_mac': 'dl_dst', 'dst_ip': 'nl_dst'}), '(opcode=arp.ARP_REPLY, src_mac=dl_src, 
src_ip=nl_src, dst_mac=dl_dst,\n dst_ip=nl_dst)\n', (1948, 2036), False, 'from ryu.lib.packet import arp\n')] |
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.bandits.agents.neural_linucb_agent."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
import tensorflow_probability as tfp
from tf_agents.bandits.agents import neural_linucb_agent
from tf_agents.bandits.agents import utils as bandit_utils
from tf_agents.bandits.drivers import driver_utils
from tf_agents.bandits.networks import global_and_arm_feature_network
from tf_agents.bandits.policies import policy_utilities
from tf_agents.bandits.specs import utils as bandit_spec_utils
from tf_agents.networks import network
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.trajectories import time_step
from tf_agents.utils import common
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import # TF internal
tfd = tfp.distributions
class DummyNet(network.Network):
  """A stateless encoder network: one dense layer with all-ones kernel."""

  def __init__(self, observation_spec, encoding_dim=10):
    super(DummyNet, self).__init__(
        observation_spec, state_spec=(), name='DummyNet')
    context_dim = observation_spec.shape[0]
    kernel_init = tf.compat.v1.initializers.constant(
        np.ones([context_dim, encoding_dim]))
    bias_init = tf.compat.v1.initializers.constant(np.zeros([encoding_dim]))
    # Keep the layers in an attribute so they are tracked (and hence
    # serializable) through the Checkpointable API.
    self._dummy_layers = [
        tf.keras.layers.Dense(encoding_dim,
                              kernel_initializer=kernel_init,
                              bias_initializer=bias_init)
    ]

  def call(self, inputs, step_type=None, network_state=()):
    """Apply the layers in order to the float-cast inputs."""
    del step_type  # Unused.
    outputs = tf.cast(inputs, tf.float32)
    for layer in self._dummy_layers:
      outputs = layer(outputs)
    return outputs, network_state
def test_cases():
  """Shared (batch_size, context_dim) parameterizations for the tests."""
  cases = (
      {'testcase_name': '_batch1_contextdim10',
       'batch_size': 1,
       'context_dim': 10},
      {'testcase_name': '_batch4_contextdim5',
       'batch_size': 4,
       'context_dim': 5},
  )
  return parameterized.named_parameters(*cases)
def _get_initial_and_final_steps(batch_size, context_dim):
  """Build a (FIRST, LAST) time-step pair with deterministic observations."""
  obs_shape = [batch_size, context_dim]
  observation = np.arange(batch_size * context_dim).reshape(obs_shape)
  reward = np.random.uniform(0.0, 1.0, [batch_size])

  def _make_step(step_type, reward_value, obs):
    # All steps share discount 1.0; only type, reward and observation vary.
    return time_step.TimeStep(
        tf.constant(step_type, dtype=tf.int32, shape=[batch_size],
                    name='step_type'),
        tf.constant(reward_value, dtype=tf.float32, shape=[batch_size],
                    name='reward'),
        tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'),
        tf.constant(obs, dtype=tf.float32, shape=obs_shape,
                    name='observation'))

  initial_step = _make_step(time_step.StepType.FIRST, 0.0, observation)
  final_step = _make_step(time_step.StepType.LAST, reward, observation + 100.0)
  return initial_step, final_step
def _get_initial_and_final_steps_with_action_mask(batch_size,
                                                  context_dim,
                                                  num_actions=None):
  """Build a (FIRST, LAST) time-step pair whose observation carries a mask."""
  observation = tf.constant(
      np.arange(batch_size * context_dim).reshape([batch_size, context_dim]),
      dtype=tf.float32)
  # Mask out the i-th action for the i-th batch member.
  mask = 1 - tf.eye(batch_size, num_columns=num_actions, dtype=tf.int32)
  reward = np.random.uniform(0.0, 1.0, [batch_size])

  def _make_step(step_type, reward_value, obs):
    return time_step.TimeStep(
        tf.constant(step_type, dtype=tf.int32, shape=[batch_size],
                    name='step_type'),
        tf.constant(reward_value, dtype=tf.float32, shape=[batch_size],
                    name='reward'),
        tf.constant(1.0, dtype=tf.float32, shape=[batch_size], name='discount'),
        (obs, mask))

  initial_step = _make_step(time_step.StepType.FIRST, 0.0, observation)
  final_step = _make_step(time_step.StepType.LAST, reward, observation + 100.0)
  return initial_step, final_step
def _get_action_step(action):
  """Wrap a raw action in a PolicyStep carrying empty policy info."""
  action_tensor = tf.convert_to_tensor(action)
  return policy_step.PolicyStep(action=action_tensor,
                                info=policy_utilities.PolicyInfo())
def _get_experience(initial_step, action_step, final_step):
  """Build a bandit trajectory and add a singleton 'time' axis throughout."""
  trajectory = driver_utils.trajectory_for_bandit(
      initial_step, action_step, final_step)

  def _add_time_dim(tensor_like):
    # Expand axis 1 so each element has shape [batch, 1, ...].
    return tf.expand_dims(tf.convert_to_tensor(tensor_like), 1)

  return tf.nest.map_structure(_add_time_dim, trajectory)
@test_util.run_all_in_graph_and_eager_modes
class NeuralLinUCBAgentTest(tf.test.TestCase, parameterized.TestCase):
  def setUp(self):
    """Enable TF1-style resource variables before each test."""
    super(NeuralLinUCBAgentTest, self).setUp()
    # NOTE(review): presumably needed so variable updates behave consistently
    # when the class decorator runs these tests in graph mode — confirm.
    tf.compat.v1.enable_resource_variables()
@test_cases()
def testInitializeAgentNumTrainSteps0(self, batch_size, context_dim):
num_actions = 5
observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
time_step_spec = time_step.time_step_spec(observation_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoder = DummyNet(observation_spec)
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=0,
encoding_dim=10,
optimizer=None)
self.evaluate(agent.initialize())
@test_cases()
def testInitializeAgentNumTrainSteps10(self, batch_size, context_dim):
num_actions = 5
observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
time_step_spec = time_step.time_step_spec(observation_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoder = DummyNet(observation_spec)
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=10,
encoding_dim=10,
optimizer=None)
self.evaluate(agent.initialize())
@test_cases()
def testNeuralLinUCBUpdateNumTrainSteps0(self, batch_size=1, context_dim=10):
"""Check NeuralLinUCBAgent updates when behaving like LinUCB."""
# Construct a `Trajectory` for the given action, observation, reward.
num_actions = 5
initial_step, final_step = _get_initial_and_final_steps(
batch_size, context_dim)
action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
action_step = _get_action_step(action)
experience = _get_experience(initial_step, action_step, final_step)
# Construct an agent and perform the update.
observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
time_step_spec = time_step.time_step_spec(observation_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoder = DummyNet(observation_spec)
encoding_dim = 10
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=0,
encoding_dim=encoding_dim,
optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=1e-2))
loss_info = agent.train(experience)
self.evaluate(agent.initialize())
self.evaluate(tf.compat.v1.global_variables_initializer())
self.evaluate(loss_info)
final_a = self.evaluate(agent.cov_matrix)
final_b = self.evaluate(agent.data_vector)
# Compute the expected updated estimates.
observations_list = tf.dynamic_partition(
data=tf.reshape(tf.cast(experience.observation, tf.float64),
[batch_size, context_dim]),
partitions=tf.convert_to_tensor(action),
num_partitions=num_actions)
rewards_list = tf.dynamic_partition(
data=tf.reshape(tf.cast(experience.reward, tf.float64), [batch_size]),
partitions=tf.convert_to_tensor(action),
num_partitions=num_actions)
expected_a_updated_list = []
expected_b_updated_list = []
for _, (observations_for_arm, rewards_for_arm) in enumerate(zip(
observations_list, rewards_list)):
encoded_observations_for_arm, _ = encoder(observations_for_arm)
encoded_observations_for_arm = tf.cast(
encoded_observations_for_arm, dtype=tf.float64)
num_samples_for_arm_current = tf.cast(
tf.shape(rewards_for_arm)[0], tf.float64)
num_samples_for_arm_total = num_samples_for_arm_current
# pylint: disable=cell-var-from-loop
def true_fn():
a_new = tf.matmul(
encoded_observations_for_arm,
encoded_observations_for_arm,
transpose_a=True)
b_new = bandit_utils.sum_reward_weighted_observations(
rewards_for_arm, encoded_observations_for_arm)
return a_new, b_new
def false_fn():
return (tf.zeros([encoding_dim, encoding_dim], dtype=tf.float64),
tf.zeros([encoding_dim], dtype=tf.float64))
a_new, b_new = tf.cond(
tf.squeeze(num_samples_for_arm_total) > 0,
true_fn,
false_fn)
expected_a_updated_list.append(self.evaluate(a_new))
expected_b_updated_list.append(self.evaluate(b_new))
# Check that the actual updated estimates match the expectations.
self.assertAllClose(expected_a_updated_list, final_a)
self.assertAllClose(expected_b_updated_list, final_b)
@test_cases()
def testNeuralLinUCBUpdateDistributed(self, batch_size=1, context_dim=10):
"""Same as above but with distributed LinUCB updates."""
# Construct a `Trajectory` for the given action, observation, reward.
num_actions = 5
initial_step, final_step = _get_initial_and_final_steps(
batch_size, context_dim)
action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
action_step = _get_action_step(action)
experience = _get_experience(initial_step, action_step, final_step)
# Construct an agent and perform the update.
observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
time_step_spec = time_step.time_step_spec(observation_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoder = DummyNet(observation_spec)
encoding_dim = 10
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=0,
encoding_dim=encoding_dim,
optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=1e-2))
self.evaluate(agent.initialize())
self.evaluate(tf.compat.v1.global_variables_initializer())
# Call the distributed LinUCB training instead of agent.train().
train_fn = common.function_in_tf1()(
agent.compute_loss_using_linucb_distributed)
reward = tf.cast(experience.reward, agent._dtype)
loss_info = train_fn(
experience.observation, action, reward, weights=None)
self.evaluate(loss_info)
final_a = self.evaluate(agent.cov_matrix)
final_b = self.evaluate(agent.data_vector)
# Compute the expected updated estimates.
observations_list = tf.dynamic_partition(
data=tf.reshape(tf.cast(experience.observation, tf.float64),
[batch_size, context_dim]),
partitions=tf.convert_to_tensor(action),
num_partitions=num_actions)
rewards_list = tf.dynamic_partition(
data=tf.reshape(tf.cast(experience.reward, tf.float64), [batch_size]),
partitions=tf.convert_to_tensor(action),
num_partitions=num_actions)
expected_a_updated_list = []
expected_b_updated_list = []
for _, (observations_for_arm, rewards_for_arm) in enumerate(zip(
observations_list, rewards_list)):
encoded_observations_for_arm, _ = encoder(observations_for_arm)
encoded_observations_for_arm = tf.cast(
encoded_observations_for_arm, dtype=tf.float64)
num_samples_for_arm_current = tf.cast(
tf.shape(rewards_for_arm)[0], tf.float64)
num_samples_for_arm_total = num_samples_for_arm_current
# pylint: disable=cell-var-from-loop
def true_fn():
a_new = tf.matmul(
encoded_observations_for_arm,
encoded_observations_for_arm,
transpose_a=True)
b_new = bandit_utils.sum_reward_weighted_observations(
rewards_for_arm, encoded_observations_for_arm)
return a_new, b_new
def false_fn():
return (tf.zeros([encoding_dim, encoding_dim], dtype=tf.float64),
tf.zeros([encoding_dim], dtype=tf.float64))
a_new, b_new = tf.cond(
tf.squeeze(num_samples_for_arm_total) > 0,
true_fn,
false_fn)
expected_a_updated_list.append(self.evaluate(a_new))
expected_b_updated_list.append(self.evaluate(b_new))
# Check that the actual updated estimates match the expectations.
self.assertAllClose(expected_a_updated_list, final_a)
self.assertAllClose(expected_b_updated_list, final_b)
@test_cases()
def testNeuralLinUCBUpdateNumTrainSteps10(self, batch_size=1, context_dim=10):
"""Check NeuralLinUCBAgent updates when behaving like eps-greedy."""
# Construct a `Trajectory` for the given action, observation, reward.
num_actions = 5
initial_step, final_step = _get_initial_and_final_steps(
batch_size, context_dim)
action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
action_step = _get_action_step(action)
experience = _get_experience(initial_step, action_step, final_step)
# Construct an agent and perform the update.
observation_spec = tensor_spec.TensorSpec([context_dim], tf.float32)
time_step_spec = time_step.time_step_spec(observation_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoder = DummyNet(observation_spec)
encoding_dim = 10
variable_collection = neural_linucb_agent.NeuralLinUCBVariableCollection(
num_actions, encoding_dim)
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=10,
encoding_dim=encoding_dim,
variable_collection=variable_collection,
optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001))
loss_info, _ = agent.train(experience)
self.evaluate(agent.initialize())
self.evaluate(tf.compat.v1.global_variables_initializer())
loss_value = self.evaluate(loss_info)
self.assertGreater(loss_value, 0.0)
@test_cases()
def testNeuralLinUCBUpdateNumTrainSteps10MaskedActions(
self, batch_size=1, context_dim=10):
"""Check updates when behaving like eps-greedy and using masked actions."""
# Construct a `Trajectory` for the given action, observation, reward.
num_actions = 5
initial_step, final_step = _get_initial_and_final_steps_with_action_mask(
batch_size, context_dim, num_actions)
action = np.random.randint(num_actions, size=batch_size, dtype=np.int32)
action_step = _get_action_step(action)
experience = _get_experience(initial_step, action_step, final_step)
# Construct an agent and perform the update.
observation_spec = (tensor_spec.TensorSpec([context_dim], tf.float32),
tensor_spec.TensorSpec([num_actions], tf.int32))
time_step_spec = time_step.time_step_spec(observation_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoder = DummyNet(observation_spec[0])
encoding_dim = 10
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=10,
encoding_dim=encoding_dim,
optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001),
observation_and_action_constraint_splitter=lambda x: (x[0], x[1]))
loss_info, _ = agent.train(experience)
self.evaluate(agent.initialize())
self.evaluate(tf.compat.v1.global_variables_initializer())
loss_value = self.evaluate(loss_info)
self.assertGreater(loss_value, 0.0)
def testInitializeRestoreVariableCollection(self):
if not tf.executing_eagerly():
self.skipTest('Test only works in eager mode.')
num_actions = 5
encoding_dim = 7
variable_collection = neural_linucb_agent.NeuralLinUCBVariableCollection(
num_actions=num_actions, encoding_dim=encoding_dim)
self.evaluate(tf.compat.v1.global_variables_initializer())
self.evaluate(variable_collection.num_samples_list)
checkpoint = tf.train.Checkpoint(variable_collection=variable_collection)
checkpoint_dir = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_dir, 'checkpoint')
checkpoint.save(file_prefix=checkpoint_prefix)
variable_collection.actions_from_reward_layer.assign(False)
latest_checkpoint = tf.train.latest_checkpoint(checkpoint_dir)
checkpoint_load_status = checkpoint.restore(latest_checkpoint)
self.evaluate(checkpoint_load_status.initialize_or_restore())
self.assertEqual(
self.evaluate(variable_collection.actions_from_reward_layer), True)
def testTrainPerArmAgentWithMask(self):
num_actions = 5
obs_spec = bandit_spec_utils.create_per_arm_observation_spec(
2, 3, num_actions, add_action_mask=True)
time_step_spec = time_step.time_step_spec(obs_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoding_dim = 10
encoder = (
global_and_arm_feature_network.create_feed_forward_common_tower_network(
obs_spec[0], (4, 3), (3, 4), (4, 2), encoding_dim))
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=10,
encoding_dim=encoding_dim,
observation_and_action_constraint_splitter=lambda x: (x[0], x[1]),
accepts_per_arm_features=True,
optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001))
observations = ({
bandit_spec_utils.GLOBAL_FEATURE_KEY:
tf.constant([[1, 2], [3, 4]], dtype=tf.float32),
bandit_spec_utils.PER_ARM_FEATURE_KEY:
tf.cast(
tf.reshape(tf.range(30), shape=[2, 5, 3]), dtype=tf.float32)
}, tf.ones(shape=(2, num_actions), dtype=tf.int32))
actions = np.array([0, 3], dtype=np.int32)
rewards = np.array([0.5, 3.0], dtype=np.float32)
initial_step = time_step.TimeStep(
tf.constant(
time_step.StepType.FIRST,
dtype=tf.int32,
shape=[2],
name='step_type'),
tf.constant(0.0, dtype=tf.float32, shape=[2], name='reward'),
tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
observations)
final_step = time_step.TimeStep(
tf.constant(
time_step.StepType.LAST,
dtype=tf.int32,
shape=[2],
name='step_type'),
tf.constant(rewards, dtype=tf.float32, name='reward'),
tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
observations)
action_step = policy_step.PolicyStep(
action=tf.convert_to_tensor(actions),
info=policy_utilities.PerArmPolicyInfo(
chosen_arm_features=np.array([[1, 2, 3], [3, 2, 1]],
dtype=np.float32)))
experience = _get_experience(initial_step, action_step, final_step)
loss_info, _ = agent.train(experience, None)
self.evaluate(tf.compat.v1.initialize_all_variables())
loss_value = self.evaluate(loss_info)
self.assertGreater(loss_value, 0.0)
def testTrainPerArmAgentVariableActions(self):
num_actions = 5
obs_spec = bandit_spec_utils.create_per_arm_observation_spec(
2, 3, num_actions, add_num_actions_feature=True)
time_step_spec = time_step.time_step_spec(obs_spec)
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=num_actions - 1)
encoding_dim = 10
encoder = (
global_and_arm_feature_network.create_feed_forward_common_tower_network(
obs_spec, (4, 3), (3, 4), (4, 2), encoding_dim))
agent = neural_linucb_agent.NeuralLinUCBAgent(
time_step_spec=time_step_spec,
action_spec=action_spec,
encoding_network=encoder,
encoding_network_num_train_steps=10,
encoding_dim=encoding_dim,
accepts_per_arm_features=True,
optimizer=tf.compat.v1.train.AdamOptimizer(learning_rate=0.001))
observations = {
bandit_spec_utils.GLOBAL_FEATURE_KEY:
tf.constant([[1, 2], [3, 4]], dtype=tf.float32),
bandit_spec_utils.PER_ARM_FEATURE_KEY:
tf.cast(
tf.reshape(tf.range(30), shape=[2, 5, 3]), dtype=tf.float32),
bandit_spec_utils.NUM_ACTIONS_FEATURE_KEY:
tf.constant([3, 4], dtype=tf.int32)
}
actions = np.array([0, 3], dtype=np.int32)
rewards = np.array([0.5, 3.0], dtype=np.float32)
initial_step = time_step.TimeStep(
tf.constant(
time_step.StepType.FIRST,
dtype=tf.int32,
shape=[2],
name='step_type'),
tf.constant(0.0, dtype=tf.float32, shape=[2], name='reward'),
tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
observations)
final_step = time_step.TimeStep(
tf.constant(
time_step.StepType.LAST,
dtype=tf.int32,
shape=[2],
name='step_type'),
tf.constant(rewards, dtype=tf.float32, name='reward'),
tf.constant(1.0, dtype=tf.float32, shape=[2], name='discount'),
observations)
action_step = policy_step.PolicyStep(
action=tf.convert_to_tensor(actions),
info=policy_utilities.PerArmPolicyInfo(
chosen_arm_features=np.array([[1, 2, 3], [3, 2, 1]],
dtype=np.float32)))
experience = _get_experience(initial_step, action_step, final_step)
loss_info, _ = agent.train(experience, None)
self.evaluate(tf.compat.v1.initialize_all_variables())
loss_value = self.evaluate(loss_info)
self.assertGreater(loss_value, 0.0)
if __name__ == '__main__':
  # tf.test.main() discovers and runs the TestCase classes in this module.
  tf.test.main()
| [
"tensorflow.train.Checkpoint",
"tensorflow.shape",
"tensorflow.compat.v1.train.AdamOptimizer",
"tf_agents.specs.tensor_spec.BoundedTensorSpec",
"numpy.array",
"tf_agents.trajectories.time_step.time_step_spec",
"tensorflow.cast",
"tensorflow.compat.v1.global_variables_initializer",
"tensorflow.eye",
... | [((2541, 2735), 'absl.testing.parameterized.named_parameters', 'parameterized.named_parameters', (["{'testcase_name': '_batch1_contextdim10', 'batch_size': 1, 'context_dim': 10}", "{'testcase_name': '_batch4_contextdim5', 'batch_size': 4, 'context_dim': 5}"], {}), "({'testcase_name': '_batch1_contextdim10',\n 'batch_size': 1, 'context_dim': 10}, {'testcase_name':\n '_batch4_contextdim5', 'batch_size': 4, 'context_dim': 5})\n", (2571, 2735), False, 'from absl.testing import parameterized\n'), ((2985, 3026), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(1.0)', '[batch_size]'], {}), '(0.0, 1.0, [batch_size])\n', (3002, 3026), True, 'import numpy as np\n'), ((4249, 4291), 'tensorflow.constant', 'tf.constant', (['observation'], {'dtype': 'tf.float32'}), '(observation, dtype=tf.float32)\n', (4260, 4291), True, 'import tensorflow as tf\n'), ((4376, 4417), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(1.0)', '[batch_size]'], {}), '(0.0, 1.0, [batch_size])\n', (4393, 4417), True, 'import numpy as np\n'), ((5414, 5487), 'tf_agents.bandits.drivers.driver_utils.trajectory_for_bandit', 'driver_utils.trajectory_for_bandit', (['initial_step', 'action_step', 'final_step'], {}), '(initial_step, action_step, final_step)\n', (5448, 5487), False, 'from tf_agents.bandits.drivers import driver_utils\n'), ((23937, 23951), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (23949, 23951), True, 'import tensorflow as tf\n'), ((2385, 2412), 'tensorflow.cast', 'tf.cast', (['inputs', 'tf.float32'], {}), '(inputs, tf.float32)\n', (2392, 2412), True, 'import tensorflow as tf\n'), ((3070, 3165), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.FIRST'], {'dtype': 'tf.int32', 'shape': '[batch_size]', 'name': '"""step_type"""'}), "(time_step.StepType.FIRST, dtype=tf.int32, shape=[batch_size],\n name='step_type')\n", (3081, 3165), True, 'import tensorflow as tf\n'), ((3190, 3259), 'tensorflow.constant', 'tf.constant', (['(0.0)'], {'dtype': 
'tf.float32', 'shape': '[batch_size]', 'name': '"""reward"""'}), "(0.0, dtype=tf.float32, shape=[batch_size], name='reward')\n", (3201, 3259), True, 'import tensorflow as tf\n'), ((3267, 3338), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[batch_size], name='discount')\n", (3278, 3338), True, 'import tensorflow as tf\n'), ((3346, 3445), 'tensorflow.constant', 'tf.constant', (['observation'], {'dtype': 'tf.float32', 'shape': '[batch_size, context_dim]', 'name': '"""observation"""'}), "(observation, dtype=tf.float32, shape=[batch_size, context_dim],\n name='observation')\n", (3357, 3445), True, 'import tensorflow as tf\n'), ((3502, 3596), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.LAST'], {'dtype': 'tf.int32', 'shape': '[batch_size]', 'name': '"""step_type"""'}), "(time_step.StepType.LAST, dtype=tf.int32, shape=[batch_size],\n name='step_type')\n", (3513, 3596), True, 'import tensorflow as tf\n'), ((3621, 3693), 'tensorflow.constant', 'tf.constant', (['reward'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""reward"""'}), "(reward, dtype=tf.float32, shape=[batch_size], name='reward')\n", (3632, 3693), True, 'import tensorflow as tf\n'), ((3701, 3772), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[batch_size], name='discount')\n", (3712, 3772), True, 'import tensorflow as tf\n'), ((3780, 3887), 'tensorflow.constant', 'tf.constant', (['(observation + 100.0)'], {'dtype': 'tf.float32', 'shape': '[batch_size, context_dim]', 'name': '"""observation"""'}), "(observation + 100.0, dtype=tf.float32, shape=[batch_size,\n context_dim], name='observation')\n", (3791, 3887), True, 'import tensorflow as tf\n'), ((4305, 4364), 'tensorflow.eye', 'tf.eye', (['batch_size'], {'num_columns': 'num_actions', 'dtype': 'tf.int32'}), 
'(batch_size, num_columns=num_actions, dtype=tf.int32)\n', (4311, 4364), True, 'import tensorflow as tf\n'), ((4461, 4556), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.FIRST'], {'dtype': 'tf.int32', 'shape': '[batch_size]', 'name': '"""step_type"""'}), "(time_step.StepType.FIRST, dtype=tf.int32, shape=[batch_size],\n name='step_type')\n", (4472, 4556), True, 'import tensorflow as tf\n'), ((4601, 4670), 'tensorflow.constant', 'tf.constant', (['(0.0)'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""reward"""'}), "(0.0, dtype=tf.float32, shape=[batch_size], name='reward')\n", (4612, 4670), True, 'import tensorflow as tf\n'), ((4678, 4749), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[batch_size], name='discount')\n", (4689, 4749), True, 'import tensorflow as tf\n'), ((4819, 4913), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.LAST'], {'dtype': 'tf.int32', 'shape': '[batch_size]', 'name': '"""step_type"""'}), "(time_step.StepType.LAST, dtype=tf.int32, shape=[batch_size],\n name='step_type')\n", (4830, 4913), True, 'import tensorflow as tf\n'), ((4958, 5030), 'tensorflow.constant', 'tf.constant', (['reward'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""reward"""'}), "(reward, dtype=tf.float32, shape=[batch_size], name='reward')\n", (4969, 5030), True, 'import tensorflow as tf\n'), ((5038, 5109), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[batch_size]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[batch_size], name='discount')\n", (5049, 5109), True, 'import tensorflow as tf\n'), ((5829, 5869), 'tensorflow.compat.v1.enable_resource_variables', 'tf.compat.v1.enable_resource_variables', ([], {}), '()\n', (5867, 5869), True, 'import tensorflow as tf\n'), ((6002, 6051), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', 
(['[context_dim]', 'tf.float32'], {}), '([context_dim], tf.float32)\n', (6024, 6051), False, 'from tf_agents.specs import tensor_spec\n'), ((6073, 6115), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['observation_spec'], {}), '(observation_spec)\n', (6097, 6115), False, 'from tf_agents.trajectories import time_step\n'), ((6134, 6230), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (6163, 6230), False, 'from tf_agents.specs import tensor_spec\n'), ((6289, 6485), 'tf_agents.bandits.agents.neural_linucb_agent.NeuralLinUCBAgent', 'neural_linucb_agent.NeuralLinUCBAgent', ([], {'time_step_spec': 'time_step_spec', 'action_spec': 'action_spec', 'encoding_network': 'encoder', 'encoding_network_num_train_steps': '(0)', 'encoding_dim': '(10)', 'optimizer': 'None'}), '(time_step_spec=time_step_spec,\n action_spec=action_spec, encoding_network=encoder,\n encoding_network_num_train_steps=0, encoding_dim=10, optimizer=None)\n', (6326, 6485), False, 'from tf_agents.bandits.agents import neural_linucb_agent\n'), ((6698, 6747), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['[context_dim]', 'tf.float32'], {}), '([context_dim], tf.float32)\n', (6720, 6747), False, 'from tf_agents.specs import tensor_spec\n'), ((6769, 6811), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['observation_spec'], {}), '(observation_spec)\n', (6793, 6811), False, 'from tf_agents.trajectories import time_step\n'), ((6830, 6926), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (6859, 6926), False, 'from tf_agents.specs import 
tensor_spec\n'), ((6985, 7182), 'tf_agents.bandits.agents.neural_linucb_agent.NeuralLinUCBAgent', 'neural_linucb_agent.NeuralLinUCBAgent', ([], {'time_step_spec': 'time_step_spec', 'action_spec': 'action_spec', 'encoding_network': 'encoder', 'encoding_network_num_train_steps': '(10)', 'encoding_dim': '(10)', 'optimizer': 'None'}), '(time_step_spec=time_step_spec,\n action_spec=action_spec, encoding_network=encoder,\n encoding_network_num_train_steps=10, encoding_dim=10, optimizer=None)\n', (7022, 7182), False, 'from tf_agents.bandits.agents import neural_linucb_agent\n'), ((7630, 7693), 'numpy.random.randint', 'np.random.randint', (['num_actions'], {'size': 'batch_size', 'dtype': 'np.int32'}), '(num_actions, size=batch_size, dtype=np.int32)\n', (7647, 7693), True, 'import numpy as np\n'), ((7882, 7931), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['[context_dim]', 'tf.float32'], {}), '([context_dim], tf.float32)\n', (7904, 7931), False, 'from tf_agents.specs import tensor_spec\n'), ((7953, 7995), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['observation_spec'], {}), '(observation_spec)\n', (7977, 7995), False, 'from tf_agents.trajectories import time_step\n'), ((8014, 8110), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (8043, 8110), False, 'from tf_agents.specs import tensor_spec\n'), ((11064, 11127), 'numpy.random.randint', 'np.random.randint', (['num_actions'], {'size': 'batch_size', 'dtype': 'np.int32'}), '(num_actions, size=batch_size, dtype=np.int32)\n', (11081, 11127), True, 'import numpy as np\n'), ((11316, 11365), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['[context_dim]', 'tf.float32'], {}), '([context_dim], tf.float32)\n', (11338, 11365), False, 'from tf_agents.specs 
import tensor_spec\n'), ((11387, 11429), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['observation_spec'], {}), '(observation_spec)\n', (11411, 11429), False, 'from tf_agents.trajectories import time_step\n'), ((11448, 11544), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (11477, 11544), False, 'from tf_agents.specs import tensor_spec\n'), ((12198, 12238), 'tensorflow.cast', 'tf.cast', (['experience.reward', 'agent._dtype'], {}), '(experience.reward, agent._dtype)\n', (12205, 12238), True, 'import tensorflow as tf\n'), ((14779, 14842), 'numpy.random.randint', 'np.random.randint', (['num_actions'], {'size': 'batch_size', 'dtype': 'np.int32'}), '(num_actions, size=batch_size, dtype=np.int32)\n', (14796, 14842), True, 'import numpy as np\n'), ((15031, 15080), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['[context_dim]', 'tf.float32'], {}), '([context_dim], tf.float32)\n', (15053, 15080), False, 'from tf_agents.specs import tensor_spec\n'), ((15102, 15144), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['observation_spec'], {}), '(observation_spec)\n', (15126, 15144), False, 'from tf_agents.trajectories import time_step\n'), ((15163, 15259), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (15192, 15259), False, 'from tf_agents.specs import tensor_spec\n'), ((15353, 15430), 'tf_agents.bandits.agents.neural_linucb_agent.NeuralLinUCBVariableCollection', 'neural_linucb_agent.NeuralLinUCBVariableCollection', (['num_actions', 'encoding_dim'], {}), '(num_actions, encoding_dim)\n', (15403, 
15430), False, 'from tf_agents.bandits.agents import neural_linucb_agent\n'), ((16456, 16519), 'numpy.random.randint', 'np.random.randint', (['num_actions'], {'size': 'batch_size', 'dtype': 'np.int32'}), '(num_actions, size=batch_size, dtype=np.int32)\n', (16473, 16519), True, 'import numpy as np\n'), ((16854, 16896), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['observation_spec'], {}), '(observation_spec)\n', (16878, 16896), False, 'from tf_agents.trajectories import time_step\n'), ((16915, 17011), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (16944, 17011), False, 'from tf_agents.specs import tensor_spec\n'), ((17904, 18010), 'tf_agents.bandits.agents.neural_linucb_agent.NeuralLinUCBVariableCollection', 'neural_linucb_agent.NeuralLinUCBVariableCollection', ([], {'num_actions': 'num_actions', 'encoding_dim': 'encoding_dim'}), '(num_actions=num_actions,\n encoding_dim=encoding_dim)\n', (17954, 18010), False, 'from tf_agents.bandits.agents import neural_linucb_agent\n'), ((18152, 18212), 'tensorflow.train.Checkpoint', 'tf.train.Checkpoint', ([], {'variable_collection': 'variable_collection'}), '(variable_collection=variable_collection)\n', (18171, 18212), True, 'import tensorflow as tf\n'), ((18278, 18320), 'os.path.join', 'os.path.join', (['checkpoint_dir', '"""checkpoint"""'], {}), "(checkpoint_dir, 'checkpoint')\n", (18290, 18320), False, 'import os\n'), ((18462, 18504), 'tensorflow.train.latest_checkpoint', 'tf.train.latest_checkpoint', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (18488, 18504), True, 'import tensorflow as tf\n'), ((18814, 18908), 'tf_agents.bandits.specs.utils.create_per_arm_observation_spec', 'bandit_spec_utils.create_per_arm_observation_spec', (['(2)', '(3)', 'num_actions'], {'add_action_mask': 
'(True)'}), '(2, 3, num_actions,\n add_action_mask=True)\n', (18863, 18908), True, 'from tf_agents.bandits.specs import utils as bandit_spec_utils\n'), ((18935, 18969), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['obs_spec'], {}), '(obs_spec)\n', (18959, 18969), False, 'from tf_agents.trajectories import time_step\n'), ((18988, 19084), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (19017, 19084), False, 'from tf_agents.specs import tensor_spec\n'), ((19135, 19262), 'tf_agents.bandits.networks.global_and_arm_feature_network.create_feed_forward_common_tower_network', 'global_and_arm_feature_network.create_feed_forward_common_tower_network', (['obs_spec[0]', '(4, 3)', '(3, 4)', '(4, 2)', 'encoding_dim'], {}), '(\n obs_spec[0], (4, 3), (3, 4), (4, 2), encoding_dim)\n', (19206, 19262), False, 'from tf_agents.bandits.networks import global_and_arm_feature_network\n'), ((20040, 20072), 'numpy.array', 'np.array', (['[0, 3]'], {'dtype': 'np.int32'}), '([0, 3], dtype=np.int32)\n', (20048, 20072), True, 'import numpy as np\n'), ((20087, 20125), 'numpy.array', 'np.array', (['[0.5, 3.0]'], {'dtype': 'np.float32'}), '([0.5, 3.0], dtype=np.float32)\n', (20095, 20125), True, 'import numpy as np\n'), ((21413, 21515), 'tf_agents.bandits.specs.utils.create_per_arm_observation_spec', 'bandit_spec_utils.create_per_arm_observation_spec', (['(2)', '(3)', 'num_actions'], {'add_num_actions_feature': '(True)'}), '(2, 3, num_actions,\n add_num_actions_feature=True)\n', (21462, 21515), True, 'from tf_agents.bandits.specs import utils as bandit_spec_utils\n'), ((21542, 21576), 'tf_agents.trajectories.time_step.time_step_spec', 'time_step.time_step_spec', (['obs_spec'], {}), '(obs_spec)\n', (21566, 21576), False, 'from tf_agents.trajectories import 
time_step\n'), ((21595, 21691), 'tf_agents.specs.tensor_spec.BoundedTensorSpec', 'tensor_spec.BoundedTensorSpec', ([], {'dtype': 'tf.int32', 'shape': '()', 'minimum': '(0)', 'maximum': '(num_actions - 1)'}), '(dtype=tf.int32, shape=(), minimum=0, maximum=\n num_actions - 1)\n', (21624, 21691), False, 'from tf_agents.specs import tensor_spec\n'), ((21742, 21866), 'tf_agents.bandits.networks.global_and_arm_feature_network.create_feed_forward_common_tower_network', 'global_and_arm_feature_network.create_feed_forward_common_tower_network', (['obs_spec', '(4, 3)', '(3, 4)', '(4, 2)', 'encoding_dim'], {}), '(\n obs_spec, (4, 3), (3, 4), (4, 2), encoding_dim)\n', (21813, 21866), False, 'from tf_agents.bandits.networks import global_and_arm_feature_network\n'), ((22618, 22650), 'numpy.array', 'np.array', (['[0, 3]'], {'dtype': 'np.int32'}), '([0, 3], dtype=np.int32)\n', (22626, 22650), True, 'import numpy as np\n'), ((22665, 22703), 'numpy.array', 'np.array', (['[0.5, 3.0]'], {'dtype': 'np.float32'}), '([0.5, 3.0], dtype=np.float32)\n', (22673, 22703), True, 'import numpy as np\n'), ((5258, 5286), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['action'], {}), '(action)\n', (5278, 5286), True, 'import tensorflow as tf\n'), ((5299, 5328), 'tf_agents.bandits.policies.policy_utilities.PolicyInfo', 'policy_utilities.PolicyInfo', ([], {}), '()\n', (5326, 5328), False, 'from tf_agents.bandits.policies import policy_utilities\n'), ((8583, 8626), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (8624, 8626), True, 'import tensorflow as tf\n'), ((9540, 9595), 'tensorflow.cast', 'tf.cast', (['encoded_observations_for_arm'], {'dtype': 'tf.float64'}), '(encoded_observations_for_arm, dtype=tf.float64)\n', (9547, 9595), True, 'import tensorflow as tf\n'), ((11977, 12020), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (12018, 12020), True, 
'import tensorflow as tf\n'), ((12106, 12130), 'tf_agents.utils.common.function_in_tf1', 'common.function_in_tf1', ([], {}), '()\n', (12128, 12130), False, 'from tf_agents.utils import common\n'), ((13239, 13294), 'tensorflow.cast', 'tf.cast', (['encoded_observations_for_arm'], {'dtype': 'tf.float64'}), '(encoded_observations_for_arm, dtype=tf.float64)\n', (13246, 13294), True, 'import tensorflow as tf\n'), ((15899, 15942), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (15940, 15942), True, 'import tensorflow as tf\n'), ((16709, 16758), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['[context_dim]', 'tf.float32'], {}), '([context_dim], tf.float32)\n', (16731, 16758), False, 'from tf_agents.specs import tensor_spec\n'), ((16784, 16831), 'tf_agents.specs.tensor_spec.TensorSpec', 'tensor_spec.TensorSpec', (['[num_actions]', 'tf.int32'], {}), '([num_actions], tf.int32)\n', (16806, 16831), False, 'from tf_agents.specs import tensor_spec\n'), ((17567, 17610), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (17608, 17610), True, 'import tensorflow as tf\n'), ((17759, 17781), 'tensorflow.executing_eagerly', 'tf.executing_eagerly', ([], {}), '()\n', (17779, 17781), True, 'import tensorflow as tf\n'), ((18034, 18077), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (18075, 18077), True, 'import tensorflow as tf\n'), ((19977, 20024), 'tensorflow.ones', 'tf.ones', ([], {'shape': '(2, num_actions)', 'dtype': 'tf.int32'}), '(shape=(2, num_actions), dtype=tf.int32)\n', (19984, 20024), True, 'import tensorflow as tf\n'), ((20173, 20260), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.FIRST'], {'dtype': 'tf.int32', 'shape': '[2]', 'name': '"""step_type"""'}), "(time_step.StepType.FIRST, dtype=tf.int32, shape=[2], name=\n 'step_type')\n", 
(20184, 20260), True, 'import tensorflow as tf\n'), ((20314, 20374), 'tensorflow.constant', 'tf.constant', (['(0.0)'], {'dtype': 'tf.float32', 'shape': '[2]', 'name': '"""reward"""'}), "(0.0, dtype=tf.float32, shape=[2], name='reward')\n", (20325, 20374), True, 'import tensorflow as tf\n'), ((20384, 20446), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[2]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[2], name='discount')\n", (20395, 20446), True, 'import tensorflow as tf\n'), ((20515, 20601), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.LAST'], {'dtype': 'tf.int32', 'shape': '[2]', 'name': '"""step_type"""'}), "(time_step.StepType.LAST, dtype=tf.int32, shape=[2], name=\n 'step_type')\n", (20526, 20601), True, 'import tensorflow as tf\n'), ((20655, 20708), 'tensorflow.constant', 'tf.constant', (['rewards'], {'dtype': 'tf.float32', 'name': '"""reward"""'}), "(rewards, dtype=tf.float32, name='reward')\n", (20666, 20708), True, 'import tensorflow as tf\n'), ((20718, 20780), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[2]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[2], name='discount')\n", (20729, 20780), True, 'import tensorflow as tf\n'), ((21205, 21244), 'tensorflow.compat.v1.initialize_all_variables', 'tf.compat.v1.initialize_all_variables', ([], {}), '()\n', (21242, 21244), True, 'import tensorflow as tf\n'), ((22304, 22351), 'tensorflow.constant', 'tf.constant', (['[[1, 2], [3, 4]]'], {'dtype': 'tf.float32'}), '([[1, 2], [3, 4]], dtype=tf.float32)\n', (22315, 22351), True, 'import tensorflow as tf\n'), ((22562, 22597), 'tensorflow.constant', 'tf.constant', (['[3, 4]'], {'dtype': 'tf.int32'}), '([3, 4], dtype=tf.int32)\n', (22573, 22597), True, 'import tensorflow as tf\n'), ((22751, 22838), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.FIRST'], {'dtype': 'tf.int32', 'shape': '[2]', 'name': '"""step_type"""'}), 
"(time_step.StepType.FIRST, dtype=tf.int32, shape=[2], name=\n 'step_type')\n", (22762, 22838), True, 'import tensorflow as tf\n'), ((22892, 22952), 'tensorflow.constant', 'tf.constant', (['(0.0)'], {'dtype': 'tf.float32', 'shape': '[2]', 'name': '"""reward"""'}), "(0.0, dtype=tf.float32, shape=[2], name='reward')\n", (22903, 22952), True, 'import tensorflow as tf\n'), ((22962, 23024), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[2]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[2], name='discount')\n", (22973, 23024), True, 'import tensorflow as tf\n'), ((23093, 23179), 'tensorflow.constant', 'tf.constant', (['time_step.StepType.LAST'], {'dtype': 'tf.int32', 'shape': '[2]', 'name': '"""step_type"""'}), "(time_step.StepType.LAST, dtype=tf.int32, shape=[2], name=\n 'step_type')\n", (23104, 23179), True, 'import tensorflow as tf\n'), ((23233, 23286), 'tensorflow.constant', 'tf.constant', (['rewards'], {'dtype': 'tf.float32', 'name': '"""reward"""'}), "(rewards, dtype=tf.float32, name='reward')\n", (23244, 23286), True, 'import tensorflow as tf\n'), ((23296, 23358), 'tensorflow.constant', 'tf.constant', (['(1.0)'], {'dtype': 'tf.float32', 'shape': '[2]', 'name': '"""discount"""'}), "(1.0, dtype=tf.float32, shape=[2], name='discount')\n", (23307, 23358), True, 'import tensorflow as tf\n'), ((23783, 23822), 'tensorflow.compat.v1.initialize_all_variables', 'tf.compat.v1.initialize_all_variables', ([], {}), '()\n', (23820, 23822), True, 'import tensorflow as tf\n'), ((5587, 5610), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['x'], {}), '(x)\n', (5607, 5610), True, 'import tensorflow as tf\n'), ((8432, 8484), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (8464, 8484), True, 'import tensorflow as tf\n'), ((8983, 9011), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['action'], {}), '(action)\n', (9003, 
9011), True, 'import tensorflow as tf\n'), ((9188, 9216), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['action'], {}), '(action)\n', (9208, 9216), True, 'import tensorflow as tf\n'), ((9848, 9939), 'tensorflow.matmul', 'tf.matmul', (['encoded_observations_for_arm', 'encoded_observations_for_arm'], {'transpose_a': '(True)'}), '(encoded_observations_for_arm, encoded_observations_for_arm,\n transpose_a=True)\n', (9857, 9939), True, 'import tensorflow as tf\n'), ((9989, 10085), 'tf_agents.bandits.agents.utils.sum_reward_weighted_observations', 'bandit_utils.sum_reward_weighted_observations', (['rewards_for_arm', 'encoded_observations_for_arm'], {}), '(rewards_for_arm,\n encoded_observations_for_arm)\n', (10034, 10085), True, 'from tf_agents.bandits.agents import utils as bandit_utils\n'), ((11866, 11918), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (11898, 11918), True, 'import tensorflow as tf\n'), ((12682, 12710), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['action'], {}), '(action)\n', (12702, 12710), True, 'import tensorflow as tf\n'), ((12887, 12915), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['action'], {}), '(action)\n', (12907, 12915), True, 'import tensorflow as tf\n'), ((13547, 13638), 'tensorflow.matmul', 'tf.matmul', (['encoded_observations_for_arm', 'encoded_observations_for_arm'], {'transpose_a': '(True)'}), '(encoded_observations_for_arm, encoded_observations_for_arm,\n transpose_a=True)\n', (13556, 13638), True, 'import tensorflow as tf\n'), ((13688, 13784), 'tf_agents.bandits.agents.utils.sum_reward_weighted_observations', 'bandit_utils.sum_reward_weighted_observations', (['rewards_for_arm', 'encoded_observations_for_arm'], {}), '(rewards_for_arm,\n encoded_observations_for_arm)\n', (13733, 13784), True, 'from tf_agents.bandits.agents import utils as bandit_utils\n'), ((15744, 15797), 
'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (15776, 15797), True, 'import tensorflow as tf\n'), ((17337, 17390), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (17369, 17390), True, 'import tensorflow as tf\n'), ((19641, 19694), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (19673, 19694), True, 'import tensorflow as tf\n'), ((19776, 19823), 'tensorflow.constant', 'tf.constant', (['[[1, 2], [3, 4]]'], {'dtype': 'tf.float32'}), '([[1, 2], [3, 4]], dtype=tf.float32)\n', (19787, 19823), True, 'import tensorflow as tf\n'), ((20861, 20890), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['actions'], {}), '(actions)\n', (20881, 20890), True, 'import tensorflow as tf\n'), ((22170, 22223), 'tensorflow.compat.v1.train.AdamOptimizer', 'tf.compat.v1.train.AdamOptimizer', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (22202, 22223), True, 'import tensorflow as tf\n'), ((23439, 23468), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['actions'], {}), '(actions)\n', (23459, 23468), True, 'import tensorflow as tf\n'), ((8867, 8910), 'tensorflow.cast', 'tf.cast', (['experience.observation', 'tf.float64'], {}), '(experience.observation, tf.float64)\n', (8874, 8910), True, 'import tensorflow as tf\n'), ((9114, 9152), 'tensorflow.cast', 'tf.cast', (['experience.reward', 'tf.float64'], {}), '(experience.reward, tf.float64)\n', (9121, 9152), True, 'import tensorflow as tf\n'), ((9663, 9688), 'tensorflow.shape', 'tf.shape', (['rewards_for_arm'], {}), '(rewards_for_arm)\n', (9671, 9688), True, 'import tensorflow as tf\n'), ((10161, 10217), 'tensorflow.zeros', 'tf.zeros', (['[encoding_dim, encoding_dim]'], {'dtype': 'tf.float64'}), '([encoding_dim, encoding_dim], 
dtype=tf.float64)\n', (10169, 10217), True, 'import tensorflow as tf\n'), ((10235, 10277), 'tensorflow.zeros', 'tf.zeros', (['[encoding_dim]'], {'dtype': 'tf.float64'}), '([encoding_dim], dtype=tf.float64)\n', (10243, 10277), True, 'import tensorflow as tf\n'), ((10319, 10356), 'tensorflow.squeeze', 'tf.squeeze', (['num_samples_for_arm_total'], {}), '(num_samples_for_arm_total)\n', (10329, 10356), True, 'import tensorflow as tf\n'), ((12566, 12609), 'tensorflow.cast', 'tf.cast', (['experience.observation', 'tf.float64'], {}), '(experience.observation, tf.float64)\n', (12573, 12609), True, 'import tensorflow as tf\n'), ((12813, 12851), 'tensorflow.cast', 'tf.cast', (['experience.reward', 'tf.float64'], {}), '(experience.reward, tf.float64)\n', (12820, 12851), True, 'import tensorflow as tf\n'), ((13362, 13387), 'tensorflow.shape', 'tf.shape', (['rewards_for_arm'], {}), '(rewards_for_arm)\n', (13370, 13387), True, 'import tensorflow as tf\n'), ((13860, 13916), 'tensorflow.zeros', 'tf.zeros', (['[encoding_dim, encoding_dim]'], {'dtype': 'tf.float64'}), '([encoding_dim, encoding_dim], dtype=tf.float64)\n', (13868, 13916), True, 'import tensorflow as tf\n'), ((13934, 13976), 'tensorflow.zeros', 'tf.zeros', (['[encoding_dim]'], {'dtype': 'tf.float64'}), '([encoding_dim], dtype=tf.float64)\n', (13942, 13976), True, 'import tensorflow as tf\n'), ((14018, 14055), 'tensorflow.squeeze', 'tf.squeeze', (['num_samples_for_arm_total'], {}), '(num_samples_for_arm_total)\n', (14028, 14055), True, 'import tensorflow as tf\n'), ((22448, 22460), 'tensorflow.range', 'tf.range', (['(30)'], {}), '(30)\n', (22456, 22460), True, 'import tensorflow as tf\n'), ((2140, 2176), 'numpy.ones', 'np.ones', (['[context_dim, encoding_dim]'], {}), '([context_dim, encoding_dim])\n', (2147, 2176), True, 'import numpy as np\n'), ((2260, 2284), 'numpy.zeros', 'np.zeros', (['[encoding_dim]'], {}), '([encoding_dim])\n', (2268, 2284), True, 'import numpy as np\n'), ((19920, 19932), 'tensorflow.range', 
'tf.range', (['(30)'], {}), '(30)\n', (19928, 19932), True, 'import tensorflow as tf\n'), ((20972, 21022), 'numpy.array', 'np.array', (['[[1, 2, 3], [3, 2, 1]]'], {'dtype': 'np.float32'}), '([[1, 2, 3], [3, 2, 1]], dtype=np.float32)\n', (20980, 21022), True, 'import numpy as np\n'), ((23550, 23600), 'numpy.array', 'np.array', (['[[1, 2, 3], [3, 2, 1]]'], {'dtype': 'np.float32'}), '([[1, 2, 3], [3, 2, 1]], dtype=np.float32)\n', (23558, 23600), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /interfaces/interface/ethernet/switched-vlan/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State variables for VLANs
"""
__slots__ = ('_path_helper', '_extmethods', '__interface_mode','__native_vlan','__access_vlan','__trunk_vlans',)
_yang_name = 'state'
_pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    """Build the container, default-constructing every leaf.

    Accepts at most one positional argument: a template object carrying all
    of this container's elements, whose changed values are copied in.
    Keyword arguments: path_helper (xpath helper instance, False to disable)
    and load (forwarded to the per-element setters).
    """
    # Resolve the xpath helper: an explicit path_helper keyword wins; False
    # disables it; otherwise inherit the parent's helper when a parent is
    # attached; fall back to disabled.
    helper = kwargs.pop("path_helper", None)
    if helper is False:
      self._path_helper = False
    elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
      self._path_helper = helper
    elif hasattr(self, "_parent"):
      helper = getattr(self._parent, "_path_helper", False)
      self._path_helper = helper
    else:
      self._path_helper = False
    self._extmethods = False
    # Default-construct each leaf wrapped in YANGDynClass so that the YANG
    # type restrictions (ACCESS/TRUNK enumeration, 1..4094 vlan-id range,
    # and the x..y range-string pattern for trunk-vlans) are enforced on
    # every subsequent assignment.
    self.__interface_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
    self.__native_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    self.__access_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    self.__trunk_vlans = YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
    load = kwargs.pop("load", None)
    # Template-copy path: the single positional argument must expose every
    # element of this container; only elements reporting _changed() are
    # copied, via the matching _set_<name>() method.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['interfaces', 'interface', 'ethernet', 'switched-vlan', 'state']
def _get_interface_mode(self):
"""
Getter method for interface_mode, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/interface_mode (oc-vlan-types:vlan-mode-type)
YANG Description: Set the interface to access or trunk mode for
VLANs
"""
return self.__interface_mode
  def _set_interface_mode(self, v, load=False):
    """
    Setter method for interface_mode, mapped from YANG variable
    /interfaces/interface/ethernet/switched_vlan/state/interface_mode
    (oc-vlan-types:vlan-mode-type).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_interface_mode is considered as a private method.  Backends
    looking to populate this variable should do so via calling
    thisObj._set_interface_mode() directly.

    Raises ValueError (with error-string / defined-type / generated-type
    keys) when v is not compatible with oc-vlan-types:vlan-mode-type.

    YANG Description: Set the interface to access or trunk mode for VLANs.
    """
    # Values carrying a _utype converter are coerced before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap v so the ACCESS/TRUNK restriction is enforced; construction
      # failures are normalised into a single descriptive ValueError below.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """interface_mode must be of a type compatible with oc-vlan-types:vlan-mode-type""",
          'defined-type': "oc-vlan-types:vlan-mode-type",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)""",
        })
    self.__interface_mode = t
    # Invoke the registered _set hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
def _unset_interface_mode(self):
self.__interface_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
def _get_native_vlan(self):
"""
Getter method for native_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/native_vlan (oc-vlan-types:vlan-id)
YANG Description: Set the native VLAN id for untagged frames arriving on
a trunk interface. Tagged frames sent on an interface
configured with a native VLAN should have their tags
stripped prior to transmission. This configuration is only
valid on a trunk interface.
"""
return self.__native_vlan
  def _set_native_vlan(self, v, load=False):
    """
    Setter method for native_vlan, mapped from YANG variable
    /interfaces/interface/ethernet/switched_vlan/state/native_vlan
    (oc-vlan-types:vlan-id).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_native_vlan is considered as a private method.  Backends
    looking to populate this variable should do so via calling
    thisObj._set_native_vlan() directly.

    Raises ValueError (with error-string / defined-type / generated-type
    keys) when v is not compatible with oc-vlan-types:vlan-id.

    YANG Description: Set the native VLAN id for untagged frames arriving
    on a trunk interface.  Tagged frames sent on an interface configured
    with a native VLAN should have their tags stripped prior to
    transmission.  This configuration is only valid on a trunk interface.
    """
    # Values carrying a _utype converter are coerced before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap v so the 1..4094 vlan-id range is enforced; construction
      # failures are normalised into a single descriptive ValueError below.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """native_vlan must be of a type compatible with oc-vlan-types:vlan-id""",
          'defined-type': "oc-vlan-types:vlan-id",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)""",
        })
    self.__native_vlan = t
    # Invoke the registered _set hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
def _unset_native_vlan(self):
self.__native_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
def _get_access_vlan(self):
"""
Getter method for access_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/access_vlan (oc-vlan-types:vlan-id)
YANG Description: Assign the access vlan to the access port.
"""
return self.__access_vlan
  def _set_access_vlan(self, v, load=False):
    """
    Setter method for access_vlan, mapped from YANG variable
    /interfaces/interface/ethernet/switched_vlan/state/access_vlan
    (oc-vlan-types:vlan-id).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_access_vlan is considered as a private method.  Backends
    looking to populate this variable should do so via calling
    thisObj._set_access_vlan() directly.

    Raises ValueError (with error-string / defined-type / generated-type
    keys) when v is not compatible with oc-vlan-types:vlan-id.

    YANG Description: Assign the access vlan to the access port.
    """
    # Values carrying a _utype converter are coerced before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap v so the 1..4094 vlan-id range is enforced; construction
      # failures are normalised into a single descriptive ValueError below.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """access_vlan must be of a type compatible with oc-vlan-types:vlan-id""",
          'defined-type': "oc-vlan-types:vlan-id",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)""",
        })
    self.__access_vlan = t
    # Invoke the registered _set hook, when one exists.
    if hasattr(self, '_set'):
      self._set()
def _unset_access_vlan(self):
  # Reset the leaf to a freshly-constructed (default/unset) dynamic instance.
  self.__access_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
def _get_trunk_vlans(self):
  """
  Getter method for trunk_vlans, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/trunk_vlans (union)

  YANG Description: Specify VLANs, or ranges thereof, that the interface may
  carry when in trunk mode.  If not specified, all VLANs are
  allowed on the interface. Ranges are specified in the form
  x..y, where x<y - ranges are assumed to be inclusive (such
  that the VLAN range is x <= range <= y.
  """
  # Returns the typed-list leaf-list value (union of vlan-id and "x..y" strings).
  return self.__trunk_vlans
def _set_trunk_vlans(self, v, load=False):
  """
  Setter method for trunk_vlans, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/trunk_vlans (union)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_trunk_vlans is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_trunk_vlans() directly.

  YANG Description: Specify VLANs, or ranges thereof, that the interface may
  carry when in trunk mode.  If not specified, all VLANs are
  allowed on the interface. Ranges are specified in the form
  x..y, where x<y - ranges are assumed to be inclusive (such
  that the VLAN range is x <= range <= y.
  """
  # Values carrying a pyangbind unified-type converter are normalised first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Each element must satisfy the union: a vlan-id (1..4094) or a
    # regex-validated "x..y" range string; unique=True rejects duplicates.
    t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
  except (TypeError, ValueError):
    # Surface coercion failures as a uniform ValueError describing the
    # expected YANG type and the generated wrapper that rejected the value.
    raise ValueError({
      'error-string': """trunk_vlans must be of a type compatible with union""",
      'defined-type': "openconfig-vlan:union",
      'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)""",
    })
  self.__trunk_vlans = t
  # Notify the enclosing container of the change, when it tracks sets.
  if hasattr(self, '_set'):
    self._set()
def _unset_trunk_vlans(self):
  # Reset the leaf-list to a freshly-constructed (default/unset) dynamic instance.
  self.__trunk_vlans = YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
# Expose each generated leaf as a read-only property; mutation goes through
# the private _set_* methods only (the container's leaves are is_config=False).
interface_mode = __builtin__.property(_get_interface_mode)
native_vlan = __builtin__.property(_get_native_vlan)
access_vlan = __builtin__.property(_get_access_vlan)
trunk_vlans = __builtin__.property(_get_trunk_vlans)

# Ordered registry of this container's child elements, consumed by the
# pyangbind base class (e.g. for attribute copying in __init__).
_pyangbind_elements = OrderedDict([('interface_mode', interface_mode), ('native_vlan', native_vlan), ('access_vlan', access_vlan), ('trunk_vlans', trunk_vlans), ])
class state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-interfaces - based on the path /interfaces/interface/ethernet/switched-vlan/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State variables for VLANs

  NOTE: generated code -- do not hand-edit; regenerate from the YANG model.
  """
  # __slots__ avoids a per-instance __dict__: only the pyangbind plumbing
  # attributes and the (name-mangled) leaf attributes may be set.
  __slots__ = ('_path_helper', '_extmethods', '__interface_mode','__native_vlan','__access_vlan','__trunk_vlans',)

  _yang_name = 'state'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # Resolve the XPath helper: an explicit "path_helper" kwarg wins, then the
    # parent's helper is inherited; otherwise path registration is disabled.
    helper = kwargs.pop("path_helper", None)
    if helper is False:
      self._path_helper = False
    elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
      self._path_helper = helper
    elif hasattr(self, "_parent"):
      helper = getattr(self._parent, "_path_helper", False)
      self._path_helper = helper
    else:
      self._path_helper = False
    self._extmethods = False
    # Default-construct every leaf; all are is_config=False (operational state).
    self.__interface_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
    self.__native_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    self.__access_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    self.__trunk_vlans = YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)

    load = kwargs.pop("load", None)
    # A single positional argument is treated as a source object to copy
    # from: it must expose every registered element attribute.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        # Copy only elements that deviate from their defaults.
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Build the instance path by walking up the parent chain; fall back to
    # the static schema path when this node is detached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['interfaces', 'interface', 'ethernet', 'switched-vlan', 'state']

  def _get_interface_mode(self):
    """
    Getter method for interface_mode, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/interface_mode (oc-vlan-types:vlan-mode-type)

    YANG Description: Set the interface to access or trunk mode for
    VLANs
    """
    return self.__interface_mode

  def _set_interface_mode(self, v, load=False):
    """
    Setter method for interface_mode, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/interface_mode (oc-vlan-types:vlan-mode-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_interface_mode is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_interface_mode() directly.

    YANG Description: Set the interface to access or trunk mode for
    VLANs
    """
    # Values carrying a pyangbind unified-type converter are normalised first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Enumeration restricted to the keys 'ACCESS' and 'TRUNK'.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """interface_mode must be of a type compatible with oc-vlan-types:vlan-mode-type""",
        'defined-type': "oc-vlan-types:vlan-mode-type",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)""",
      })
    self.__interface_mode = t
    # Notify the enclosing container of the change, when it tracks sets.
    if hasattr(self, '_set'):
      self._set()

  def _unset_interface_mode(self):
    # Reset the leaf to a freshly-constructed (default/unset) dynamic instance.
    self.__interface_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)

  def _get_native_vlan(self):
    """
    Getter method for native_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/native_vlan (oc-vlan-types:vlan-id)

    YANG Description: Set the native VLAN id for untagged frames arriving on
    a trunk interface.  Tagged frames sent on an interface
    configured with a native VLAN should have their tags
    stripped prior to transmission. This configuration is only
    valid on a trunk interface.
    """
    return self.__native_vlan

  def _set_native_vlan(self, v, load=False):
    """
    Setter method for native_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/native_vlan (oc-vlan-types:vlan-id)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_native_vlan is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_native_vlan() directly.

    YANG Description: Set the native VLAN id for untagged frames arriving on
    a trunk interface.  Tagged frames sent on an interface
    configured with a native VLAN should have their tags
    stripped prior to transmission. This configuration is only
    valid on a trunk interface.
    """
    # Values carrying a pyangbind unified-type converter are normalised first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # vlan-id: uint16 further restricted to 1..4094.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """native_vlan must be of a type compatible with oc-vlan-types:vlan-id""",
        'defined-type': "oc-vlan-types:vlan-id",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)""",
      })
    self.__native_vlan = t
    # Notify the enclosing container of the change, when it tracks sets.
    if hasattr(self, '_set'):
      self._set()

  def _unset_native_vlan(self):
    # Reset the leaf to a freshly-constructed (default/unset) dynamic instance.
    self.__native_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)

  def _get_access_vlan(self):
    """
    Getter method for access_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/access_vlan (oc-vlan-types:vlan-id)

    YANG Description: Assign the access vlan to the access port.
    """
    return self.__access_vlan

  def _set_access_vlan(self, v, load=False):
    """
    Setter method for access_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/access_vlan (oc-vlan-types:vlan-id)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_access_vlan is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_access_vlan() directly.

    YANG Description: Assign the access vlan to the access port.
    """
    # Values carrying a pyangbind unified-type converter are normalised first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # vlan-id: uint16 further restricted to 1..4094.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """access_vlan must be of a type compatible with oc-vlan-types:vlan-id""",
        'defined-type': "oc-vlan-types:vlan-id",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)""",
      })
    self.__access_vlan = t
    # Notify the enclosing container of the change, when it tracks sets.
    if hasattr(self, '_set'):
      self._set()

  def _unset_access_vlan(self):
    # Reset the leaf to a freshly-constructed (default/unset) dynamic instance.
    self.__access_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)

  def _get_trunk_vlans(self):
    """
    Getter method for trunk_vlans, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/trunk_vlans (union)

    YANG Description: Specify VLANs, or ranges thereof, that the interface may
    carry when in trunk mode.  If not specified, all VLANs are
    allowed on the interface. Ranges are specified in the form
    x..y, where x<y - ranges are assumed to be inclusive (such
    that the VLAN range is x <= range <= y.
    """
    return self.__trunk_vlans

  def _set_trunk_vlans(self, v, load=False):
    """
    Setter method for trunk_vlans, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/trunk_vlans (union)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_trunk_vlans is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_trunk_vlans() directly.

    YANG Description: Specify VLANs, or ranges thereof, that the interface may
    carry when in trunk mode.  If not specified, all VLANs are
    allowed on the interface. Ranges are specified in the form
    x..y, where x<y - ranges are assumed to be inclusive (such
    that the VLAN range is x <= range <= y.
    """
    # Values carrying a pyangbind unified-type converter are normalised first.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Each element must satisfy the union: a vlan-id (1..4094) or a
      # regex-validated "x..y" range string; unique=True rejects duplicates.
      t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """trunk_vlans must be of a type compatible with union""",
        'defined-type': "openconfig-vlan:union",
        'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)""",
      })
    self.__trunk_vlans = t
    # Notify the enclosing container of the change, when it tracks sets.
    if hasattr(self, '_set'):
      self._set()

  def _unset_trunk_vlans(self):
    # Reset the leaf-list to a freshly-constructed (default/unset) dynamic instance.
    self.__trunk_vlans = YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)

  # Expose each generated leaf as a read-only property; mutation goes through
  # the private _set_* methods only (all leaves here are is_config=False).
  interface_mode = __builtin__.property(_get_interface_mode)
  native_vlan = __builtin__.property(_get_native_vlan)
  access_vlan = __builtin__.property(_get_access_vlan)
  trunk_vlans = __builtin__.property(_get_trunk_vlans)

  # Ordered registry of this container's child elements, consumed by the
  # pyangbind base class (e.g. for attribute copying in __init__).
  _pyangbind_elements = OrderedDict([('interface_mode', interface_mode), ('native_vlan', native_vlan), ('access_vlan', access_vlan), ('trunk_vlans', trunk_vlans), ])
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /interfaces/interface/ethernet/switched-vlan/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State variables for VLANs
"""
__slots__ = ('_path_helper', '_extmethods', '__interface_mode','__native_vlan','__access_vlan','__trunk_vlans',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__interface_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
self.__native_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
self.__access_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
self.__trunk_vlans = YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['interfaces', 'interface', 'ethernet', 'switched-vlan', 'state']
def _get_interface_mode(self):
"""
Getter method for interface_mode, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/interface_mode (oc-vlan-types:vlan-mode-type)
YANG Description: Set the interface to access or trunk mode for
VLANs
"""
return self.__interface_mode
def _set_interface_mode(self, v, load=False):
"""
Setter method for interface_mode, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/interface_mode (oc-vlan-types:vlan-mode-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_interface_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interface_mode() directly.
YANG Description: Set the interface to access or trunk mode for
VLANs
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interface_mode must be of a type compatible with oc-vlan-types:vlan-mode-type""",
'defined-type': "oc-vlan-types:vlan-mode-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)""",
})
self.__interface_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_interface_mode(self):
self.__interface_mode = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCESS': {}, 'TRUNK': {}},), is_leaf=True, yang_name="interface-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-mode-type', is_config=False)
def _get_native_vlan(self):
"""
Getter method for native_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/native_vlan (oc-vlan-types:vlan-id)
YANG Description: Set the native VLAN id for untagged frames arriving on
a trunk interface. Tagged frames sent on an interface
configured with a native VLAN should have their tags
stripped prior to transmission. This configuration is only
valid on a trunk interface.
"""
return self.__native_vlan
def _set_native_vlan(self, v, load=False):
"""
Setter method for native_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/native_vlan (oc-vlan-types:vlan-id)
If this variable is read-only (config: false) in the
source YANG file, then _set_native_vlan is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_native_vlan() directly.
YANG Description: Set the native VLAN id for untagged frames arriving on
a trunk interface. Tagged frames sent on an interface
configured with a native VLAN should have their tags
stripped prior to transmission. This configuration is only
valid on a trunk interface.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """native_vlan must be of a type compatible with oc-vlan-types:vlan-id""",
'defined-type': "oc-vlan-types:vlan-id",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)""",
})
self.__native_vlan = t
if hasattr(self, '_set'):
self._set()
def _unset_native_vlan(self):
self.__native_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="native-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
def _get_access_vlan(self):
"""
Getter method for access_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/access_vlan (oc-vlan-types:vlan-id)
YANG Description: Assign the access vlan to the access port.
"""
return self.__access_vlan
def _set_access_vlan(self, v, load=False):
"""
Setter method for access_vlan, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/access_vlan (oc-vlan-types:vlan-id)
If this variable is read-only (config: false) in the
source YANG file, then _set_access_vlan is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_access_vlan() directly.
YANG Description: Assign the access vlan to the access port.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """access_vlan must be of a type compatible with oc-vlan-types:vlan-id""",
'defined-type': "oc-vlan-types:vlan-id",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)""",
})
self.__access_vlan = t
if hasattr(self, '_set'):
self._set()
def _unset_access_vlan(self):
self.__access_vlan = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}), is_leaf=True, yang_name="access-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='oc-vlan-types:vlan-id', is_config=False)
def _get_trunk_vlans(self):
"""
Getter method for trunk_vlans, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/trunk_vlans (union)
YANG Description: Specify VLANs, or ranges thereof, that the interface may
carry when in trunk mode. If not specified, all VLANs are
allowed on the interface. Ranges are specified in the form
x..y, where x<y - ranges are assumed to be inclusive (such
that the VLAN range is x <= range <= y.
"""
return self.__trunk_vlans
  def _set_trunk_vlans(self, v, load=False):
    """
    Setter method for trunk_vlans, mapped from YANG variable /interfaces/interface/ethernet/switched_vlan/state/trunk_vlans (union)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_trunk_vlans is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_trunk_vlans() directly.
    YANG Description: Specify VLANs, or ranges thereof, that the interface may
    carry when in trunk mode. If not specified, all VLANs are
    allowed on the interface. Ranges are specified in the form
    x..y, where x<y - ranges are assumed to be inclusive (such
    that the VLAN range is x <= range <= y.
    """
    # Let union-typed wrapper values coerce themselves to their native type
    # before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """trunk_vlans must be of a type compatible with union""",
          'defined-type': "openconfig-vlan:union",
          'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)""",
        })
    # Only commit the value once it passed YANGDynClass validation, then
    # notify any registered change hook.
    self.__trunk_vlans = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_trunk_vlans(self):
    # pyangbind-generated reset helper: replaces the trunk-vlans list with a
    # fresh, empty YANGDynClass instance (its unset state).
    self.__trunk_vlans = YANGDynClass(unique=True, base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['1..4094']}),RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\.\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'}),]), is_leaf=False, yang_name="trunk-vlans", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/vlan', defining_module='openconfig-vlan', yang_type='union', is_config=False)
interface_mode = __builtin__.property(_get_interface_mode)
native_vlan = __builtin__.property(_get_native_vlan)
access_vlan = __builtin__.property(_get_access_vlan)
trunk_vlans = __builtin__.property(_get_trunk_vlans)
_pyangbind_elements = OrderedDict([('interface_mode', interface_mode), ('native_vlan', native_vlan), ('access_vlan', access_vlan), ('trunk_vlans', trunk_vlans), ])
| [
"__builtin__.property",
"collections.OrderedDict",
"pyangbind.lib.yangtypes.RestrictedClassType"
] | [((17074, 17115), '__builtin__.property', '__builtin__.property', (['_get_interface_mode'], {}), '(_get_interface_mode)\n', (17094, 17115), False, 'import __builtin__\n'), ((17132, 17170), '__builtin__.property', '__builtin__.property', (['_get_native_vlan'], {}), '(_get_native_vlan)\n', (17152, 17170), False, 'import __builtin__\n'), ((17187, 17225), '__builtin__.property', '__builtin__.property', (['_get_access_vlan'], {}), '(_get_access_vlan)\n', (17207, 17225), False, 'import __builtin__\n'), ((17242, 17280), '__builtin__.property', '__builtin__.property', (['_get_trunk_vlans'], {}), '(_get_trunk_vlans)\n', (17262, 17280), False, 'import __builtin__\n'), ((17307, 17450), 'collections.OrderedDict', 'OrderedDict', (["[('interface_mode', interface_mode), ('native_vlan', native_vlan), (\n 'access_vlan', access_vlan), ('trunk_vlans', trunk_vlans)]"], {}), "([('interface_mode', interface_mode), ('native_vlan',\n native_vlan), ('access_vlan', access_vlan), ('trunk_vlans', trunk_vlans)])\n", (17318, 17450), False, 'from collections import OrderedDict\n'), ((33761, 33802), '__builtin__.property', '__builtin__.property', (['_get_interface_mode'], {}), '(_get_interface_mode)\n', (33781, 33802), False, 'import __builtin__\n'), ((33819, 33857), '__builtin__.property', '__builtin__.property', (['_get_native_vlan'], {}), '(_get_native_vlan)\n', (33839, 33857), False, 'import __builtin__\n'), ((33874, 33912), '__builtin__.property', '__builtin__.property', (['_get_access_vlan'], {}), '(_get_access_vlan)\n', (33894, 33912), False, 'import __builtin__\n'), ((33929, 33967), '__builtin__.property', '__builtin__.property', (['_get_trunk_vlans'], {}), '(_get_trunk_vlans)\n', (33949, 33967), False, 'import __builtin__\n'), ((33994, 34137), 'collections.OrderedDict', 'OrderedDict', (["[('interface_mode', interface_mode), ('native_vlan', native_vlan), (\n 'access_vlan', access_vlan), ('trunk_vlans', trunk_vlans)]"], {}), "([('interface_mode', interface_mode), ('native_vlan',\n 
native_vlan), ('access_vlan', access_vlan), ('trunk_vlans', trunk_vlans)])\n", (34005, 34137), False, 'from collections import OrderedDict\n'), ((50448, 50489), '__builtin__.property', '__builtin__.property', (['_get_interface_mode'], {}), '(_get_interface_mode)\n', (50468, 50489), False, 'import __builtin__\n'), ((50506, 50544), '__builtin__.property', '__builtin__.property', (['_get_native_vlan'], {}), '(_get_native_vlan)\n', (50526, 50544), False, 'import __builtin__\n'), ((50561, 50599), '__builtin__.property', '__builtin__.property', (['_get_access_vlan'], {}), '(_get_access_vlan)\n', (50581, 50599), False, 'import __builtin__\n'), ((50616, 50654), '__builtin__.property', '__builtin__.property', (['_get_trunk_vlans'], {}), '(_get_trunk_vlans)\n', (50636, 50654), False, 'import __builtin__\n'), ((50681, 50824), 'collections.OrderedDict', 'OrderedDict', (["[('interface_mode', interface_mode), ('native_vlan', native_vlan), (\n 'access_vlan', access_vlan), ('trunk_vlans', trunk_vlans)]"], {}), "([('interface_mode', interface_mode), ('native_vlan',\n native_vlan), ('access_vlan', access_vlan), ('trunk_vlans', trunk_vlans)])\n", (50692, 50824), False, 'from collections import OrderedDict\n'), ((1812, 1934), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (1831, 1934), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((7187, 7309), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (7206, 7309), False, 'from pyangbind.lib.yangtypes import 
RestrictedClassType\n'), ((18499, 18621), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (18518, 18621), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((23874, 23996), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (23893, 23996), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((35186, 35308), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (35205, 35308), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((40561, 40683), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (40580, 40683), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((5796, 5918), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (5815, 5918), False, 'from 
pyangbind.lib.yangtypes import RestrictedClassType\n'), ((22483, 22605), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (22502, 22605), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((39170, 39292), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_type': '"""dict_key"""', 'restriction_arg': "{'ACCESS': {}, 'TRUNK': {}}"}), "(base_type=six.text_type, restriction_type='dict_key',\n restriction_arg={'ACCESS': {}, 'TRUNK': {}})\n", (39189, 39292), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((2353, 2446), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (2372, 2446), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((2823, 2916), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (2842, 2916), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((10295, 10388), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (10314, 10388), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((12918, 13011), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 
'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (12937, 13011), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((19040, 19133), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (19059, 19133), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((19510, 19603), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (19529, 19603), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((26982, 27075), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (27001, 27075), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((29605, 29698), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (29624, 29698), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((35727, 35820), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (35746, 35820), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((36197, 36290), 
'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (36216, 36290), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((43669, 43762), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (43688, 43762), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((46292, 46385), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (46311, 46385), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((3465, 3678), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (3484, 3678), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((9012, 9105), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (9031, 9105), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((11635, 11728), 
'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (11654, 11728), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((16595, 16808), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (16614, 16808), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((20152, 20365), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (20171, 20365), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((25699, 25792), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (25718, 25792), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((28322, 28415), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': 
['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (28341, 28415), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((33282, 33495), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (33301, 33495), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((36839, 37052), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (36858, 37052), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((42386, 42479), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (42405, 42479), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((45009, 45102), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (45028, 45102), False, 
'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((49969, 50182), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (49988, 50182), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((14860, 15073), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (14879, 15073), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((31547, 31760), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (31566, 31760), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((48234, 48447), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'six.text_type', 'restriction_dict': "{'pattern':\n 
'^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n }"}), "(base_type=six.text_type, restriction_dict={'pattern':\n '^(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])\\\\.\\\\.(409[0-4]|40[0-8][0-9]|[1-3][0-9]{3}|[1-9][0-9]{1,2}|[1-9])$'\n })\n", (48253, 48447), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((3334, 3427), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (3353, 3427), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((16464, 16557), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (16483, 16557), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((20021, 20114), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (20040, 20114), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((33151, 33244), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (33170, 33244), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((36708, 36801), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), 
"(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (36727, 36801), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((49838, 49931), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (49857, 49931), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((14729, 14822), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (14748, 14822), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((31416, 31509), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (31435, 31509), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n'), ((48103, 48196), 'pyangbind.lib.yangtypes.RestrictedClassType', 'RestrictedClassType', ([], {'base_type': 'int', 'restriction_dict': "{'range': ['0..65535']}", 'int_size': '(16)'}), "(base_type=int, restriction_dict={'range': ['0..65535']},\n int_size=16)\n", (48122, 48196), False, 'from pyangbind.lib.yangtypes import RestrictedClassType\n')] |
import logging
from typing import Dict, List, Optional
from src.consensus.blockchain import Blockchain
from src.consensus.sub_block_record import SubBlockRecord
from src.full_node.block_store import BlockStore
from src.types.full_block import FullBlock
from src.types.header_block import HeaderBlock
from src.types.sized_bytes import bytes32
from src.types.sub_epoch_summary import SubEpochSummary
from src.util.ints import uint32
from src.wallet.wallet_blockchain import WalletBlockchain
class BlockCache:
    """In-memory cache of sub-block records, the height -> header-hash map,
    header blocks and sub-epoch summaries, optionally backed by a BlockStore
    that is consulted on header-block cache misses.
    """

    # Number of heights fetched per BlockStore round trip by init_headers.
    BATCH_SIZE = 300

    def __init__(
        self,
        sub_blocks: Dict[bytes32, SubBlockRecord],
        sub_height_to_hash: Dict[uint32, bytes32],
        header_blocks: Optional[Dict[uint32, HeaderBlock]] = None,
        sub_epoch_summaries: Optional[Dict[uint32, SubEpochSummary]] = None,
        block_store: Optional[BlockStore] = None,
    ):
        """Create a cache over the given maps.

        header_blocks / sub_epoch_summaries default to None instead of the
        previous mutable {} defaults: a shared default dict is created once
        at function-definition time, so every instance built with the
        defaults would have silently shared (and leaked) the same cache.
        """
        self._sub_blocks = sub_blocks
        self._header_cache = {} if header_blocks is None else header_blocks
        self._sub_height_to_hash = sub_height_to_hash
        self._sub_epoch_summaries = {} if sub_epoch_summaries is None else sub_epoch_summaries
        self.block_store = block_store
        self.log = logging.getLogger(__name__)

    async def header_block(self, header_hash: bytes32) -> Optional[HeaderBlock]:
        """Return the header block for header_hash.

        On a cache miss, fetch the full block from the block store (when one
        is attached) and convert it; the result is not written back to the
        cache. Returns None when the hash is unknown everywhere.
        """
        if header_hash not in self._header_cache:
            if self.block_store is not None:
                block = await self.block_store.get_full_block(header_hash)
                if block is not None:
                    self.log.debug(f"cache miss {block.sub_block_height} {block.header_hash}")
                    return await block.get_block_header()
            self.log.error("could not find header hash in cache")
            return None
        return self._header_cache[header_hash]

    async def height_to_header_block(self, height: uint32) -> Optional[HeaderBlock]:
        """Return the header block at the given sub-block height, or None."""
        header_hash = self._height_to_hash(height)
        if header_hash is None:
            self.log.error(f"could not find block height {height} in cache")
            return None
        return await self.header_block(header_hash)

    def sub_block_record(self, header_hash: bytes32) -> Optional[SubBlockRecord]:
        """Return the cached SubBlockRecord for header_hash, or None."""
        if header_hash not in self._sub_blocks:
            self.log.error("could not find header hash in cache")
            return None
        return self._sub_blocks[header_hash]

    def height_to_sub_block_record(self, height: uint32) -> Optional[SubBlockRecord]:
        """Return the cached SubBlockRecord at the given height, or None."""
        header_hash = self._height_to_hash(height)
        if header_hash is None:
            return None
        return self.sub_block_record(header_hash)

    def get_ses_heights(self) -> List[uint32]:
        # Annotation fixed: this returns heights (uint32), not hashes.
        """Return the heights that have a sub-epoch summary, ascending."""
        return sorted(self._sub_epoch_summaries.keys())

    def get_ses(self, height: uint32) -> SubEpochSummary:
        """Return the sub-epoch summary at height (raises KeyError if absent)."""
        return self._sub_epoch_summaries[height]

    def get_ses_from_height(self, height: uint32) -> List[SubEpochSummary]:
        """Return all sub-epoch summaries strictly above height, newest first."""
        ses_l = []
        for ses_height in reversed(self.get_ses_heights()):
            if ses_height <= height:
                break
            ses_l.append(self.get_ses(ses_height))
        return ses_l

    def _height_to_hash(self, height: uint32) -> Optional[bytes32]:
        """Map a sub-block height to its header hash, or None if unknown."""
        if height not in self._sub_height_to_hash:
            self.log.error("could not find header hash in cache")
            return None
        return self._sub_height_to_hash[height]

    def clean(self):
        """Drop all cached header blocks."""
        self._header_cache = {}

    async def init_headers(self, start: uint32, stop: uint32):
        """(Re)populate the header cache with heights [start, stop] from the
        attached block store; no-op when no block store is attached."""
        if self.block_store is None:
            self.log.debug("block store is None, dont init")
            return
        self._header_cache = {}
        self.log.debug(f"init headers {start} {stop}")
        self._header_cache = await init_header_cache(self.block_store, start, stop)
async def init_block_cache(blockchain: Blockchain, start: uint32 = uint32(0), stop: uint32 = uint32(0)) -> BlockCache:
    """Build a BlockCache for a full-node blockchain, preloading the header
    blocks for heights [start, stop] from its block store."""
    return BlockCache(
        blockchain.sub_blocks,
        blockchain.sub_height_to_hash,
        await init_header_cache(blockchain.block_store, start, stop),
        blockchain.sub_epoch_summaries,
        blockchain.block_store,
    )
async def init_header_cache(block_store: BlockStore, start: uint32, stop: uint32) -> Dict[bytes32, HeaderBlock]:
    """Load full blocks for heights [start, stop] (inclusive) from the block
    store and return them as header blocks keyed by header hash.

    Heights are fetched in batches of BlockCache.BATCH_SIZE. The previous
    version also flushed after every single append (an extra
    `if len(batch_blocks) != 0` inside the loop), which defeated batching by
    issuing one store query per height and then performed a final query with
    a guaranteed-empty height list.
    """
    full_blocks: List[FullBlock] = []
    batch_blocks: List[uint32] = []
    for x in range(start, stop + 1):
        batch_blocks.append(uint32(x))
        if len(batch_blocks) == BlockCache.BATCH_SIZE:
            blocks = await block_store.get_full_blocks_at(batch_blocks)
            full_blocks.extend(blocks)
            batch_blocks = []
    # fetch the remaining partial batch, if any
    if batch_blocks:
        blocks = await block_store.get_full_blocks_at(batch_blocks)
        full_blocks.extend(blocks)
    # convert the FullBlocks to HeaderBlocks
    header_blocks: Dict[bytes32, HeaderBlock] = {}
    for block in full_blocks:
        header_blocks[block.header_hash] = await block.get_block_header()
    return header_blocks
async def init_wallet_block_cache(
    blockchain: WalletBlockchain, start: uint32 = uint32(0), stop: uint32 = uint32(0)
) -> BlockCache:
    """Build a BlockCache for a wallet blockchain, streaming header blocks
    for heights [start, stop) from its block store in batches of
    BlockCache.BATCH_SIZE. A stop of 0 means "up to the current peak"."""
    collected: List[HeaderBlock] = []
    pending: List[uint32] = []
    if stop == 0 and blockchain.peak_sub_height is not None:
        stop = blockchain.peak_sub_height
    for height in range(start, stop):
        pending.append(uint32(height))
        if len(pending) == BlockCache.BATCH_SIZE:
            collected.extend(await blockchain.block_store.get_header_block_at(pending))
            pending = []
    # fetch whatever is left over from the last partial batch
    collected.extend(await blockchain.block_store.get_header_block_at(pending))
    # index the header blocks by header hash
    header_block_map: Dict[bytes32, HeaderBlock] = {hb.header_hash: hb for hb in collected}
    return BlockCache(
        blockchain.sub_blocks, blockchain.sub_height_to_hash, header_block_map, blockchain.sub_epoch_summaries
    )
| [
"src.util.ints.uint32",
"logging.getLogger"
] | [((3767, 3776), 'src.util.ints.uint32', 'uint32', (['(0)'], {}), '(0)\n', (3773, 3776), False, 'from src.util.ints import uint32\n'), ((3793, 3802), 'src.util.ints.uint32', 'uint32', (['(0)'], {}), '(0)\n', (3799, 3802), False, 'from src.util.ints import uint32\n'), ((5151, 5160), 'src.util.ints.uint32', 'uint32', (['(0)'], {}), '(0)\n', (5157, 5160), False, 'from src.util.ints import uint32\n'), ((5177, 5186), 'src.util.ints.uint32', 'uint32', (['(0)'], {}), '(0)\n', (5183, 5186), False, 'from src.util.ints import uint32\n'), ((1092, 1119), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1109, 1119), False, 'import logging\n'), ((4348, 4357), 'src.util.ints.uint32', 'uint32', (['x'], {}), '(x)\n', (4354, 4357), False, 'from src.util.ints import uint32\n'), ((5448, 5457), 'src.util.ints.uint32', 'uint32', (['x'], {}), '(x)\n', (5454, 5457), False, 'from src.util.ints import uint32\n')] |
""" Wrap Paragraph by <NAME>.
A Pydev script for rewrapping the current paragraph to fit inside the print
margin preference in Eclipse (defaults to 80 columns). A paragraph is a block
of lines with a common leading string such as '# ' or a number of spaces. The
lines in the newly wrapped paragraph will all have the same leading string as
the original paragraph.
Usage: Position cursor inside paragraph to be rewrapped and hit <ctrl+2>, w
Caveats: Embedded tabs are always replaced by single spaces.
Does not wrap if the cursor is within the first line of a docstring.
Wrap Paragraph makes simple assumptions about paragraphs. Check your
results, <ctrl-Z> will undo the last rewrap.
Note: Activates with 'w' by default. Edit the constants ACTIVATION_STRING
and WAIT_FOR_ENTER near the end of this file if this does not suit your
needs.
Version: 0.1.1 - alpha
Date: May 2006
License: Available under the same conditions as PyDev. See PyDev license for
details: http://pydev.sourceforge.net
Support: Contact the author for bug reports/feature requests via the Pydev
users list (or use the source).
History: 20 May 2006 - Initial release.
21 May 2006 - Changed no of columns wrapped from 80 to the Eclipse
setting for the print margin preference.
"""
#===============================================================================
# The following is a copy of textwrap.py from the CPython 2.4 standard library
# - slightly modified for Jython 2.1 compatibility. Included here directly
# instead of as an imported module so that the Wrap Paragraph Jython Pydev
# extension can consist of a single file. The extension code starts at around
# line 400.
#===============================================================================
"""Text wrapping and filling.
"""
# Copyright (C) 1999-2001 <NAME>.
# Copyright (C) 2002, 2003 Python Software Foundation.
# Written by <NAME> <<EMAIL>>
__revision__ = "$Id$"
#===============================================================================
# Pydev Extensions in Jython code protocol
#===============================================================================
if False:
    # This block never executes: it exists only so that IDEs and static
    # checkers see definitions for the names (cmd, editor, systemGlobals)
    # that Pydev injects into the script's namespace when it runs it.
    from org.python.pydev.editor import PyEdit #@UnresolvedImport
    cmd = 'command string'
    editor = PyEdit
    systemGlobals = {}
#---------------------------- REQUIRED LOCALS-----------------------------------
# interface: String indicating which command will be executed As this script
# will be watching the PyEdit (that is the actual editor in Pydev), and this
# script will be listening to it, this string can indicate any of the methods of
# org.python.pydev.editor.IPyEditListener
assert cmd is not None
# interface: PyEdit object: this is the actual editor that we will act upon
assert editor is not None
if cmd == 'onCreateActions':
#----------------------------------Paragrapher----------------------------------
Paragrapher = systemGlobals.get('Paragrapher')
if Paragrapher is None:
        class Paragrapher:
            ''' Provides tools to process a paragraph of text in the Pydev editor.
            '''
            def __init__(self, editor):
                # Selection/document handles taken from the live PyEdit editor.
                self.selection = editor.createPySelection()
                self.document = editor.getDocument()
                self.offset = self.selection.getAbsoluteCursorOffset()
                self.currentLineNo = self.selection.getLineOfOffset(self.offset)
                self.docDelimiter = self.selection.getDelimiter(self.document)
                self.currentLine = self.selection.getLine(self.currentLineNo)
                # Leading-string pattern: comment marker, docstring quotes or
                # plain indentation ("#", '"""', "'''", '"', "'" or whitespace).
                self.pattern = r'''(\s*#\s*|\s*"""\s*|''' \
                    + r"""\s*'''\s*|""" \
                    + r'''\s*"\s*|''' \
                    + r"""\s*'\s*|\s*)"""
                import re
                self.compiledRe = re.compile(self.pattern)
                self.leadingString, self.mainText = \
                    self._splitLine(self.currentLine)
                # Filled in lazily by previousLineIsInParagraph /
                # nextLineIsInParagraph while scanning for paragraph bounds.
                self.offsetOfOriginalParagraph = 0
                self.lengthOfOriginalParagraph = 0
                self.numberOfLinesInDocument = self.document.getNumberOfLines()
            def _splitLine(self, line):
                ''' _splitLine(string: line) -> (string: leadingString,\
                    string: mainText)
                Split the line into two parts - a leading string and the remaining
                text.
                '''
                matched = self.compiledRe.match(line)
                leadingString = line[0:matched.end()]
                mainText = line[matched.end():]
                return (leadingString, mainText)
            def getCurrentLine(self):
                ''' getCurrentLine() -> string
                Return the main part of the text of the current line as a string.
                '''
                self.currentLine = self.selection.getLine(self.currentLineNo)
                self.mainText = self._splitLine(self.currentLine)[1]
                return self.mainText
            def previousLineIsInParagraph(self):
                ''' previousLineIsInParagraph() -> bool '''
                previousLine = self.selection.getLine(self.currentLineNo - 1)
                leadingStringOfPreviousLine, mainTextOfPreviousLine = \
                    self._splitLine(previousLine)
                # NOTE: bitwise '|' instead of 'or' is used throughout this
                # Jython-2.1-era file; all operands here are plain booleans.
                if (self.currentLineNo == 0) | \
                        (mainTextOfPreviousLine.strip() == "") | \
                        (leadingStringOfPreviousLine != self.leadingString): # diff para [1]
                    line = self.selection.getLine(self.currentLineNo)
                    lineEndsAt = self.selection.getEndLineOffset(self.currentLineNo)
                    self.offsetOfOriginalParagraph = lineEndsAt - len(line)
                    return False
                else:
                    return True # same para
            # [1] The current line is the first line of a paragraph. Calculate
            # starting offset of the first character of the original paragraph.
            def nextLineIsInParagraph(self):
                ''' nextLineIsInParagraph() -> bool '''
                nextLine = self.selection.getLine(self.currentLineNo + 1)
                leadingStringOfNextLine, mainTextOfNextLine = \
                    self._splitLine(nextLine)
                if (self.currentLineNo + 1 == self.numberOfLinesInDocument) | \
                        (mainTextOfNextLine.strip() == "") | \
                        (leadingStringOfNextLine != self.leadingString): # diff para [1]
                    self.lengthOfOriginalParagraph = \
                        self.selection.getEndLineOffset(self.currentLineNo) - \
                        self.offsetOfOriginalParagraph
                    return False
                else:
                    return True # same para
            # [1] The current line is the last line of a paragraph. Calculate
            # the length of the original paragraph.
systemGlobals['Paragrapher'] = Paragrapher
#------------------------------end of Paragrapher-------------------------------
WrapParagraph = systemGlobals.get('WrapParagraph')
if WrapParagraph is None:
Action = editor.getActionClass() #from org.eclipse.jface.action import Action #@UnresolvedImport
from java.lang import Runnable #@UnresolvedImport
        class WrapParagraph(Action):
            ''' Rewrap the text of the current paragraph.
            WrapParagraph searches for the beginning and end of the paragraph that
            contains the selection, rewraps it to fit into 79 character lines and
            replaces the original paragraph with the newly wrapped paragraph.
            The current paragraph is the text surrounding the current selection
            point.
            Only one paragraph at a time is wrapped.
            A paragraph is a consecutive block of lines whose alphanumeric text all
            begins at the same column position. Any constant leading string will be
            retained in the newly wrapped paragraph. This handles indented
            paragraphs and # comment blocks, and avoids wrapping indented code
            examples - but not code samples that are not indented.
            The first, or only, line of a docstring is handled as a special case and
            is not wrapped at all.
            '''
            def __init__(self, editor):
                self.editor = editor
            def displayStatusMessage(self):
                # Status-bar feedback; docstring lines are never rewrapped.
                self.editor.setMessage(False, "Cannot rewrap docstrings")
            class RunInUi(Runnable):
                '''Helper class that implements a Runnable (just so that we
                can pass it to the Java side). It simply calls some callable.
                '''
                def __init__(self, c):
                    self.callable = c
                def run(self):
                    self.callable()
            def run(self):
                editor = self.editor
                p = Paragrapher(editor)
                # Start building a list of lines of text in paragraph
                paragraph = [p.getCurrentLine()]
                # A leading string containing any quote marker means we are on
                # the first line of a docstring ('|' is bitwise-or on bools).
                isDocstring = (p.leadingString.find('"""') != -1) | \
                    (p.leadingString.find("'") != -1) | \
                    (p.leadingString.find('"') != -1)
                if isDocstring:
                    editor.asyncExec(self.RunInUi(self.displayStatusMessage))
                # Don't wrap empty lines or docstrings.
                if ((paragraph[0].strip() != "") & (not isDocstring)):
                    startingLineNo = p.currentLineNo
                    # Add the lines before the line containing the selection.
                    while p.previousLineIsInParagraph():
                        p.currentLineNo -= 1
                        paragraph.insert(0, p.getCurrentLine())
                    # Add the lines after the line containing the selection.
                    p.currentLineNo = startingLineNo
                    while p.nextLineIsInParagraph():
                        p.currentLineNo += 1
                        paragraph.append(p.getCurrentLine())
                    # paragraph now contains all of the lines so rewrap it [1].
                    noCols = editor.getPrintMarginColums()
                    paragraph = [line.rstrip() + " " for line in paragraph]
                    import textwrap
                    paragraph = textwrap.wrap("".join(paragraph), \
                        width=noCols - len(p.leadingString), \
                        expand_tabs=False, \
                        )
                    # Add line terminators.
                    # NOTE: indexing the result of map() below relies on the
                    # Python 2 / Jython behaviour of map() returning a list.
                    paragraph = map((lambda aLine: p.leadingString + aLine + \
                        p.docDelimiter), paragraph)
                    paragraph[-1] = paragraph[-1].replace(p.docDelimiter, "") # [2]
                    # Replace original paragraph.
                    p.document.replace(p.offsetOfOriginalParagraph, \
                        p.lengthOfOriginalParagraph, \
                        "".join(paragraph))
                # and we are done.
                # [1] paragraph now contains all of the lines of the paragraph to be
                # rewrapped and the lines have all been stripped of their leading
                # strings.
                #
                # Rewrap the paragraph allowing space to insert the leading strings back
                # in again after the wrapping is done. But first we need to make sure
                # that there is at least one space at the end of each line otherwise the
                # wrap routine will combine the last word of one line with the first
                # word of the next line. We cannot just add a space as this will be
                # kept if there is one there already so strip off any trailing white
                # space first and add back just a single space character.
                #
                # [2] Add line terminators to the end of every line in paragraph except
                # the last line otherwise the new paragraph will have an extra line
                # terminator at the end.
systemGlobals['WrapParagraph'] = WrapParagraph
# Change these constants if the default does not suit your needs
ACTIVATION_STRING = 'w'
WAIT_FOR_ENTER = False
# Register the extension as an ActionListener.
editor.addOfflineActionListener(ACTIVATION_STRING, WrapParagraph(editor), \
'Wrap paragraph', \
WAIT_FOR_ENTER)
| [
"re.compile"
] | [((3925, 3949), 're.compile', 're.compile', (['self.pattern'], {}), '(self.pattern)\n', (3935, 3949), False, 'import re\n')] |
#!/usr/bin/python
# coding=UTF-8
'''
@Author: recar
@Date: 2019-09-03 18:00:12
@LastEditTime: 2019-09-05 16:51:31
'''
# 对域名的curd
from flask import request, jsonify, current_app
from app.models import Domain, db
from . import domain_blueprint
from app.utils.response_util import success_response, faild_response, error_response
from sqlalchemy import or_, desc
import json
import traceback
@domain_blueprint.route("/domain/", methods=['GET'])
def get_domain(page_index=1):
    """Return one page of domain rows (newest first) plus the total count.

    Payload keys: ``domains``, ``domain_all_count``, ``page_index``,
    ``per_page``.  Any unexpected failure is reported via error_response.
    """
    try:
        page_size = current_app.config['ARTISAN_POSTS_PER_PAGE']
        # error_out=False yields an empty page instead of aborting with 404.
        page = Domain.query.order_by(desc(Domain.id)).paginate(
            page_index, per_page=page_size, error_out=False)
        serialized = [row.as_dict() for row in page.items]
        payload = {
            "domains": serialized,
            "domain_all_count": Domain.query.count(),
            "page_index": page_index,
            "per_page": page_size,
        }
        return success_response(payload)
    except Exception as exc:
        return error_response(str(exc))
@domain_blueprint.route("/domain/", methods=['POST'])
def add_domain():
    """Create a domain record from a JSON body ``{"domain": ..., "name": ...}``.

    Returns 30001 for a duplicate domain, 20004 for missing fields, an
    error_response on database failure, otherwise the new row.
    """
    payload = json.loads(request.get_data())
    domain_value = payload.get("domain", "").strip()
    name_value = payload.get("name", "").strip()
    # Reject duplicates before attempting the insert.
    if Domain.query.filter(Domain.domain == domain_value).first():
        return faild_response(30001)
    # Both fields are mandatory.
    if not (domain_value and name_value):
        return faild_response(20004)
    try:
        record = Domain(
            domain=domain_value,
            name=name_value,
        )
        db.session.add(record)
        db.session.commit()
        current_app.logger.info(record.as_dict())
        return success_response(record.as_dict())
    except Exception as exc:
        current_app.logger.error(traceback.format_exc())
        return error_response(str(exc))
| [
"app.utils.response_util.success_response",
"traceback.format_exc",
"app.utils.response_util.faild_response",
"flask.request.get_data",
"sqlalchemy.desc",
"app.models.Domain.query.filter",
"app.models.db.session.commit",
"app.models.db.session.add",
"app.models.Domain",
"app.models.Domain.query.co... | [((724, 744), 'app.models.Domain.query.count', 'Domain.query.count', ([], {}), '()\n', (742, 744), False, 'from app.models import Domain, db\n'), ((995, 1024), 'app.utils.response_util.success_response', 'success_response', (['return_data'], {}), '(return_data)\n', (1011, 1024), False, 'from app.utils.response_util import success_response, faild_response, error_response\n'), ((1237, 1255), 'flask.request.get_data', 'request.get_data', ([], {}), '()\n', (1253, 1255), False, 'from flask import request, jsonify, current_app\n'), ((1415, 1436), 'app.utils.response_util.faild_response', 'faild_response', (['(30001)'], {}), '(30001)\n', (1429, 1436), False, 'from app.utils.response_util import success_response, faild_response, error_response\n'), ((1924, 1945), 'app.utils.response_util.faild_response', 'faild_response', (['(20004)'], {}), '(20004)\n', (1938, 1945), False, 'from app.utils.response_util import success_response, faild_response, error_response\n'), ((1348, 1392), 'app.models.Domain.query.filter', 'Domain.query.filter', (['(Domain.domain == domain)'], {}), '(Domain.domain == domain)\n', (1367, 1392), False, 'from app.models import Domain, db\n'), ((1496, 1528), 'app.models.Domain', 'Domain', ([], {'domain': 'domain', 'name': 'name'}), '(domain=domain, name=name)\n', (1502, 1528), False, 'from app.models import Domain, db\n'), ((1590, 1612), 'app.models.db.session.add', 'db.session.add', (['domain'], {}), '(domain)\n', (1604, 1612), False, 'from app.models import Domain, db\n'), ((1625, 1644), 'app.models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1642, 1644), False, 'from app.models import Domain, db\n'), ((590, 605), 'sqlalchemy.desc', 'desc', (['Domain.id'], {}), '(Domain.id)\n', (594, 605), False, 'from sqlalchemy import or_, desc\n'), ((1825, 1847), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1845, 1847), False, 'import traceback\n')] |
# Author of Aqsa: <NAME>
from django.contrib.auth.mixins import LoginRequiredMixin
from aqsa_apps import mixins as mix
from . import models as m
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from . import forms as f
from django.conf import settings
import zipfile
import os
class List(LoginRequiredMixin, mix.OwnerRequired, mix.ListViewContextLabelsPaginated):
    """Paginated list of the current user's uploaded import files."""
    template_name = 'common/list.html'
    model = m.ImportFromFile
    # Columns rendered in the table, in display order.
    model_labels_and_fields = ('date', 'checked', 'no_error', 'num_imported_rows', 'success',
                               'wallet', 'bank', 'variety')
    # Static template context; the mixins merge it with per-request data.
    context = {
        'title': _('My Uploaded Files'),
        'links': (m.ImportFromFile.links['upload_bank_statement'], m.ImportFromFile.links['upload_backup_or_csv']),
        'msg_empty_object_list': _('You did not import any file. '
                                   'Click to "Upload Bank Statement" or "Upload Aqsa-Backup or CSV" for do that!'),
        'actions_description': _('Actions is not available if file contains any error or file was completely '
                                 'imported.'),
        'import_from_file': True,
    }
class UploadBankStatement(LoginRequiredMixin, mix.ContextForGenericView, mix.RequestUserInGetFormKwargs, FormView):
    """Upload form for a bank-statement file (ImportFromFile variety 1)."""
    success_url = None
    template_name = 'common/form.html'
    form_class = f.UploadBankStatementForm
    model = m.ImportFromFile
    context = {
        'title': _('Upload Bank Statement'),
        'links': (
            m.ImportFromFile.links['list'],
            m.ImportFromFile.links['upload_backup_or_csv'],
        ),
        'submit_btn': _('Upload'),
        'upload': True,
    }

    def form_valid(self, form):
        """Attach the owner, save, then redirect to the check page.

        Sberbank statements are first routed through an external PHP
        converter (production only) before reaching the check URL.
        """
        form.instance.owner = self.request.user
        form.instance.variety = 1
        form.save()
        self.success_url = form.instance.get_check_url()
        # User will be redirected to another software which will convert "wrong" TXT to okay CSV, then save new file
        # with original name in "MEDIA/import_from_file_sberbank/<user_id>" folder because folder of user not
        # "chmod 777" and PHP can not write to the folder of user. Finally redirect user to "get_check_url".
        if form.instance.bank == 'rub_sberbank' and not settings.DEBUG:
            self.success_url = '/sberbank.php?path=' + str(form.instance.file) + '&come_back=' + str(self.success_url)
            # Swap the leading 'import_from_file' storage prefix (16 chars)
            # for 'import_from_file_sberbank' so the converted file is found.
            form.instance.file = 'import_from_file_sberbank' + str(form.instance.file)[16:]
            form.save()
        return super().form_valid(form)
class UploadBackupOrCSV(LoginRequiredMixin, mix.ContextForGenericView, FormView):
    """Upload form for an Aqsa backup ZIP or a single CSV file."""
    success_url = None
    template_name = 'common/form.html'
    form_class = f.UploadBackupOrCSVForm
    model = m.ImportFromFile
    context = {
        'title': _('Upload Aqsa-backup or CSV file'),
        'links': (
            m.ImportFromFile.links['upload_bank_statement'],
            m.ImportFromFile.links['list'],
        ),
        'submit_btn': _('Upload'),
        'upload': True,
    }

    def form_valid(self, form):
        """Save the upload; for a backup ZIP (variety 7) extract its CSVs.

        The five expected CSVs are unpacked next to the upload, the ZIP
        itself is always removed afterwards, and the form is re-shown with
        an error if the archive is missing or malformed.
        """
        form.instance.owner = self.request.user
        form.save()
        self.success_url = form.instance.get_check_url()
        # If user select the "Backup (All in one ZIP)"
        if form.instance.variety == 7:
            path_of_zip_file = form.instance.file.path
            no_error = False
            error_msg = _('Error! Your file is not the ZIP format file.')
            # If file is zipfile. Also can be checked extension of uploaded file.
            # or str(form.instance.file).split('.')[-1] == 'zip'
            if zipfile.is_zipfile(path_of_zip_file):
                unzip_to_path = os.path.join(
                    settings.MEDIA_ROOT, os.path.join('import_from_file', str(form.instance.pk)))
                fz = zipfile.ZipFile(path_of_zip_file, 'r')
                try:
                    fz.extract('wallets.csv', path=unzip_to_path)
                    fz.extract('categories.csv', path=unzip_to_path)
                    fz.extract('tags.csv', path=unzip_to_path)
                    fz.extract('contacts.csv', path=unzip_to_path)
                    fz.extract('transactions.csv', path=unzip_to_path)
                    no_error = True
                except KeyError:
                    # extract() raises KeyError when a member is missing.
                    # NOTE(review): this early return keeps the ImportFromFile
                    # row, unlike the not-a-zip path below which deletes it —
                    # confirm that asymmetry is intended.
                    error_msg = _('Error! Uploaded ZIP-file is not the back up file of Aqsa because it does'
                                  ' not contain required CSV-files.')
                    form.add_error('file', error_msg)
                    return super().form_invalid(form)
                finally:
                    fz.close()
                    # Let's save space in our server. In any case, we do not need ZIP anymore.
                    os.remove(path_of_zip_file)
            if no_error is False:
                form.add_error('file', error_msg)
                form.instance.delete()
                return super().form_invalid(form)
        return super().form_valid(form)
| [
"django.utils.translation.ugettext_lazy",
"zipfile.is_zipfile",
"zipfile.ZipFile",
"os.remove"
] | [((677, 699), 'django.utils.translation.ugettext_lazy', '_', (['"""My Uploaded Files"""'], {}), "('My Uploaded Files')\n", (678, 699), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((850, 965), 'django.utils.translation.ugettext_lazy', '_', (['"""You did not import any file. Click to "Upload Bank Statement" or "Upload Aqsa-Backup or CSV" for do that!"""'], {}), '(\'You did not import any file. Click to "Upload Bank Statement" or "Upload Aqsa-Backup or CSV" for do that!\'\n )\n', (851, 965), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1031, 1125), 'django.utils.translation.ugettext_lazy', '_', (['"""Actions is not available if file contains any error or file was completely imported."""'], {}), "('Actions is not available if file contains any error or file was completely imported.'\n )\n", (1032, 1125), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1483, 1509), 'django.utils.translation.ugettext_lazy', '_', (['"""Upload Bank Statement"""'], {}), "('Upload Bank Statement')\n", (1484, 1509), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1667, 1678), 'django.utils.translation.ugettext_lazy', '_', (['"""Upload"""'], {}), "('Upload')\n", (1668, 1678), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2834, 2869), 'django.utils.translation.ugettext_lazy', '_', (['"""Upload Aqsa-backup or CSV file"""'], {}), "('Upload Aqsa-backup or CSV file')\n", (2835, 2869), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3028, 3039), 'django.utils.translation.ugettext_lazy', '_', (['"""Upload"""'], {}), "('Upload')\n", (3029, 3039), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3433, 3482), 'django.utils.translation.ugettext_lazy', '_', (['"""Error! Your file is not the ZIP format file."""'], {}), "('Error! 
Your file is not the ZIP format file.')\n", (3434, 3482), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3646, 3682), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['path_of_zip_file'], {}), '(path_of_zip_file)\n', (3664, 3682), False, 'import zipfile\n'), ((3850, 3888), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path_of_zip_file', '"""r"""'], {}), "(path_of_zip_file, 'r')\n", (3865, 3888), False, 'import zipfile\n'), ((4766, 4793), 'os.remove', 'os.remove', (['path_of_zip_file'], {}), '(path_of_zip_file)\n', (4775, 4793), False, 'import os\n'), ((4347, 4461), 'django.utils.translation.ugettext_lazy', '_', (['"""Error! Uploaded ZIP-file is not the back up file of Aqsa because it does not contain required CSV-files."""'], {}), "('Error! Uploaded ZIP-file is not the back up file of Aqsa because it does not contain required CSV-files.'\n )\n", (4348, 4461), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
""" input """
import sys
import time
import random
import builtins
import linecache
import termcolor
def getSettings(setting):
    """Read a named setting from ``settings.txt``.

    Line 1 of the file holds the per-character print delay (parsed as a
    float), line 2 the custom-names value (returned as a string).  Any
    other key returns None.

    BUG FIX: the original "names" branch computed the value but never
    returned it, so CUSTOM_NAMES was always None.
    """
    if setting == "print":
        return float(linecache.getline("settings.txt", 1).rstrip("\n"))
    elif setting == "names":
        return linecache.getline("settings.txt", 2).rstrip("\n")
# Settings are read once at import time and cached in module globals.
PRINT_SPEED = getSettings("print")
CUSTOM_NAMES = getSettings("names")
class human:
    """A single squad member: a name, hit points and the latest d20 roll."""

    def __init__(self):
        self.name = getName()
        self.hp = 10
        self.alive = True
        self.rolls = None

    def roll(self):
        """Roll a d20 and remember the result in ``self.rolls``."""
        self.rolls = random.randint(1, 20)

    def wound(self, wounder):
        """Possibly take 1-3 damage from *wounder*; a fresh d20 decides."""
        defence = random.randint(1, 20)
        if self.rolls < defence:
            dmg = random.randint(1, 3)
            # Extra arguments are colour/end hints for the patched print().
            print(f"{self.name} ", "yellow", "")
            print("was hit for ", None, "")
            print(f"[-{dmg} hp]", "red", "")
            print(f"[{self.hp - dmg} hp]", "green")
            print(f"{self.name} ", "yellow", "")
            print(f"rolled {self.rolls} to a {wounder.rolls}")
            self.hp -= dmg

    def checkMyVitals(self):
        """Mark this member dead at 0 hp; return 0 while still standing."""
        if self.hp > 0:
            return 0
        print(f"\n\nFATALITY :: [{self.name}] was killed\n\n", "red")
        self.alive = False
class squad:
    """A named squad of four members plus its active flag."""

    def __init__(self):
        self.name = getName()
        self.members = [human(), human(), human(), human()]
        self.memberNames = [member.name for member in self.members]
        self.active = True

    def countOff(self):
        """Drop dead members, then print the surviving roster.

        BUG FIX: the original deleted from ``self.members`` while iterating
        over it and kept advancing its index even after a deletion, so
        members after a casualty could be skipped or the wrong entry
        removed.  Rebuilding both lists from the survivors avoids that.
        """
        print("{} squad count off".format(self.name))
        self.members = [member for member in self.members if member.alive]
        self.memberNames = [member.name for member in self.members]
        for name in self.memberNames:
            print(name)
        print("\n")

    def checkActive(self):
        """Flag the squad inactive once nobody is left standing."""
        if len(self.members) <= 0:
            print("{} is out of action".format(self.name))
            self.active = False
def getName():
    """Build a random five-letter lowercase name.

    The original special-cased a chr() result of "\\n", but
    ``chr(random.randint(97, 122))`` only ever yields 'a'-'z', so that
    branch was dead code and has been removed.  The random consumption
    (five randint calls) is unchanged.
    """
    return "".join(chr(random.randint(97, 122)) for _ in range(5))
def combat(one, two, lengths):
    """Pair up the first *lengths* members of each squad and fight to 0 hp."""
    # Coin flip decides which squad supplies the left-hand fighter.
    first, last = (one, two) if random.randint(0, 1) == 1 else (two, one)
    for idx in range(lengths):
        attacker = first.members[idx]
        defender = last.members[idx]
        print(f"[{attacker.name}] v/s [{defender.name}]")
        # Trade rolls until one side drops to 0 hp; the lower roll is wounded.
        while attacker.hp > 0 and defender.hp > 0:
            attacker.roll()
            defender.roll()
            if attacker.rolls > defender.rolls:
                defender.wound(attacker)
            else:
                attacker.wound(defender)
            attacker.checkMyVitals()
            defender.checkMyVitals()
def delayed_print(text, color=None, end=None):
    """Write *text* one character at a time, pausing PRINT_SPEED seconds each.

    A trailing newline is appended unless *end* is the empty string.
    When *color* is given, every character is tinted via termcolor.
    """
    out = str(text)
    if end != "":
        out = out + "\n"
    write = sys.stdout.write
    for ch in out:
        write(termcolor.colored(ch, color) if color else ch)
        sys.stdout.flush()
        time.sleep(PRINT_SPEED)
def main():
    """Interactive game loop: fight two fresh squads each round."""
    # Route every print() in the program through the slow typewriter printer.
    builtins.print = delayed_print
    print("init [war.py]")
    print("enter to begin")
    input()
    while input("continue[y/n]").lower() != "n":
        red = squad()
        blue = squad()
        while red.active is True and blue.active is True:
            red.countOff()
            blue.countOff()
            # Only pair up as many fighters as the smaller squad can field.
            pair_count = min(len(red.members), len(blue.members))
            combat(red, blue, pair_count)
            red.checkActive()
            blue.checkActive()
        input("enter to continue round")
if __name__ == "__main__":
    main()
| [
"termcolor.colored",
"time.sleep",
"linecache.getline",
"sys.stdout.flush",
"random.randint",
"sys.stdout.write"
] | [((711, 732), 'random.randint', 'random.randint', (['(1)', '(20)'], {}), '(1, 20)\n', (725, 732), False, 'import random\n'), ((2705, 2725), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (2719, 2725), False, 'import random\n'), ((3677, 3695), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3693, 3695), False, 'import sys\n'), ((3704, 3727), 'time.sleep', 'time.sleep', (['PRINT_SPEED'], {}), '(PRINT_SPEED)\n', (3714, 3727), False, 'import time\n'), ((820, 841), 'random.randint', 'random.randint', (['(1)', '(20)'], {}), '(1, 20)\n', (834, 841), False, 'import random\n'), ((861, 881), 'random.randint', 'random.randint', (['(1)', '(3)'], {}), '(1, 3)\n', (875, 881), False, 'import random\n'), ((2509, 2532), 'random.randint', 'random.randint', (['(97)', '(122)'], {}), '(97, 122)\n', (2523, 2532), False, 'import random\n'), ((3645, 3667), 'sys.stdout.write', 'sys.stdout.write', (['char'], {}), '(char)\n', (3661, 3667), False, 'import sys\n'), ((3587, 3617), 'termcolor.colored', 'termcolor.colored', (['char', 'color'], {}), '(char, color)\n', (3604, 3617), False, 'import termcolor\n'), ((211, 247), 'linecache.getline', 'linecache.getline', (['"""settings.txt"""', '(1)'], {}), "('settings.txt', 1)\n", (228, 247), False, 'import linecache\n'), ((342, 378), 'linecache.getline', 'linecache.getline', (['"""settings.txt"""', '(2)'], {}), "('settings.txt', 2)\n", (359, 378), False, 'import linecache\n')] |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class IoCertManagerV1ClusterIssuerSpecCa(object):
    """CA issuer configuration (``spec.ca``) of a cert-manager ClusterIssuer.

    NOTE: This class follows the OpenAPI Generator model protocol.
    Ref: https://openapi-generator.tech

    Attributes:
        openapi_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.

    Improvement over the generated code: the Python-2 ``six.iteritems``
    shim (identical to ``dict.items()`` on Python 3) and the opaque
    lambda/map pipeline in ``to_dict`` were replaced with plain
    comprehensions.  The public interface is unchanged.
    """
    openapi_types = {
        'crl_distribution_points': 'list[str]',
        'ocsp_servers': 'list[str]',
        'secret_name': 'str'
    }

    attribute_map = {
        'crl_distribution_points': 'crlDistributionPoints',
        'ocsp_servers': 'ocspServers',
        'secret_name': 'secretName'
    }

    def __init__(self, crl_distribution_points=None, ocsp_servers=None, secret_name=None, local_vars_configuration=None):  # noqa: E501
        """IoCertManagerV1ClusterIssuerSpecCa - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._crl_distribution_points = None
        self._ocsp_servers = None
        self._secret_name = None
        self.discriminator = None

        # Optional fields are only assigned when provided so the setters'
        # validation is not triggered for absent values.
        if crl_distribution_points is not None:
            self.crl_distribution_points = crl_distribution_points
        if ocsp_servers is not None:
            self.ocsp_servers = ocsp_servers
        self.secret_name = secret_name

    @property
    def crl_distribution_points(self):
        """Gets the crl_distribution_points of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501

        X.509 v3 CRL distribution points: where revocation of an issued
        certificate can be checked.  Unset means certificates are issued
        without distribution points.

        :return: The crl_distribution_points of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501
        :rtype: list[str]
        """
        return self._crl_distribution_points

    @crl_distribution_points.setter
    def crl_distribution_points(self, crl_distribution_points):
        """Sets the crl_distribution_points of this IoCertManagerV1ClusterIssuerSpecCa.

        :param crl_distribution_points: The crl_distribution_points of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501
        :type: list[str]
        """
        self._crl_distribution_points = crl_distribution_points

    @property
    def ocsp_servers(self):
        """Gets the ocsp_servers of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501

        X.509 v3 OCSP responder URLs that can be queried for the revocation
        status of an issued certificate, e.g.
        \"http://ocsp.int-x3.letsencrypt.org\".  Unset means no OCSP servers.

        :return: The ocsp_servers of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501
        :rtype: list[str]
        """
        return self._ocsp_servers

    @ocsp_servers.setter
    def ocsp_servers(self, ocsp_servers):
        """Sets the ocsp_servers of this IoCertManagerV1ClusterIssuerSpecCa.

        :param ocsp_servers: The ocsp_servers of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501
        :type: list[str]
        """
        self._ocsp_servers = ocsp_servers

    @property
    def secret_name(self):
        """Gets the secret_name of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501

        SecretName is the name of the secret used to sign Certificates issued by this Issuer.  # noqa: E501

        :return: The secret_name of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501
        :rtype: str
        """
        return self._secret_name

    @secret_name.setter
    def secret_name(self, secret_name):
        """Sets the secret_name of this IoCertManagerV1ClusterIssuerSpecCa.

        Required field: raises ValueError on None when client-side
        validation is enabled in the configuration.

        :param secret_name: The secret_name of this IoCertManagerV1ClusterIssuerSpecCa.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and secret_name is None:  # noqa: E501
            raise ValueError("Invalid value for `secret_name`, must not be `None`")  # noqa: E501

        self._secret_name = secret_name

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, IoCertManagerV1ClusterIssuerSpecCa):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, IoCertManagerV1ClusterIssuerSpecCa):
            return True

        return self.to_dict() != other.to_dict()
| [
"kubernetes.client.configuration.Configuration",
"six.iteritems"
] | [((5703, 5736), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (5716, 5736), False, 'import six\n'), ((1467, 1482), 'kubernetes.client.configuration.Configuration', 'Configuration', ([], {}), '()\n', (1480, 1482), False, 'from kubernetes.client.configuration import Configuration\n')] |
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atexit
import json
import os
import tempfile
import fixtures
import mock
import testtools
from os_apply_config import apply_config
from os_apply_config import config_exception as exc
from os_apply_config import oac_file
# example template tree
TEMPLATES = os.path.join(os.path.dirname(__file__), 'templates')

# config for example tree: the metadata the CLI renders against
CONFIG = {
    "x": "foo",
    "y": False,
    "z": None,
    "btrue": True,
    "bfalse": False,
    "database": {
        "url": "sqlite:///blah"
    },
    "l": [1, 2],
}

# config for example tree - with subhash (exercises the --subhash option)
CONFIG_SUBHASH = {
    "OpenStack::Config": {
        "x": "foo",
        "database": {
            "url": "sqlite:///blah"
        }
    }
}

# expected output for example tree, keyed by rendered target path
OUTPUT = {
    "/etc/glance/script.conf": oac_file.OacFile(
        "foo\n"),
    "/etc/keystone/keystone.conf": oac_file.OacFile(
        "[foo]\ndatabase = sqlite:///blah\n"),
    "/etc/control/empty": oac_file.OacFile(
        "foo\n"),
    "/etc/control/allow_empty": oac_file.OacFile(
        "").set('allow_empty', False),
    "/etc/control/mode": oac_file.OacFile(
        "lorem modus\n").set('mode', 0o755),
}

TEMPLATE_PATHS = OUTPUT.keys()

# expected output for chown tests
# separated out to avoid needing to mock os.chown for most tests
CHOWN_TEMPLATES = os.path.join(os.path.dirname(__file__), 'chown_templates')

CHOWN_OUTPUT = {
    "owner.uid": oac_file.OacFile("lorem uido\n").set('owner', 0),
    "owner.name": oac_file.OacFile("namo uido\n").set('owner', 0),
    "group.gid": oac_file.OacFile("lorem gido\n").set('group', 0),
    "group.name": oac_file.OacFile("namo gido\n").set('group', 0),
}
def main_path():
    """Path of os_apply_config.py one directory above this test module."""
    here = os.path.dirname(os.path.realpath(__file__))
    return here + '/../os_apply_config.py'
def template(relpath):
    """Map an absolute-looking target path onto the example template tree."""
    # Drop the leading '/' so os.path.join does not discard TEMPLATES.
    rel = relpath[1:]
    return os.path.join(TEMPLATES, rel)
class TestRunOSConfigApplier(testtools.TestCase):
    """Tests the commandline options."""
    def setUp(self):
        """Redirect stdout/stderr/logging and write CONFIG to a temp file."""
        super(TestRunOSConfigApplier, self).setUp()
        self.useFixture(fixtures.NestedTempfile())
        # Capture everything main() prints so tests can assert on it.
        self.stdout = self.useFixture(fixtures.StringStream('stdout')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout))
        stderr = self.useFixture(fixtures.StringStream('stderr')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
        self.logger = self.useFixture(
            fixtures.FakeLogger(name="os-apply-config"))
        # Metadata file passed to --metadata by the tests below.
        fd, self.path = tempfile.mkstemp()
        with os.fdopen(fd, 'w') as t:
            t.write(json.dumps(CONFIG))
            t.flush()
    def test_print_key(self):
        """--key with a dotted path prints the raw value and exits 0."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'database.url', '--type', 'raw']))
        self.stdout.seek(0)
        self.assertEqual(CONFIG['database']['url'],
                         self.stdout.read().strip())
        self.assertEqual('', self.logger.output)
    def test_print_key_json_dict(self):
        """A dict-valued key is printed as JSON."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'database', '--type', 'raw']))
        self.stdout.seek(0)
        self.assertEqual(CONFIG['database'],
                         json.loads(self.stdout.read().strip()))
        self.assertEqual('', self.logger.output)
    def test_print_key_json_list(self):
        """A list-valued key is printed as JSON."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'l', '--type', 'raw']))
        self.stdout.seek(0)
        self.assertEqual(CONFIG['l'],
                         json.loads(self.stdout.read().strip()))
        self.assertEqual('', self.logger.output)
    def test_print_non_string_key(self):
        """Booleans print in their JSON form ('false'), not Python's."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'y', '--type', 'raw']))
        self.stdout.seek(0)
        self.assertEqual("false",
                         self.stdout.read().strip())
        self.assertEqual('', self.logger.output)
    def test_print_null_key(self):
        """A null value falls back to --key-default."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'z', '--type', 'raw', '--key-default', '']))
        self.stdout.seek(0)
        self.assertEqual('', self.stdout.read().strip())
        self.assertEqual('', self.logger.output)
    def test_print_key_missing(self):
        """A missing key without a default exits 1 and logs an error."""
        self.assertEqual(1, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'does.not.exist']))
        self.assertIn('does not exist', self.logger.output)
    def test_print_key_missing_default(self):
        """A missing key with --key-default prints the default, exits 0."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'does.not.exist', '--key-default', '']))
        self.stdout.seek(0)
        self.assertEqual('', self.stdout.read().strip())
        self.assertEqual('', self.logger.output)
    def test_print_key_wrong_type(self):
        """--type int on a non-integer value exits 1 and logs an error."""
        self.assertEqual(1, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'x', '--type', 'int']))
        self.assertIn('cannot interpret value', self.logger.output)
    def test_print_key_from_list(self):
        """List elements are addressable with dotted integer indexes (l.0)."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'l.0', '--type', 'int']))
        self.stdout.seek(0)
        self.assertEqual(str(CONFIG['l'][0]),
                         self.stdout.read().strip())
        self.assertEqual('', self.logger.output)
    def test_print_key_from_list_missing(self):
        """An out-of-range list index exits 1 and logs an error."""
        self.assertEqual(1, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'l.2', '--type', 'int']))
        self.assertIn('does not exist', self.logger.output)
    def test_print_key_from_list_missing_default(self):
        """An out-of-range list index with a default prints the default."""
        self.assertEqual(0, apply_config.main(
            ['os-apply-config.py', '--metadata', self.path, '--key',
             'l.2', '--type', 'int', '--key-default', '']))
        self.stdout.seek(0)
        self.assertEqual('', self.stdout.read().strip())
        self.assertEqual('', self.logger.output)
    def test_print_templates(self):
        """--print-templates prints the default templates directory."""
        apply_config.main(['os-apply-config', '--print-templates'])
        self.stdout.seek(0)
        self.assertEqual(
            self.stdout.read().strip(), apply_config.TEMPLATES_DIR)
        self.assertEqual('', self.logger.output)
    def test_boolean_key(self):
        """--boolean-key exits 0 for true, 1 for false, -1 for non-bool."""
        rcode = apply_config.main(['os-apply-config', '--metadata',
                                   self.path, '--boolean-key', 'btrue'])
        self.assertEqual(0, rcode)
        rcode = apply_config.main(['os-apply-config', '--metadata',
                                   self.path, '--boolean-key', 'bfalse'])
        self.assertEqual(1, rcode)
        rcode = apply_config.main(['os-apply-config', '--metadata',
                                   self.path, '--boolean-key', 'x'])
        self.assertEqual(-1, rcode)
    def test_boolean_key_and_key(self):
        """When both flags are given, --key wins and --boolean-key logs."""
        rcode = apply_config.main(['os-apply-config', '--metadata',
                                   self.path, '--boolean-key', 'btrue',
                                   '--key', 'x'])
        self.assertEqual(0, rcode)
        self.stdout.seek(0)
        self.assertEqual(self.stdout.read().strip(), 'foo')
        self.assertIn('--boolean-key ignored', self.logger.output)
    def test_os_config_files(self):
        """Metadata files can be listed indirectly via --os-config-files."""
        with tempfile.NamedTemporaryFile() as fake_os_config_files:
            with tempfile.NamedTemporaryFile() as fake_config:
                fake_config.write(json.dumps(CONFIG).encode('utf-8'))
                fake_config.flush()
                # The os-config-files file holds a JSON list of metadata
                # file paths to read.
                fake_os_config_files.write(
                    json.dumps([fake_config.name]).encode('utf-8'))
                fake_os_config_files.flush()
                apply_config.main(['os-apply-config',
                                   '--key', 'database.url',
                                   '--type', 'raw',
                                   '--os-config-files',
                                   fake_os_config_files.name])
                self.stdout.seek(0)
                self.assertEqual(
                    CONFIG['database']['url'], self.stdout.read().strip())
class OSConfigApplierTestCase(testtools.TestCase):
    """Unit tests for the applier internals: tree building, template
    rendering, config installation and file-control attributes."""
    def setUp(self):
        """Capture the module logger and sandbox temp-file creation."""
        super(OSConfigApplierTestCase, self).setUp()
        self.logger = self.useFixture(fixtures.FakeLogger('os-apply-config'))
        self.useFixture(fixtures.NestedTempfile())
    def write_config(self, config):
        """Write `config` as JSON to a temp file and return its path."""
        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'w') as t:
            t.write(json.dumps(config))
            t.flush()
        return path
    def check_output_file(self, tmpdir, path, obj):
        """Assert the file rendered under tmpdir matches the expected
        OacFile: present with the right body, or absent when the template
        rendered empty and allow_empty is False."""
        full_path = os.path.join(tmpdir, path[1:])
        if obj.allow_empty:
            assert os.path.exists(full_path), "%s doesn't exist" % path
            self.assertEqual(obj.body, open(full_path).read())
        else:
            assert not os.path.exists(full_path), "%s exists" % path
    def test_install_config(self):
        """install_config renders every template into the target tree."""
        path = self.write_config(CONFIG)
        tmpdir = tempfile.mkdtemp()
        apply_config.install_config([path], TEMPLATES, tmpdir, False)
        for path, obj in OUTPUT.items():
            self.check_output_file(tmpdir, path, obj)
    def test_install_config_subhash(self):
        """A subkey ('OpenStack::Config') can scope the config used."""
        tpath = self.write_config(CONFIG_SUBHASH)
        tmpdir = tempfile.mkdtemp()
        apply_config.install_config(
            [tpath], TEMPLATES, tmpdir, False, 'OpenStack::Config')
        for path, obj in OUTPUT.items():
            self.check_output_file(tmpdir, path, obj)
    def test_delete_if_not_allowed_empty(self):
        """An existing target is removed when it renders empty and the
        template disallows empty output."""
        path = self.write_config(CONFIG)
        tmpdir = tempfile.mkdtemp()
        template = "/etc/control/allow_empty"
        target_file = os.path.join(tmpdir, template[1:])
        # Touch the file
        os.makedirs(os.path.dirname(target_file))
        open(target_file, 'a').close()
        apply_config.install_config([path], TEMPLATES, tmpdir, False)
        # File should be gone
        self.assertFalse(os.path.exists(target_file))
    def test_respect_file_permissions(self):
        """New files get mode 644; an existing file's mode is preserved."""
        path = self.write_config(CONFIG)
        tmpdir = tempfile.mkdtemp()
        template = "/etc/keystone/keystone.conf"
        target_file = os.path.join(tmpdir, template[1:])
        os.makedirs(os.path.dirname(target_file))
        # File doesn't exist, use the default mode (644)
        apply_config.install_config([path], TEMPLATES, tmpdir, False)
        self.assertEqual(0o100644, os.stat(target_file).st_mode)
        self.assertEqual(OUTPUT[template].body, open(target_file).read())
        # Set a different mode:
        os.chmod(target_file, 0o600)
        apply_config.install_config([path], TEMPLATES, tmpdir, False)
        # The permissions should be preserved
        self.assertEqual(0o100600, os.stat(target_file).st_mode)
        self.assertEqual(OUTPUT[template].body, open(target_file).read())
    def test_build_tree(self):
        """build_tree maps every template path to its rendered OacFile."""
        tree = apply_config.build_tree(
            apply_config.template_paths(TEMPLATES), CONFIG)
        self.assertEqual(OUTPUT, tree)
    def test_render_template(self):
        # execute executable files, moustache non-executables
        self.assertEqual("abc\n", apply_config.render_template(template(
            "/etc/glance/script.conf"), {"x": "abc"}))
        self.assertRaises(
            exc.ConfigException,
            apply_config.render_template,
            template("/etc/glance/script.conf"), {})
    def test_render_template_bad_template(self):
        """A malformed moustache template raises and logs the parse error."""
        tdir = self.useFixture(fixtures.TempDir())
        bt_path = os.path.join(tdir.path, 'bad_template')
        with open(bt_path, 'w') as bt:
            # Section opened as 'foo' but closed as 'bar' -- invalid.
            bt.write("{{#foo}}bar={{bar}}{{/bar}}")
        e = self.assertRaises(exc.ConfigException,
                              apply_config.render_template,
                              bt_path, {'foo': [{'bar':
                                                 'abc'}]})
        self.assertIn('could not render moustache template', str(e))
        self.assertIn('Section end tag mismatch', self.logger.output)
    def test_render_moustache(self):
        """Dotted moustache keys resolve into nested dicts."""
        self.assertEqual(
            "ab123cd",
            apply_config.render_moustache("ab{{x.a}}cd", {"x": {"a": "123"}}))
    def test_render_moustache_bad_key(self):
        """An unknown moustache key renders as the empty string."""
        self.assertEqual(u'', apply_config.render_moustache("{{badkey}}", {}))
    def test_render_executable(self):
        """Executable templates are run and their stdout captured."""
        params = {"x": "foo"}
        self.assertEqual("foo\n", apply_config.render_executable(
            template("/etc/glance/script.conf"), params))
    def test_render_executable_failure(self):
        """A failing executable template raises ConfigException."""
        self.assertRaises(
            exc.ConfigException,
            apply_config.render_executable,
            template("/etc/glance/script.conf"), {})
    def test_template_paths(self):
        """template_paths yields (source path, target path) pairs."""
        expected = list(map(lambda p: (template(p), p), TEMPLATE_PATHS))
        actual = apply_config.template_paths(TEMPLATES)
        # Order is not guaranteed; compare sorted by target path.
        expected.sort(key=lambda tup: tup[1])
        actual.sort(key=lambda tup: tup[1])
        self.assertEqual(expected, actual)
    def test_strip_hash(self):
        """strip_hash returns the dict at a dotted path; non-dict or
        missing paths raise ConfigException."""
        h = {'a': {'b': {'x': 'y'}}, "c": [1, 2, 3]}
        self.assertEqual({'x': 'y'}, apply_config.strip_hash(h, 'a.b'))
        self.assertRaises(exc.ConfigException,
                          apply_config.strip_hash, h, 'a.nonexistent')
        self.assertRaises(exc.ConfigException,
                          apply_config.strip_hash, h, 'a.c')
    def test_load_list_from_json(self):
        """load_list_from_json: empty file raises, a JSON list is
        returned, a missing file yields [], a JSON dict raises."""
        def mkstemp():
            # Register cleanup at exit since files may outlive the test.
            fd, path = tempfile.mkstemp()
            atexit.register(
                lambda: os.path.exists(path) and os.remove(path))
            return (fd, path)
        def write_contents(fd, contents):
            with os.fdopen(fd, 'w') as t:
                t.write(contents)
                t.flush()
        fd, path = mkstemp()
        load_list = apply_config.load_list_from_json
        self.assertRaises(ValueError, load_list, path)
        write_contents(fd, json.dumps(["/tmp/config.json"]))
        json_obj = load_list(path)
        self.assertEqual(["/tmp/config.json"], json_obj)
        os.remove(path)
        self.assertEqual([], load_list(path))
        fd, path = mkstemp()
        write_contents(fd, json.dumps({}))
        self.assertRaises(ValueError, load_list, path)
    def test_default_templates_dir_current(self):
        """The current default dir is used when it exists."""
        default = '/usr/libexec/os-apply-config/templates'
        with mock.patch('os.path.isdir', lambda x: x == default):
            self.assertEqual(default, apply_config.templates_dir())
    def test_default_templates_dir_deprecated(self):
        """The deprecated dir is used when it is the only one present."""
        default = '/opt/stack/os-apply-config/templates'
        with mock.patch('os.path.isdir', lambda x: x == default):
            self.assertEqual(default, apply_config.templates_dir())
    def test_default_templates_dir_old_deprecated(self):
        """The oldest deprecated dir is still honoured."""
        default = '/opt/stack/os-config-applier/templates'
        with mock.patch('os.path.isdir', lambda x: x == default):
            self.assertEqual(default, apply_config.templates_dir())
    def test_default_templates_dir_both(self):
        """The current dir wins when both current and deprecated exist."""
        default = '/usr/libexec/os-apply-config/templates'
        deprecated = '/opt/stack/os-apply-config/templates'
        with mock.patch('os.path.isdir', lambda x: (x == default or
                                                x == deprecated)):
            self.assertEqual(default, apply_config.templates_dir())
    def test_control_mode(self):
        """A template's mode control attribute sets the installed mode."""
        path = self.write_config(CONFIG)
        tmpdir = tempfile.mkdtemp()
        template = "/etc/control/mode"
        target_file = os.path.join(tmpdir, template[1:])
        apply_config.install_config([path], TEMPLATES, tmpdir, False)
        self.assertEqual(0o100755, os.stat(target_file).st_mode)
    @mock.patch('os.chown')
    def test_control_chown(self, chown_mock):
        """owner/group controls chown by uid/gid or by name (-1 = keep)."""
        path = self.write_config(CONFIG)
        tmpdir = tempfile.mkdtemp()
        apply_config.install_config([path], CHOWN_TEMPLATES, tmpdir, False)
        chown_mock.assert_has_calls([mock.call(mock.ANY, 0, -1), # uid
                                     mock.call(mock.ANY, 0, -1), # username
                                     mock.call(mock.ANY, -1, 0), # gid
                                     mock.call(mock.ANY, -1, 0)], # groupname
                                    any_order=True)
| [
"os_apply_config.apply_config.main",
"os.remove",
"os.path.exists",
"mock.patch",
"json.dumps",
"os.chmod",
"tempfile.NamedTemporaryFile",
"fixtures.MonkeyPatch",
"fixtures.FakeLogger",
"os_apply_config.apply_config.strip_hash",
"os.path.dirname",
"fixtures.TempDir",
"tempfile.mkdtemp",
"o... | [((890, 915), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (905, 915), False, 'import os\n'), ((1393, 1418), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""foo\n"""'], {}), "('foo\\n')\n", (1409, 1418), False, 'from os_apply_config import oac_file\n'), ((1464, 1520), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""[foo]\ndatabase = sqlite:///blah\n"""'], {}), '("""[foo]\ndatabase = sqlite:///blah\n""")\n', (1480, 1520), False, 'from os_apply_config import oac_file\n'), ((1555, 1580), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""foo\n"""'], {}), "('foo\\n')\n", (1571, 1580), False, 'from os_apply_config import oac_file\n'), ((1932, 1957), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1947, 1957), False, 'import os\n'), ((2421, 2457), 'os.path.join', 'os.path.join', (['TEMPLATES', 'relpath[1:]'], {}), '(TEMPLATES, relpath[1:])\n', (2433, 2457), False, 'import os\n'), ((16671, 16693), 'mock.patch', 'mock.patch', (['"""os.chown"""'], {}), "('os.chown')\n", (16681, 16693), False, 'import mock\n'), ((3088, 3106), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (3104, 3106), False, 'import tempfile\n'), ((6959, 7018), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config', '--print-templates']"], {}), "(['os-apply-config', '--print-templates'])\n", (6976, 7018), False, 'from os_apply_config import apply_config\n'), ((7239, 7332), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config', '--metadata', self.path, '--boolean-key', 'btrue']"], {}), "(['os-apply-config', '--metadata', self.path,\n '--boolean-key', 'btrue'])\n", (7256, 7332), False, 'from os_apply_config import apply_config\n'), ((7415, 7509), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config', '--metadata', self.path, '--boolean-key', 'bfalse']"], {}), "(['os-apply-config', '--metadata', self.path,\n 
'--boolean-key', 'bfalse'])\n", (7432, 7509), False, 'from os_apply_config import apply_config\n'), ((7592, 7681), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config', '--metadata', self.path, '--boolean-key', 'x']"], {}), "(['os-apply-config', '--metadata', self.path,\n '--boolean-key', 'x'])\n", (7609, 7681), False, 'from os_apply_config import apply_config\n'), ((7806, 7913), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config', '--metadata', self.path, '--boolean-key', 'btrue',\n '--key', 'x']"], {}), "(['os-apply-config', '--metadata', self.path,\n '--boolean-key', 'btrue', '--key', 'x'])\n", (7823, 7913), False, 'from os_apply_config import apply_config\n'), ((9344, 9362), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (9360, 9362), False, 'import tempfile\n'), ((9556, 9586), 'os.path.join', 'os.path.join', (['tmpdir', 'path[1:]'], {}), '(tmpdir, path[1:])\n', (9568, 9586), False, 'import os\n'), ((9927, 9945), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (9943, 9945), False, 'import tempfile\n'), ((9954, 10015), 'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[path]', 'TEMPLATES', 'tmpdir', '(False)'], {}), '([path], TEMPLATES, tmpdir, False)\n', (9981, 10015), False, 'from os_apply_config import apply_config\n'), ((10222, 10240), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (10238, 10240), False, 'import tempfile\n'), ((10249, 10336), 'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[tpath]', 'TEMPLATES', 'tmpdir', '(False)', '"""OpenStack::Config"""'], {}), "([tpath], TEMPLATES, tmpdir, False,\n 'OpenStack::Config')\n", (10276, 10336), False, 'from os_apply_config import apply_config\n'), ((10548, 10566), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (10564, 10566), False, 'import tempfile\n'), ((10635, 10669), 'os.path.join', 'os.path.join', (['tmpdir', 'template[1:]'], {}), '(tmpdir, 
template[1:])\n', (10647, 10669), False, 'import os\n'), ((10792, 10853), 'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[path]', 'TEMPLATES', 'tmpdir', '(False)'], {}), '([path], TEMPLATES, tmpdir, False)\n', (10819, 10853), False, 'from os_apply_config import apply_config\n'), ((11042, 11060), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (11058, 11060), False, 'import tempfile\n'), ((11132, 11166), 'os.path.join', 'os.path.join', (['tmpdir', 'template[1:]'], {}), '(tmpdir, template[1:])\n', (11144, 11166), False, 'import os\n'), ((11282, 11343), 'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[path]', 'TEMPLATES', 'tmpdir', '(False)'], {}), '([path], TEMPLATES, tmpdir, False)\n', (11309, 11343), False, 'from os_apply_config import apply_config\n'), ((11523, 11549), 'os.chmod', 'os.chmod', (['target_file', '(384)'], {}), '(target_file, 384)\n', (11531, 11549), False, 'import os\n'), ((11560, 11621), 'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[path]', 'TEMPLATES', 'tmpdir', '(False)'], {}), '([path], TEMPLATES, tmpdir, False)\n', (11587, 11621), False, 'from os_apply_config import apply_config\n'), ((12479, 12518), 'os.path.join', 'os.path.join', (['tdir.path', '"""bad_template"""'], {}), "(tdir.path, 'bad_template')\n", (12491, 12518), False, 'import os\n'), ((13789, 13827), 'os_apply_config.apply_config.template_paths', 'apply_config.template_paths', (['TEMPLATES'], {}), '(TEMPLATES)\n', (13816, 13827), False, 'from os_apply_config import apply_config\n'), ((15019, 15034), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (15028, 15034), False, 'import os\n'), ((16415, 16433), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (16431, 16433), False, 'import tempfile\n'), ((16495, 16529), 'os.path.join', 'os.path.join', (['tmpdir', 'template[1:]'], {}), '(tmpdir, template[1:])\n', (16507, 16529), False, 'import os\n'), ((16538, 16599), 
'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[path]', 'TEMPLATES', 'tmpdir', '(False)'], {}), '([path], TEMPLATES, tmpdir, False)\n', (16565, 16599), False, 'from os_apply_config import apply_config\n'), ((16798, 16816), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (16814, 16816), False, 'import tempfile\n'), ((16825, 16892), 'os_apply_config.apply_config.install_config', 'apply_config.install_config', (['[path]', 'CHOWN_TEMPLATES', 'tmpdir', '(False)'], {}), '([path], CHOWN_TEMPLATES, tmpdir, False)\n', (16852, 16892), False, 'from os_apply_config import apply_config\n'), ((1623, 1643), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['""""""'], {}), "('')\n", (1639, 1643), False, 'from os_apply_config import oac_file\n'), ((1705, 1738), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""lorem modus\n"""'], {}), "('lorem modus\\n')\n", (1721, 1738), False, 'from os_apply_config import oac_file\n'), ((2012, 2044), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""lorem uido\n"""'], {}), "('lorem uido\\n')\n", (2028, 2044), False, 'from os_apply_config import oac_file\n'), ((2080, 2111), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""namo uido\n"""'], {}), "('namo uido\\n')\n", (2096, 2111), False, 'from os_apply_config import oac_file\n'), ((2146, 2178), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""lorem gido\n"""'], {}), "('lorem gido\\n')\n", (2162, 2178), False, 'from os_apply_config import oac_file\n'), ((2214, 2245), 'os_apply_config.oac_file.OacFile', 'oac_file.OacFile', (['"""namo gido\n"""'], {}), "('namo gido\\n')\n", (2230, 2245), False, 'from os_apply_config import oac_file\n'), ((2321, 2347), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2337, 2347), False, 'import os\n'), ((2649, 2674), 'fixtures.NestedTempfile', 'fixtures.NestedTempfile', ([], {}), '()\n', (2672, 2674), False, 'import fixtures\n'), ((2778, 
2825), 'fixtures.MonkeyPatch', 'fixtures.MonkeyPatch', (['"""sys.stdout"""', 'self.stdout'], {}), "('sys.stdout', self.stdout)\n", (2798, 2825), False, 'import fixtures\n'), ((2924, 2966), 'fixtures.MonkeyPatch', 'fixtures.MonkeyPatch', (['"""sys.stderr"""', 'stderr'], {}), "('sys.stderr', stderr)\n", (2944, 2966), False, 'import fixtures\n'), ((3019, 3062), 'fixtures.FakeLogger', 'fixtures.FakeLogger', ([], {'name': '"""os-apply-config"""'}), "(name='os-apply-config')\n", (3038, 3062), False, 'import fixtures\n'), ((3120, 3138), 'os.fdopen', 'os.fdopen', (['fd', '"""w"""'], {}), "(fd, 'w')\n", (3129, 3138), False, 'import os\n'), ((3266, 3378), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'database.url',\n '--type', 'raw']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'database.url', '--type', 'raw'])\n", (3283, 3378), False, 'from os_apply_config import apply_config\n'), ((3653, 3761), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'database',\n '--type', 'raw']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'database', '--type', 'raw'])\n", (3670, 3761), False, 'from os_apply_config import apply_config\n'), ((4041, 4142), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'l', '--type', 'raw']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'l', '--type', 'raw'])\n", (4058, 4142), False, 'from os_apply_config import apply_config\n'), ((4416, 4517), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'y', '--type', 'raw']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'y', '--type', 'raw'])\n", (4433, 4517), False, 'from os_apply_config import apply_config\n'), ((4769, 4891), 
'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'z', '--type',\n 'raw', '--key-default', '']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'z', '--type', 'raw', '--key-default', ''])\n", (4786, 4891), False, 'from os_apply_config import apply_config\n'), ((5116, 5213), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'does.not.exist']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'does.not.exist'])\n", (5133, 5213), False, 'from os_apply_config import apply_config\n'), ((5372, 5490), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'does.not.exist',\n '--key-default', '']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'does.not.exist', '--key-default', ''])\n", (5389, 5490), False, 'from os_apply_config import apply_config\n'), ((5718, 5819), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'x', '--type', 'int']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'x', '--type', 'int'])\n", (5735, 5819), False, 'from os_apply_config import apply_config\n'), ((5980, 6083), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'l.0', '--type', 'int'\n ]"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'l.0', '--type', 'int'])\n", (5997, 6083), False, 'from os_apply_config import apply_config\n'), ((6360, 6463), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'l.2', '--type', 'int'\n ]"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'l.2', '--type', 'int'])\n", (6377, 6463), False, 'from os_apply_config import apply_config\n'), ((6632, 6756), 
'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config.py', '--metadata', self.path, '--key', 'l.2', '--type',\n 'int', '--key-default', '']"], {}), "(['os-apply-config.py', '--metadata', self.path, '--key',\n 'l.2', '--type', 'int', '--key-default', ''])\n", (6649, 6756), False, 'from os_apply_config import apply_config\n'), ((8220, 8249), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (8247, 8249), False, 'import tempfile\n'), ((9197, 9235), 'fixtures.FakeLogger', 'fixtures.FakeLogger', (['"""os-apply-config"""'], {}), "('os-apply-config')\n", (9216, 9235), False, 'import fixtures\n'), ((9261, 9286), 'fixtures.NestedTempfile', 'fixtures.NestedTempfile', ([], {}), '()\n', (9284, 9286), False, 'import fixtures\n'), ((9376, 9394), 'os.fdopen', 'os.fdopen', (['fd', '"""w"""'], {}), "(fd, 'w')\n", (9385, 9394), False, 'import os\n'), ((9634, 9659), 'os.path.exists', 'os.path.exists', (['full_path'], {}), '(full_path)\n', (9648, 9659), False, 'import os\n'), ((10715, 10743), 'os.path.dirname', 'os.path.dirname', (['target_file'], {}), '(target_file)\n', (10730, 10743), False, 'import os\n'), ((10909, 10936), 'os.path.exists', 'os.path.exists', (['target_file'], {}), '(target_file)\n', (10923, 10936), False, 'import os\n'), ((11187, 11215), 'os.path.dirname', 'os.path.dirname', (['target_file'], {}), '(target_file)\n', (11202, 11215), False, 'import os\n'), ((11891, 11929), 'os_apply_config.apply_config.template_paths', 'apply_config.template_paths', (['TEMPLATES'], {}), '(TEMPLATES)\n', (11918, 11929), False, 'from os_apply_config import apply_config\n'), ((12441, 12459), 'fixtures.TempDir', 'fixtures.TempDir', ([], {}), '()\n', (12457, 12459), False, 'import fixtures\n'), ((13074, 13139), 'os_apply_config.apply_config.render_moustache', 'apply_config.render_moustache', (['"""ab{{x.a}}cd"""', "{'x': {'a': '123'}}"], {}), "('ab{{x.a}}cd', {'x': {'a': '123'}})\n", (13103, 13139), False, 'from os_apply_config import 
apply_config\n'), ((13217, 13264), 'os_apply_config.apply_config.render_moustache', 'apply_config.render_moustache', (['"""{{badkey}}"""', '{}'], {}), "('{{badkey}}', {})\n", (13246, 13264), False, 'from os_apply_config import apply_config\n'), ((14083, 14116), 'os_apply_config.apply_config.strip_hash', 'apply_config.strip_hash', (['h', '"""a.b"""'], {}), "(h, 'a.b')\n", (14106, 14116), False, 'from os_apply_config import apply_config\n'), ((14431, 14449), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (14447, 14449), False, 'import tempfile\n'), ((14885, 14917), 'json.dumps', 'json.dumps', (["['/tmp/config.json']"], {}), "(['/tmp/config.json'])\n", (14895, 14917), False, 'import json\n'), ((15138, 15152), 'json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (15148, 15152), False, 'import json\n'), ((15332, 15383), 'mock.patch', 'mock.patch', (['"""os.path.isdir"""', '(lambda x: x == default)'], {}), "('os.path.isdir', lambda x: x == default)\n", (15342, 15383), False, 'import mock\n'), ((15577, 15628), 'mock.patch', 'mock.patch', (['"""os.path.isdir"""', '(lambda x: x == default)'], {}), "('os.path.isdir', lambda x: x == default)\n", (15587, 15628), False, 'import mock\n'), ((15828, 15879), 'mock.patch', 'mock.patch', (['"""os.path.isdir"""', '(lambda x: x == default)'], {}), "('os.path.isdir', lambda x: x == default)\n", (15838, 15879), False, 'import mock\n'), ((16129, 16199), 'mock.patch', 'mock.patch', (['"""os.path.isdir"""', '(lambda x: x == default or x == deprecated)'], {}), "('os.path.isdir', lambda x: x == default or x == deprecated)\n", (16139, 16199), False, 'import mock\n'), ((2714, 2745), 'fixtures.StringStream', 'fixtures.StringStream', (['"""stdout"""'], {}), "('stdout')\n", (2735, 2745), False, 'import fixtures\n'), ((2860, 2891), 'fixtures.StringStream', 'fixtures.StringStream', (['"""stderr"""'], {}), "('stderr')\n", (2881, 2891), False, 'import fixtures\n'), ((3165, 3183), 'json.dumps', 'json.dumps', (['CONFIG'], {}), '(CONFIG)\n', 
(3175, 3183), False, 'import json\n'), ((8292, 8321), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (8319, 8321), False, 'import tempfile\n'), ((8617, 8749), 'os_apply_config.apply_config.main', 'apply_config.main', (["['os-apply-config', '--key', 'database.url', '--type', 'raw',\n '--os-config-files', fake_os_config_files.name]"], {}), "(['os-apply-config', '--key', 'database.url', '--type',\n 'raw', '--os-config-files', fake_os_config_files.name])\n", (8634, 8749), False, 'from os_apply_config import apply_config\n'), ((9421, 9439), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (9431, 9439), False, 'import json\n'), ((9787, 9812), 'os.path.exists', 'os.path.exists', (['full_path'], {}), '(full_path)\n', (9801, 9812), False, 'import os\n'), ((11379, 11399), 'os.stat', 'os.stat', (['target_file'], {}), '(target_file)\n', (11386, 11399), False, 'import os\n'), ((11703, 11723), 'os.stat', 'os.stat', (['target_file'], {}), '(target_file)\n', (11710, 11723), False, 'import os\n'), ((14635, 14653), 'os.fdopen', 'os.fdopen', (['fd', '"""w"""'], {}), "(fd, 'w')\n", (14644, 14653), False, 'import os\n'), ((15423, 15451), 'os_apply_config.apply_config.templates_dir', 'apply_config.templates_dir', ([], {}), '()\n', (15449, 15451), False, 'from os_apply_config import apply_config\n'), ((15668, 15696), 'os_apply_config.apply_config.templates_dir', 'apply_config.templates_dir', ([], {}), '()\n', (15694, 15696), False, 'from os_apply_config import apply_config\n'), ((15919, 15947), 'os_apply_config.apply_config.templates_dir', 'apply_config.templates_dir', ([], {}), '()\n', (15945, 15947), False, 'from os_apply_config import apply_config\n'), ((16293, 16321), 'os_apply_config.apply_config.templates_dir', 'apply_config.templates_dir', ([], {}), '()\n', (16319, 16321), False, 'from os_apply_config import apply_config\n'), ((16635, 16655), 'os.stat', 'os.stat', (['target_file'], {}), '(target_file)\n', (16642, 16655), False, 'import 
os\n'), ((16930, 16956), 'mock.call', 'mock.call', (['mock.ANY', '(0)', '(-1)'], {}), '(mock.ANY, 0, -1)\n', (16939, 16956), False, 'import mock\n'), ((17003, 17029), 'mock.call', 'mock.call', (['mock.ANY', '(0)', '(-1)'], {}), '(mock.ANY, 0, -1)\n', (17012, 17029), False, 'import mock\n'), ((17081, 17107), 'mock.call', 'mock.call', (['mock.ANY', '(-1)', '(0)'], {}), '(mock.ANY, -1, 0)\n', (17090, 17107), False, 'import mock\n'), ((17154, 17180), 'mock.call', 'mock.call', (['mock.ANY', '(-1)', '(0)'], {}), '(mock.ANY, -1, 0)\n', (17163, 17180), False, 'import mock\n'), ((14503, 14523), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (14517, 14523), False, 'import os\n'), ((14528, 14543), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (14537, 14543), False, 'import os\n'), ((8372, 8390), 'json.dumps', 'json.dumps', (['CONFIG'], {}), '(CONFIG)\n', (8382, 8390), False, 'import json\n'), ((8508, 8538), 'json.dumps', 'json.dumps', (['[fake_config.name]'], {}), '([fake_config.name])\n', (8518, 8538), False, 'import json\n')] |
import tensorflow as tf
from tensorflow.contrib import slim
def head(endpoints, embedding_dim, is_training):
    """Residual-attention embedding head on top of a ResNet-v2-50 backbone.

    Builds a trunk branch and a sigmoid mask branch from
    'resnet_v2_50/block4', combines them as (1 + mask) * trunk (attention
    residual learning), global-average-pools the result and projects it to
    an `embedding_dim`-dimensional embedding.

    Args:
        endpoints: dict of named tensors; must contain
            'resnet_v2_50/block4'. Mutated in place with the head tensors.
        embedding_dim: size of the final embedding.
        is_training: bool (or bool tensor) controlling batch-norm mode.

    Returns:
        The same `endpoints` dict, extended with 'attention_mask',
        'model_output' / 'global_pool', 'head_output', and
        'emb' / 'emb_raw'.
    """
    batch_norm_params = {
        'decay': 0.9,
        'epsilon': 1e-5,
        'scale': True,
        # Bug fix: previously omitted, so slim.batch_norm ran with its
        # default is_training=True even at eval time. The fully_connected
        # layer below already passed it explicitly, showing the intent.
        'is_training': is_training,
        'updates_collections': tf.GraphKeys.UPDATE_OPS,
        'fused': None,
    }
    # NOTE(review): residual_block opens its own arg_scope with its own
    # batch-norm params and the same omission; it likely needs the same
    # treatment (it is not changed from here).
    with slim.arg_scope(
            [slim.conv2d, slim.max_pool2d, slim.conv2d_transpose],
            weights_regularizer=slim.l2_regularizer(0.0),
            weights_initializer=slim.variance_scaling_initializer(),
            activation_fn=tf.nn.relu,
            normalizer_fn=slim.batch_norm,
            normalizer_params=batch_norm_params):
        with slim.arg_scope([slim.batch_norm], **batch_norm_params):
            # 1x1 projection to 256 channels before the attention module.
            projection_conv = slim.conv2d(endpoints['resnet_v2_50/block4'], 256, [1, 1], scope='projection_conv')
            preprocess_residual_block = residual_block(projection_conv)
            # Trunk branch: plain feature transformation.
            trunk_branch_block1 = residual_block(preprocess_residual_block)
            trunk_branch_block2 = residual_block(trunk_branch_block1)
            # Mask branch: produces per-position soft attention in (0, 1).
            mask_branch_block1 = residual_block(preprocess_residual_block)
            mask_branch_block2 = residual_block(mask_branch_block1)
            mask_branch_conv = slim.conv2d(mask_branch_block2, 256, [1, 1], scope='mask_branch_conv')
            mask_branch_prob = tf.sigmoid(mask_branch_conv)
            # Attention residual: (1 + M) * T keeps trunk features even
            # where the mask is near zero.
            _masked = (1 + mask_branch_prob) * trunk_branch_block2
            endpoints['attention_mask'] = mask_branch_prob
            # Global average pool over the spatial dimensions.
            endpoints['model_output'] = endpoints['global_pool'] = tf.reduce_mean(
                _masked, [1, 2], name='_pool5', keep_dims=False)
            endpoints['head_output'] = slim.fully_connected(
                endpoints['model_output'], 1024, normalizer_fn=slim.batch_norm,
                normalizer_params={
                    'decay': 0.9,
                    'epsilon': 1e-5,
                    'scale': True,
                    'is_training': is_training,
                    'updates_collections': tf.GraphKeys.UPDATE_OPS,
                })
            # Final linear embedding (no activation, no batch norm).
            endpoints['emb'] = endpoints['emb_raw'] = slim.fully_connected(
                endpoints['head_output'], embedding_dim, activation_fn=None,
                weights_initializer=tf.orthogonal_initializer(), scope='emb')
    return endpoints
def residual_block(input_features):
    """Bottleneck residual block: 1x1(64) -> 3x3(64) -> 1x1(256) + skip.

    Args:
        input_features: tensor with 256 channels (required so the
            identity skip connection type-checks against the 1x1(256)
            output).

    Returns:
        Tensor of the same shape as `input_features`.
    """
    batch_norm_params = {
        'decay': 0.9,
        'epsilon': 1e-5,
        'scale': True,
        'updates_collections': tf.GraphKeys.UPDATE_OPS,
        'fused': None,
    }
    with slim.arg_scope(
        [slim.conv2d],
        weights_regularizer=slim.l2_regularizer(0.0),
        weights_initializer=slim.variance_scaling_initializer(),
        activation_fn=tf.nn.relu,
        normalizer_fn=slim.batch_norm,
        normalizer_params=batch_norm_params):
        with slim.arg_scope([slim.batch_norm], **batch_norm_params):
            residual_conv1 = slim.conv2d(input_features, 64, [1, 1])
            residual_conv2 = slim.conv2d(residual_conv1, 64, [3, 3])
            residual_conv3 = slim.conv2d(residual_conv2, 256, [1, 1])
            # Identity shortcut; note the last conv's ReLU has already
            # been applied, so this is a post-activation residual sum.
            output = residual_conv3 + input_features
    return output
| [
"tensorflow.orthogonal_initializer",
"tensorflow.contrib.slim.l2_regularizer",
"tensorflow.contrib.slim.arg_scope",
"tensorflow.contrib.slim.variance_scaling_initializer",
"tensorflow.sigmoid",
"tensorflow.contrib.slim.fully_connected",
"tensorflow.reduce_mean",
"tensorflow.contrib.slim.conv2d"
] | [((1554, 1617), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['_masked', '[1, 2]'], {'name': '"""_pool5"""', 'keep_dims': '(False)'}), "(_masked, [1, 2], name='_pool5', keep_dims=False)\n", (1568, 1617), True, 'import tensorflow as tf\n'), ((1663, 1904), 'tensorflow.contrib.slim.fully_connected', 'slim.fully_connected', (["endpoints['model_output']", '(1024)'], {'normalizer_fn': 'slim.batch_norm', 'normalizer_params': "{'decay': 0.9, 'epsilon': 1e-05, 'scale': True, 'is_training': is_training,\n 'updates_collections': tf.GraphKeys.UPDATE_OPS}"}), "(endpoints['model_output'], 1024, normalizer_fn=slim.\n batch_norm, normalizer_params={'decay': 0.9, 'epsilon': 1e-05, 'scale':\n True, 'is_training': is_training, 'updates_collections': tf.GraphKeys.\n UPDATE_OPS})\n", (1683, 1904), False, 'from tensorflow.contrib import slim\n'), ((682, 736), 'tensorflow.contrib.slim.arg_scope', 'slim.arg_scope', (['[slim.batch_norm]'], {}), '([slim.batch_norm], **batch_norm_params)\n', (696, 736), False, 'from tensorflow.contrib import slim\n'), ((768, 856), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (["endpoints['resnet_v2_50/block4']", '(256)', '[1, 1]'], {'scope': '"""projection_conv"""'}), "(endpoints['resnet_v2_50/block4'], 256, [1, 1], scope=\n 'projection_conv')\n", (779, 856), False, 'from tensorflow.contrib import slim\n'), ((1244, 1314), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['mask_branch_block2', '(256)', '[1, 1]'], {'scope': '"""mask_branch_conv"""'}), "(mask_branch_block2, 256, [1, 1], scope='mask_branch_conv')\n", (1255, 1314), False, 'from tensorflow.contrib import slim\n'), ((1346, 1374), 'tensorflow.sigmoid', 'tf.sigmoid', (['mask_branch_conv'], {}), '(mask_branch_conv)\n', (1356, 1374), True, 'import tensorflow as tf\n'), ((2144, 2171), 'tensorflow.orthogonal_initializer', 'tf.orthogonal_initializer', ([], {}), '()\n', (2169, 2171), True, 'import tensorflow as tf\n'), ((2777, 2831), 'tensorflow.contrib.slim.arg_scope', 'slim.arg_scope', 
(['[slim.batch_norm]'], {}), '([slim.batch_norm], **batch_norm_params)\n', (2791, 2831), False, 'from tensorflow.contrib import slim\n'), ((2862, 2901), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['input_features', '(64)', '[1, 1]'], {}), '(input_features, 64, [1, 1])\n', (2873, 2901), False, 'from tensorflow.contrib import slim\n'), ((2931, 2970), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['residual_conv1', '(64)', '[3, 3]'], {}), '(residual_conv1, 64, [3, 3])\n', (2942, 2970), False, 'from tensorflow.contrib import slim\n'), ((3000, 3040), 'tensorflow.contrib.slim.conv2d', 'slim.conv2d', (['residual_conv2', '(256)', '[1, 1]'], {}), '(residual_conv2, 256, [1, 1])\n', (3011, 3040), False, 'from tensorflow.contrib import slim\n'), ((443, 467), 'tensorflow.contrib.slim.l2_regularizer', 'slim.l2_regularizer', (['(0.0)'], {}), '(0.0)\n', (462, 467), False, 'from tensorflow.contrib import slim\n'), ((501, 536), 'tensorflow.contrib.slim.variance_scaling_initializer', 'slim.variance_scaling_initializer', ([], {}), '()\n', (534, 536), False, 'from tensorflow.contrib import slim\n'), ((2538, 2562), 'tensorflow.contrib.slim.l2_regularizer', 'slim.l2_regularizer', (['(0.0)'], {}), '(0.0)\n', (2557, 2562), False, 'from tensorflow.contrib import slim\n'), ((2596, 2631), 'tensorflow.contrib.slim.variance_scaling_initializer', 'slim.variance_scaling_initializer', ([], {}), '()\n', (2629, 2631), False, 'from tensorflow.contrib import slim\n')] |
import random
DAY_COUNT = 10
MOVIE_COUNT = 4
def create_movie():
    """Return a random movie as a sorted [start_day, end_day] pair.

    Both endpoints are drawn uniformly from [0, DAY_COUNT - 1], so the
    interval may be a single day.
    """
    days = [random.randint(0, DAY_COUNT - 1) for _ in range(2)]
    days.sort()
    return days
def create_movies():
    """Generate MOVIE_COUNT random movies (see create_movie)."""
    movies = []
    for _ in range(MOVIE_COUNT):
        movies.append(create_movie())
    return movies
def visualize_movie(movie):
    """Print one movie as a DAY_COUNT-wide timeline row.

    Days the movie is showing are drawn as '#', all other days as '-'.
    A leading empty argument gives one space of indentation.
    """
    start, end = movie[0], movie[1]
    row = "-" * start + "#" * (end - start + 1) + "-" * (DAY_COUNT - end - 1)
    print("", row)
def visualize_movies(movies):
    """Print an indexed timeline row per movie, then a trailing newline."""
    index = 0
    for movie in movies:
        print(index, end="")
        visualize_movie(movie)
        index += 1
    print()
def solve_accept_earliest_start(movies):
    """Greedy schedule: walk the movies ordered by start day and accept
    every one whose interval begins on or after the first free day.

    Returns the accepted movies (each a [start, end] pair). Note this
    is the 'earliest start' heuristic, not the optimal 'earliest finish'
    interval-scheduling rule.
    """
    accepted = []
    next_free_day = 0
    for movie in sorted(movies):
        if movie[0] >= next_free_day:
            accepted.append(movie)
            next_free_day = movie[1] + 1
    return accepted
def solve_exhaustive(movies):
    # TODO: unimplemented stub — intended as a brute-force solver to
    # compare against the greedy heuristic. Currently returns None.
    return
def main():
    """Generate random movies, print a day-index ruler and timelines,
    then show the greedy 'earliest start' schedule."""
    movies = create_movies()
    print(movies, end="\n\n")
    ruler = " " + "".join(str(day) for day in range(DAY_COUNT))
    print(ruler)
    visualize_movies(movies)
    greedy_schedule = solve_accept_earliest_start(movies)
    visualize_movies(greedy_schedule)
if __name__ == '__main__':
main() | [
"random.randint"
] | [((83, 115), 'random.randint', 'random.randint', (['(0)', '(DAY_COUNT - 1)'], {}), '(0, DAY_COUNT - 1)\n', (97, 115), False, 'import random\n'), ((117, 149), 'random.randint', 'random.randint', (['(0)', '(DAY_COUNT - 1)'], {}), '(0, DAY_COUNT - 1)\n', (131, 149), False, 'import random\n')] |
import os
import urllib
import requests
import time
from bs4 import BeautifulSoup
from time import sleep, strftime, gmtime
from random import randint
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
sectionList = []
# returns the unique semester identifier
def getSemester():
    """Scrape the UMD Schedule of Classes landing page and return the
    currently-selected term id (e.g. '202001') as a string.

    On a request failure it posts an alert to a GroupMe bot, sleeps and
    falls through — NOTE(review): `html` is then unbound and the
    BeautifulSoup call below raises NameError; consider retrying.
    """
    # start a new web scraping session
    s = requests.session()
    # download the main page of classes
    try:
        html = s.get("https://ntst.umd.edu/soc")
    except requests.exceptions.RequestException as e:
        post_params = { 'bot_id' : 'yourbotapi', 'text': "something wrong" }
        requests.post('https://api.groupme.com/v3/bots/post', params = post_params)
        print(e)
        sleep(10)
    # parse the html of the class page and pull the term <select> element
    options = BeautifulSoup(html.text, "html.parser")
    options = options.find("select", {"id": "term-id-input"})
    options = str(options).split("</option>")
    # find the <option> flagged "selected" — that is the current term
    for option in options:
        if '"selected"' in option:
            semester = option
    # extract the value="..." attribute by slicing between the quotes
    semester = semester[semester.index('value="')+7:]
    semester = semester[:semester.index('"')]
    # close the session
    s.close()
    return semester
# returns a list of sections
def getSections(course):
    """Scrape the UMD Schedule of Classes search page for `course`.

    Returns a list of sections, each of the form
        [course_id, "open"/"closed", section_id, instructor, [times]].
    Side effects: appends every section id to the global `sectionList`
    and posts a GroupMe alert if the HTTP request fails (in which case
    `html` is unbound and parsing below raises NameError).
    """
    # start a new web scraping session
    s = requests.session()
    # compose the search URL; every filter parameter must be present
    url = "https://ntst.umd.edu/soc/search"
    url += "?courseId=" + course
    # BUG FIX: this literal was the mojibake '§ionId=' ('&sect'
    # rendered as '§'); the query parameter is '&sectionId='.
    url += "&sectionId="
    url += "&termId="+getSemester()
    url += "&_openSectionsOnly=on"
    url += "&creditCompare="
    url += "&credits="
    url += "&courseLevelFilter=ALL"
    url += "&instructor="
    url += "&_facetoface=on"
    url += "&_blended=on"
    url += "&_online=on"
    url += "&courseStartCompare="
    url += "&courseStartHour="
    url += "&courseStartMin="
    url += "&courseStartAM="
    url += "&courseEndHour="
    url += "&courseEndMin="
    url += "&courseEndAM="
    url += "&teachingCenter=ALL"
    url += "&_classDay1=on"
    url += "&_classDay2=on"
    url += "&_classDay3=on"
    url += "&_classDay4=on"
    url += "&_classDay5=on"
    # download the list of classes
    try:
        html = s.get(url).text
    except requests.exceptions.RequestException as e:
        post_params = { 'bot_id' : 'yourbotapi', 'text': "something wrong" }
        requests.post('https://api.groupme.com/v3/bots/post', params = post_params)
        print(e)
        sleep(10)
    # parse the html with bs4
    courses = BeautifulSoup(html, "html.parser").find_all(
        "div", {"class": "section"})
    # make an empty list to contain all sections
    sections = []
    # loop through every section in the course list
    for course_div in courses:
        # blank lists to hold this section's fields and meeting times
        section = []
        times = []
        # get the meeting-time rows for this section
        slots = course_div.find("div", {"class": "class-days-container"})
        slots = slots.find_all("div", {"class": "row"})
        # collapse each time slot to a single-line string
        # (renamed from 'time' to avoid shadowing the time module)
        for slot in slots:
            time_slot = slot.find("div", {"class": "section-day-time-group"})
            time_slot = " ".join(time_slot.text.strip().split("\n"))
            times.append(time_slot)
        # get the course id by slicing the value="..." attribute
        name = str(course_div.find(
            "div", {"class": "section-action-links-container"}))
        name = name[name.index('value="')+7:]
        name = name[:name.index('"')]
        section.append(name)
        # translate the open-seat count into an open/closed status
        openSeatsCount = int(course_div.find(
            "span", {"class": "open-seats-count"}).text)
        if openSeatsCount > 0:
            section.append("open")
        else:
            section.append("closed")
        # get the section number and the instructor
        section.append(course_div.find(
            "span", {"class": "section-id"}).text.strip())
        section.append(course_div.find(
            "span", {"class": "section-instructor"}).text)
        sectionList.append(course_div.find(
            "span", {"class": "section-id"}).text.strip())
        # record the section fields and its meeting times
        sections.append(section)
        section.append(times)
    # close the current session
    s.close()
    return sections
# returns if a section is open
def isOpen(section):
    """Return True if a section record (as built by getSections) is open.

    `section[1]` holds the status string, "open" or "closed".
    """
    # Direct comparison replaces the original verbose if/else branches.
    return section[1] == "open"
# Module-level shared state for the polling loop (testudo() below).
# The grids are pre-seeded 15x15 with zero placeholders; real section ids
# are appended per course. NOTE(review): the zero placeholders are
# iterated alongside real ids and never match a section id — presumably
# harmless, but verify before relying on list lengths.
rows = 15
columns = 15
sections_to_check = [[0 for x in range(columns)] for y in range(rows)]
to_remove = [[0 for x in range(columns)] for y in range(rows)]
base_sections = []
course = []
def testudo():
    """Main bot loop: poll a GroupMe group for commands and watch courses.

    Supported commands (from the configured user id):
      - "list"                -> dump all watched sections and statuses
      - "remove COURSE SECT"  -> stop watching a section
      - "COURSE SECT"         -> start watching a section
    Each pass also re-checks every watched course via checkStatus().
    Never returns.
    """
    post_params = { 'bot_id' : 'yourbotapi', 'text': "Starting Bot" }
    requests.post('https://api.groupme.com/v3/bots/post', params = post_params)
    # last_message de-duplicates: a command is only acted on once
    last_message = ""
    remove_mes = "remove"
    while True:
        # fetch only the most recent group message
        request_params = {'token': 'your request token'}
        request_params['limit'] = 1
        response_messages = requests.get(
            'https://api.groupme.com/v3/groups/yourgroupID/messages', params=request_params).json()['response']['messages']
        for message in response_messages:
            if(message['user_id'] == 'YourUserID' and message['text'] != last_message):
                # "list" command: report everything being watched
                if(message['text'].lower() == "list"):
                    listFunction()
                    break
                # "remove ..." command: unwatch a section
                if(remove_mes in message['text'].lower()):
                    deleteSectionWithMessage(message['text'])
                    print(message['text'])
                    last_message = message['text']
                    sleep(1)
                    break
                print(message['text'])
                last_message = message['text']
                # otherwise the message is "COURSE SECTION" — split on
                # the first space
                index_of_space = message['text'].find(" ")
                new_course = message['text'][0:index_of_space]
                new_section_num = message['text'][index_of_space +
                                                  1: len(message['text'])]
                # is this course already tracked?
                got_new = True
                for curr_course in course:
                    if(new_course.lower() == curr_course.lower()):
                        got_new = False
                # if this is a new course, cache its sections and track it
                if (got_new == True):
                    base_sections.append(getSections(new_course))
                    print("creating new course")
                    #this is where we add a new course
                    course.append(new_course.lower())
                # announce the added section with its current status
                index_of_course = course.index(new_course.lower())
                curr_sections = getSections(course[index_of_course])
                counter = 0
                while(counter < len(curr_sections)):
                    if(curr_sections[counter][2] == new_section_num):
                        command = 'curl -X POST \"https://api.groupme.com/v3/bots/post?bot_id=yourbotapi&text=' + \
                            "(ADDED)-->" + course[index_of_course] + "-->status:" + \
                            curr_sections[counter][1] + \
                            "-->Section:" + new_section_num + '\"'
                        os.system(command)
                    counter += 1
                sections_to_check[index_of_course].append(new_section_num)
                break
        # re-check the status of every section of every tracked course
        index_of_course = 0
        while (index_of_course < len(course)):
            checkStatus(index_of_course)
            index_of_course += 1
        # random delay to avoid hammering the schedule-of-classes site
        sleep(randint(10, 20))
# course: open/close: section#: proffName: times:
def listFunction():
    """Post one GroupMe message per watched section with its current
    open/closed status (format: count_course-->status:...-->Section:...).

    Re-scrapes every tracked course, so this is slow and rate-limited
    with a 1-second sleep per message.
    """
    course_index = 0
    while (course_index < len(course)):
        sections = getSections(course[course_index])
        counter = 0
        while(counter < len(sections)):
            # only report sections the user actually asked to watch
            for curr_section in sections_to_check[course_index]:
                if(sections[counter][2] == curr_section):
                    command = 'curl -X POST \"https://api.groupme.com/v3/bots/post?bot_id=yourbotapi&text=' + \
                        str(len(sections_to_check[course_index]))+"_" + course[course_index] + "-->status:" + \
                        sections[counter][1] + \
                        "-->Section:" + str(curr_section) + '\"'
                    os.system(command)
                    sleep(1)
            counter += 1
        course_index += 1
def checkStatus(course_index):
    """Re-scrape one tracked course; announce any watched section that is
    now open and queue it for removal from the watch list.

    Removals queued on the previous pass (in `to_remove`) are applied
    first, in reverse order so earlier deletions don't shift the
    indices of later ones.
    """
    if(len(to_remove[course_index]) > 0):
        for index in reversed(to_remove[course_index]):
            del sections_to_check[course_index][index]
        to_remove[course_index].clear()
    # print(course[course_index])
    if(course[course_index] != "0"):
        # first announce any sections added since the cached scrape
        newSection(course_index, base_sections[course_index])
        # then get a fresh list of sections (updates statuses)
        sections = getSections(course[course_index])
        counter = 0
        while(counter < len(sections)):
            indexForSection = 0
            for curr_section in sections_to_check[course_index]:
                #if(sections[counter][2] == curr_section):
                    #print("checking " +
                    #      course[course_index] + "section: " + curr_section)
                if(sections[counter][2] == curr_section and sections[counter][1] == "open"):
                    #print(curr_section + " is open")
                    command = 'curl -X POST \"https://api.groupme.com/v3/bots/post?bot_id=yourbotapi&text=' + \
                        str(len(sections_to_check[course_index]))+"_" + course[course_index] + "__IS_OPEN__" + \
                        "-->Section:" + curr_section + '\"'
                    os.system(command)
                    # stop watching once the opening has been reported
                    to_remove[course_index].append(indexForSection)
                indexForSection += 1
            counter += 1
counter += 1
# returns if a new section is open
def deleteSectionWithMessage(message):
    """Parse a 'remove COURSE SECTION' command and unwatch that section.

    The course id is the token after the 7-character 'remove ' prefix,
    up to the next space; the section id is everything after that.
    """
    checking_course = message[7:message.index(" ", 8)].lower()
    section = message[message.index(" ", 8)+1:len(message)]
    print("_"+checking_course + "_remove")
    print("_"+section + "_remove")
    if(checking_course in course):
        course_index = course.index(checking_course.lower())
        deleteSection(course_index, section)
def deleteSection(course_index, section_to_remove):
    """Remove one section id from a course's watch list and announce the
    removal via the GroupMe bot. No-op if the section is not watched.
    """
    # debug traces left in for troubleshooting command parsing
    print("courseindex:_" + str(course_index) + "_")
    print("sectiontoremove_" + section_to_remove + "_")
    print("sectiontocheck:_"+sections_to_check[course_index][0])
    if(sections_to_check[course_index].count(section_to_remove) > 0):
        print("found section")
        index = sections_to_check[course_index].index(section_to_remove)
        command = 'curl -X POST \"https://api.groupme.com/v3/bots/post?bot_id=yourbotapi&text=' + \
            "Removed:__" + course[course_index] + \
            "-->Section:" + section_to_remove + '\"'
        os.system(command)
        del sections_to_check[course_index][index]
        # if(len(sections_to_check[course_index]==0)):
        # del course[course_index]
def newSection(course_index, currsections):
    """Announce any section of this course that was not in the cached
    section list, then refresh the cache.

    Args:
        course_index: index into the global `course` list.
        currsections: previously cached getSections() result for this
            course (list of [name, status, section_id, instructor, times]).

    Side effects: posts to GroupMe via curl and replaces
    base_sections[course_index] when a new section appears.
    """
    course_id = course[course_index]
    updated_sections = getSections(course_id)
    # Section ids we already know about (index 2 is the section id).
    known_ids = set(cached[2] for cached in currsections)
    for section in updated_sections:
        section_number = section[2]
        if section_number not in known_ids:
            # BUG FIX: the original concatenated the whole `course` list
            # into the command string (TypeError), indexed currsections
            # in lockstep with the updated list (IndexError exactly when
            # a new section exists), and passed the list to getSections.
            command = 'curl -X POST \"https://api.groupme.com/v3/bots/post?bot_id=yourbotapi&text=' + \
                course_id + "_(NEW)section_open-->" + section_number + '\"'
            os.system(command)
            base_sections[course_index] = getSections(course_id)
# define the command line arguments
if __name__ == '__main__':
testudo()
| [
"requests.session",
"requests.post",
"time.sleep",
"requests.get",
"bs4.BeautifulSoup",
"time.text.strip",
"os.system",
"random.randint"
] | [((473, 491), 'requests.session', 'requests.session', ([], {}), '()\n', (489, 491), False, 'import requests\n'), ((896, 935), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html.text', '"""html.parser"""'], {}), "(html.text, 'html.parser')\n", (909, 935), False, 'from bs4 import BeautifulSoup\n'), ((1485, 1503), 'requests.session', 'requests.session', ([], {}), '()\n', (1501, 1503), False, 'import requests\n'), ((5018, 5091), 'requests.post', 'requests.post', (['"""https://api.groupme.com/v3/bots/post"""'], {'params': 'post_params'}), "('https://api.groupme.com/v3/bots/post', params=post_params)\n", (5031, 5091), False, 'import requests\n'), ((11265, 11283), 'os.system', 'os.system', (['command'], {}), '(command)\n', (11274, 11283), False, 'import os\n'), ((730, 803), 'requests.post', 'requests.post', (['"""https://api.groupme.com/v3/bots/post"""'], {'params': 'post_params'}), "('https://api.groupme.com/v3/bots/post', params=post_params)\n", (743, 803), False, 'import requests\n'), ((832, 841), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (837, 841), False, 'from time import sleep, strftime, gmtime\n'), ((2498, 2571), 'requests.post', 'requests.post', (['"""https://api.groupme.com/v3/bots/post"""'], {'params': 'post_params'}), "('https://api.groupme.com/v3/bots/post', params=post_params)\n", (2511, 2571), False, 'import requests\n'), ((2600, 2609), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (2605, 2609), False, 'from time import sleep, strftime, gmtime\n'), ((2655, 2689), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (2668, 2689), False, 'from bs4 import BeautifulSoup\n'), ((11935, 11953), 'os.system', 'os.system', (['command'], {}), '(command)\n', (11944, 11953), False, 'import os\n'), ((7956, 7971), 'random.randint', 'randint', (['(10)', '(20)'], {}), '(10, 20)\n', (7963, 7971), False, 'from random import randint\n'), ((5976, 5984), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (5981, 5984), False, 
'from time import sleep, strftime, gmtime\n'), ((8708, 8726), 'os.system', 'os.system', (['command'], {}), '(command)\n', (8717, 8726), False, 'import os\n'), ((8747, 8755), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (8752, 8755), False, 'from time import sleep, strftime, gmtime\n'), ((10082, 10100), 'os.system', 'os.system', (['command'], {}), '(command)\n', (10091, 10100), False, 'import os\n'), ((3329, 3346), 'time.text.strip', 'time.text.strip', ([], {}), '()\n', (3344, 3346), False, 'import time\n'), ((5325, 5422), 'requests.get', 'requests.get', (['"""https://api.groupme.com/v3/groups/yourgroupID/messages"""'], {'params': 'request_params'}), "('https://api.groupme.com/v3/groups/yourgroupID/messages',\n params=request_params)\n", (5337, 5422), False, 'import requests\n'), ((7566, 7584), 'os.system', 'os.system', (['command'], {}), '(command)\n', (7575, 7584), False, 'import os\n')] |
import b3
import unittest
class TestCondition(unittest.TestCase):
    """Sanity check for the b3 Condition node class."""

    def test_category(self):
        # Condition nodes must advertise the CONDITION category constant
        # so the behavior-tree core can classify them correctly.
        self.assertEqual(b3.Condition.category, b3.CONDITION)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main"
] | [((192, 207), 'unittest.main', 'unittest.main', ([], {}), '()\n', (205, 207), False, 'import unittest\n')] |
from __future__ import print_function, absolute_import
from distutils import sysconfig
from distutils import version
from distutils.core import Extension
import glob
import io
import multiprocessing
import os
import re
import subprocess
import sys
import warnings
from textwrap import fill
PY3 = (sys.version_info[0] >= 3)
# Prefer the stdlib implementation (Python >= 2.7); fall back to a
# pure-Python backport on 2.6.
try:
    from subprocess import check_output
except ImportError:
    # check_output is not available in Python 2.6
    def check_output(*popenargs, **kwargs):
        """
        Run command with arguments and return its output as a byte
        string.

        Backported from Python 2.7 as it's implemented as pure python
        on stdlib.
        """
        process = subprocess.Popen(
            stdout=subprocess.PIPE, *popenargs, **kwargs)
        output, unused_err = process.communicate()
        retcode = process.poll()
        if retcode:
            # mirror CalledProcessError semantics: attach the captured
            # output to the exception before raising
            cmd = kwargs.get("args")
            if cmd is None:
                cmd = popenargs[0]
            error = subprocess.CalledProcessError(retcode, cmd)
            error.output = output
            raise error
        return output
if sys.platform != 'win32':
if sys.version_info[0] < 3:
from commands import getstatusoutput
else:
from subprocess import getstatusoutput
if PY3:
import configparser
else:
import ConfigParser as configparser
# matplotlib build options, which can be altered using setup.cfg
options = {
    'display_status': True,
    'verbose': False,
    'backend': None,
    'basedirlist': None
    }

# MPLSETUPCFG lets CI/packagers point at an alternate config file.
setup_cfg = os.environ.get('MPLSETUPCFG', 'setup.cfg')
if os.path.exists(setup_cfg):
    config = configparser.SafeConfigParser()
    config.read(setup_cfg)

    # NOTE(review): each option is read best-effort; the bare excepts
    # deliberately ignore missing sections/keys but would also hide
    # malformed values. Consider configparser.Error instead.
    try:
        options['display_status'] = not config.getboolean("status", "suppress")
    except:
        pass

    try:
        options['backend'] = config.get("rc_options", "backend")
    except:
        pass

    try:
        options['basedirlist'] = [
            x.strip() for x in
            config.get("directories", "basedirlist").split(',')]
    except:
        pass
else:
    config = None
def get_win32_compiler():
    """
    Determine the compiler being used on win32.
    """
    # Crude heuristic (as the original comment admits): assume mingw32
    # whenever it is mentioned anywhere on the command line, otherwise
    # fall back to MSVC.
    if any('mingw32' in arg for arg in sys.argv):
        return 'mingw32'
    return 'msvc'
win32_compiler = get_win32_compiler()
def extract_versions():
    """
    Extracts version values from the main matplotlib __init__.py and
    returns them as a dictionary.
    """
    with open('lib/matplotlib/__init__.py') as fd:
        # Execute only the __version__ assignment line(s); the bound
        # names land in this function's local namespace.
        for line in fd:
            if line.startswith('__version__'):
                exec(line.strip())
    return locals()
def has_include_file(include_dirs, filename):
    """
    Returns `True` if `filename` can be found in one of the
    directories in `include_dirs`.
    """
    return any(
        os.path.exists(os.path.join(directory, filename))
        for directory in include_dirs)
def check_include_file(include_dirs, filename, package):
    """
    Raises an exception if the given include file can not be found.
    """
    if sys.platform == 'win32':
        # On Windows, also honor the directories in the INCLUDE
        # environment variable (set by the MSVC toolchain).
        include_dirs.extend(os.getenv('INCLUDE', '.').split(';'))
    if not has_include_file(include_dirs, filename):
        raise CheckFailed(
            "The C/C++ header for %s (%s) could not be found.  You "
            "may need to install the development package." %
            (package, filename))
def get_base_dirs():
    """
    Returns a list of standard base directories on this platform.
    """
    # An explicit basedirlist from setup.cfg overrides all defaults.
    if options['basedirlist']:
        return options['basedirlist']

    platform_defaults = {
        'win32': ['win32_static'],
        'darwin': ['/usr/local/', '/usr', '/usr/X11', '/opt/local'],
        'sunos5': [os.getenv('MPLIB_BASE') or '/usr/local'],
        'gnu0': ['/usr'],
        'aix5': ['/usr/local'],
    }
    return platform_defaults.get(sys.platform, ['/usr/local', '/usr'])
def is_min_version(found, minversion):
    """
    Returns `True` if `found` is at least as high a version as
    `minversion`.
    """
    # LooseVersion handles dotted, mixed alpha-numeric version strings.
    return version.LooseVersion(found) >= version.LooseVersion(minversion)
# Define the display functions only if display_status is True.
# Otherwise replace them with no-ops so callers need not check.
if options['display_status']:
    def print_line(char='='):
        # full-width horizontal rule
        print(char * 76)

    def print_status(package, status):
        # right-aligned "package: status", wrapped to 76 columns
        initial_indent = "%22s: " % package
        indent = ' ' * 24
        print(fill(str(status), width=76,
                   initial_indent=initial_indent,
                   subsequent_indent=indent))

    def print_message(message):
        # bulleted note, indented under the status column
        indent = ' ' * 24 + "* "
        print(fill(str(message), width=76,
                   initial_indent=indent,
                   subsequent_indent=indent))

    def print_raw(section):
        print(section)
else:
    def print_line(*args, **kwargs):
        pass
    print_status = print_message = print_raw = print_line
# Remove the -Wstrict-prototypes option, as it's not valid for C++.
# We wrap distutils' customize_compiler to strip it after the fact.
customize_compiler = sysconfig.customize_compiler


def my_customize_compiler(compiler):
    retval = customize_compiler(compiler)
    try:
        # compiler_so may be absent (e.g. on MSVC) or the flag unset
        compiler.compiler_so.remove('-Wstrict-prototypes')
    except (ValueError, AttributeError):
        pass
    return retval

sysconfig.customize_compiler = my_customize_compiler
def make_extension(name, files, *args, **kwargs):
    """
    Make a new extension.  Automatically sets include_dirs and
    library_dirs to the base directories appropriate for this
    platform.

    `name` is the name of the extension.

    `files` is a list of source files.

    Any additional arguments are passed to the
    `distutils.core.Extension` constructor.
    """
    ext = DelayedExtension(name, files, *args, **kwargs)
    # Add <base>/include and <base>/lib{,64} for every standard base dir
    # that actually exists on this machine.
    for dir in get_base_dirs():
        include_dir = os.path.join(dir, 'include')
        if os.path.exists(include_dir):
            ext.include_dirs.append(include_dir)
        for lib in ('lib', 'lib64'):
            lib_dir = os.path.join(dir, lib)
            if os.path.exists(lib_dir):
                ext.library_dirs.append(lib_dir)
    # Always search the source tree itself.
    ext.include_dirs.append('.')

    return ext
class PkgConfig(object):
    """
    This is a class for communicating with pkg-config.
    """
    def __init__(self):
        """
        Determines whether pkg-config exists on this machine.
        """
        if sys.platform == 'win32':
            self.has_pkgconfig = False
        else:
            self.set_pkgconfig_path()
            status, output = getstatusoutput("pkg-config --help")
            self.has_pkgconfig = (status == 0)

    def set_pkgconfig_path(self):
        # Append the interpreter's LIBDIR/pkgconfig to PKG_CONFIG_PATH so
        # pkg-config also finds .pc files shipped with this Python.
        pkgconfig_path = sysconfig.get_config_var('LIBDIR')
        if pkgconfig_path is None:
            return

        pkgconfig_path = os.path.join(pkgconfig_path, 'pkgconfig')
        if not os.path.isdir(pkgconfig_path):
            return

        try:
            os.environ['PKG_CONFIG_PATH'] += ':' + pkgconfig_path
        except KeyError:
            os.environ['PKG_CONFIG_PATH'] = pkgconfig_path

    # NOTE(review): the mutable default arguments below are read-only in
    # this method, so they are harmless, but None defaults would be safer.
    def setup_extension(self, ext, package, default_include_dirs=[],
                        default_library_dirs=[], default_libraries=[],
                        alt_exec=None):
        """
        Add parameters to the given `ext` for the given `package`.
        """
        flag_map = {
            '-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries'}

        executable = alt_exec
        if self.has_pkgconfig:
            executable = 'pkg-config {0}'.format(package)

        use_defaults = True

        if executable is not None:
            command = "{0} --libs --cflags ".format(executable)

            try:
                output = check_output(command, shell=True,
                                      stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                pass
            else:
                output = output.decode(sys.getfilesystemencoding())
                use_defaults = False
                # Map each -I/-L/-l token onto the matching Extension
                # attribute, prepending so they take precedence.
                for token in output.split():
                    attr = flag_map.get(token[:2])
                    if attr is not None:
                        getattr(ext, attr).insert(0, token[2:])

        if use_defaults:
            # pkg-config unavailable or failed: fall back to probing the
            # caller-provided default directories under each base dir.
            basedirs = get_base_dirs()
            for base in basedirs:
                for include in default_include_dirs:
                    dir = os.path.join(base, include)
                    if os.path.exists(dir):
                        ext.include_dirs.append(dir)
                for lib in default_library_dirs:
                    dir = os.path.join(base, lib)
                    if os.path.exists(dir):
                        ext.library_dirs.append(dir)
            ext.libraries.extend(default_libraries)
            return True

        return False

    def get_version(self, package):
        """
        Get the version of the package from pkg-config.
        """
        if not self.has_pkgconfig:
            return None

        status, output = getstatusoutput(
            "pkg-config %s --modversion" % (package))
        if status == 0:
            return output
        return None
# The PkgConfig class should be used through this singleton
pkg_config = PkgConfig()
class CheckFailed(Exception):
    """
    Exception thrown when a `SetupPackage.check` method fails.
    """
    pass
class SetupPackage(object):
    """Base class for one build-time component of matplotlib.

    Subclasses override `check` and the various `get_*` hooks; the
    results are merged into the arguments passed to `distutils.setup`.
    """
    # True for packages that may be skipped without failing the build.
    optional = False

    def check(self):
        """
        Checks whether the dependencies are met.  Should raise a
        `CheckFailed` exception if the dependency could not be met,
        otherwise return a string indicating a version number or some
        other message indicating what was found.
        """
        pass

    def get_packages(self):
        """
        Get a list of package names to add to the configuration.
        These are added to the `packages` list passed to
        `distutils.setup`.
        """
        return []

    def get_namespace_packages(self):
        """
        Get a list of namespace package names to add to the configuration.
        These are added to the `namespace_packages` list passed to
        `distutils.setup`.
        """
        return []

    def get_py_modules(self):
        """
        Get a list of top-level modules to add to the configuration.
        These are added to the `py_modules` list passed to
        `distutils.setup`.
        """
        return []

    def get_package_data(self):
        """
        Get a package data dictionary to add to the configuration.
        These are merged into to the `package_data` list passed to
        `distutils.setup`.
        """
        return {}

    def get_extension(self):
        """
        Get a list of C extensions (`distutils.core.Extension`
        objects) to add to the configuration.  These are added to the
        `extensions` list passed to `distutils.setup`.
        """
        return None

    def get_install_requires(self):
        """
        Get a list of Python packages that we require.
        pip/easy_install will attempt to download and install this
        package if it is not installed.
        """
        return []

    def get_setup_requires(self):
        """
        Get a list of Python packages that we require at build time.
        pip/easy_install will attempt to download and install this
        package if it is not installed.
        """
        return []

    # NOTE(review): the `version` parameter shadows the module-level
    # `distutils.version` import inside this method (harmless here since
    # only `is_min_version` needs the module).
    def _check_for_pkg_config(self, package, include_file, min_version=None,
                              version=None):
        """
        A convenience function for writing checks for a
        pkg_config-defined dependency.

        `package` is the pkg_config package name.

        `include_file` is a top-level include file we expect to find.

        `min_version` is the minimum version required.

        `version` will override the found version if this package
        requires an alternate method for that.
        """
        if version is None:
            version = pkg_config.get_version(package)

            if version is None:
                raise CheckFailed(
                    "pkg-config information for '%s' could not be found." %
                    package)

        if min_version == 'PATCH':
            raise CheckFailed(
                "Requires patches that have not been merged upstream.")

        if min_version:
            if (not is_min_version(version, min_version)):
                raise CheckFailed(
                    "Requires %s %s or later.  Found %s." %
                    (package, min_version, version))

        ext = self.get_extension()
        if ext is None:
            # No extension of our own: use a throwaway one just to
            # collect the include dirs pkg-config reports.
            ext = make_extension('test', [])
            pkg_config.setup_extension(ext, package)

        check_include_file(ext.include_dirs, include_file, package)

        return 'version %s' % version
class OptionalPackage(SetupPackage):
    """A SetupPackage that the user may enable/disable via setup.cfg."""
    optional = True
    # When True, the package is built even if the user opted out (used
    # for backends that other backends depend on).
    force = False
    config_category = "packages"

    def get_config(self):
        """
        Look at `setup.cfg` and return one of ["auto", True, False] indicating
        if the package is at default state ("auto"), forced by the user (True)
        or opted-out (False).
        """
        try:
            return config.getboolean(self.config_category, self.name)
        except:
            return "auto"

    def check(self):
        """
        Do not override this method!

        For custom dependency checks override self.check_requirements().
        Two things are checked: Configuration file and requirements.
        """
        # Check configuration file
        conf = self.get_config()
        # Default "auto" state or install forced by user
        if conf in [True, 'auto']:
            message = "installing"
            # Set non-optional if user sets `True` in config
            if conf is True:
                self.optional = False
        # Configuration opt-out by user
        else:
            # Some backend extensions (e.g. Agg) need to be built for certain
            # other GUI backends (e.g. TkAgg) even when manually disabled
            if self.force is True:
                message = "installing forced (config override)"
            else:
                raise CheckFailed("skipping due to configuration")

        # Check requirements and add extra information (if any) to message.
        # If requirements are not met a CheckFailed should be raised in there.
        additional_info = self.check_requirements()
        if additional_info:
            message += ", " + additional_info

        # No CheckFailed raised until now, return install message.
        return message

    def check_requirements(self):
        """
        Override this method to do custom dependency checks.

         - Raise CheckFailed() if requirements are not met.
         - Return message with additional information, or an empty string
           (or None) for no additional information.
        """
        return ""
class OptionalBackendPackage(OptionalPackage):
    """An OptionalPackage configured under [gui_support] in setup.cfg."""
    config_category = "gui_support"
class Platform(SetupPackage):
    """Reports the build platform in the status output."""
    name = "platform"

    def check(self):
        return sys.platform
class Python(SetupPackage):
    """Verifies the running Python interpreter version."""
    name = "python"

    def check(self):
        # sys.version_info is (major, minor, micro, releaselevel, serial);
        # the last two fields are unused here.
        major, minor1, minor2, s, tmp = sys.version_info

        if major < 2:
            raise CheckFailed(
                "Requires Python 2.6 or later")
        elif major == 2 and minor1 < 6:
            raise CheckFailed(
                "Requires Python 2.6 or later (in the 2.x series)")
        elif major == 3 and minor1 < 1:
            raise CheckFailed(
                "Requires Python 3.1 or later (in the 3.x series)")

        return sys.version
class Matplotlib(SetupPackage):
    """The core matplotlib package itself: sub-packages and data files."""
    name = "matplotlib"

    def check(self):
        # The version is read out of lib/matplotlib/__init__.py.
        return extract_versions()['__version__']

    def get_packages(self):
        return [
            'matplotlib',
            'matplotlib.backends',
            'matplotlib.backends.qt_editor',
            'matplotlib.compat',
            'matplotlib.projections',
            'matplotlib.axes',
            'matplotlib.sphinxext',
            'matplotlib.style',
            'matplotlib.testing',
            'matplotlib.testing.jpl_units',
            'matplotlib.tri',
            ]

    def get_py_modules(self):
        return ['pylab']

    def get_package_data(self):
        # Non-Python files bundled inside the installed package: fonts,
        # images, the default matplotlibrc, web-backend assets, styles.
        return {
            'matplotlib':
            [
                'mpl-data/fonts/afm/*.afm',
                'mpl-data/fonts/pdfcorefonts/*.afm',
                'mpl-data/fonts/pdfcorefonts/*.txt',
                'mpl-data/fonts/ttf/*.ttf',
                'mpl-data/fonts/ttf/LICENSE_STIX',
                'mpl-data/fonts/ttf/COPYRIGHT.TXT',
                'mpl-data/fonts/ttf/README.TXT',
                'mpl-data/fonts/ttf/RELEASENOTES.TXT',
                'mpl-data/images/*.xpm',
                'mpl-data/images/*.svg',
                'mpl-data/images/*.gif',
                'mpl-data/images/*.png',
                'mpl-data/images/*.ppm',
                'mpl-data/example/*.npy',
                'mpl-data/matplotlibrc',
                'backends/web_backend/*.*',
                'backends/web_backend/jquery/js/*',
                'backends/web_backend/jquery/css/themes/base/*.*',
                'backends/web_backend/jquery/css/themes/base/images/*',
                'backends/web_backend/css/*.*',
                'backends/Matplotlib.nib/*',
                'style/stylelib/*.mplstyle',
             ]}
class SampleData(OptionalPackage):
    """
    Packages the sample data shipped with matplotlib.  Technically
    optional, but almost always wanted.
    """
    name = "sample_data"

    def get_package_data(self):
        # Everything under mpl-data/sample_data, including the axes_grid
        # subdirectory, is shipped as package data.
        patterns = [
            'mpl-data/sample_data/*.*',
            'mpl-data/sample_data/axes_grid/*.*',
        ]
        return {'matplotlib': patterns}
class Toolkits(OptionalPackage):
    """Declares the mpl_toolkits namespace package and its subpackages."""
    name = "toolkits"

    def get_packages(self):
        subpackages = ['mplot3d', 'axes_grid', 'axes_grid1', 'axisartist']
        return ['mpl_toolkits'] + [
            'mpl_toolkits.' + sub for sub in subpackages]

    def get_namespace_packages(self):
        return ['mpl_toolkits']
class Tests(OptionalPackage):
    # Packaging and dependency reporting for the matplotlib test suite.
    name = "tests"
    nose_min_version = '0.11.1'

    def check(self):
        """Report which test-suite dependencies (nose, mock) are available."""
        super(Tests, self).check()

        msgs = []
        msg_template = ('{package} is required to run the matplotlib test '
                        'suite. pip/easy_install may attempt to install it '
                        'after matplotlib.')

        bad_nose = msg_template.format(
            package='nose %s or later' % self.nose_min_version
        )
        try:
            import nose
            if is_min_version(nose.__version__, self.nose_min_version):
                msgs += ['using nose version %s' % nose.__version__]
            else:
                msgs += [bad_nose]
        except ImportError:
            msgs += [bad_nose]

        if sys.version_info >= (3, 3):
            # unittest.mock is part of the standard library from 3.3 on.
            msgs += ['using unittest.mock']
        else:
            try:
                import mock
                msgs += ['using mock %s' % mock.__version__]
            except ImportError:
                msgs += [msg_template.format(package='mock')]

        return ' / '.join(msgs)

    def get_packages(self):
        return [
            'matplotlib.tests',
        ]

    def get_package_data(self):
        # Each baseline-image directory becomes a glob pattern so the
        # reference images ship with the installed tests.
        baseline_images = [
            'tests/baseline_images/%s/*' % x
            for x in os.listdir('lib/matplotlib/tests/baseline_images')]

        return {
            'matplotlib':
            baseline_images +
            [
                'tests/mpltest.ttf',
                'tests/test_rcparams.rc'
            ]}

    def get_install_requires(self):
        requires = ['nose>=%s' % self.nose_min_version]
        if not sys.version_info >= (3, 3):
            requires += ['mock']
        return requires
class DelayedExtension(Extension, object):
    """
    A distutils Extension subclass where some of its members
    may have delayed computation until reaching the build phase.

    This is so we can, for example, get the Numpy include dirs
    after pip has installed Numpy for us if it wasn't already
    on the system.
    """
    def __init__(self, *args, **kwargs):
        super(DelayedExtension, self).__init__(*args, **kwargs)
        self._finalized = False  # hooks only fire after finalize()
        self._hooks = {}  # member name -> callable returning extra values

    def add_hook(self, member, func):
        """
        Add a hook to dynamically compute a member.

        Parameters
        ----------
        member : string
            The name of the member

        func : callable
            The function to call to get dynamically-computed values
            for the member.
        """
        self._hooks[member] = func

    def finalize(self):
        # After this point, DelayedMember lookups invoke the hooks.
        self._finalized = True

    class DelayedMember(property):
        # Descriptor that prepends hook-computed values to the stored
        # list, but only once the owning extension has been finalized.
        def __init__(self, name):
            self._name = name

        def __get__(self, obj, objtype=None):
            # Fall back to an empty list if the member was never set.
            result = getattr(obj, '_' + self._name, [])

            if obj._finalized:
                if self._name in obj._hooks:
                    result = obj._hooks[self._name]() + result

            return result

        def __set__(self, obj, value):
            setattr(obj, '_' + self._name, value)

    # include_dirs is the one member that currently needs delayed
    # computation (see Numpy.include_dirs_hook).
    include_dirs = DelayedMember('include_dirs')
class Numpy(SetupPackage):
    name = "numpy"

    @staticmethod
    def include_dirs_hook():
        # Re-import numpy after clearing __NUMPY_SETUP__ so that a copy
        # installed by pip during this very build is picked up correctly
        # (numpy's own setup sets that flag while it is being installed).
        if sys.version_info[0] >= 3:
            import builtins
            if hasattr(builtins, '__NUMPY_SETUP__'):
                del builtins.__NUMPY_SETUP__
            import imp
            import numpy
            imp.reload(numpy)
        else:
            import __builtin__
            if hasattr(__builtin__, '__NUMPY_SETUP__'):
                del __builtin__.__NUMPY_SETUP__
            import numpy
            reload(numpy)

        # Probe a throwaway Extension to verify the numpy C headers exist.
        ext = Extension('test', [])
        ext.include_dirs.append(numpy.get_include())
        if not has_include_file(
                ext.include_dirs, os.path.join("numpy", "arrayobject.h")):
            warnings.warn(
                "The C headers for numpy could not be found. "
                "You may need to install the development package")

        return [numpy.get_include()]

    def check(self):
        """Abort the build if an installed numpy is older than required."""
        min_version = extract_versions()['__version__numpy__']
        try:
            import numpy
        except ImportError:
            return 'not found. pip may install it below.'

        if not is_min_version(numpy.__version__, min_version):
            raise SystemExit(
                "Requires numpy %s or later to build. (Found %s)" %
                (min_version, numpy.__version__))

        return 'version %s' % numpy.__version__

    def add_flags(self, ext):
        # Ensure that PY_ARRAY_UNIQUE_SYMBOL is uniquely defined for
        # each extension
        array_api_name = 'MPL_' + ext.name.replace('.', '_') + '_ARRAY_API'
        ext.define_macros.append(('PY_ARRAY_UNIQUE_SYMBOL', array_api_name))
        # Include dirs are resolved lazily via DelayedExtension.
        ext.add_hook('include_dirs', self.include_dirs_hook)

    def get_setup_requires(self):
        return ['numpy>=1.5']

    def get_install_requires(self):
        return ['numpy>=1.5']
class CXX(SetupPackage):
    # Decides between an externally-installed PyCXX and the copy bundled
    # under extern/CXX.
    name = 'pycxx'

    def check(self):
        if PY3:
            # There is no version of PyCXX in the wild that will work
            # with Python 3.x
            self.__class__.found_external = False
            return ("Official versions of PyCXX are not compatible with "
                    "Python 3.x. Using local copy")

        self.__class__.found_external = True
        old_stdout = sys.stdout
        # Importing CXX can print noise; silence stdout while probing.
        if PY3:
            sys.stdout = io.StringIO()
        else:
            sys.stdout = io.BytesIO()

        try:
            import CXX
        except ImportError:
            self.__class__.found_external = False
            return "Couldn't import. Using local copy."
        finally:
            sys.stdout = old_stdout

        try:
            return self._check_for_pkg_config(
                'PyCXX', 'CXX/Extensions.hxx', min_version='6.2.4')
        except CheckFailed as e:
            # It's ok to just proceed here, since the `import CXX`
            # worked above, and PyCXX (at least upstream) ensures that
            # its header files are on the default distutils include
            # path (either in a standard C place such as /usr/include,
            # or in /usr/include/pythonX.Y.
            return 'Using system CXX (version unknown, no pkg-config info)'

    def add_flags(self, ext):
        if self.found_external and not 'sdist' in sys.argv:
            # Use the system-installed PyCXX support sources.
            support_dir = os.path.normpath(
                os.path.join(
                    sys.prefix,
                    'share',
                    'python%d.%d' % (
                        sys.version_info[0], sys.version_info[1]),
                    'CXX'))
            if not os.path.exists(support_dir):
                # On Fedora 17, these files are installed in /usr/share/CXX
                support_dir = '/usr/src/CXX'
            ext.sources.extend([
                os.path.join(support_dir, x) for x in
                ['cxxsupport.cxx', 'cxx_extensions.cxx',
                 'IndirectPythonInterface.cxx',
                 'cxxextensions.c']])
            pkg_config.setup_extension(ext, 'PyCXX')
        else:
            # Fall back to the bundled sources under extern/CXX.
            ext.include_dirs.append('extern')
            ext.sources.extend(glob.glob('extern/CXX/*.cxx'))
            ext.sources.extend(glob.glob('extern/CXX/*.c'))
        ext.define_macros.append(('PYCXX_ISO_CPP_LIB', '1'))
        if PY3:
            ext.define_macros.append(('PYCXX_PYTHON_2TO3', '1'))
        # visual studio provides these implicitly; elsewhere link them.
        if not (sys.platform == 'win32' and win32_compiler == 'msvc'):
            ext.libraries.append('stdc++')
            ext.libraries.append('m')
class LibAgg(SetupPackage):
    """Locates the Agg rendering library, falling back to the bundled copy."""
    name = 'libagg'

    def check(self):
        self.__class__.found_external = True
        try:
            return self._check_for_pkg_config(
                'libagg', 'agg2/agg_basics.h', min_version='PATCH')
        except CheckFailed as e:
            self.__class__.found_external = False
            return str(e) + ' Using local copy.'

    def add_flags(self, ext):
        if self.found_external:
            pkg_config.setup_extension(ext, 'libagg')
            return
        # Compile the handful of Agg translation units we need from the
        # bundled sources under extern/agg24.
        ext.include_dirs.append('extern/agg24/include')
        for fname in ('agg_bezier_arc.cpp',
                      'agg_curves.cpp',
                      'agg_image_filters.cpp',
                      'agg_trans_affine.cpp',
                      'agg_vcgen_contour.cpp',
                      'agg_vcgen_dash.cpp',
                      'agg_vcgen_stroke.cpp',
                      'agg_vpgen_segmentator.cpp'):
            ext.sources.append(os.path.join('extern', 'agg24', 'src', fname))
class FreeType(SetupPackage):
    """Locates the FreeType font-rendering library."""
    name = "freetype"

    def check(self):
        if sys.platform == 'win32':
            return "Unknown version"

        # Ask freetype-config first; pkg-config check refines the answer.
        status, output = getstatusoutput("freetype-config --version")
        version = output if status == 0 else None

        return self._check_for_pkg_config(
            'freetype2', 'ft2build.h',
            min_version='2.4', version=version)

    def add_flags(self, ext):
        pkg_config.setup_extension(
            ext, 'freetype2',
            default_include_dirs=[
                'freetype2', 'lib/freetype2/include',
                'lib/freetype2/include/freetype2'],
            default_library_dirs=[
                'freetype2/lib'],
            default_libraries=['freetype', 'z'],
            alt_exec='freetype-config')

    def get_extension(self):
        # A throwaway extension used only to carry the flag probing.
        ext = make_extension('freetype2', [])
        self.add_flags(ext)
        return ext
class FT2Font(SetupPackage):
    """Builds the matplotlib.ft2font extension (FreeType bindings)."""
    name = 'ft2font'

    def get_extension(self):
        ext = make_extension(
            'matplotlib.ft2font',
            ['src/ft2font.cpp', 'src/mplutils.cpp'])
        for dep in (FreeType(), Numpy(), CXX()):
            dep.add_flags(ext)
        return ext
class Png(SetupPackage):
    """Locates libpng and builds the matplotlib._png extension."""
    name = "png"

    def check(self):
        try:
            return self._check_for_pkg_config(
                'libpng', 'png.h',
                min_version='1.2')
        except CheckFailed as e:
            self.__class__.found_external = False
            return str(e) + ' Using unknown version.'

    def get_extension(self):
        ext = make_extension(
            'matplotlib._png',
            ['src/_png.cpp', 'src/mplutils.cpp'])
        pkg_config.setup_extension(
            ext, 'libpng', default_libraries=['png', 'z'])
        for dep in (Numpy(), CXX()):
            dep.add_flags(ext)
        return ext
class Qhull(SetupPackage):
    # Prefers a system-installed qhull; falls back to the bundled copy.
    name = "qhull"

    def check(self):
        self.__class__.found_external = True
        try:
            return self._check_for_pkg_config(
                'qhull', 'qhull/qhull_a.h', min_version='2003.1')
        except CheckFailed as e:
            self.__class__.found_pkgconfig = False
            # Qhull may not be in the pkg-config system but may still be
            # present on this system, so check if the header files can be
            # found.
            include_dirs = [
                os.path.join(x, 'include', 'qhull') for x in get_base_dirs()]
            if has_include_file(include_dirs, 'qhull_a.h'):
                return 'Using system Qhull (version unknown, no pkg-config info)'
            else:
                self.__class__.found_external = False
                return str(e) + ' Using local copy.'

    def add_flags(self, ext):
        if self.found_external:
            pkg_config.setup_extension(ext, 'qhull',
                                       default_libraries=['qhull'])
        else:
            # Compile the bundled C sources directly into the extension.
            ext.include_dirs.append('extern')
            ext.sources.extend(glob.glob('extern/qhull/*.c'))
class TTConv(SetupPackage):
    """Builds matplotlib.ttconv, the TrueType font conversion extension."""
    name = "ttconv"

    def get_extension(self):
        ttconv_sources = [
            'src/_ttconv.cpp',
            'extern/ttconv/pprdrv_tt.cpp',
            'extern/ttconv/pprdrv_tt2.cpp',
            'extern/ttconv/ttutil.cpp'
        ]
        ext = make_extension('matplotlib.ttconv', ttconv_sources)
        for dep in (Numpy(), CXX()):
            dep.add_flags(ext)
        # Bundled ttconv headers live under extern/.
        ext.include_dirs.append('extern')
        return ext
class Path(SetupPackage):
    """Builds the matplotlib._path geometry extension."""
    name = "path"

    def get_extension(self):
        ext = make_extension('matplotlib._path', [
            'src/_path.cpp',
            'src/path_cleanup.cpp',
            'src/agg_py_transforms.cpp'
        ])
        for dep in (Numpy(), LibAgg(), CXX()):
            dep.add_flags(ext)
        return ext
class Image(SetupPackage):
    """Builds the matplotlib._image extension."""
    name = "image"

    def get_extension(self):
        ext = make_extension(
            'matplotlib._image',
            ['src/_image.cpp', 'src/mplutils.cpp'])
        for dep in (Numpy(), LibAgg(), CXX()):
            dep.add_flags(ext)
        return ext
class Contour(SetupPackage):
    """Builds the matplotlib._cntr contouring extension."""
    name = "contour"

    def get_extension(self):
        ext = make_extension('matplotlib._cntr', ["src/cntr.c"])
        Numpy().add_flags(ext)
        return ext
class Delaunay(SetupPackage):
    """Builds the legacy matplotlib._delaunay triangulation extension."""
    name = "delaunay"

    def get_packages(self):
        return ['matplotlib.delaunay']

    def get_extension(self):
        base_dir = 'lib/matplotlib/delaunay'
        filenames = ["_delaunay.cpp", "VoronoiDiagramGenerator.cpp",
                     "delaunay_utils.cpp", "natneighbors.cpp"]
        ext = make_extension(
            'matplotlib._delaunay',
            [os.path.join(base_dir, fname) for fname in filenames])
        Numpy().add_flags(ext)
        return ext
class QhullWrap(SetupPackage):
    """Builds matplotlib._qhull, the C wrapper around the qhull library."""
    name = "qhull_wrap"

    def get_extension(self):
        # MPL_DEVNULL lets the C code redirect qhull's chatter away.
        ext = make_extension('matplotlib._qhull', ['src/qhull_wrap.c'],
                             define_macros=[('MPL_DEVNULL', os.devnull)])
        Numpy().add_flags(ext)
        Qhull().add_flags(ext)
        return ext
class Tri(SetupPackage):
    """Builds the matplotlib._tri triangulation extension."""
    name = "tri"

    def get_extension(self):
        ext = make_extension('matplotlib._tri', [
            "lib/matplotlib/tri/_tri.cpp",
            "src/mplutils.cpp"
        ])
        for dep in (Numpy(), CXX()):
            dep.add_flags(ext)
        return ext
class Six(SetupPackage):
    """Checks for the six Python-2/3 compatibility library."""
    name = "six"
    min_version = "1.3"

    def check(self):
        try:
            import six
        except ImportError:
            return (
                "six was not found.")

        found = six.__version__
        if not is_min_version(found, self.min_version):
            raise CheckFailed(
                "Requires six %s or later.  Found %s." %
                (self.min_version, found))

        return "using six version %s" % found

    def get_install_requires(self):
        return ['six>={0}'.format(self.min_version)]
class Dateutil(SetupPackage):
    """Checks for python-dateutil, required for date axis support."""
    name = "dateutil"

    def __init__(self, version=None):
        # Optional pip version specifier to append, e.g. '!=2.1'.
        self.version = version

    def check(self):
        try:
            import dateutil
        except ImportError:
            # dateutil 2.1 has a file encoding bug that breaks installation on
            # python 3.3
            # https://github.com/matplotlib/matplotlib/issues/2373
            # hack around the problem by installing the (working) v2.0
            if self.version is None and sys.version_info[:2] == (3, 3):
                self.version = '!=2.1'
            return (
                "dateutil was not found. It is required for date axis "
                "support. pip/easy_install may attempt to install it "
                "after matplotlib.")

        return "using dateutil version %s" % dateutil.__version__

    def get_install_requires(self):
        spec = 'python-dateutil'
        if self.version is not None:
            spec += self.version
        return [spec]
class Tornado(OptionalPackage):
    """Checks for tornado, which the WebAgg backend requires."""
    name = "tornado"

    def check(self):
        try:
            import tornado
        except ImportError:
            return (
                "tornado was not found. It is required for the WebAgg "
                "backend. pip/easy_install may attempt to install it "
                "after matplotlib.")

        return "using tornado version %s" % tornado.version
class Pyparsing(SetupPackage):
    """Checks for pyparsing, required for mathtext support."""
    name = "pyparsing"

    def is_ok(self):
        # pyparsing 2.0.0 bug, but it may be patched in distributions
        try:
            import pyparsing
            fwd = pyparsing.Forward()
            fwd <<= pyparsing.Literal('a')
            return fwd is not None
        except (ImportError, TypeError):
            return False

    def check(self):
        try:
            import pyparsing
        except ImportError:
            return (
                "pyparsing was not found. It is required for mathtext "
                "support. pip/easy_install may attempt to install it "
                "after matplotlib.")

        required = [1, 5, 6]
        found = [int(part) for part in pyparsing.__version__.split('.')]
        if found < required:
            return (
                "matplotlib requires pyparsing >= {0}".format(
                    '.'.join(str(x) for x in required)))

        if not self.is_ok():
            return (
                "Your pyparsing contains a bug that will be monkey-patched by "
                "matplotlib.  For best results, upgrade to pyparsing 2.0.1 or "
                "later.")

        return "using pyparsing version %s" % pyparsing.__version__

    def get_install_requires(self):
        if self.is_ok():
            return ['pyparsing>=1.5.6']
        # Exclude the broken 2.0.0 release when we can't patch it.
        return ['pyparsing>=1.5.6,!=2.0.0']
class BackendAgg(OptionalBackendPackage):
    """Builds the Agg rasterizing backend extension."""
    name = "agg"

    def get_extension(self):
        ext = make_extension('matplotlib.backends._backend_agg', [
            "src/mplutils.cpp",
            "src/agg_py_transforms.cpp",
            "src/_backend_agg.cpp"
        ])
        for dep in (Numpy(), LibAgg(), FreeType(), CXX()):
            dep.add_flags(ext)
        return ext
class BackendTkAgg(OptionalBackendPackage):
    # Builds the TkAgg backend; most of the complexity is locating the
    # Tcl/Tk headers and libraries on each platform.
    name = "tkagg"

    def __init__(self):
        # Cached result of query_tcltk(); Tk must not be queried more
        # than once per process (see query_tcltk docstring).
        self.tcl_tk_cache = None

    def check_requirements(self):
        """Verify Tkinter imports and Tcl/Tk is at least version 8.3."""
        try:
            if PY3:
                import tkinter as Tkinter
            else:
                import Tkinter
        except ImportError:
            raise CheckFailed('TKAgg requires Tkinter.')
        except RuntimeError:
            raise CheckFailed('Tkinter present but import failed.')
        else:
            if Tkinter.TkVersion < 8.3:
                raise CheckFailed("Tcl/Tk v8.3 or later required.")

        ext = self.get_extension()
        check_include_file(ext.include_dirs, "tk.h", "Tk")

        try:
            tk_v = Tkinter.__version__.split()[-2]
        except (AttributeError, IndexError):
            # Tkinter.__version__ has been removed in python 3
            tk_v = 'version not identified'

        # TkAgg renders through Agg, so force that backend on as well.
        BackendAgg.force = True

        return "version %s" % tk_v

    def get_extension(self):
        sources = [
            'src/agg_py_transforms.cpp',
            'src/_tkagg.cpp'
        ]

        ext = make_extension('matplotlib.backends._tkagg', sources)
        self.add_flags(ext)
        Numpy().add_flags(ext)
        LibAgg().add_flags(ext)
        CXX().add_flags(ext)
        return ext

    def query_tcltk(self):
        """
        Tries to open a Tk window in order to query the Tk object
        about its library paths. This should never be called more
        than once by the same process, as Tk intricacies may cause the
        Python interpreter to hang. The function also has a workaround
        if no X server is running (useful for autobuild systems).
        """
        # Use cached values if they exist, which ensures this function
        # only executes once
        if self.tcl_tk_cache is not None:
            return self.tcl_tk_cache

        # By this point, we already know that Tkinter imports correctly
        if PY3:
            import tkinter as Tkinter
        else:
            import Tkinter
        tcl_lib_dir = ''
        tk_lib_dir = ''
        # First try to open a Tk window (requires a running X server)
        try:
            tk = Tkinter.Tk()
        except Tkinter.TclError:
            # Next, start Tcl interpreter without opening a Tk window
            # (no need for X server) This feature is available in
            # python version 2.4 and up
            try:
                tcl = Tkinter.Tcl()
            except AttributeError:  # Python version not high enough
                pass
            except Tkinter.TclError:  # Something went wrong while opening Tcl
                pass
            else:
                tcl_lib_dir = str(tcl.getvar('tcl_library'))
                # Guess Tk location based on Tcl location
                (head, tail) = os.path.split(tcl_lib_dir)
                tail = tail.replace('Tcl', 'Tk').replace('tcl', 'tk')
                tk_lib_dir = os.path.join(head, tail)
                if not os.path.exists(tk_lib_dir):
                    tk_lib_dir = tcl_lib_dir.replace(
                        'Tcl', 'Tk').replace('tcl', 'tk')
        else:
            # Obtain Tcl and Tk locations from Tk widget
            tk.withdraw()
            tcl_lib_dir = str(tk.getvar('tcl_library'))
            tk_lib_dir = str(tk.getvar('tk_library'))
            tk.destroy()

        # Save directories and version string to cache
        self.tcl_tk_cache = tcl_lib_dir, tk_lib_dir, str(Tkinter.TkVersion)[:3]
        return self.tcl_tk_cache

    def parse_tcl_config(self, tcl_lib_dir, tk_lib_dir):
        # Read tclConfig.sh/tkConfig.sh for authoritative lib/include
        # settings; returns None when the config scripts cannot be found.
        try:
            if PY3:
                import tkinter as Tkinter
            else:
                import Tkinter
        except ImportError:
            return None

        tcl_poss = [tcl_lib_dir,
                    os.path.normpath(os.path.join(tcl_lib_dir, '..')),
                    "/usr/lib/tcl" + str(Tkinter.TclVersion),
                    "/usr/lib"]
        tk_poss = [tk_lib_dir,
                   os.path.normpath(os.path.join(tk_lib_dir, '..')),
                   "/usr/lib/tk" + str(Tkinter.TkVersion),
                   "/usr/lib"]
        for ptcl, ptk in zip(tcl_poss, tk_poss):
            tcl_config = os.path.join(ptcl, "tclConfig.sh")
            tk_config = os.path.join(ptk, "tkConfig.sh")
            if (os.path.exists(tcl_config) and os.path.exists(tk_config)):
                break
        if not (os.path.exists(tcl_config) and os.path.exists(tk_config)):
            return None

        def get_var(file, varname):
            # Source the shell config file and echo the requested variable.
            p = subprocess.Popen(
                '. %s ; eval echo ${%s}' % (file, varname),
                shell=True,
                executable="/bin/sh",
                stdout=subprocess.PIPE)
            result = p.communicate()[0]
            return result.decode('ascii')

        # The [2:] slices strip the '-L'/'-I'/'-l' flag prefixes.
        tcl_lib_dir = get_var(
            tcl_config, 'TCL_LIB_SPEC').split()[0][2:].strip()
        tcl_inc_dir = get_var(
            tcl_config, 'TCL_INCLUDE_SPEC')[2:].strip()
        tcl_lib = get_var(tcl_config, 'TCL_LIB_FLAG')[2:].strip()

        tk_lib_dir = get_var(tk_config, 'TK_LIB_SPEC').split()[0][2:].strip()
        tk_inc_dir = get_var(tk_config, 'TK_INCLUDE_SPEC').strip()
        if tk_inc_dir == '':
            tk_inc_dir = tcl_inc_dir
        else:
            tk_inc_dir = tk_inc_dir[2:]
        tk_lib = get_var(tk_config, 'TK_LIB_FLAG')[2:].strip()

        if not os.path.exists(os.path.join(tk_inc_dir, 'tk.h')):
            return None

        return (tcl_lib_dir, tcl_inc_dir, tcl_lib,
                tk_lib_dir, tk_inc_dir, tk_lib)

    def guess_tcl_config(self, tcl_lib_dir, tk_lib_dir, tk_ver):
        # Heuristically derive include/lib locations from the library
        # directories Tk reported; returns None if nothing plausible found.
        if not (os.path.exists(tcl_lib_dir) and os.path.exists(tk_lib_dir)):
            return None

        tcl_lib = os.path.normpath(os.path.join(tcl_lib_dir, '../'))
        tk_lib = os.path.normpath(os.path.join(tk_lib_dir, '../'))

        tcl_inc = os.path.normpath(
            os.path.join(tcl_lib_dir,
                         '../../include/tcl' + tk_ver))
        if not os.path.exists(tcl_inc):
            tcl_inc = os.path.normpath(
                os.path.join(tcl_lib_dir,
                             '../../include'))

        tk_inc = os.path.normpath(os.path.join(
            tk_lib_dir,
            '../../include/tk' + tk_ver))
        if not os.path.exists(tk_inc):
            tk_inc = os.path.normpath(os.path.join(
                tk_lib_dir,
                '../../include'))

        if not os.path.exists(os.path.join(tk_inc, 'tk.h')):
            tk_inc = tcl_inc

        if not os.path.exists(tcl_inc):
            # this is a hack for suse linux, which is broken
            if (sys.platform.startswith('linux') and
                    os.path.exists('/usr/include/tcl.h') and
                    os.path.exists('/usr/include/tk.h')):
                tcl_inc = '/usr/include'
                tk_inc = '/usr/include'

        if not os.path.exists(os.path.join(tk_inc, 'tk.h')):
            return None

        return tcl_lib, tcl_inc, 'tcl' + tk_ver, tk_lib, tk_inc, 'tk' + tk_ver

    def hardcoded_tcl_config(self):
        # Last-resort defaults that work on many BSD-style layouts.
        tcl_inc = "/usr/local/include"
        tk_inc = "/usr/local/include"
        tcl_lib = "/usr/local/lib"
        tk_lib = "/usr/local/lib"
        return tcl_lib, tcl_inc, 'tcl', tk_lib, tk_inc, 'tk'

    def add_flags(self, ext):
        """Attach per-platform Tcl/Tk include/library settings to *ext*."""
        if sys.platform == 'win32':
            major, minor1, minor2, s, tmp = sys.version_info
            if sys.version_info[0:2] < (3, 4):
                ext.include_dirs.extend(['win32_static/include/tcl85'])
                ext.libraries.extend(['tk85', 'tcl85'])
            else:
                ext.include_dirs.extend(['win32_static/include/tcl86'])
                ext.libraries.extend(['tk86t', 'tcl86t'])
            ext.library_dirs.extend([os.path.join(sys.prefix, 'dlls')])

        elif sys.platform == 'darwin':
            # this config section lifted directly from Imaging - thanks to
            # the effbot!

            # First test for a MacOSX/darwin framework install
            from os.path import join, exists
            framework_dirs = [
                join(os.getenv('HOME'), '/Library/Frameworks'),
                '/Library/Frameworks',
                '/System/Library/Frameworks/',
            ]

            # Find the directory that contains the Tcl.framework and
            # Tk.framework bundles.
            tk_framework_found = 0
            for F in framework_dirs:
                # both Tcl.framework and Tk.framework should be present
                for fw in 'Tcl', 'Tk':
                    if not exists(join(F, fw + '.framework')):
                        break
                else:
                    # ok, F is now directory with both frameworks. Continure
                    # building
                    tk_framework_found = 1
                    break
            if tk_framework_found:
                # For 8.4a2, we must add -I options that point inside
                # the Tcl and Tk frameworks. In later release we
                # should hopefully be able to pass the -F option to
                # gcc, which specifies a framework lookup path.
                tk_include_dirs = [
                    join(F, fw + '.framework', H)
                    for fw in ('Tcl', 'Tk')
                    for H in ('Headers', 'Versions/Current/PrivateHeaders')
                ]

                # For 8.4a2, the X11 headers are not included. Rather
                # than include a complicated search, this is a
                # hard-coded path. It could bail out if X11 libs are
                # not found...
                # tk_include_dirs.append('/usr/X11R6/include')
                frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
                ext.include_dirs.extend(tk_include_dirs)
                ext.extra_link_args.extend(frameworks)
                ext.extra_compile_args.extend(frameworks)

        # you're still here? ok we'll try it this way...
        else:
            # There are 3 methods to try, in decreasing order of "smartness"
            #
            # 1. Parse the tclConfig.sh and tkConfig.sh files that have
            #    all the information we need
            #
            # 2. Guess the include and lib dirs based on the location of
            #    Tkinter's 'tcl_library' and 'tk_library' variables.
            #
            # 3. Use some hardcoded locations that seem to work on a lot
            #    of distros.

            # Query Tcl/Tk system for library paths and version string
            try:
                tcl_lib_dir, tk_lib_dir, tk_ver = self.query_tcltk()
            except:
                tk_ver = ''
                result = self.hardcoded_tcl_config()
            else:
                result = self.parse_tcl_config(tcl_lib_dir, tk_lib_dir)
                if result is None:
                    result = self.guess_tcl_config(
                        tcl_lib_dir, tk_lib_dir, tk_ver)
                    if result is None:
                        result = self.hardcoded_tcl_config()

            # Add final versions of directories and libraries to ext lists
            (tcl_lib_dir, tcl_inc_dir, tcl_lib,
             tk_lib_dir, tk_inc_dir, tk_lib) = result
            ext.include_dirs.extend([tcl_inc_dir, tk_inc_dir])
            ext.library_dirs.extend([tcl_lib_dir, tk_lib_dir])
            ext.libraries.extend([tcl_lib, tk_lib])
class BackendGtk(OptionalBackendPackage):
    # Builds the GDK/GTK+ 2 backend extension.
    name = "gtk"

    def check_requirements(self):
        """Check for pygtk >= 2.2 plus usable gtk/pygtk headers."""
        try:
            import gtk
        except ImportError:
            raise CheckFailed("Requires pygtk")
        except RuntimeError:
            raise CheckFailed('pygtk present, but import failed.')
        else:
            version = (2, 2, 0)
            if gtk.pygtk_version < version:
                raise CheckFailed(
                    "Requires pygtk %d.%d.%d or later. "
                    "Found %d.%d.%d" % (version + gtk.pygtk_version))

        ext = self.get_extension()
        self.add_flags(ext)
        check_include_file(ext.include_dirs,
                           os.path.join("gtk", "gtk.h"),
                           'gtk')
        check_include_file(ext.include_dirs,
                           os.path.join("pygtk", "pygtk.h"),
                           'pygtk')

        return 'Gtk: %s pygtk: %s' % (
            ".".join(str(x) for x in gtk.gtk_version),
            ".".join(str(x) for x in gtk.pygtk_version))

    def get_package_data(self):
        return {'matplotlib': ['mpl-data/*.glade']}

    def get_extension(self):
        sources = [
            'src/_backend_gdk.c'
        ]
        ext = make_extension('matplotlib.backends._backend_gdk', sources)
        self.add_flags(ext)
        Numpy().add_flags(ext)
        return ext

    def add_flags(self, ext):
        # On Windows pkg-config is unreliable, so hard-coded GTK install
        # paths are used as a fallback; elsewhere pkg-config does the work.
        if sys.platform == 'win32':
            def getoutput(s):
                ret = os.popen(s).read().strip()
                return ret

            if 'PKG_CONFIG_PATH' not in os.environ:
                # If Gtk+ is installed, pkg-config is required to be installed
                os.environ['PKG_CONFIG_PATH'] = 'C:\\GTK\\lib\\pkgconfig'

                # popen broken on my win32 plaform so I can't use pkgconfig
                ext.library_dirs.extend(
                    ['C:/GTK/bin', 'C:/GTK/lib'])

                ext.include_dirs.extend(
                    ['win32_static/include/pygtk-2.0',
                     'C:/GTK/include',
                     'C:/GTK/include/gobject',
                     'C:/GTK/include/gext',
                     'C:/GTK/include/glib',
                     'C:/GTK/include/pango',
                     'C:/GTK/include/atk',
                     'C:/GTK/include/X11',
                     'C:/GTK/include/cairo',
                     'C:/GTK/include/gdk',
                     'C:/GTK/include/gdk-pixbuf',
                     'C:/GTK/include/gtk',
                     ])

            pygtkIncludes = getoutput(
                'pkg-config --cflags-only-I pygtk-2.0').split()
            gtkIncludes = getoutput(
                'pkg-config --cflags-only-I gtk+-2.0').split()
            includes = pygtkIncludes + gtkIncludes
            # Strip the '-I' prefix from each include flag.
            ext.include_dirs.extend([include[2:] for include in includes])

            pygtkLinker = getoutput('pkg-config --libs pygtk-2.0').split()
            gtkLinker = getoutput('pkg-config --libs gtk+-2.0').split()
            linkerFlags = pygtkLinker + gtkLinker

            ext.libraries.extend(
                [flag[2:] for flag in linkerFlags if flag.startswith('-l')])

            ext.library_dirs.extend(
                [flag[2:] for flag in linkerFlags if flag.startswith('-L')])

            ext.extra_link_args.extend(
                [flag for flag in linkerFlags if not
                 (flag.startswith('-l') or flag.startswith('-L'))])

            # visual studio doesn't need the math library
            if (sys.platform == 'win32' and
                    win32_compiler == 'msvc' and
                    'm' in ext.libraries):
                ext.libraries.remove('m')

        elif sys.platform != 'win32':
            pkg_config.setup_extension(ext, 'pygtk-2.0')
            pkg_config.setup_extension(ext, 'gtk+-2.0')
class BackendGtkAgg(BackendGtk):
    """Builds the GTKAgg backend (GTK+ 2 rendering through Agg)."""
    name = "gtkagg"

    def check(self):
        """Run the BackendGtk check and, on success, force the Agg backend.

        BUG FIX: the original wrote ``try: return super().check()
        except: raise else: BackendAgg.force = True``.  Because the
        ``try`` body returned immediately, the ``else`` clause was dead
        code and ``BackendAgg.force`` was never set.  The bare
        ``except: raise`` was a no-op and is dropped; exceptions from
        the base check still propagate unchanged.
        """
        status = super(BackendGtkAgg, self).check()
        BackendAgg.force = True
        return status

    def get_package_data(self):
        return {'matplotlib': ['mpl-data/*.glade']}

    def get_extension(self):
        sources = [
            'src/agg_py_transforms.cpp',
            'src/_gtkagg.cpp',
            'src/mplutils.cpp'
        ]
        ext = make_extension('matplotlib.backends._gtkagg', sources)
        self.add_flags(ext)
        LibAgg().add_flags(ext)
        CXX().add_flags(ext)
        Numpy().add_flags(ext)
        return ext
def backend_gtk3agg_internal_check(x):
    """Probe whether the Gtk3Agg backend can be used.

    Designed to run in a worker subprocess (see BackendGtk3Agg): importing
    gi and then pygtk in the same process segfaults the interpreter.

    Parameters
    ----------
    x : ignored
        Dummy argument so the function can be driven via Pool.map.

    Returns
    -------
    (bool, str)
        Success flag and a human-readable status/version message.
    """
    try:
        import gi
    except ImportError:
        return (False, "Requires pygobject to be installed.")

    try:
        gi.require_version("Gtk", "3.0")
    except ValueError:
        return (False, "Requires gtk3 development files to be installed.")
    except AttributeError:
        return (False, "pygobject version too old.")

    try:
        from gi.repository import Gtk, Gdk, GObject
    except (ImportError, RuntimeError):
        return (False, "Requires pygobject to be installed.")

    # BUG FIX: the original reported major.micro.minor -- the minor and
    # micro components were swapped.  Report major.minor.micro.
    return (True, "version %s.%s.%s" % (
        Gtk.get_major_version(),
        Gtk.get_minor_version(),
        Gtk.get_micro_version()))
class BackendGtk3Agg(OptionalBackendPackage):
    # Checks whether the Gtk3Agg backend can be built, delegating the
    # actual probe to backend_gtk3agg_internal_check in a subprocess.
    name = "gtk3agg"

    def check_requirements(self):
        """Run the gtk3agg probe out-of-process and report the result."""
        if 'TRAVIS' in os.environ:
            raise CheckFailed("Can't build with Travis")

        if PY3:
            raise CheckFailed("gtk3agg backend does not work on Python 3")

        # This check needs to be performed out-of-process, because
        # importing gi and then importing regular old pygtk afterward
        # segfaults the interpreter.
        try:
            p = multiprocessing.Pool()
        except:
            return "unknown (can not use multiprocessing to determine)"
        try:
            success, msg = p.map(backend_gtk3agg_internal_check, [0])[0]
        except:
            success = False
            msg = "Could not determine"
        finally:
            # Always reap the worker pool, even if the probe blew up.
            p.close()
            p.join()
        if success:
            BackendAgg.force = True

            return msg
        else:
            raise CheckFailed(msg)

    def get_package_data(self):
        return {'matplotlib': ['mpl-data/*.glade']}
def backend_gtk3cairo_internal_check(x):
try:
import cairocffi
except ImportError:
try:
import cairo
except ImportError:
return (False, "Requires cairocffi or pycairo to be installed.")
try:
import gi
except ImportError:
return (False, "Requires pygobject to be installed.")
try:
gi.require_version("Gtk", "3.0")
except ValueError:
return (False, "Requires gtk3 development files to be installed.")
except AttributeError:
return (False, "pygobject version too old.")
try:
from gi.repository import Gtk, Gdk, GObject
except (RuntimeError, ImportError):
return (False, "Requires pygobject to be installed.")
return (True, "version %s.%s.%s" % (
Gtk.get_major_version(),
Gtk.get_micro_version(),
Gtk.get_minor_version()))
class BackendGtk3Cairo(OptionalBackendPackage):
    """Checks whether the Gtk3Cairo backend can be built.

    Delegates the actual probe to backend_gtk3cairo_internal_check in a
    worker subprocess, since importing gi and then pygtk in one process
    segfaults the interpreter.
    """
    name = "gtk3cairo"

    def check_requirements(self):
        """Run the gtk3cairo probe out-of-process and report the result.

        Raises CheckFailed when the backend cannot be built.
        """
        if 'TRAVIS' in os.environ:
            raise CheckFailed("Can't build with Travis")

        # This check needs to be performed out-of-process, because
        # importing gi and then importing regular old pygtk afterward
        # segfaults the interpreter.
        try:
            p = multiprocessing.Pool()
        except:
            return "unknown (can not use multiprocessing to determine)"

        # ROBUSTNESS/CONSISTENCY FIX: mirror BackendGtk3Agg -- the
        # original called p.map() unguarded, leaking the worker pool
        # (no close/join) whenever the probe raised.
        try:
            success, msg = p.map(backend_gtk3cairo_internal_check, [0])[0]
        except:
            success = False
            msg = "Could not determine"
        finally:
            p.close()
            p.join()

        if success:
            BackendAgg.force = True

            return msg
        else:
            raise CheckFailed(msg)

    def get_package_data(self):
        return {'matplotlib': ['mpl-data/*.glade']}
class BackendWxAgg(OptionalBackendPackage):
    # Checks for wxPython >= 2.8, required by the WxAgg backend.
    name = "wxagg"

    def check_requirements(self):
        """Verify wxPython >= 2.8 is importable; enables the Agg backend."""
        try:
            import wxversion
        except ImportError:
            raise CheckFailed("requires wxPython")

        try:
            _wx_ensure_failed = wxversion.AlreadyImportedError
        except AttributeError:
            # Older wxversion releases lack AlreadyImportedError.
            _wx_ensure_failed = wxversion.VersionError

        try:
            wxversion.ensureMinimal('2.8')
        except _wx_ensure_failed:
            pass

        try:
            import wx
            backend_version = wx.VERSION_STRING
        except ImportError:
            raise CheckFailed("requires wxPython")

        # Extra version check in case wxversion lacks AlreadyImportedError;
        # then VersionError might have been raised and ignored when
        # there really *is* a problem with the version.
        major, minor = [int(n) for n in backend_version.split('.')[:2]]
        if major < 2 or (major < 3 and minor < 8):
            raise CheckFailed(
                "Requires wxPython 2.8, found %s" % backend_version)

        BackendAgg.force = True

        return "version %s" % backend_version
class BackendMacOSX(OptionalBackendPackage):
    """Builds the native Cocoa backend (OS X only)."""
    name = 'macosx'

    def check_requirements(self):
        if sys.platform != 'darwin':
            raise CheckFailed("Mac OS-X only")

        return 'darwin'

    def get_extension(self):
        ext = make_extension('matplotlib.backends._macosx', [
            'src/_macosx.m',
            'src/agg_py_transforms.cpp',
            'src/path_cleanup.cpp'
        ])
        for dep in (Numpy(), LibAgg(), CXX()):
            dep.add_flags(ext)
        # Link against the Cocoa framework for the native window system.
        ext.extra_link_args.extend(['-framework', 'Cocoa'])
        return ext
class Windowing(OptionalBackendPackage):
    """
    Builds the windowing extension.
    """
    name = "windowing"

    def check_requirements(self):
        if sys.platform != 'win32':
            raise CheckFailed("Microsoft Windows only")
        # Honor an explicit opt-out in setup.cfg.
        if self.get_config() is False:
            raise CheckFailed("skipping due to configuration")
        return "installing"

    def get_extension(self):
        ext = make_extension(
            'matplotlib._windowing', ["src/_windowing.cpp"])
        ext.include_dirs.extend(['C:/include'])
        ext.libraries.extend(['user32'])
        ext.library_dirs.extend(['C:/lib'])
        ext.extra_link_args.append("-mwindows")
        return ext
class BackendQtBase(OptionalBackendPackage):
    # Shared machinery for the PyQt4/PyQt5/PySide checks; concrete
    # subclasses assign ``self.callback`` to a module-level probe function.

    def convert_qt_version(self, version):
        """Convert Qt's packed hex version int (e.g. 0x40805) to '4.8.5'."""
        version = '%x' % version
        temp = []
        # Peel two hex digits (one component) off the end at a time.
        while len(version) > 0:
            version, chunk = version[:-2], version[-2:]
            temp.insert(0, str(int(chunk, 16)))
        return '.'.join(temp)

    def check_requirements(self):
        '''
        If PyQt4/PyQt5 is already imported, importing PyQt5/PyQt4 will fail
        so we need to test in a subprocess (as for Gtk3).
        '''
        try:
            p = multiprocessing.Pool()

        except:
            # Can't do multiprocessing, fall back to normal approach ( this will fail if importing both PyQt4 and PyQt5 )
            try:
                # Try in-process
                msg = self.callback(self)
            except RuntimeError:
                raise CheckFailed("Could not import: are PyQt4 & PyQt5 both installed?")
            except:
                # Raise any other exceptions
                raise

        else:
            # Multiprocessing OK
            try:
                msg = p.map(self.callback, [self])[0]
            except:
                # If we hit an error on multiprocessing raise it
                raise
            finally:
                # Tidy up multiprocessing
                p.close()
                p.join()

        return msg
def backend_qt4_internal_check(self):
    """Probe for PyQt4; intended to run in a subprocess (see BackendQtBase)."""
    try:
        from PyQt4 import QtCore
    except ImportError:
        raise CheckFailed("PyQt4 not found")

    try:
        qt_version = QtCore.QT_VERSION
        pyqt_version_str = QtCore.QT_VERSION_STR
    except AttributeError:
        raise CheckFailed('PyQt4 not correctly imported')

    BackendAgg.force = True
    return ("Qt: %s, PyQt: %s" % (self.convert_qt_version(qt_version), pyqt_version_str))
class BackendQt4(BackendQtBase):
    """Qt4Agg backend check; probes PyQt4 out-of-process."""
    name = "qt4agg"

    def __init__(self, *args, **kwargs):
        BackendQtBase.__init__(self, *args, **kwargs)
        # BackendQtBase.check_requirements() invokes this probe.
        self.callback = backend_qt4_internal_check
def backend_qt5_internal_check(self):
    """Probe for PyQt5; intended to run in a subprocess (see BackendQtBase)."""
    try:
        from PyQt5 import QtCore
    except ImportError:
        raise CheckFailed("PyQt5 not found")

    try:
        qt_version = QtCore.QT_VERSION
        pyqt_version_str = QtCore.QT_VERSION_STR
    except AttributeError:
        raise CheckFailed('PyQt5 not correctly imported')

    BackendAgg.force = True
    return ("Qt: %s, PyQt: %s" % (self.convert_qt_version(qt_version), pyqt_version_str))
class BackendQt5(BackendQtBase):
name = "qt5agg"
def __init__(self, *args, **kwargs):
BackendQtBase.__init__(self, *args, **kwargs)
self.callback = backend_qt5_internal_check
def backend_pyside_internal_check(self):
try:
from PySide import __version__
from PySide import QtCore
except ImportError:
raise CheckFailed("PySide not found")
else:
BackendAgg.force = True
return ("Qt: %s, PySide: %s" %
(QtCore.__version__, __version__))
class BackendPySide(BackendQtBase):
name = "pyside"
def __init__(self, *args, **kwargs):
BackendQtBase.__init__(self, *args, **kwargs)
self.callback = backend_pyside_internal_check
class BackendCairo(OptionalBackendPackage):
name = "cairo"
def check_requirements(self):
try:
import cairocffi
except ImportError:
try:
import cairo
except ImportError:
raise CheckFailed("cairocffi or pycairo not found")
else:
return "pycairo version %s" % cairo.version
else:
return "cairocffi version %s" % cairocffi.version
class DviPng(SetupPackage):
name = "dvipng"
optional = True
def check(self):
try:
output = check_output('dvipng -version', shell=True,
stderr=subprocess.STDOUT)
return "version %s" % output.splitlines()[1].decode().split()[-1]
except (IndexError, ValueError, subprocess.CalledProcessError):
raise CheckFailed()
class Ghostscript(SetupPackage):
name = "ghostscript"
optional = True
def check(self):
try:
if sys.platform == 'win32':
command = 'gswin32c --version'
try:
output = check_output(command, shell=True,
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
command = 'gswin64c --version'
output = check_output(command, shell=True,
stderr=subprocess.STDOUT)
else:
command = 'gs --version'
output = check_output(command, shell=True,
stderr=subprocess.STDOUT)
return "version %s" % output.decode()[:-1]
except (IndexError, ValueError, subprocess.CalledProcessError):
raise CheckFailed()
class LaTeX(SetupPackage):
name = "latex"
optional = True
def check(self):
try:
output = check_output('latex -version', shell=True,
stderr=subprocess.STDOUT)
line = output.splitlines()[0].decode()
pattern = '(3\.1\d+)|(MiKTeX \d+.\d+)'
match = re.search(pattern, line)
return "version %s" % match.group(0)
except (IndexError, ValueError, AttributeError, subprocess.CalledProcessError):
raise CheckFailed()
class PdfToPs(SetupPackage):
name = "pdftops"
optional = True
def check(self):
try:
output = check_output('pdftops -v', shell=True,
stderr=subprocess.STDOUT)
for line in output.splitlines():
line = line.decode()
if 'version' in line:
return "version %s" % line.split()[2]
except (IndexError, ValueError, subprocess.CalledProcessError):
pass
raise CheckFailed()
| [
"io.BytesIO",
"sys.platform.startswith",
"gi.repository.Gtk.get_micro_version",
"re.search",
"os.path.exists",
"os.listdir",
"sys.getfilesystemencoding",
"pyparsing.Forward",
"CXX",
"subprocess.Popen",
"subprocess.CalledProcessError",
"os.path.split",
"os.path.isdir",
"os.popen",
"numpy.... | [((1564, 1606), 'os.environ.get', 'os.environ.get', (['"""MPLSETUPCFG"""', '"""setup.cfg"""'], {}), "('MPLSETUPCFG', 'setup.cfg')\n", (1578, 1606), False, 'import os\n'), ((1610, 1635), 'os.path.exists', 'os.path.exists', (['setup_cfg'], {}), '(setup_cfg)\n', (1624, 1635), False, 'import os\n'), ((1650, 1681), 'ConfigParser.SafeConfigParser', 'configparser.SafeConfigParser', ([], {}), '()\n', (1679, 1681), True, 'import ConfigParser as configparser\n'), ((4201, 4233), 'distutils.version.LooseVersion', 'version.LooseVersion', (['minversion'], {}), '(minversion)\n', (4221, 4233), False, 'from distutils import version\n'), ((4254, 4281), 'distutils.version.LooseVersion', 'version.LooseVersion', (['found'], {}), '(found)\n', (4274, 4281), False, 'from distutils import version\n'), ((5971, 5999), 'os.path.join', 'os.path.join', (['dir', '"""include"""'], {}), "(dir, 'include')\n", (5983, 5999), False, 'import os\n'), ((6011, 6038), 'os.path.exists', 'os.path.exists', (['include_dir'], {}), '(include_dir)\n', (6025, 6038), False, 'import os\n'), ((6817, 6851), 'distutils.sysconfig.get_config_var', 'sysconfig.get_config_var', (['"""LIBDIR"""'], {}), "('LIBDIR')\n", (6841, 6851), False, 'from distutils import sysconfig\n'), ((6932, 6973), 'os.path.join', 'os.path.join', (['pkgconfig_path', '"""pkgconfig"""'], {}), "(pkgconfig_path, 'pkgconfig')\n", (6944, 6973), False, 'import os\n'), ((9155, 9210), 'subprocess.getstatusoutput', 'getstatusoutput', (["('pkg-config %s --modversion' % package)"], {}), "('pkg-config %s --modversion' % package)\n", (9170, 9210), False, 'from subprocess import getstatusoutput\n'), ((22115, 22136), 'distutils.core.Extension', 'Extension', (['"""test"""', '[]'], {}), "('test', [])\n", (22124, 22136), False, 'from distutils.core import Extension\n'), ((27288, 27332), 'subprocess.getstatusoutput', 'getstatusoutput', (['"""freetype-config --version"""'], {}), "('freetype-config --version')\n", (27303, 27332), False, 'from subprocess 
import getstatusoutput\n'), ((52850, 52882), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (52868, 52882), False, 'import gi\n'), ((54790, 54822), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (54808, 54822), False, 'import gi\n'), ((702, 764), 'subprocess.Popen', 'subprocess.Popen', (['*popenargs'], {'stdout': 'subprocess.PIPE'}), '(*popenargs, stdout=subprocess.PIPE, **kwargs)\n', (718, 764), False, 'import subprocess\n'), ((2990, 3017), 'os.path.join', 'os.path.join', (['dir', 'filename'], {}), '(dir, filename)\n', (3002, 3017), False, 'import os\n'), ((6148, 6170), 'os.path.join', 'os.path.join', (['dir', 'lib'], {}), '(dir, lib)\n', (6160, 6170), False, 'import os\n'), ((6186, 6209), 'os.path.exists', 'os.path.exists', (['lib_dir'], {}), '(lib_dir)\n', (6200, 6209), False, 'import os\n'), ((6673, 6709), 'subprocess.getstatusoutput', 'getstatusoutput', (['"""pkg-config --help"""'], {}), "('pkg-config --help')\n", (6688, 6709), False, 'from subprocess import getstatusoutput\n'), ((6989, 7018), 'os.path.isdir', 'os.path.isdir', (['pkgconfig_path'], {}), '(pkgconfig_path)\n', (7002, 7018), False, 'import os\n'), ((21882, 21899), 'imp.reload', 'imp.reload', (['numpy'], {}), '(numpy)\n', (21892, 21899), False, 'import imp\n'), ((22169, 22188), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (22186, 22188), False, 'import numpy\n'), ((22310, 22428), 'warnings.warn', 'warnings.warn', (['"""The C headers for numpy could not be found. You may need to install the development package"""'], {}), "(\n 'The C headers for numpy could not be found. 
You may need to install the development package'\n )\n", (22323, 22428), False, 'import warnings\n'), ((22472, 22491), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (22489, 22491), False, 'import numpy\n'), ((23916, 23929), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (23927, 23929), False, 'import io\n'), ((23969, 23981), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (23979, 23981), False, 'import io\n'), ((31990, 32032), 'os.path.join', 'os.path.join', (['"""lib/matplotlib/delaunay"""', 's'], {}), "('lib/matplotlib/delaunay', s)\n", (32002, 32032), False, 'import os\n'), ((35066, 35085), 'pyparsing.Forward', 'pyparsing.Forward', ([], {}), '()\n', (35083, 35085), False, 'import pyparsing\n'), ((35104, 35126), 'pyparsing.Literal', 'pyparsing.Literal', (['"""a"""'], {}), "('a')\n", (35121, 35126), False, 'import pyparsing\n'), ((38884, 38896), 'Tkinter.Tk', 'Tkinter.Tk', ([], {}), '()\n', (38894, 38896), False, 'import Tkinter\n'), ((40932, 40966), 'os.path.join', 'os.path.join', (['ptcl', '"""tclConfig.sh"""'], {}), "(ptcl, 'tclConfig.sh')\n", (40944, 40966), False, 'import os\n'), ((40991, 41023), 'os.path.join', 'os.path.join', (['ptk', '"""tkConfig.sh"""'], {}), "(ptk, 'tkConfig.sh')\n", (41003, 41023), False, 'import os\n'), ((41273, 41395), 'subprocess.Popen', 'subprocess.Popen', (["('. %s ; eval echo ${%s}' % (file, varname))"], {'shell': '(True)', 'executable': '"""/bin/sh"""', 'stdout': 'subprocess.PIPE'}), "('. 
%s ; eval echo ${%s}' % (file, varname), shell=True,\n executable='/bin/sh', stdout=subprocess.PIPE)\n", (41289, 41395), False, 'import subprocess\n'), ((42509, 42541), 'os.path.join', 'os.path.join', (['tcl_lib_dir', '"""../"""'], {}), "(tcl_lib_dir, '../')\n", (42521, 42541), False, 'import os\n'), ((42577, 42608), 'os.path.join', 'os.path.join', (['tk_lib_dir', '"""../"""'], {}), "(tk_lib_dir, '../')\n", (42589, 42608), False, 'import os\n'), ((42659, 42714), 'os.path.join', 'os.path.join', (['tcl_lib_dir', "('../../include/tcl' + tk_ver)"], {}), "(tcl_lib_dir, '../../include/tcl' + tk_ver)\n", (42671, 42714), False, 'import os\n'), ((42756, 42779), 'os.path.exists', 'os.path.exists', (['tcl_inc'], {}), '(tcl_inc)\n', (42770, 42779), False, 'import os\n'), ((42945, 42998), 'os.path.join', 'os.path.join', (['tk_lib_dir', "('../../include/tk' + tk_ver)"], {}), "(tk_lib_dir, '../../include/tk' + tk_ver)\n", (42957, 42998), False, 'import os\n'), ((43040, 43062), 'os.path.exists', 'os.path.exists', (['tk_inc'], {}), '(tk_inc)\n', (43054, 43062), False, 'import os\n'), ((43285, 43308), 'os.path.exists', 'os.path.exists', (['tcl_inc'], {}), '(tcl_inc)\n', (43299, 43308), False, 'import os\n'), ((48846, 48874), 'os.path.join', 'os.path.join', (['"""gtk"""', '"""gtk.h"""'], {}), "('gtk', 'gtk.h')\n", (48858, 48874), False, 'import os\n'), ((48982, 49014), 'os.path.join', 'os.path.join', (['"""pygtk"""', '"""pygtk.h"""'], {}), "('pygtk', 'pygtk.h')\n", (48994, 49014), False, 'import os\n'), ((53859, 53881), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {}), '()\n', (53879, 53881), False, 'import multiprocessing\n'), ((55711, 55733), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {}), '()\n', (55731, 55733), False, 'import multiprocessing\n'), ((56552, 56582), 'wxversion.ensureMinimal', 'wxversion.ensureMinimal', (['"""2.8"""'], {}), "('2.8')\n", (56575, 56582), False, 'import wxversion\n'), ((59211, 59233), 'multiprocessing.Pool', 'multiprocessing.Pool', 
([], {}), '()\n', (59231, 59233), False, 'import multiprocessing\n'), ((62512, 62581), 'subprocess.check_output', 'check_output', (['"""dvipng -version"""'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), "('dvipng -version', shell=True, stderr=subprocess.STDOUT)\n", (62524, 62581), False, 'from subprocess import check_output\n'), ((63853, 63921), 'subprocess.check_output', 'check_output', (['"""latex -version"""'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), "('latex -version', shell=True, stderr=subprocess.STDOUT)\n", (63865, 63921), False, 'from subprocess import check_output\n'), ((64078, 64102), 're.search', 're.search', (['pattern', 'line'], {}), '(pattern, line)\n', (64087, 64102), False, 'import re\n'), ((64400, 64464), 'subprocess.check_output', 'check_output', (['"""pdftops -v"""'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), "('pdftops -v', shell=True, stderr=subprocess.STDOUT)\n", (64412, 64464), False, 'from subprocess import check_output\n'), ((1002, 1045), 'subprocess.CalledProcessError', 'subprocess.CalledProcessError', (['retcode', 'cmd'], {}), '(retcode, cmd)\n', (1031, 1045), False, 'import subprocess\n'), ((3864, 3887), 'os.getenv', 'os.getenv', (['"""MPLIB_BASE"""'], {}), "('MPLIB_BASE')\n", (3873, 3887), False, 'import os\n'), ((7863, 7922), 'subprocess.check_output', 'check_output', (['command'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), '(command, shell=True, stderr=subprocess.STDOUT)\n', (7875, 7922), False, 'from subprocess import check_output\n'), ((19708, 19758), 'os.listdir', 'os.listdir', (['"""lib/matplotlib/tests/baseline_images"""'], {}), "('lib/matplotlib/tests/baseline_images')\n", (19718, 19758), False, 'import os\n'), ((22257, 22295), 'os.path.join', 'os.path.join', (['"""numpy"""', '"""arrayobject.h"""'], {}), "('numpy', 'arrayobject.h')\n", (22269, 22295), False, 'import os\n'), ((24920, 25025), 'os.path.join', 'os.path.join', (['sys.prefix', '"""share"""', "('python%d.%d' % 
(sys.version_info[0], sys.version_info[1]))", '"""CXX"""'], {}), "(sys.prefix, 'share', 'python%d.%d' % (sys.version_info[0], sys\n .version_info[1]), 'CXX')\n", (24932, 25025), False, 'import os\n'), ((25162, 25189), 'os.path.exists', 'os.path.exists', (['support_dir'], {}), '(support_dir)\n', (25176, 25189), False, 'import os\n'), ((25686, 25715), 'glob.glob', 'glob.glob', (['"""extern/CXX/*.cxx"""'], {}), "('extern/CXX/*.cxx')\n", (25695, 25715), False, 'import glob\n'), ((25748, 25775), 'glob.glob', 'glob.glob', (['"""extern/CXX/*.c"""'], {}), "('extern/CXX/*.c')\n", (25757, 25775), False, 'import glob\n'), ((28387, 28392), 'CXX', 'CXX', ([], {}), '()\n', (28390, 28392), False, 'import CXX\n'), ((29062, 29067), 'CXX', 'CXX', ([], {}), '()\n', (29065, 29067), False, 'import CXX\n'), ((30244, 30273), 'glob.glob', 'glob.glob', (['"""extern/qhull/*.c"""'], {}), "('extern/qhull/*.c')\n", (30253, 30273), False, 'import glob\n'), ((30644, 30649), 'CXX', 'CXX', ([], {}), '()\n', (30647, 30649), False, 'import CXX\n'), ((31071, 31076), 'CXX', 'CXX', ([], {}), '()\n', (31074, 31076), False, 'import CXX\n'), ((31402, 31407), 'CXX', 'CXX', ([], {}), '()\n', (31405, 31407), False, 'import CXX\n'), ((32781, 32786), 'CXX', 'CXX', ([], {}), '()\n', (32784, 32786), False, 'import CXX\n'), ((36646, 36651), 'CXX', 'CXX', ([], {}), '()\n', (36649, 36651), False, 'import CXX\n'), ((37400, 37427), 'Tkinter.__version__.split', 'Tkinter.__version__.split', ([], {}), '()\n', (37425, 37427), False, 'import Tkinter\n'), ((37955, 37960), 'CXX', 'CXX', ([], {}), '()\n', (37958, 37960), False, 'import CXX\n'), ((40539, 40570), 'os.path.join', 'os.path.join', (['tcl_lib_dir', '""".."""'], {}), "(tcl_lib_dir, '..')\n", (40551, 40570), False, 'import os\n'), ((40735, 40765), 'os.path.join', 'os.path.join', (['tk_lib_dir', '""".."""'], {}), "(tk_lib_dir, '..')\n", (40747, 40765), False, 'import os\n'), ((41040, 41066), 'os.path.exists', 'os.path.exists', (['tcl_config'], {}), '(tcl_config)\n', 
(41054, 41066), False, 'import os\n'), ((41071, 41096), 'os.path.exists', 'os.path.exists', (['tk_config'], {}), '(tk_config)\n', (41085, 41096), False, 'import os\n'), ((41137, 41163), 'os.path.exists', 'os.path.exists', (['tcl_config'], {}), '(tcl_config)\n', (41151, 41163), False, 'import os\n'), ((41168, 41193), 'os.path.exists', 'os.path.exists', (['tk_config'], {}), '(tk_config)\n', (41182, 41193), False, 'import os\n'), ((42147, 42179), 'os.path.join', 'os.path.join', (['tk_inc_dir', '"""tk.h"""'], {}), "(tk_inc_dir, 'tk.h')\n", (42159, 42179), False, 'import os\n'), ((42388, 42415), 'os.path.exists', 'os.path.exists', (['tcl_lib_dir'], {}), '(tcl_lib_dir)\n', (42402, 42415), False, 'import os\n'), ((42420, 42446), 'os.path.exists', 'os.path.exists', (['tk_lib_dir'], {}), '(tk_lib_dir)\n', (42434, 42446), False, 'import os\n'), ((42837, 42879), 'os.path.join', 'os.path.join', (['tcl_lib_dir', '"""../../include"""'], {}), "(tcl_lib_dir, '../../include')\n", (42849, 42879), False, 'import os\n'), ((43102, 43143), 'os.path.join', 'os.path.join', (['tk_lib_dir', '"""../../include"""'], {}), "(tk_lib_dir, '../../include')\n", (43114, 43143), False, 'import os\n'), ((43209, 43237), 'os.path.join', 'os.path.join', (['tk_inc', '"""tk.h"""'], {}), "(tk_inc, 'tk.h')\n", (43221, 43237), False, 'import os\n'), ((43387, 43419), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (43410, 43419), False, 'import sys\n'), ((43440, 43476), 'os.path.exists', 'os.path.exists', (['"""/usr/include/tcl.h"""'], {}), "('/usr/include/tcl.h')\n", (43454, 43476), False, 'import os\n'), ((43497, 43532), 'os.path.exists', 'os.path.exists', (['"""/usr/include/tk.h"""'], {}), "('/usr/include/tk.h')\n", (43511, 43532), False, 'import os\n'), ((43647, 43675), 'os.path.join', 'os.path.join', (['tk_inc', '"""tk.h"""'], {}), "(tk_inc, 'tk.h')\n", (43659, 43675), False, 'import os\n'), ((52607, 52612), 'CXX', 'CXX', ([], {}), '()\n', (52610, 52612), False, 
'import CXX\n'), ((53275, 53298), 'gi.repository.Gtk.get_major_version', 'Gtk.get_major_version', ([], {}), '()\n', (53296, 53298), False, 'from gi.repository import Gtk, Gdk, GObject\n'), ((53308, 53331), 'gi.repository.Gtk.get_micro_version', 'Gtk.get_micro_version', ([], {}), '()\n', (53329, 53331), False, 'from gi.repository import Gtk, Gdk, GObject\n'), ((53341, 53364), 'gi.repository.Gtk.get_minor_version', 'Gtk.get_minor_version', ([], {}), '()\n', (53362, 53364), False, 'from gi.repository import Gtk, Gdk, GObject\n'), ((55215, 55238), 'gi.repository.Gtk.get_major_version', 'Gtk.get_major_version', ([], {}), '()\n', (55236, 55238), False, 'from gi.repository import Gtk, Gdk, GObject\n'), ((55248, 55271), 'gi.repository.Gtk.get_micro_version', 'Gtk.get_micro_version', ([], {}), '()\n', (55269, 55271), False, 'from gi.repository import Gtk, Gdk, GObject\n'), ((55281, 55304), 'gi.repository.Gtk.get_minor_version', 'Gtk.get_minor_version', ([], {}), '()\n', (55302, 55304), False, 'from gi.repository import Gtk, Gdk, GObject\n'), ((57822, 57827), 'CXX', 'CXX', ([], {}), '()\n', (57825, 57827), False, 'import CXX\n'), ((63472, 63531), 'subprocess.check_output', 'check_output', (['command'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), '(command, shell=True, stderr=subprocess.STDOUT)\n', (63484, 63531), False, 'from subprocess import check_output\n'), ((3264, 3289), 'os.getenv', 'os.getenv', (['"""INCLUDE"""', '"""."""'], {}), "('INCLUDE', '.')\n", (3273, 3289), False, 'import os\n'), ((8089, 8116), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (8114, 8116), False, 'import sys\n'), ((8534, 8561), 'os.path.join', 'os.path.join', (['base', 'include'], {}), '(base, include)\n', (8546, 8561), False, 'import os\n'), ((8585, 8604), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (8599, 8604), False, 'import os\n'), ((8734, 8757), 'os.path.join', 'os.path.join', (['base', 'lib'], {}), '(base, lib)\n', (8746, 8757), 
False, 'import os\n'), ((8781, 8800), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (8795, 8800), False, 'import os\n'), ((25361, 25389), 'os.path.join', 'os.path.join', (['support_dir', 'x'], {}), '(support_dir, x)\n', (25373, 25389), False, 'import os\n'), ((27049, 27090), 'os.path.join', 'os.path.join', (['"""extern"""', '"""agg24"""', '"""src"""', 'x'], {}), "('extern', 'agg24', 'src', x)\n", (27061, 27090), False, 'import os\n'), ((29640, 29675), 'os.path.join', 'os.path.join', (['x', '"""include"""', '"""qhull"""'], {}), "(x, 'include', 'qhull')\n", (29652, 29675), False, 'import os\n'), ((35577, 35609), 'pyparsing.__version__.split', 'pyparsing.__version__.split', (['"""."""'], {}), "('.')\n", (35604, 35609), False, 'import pyparsing\n'), ((39145, 39158), 'Tkinter.Tcl', 'Tkinter.Tcl', ([], {}), '()\n', (39156, 39158), False, 'import Tkinter\n'), ((39519, 39545), 'os.path.split', 'os.path.split', (['tcl_lib_dir'], {}), '(tcl_lib_dir)\n', (39532, 39545), False, 'import os\n'), ((39645, 39669), 'os.path.join', 'os.path.join', (['head', 'tail'], {}), '(head, tail)\n', (39657, 39669), False, 'import os\n'), ((44514, 44546), 'os.path.join', 'os.path.join', (['sys.prefix', '"""dlls"""'], {}), "(sys.prefix, 'dlls')\n", (44526, 44546), False, 'import os\n'), ((63050, 63109), 'subprocess.check_output', 'check_output', (['command'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), '(command, shell=True, stderr=subprocess.STDOUT)\n', (63062, 63109), False, 'from subprocess import check_output\n'), ((39693, 39719), 'os.path.exists', 'os.path.exists', (['tk_lib_dir'], {}), '(tk_lib_dir)\n', (39707, 39719), False, 'import os\n'), ((44851, 44868), 'os.getenv', 'os.getenv', (['"""HOME"""'], {}), "('HOME')\n", (44860, 44868), False, 'import os\n'), ((45934, 45963), 'os.path.join', 'join', (['F', "(fw + '.framework')", 'H'], {}), "(F, fw + '.framework', H)\n", (45938, 45963), False, 'from os.path import join, exists\n'), ((63286, 63345), 
'subprocess.check_output', 'check_output', (['command'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), '(command, shell=True, stderr=subprocess.STDOUT)\n', (63298, 63345), False, 'from subprocess import check_output\n'), ((45317, 45343), 'os.path.join', 'join', (['F', "(fw + '.framework')"], {}), "(F, fw + '.framework')\n", (45321, 45343), False, 'from os.path import join, exists\n'), ((49657, 49668), 'os.popen', 'os.popen', (['s'], {}), '(s)\n', (49665, 49668), False, 'import os\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('izi_accounts', '0002_core_accounts'),
]
operations = [
migrations.AlterField(
model_name='ipaddressrecord',
name='ip_address',
field=models.GenericIPAddressField(unique=True, verbose_name='IP address'),
),
]
| [
"django.db.models.GenericIPAddressField"
] | [((365, 433), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'unique': '(True)', 'verbose_name': '"""IP address"""'}), "(unique=True, verbose_name='IP address')\n", (393, 433), False, 'from django.db import migrations, models\n')] |
import os
import os
test_list = [line. rstrip('\n') for line in open('./food-101/meta/test.txt')]
os.mkdir('./food-101/test')
source_base = './food-101/images/'
target_base = './food-101/test/'
for item in test_list:
c = item.split('/')[0]
if not os.path.exists(os.path.join(base, c)):
os.mkdir(os.path.join(base, c))
os.rename(os.path.join(source_base, item) + '.jpg', os.path.join(target_base, item) + '.jpg') | [
"os.path.join",
"os.mkdir"
] | [((100, 127), 'os.mkdir', 'os.mkdir', (['"""./food-101/test"""'], {}), "('./food-101/test')\n", (108, 127), False, 'import os\n'), ((272, 293), 'os.path.join', 'os.path.join', (['base', 'c'], {}), '(base, c)\n', (284, 293), False, 'import os\n'), ((313, 334), 'os.path.join', 'os.path.join', (['base', 'c'], {}), '(base, c)\n', (325, 334), False, 'import os\n'), ((350, 381), 'os.path.join', 'os.path.join', (['source_base', 'item'], {}), '(source_base, item)\n', (362, 381), False, 'import os\n'), ((392, 423), 'os.path.join', 'os.path.join', (['target_base', 'item'], {}), '(target_base, item)\n', (404, 423), False, 'import os\n')] |
# vim:ts=4:sw=4:et:
# Copyright 2012-present Facebook, Inc.
# Licensed under the Apache License, Version 2.0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# no unicode literals
import functools
import inspect
import errno
try:
import unittest2 as unittest
except ImportError:
import unittest
import pywatchman
import time
import tempfile
import os.path
import os
import WatchmanInstance
import TempDir
# Types accepted as "string-like" when normalizing watchman responses
# (see getSubscription); Python 2 needs ``unicode`` alongside ``str``,
# while Python 3 pairs ``str`` with ``bytes``.
STRING_TYPES = (str, bytes) if pywatchman.compat.PYTHON3 else (str, unicode)
if os.name == 'nt':
    # monkey patch to hopefully minimize test flakiness
    def wrap_with_backoff(fn):
        """Wrap *fn* so that transient Windows file-sharing errors are
        retried with exponential backoff (10 attempts, starting at 10ms)
        instead of failing the test outright."""
        def wrapper(*args, **kwargs):
            delay = 0.01
            attempts = 10
            while True:
                try:
                    return fn(*args, **kwargs)
                except WindowsError as e:
                    if attempts == 0:
                        raise
                    # WindowsError: [Error 32] The process cannot access the
                    # file because it is being used by another process.
                    # Error 5: Access is denied.
                    if e.winerror not in (5, 32):
                        raise
                    attempts = attempts - 1
                    time.sleep(delay)
                    delay = delay * 2
        return wrapper

    # Replace the os-level mutators used by the tests with retrying
    # versions.  This rebinds attributes on the ``os`` module itself, so
    # it affects every caller in this process, not just this file.
    for name in ['rename', 'unlink', 'remove', 'rmdir']:
        setattr(os, name, wrap_with_backoff(getattr(os, name)))
class WatchmanTestCase(unittest.TestCase):
def __init__(self, methodName='run'):
super(WatchmanTestCase, self).__init__(methodName)
self.setDefaultConfiguration()
self.maxDiff = None
self.attempt = 0
if pywatchman.compat.PYTHON3:
self.assertItemsEqual = self.assertCountEqual
def requiresPersistentSession(self):
return False
def checkPersistentSession(self):
if self.requiresPersistentSession() and self.transport == 'cli':
self.skipTest('need persistent session')
def checkOSApplicability(self):
# override this to call self.skipTest if this test class should skip
# on the current OS
pass
def setUp(self):
self.checkPersistentSession()
self.checkOSApplicability()
def getClient(self, inst=None):
if inst or not hasattr(self, 'client'):
client = pywatchman.client(
# ASAN-enabled builds can be slower enough that we hit timeouts
# with the default of 1 second
timeout=3.0,
transport=self.transport,
sendEncoding=self.encoding,
recvEncoding=self.encoding,
sockpath=(inst or
WatchmanInstance.getSharedInstance()).getSockPath())
if not inst:
# only cache the client if it points to the shared instance
self.client = client
return client
return self.client
def __logTestInfo(self, test, msg):
if hasattr(self, 'client'):
try:
self.getClient().query('log', 'debug',
'TEST: %s %s\n\n' % (test, msg))
except Exception as e:
pass
def mkdtemp(self, **kwargs):
return self.normAbsolutePath(tempfile.mkdtemp(dir=self.tempdir,
**kwargs))
def mktemp(self, prefix=''):
f, name = tempfile.mkstemp(prefix=prefix, dir=self.tempdir)
os.close(f)
return name
def setAttemptNumber(self, attempt):
self.attempt = attempt
def run(self, result):
if result is None:
raise Exception('MUST be a runtests.py:Result instance')
# Arrange for any temporary stuff we create to go under
# our global tempdir and put it in a dir named for the test
id = '%s.%s.%s' % (self.id(), self.transport, self.encoding)
try:
self.tempdir = os.path.join(TempDir.get_temp_dir().get_dir(), id)
if self.attempt > 0:
self.tempdir += "-%d" % self.attempt
os.mkdir(self.tempdir)
self.__logTestInfo(id, 'BEGIN')
super(WatchmanTestCase, self).run(result)
finally:
try:
self.watchmanCommand('log-level', 'off')
self.getClient().getLog(remove=True)
except:
pass
self.__logTestInfo(id, 'END')
self.__clearWatches()
if hasattr(self, 'client'):
self.client.close()
delattr(self, 'client')
return result
def dumpLogs(self):
''' used in travis CI to show the hopefully relevant log snippets '''
inst = WatchmanInstance.getSharedInstance()
def tail(logstr, n):
lines = logstr.split('\n')[-n:]
return '\n'.join(lines)
print(self.getLogSample())
def getLogSample(self):
''' used in CI to show the hopefully relevant log snippets '''
inst = WatchmanInstance.getSharedInstance()
def tail(logstr, n):
lines = logstr.split('\n')[-n:]
return '\n'.join(lines)
return '\n'.join([
'CLI logs',
tail(inst.getCLILogContents(), 500),
'Server logs',
tail(inst.getServerLogContents(), 500),
])
def setConfiguration(self, transport, encoding):
self.transport = transport
self.encoding = encoding
def removeRelative(self, base, *fname):
fname = os.path.join(base, *fname)
os.remove(fname)
def touch(self, fname, times=None):
try:
os.utime(fname, times)
except OSError as e:
if e.errno == errno.ENOENT:
with open(fname, 'a'):
os.utime(fname, times)
else:
raise
def touchRelative(self, base, *fname):
fname = os.path.join(base, *fname)
self.touch(fname, None)
def __clearWatches(self):
if hasattr(self, 'client'):
try:
self.client.subs = {}
self.client.sub_by_root = {}
self.watchmanCommand('watch-del-all')
except Exception as e:
pass
def __del__(self):
self.__clearWatches()
def watchmanCommand(self, *args):
return self.getClient().query(*args)
def normRelativePath(self, path):
# TODO: in the future we will standardize on `/` as the
# dir separator so we can remove the replace call from here.
# We do not need to normcase because all of our tests are
# using the appropriate case already, and watchman returns
# paths in the canonical file replace case anyway.
return path.replace('\\', '/')
def normAbsolutePath(self, path):
# TODO: in the future we will standardize on `/` as the
# dir separator so we can remove the replace call.
return path.replace('\\', '/')
def _waitForCheck(self, cond, res_check, timeout):
deadline = time.time() + timeout
res = None
while time.time() < deadline:
res = cond()
if res_check(res):
return [True, res]
time.sleep(0.03)
return [False, res]
# Continually invoke `cond` until it returns true or timeout
# is reached. Returns a tuple of [bool, result] where the
# first element of the tuple indicates success/failure and
# the second element is the return value from the condition
def waitFor(self, cond, timeout=10):
return self._waitForCheck(cond, lambda res: res, timeout)
def waitForEqual(self, expected, actual_cond, timeout=10):
return self._waitForCheck(
actual_cond, lambda res: res == expected, timeout
)
def assertWaitFor(self, cond, timeout=10, message=None):
status, res = self.waitFor(cond, timeout)
if status:
return res
if message is None:
message = "%s was not met in %s seconds: %s" % (cond, timeout, res)
self.fail(message)
def assertWaitForEqual(
self, expected, actual_cond,
timeout=10, message=None
):
status, res = self.waitForEqual(expected, actual_cond, timeout)
if status:
return res
if message is None:
message = "%s was not equal to %s in %s seconds: %s" % (
actual_cond, expected, timeout, res
)
self.fail(message)
def getFileList(self, root, cursor=None, relativeRoot=None):
expr = {
"expression": ["exists"],
"fields": ["name"],
}
if cursor:
expr['since'] = cursor
if relativeRoot:
expr['relative_root'] = relativeRoot
res = self.watchmanCommand('query', root, expr)
files = self.normWatchmanFileList(res['files'])
self.last_file_list = files
return files
def waitForSync(self, root):
""" ensure that watchman has observed any pending file changes
This is most useful after mutating the filesystem and before
attempting to perform a since query
"""
self.watchmanCommand('query', root, {
'expression': ['name', '_bogus_'],
'fields': ['name']})
def getWatchList(self):
watch_list = self.watchmanCommand('watch-list')['roots']
self.last_root_list = watch_list
return watch_list
def normWatchmanFileList(self, files):
return sorted(list(map(self.normRelativePath, files)))
def normFileList(self, files):
return sorted(list(map(self.normRelativePath, files)))
def assertFileListsEqual(self, list1, list2, message=None):
list1 = [self.normRelativePath(f) for f in list1]
list2 = [self.normRelativePath(f) for f in list2]
self.assertItemsEqual(list1, list2, message)
def fileListsEqual(self, list1, list2):
list1 = [self.normRelativePath(f) for f in list1]
list2 = [self.normRelativePath(f) for f in list2]
return sorted(list1) == sorted(list2)
def fileListContains(self, list1, list2):
""" return true if list1 contains each unique element in list2 """
set1 = set([self.normRelativePath(f) for f in list1])
list2 = [self.normRelativePath(f) for f in list2]
return set1.issuperset(list2)
def assertFileListContains(self, list1, list2, message=None):
if not self.fileListContains(list1, list2):
self.fail(message)
# Wait for the file list to match the input set
def assertFileList(self, root, files=[], cursor=None,
relativeRoot=None, message=None):
expected_files = self.normFileList(files)
if (cursor is not None) and cursor[0:2] == 'n:':
# it doesn't make sense to repeat named cursor queries, as
# the cursor moves each time
self.getFileList(root, cursor=cursor, relativeRoot=relativeRoot)
else:
st, res = self.waitFor(
lambda: self.fileListsEqual(self.getFileList(root, cursor=cursor,
relativeRoot=relativeRoot
), expected_files))
self.assertFileListsEqual(self.last_file_list, expected_files, message)
# Wait for the list of watched roots to match the input set
def assertWatchListContains(self, roots, message=None):
st, res = self.waitFor(
lambda: self.fileListContains(self.getWatchList(), roots))
self.assertFileListContains(self.last_root_list, roots, message)
def waitForSub(self, name, root, accept=None, timeout=10, remove=True):
client = self.getClient()
def default_accept(dat):
return True
if accept is None:
accept = default_accept
deadline = time.time() + timeout
while time.time() < deadline:
sub = self.getSubscription(name, root=root, remove=False)
if sub is not None:
res = accept(sub)
if res:
return self.getSubscription(name, root=root, remove=remove)
# wait for more data
client.setTimeout(deadline - time.time())
client.receive()
return None
def getSubscription(self, name, root, remove=True, normalize=True):
    """Fetch queued data for subscription ``name`` on ``root``.

    When ``normalize`` is true, every file name in each payload is passed
    through normRelativePath so tests compare platform-neutral paths.
    """
    data = self.getClient().getSubscription(name, root=root, remove=remove)
    if data is None or not normalize:
        return data

    def _norm_entry(entry):
        # File entries are either bare path strings or dicts with 'name'.
        if isinstance(entry, STRING_TYPES):
            return self.normRelativePath(entry)
        entry['name'] = self.normRelativePath(entry['name'])
        return entry

    def _norm_payload(payload):
        if 'files' in payload:
            payload['files'] = [_norm_entry(e) for e in payload['files']]
        return payload

    return [_norm_payload(payload) for payload in data]
def findSubscriptionContainingFile(self, subdata, filename):
    """Return the first subscription payload whose file list mentions
    ``filename``, or None when no payload does."""
    wanted = self.normRelativePath(filename)
    return next(
        (dat for dat in subdata
         if 'files' in dat and
         wanted in self.normWatchmanFileList(dat['files'])),
        None)
def isCaseInsensitive(self):
    """Probe (once, then cache) whether the filesystem folds name case."""
    try:
        return self._case_insensitive
    except AttributeError:
        pass
    probe_dir = self.mkdtemp()
    self.touchRelative(probe_dir, 'a')
    # If the file created as 'a' is also visible as 'A', case is folded.
    self._case_insensitive = os.path.exists(os.path.join(probe_dir, 'A'))
    return self._case_insensitive
def suspendWatchman(self):
    """Suspend the shared watchman instance used by this test."""
    WatchmanInstance.getSharedInstance().suspend()
def resumeWatchman(self):
    """Resume the shared watchman instance after suspendWatchman()."""
    WatchmanInstance.getSharedInstance().resume()
def rootIsWatched(self, r):
    """Return True when ``r`` is among watchman's currently watched roots."""
    roots = self.watchmanCommand('watch-list')['roots']
    normed = [self.normAbsolutePath(root) for root in roots]
    return r in normed
def skip_for(transports=tuple(), codecs=tuple()):
    """
    Decorator to allow skipping tests for particular transports or codecs."""
    skip_transports = set(transports)
    skip_codecs = set(codecs)

    def decorator(func):
        @functools.wraps(func)
        def wrapped(self, *args, **kwargs):
            if self.transport in skip_transports or self.encoding in skip_codecs:
                self.skipTest(
                    'test skipped for transport %s, codec %s' %
                    (self.transport, self.encoding)
                )
            return func(self, *args, **kwargs)
        return wrapped
    return decorator
def expand_matrix(test_class):
    '''
    A decorator function used to create different permutations from
    a given input test class.
    Given a test class named "MyTest", this will create 4 separate
    classes named "MyTestLocalBser", "MyTestLocalBser2",
    "MyTestLocalJson" and "MyTestCliJson" that will exercise the
    different transport and encoding options implied by their names.
    '''
    # (transport, encoding, class-name suffix) combinations to generate.
    matrix = [
        ('local', 'bser', 'LocalBser2'),
        ('local', 'json', 'LocalJson'),
        ('cli', 'json', 'CliJson'),
    ]
    if not pywatchman.compat.PYTHON3:
        # The bser-v1 encoding is only exercised on python 2.
        matrix += [
            ('local', 'bser-v1', 'LocalBser'),
        ]

    # We do some rather hacky things here to define new test class types
    # in our caller's scope. This is needed so that the unittest TestLoader
    # will find the subclasses we define.
    caller_scope = inspect.currentframe().f_back.f_locals

    for (transport, encoding, suffix) in matrix:
        def make_class(transport, encoding, suffix):
            # Taking the loop variables as parameters freezes their values
            # for this generated class (avoids the late-binding-closure trap).
            subclass_name = test_class.__name__ + suffix

            # Define a new class that derives from the input class
            class MatrixTest(test_class):
                def setDefaultConfiguration(self):
                    self.setConfiguration(transport, encoding)

            # Set the name and module information on our new subclass
            MatrixTest.__name__ = subclass_name
            MatrixTest.__qualname__ = subclass_name
            MatrixTest.__module__ = test_class.__module__

            # Before we publish the test, check whether that generated
            # configuration would always skip
            try:
                t = MatrixTest()
                t.checkPersistentSession()
                t.checkOSApplicability()
                caller_scope[subclass_name] = MatrixTest
            except unittest.SkipTest:
                pass

        make_class(transport, encoding, suffix)
    return None
| [
"WatchmanInstance.getSharedInstance",
"os.close",
"inspect.currentframe",
"os.path.join",
"functools.wraps",
"os.utime",
"time.sleep",
"tempfile.mkdtemp",
"os.mkdir",
"time.time",
"tempfile.mkstemp",
"TempDir.get_temp_dir",
"os.remove"
] | [((3540, 3589), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'prefix': 'prefix', 'dir': 'self.tempdir'}), '(prefix=prefix, dir=self.tempdir)\n', (3556, 3589), False, 'import tempfile\n'), ((3598, 3609), 'os.close', 'os.close', (['f'], {}), '(f)\n', (3606, 3609), False, 'import os\n'), ((4858, 4894), 'WatchmanInstance.getSharedInstance', 'WatchmanInstance.getSharedInstance', ([], {}), '()\n', (4892, 4894), False, 'import WatchmanInstance\n'), ((5156, 5192), 'WatchmanInstance.getSharedInstance', 'WatchmanInstance.getSharedInstance', ([], {}), '()\n', (5190, 5192), False, 'import WatchmanInstance\n'), ((5677, 5703), 'os.path.join', 'os.path.join', (['base', '*fname'], {}), '(base, *fname)\n', (5689, 5703), False, 'import os\n'), ((5712, 5728), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (5721, 5728), False, 'import os\n'), ((6069, 6095), 'os.path.join', 'os.path.join', (['base', '*fname'], {}), '(base, *fname)\n', (6081, 6095), False, 'import os\n'), ((14430, 14448), 'functools.wraps', 'functools.wraps', (['f'], {}), '(f)\n', (14445, 14448), False, 'import functools\n'), ((3388, 3432), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'dir': 'self.tempdir'}), '(dir=self.tempdir, **kwargs)\n', (3404, 3432), False, 'import tempfile\n'), ((4218, 4240), 'os.mkdir', 'os.mkdir', (['self.tempdir'], {}), '(self.tempdir)\n', (4226, 4240), False, 'import os\n'), ((5795, 5817), 'os.utime', 'os.utime', (['fname', 'times'], {}), '(fname, times)\n', (5803, 5817), False, 'import os\n'), ((7222, 7233), 'time.time', 'time.time', ([], {}), '()\n', (7231, 7233), False, 'import time\n'), ((7277, 7288), 'time.time', 'time.time', ([], {}), '()\n', (7286, 7288), False, 'import time\n'), ((7404, 7420), 'time.sleep', 'time.sleep', (['(0.03)'], {}), '(0.03)\n', (7414, 7420), False, 'import time\n'), ((12107, 12118), 'time.time', 'time.time', ([], {}), '()\n', (12116, 12118), False, 'import time\n'), ((12143, 12154), 'time.time', 'time.time', ([], {}), '()\n', (12152, 12154), 
False, 'import time\n'), ((13797, 13817), 'os.path.join', 'os.path.join', (['d', '"""A"""'], {}), "(d, 'A')\n", (13809, 13817), False, 'import os\n'), ((15687, 15709), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (15707, 15709), False, 'import inspect\n'), ((1313, 1330), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (1323, 1330), False, 'import time\n'), ((13897, 13933), 'WatchmanInstance.getSharedInstance', 'WatchmanInstance.getSharedInstance', ([], {}), '()\n', (13931, 13933), False, 'import WatchmanInstance\n'), ((13983, 14019), 'WatchmanInstance.getSharedInstance', 'WatchmanInstance.getSharedInstance', ([], {}), '()\n', (14017, 14019), False, 'import WatchmanInstance\n'), ((12481, 12492), 'time.time', 'time.time', ([], {}), '()\n', (12490, 12492), False, 'import time\n'), ((4082, 4104), 'TempDir.get_temp_dir', 'TempDir.get_temp_dir', ([], {}), '()\n', (4102, 4104), False, 'import TempDir\n'), ((5946, 5968), 'os.utime', 'os.utime', (['fname', 'times'], {}), '(fname, times)\n', (5954, 5968), False, 'import os\n'), ((2796, 2832), 'WatchmanInstance.getSharedInstance', 'WatchmanInstance.getSharedInstance', ([], {}), '()\n', (2830, 2832), False, 'import WatchmanInstance\n')] |
import hikari
from motor.motor_asyncio import AsyncIOMotorClient
from os import environ
# Mongo connection string comes from the environment; a missing variable
# fails fast here with a KeyError at import time.
MONGOTOKEN = environ["MONGOTOKEN"]
cluster = AsyncIOMotorClient(MONGOTOKEN)
# Database holding the bot's per-guild configuration documents.
database = cluster.discordlocale
class helper:
    "Helper, simplifier for webhooks and embeds"

    def embed_builder(
        embed: hikari.Embed, author: hikari.User | hikari.Member | hikari.Guild
    ):
        """Set ``author`` (user, member or guild) as the embed's author line.

        Guilds without an icon fall back to a placeholder image; users fall
        back to their default avatar.
        """
        # NOTE: declared without ``self`` in the original; kept that way so
        # existing ``helper.embed_builder(...)`` call sites keep working.
        if isinstance(author, (hikari.Guild, hikari.GatewayGuild)):
            icon = (
                author.icon_url
                or "https://avatanplus.com/files/resources/original/5aae7d3da9fb816239993900.png"
            )
        else:
            icon = author.avatar_url or author.display_avatar_url
        return embed.set_author(name=str(author), icon=icon)

    async def webhook_send(
        guild_id: int, bot: hikari.GatewayBot, embed: hikari.Embed, data: dict = None
    ):
        """Send ``embed`` to the guild's configured log channel via a
        reusable webhook named ``CB<guild name>``, creating it if needed.

        ``data`` may carry the guild's "modules" config; when omitted it is
        loaded from the database.  Silently returns when no log channel is
        configured.
        """
        db = DB()
        if not data:
            data = await db.findo({"_id": guild_id})
            data = data["modules"]
        # Bug fix: dict.get never raises KeyError, so the original
        # ``try: int(data.get("logs")) except KeyError`` could not catch a
        # missing key -- int(None) raises TypeError and crashed instead.
        logs = data.get("logs")
        if logs is None:
            return
        channel_id = int(logs)
        guild = bot.cache.get_available_guild(guild_id)
        wh = None
        for wh in list(await bot.rest.fetch_channel_webhooks(channel_id)):
            if wh.name == f"CB{guild.name}":
                break
        if wh is None or wh.name != f"CB{guild.name}":
            wh = await bot.rest.create_webhook(
                channel_id, f"CB{guild.name}", reason="Log Creation"
            )
        await wh.execute(
            username=guild.name,
            avatar_url=guild.icon_url
            or "https://avatanplus.com/files/resources/original/5aae7d3da9fb816239993900.png",
            embed=embed,
        )
class DB:
    """Usual wrapper for MongoDB using motor"""

    def __init__(self, base: str = "CommunityBot") -> None:
        # ``base`` names the collection inside the module-level ``database``.
        self.base = database[base]

    async def inserto(self, arg: dict) -> None:
        """
        Inserts ONE object
        await db.inserto({"_id": "smth"})
        """
        await self.base.insert_one(arg)

    async def findm(self, arg=None, length: int = 1000) -> list:
        """
        Returns up to ``length`` objects in a list (default 1000)
        data = await db.findm()
        """
        # ``length`` was a hard-coded 1000; now a backward-compatible
        # parameter so callers can raise or lower the cap.
        cursor = self.base.find(arg)
        return await cursor.to_list(length=length)

    async def insertm(self, args: list) -> None:
        """
        Inserts MANY objects
        await db.insertm([{"_id": "smth"}, {"_id": "smthagain"}])
        """
        # Bug fix: motor's insert_many returns an awaitable; the original
        # synchronous method discarded it, so the write never executed.
        # The method is now a coroutine and must be awaited.
        await self.base.insert_many(args)

    async def deleteo(self, arg: dict) -> None:
        """
        Delete ONE object
        await db.deleteo({"_id": "smth"})
        """
        await self.base.delete_one(arg)

    async def deletem(self, args: list) -> None:
        """
        Delete MANY objects, one matching document per filter
        await db.deletem([{"_id": "smth"}, {"_id": "smthagain"}])
        """
        for arg in args:
            await self.base.delete_one(arg)

    async def findo(self, arg=None) -> dict:
        """
        Returns ONE object in a dict
        await db.findo({"_id": "smth"})
        """
        return await self.base.find_one(arg)

    async def updateo(self, arg1: dict, arg2: dict, act="set"):
        """
        Update ONE object
        set - Sets the value of a field in a document.
        unset - Removes the specified field from a document.
        inc - Increments the value of the field by the specified amount.
        pop - Removes the first or last item of an array.
        pull - Removes all array elements that match a specified query.
        push - Adds an item to an array.
        await db.updateo({"_id": "smth"}, {"channel": CHANNEL_ID})
        """
        await self.base.update_one(arg1, {f"${act}": arg2})
| [
"motor.motor_asyncio.AsyncIOMotorClient"
] | [((135, 165), 'motor.motor_asyncio.AsyncIOMotorClient', 'AsyncIOMotorClient', (['MONGOTOKEN'], {}), '(MONGOTOKEN)\n', (153, 165), False, 'from motor.motor_asyncio import AsyncIOMotorClient\n')] |
import requests
import datetime
#Latitude and Longitudes
# https://www.latlong.net/
# Current local hour (0-23).  datetime.now() returns naive local time, so
# read the hour attribute directly instead of round-tripping via strftime.
now = datetime.datetime.now().hour
SEATTLE = {"lat": 47.606209, "lng": -122.332069, 'formatted': 0}
PITTSBURGH = {"lat": 40.442169, "lng": -79.994957, 'formatted': 0}
#CHOOSE A CITY VARIABLE FROM ABOVE
CITY = SEATTLE
# Offset applied to the API's UTC hours to get local (Pacific) hours.
# NOTE(review): a fixed -8 ignores daylight saving time -- confirm intent.
UTC_OFFSET_HOURS = -8

# Where is the ISS right now?
response = requests.get(url='http://api.open-notify.org/iss-now.json')
response.raise_for_status()
data = response.json()
iss_longitude = float(data['iss_position']['longitude'])
iss_latitude = float(data['iss_position']['latitude'])

# Today's sunrise/sunset for the chosen city (ISO timestamps, UTC,
# because 'formatted': 0 is passed in the params).
response = requests.get("https://api.sunrise-sunset.org/json", params=CITY)
response.raise_for_status()
data = response.json()


def _utc_hour_to_local(iso_time: str) -> int:
    """Extract the hour from an ISO timestamp and shift it to local time."""
    utc_hour = int(iso_time.split("T")[1].split(":")[0])
    return (utc_hour + 24 + UTC_OFFSET_HOURS) % 24


sunrise = _utc_hour_to_local(data['results']['sunrise'])
sunset = _utc_hour_to_local(data['results']['sunset'])

# Only worth looking for the ISS when it is dark and within ~5 degrees.
if (now >= sunrise) and (now <= sunset):
    print('its light out')
elif (abs(iss_latitude - CITY['lat']) <= 5) and (abs(iss_longitude - CITY['lng']) <= 5):
    print("Look up!")
else:
    print("not above")
"datetime.datetime.now",
"requests.get"
] | [((358, 417), 'requests.get', 'requests.get', ([], {'url': '"""http://api.open-notify.org/iss-now.json"""'}), "(url='http://api.open-notify.org/iss-now.json')\n", (370, 417), False, 'import requests\n'), ((597, 661), 'requests.get', 'requests.get', (['"""https://api.sunrise-sunset.org/json"""'], {'params': 'CITY'}), "('https://api.sunrise-sunset.org/json', params=CITY)\n", (609, 661), False, 'import requests\n'), ((100, 123), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (121, 123), False, 'import datetime\n')] |
"""Text transitions used for segment displays."""
import abc
from typing import Optional, List
from mpf.core.placeholder_manager import TextTemplate
from mpf.core.rgb_color import RGBColor
from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText
STEP_OUT_OF_RANGE_ERROR = "Step is out of range"
TRANSITION_DIRECTION_UNKNOWN_ERROR = "Transition uses an unknown direction value"
class TransitionBase(metaclass=abc.ABCMeta):
    """Base class for text transitions in segment displays."""

    __slots__ = ["output_length", "config", "collapse_dots", "collapse_commas"]

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Initialize the transition."""
        self.output_length = output_length
        self.config = config
        self.collapse_dots = collapse_dots
        self.collapse_commas = collapse_commas
        # Copy recognised config entries onto same-named attributes, so
        # subclass defaults set before super().__init__ can be overridden.
        for option, setting in config.items():
            if not hasattr(self, option):
                continue
            setattr(self, option, setting)

    @abc.abstractmethod
    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        raise NotImplementedError

    # pylint: disable=too-many-arguments
    @abc.abstractmethod
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        raise NotImplementedError
class TransitionRunner:
    """Class to run/execute transitions using an iterator."""

    __slots__ = ["_transition", "_step", "_current_placeholder", "_new_placeholder", "_current_colors", "_new_colors"]

    # pylint: disable=too-many-arguments
    def __init__(self, machine, transition: TransitionBase, current_text: str, new_text: str,
                 current_colors: Optional[List[RGBColor]] = None,
                 new_colors: Optional[List[RGBColor]] = None) -> None:
        """Class initializer."""
        self._transition = transition
        self._step = 0
        # Texts are templates, re-evaluated at every step.
        self._current_placeholder = TextTemplate(machine, current_text)
        self._new_placeholder = TextTemplate(machine, new_text)
        self._current_colors = current_colors
        self._new_colors = new_colors

    def __iter__(self):
        """Return the iterator."""
        return self

    def __next__(self):
        """Evaluate and return the next transition step."""
        if self._step >= self._transition.get_step_count():
            raise StopIteration
        result = self._transition.get_transition_step(
            self._step,
            self._current_placeholder.evaluate({}),
            self._new_placeholder.evaluate({}),
            self._current_colors,
            self._new_colors)
        self._step += 1
        return result
class NoTransition(TransitionBase):
    """Segment display no transition effect (jump straight to new text)."""

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        return 1

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Return the single step: the fully rendered new text."""
        if not 0 <= step < self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        return SegmentDisplayText.from_str(
            new_text, self.output_length, self.collapse_dots,
            self.collapse_commas, new_colors)
class PushTransition(TransitionBase):
    """Segment display push transition effect.

    The new text pushes the current text off the display, optionally with
    a configurable separator ``text`` travelling between the two.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by same-named keys in ``config`` inside
        # the base-class constructor.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        # One step per display position plus one per separator character.
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                    self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                self.collapse_commas, new_colors)
        if self.text:
            # Separator defaults to the first color of the new text unless
            # an explicit text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                    self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            # Build new + separator + current and slide a display-sized
            # window over the concatenation, one position per step.
            # NOTE: extend() mutates in place; temp_list aliases
            # new_display_text here.
            temp_list = new_display_text
            temp_list.extend(transition_text)
            temp_list.extend(current_display_text)
            return temp_list[
                self.output_length + len(self.text) - (step + 1):2 * self.output_length + len(
                    self.text) - (step + 1)]
        if self.direction == 'left':
            # Mirror image: current + separator + new, window moves the
            # other way.
            temp_list = current_display_text
            temp_list.extend(transition_text)
            temp_list.extend(new_display_text)
            return temp_list[step + 1:step + 1 + self.output_length]
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class CoverTransition(TransitionBase):
    """Segment display cover transition effect.

    The new text slides in over the current text, which stays in place
    until covered.  An optional ``text`` leads the incoming text.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by same-named keys in ``config`` inside
        # the base-class constructor.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                    self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                self.collapse_commas, new_colors)
        if self.text:
            # Leading text defaults to the first color of the new text
            # unless an explicit text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                    self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            # NOTE: extend() mutates in place, so new_display_text aliases
            # new_extended_display_text from here on.
            new_extended_display_text = new_display_text
            new_extended_display_text.extend(transition_text)
            if step < self.output_length:
                temp_text = new_extended_display_text[-(step + 1):]
                temp_text.extend(current_display_text[step + 1:])
            else:
                temp_text = new_display_text[-(step + 1):-(step + 1) + self.output_length]
            return temp_text
        if self.direction == 'left':
            new_extended_display_text = transition_text
            new_extended_display_text.extend(new_display_text)
            if step < self.output_length:
                temp_text = current_display_text[:self.output_length - (step + 1)]
                temp_text.extend(new_extended_display_text[:step + 1])
            else:
                temp_text = new_extended_display_text[step - self.output_length + 1:step + 1]
            return temp_text
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class UncoverTransition(TransitionBase):
    """Segment display uncover transition effect.

    The current text slides away, revealing the new text that was
    "underneath" it.  An optional ``text`` trails the departing text.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by same-named keys in ``config`` inside
        # the base-class constructor.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                    self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                self.collapse_commas, new_colors)
        if self.text:
            # Trailing text defaults to the first color of the new text
            # unless an explicit text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                    self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            # NOTE: extend() mutates in place; transition_text aliases
            # current_extended_display_text from here on.
            current_extended_display_text = transition_text
            current_extended_display_text.extend(current_display_text)
            if step < len(self.text):
                temp_text = current_extended_display_text[
                    len(self.text) - step - 1:len(self.text) - step - 1 + self.output_length]
            else:
                temp_text = new_display_text[:step - len(self.text) + 1]
                temp_text.extend(current_extended_display_text[:self.output_length - len(temp_text)])
            return temp_text
        if self.direction == 'left':
            # NOTE: current_display_text aliases
            # current_extended_display_text from here on.
            current_extended_display_text = current_display_text
            current_extended_display_text.extend(transition_text)
            if step < len(self.text):
                temp_text = current_extended_display_text[step + 1:step + 1 + self.output_length]
            else:
                temp_text = current_display_text[step + 1:]
                temp_text.extend(new_display_text[-(self.output_length - len(temp_text)):])
            return temp_text
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class WipeTransition(TransitionBase):
    """Segment display wipe transition effect.

    The new text replaces the current text position by position, with an
    optional ``text`` acting as the wipe edge.
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by same-named keys in ``config`` inside
        # the base-class constructor.
        self.direction = 'right'
        self.text = None
        self.text_color = None
        super().__init__(output_length, collapse_dots, collapse_commas, config)
        if self.text is None:
            self.text = ''

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        return self.output_length + len(self.text)

    # pylint: disable=too-many-arguments,too-many-branches,too-many-return-statements
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                    self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                self.collapse_commas, new_colors)
        if self.text:
            # Wipe-edge text defaults to the first color of the new text
            # unless an explicit text_color was configured.
            if new_colors and not self.text_color:
                text_color = [new_colors[0]]
            else:
                text_color = self.text_color
            transition_text = SegmentDisplayText.from_str(self.text, len(self.text), self.collapse_dots,
                                                    self.collapse_commas, text_color)
        else:
            transition_text = UncoloredSegmentDisplayText([], self.collapse_dots, self.collapse_commas)
        if self.direction == 'right':
            # Three phases: edge entering, edge fully inside, edge leaving.
            if step < len(self.text):
                temp_text = transition_text[-(step + 1):]
                temp_text.extend(current_display_text[step + 1:])
            elif step < self.output_length:
                temp_text = new_display_text[:step - len(self.text) + 1]
                temp_text.extend(transition_text)
                temp_text.extend(current_display_text[len(temp_text):])
            else:
                temp_text = new_display_text[:step - len(self.text) + 1]
                temp_text.extend(transition_text[:self.output_length - len(temp_text)])
            return temp_text
        if self.direction == 'left':
            if step < len(self.text):
                temp_text = current_display_text[:self.output_length - (step + 1)]
                temp_text.extend(transition_text[:step + 1])
            elif step < self.output_length:
                temp_text = current_display_text[:self.output_length - (step + 1)]
                temp_text.extend(transition_text)
                temp_text.extend(new_display_text[len(temp_text):])
            elif step < self.output_length + len(self.text) - 1:
                temp_text = transition_text[step - (self.output_length + len(self.text)) + 1:]
                temp_text.extend(new_display_text[-(self.output_length - len(temp_text)):])
            else:
                temp_text = new_display_text
            return temp_text
        raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
class SplitTransition(TransitionBase):
    """Segment display split transition effect.

    The display changes symmetrically from the middle outwards
    (direction 'out') or from the edges inwards (direction 'in'), either
    pushing the old text ('push' mode) or overwriting it ('wipe' mode).
    """

    def __init__(self, output_length: int, collapse_dots: bool, collapse_commas: bool, config: dict) -> None:
        """Class initializer."""
        # Defaults; overridden by same-named keys in ``config`` inside
        # the base-class constructor.
        self.direction = 'out'
        self.mode = 'push'
        super().__init__(output_length, collapse_dots, collapse_commas, config)

    def get_step_count(self):
        """Return the total number of steps required for the transition."""
        # Half the display width, rounded up for odd lengths.
        return int((self.output_length + 1) / 2)

    # pylint: disable=too-many-arguments,too-many-branches,too-many-return-statements
    def get_transition_step(self, step: int, current_text: str, new_text: str,
                            current_colors: Optional[List[RGBColor]] = None,
                            new_colors: Optional[List[RGBColor]] = None) -> SegmentDisplayText:
        """Calculate all the steps in the transition."""
        if step < 0 or step >= self.get_step_count():
            raise AssertionError(STEP_OUT_OF_RANGE_ERROR)
        current_display_text = SegmentDisplayText.from_str(current_text, self.output_length, self.collapse_dots,
                                                    self.collapse_commas, current_colors)
        new_display_text = SegmentDisplayText.from_str(new_text, self.output_length, self.collapse_dots,
                                                self.collapse_commas, new_colors)
        if self.mode == 'push':
            if self.direction == 'out':
                if step == self.get_step_count() - 1:
                    return new_display_text
                # ``characters`` counts remaining old characters on each
                # side of the split point; even/odd lengths need different
                # split handling.
                characters = int(self.output_length / 2)
                split_point = characters
                if characters * 2 == self.output_length:
                    characters -= 1
                else:
                    split_point += 1
                characters -= step
                temp_text = current_display_text[split_point - characters:split_point]
                temp_text.extend(new_display_text[characters:characters + (self.output_length - 2 * characters)])
                temp_text.extend(current_display_text[split_point:split_point + characters])
                return temp_text
            if self.direction == 'in':
                if step == self.get_step_count() - 1:
                    return new_display_text
                split_point = int(self.output_length / 2)
                characters = 1
                if split_point * 2 < self.output_length:
                    split_point += 1
                characters += step
                temp_text = new_display_text[split_point - characters:split_point]
                temp_text.extend(current_display_text[characters:characters + (self.output_length - 2 * characters)])
                temp_text.extend(new_display_text[split_point:split_point + characters])
                return temp_text
            raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
        if self.mode == 'wipe':
            if self.direction == 'out':
                if step == self.get_step_count() - 1:
                    return new_display_text
                characters = int(self.output_length / 2)
                if characters * 2 == self.output_length:
                    characters -= 1
                characters -= step
                temp_text = current_display_text[:characters]
                temp_text.extend(new_display_text[characters:characters + (self.output_length - 2 * characters)])
                temp_text.extend(current_display_text[-characters:])
                return temp_text
            if self.direction == 'in':
                if step == self.get_step_count() - 1:
                    return new_display_text
                temp_text = new_display_text[:step + 1]
                temp_text.extend(current_display_text[step + 1:step + 1 + (self.output_length - 2 * len(temp_text))])
                temp_text.extend(new_display_text[-(step + 1):])
                return temp_text
            raise AssertionError(TRANSITION_DIRECTION_UNKNOWN_ERROR)
        raise AssertionError("Transition uses an unknown mode value")
| [
"mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str",
"mpf.core.placeholder_manager.TextTemplate",
"mpf.devices.segment_display.segment_display_text.UncoloredSegmentDisplayText"
] | [((2243, 2278), 'mpf.core.placeholder_manager.TextTemplate', 'TextTemplate', (['machine', 'current_text'], {}), '(machine, current_text)\n', (2255, 2278), False, 'from mpf.core.placeholder_manager import TextTemplate\n'), ((2311, 2342), 'mpf.core.placeholder_manager.TextTemplate', 'TextTemplate', (['machine', 'new_text'], {}), '(machine, new_text)\n', (2323, 2342), False, 'from mpf.core.placeholder_manager import TextTemplate\n'), ((3873, 3989), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['new_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'new_colors'], {}), '(new_text, self.output_length, self.\n collapse_dots, self.collapse_commas, new_colors)\n', (3900, 3989), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((5143, 5267), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['current_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'current_colors'], {}), '(current_text, self.output_length, self.\n collapse_dots, self.collapse_commas, current_colors)\n', (5170, 5267), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((5349, 5465), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['new_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'new_colors'], {}), '(new_text, self.output_length, self.\n collapse_dots, self.collapse_commas, new_colors)\n', (5376, 5465), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((7788, 7912), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['current_text', 'self.output_length', 
'self.collapse_dots', 'self.collapse_commas', 'current_colors'], {}), '(current_text, self.output_length, self.\n collapse_dots, self.collapse_commas, current_colors)\n', (7815, 7912), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((7994, 8110), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['new_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'new_colors'], {}), '(new_text, self.output_length, self.\n collapse_dots, self.collapse_commas, new_colors)\n', (8021, 8110), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((10815, 10939), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['current_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'current_colors'], {}), '(current_text, self.output_length, self.\n collapse_dots, self.collapse_commas, current_colors)\n', (10842, 10939), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((11021, 11137), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['new_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'new_colors'], {}), '(new_text, self.output_length, self.\n collapse_dots, self.collapse_commas, new_colors)\n', (11048, 11137), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((14001, 14125), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['current_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'current_colors'], {}), '(current_text, self.output_length, self.\n collapse_dots, 
self.collapse_commas, current_colors)\n', (14028, 14125), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((14207, 14323), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['new_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'new_colors'], {}), '(new_text, self.output_length, self.\n collapse_dots, self.collapse_commas, new_colors)\n', (14234, 14323), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((17465, 17589), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['current_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'current_colors'], {}), '(current_text, self.output_length, self.\n collapse_dots, self.collapse_commas, current_colors)\n', (17492, 17589), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((17671, 17787), 'mpf.devices.segment_display.segment_display_text.SegmentDisplayText.from_str', 'SegmentDisplayText.from_str', (['new_text', 'self.output_length', 'self.collapse_dots', 'self.collapse_commas', 'new_colors'], {}), '(new_text, self.output_length, self.\n collapse_dots, self.collapse_commas, new_colors)\n', (17698, 17787), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((5939, 6012), 'mpf.devices.segment_display.segment_display_text.UncoloredSegmentDisplayText', 'UncoloredSegmentDisplayText', (['[]', 'self.collapse_dots', 'self.collapse_commas'], {}), '([], self.collapse_dots, self.collapse_commas)\n', (5966, 6012), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((8584, 8657), 
'mpf.devices.segment_display.segment_display_text.UncoloredSegmentDisplayText', 'UncoloredSegmentDisplayText', (['[]', 'self.collapse_dots', 'self.collapse_commas'], {}), '([], self.collapse_dots, self.collapse_commas)\n', (8611, 8657), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((11611, 11684), 'mpf.devices.segment_display.segment_display_text.UncoloredSegmentDisplayText', 'UncoloredSegmentDisplayText', (['[]', 'self.collapse_dots', 'self.collapse_commas'], {}), '([], self.collapse_dots, self.collapse_commas)\n', (11638, 11684), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n'), ((14797, 14870), 'mpf.devices.segment_display.segment_display_text.UncoloredSegmentDisplayText', 'UncoloredSegmentDisplayText', (['[]', 'self.collapse_dots', 'self.collapse_commas'], {}), '([], self.collapse_dots, self.collapse_commas)\n', (14824, 14870), False, 'from mpf.devices.segment_display.segment_display_text import SegmentDisplayText, UncoloredSegmentDisplayText\n')] |
"""
pypi setup
"""
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="qclib",
version="0.0.8",
author="<NAME>",
author_email="<EMAIL>",
description="A quantum computing library using qiskit",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/qclib/qclib",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
python_requires='>=3.7',
install_requires=[
'scipy>=1.7.1',
'qiskit>=0.18.3',
'deprecation',
'graphviz'
]
)
| [
"setuptools.find_packages"
] | [((419, 445), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (443, 445), False, 'import setuptools\n')] |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
from heat.common import exception
from heat.common.i18n import _
from heat.common.i18n import _LW
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine.resources.openstack.neutron import neutron
from heat.engine.resources.openstack.neutron import subnet
from heat.engine import support
LOG = logging.getLogger(__name__)
class Port(neutron.NeutronResource):
    """Heat resource wrapping a Neutron port.

    Creates, updates and deletes ports through the Neutron client and
    exposes port details (fixed IPs, MAC address, security groups,
    status, ...) as resource attributes.
    """
    PROPERTIES = (
        NAME, NETWORK_ID, NETWORK, FIXED_IPS, SECURITY_GROUPS,
        REPLACEMENT_POLICY, DEVICE_ID, DEVICE_OWNER
    ) = (
        'name', 'network_id', 'network', 'fixed_ips', 'security_groups',
        'replacement_policy', 'device_id', 'device_owner'
    )
    EXTRA_PROPERTIES = (
        VALUE_SPECS, ADMIN_STATE_UP, MAC_ADDRESS,
        ALLOWED_ADDRESS_PAIRS, VNIC_TYPE, PORT_SECURITY_ENABLED,
    ) = (
        'value_specs', 'admin_state_up', 'mac_address',
        'allowed_address_pairs', 'binding:vnic_type',
        'port_security_enabled',
    )
    _FIXED_IP_KEYS = (
        FIXED_IP_SUBNET_ID, FIXED_IP_SUBNET, FIXED_IP_IP_ADDRESS,
    ) = (
        'subnet_id', 'subnet', 'ip_address',
    )
    _ALLOWED_ADDRESS_PAIR_KEYS = (
        ALLOWED_ADDRESS_PAIR_MAC_ADDRESS, ALLOWED_ADDRESS_PAIR_IP_ADDRESS,
    ) = (
        'mac_address', 'ip_address',
    )
    ATTRIBUTES = (
        ADMIN_STATE_UP_ATTR, DEVICE_ID_ATTR, DEVICE_OWNER_ATTR, FIXED_IPS_ATTR,
        MAC_ADDRESS_ATTR, NAME_ATTR, NETWORK_ID_ATTR, SECURITY_GROUPS_ATTR,
        STATUS, TENANT_ID, ALLOWED_ADDRESS_PAIRS_ATTR, SUBNETS_ATTR,
        PORT_SECURITY_ENABLED_ATTR,
    ) = (
        'admin_state_up', 'device_id', 'device_owner', 'fixed_ips',
        'mac_address', 'name', 'network_id', 'security_groups',
        'status', 'tenant_id', 'allowed_address_pairs', 'subnets',
        'port_security_enabled',
    )
    properties_schema = {
        NAME: properties.Schema(
            properties.Schema.STRING,
            _('A symbolic name for this port.'),
            update_allowed=True
        ),
        NETWORK_ID: properties.Schema(
            properties.Schema.STRING,
            support_status=support.SupportStatus(
                status=support.HIDDEN,
                version='5.0.0',
                message=_('Use property %s.') % NETWORK,
                previous_status=support.SupportStatus(
                    status=support.DEPRECATED,
                    version='2014.2'
                )
            ),
            constraints=[
                constraints.CustomConstraint('neutron.network')
            ],
        ),
        NETWORK: properties.Schema(
            properties.Schema.STRING,
            _('Network this port belongs to. If you plan to use current port '
              'to assign Floating IP, you should specify %(fixed_ips)s '
              'with %(subnet)s') % {'fixed_ips': FIXED_IPS,
                                    'subnet': FIXED_IP_SUBNET},
            support_status=support.SupportStatus(version='2014.2'),
            constraints=[
                constraints.CustomConstraint('neutron.network')
            ],
        ),
        DEVICE_ID: properties.Schema(
            properties.Schema.STRING,
            _('Device ID of this port.'),
            update_allowed=True
        ),
        DEVICE_OWNER: properties.Schema(
            properties.Schema.STRING,
            _('Name of the network owning the port. '
              'The value is typically network:floatingip '
              'or network:router_interface or network:dhcp'),
            update_allowed=True
        ),
        FIXED_IPS: properties.Schema(
            properties.Schema.LIST,
            _('Desired IPs for this port.'),
            default=[],
            schema=properties.Schema(
                properties.Schema.MAP,
                schema={
                    FIXED_IP_SUBNET_ID: properties.Schema(
                        properties.Schema.STRING,
                        support_status=support.SupportStatus(
                            status=support.HIDDEN,
                            version='5.0.0',
                            message=_('Use property %s.') % FIXED_IP_SUBNET,
                            previous_status=support.SupportStatus(
                                status=support.DEPRECATED,
                                version='2014.2 '
                            )
                        ),
                        constraints=[
                            constraints.CustomConstraint('neutron.subnet')
                        ]
                    ),
                    FIXED_IP_SUBNET: properties.Schema(
                        properties.Schema.STRING,
                        _('Subnet in which to allocate the IP address for '
                          'this port.'),
                        support_status=support.SupportStatus(version='2014.2'),
                        constraints=[
                            constraints.CustomConstraint('neutron.subnet')
                        ]
                    ),
                    FIXED_IP_IP_ADDRESS: properties.Schema(
                        properties.Schema.STRING,
                        _('IP address desired in the subnet for this port.'),
                        constraints=[
                            constraints.CustomConstraint('ip_addr')
                        ]
                    ),
                },
            ),
            update_allowed=True
        ),
        SECURITY_GROUPS: properties.Schema(
            properties.Schema.LIST,
            _('Security group IDs to associate with this port.'),
            update_allowed=True
        ),
        REPLACEMENT_POLICY: properties.Schema(
            properties.Schema.STRING,
            _('Policy on how to respond to a stack-update for this resource. '
              'REPLACE_ALWAYS will replace the port regardless of any '
              'property changes. AUTO will update the existing port for any '
              'changed update-allowed property.'),
            default='AUTO',
            constraints=[
                constraints.AllowedValues(['REPLACE_ALWAYS', 'AUTO']),
            ],
            update_allowed=True
        ),
    }
    # NOTE(prazumovsky): properties_schema has been separated because some
    # properties used in server for creating internal port.
    extra_properties_schema = {
        VALUE_SPECS: properties.Schema(
            properties.Schema.MAP,
            _('Extra parameters to include in the "port" object in the '
              'creation request.'),
            default={}
        ),
        ADMIN_STATE_UP: properties.Schema(
            properties.Schema.BOOLEAN,
            _('The administrative state of this port.'),
            default=True,
            update_allowed=True
        ),
        MAC_ADDRESS: properties.Schema(
            properties.Schema.STRING,
            _('MAC address to give to this port.'),
            constraints=[
                constraints.CustomConstraint('mac_addr')
            ]
        ),
        ALLOWED_ADDRESS_PAIRS: properties.Schema(
            properties.Schema.LIST,
            _('Additional MAC/IP address pairs allowed to pass through the '
              'port.'),
            schema=properties.Schema(
                properties.Schema.MAP,
                schema={
                    ALLOWED_ADDRESS_PAIR_MAC_ADDRESS: properties.Schema(
                        properties.Schema.STRING,
                        _('MAC address to allow through this port.'),
                        constraints=[
                            constraints.CustomConstraint('mac_addr')
                        ]
                    ),
                    ALLOWED_ADDRESS_PAIR_IP_ADDRESS: properties.Schema(
                        properties.Schema.STRING,
                        _('IP address to allow through this port.'),
                        required=True,
                        constraints=[
                            constraints.CustomConstraint('ip_addr')
                        ]
                    ),
                },
            )
        ),
        VNIC_TYPE: properties.Schema(
            properties.Schema.STRING,
            _('The vnic type to be bound on the neutron port. '
              'To support SR-IOV PCI passthrough networking, you can request '
              'that the neutron port to be realized as normal (virtual nic), '
              'direct (pci passthrough), or macvtap '
              '(virtual interface with a tap-like software interface). Note'
              ' that this only works for Neutron deployments that support '
              'the bindings extension.'),
            constraints=[
                constraints.AllowedValues(['normal', 'direct', 'macvtap']),
            ],
            support_status=support.SupportStatus(version='2015.1'),
            update_allowed=True
        ),
        PORT_SECURITY_ENABLED: properties.Schema(
            properties.Schema.BOOLEAN,
            _('Flag to enable/disable port security on the port. '
              'When disable this feature(set it to False), there will be no '
              'packages filtering, like security-group and address-pairs.'),
            update_allowed=True,
            support_status=support.SupportStatus(version='5.0.0')
        ),
    }
    attributes_schema = {
        ADMIN_STATE_UP_ATTR: attributes.Schema(
            _("The administrative state of this port."),
            type=attributes.Schema.STRING
        ),
        DEVICE_ID_ATTR: attributes.Schema(
            _("Unique identifier for the device."),
            type=attributes.Schema.STRING
        ),
        DEVICE_OWNER: attributes.Schema(
            _("Name of the network owning the port."),
            type=attributes.Schema.STRING
        ),
        FIXED_IPS_ATTR: attributes.Schema(
            _("Fixed IP addresses."),
            type=attributes.Schema.LIST
        ),
        MAC_ADDRESS_ATTR: attributes.Schema(
            _("MAC address of the port."),
            type=attributes.Schema.STRING
        ),
        NAME_ATTR: attributes.Schema(
            _("Friendly name of the port."),
            type=attributes.Schema.STRING
        ),
        NETWORK_ID_ATTR: attributes.Schema(
            _("Unique identifier for the network owning the port."),
            type=attributes.Schema.STRING
        ),
        SECURITY_GROUPS_ATTR: attributes.Schema(
            _("A list of security groups for the port."),
            type=attributes.Schema.LIST
        ),
        STATUS: attributes.Schema(
            _("The status of the port."),
            type=attributes.Schema.STRING
        ),
        TENANT_ID: attributes.Schema(
            _("Tenant owning the port."),
            type=attributes.Schema.STRING
        ),
        ALLOWED_ADDRESS_PAIRS_ATTR: attributes.Schema(
            _("Additional MAC/IP address pairs allowed to pass through "
              "a port."),
            type=attributes.Schema.LIST
        ),
        SUBNETS_ATTR: attributes.Schema(
            _("A list of all subnet attributes for the port."),
            type=attributes.Schema.LIST
        ),
        PORT_SECURITY_ENABLED_ATTR: attributes.Schema(
            _("Port security enabled of the port."),
            support_status=support.SupportStatus(version='5.0.0'),
            type=attributes.Schema.BOOLEAN
        ),
    }
    def __init__(self, name, definition, stack):
        """Overloaded init in case of merging two schemas to one."""
        # Fold the extra (server-shared) properties into the main schema
        # before the base class snapshots it.
        self.properties_schema.update(self.extra_properties_schema)
        super(Port, self).__init__(name, definition, stack)
    def translation_rules(self):
        """Map the deprecated *_id properties onto their replacements."""
        return [
            properties.TranslationRule(
                self.properties,
                properties.TranslationRule.REPLACE,
                [self.NETWORK],
                value_path=[self.NETWORK_ID]
            ),
            properties.TranslationRule(
                self.properties,
                properties.TranslationRule.REPLACE,
                [self.FIXED_IPS, self.FIXED_IP_SUBNET],
                value_name=self.FIXED_IP_SUBNET_ID
            )
        ]
    def validate(self):
        """Validate properties; one of network/network_id is required."""
        super(Port, self).validate()
        self._validate_depr_property_required(self.properties,
                                              self.NETWORK, self.NETWORK_ID)
    def add_dependencies(self, deps):
        """Add implicit dependencies on subnets of the same network."""
        super(Port, self).add_dependencies(deps)
        # Depend on any Subnet in this template with the same
        # network_id as this network_id.
        # It is not known which subnet a port might be assigned
        # to so all subnets in a network should be created before
        # the ports in that network.
        for res in six.itervalues(self.stack):
            if res.has_interface('OS::Neutron::Subnet'):
                dep_network = res.properties.get(
                    subnet.Subnet.NETWORK) or res.properties.get(
                    subnet.Subnet.NETWORK_ID)
                network = self.properties[
                    self.NETWORK] or self.properties[self.NETWORK_ID]
                if dep_network == network:
                    deps += (self, res)
    def handle_create(self):
        """Create the port in Neutron and record its ID."""
        props = self.prepare_properties(
            self.properties,
            self.physical_resource_name())
        self.client_plugin().resolve_network(props, self.NETWORK, 'network_id')
        self._prepare_port_properties(props)
        port = self.client().create_port({'port': props})['port']
        self.resource_id_set(port['id'])
    def _prepare_port_properties(self, props, prepare_for_update=False):
        """Normalize ``props`` in place before sending them to Neutron."""
        # Strip None values from fixed_ips and resolve subnet names to IDs.
        for fixed_ip in props.get(self.FIXED_IPS, []):
            for key, value in list(fixed_ip.items()):
                if value is None:
                    fixed_ip.pop(key)
            if fixed_ip.get(self.FIXED_IP_SUBNET):
                self.client_plugin().resolve_subnet(
                    fixed_ip, self.FIXED_IP_SUBNET, 'subnet_id')
        # delete empty MAC addresses so that Neutron validation code
        # wouldn't fail as it not accepts Nones
        for pair in props.get(self.ALLOWED_ADDRESS_PAIRS, []):
            if (self.ALLOWED_ADDRESS_PAIR_MAC_ADDRESS in pair and
                    pair[self.ALLOWED_ADDRESS_PAIR_MAC_ADDRESS] is None):
                del pair[self.ALLOWED_ADDRESS_PAIR_MAC_ADDRESS]
        # if without 'security_groups', don't set the 'security_groups'
        # property when creating, neutron will create the port with the
        # 'default' securityGroup. If has the 'security_groups' and the
        # value is [], which means to create the port without securityGroup.
        if props.get(self.SECURITY_GROUPS) is not None:
            props[self.SECURITY_GROUPS] = self.client_plugin(
            ).get_secgroup_uuids(props.get(self.SECURITY_GROUPS))
        else:
            # And the update should has the same behavior.
            if prepare_for_update:
                props[self.SECURITY_GROUPS] = self.client_plugin(
                ).get_secgroup_uuids(['default'])
        if not props[self.FIXED_IPS]:
            del(props[self.FIXED_IPS])
        del(props[self.REPLACEMENT_POLICY])
    def _show_resource(self):
        """Return the current port data from Neutron."""
        return self.client().show_port(
            self.resource_id)['port']
    def check_create_complete(self, *args):
        """Return True once the created port reports a built status."""
        # NOTE(review): local name shadows the module-level 'attributes'
        # import; kept as-is to preserve the original code.
        attributes = self._show_resource()
        return self.is_built(attributes)
    def handle_delete(self):
        """Delete the port, ignoring not-found errors from Neutron."""
        try:
            self.client().delete_port(self.resource_id)
        except Exception as ex:
            self.client_plugin().ignore_not_found(ex)
        else:
            return True
    def _resolve_attribute(self, name):
        """Resolve attributes; 'subnets' is fetched live from Neutron."""
        if name == self.SUBNETS_ATTR:
            subnets = []
            try:
                fixed_ips = self._show_resource().get('fixed_ips', [])
                for fixed_ip in fixed_ips:
                    subnet_id = fixed_ip.get('subnet_id')
                    if subnet_id:
                        subnets.append(self.client().show_subnet(
                            subnet_id)['subnet'])
            except Exception as ex:
                LOG.warn(_LW("Failed to fetch resource attributes: %s"), ex)
                return
            return subnets
        return super(Port, self)._resolve_attribute(name)
    def _needs_update(self, after, before, after_props, before_props,
                      prev_resource, check_init_complete=True):
        """Force replacement when the REPLACE_ALWAYS policy is set."""
        if after_props.get(self.REPLACEMENT_POLICY) == 'REPLACE_ALWAYS':
            raise exception.UpdateReplace(self.name)
        return super(Port, self)._needs_update(
            after, before, after_props, before_props, prev_resource,
            check_init_complete)
    def handle_update(self, json_snippet, tmpl_diff, prop_diff):
        """Apply changed properties to the existing port."""
        props = self.prepare_update_properties(json_snippet)
        self._prepare_port_properties(props, prepare_for_update=True)
        LOG.debug('updating port with %s' % props)
        self.client().update_port(self.resource_id, {'port': props})
    def check_update_complete(self, *args):
        """Return True once the updated port reports a built status."""
        attributes = self._show_resource()
        return self.is_built(attributes)
    def prepare_for_replace(self):
        """Stash this port's fixed IPs and detach them before replacement."""
        # store port fixed_ips for restoring after failed update
        fixed_ips = self._show_resource().get('fixed_ips', [])
        self.data_set('port_fip', jsonutils.dumps(fixed_ips))
        # reset fixed_ips for this port by setting fixed_ips to []
        props = {'fixed_ips': []}
        self.client().update_port(self.resource_id, {'port': props})
    def restore_prev_rsrc(self, convergence=False):
        """Return the stashed fixed IPs to the previous port on rollback."""
        # In case of convergence, during rollback, the previous rsrc is
        # already selected and is being acted upon.
        prev_port = self if convergence else \
            self.stack._backup_stack().resources.get(self.name)
        fixed_ips = prev_port.data().get('port_fip', [])
        props = {'fixed_ips': []}
        if convergence:
            existing_port, stack = resource.Resource.load(
                prev_port.context, prev_port.replaced_by, True,
                prev_port.stack.cache_data
            )
            existing_port_id = existing_port.resource_id
        else:
            existing_port_id = self.resource_id
        if existing_port_id:
            # reset fixed_ips to [] for new resource
            self.client().update_port(existing_port_id, {'port': props})
        if fixed_ips and prev_port.resource_id:
            # restore ip for old port
            prev_port_props = {'fixed_ips': jsonutils.loads(fixed_ips)}
            self.client().update_port(prev_port.resource_id,
                                       {'port': prev_port_props})
def resource_mapping():
    """Return the mapping of Heat resource-type names to classes."""
    mapping = {'OS::Neutron::Port': Port}
    return mapping
| [
"heat.common.i18n._LW",
"heat.engine.constraints.CustomConstraint",
"heat.engine.constraints.AllowedValues",
"six.itervalues",
"heat.engine.resource.Resource.load",
"heat.common.exception.UpdateReplace",
"heat.engine.support.SupportStatus",
"heat.engine.properties.TranslationRule",
"heat.common.i18n... | [((1059, 1086), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1076, 1086), True, 'from oslo_log import log as logging\n'), ((13416, 13442), 'six.itervalues', 'six.itervalues', (['self.stack'], {}), '(self.stack)\n', (13430, 13442), False, 'import six\n'), ((2662, 2697), 'heat.common.i18n._', '_', (['"""A symbolic name for this port."""'], {}), "('A symbolic name for this port.')\n", (2663, 2697), False, 'from heat.common.i18n import _\n'), ((3909, 3937), 'heat.common.i18n._', '_', (['"""Device ID of this port."""'], {}), "('Device ID of this port.')\n", (3910, 3937), False, 'from heat.common.i18n import _\n'), ((4073, 4205), 'heat.common.i18n._', '_', (['"""Name of the network owning the port. The value is typically network:floatingip or network:router_interface or network:dhcp"""'], {}), "('Name of the network owning the port. The value is typically network:floatingip or network:router_interface or network:dhcp'\n )\n", (4074, 4205), False, 'from heat.common.i18n import _\n'), ((4365, 4396), 'heat.common.i18n._', '_', (['"""Desired IPs for this port."""'], {}), "('Desired IPs for this port.')\n", (4366, 4396), False, 'from heat.common.i18n import _\n'), ((6240, 6292), 'heat.common.i18n._', '_', (['"""Security group IDs to associate with this port."""'], {}), "('Security group IDs to associate with this port.')\n", (6241, 6292), False, 'from heat.common.i18n import _\n'), ((6434, 6654), 'heat.common.i18n._', '_', (['"""Policy on how to respond to a stack-update for this resource. REPLACE_ALWAYS will replace the port regardless of any property changes. AUTO will update the existing port for any changed update-allowed property."""'], {}), "('Policy on how to respond to a stack-update for this resource. REPLACE_ALWAYS will replace the port regardless of any property changes. 
AUTO will update the existing port for any changed update-allowed property.'\n )\n", (6435, 6654), False, 'from heat.common.i18n import _\n'), ((7146, 7224), 'heat.common.i18n._', '_', (['"""Extra parameters to include in the "port" object in the creation request."""'], {}), '(\'Extra parameters to include in the "port" object in the creation request.\')\n', (7147, 7224), False, 'from heat.common.i18n import _\n'), ((7371, 7414), 'heat.common.i18n._', '_', (['"""The administrative state of this port."""'], {}), "('The administrative state of this port.')\n", (7372, 7414), False, 'from heat.common.i18n import _\n'), ((7575, 7613), 'heat.common.i18n._', '_', (['"""MAC address to give to this port."""'], {}), "('MAC address to give to this port.')\n", (7576, 7613), False, 'from heat.common.i18n import _\n'), ((7821, 7891), 'heat.common.i18n._', '_', (['"""Additional MAC/IP address pairs allowed to pass through the port."""'], {}), "('Additional MAC/IP address pairs allowed to pass through the port.')\n", (7822, 7891), False, 'from heat.common.i18n import _\n'), ((8878, 9238), 'heat.common.i18n._', '_', (['"""The vnic type to be bound on the neutron port. To support SR-IOV PCI passthrough networking, you can request that the neutron port to be realized as normal (virtual nic), direct (pci passthrough), or macvtap (virtual interface with a tap-like software interface). Note that this only works for Neutron deployments that support the bindings extension."""'], {}), "('The vnic type to be bound on the neutron port. To support SR-IOV PCI passthrough networking, you can request that the neutron port to be realized as normal (virtual nic), direct (pci passthrough), or macvtap (virtual interface with a tap-like software interface). 
Note that this only works for Neutron deployments that support the bindings extension.'\n )\n", (8879, 9238), False, 'from heat.common.i18n import _\n'), ((9666, 9845), 'heat.common.i18n._', '_', (['"""Flag to enable/disable port security on the port. When disable this feature(set it to False), there will be no packages filtering, like security-group and address-pairs."""'], {}), "('Flag to enable/disable port security on the port. When disable this feature(set it to False), there will be no packages filtering, like security-group and address-pairs.'\n )\n", (9667, 9845), False, 'from heat.common.i18n import _\n'), ((10079, 10122), 'heat.common.i18n._', '_', (['"""The administrative state of this port."""'], {}), "('The administrative state of this port.')\n", (10080, 10122), False, 'from heat.common.i18n import _\n'), ((10232, 10270), 'heat.common.i18n._', '_', (['"""Unique identifier for the device."""'], {}), "('Unique identifier for the device.')\n", (10233, 10270), False, 'from heat.common.i18n import _\n'), ((10378, 10419), 'heat.common.i18n._', '_', (['"""Name of the network owning the port."""'], {}), "('Name of the network owning the port.')\n", (10379, 10419), False, 'from heat.common.i18n import _\n'), ((10529, 10553), 'heat.common.i18n._', '_', (['"""Fixed IP addresses."""'], {}), "('Fixed IP addresses.')\n", (10530, 10553), False, 'from heat.common.i18n import _\n'), ((10663, 10692), 'heat.common.i18n._', '_', (['"""MAC address of the port."""'], {}), "('MAC address of the port.')\n", (10664, 10692), False, 'from heat.common.i18n import _\n'), ((10797, 10828), 'heat.common.i18n._', '_', (['"""Friendly name of the port."""'], {}), "('Friendly name of the port.')\n", (10798, 10828), False, 'from heat.common.i18n import _\n'), ((10939, 10994), 'heat.common.i18n._', '_', (['"""Unique identifier for the network owning the port."""'], {}), "('Unique identifier for the network owning the port.')\n", (10940, 10994), False, 'from heat.common.i18n import _\n'), 
((11110, 11154), 'heat.common.i18n._', '_', (['"""A list of security groups for the port."""'], {}), "('A list of security groups for the port.')\n", (11111, 11154), False, 'from heat.common.i18n import _\n'), ((11254, 11282), 'heat.common.i18n._', '_', (['"""The status of the port."""'], {}), "('The status of the port.')\n", (11255, 11282), False, 'from heat.common.i18n import _\n'), ((11387, 11415), 'heat.common.i18n._', '_', (['"""Tenant owning the port."""'], {}), "('Tenant owning the port.')\n", (11388, 11415), False, 'from heat.common.i18n import _\n'), ((11537, 11605), 'heat.common.i18n._', '_', (['"""Additional MAC/IP address pairs allowed to pass through a port."""'], {}), "('Additional MAC/IP address pairs allowed to pass through a port.')\n", (11538, 11605), False, 'from heat.common.i18n import _\n'), ((11728, 11778), 'heat.common.i18n._', '_', (['"""A list of all subnet attributes for the port."""'], {}), "('A list of all subnet attributes for the port.')\n", (11729, 11778), False, 'from heat.common.i18n import _\n'), ((11898, 11937), 'heat.common.i18n._', '_', (['"""Port security enabled of the port."""'], {}), "('Port security enabled of the port.')\n", (11899, 11937), False, 'from heat.common.i18n import _\n'), ((12376, 12506), 'heat.engine.properties.TranslationRule', 'properties.TranslationRule', (['self.properties', 'properties.TranslationRule.REPLACE', '[self.NETWORK]'], {'value_path': '[self.NETWORK_ID]'}), '(self.properties, properties.TranslationRule.\n REPLACE, [self.NETWORK], value_path=[self.NETWORK_ID])\n', (12402, 12506), False, 'from heat.engine import properties\n'), ((12593, 12758), 'heat.engine.properties.TranslationRule', 'properties.TranslationRule', (['self.properties', 'properties.TranslationRule.REPLACE', '[self.FIXED_IPS, self.FIXED_IP_SUBNET]'], {'value_name': 'self.FIXED_IP_SUBNET_ID'}), '(self.properties, properties.TranslationRule.\n REPLACE, [self.FIXED_IPS, self.FIXED_IP_SUBNET], value_name=self.\n FIXED_IP_SUBNET_ID)\n', 
(12619, 12758), False, 'from heat.engine import properties\n'), ((17224, 17258), 'heat.common.exception.UpdateReplace', 'exception.UpdateReplace', (['self.name'], {}), '(self.name)\n', (17247, 17258), False, 'from heat.common import exception\n'), ((18055, 18081), 'oslo_serialization.jsonutils.dumps', 'jsonutils.dumps', (['fixed_ips'], {}), '(fixed_ips)\n', (18070, 18081), False, 'from oslo_serialization import jsonutils\n'), ((18692, 18794), 'heat.engine.resource.Resource.load', 'resource.Resource.load', (['prev_port.context', 'prev_port.replaced_by', '(True)', 'prev_port.stack.cache_data'], {}), '(prev_port.context, prev_port.replaced_by, True,\n prev_port.stack.cache_data)\n', (18714, 18794), False, 'from heat.engine import resource\n'), ((3373, 3516), 'heat.common.i18n._', '_', (['"""Network this port belongs to. If you plan to use current port to assign Floating IP, you should specify %(fixed_ips)s with %(subnet)s"""'], {}), "('Network this port belongs to. If you plan to use current port to assign Floating IP, you should specify %(fixed_ips)s with %(subnet)s'\n )\n", (3374, 3516), False, 'from heat.common.i18n import _\n'), ((3664, 3703), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], {'version': '"""2014.2"""'}), "(version='2014.2')\n", (3685, 3703), False, 'from heat.engine import support\n'), ((9481, 9520), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], {'version': '"""2015.1"""'}), "(version='2015.1')\n", (9502, 9520), False, 'from heat.engine import support\n'), ((9936, 9974), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], {'version': '"""5.0.0"""'}), "(version='5.0.0')\n", (9957, 9974), False, 'from heat.engine import support\n'), ((11966, 12004), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], {'version': '"""5.0.0"""'}), "(version='5.0.0')\n", (11987, 12004), False, 'from heat.engine import support\n'), ((19241, 19267), 'oslo_serialization.jsonutils.loads', 
'jsonutils.loads', (['fixed_ips'], {}), '(fixed_ips)\n', (19256, 19267), False, 'from oslo_serialization import jsonutils\n'), ((3212, 3259), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""neutron.network"""'], {}), "('neutron.network')\n", (3240, 3259), False, 'from heat.engine import constraints\n'), ((3747, 3794), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""neutron.network"""'], {}), "('neutron.network')\n", (3775, 3794), False, 'from heat.engine import constraints\n'), ((6772, 6825), 'heat.engine.constraints.AllowedValues', 'constraints.AllowedValues', (["['REPLACE_ALWAYS', 'AUTO']"], {}), "(['REPLACE_ALWAYS', 'AUTO'])\n", (6797, 6825), False, 'from heat.engine import constraints\n'), ((7657, 7697), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""mac_addr"""'], {}), "('mac_addr')\n", (7685, 7697), False, 'from heat.engine import constraints\n'), ((9379, 9437), 'heat.engine.constraints.AllowedValues', 'constraints.AllowedValues', (["['normal', 'direct', 'macvtap']"], {}), "(['normal', 'direct', 'macvtap'])\n", (9404, 9437), False, 'from heat.engine import constraints\n'), ((3030, 3096), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], {'status': 'support.DEPRECATED', 'version': '"""2014.2"""'}), "(status=support.DEPRECATED, version='2014.2')\n", (3051, 3096), False, 'from heat.engine import support\n'), ((16837, 16883), 'heat.common.i18n._LW', '_LW', (['"""Failed to fetch resource attributes: %s"""'], {}), "('Failed to fetch resource attributes: %s')\n", (16840, 16883), False, 'from heat.common.i18n import _LW\n'), ((2965, 2986), 'heat.common.i18n._', '_', (['"""Use property %s."""'], {}), "('Use property %s.')\n", (2966, 2986), False, 'from heat.common.i18n import _\n'), ((5393, 5455), 'heat.common.i18n._', '_', (['"""Subnet in which to allocate the IP address for this port."""'], {}), "('Subnet in which to allocate the IP address for 
this port.')\n", (5394, 5455), False, 'from heat.common.i18n import _\n'), ((5862, 5914), 'heat.common.i18n._', '_', (['"""IP address desired in the subnet for this port."""'], {}), "('IP address desired in the subnet for this port.')\n", (5863, 5914), False, 'from heat.common.i18n import _\n'), ((8159, 8203), 'heat.common.i18n._', '_', (['"""MAC address to allow through this port."""'], {}), "('MAC address to allow through this port.')\n", (8160, 8203), False, 'from heat.common.i18n import _\n'), ((8507, 8550), 'heat.common.i18n._', '_', (['"""IP address to allow through this port."""'], {}), "('IP address to allow through this port.')\n", (8508, 8550), False, 'from heat.common.i18n import _\n'), ((5525, 5564), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], {'version': '"""2014.2"""'}), "(version='2014.2')\n", (5546, 5564), False, 'from heat.engine import support\n'), ((5167, 5213), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""neutron.subnet"""'], {}), "('neutron.subnet')\n", (5195, 5213), False, 'from heat.engine import constraints\n'), ((5632, 5678), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""neutron.subnet"""'], {}), "('neutron.subnet')\n", (5660, 5678), False, 'from heat.engine import constraints\n'), ((5982, 6021), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""ip_addr"""'], {}), "('ip_addr')\n", (6010, 6021), False, 'from heat.engine import constraints\n'), ((8271, 8311), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""mac_addr"""'], {}), "('mac_addr')\n", (8299, 8311), False, 'from heat.engine import constraints\n'), ((8657, 8696), 'heat.engine.constraints.CustomConstraint', 'constraints.CustomConstraint', (['"""ip_addr"""'], {}), "('ip_addr')\n", (8685, 8696), False, 'from heat.engine import constraints\n'), ((4912, 4979), 'heat.engine.support.SupportStatus', 'support.SupportStatus', ([], 
{'status': 'support.DEPRECATED', 'version': '"""2014.2 """'}), "(status=support.DEPRECATED, version='2014.2 ')\n", (4933, 4979), False, 'from heat.engine import support\n'), ((4827, 4848), 'heat.common.i18n._', '_', (['"""Use property %s."""'], {}), "('Use property %s.')\n", (4828, 4848), False, 'from heat.common.i18n import _\n')] |
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
MUSDB18 data-iterator code for MSS.
'''
import random
import numpy as np
import musdb
from nnabla.utils.data_source import DataSource
class Compose():
    """Chain several augmentation transforms into one callable.

    Args:
        augmentations: list of augmentations to compose.
    """

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, audio):
        # Feed the output of each transform into the next one.
        result = audio
        for transform in self.transforms:
            result = transform(result)
        return result
def _augment_gain(audio, low=0.75, high=1.25):
"""Applies a random gain between `low` and `high`"""
g = random.uniform(low, high)
return audio * g
def _augment_channelswap(audio):
"""Swap channels of stereo signals with a probability of p=0.5"""
if audio.shape[0] == 2 and random.random() < 0.5:
return np.flip(audio, 0)
else:
return audio
def load_datasources(parser, args):
    """Register MUSDB CLI options, parse them and build the training set.

    Returns:
        (train_dataset, args) tuple.
    """
    parser.add_argument('--is-wav', action='store_true', default=True,
                        help='loads wav instead of STEMS')
    parser.add_argument('--samples-per-track', type=int, default=64)
    parser.add_argument('--source-augmentations', type=str, nargs='+',
                        default=['gain', 'channelswap'])
    args = parser.parse_args()

    # Resolve each requested augmentation name to its module-level
    # "_augment_<name>" function and chain them together.
    aug_funcs = [globals()['_augment_' + name]
                 for name in args.source_augmentations]
    train_dataset = MUSDBDataSource(source_augmentations=Compose(aug_funcs),
                                    random_track_mix=True, args=args)
    return train_dataset, args
class MUSDBDataSource(DataSource):
    def __init__(
        self,
        args,
        download=False,
        samples_per_track=64,
        source_augmentations=lambda audio: audio,
        random_track_mix=False,
        dtype=np.float32,
        seed=42,
        rng=None
    ):
        """
        MUSDB18 nnabla.utils.data_source that samples from the MUSDB tracks
        using track and excerpts with replacement.
        Parameters
        ----------
        args : additional arguments used to add further control for
            the musdb dataset initialization function.
        download : boolean
            automatically download 7s preview version of MUS
        samples_per_track : int
            sets the number of samples, yielded from each track per epoch.
            Defaults to 64
        source_augmentations : list[callables]
            provide list of augmentation function that take a multi-channel
            audio file of shape (src, samples) as input and output. Defaults to
            no-augmentations (input = output)
        random_track_mix : boolean
            randomly mixes sources from different tracks to assemble a
            custom mix. This augmenation is only applied for the train subset.
        seed : int
            control randomness of dataset iterations
        dtype : numeric type
            data type of the output tuple x and y
        """
        super(MUSDBDataSource, self).__init__(shuffle=True)
        if rng is None:
            rng = np.random.RandomState(seed)
        self.rng = rng
        # Seed the stdlib RNG too: it drives track/excerpt selection and the
        # augmentation functions.
        random.seed(seed)
        self.args = args
        # No local root given -> fall back to downloading the preview set.
        self.download = args.root is None
        self.samples_per_track = samples_per_track
        self.source_augmentations = source_augmentations
        self.random_track_mix = random_track_mix
        self.mus = musdb.DB(
            root=args.root,
            is_wav=args.is_wav,
            split=None,
            subsets='train',
            download=download
        )
        print(f"Finished loading dataset with {len(self.mus.tracks)} tracks.")
        self.sample_rate = 44100 # musdb has fixed sample rate
        self.dtype = dtype
        # Each track contributes ``samples_per_track`` excerpts per epoch.
        self._size = len(self.mus.tracks) * self.samples_per_track
        self._variables = ('mixture', 'target')
        self.reset()
    def _get_data(self, position):
        """Return one (mixture, target) pair for the given iterator position."""
        index = self._indexes[position]
        audio_sources = []
        target_ind = None
        # select track
        track = self.mus.tracks[index // self.samples_per_track]
        # at training time we assemble a custom mix
        if self.args.seq_dur:
            for k, source in enumerate(self.mus.setup['sources']):
                # memorize index of target source
                if source == self.args.target:
                    target_ind = k
                # select a random track
                if self.random_track_mix:
                    track = random.choice(self.mus.tracks)
                # set the excerpt duration
                track.chunk_duration = self.args.seq_dur
                # set random start index
                track.chunk_start = random.uniform(
                    0, track.duration - self.args.seq_dur
                )
                # load source audio and apply time domain source_augmentations
                audio = track.sources[source].audio.T
                audio = self.source_augmentations(audio)
                audio_sources.append(audio)
            # create stem tensor of shape (source, channel, samples)
            stems = np.stack(audio_sources, axis=0)
            # # apply linear mix over source index=0
            x = np.sum(stems, axis=0)
            # get the target stem
            if target_ind is not None:
                y = stems[target_ind]
            # assuming vocal/accompaniment scenario if target!=source
            else:
                vocind = list(self.mus.setup['sources'].keys()).index('vocals')
                # apply time domain subtraction
                y = x - stems[vocind]
        # for validation and test, we deterministically yield the full musdb track
        else:
            # get the non-linear source mix straight from musdb
            x = track.audio.T
            y = track.targets[self.args.target].audio.T
        return x, y
    def reset(self):
        """Reshuffle (or re-enumerate) the excerpt order for a new epoch."""
        if self._shuffle:
            self._indexes = self.rng.permutation(self._size)
        else:
            self._indexes = np.arange(self._size)
        super(MUSDBDataSource, self).reset()
| [
"numpy.flip",
"random.uniform",
"random.choice",
"numpy.arange",
"random.seed",
"numpy.stack",
"musdb.DB",
"numpy.sum",
"random.random",
"numpy.random.RandomState"
] | [((1174, 1199), 'random.uniform', 'random.uniform', (['low', 'high'], {}), '(low, high)\n', (1188, 1199), False, 'import random\n'), ((1395, 1412), 'numpy.flip', 'np.flip', (['audio', '(0)'], {}), '(audio, 0)\n', (1402, 1412), True, 'import numpy as np\n'), ((3811, 3828), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (3822, 3828), False, 'import random\n'), ((4072, 4168), 'musdb.DB', 'musdb.DB', ([], {'root': 'args.root', 'is_wav': 'args.is_wav', 'split': 'None', 'subsets': '"""train"""', 'download': 'download'}), "(root=args.root, is_wav=args.is_wav, split=None, subsets='train',\n download=download)\n", (4080, 4168), False, 'import musdb\n'), ((1357, 1372), 'random.random', 'random.random', ([], {}), '()\n', (1370, 1372), False, 'import random\n'), ((3751, 3778), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (3772, 3778), True, 'import numpy as np\n'), ((5783, 5814), 'numpy.stack', 'np.stack', (['audio_sources'], {'axis': '(0)'}), '(audio_sources, axis=0)\n', (5791, 5814), True, 'import numpy as np\n'), ((5884, 5905), 'numpy.sum', 'np.sum', (['stems'], {'axis': '(0)'}), '(stems, axis=0)\n', (5890, 5905), True, 'import numpy as np\n'), ((6691, 6712), 'numpy.arange', 'np.arange', (['self._size'], {}), '(self._size)\n', (6700, 6712), True, 'import numpy as np\n'), ((5366, 5419), 'random.uniform', 'random.uniform', (['(0)', '(track.duration - self.args.seq_dur)'], {}), '(0, track.duration - self.args.seq_dur)\n', (5380, 5419), False, 'import random\n'), ((5156, 5186), 'random.choice', 'random.choice', (['self.mus.tracks'], {}), '(self.mus.tracks)\n', (5169, 5186), False, 'import random\n')] |
# -*- coding: utf-8 -*-
# 使用两个算法模型并行识别
import os, sys
import concurrent.futures
from datetime import datetime
from settings import ALGORITHM, algorithm_settings
import knn
def predict_thread(face_algorithm, model_name, image_file):
    """Run face recognition on one image with a single algorithm/model pair.

    Intended to be submitted to a thread pool; returns whatever
    ``knn.predict`` returns for this algorithm.
    """
    # Workaround for Keras thread-local graph state when called off the main
    # thread, see:
    # https://discuss.streamlit.io/t/attributeerror-thread-local-object-has-no-attribute-value/574/3
    import keras.backend.tensorflow_backend as tb
    tb._SYMBOLIC_SCOPE.value = True
    return knn.predict(image_file,
        model_path=model_name,
        distance_threshold=ALGORITHM[face_algorithm]['distance_threshold'],
        face_algorithm=face_algorithm)
def predict_parallel(image_file):
    """Recognize faces in ``image_file`` with two algorithms in parallel and
    merge the two prediction lists into one final result."""
    all_predictions = {}
    with concurrent.futures.ThreadPoolExecutor() as executor:
        future1 = executor.submit(predict_thread, algorithm_settings[1][0], algorithm_settings[1][1], image_file)
        future2 = executor.submit(predict_thread, algorithm_settings[2][0], algorithm_settings[2][1], image_file)
        for future in concurrent.futures.as_completed([future1, future2]):
            predictions = future.result()
            if future==future1:
                all_predictions[1] = predictions
            else:
                all_predictions[2] = predictions
    #print(all_predictions)
    # Merge rules (algorithm 1 is preferred on ties):
    # 1. both results unique and identical -> unambiguous
    # 2. both "unknown" -> no usable result
    # 3. one is "unknown" -> return the non-unknown one
    # 4. one has multiple matches -> return the single-match one
    # 5. both have multiple matches -> prefer algorithm 1
    # 6. both have one unique result -> prefer algorithm 1
    # 7. both results empty -> no result
    # 8. one result empty -> return the non-empty one
    # 9. otherwise -> prefer algorithm 1's result
    final_result=[]
    len1 = len(all_predictions[1])
    len2 = len(all_predictions[2])
    name1=name2=''
    if len1>0:
        name1 = all_predictions[1][0][0]
    if len2>0:
        name2 = all_predictions[2][0][0]
    # rule 7
    if len1==len2==0:
        final_result=[]
    # rule 8
    elif 0 in (len1, len2):
        if len2==0:
            final_result = all_predictions[1]
        else:
            final_result = all_predictions[2]
    # rule 2
    elif name1==name2=='unknown':
        final_result = all_predictions[1]
    # rule 3
    elif 'unknown' in (name1, name2):
        if name1=='unknown':
            final_result = all_predictions[2]
        else:
            final_result = all_predictions[1]
    # rules 1, 6
    elif len1==len2==1:
        final_result = all_predictions[1]
    # rules 4, 5
    elif len1>1 or len2>1:
        if len2==1:
            final_result = all_predictions[2]
        else:
            final_result = all_predictions[1]
    # rule 9
    else:
        final_result = all_predictions[1]
    return final_result
if __name__ == "__main__":
    # CLI entry point: accepts a single image file or a directory of images.
    if len(sys.argv)<2:
        print("usage: python3 %s <test dir or file>" % sys.argv[0])
        sys.exit(2)
    test_thing = sys.argv[1]
    if os.path.isdir(test_thing):
        images = os.listdir(test_thing)
        images = [os.path.join(test_thing, i) for i in images]
    else:
        images = [ test_thing ]
    # Using the trained classifier, make predictions for unknown images
    for image_file in images:
        print("Looking for faces in {}".format(image_file))
        # Find all people in the image using a trained classifier model
        # Note: You can pass in either a classifier file name or a classifier model instance
        start_time = datetime.now()
        predictions = predict_parallel(image_file)
        print('[Time taken: {!s}]'.format(datetime.now() - start_time))
        # Print results on the console
        for name, (top, right, bottom, left), distance, count in predictions:
            print("- Found {} at ({}, {}), distance={}, count={}".format(name, left, top, distance, count))
        if len(predictions)==0:
            print('Face not found!')
        # Display results overlaid on an image
        #knn.show_prediction_labels_on_image(image_file, predictions)
| [
"os.listdir",
"os.path.join",
"knn.predict",
"datetime.datetime.now",
"os.path.isdir",
"sys.exit"
] | [((434, 584), 'knn.predict', 'knn.predict', (['image_file'], {'model_path': 'model_name', 'distance_threshold': "ALGORITHM[face_algorithm]['distance_threshold']", 'face_algorithm': 'face_algorithm'}), "(image_file, model_path=model_name, distance_threshold=ALGORITHM\n [face_algorithm]['distance_threshold'], face_algorithm=face_algorithm)\n", (445, 584), False, 'import knn\n'), ((2750, 2775), 'os.path.isdir', 'os.path.isdir', (['test_thing'], {}), '(test_thing)\n', (2763, 2775), False, 'import os, sys\n'), ((2700, 2711), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (2708, 2711), False, 'import os, sys\n'), ((2794, 2816), 'os.listdir', 'os.listdir', (['test_thing'], {}), '(test_thing)\n', (2804, 2816), False, 'import os, sys\n'), ((3281, 3295), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3293, 3295), False, 'from datetime import datetime\n'), ((2835, 2862), 'os.path.join', 'os.path.join', (['test_thing', 'i'], {}), '(test_thing, i)\n', (2847, 2862), False, 'import os, sys\n'), ((3389, 3403), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3401, 3403), False, 'from datetime import datetime\n')] |
import datetime
import re
from collections import Sequence, OrderedDict
from html import escape
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.contrib.postgres.fields import ArrayField
from django.core.serializers.json import DjangoJSONEncoder
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.utils.translation import gettext_lazy as _
from users.models import User, PRIVILEGE_LEVELS
from .helpers import DATE_SERIALIZATION_FORMAT, validate_identifier
from projects.helpers import get_ad_user
from users.serializers import PersonnelSerializer
class AttributeQuerySet(models.QuerySet):
    """QuerySet helpers that select attributes by broad capability."""

    def filterable(self):
        """Attributes whose value types can act as search/filter fields."""
        # Built lazily inside the method: ``Attribute`` is defined later in
        # this module, so it must not be referenced at class-creation time.
        allowed = [
            Attribute.TYPE_INTEGER,
            Attribute.TYPE_DECIMAL,
            Attribute.TYPE_SHORT_STRING,
            Attribute.TYPE_BOOLEAN,
            Attribute.TYPE_DATE,
            Attribute.TYPE_USER,
            Attribute.TYPE_CHOICE,
        ]
        return self.filter(value_type__in=allowed)

    def report_friendly(self):
        """Attributes whose value types render sensibly in reports."""
        allowed = [
            Attribute.TYPE_FIELDSET,
            Attribute.TYPE_INTEGER,
            Attribute.TYPE_DECIMAL,
            Attribute.TYPE_SHORT_STRING,
            Attribute.TYPE_LONG_STRING,
            Attribute.TYPE_RICH_TEXT,
            Attribute.TYPE_RICH_TEXT_SHORT,
            Attribute.TYPE_BOOLEAN,
            Attribute.TYPE_DATE,
            Attribute.TYPE_USER,
            Attribute.TYPE_CHOICE,
        ]
        return self.filter(value_type__in=allowed)
class DataRetentionPlan(models.Model):
    """Defines a data retention plan for an attribute."""
    TYPE_PERMANENT = "permanent"
    TYPE_PROCESSING = "processing"
    TYPE_CUSTOM = "custom"
    TYPE_CHOICES = (
        (TYPE_PERMANENT, _("permanent")),
        (TYPE_PROCESSING, _("while processing")),
        (TYPE_CUSTOM, _("custom duration after archival")),
    )
    UNIT_YEARS = "years"
    UNIT_MONTHS = "months"
    UNIT_DAYS = "days"
    UNIT_CHOICES = (
        (UNIT_YEARS, _("years")),
        (UNIT_MONTHS, _("months")),
        (UNIT_DAYS, _("days")),
    )
    label = models.CharField(max_length=255, verbose_name=_("label"), unique=True)
    plan_type = models.CharField(
        max_length=10,
        verbose_name=_("plan type"),
        choices=TYPE_CHOICES,
    )
    # custom_time / custom_time_unit are only meaningful for TYPE_CUSTOM
    # plans; nullable so other plan types can leave them unset.
    custom_time = models.PositiveIntegerField(
        verbose_name=_("custom time"),
        null=True,
        blank=True,
    )
    custom_time_unit = models.CharField(
        max_length=6,
        verbose_name=_("unit for custom time"),
        choices=UNIT_CHOICES,
        null=True,
        blank=True,
    )
    def __str__(self):
        return self.label
class Attribute(models.Model):
"""Defines a single attribute type.
Fieldset defines a group of tightly related attributes that define a single entity. E.g. information regarding
a person might consist of several fields. If there is a need to store information for multiple entities, we
can define a fieldset which knows the attributes for a single entity.
"""
TYPE_FIELDSET = "fieldset"
TYPE_INTEGER = "integer"
TYPE_DECIMAL = "decimal"
TYPE_SHORT_STRING = "short_string"
TYPE_LONG_STRING = "long_string"
TYPE_RICH_TEXT = "rich_text"
TYPE_RICH_TEXT_SHORT = "rich_text_short"
TYPE_BOOLEAN = "boolean"
TYPE_DATE = "date"
TYPE_USER = "user"
TYPE_PERSONNEL = "personnel"
TYPE_GEOMETRY = "geometry"
TYPE_IMAGE = "image"
TYPE_FILE = "file"
TYPE_LINK = "link"
TYPE_CHOICE = "choice"
ALLOWED_CALCULATION_OPERATORS = ["+", "-", "*", "/"]
TYPE_CHOICES = (
(TYPE_FIELDSET, _("fieldset")),
(TYPE_INTEGER, _("integer")),
(TYPE_DECIMAL, _("decimal")),
(TYPE_SHORT_STRING, _("short string")),
(TYPE_LONG_STRING, _("long string")),
(TYPE_RICH_TEXT, _("rich text")),
(TYPE_RICH_TEXT_SHORT, _("short rich text")),
(TYPE_BOOLEAN, _("boolean")),
(TYPE_DATE, _("date")),
(TYPE_USER, _("user")),
(TYPE_PERSONNEL, _("personnel")),
(TYPE_GEOMETRY, _("geometry")),
(TYPE_IMAGE, _("image")),
(TYPE_FILE, _("file")),
(TYPE_LINK, _("link")),
(TYPE_CHOICE, _("choice")),
)
DISPLAY_DROPDOWN = "dropdown"
DISPLAY_CHECKBOX = "checkbox"
DISPLAY_READONLY = "readonly"
DISPLAY_READONLY_CHECKBOX = "readonly_checkbox"
DISPLAY_SIMPLE_INTEGER = "simple_integer"
DISPLAY_CHOICES = (
(None, _("default")),
(DISPLAY_DROPDOWN, _("dropdown")),
(DISPLAY_CHECKBOX, _("checkbox")),
(DISPLAY_READONLY, _("read only")),
(DISPLAY_READONLY_CHECKBOX, _("read only checkbox")),
(DISPLAY_SIMPLE_INTEGER, _("integer without thousand separator")),
)
SOURCE_PARENT_FIELDSET = "fieldset"
SOURCE_FACTA = "/facta/v1/kiinteisto/<pk>/all"
SOURCE_GEOSERVER = "/geoserver/v1/kiinteisto/<pk>/all"
SOURCE_CHOICES = (
(SOURCE_PARENT_FIELDSET, _("Same as parent fieldset")),
(SOURCE_FACTA, _("FACTA")),
(SOURCE_GEOSERVER, _("Geoserver")),
)
AD_DATA_KEY_ID = "id"
AD_DATA_KEY_NAME = "name"
AD_DATA_KEY_PHONE = "phone"
AD_DATA_KEY_EMAIL = "email"
AD_DATA_KEY_TITLE = "title"
AD_DATA_KEY_OFFICE = "office"
AD_DATA_KEY_COMPANY = "company"
AD_DATA_KEY_CHOICES = (
(AD_DATA_KEY_ID, "id"),
(AD_DATA_KEY_NAME, "name"),
(AD_DATA_KEY_PHONE, "phone"),
(AD_DATA_KEY_EMAIL, "email"),
(AD_DATA_KEY_TITLE, "title"),
(AD_DATA_KEY_OFFICE, "office"),
(AD_DATA_KEY_COMPANY, "company"),
)
name = models.CharField(max_length=255, verbose_name=_("name"))
value_type = models.CharField(
max_length=64, verbose_name=_("value type"), choices=TYPE_CHOICES
)
display = models.CharField(
max_length=64,
verbose_name=_("display style"),
choices=DISPLAY_CHOICES,
default=None,
null=True,
blank=True,
)
visibility_conditions = ArrayField(
models.JSONField(
verbose_name=_("show attribute if any condition applies"),
default=dict,
blank=True,
null=True,
encoder=DjangoJSONEncoder,
),
verbose_name=_("visibility condition"),
null=True,
blank=True,
)
hide_conditions = ArrayField(
models.JSONField(
verbose_name=_("hide attribute if any condition applies"),
default=dict,
blank=True,
null=True,
encoder=DjangoJSONEncoder,
),
verbose_name=_("hide condition"),
null=True,
blank=True,
)
unit = models.CharField(
max_length=255, verbose_name=_("unit"), null=True, blank=True
)
public = models.BooleanField(verbose_name=_("public information"), default=False)
searchable = models.BooleanField(verbose_name=_("searchable field"), default=False)
generated = models.BooleanField(verbose_name=_("generated"), default=False)
data_retention_plan = models.ForeignKey(
"DataRetentionPlan",
verbose_name=_("data retention plan"),
blank=True,
null=True,
on_delete=models.PROTECT,
)
calculations = ArrayField(
models.CharField(max_length=255, blank=True), blank=True, null=True
)
related_fields = ArrayField(
models.TextField(blank=True), blank=True, null=True
)
required = models.BooleanField(verbose_name=_("required"), default=False)
multiple_choice = models.BooleanField(
verbose_name=_("multiple choice"), default=False
)
character_limit = models.PositiveIntegerField(
verbose_name=_("character limit"),
null=True,
blank=True,
)
placeholder_text = models.TextField(
verbose_name=_("placeholder text"),
null=True,
blank=True,
)
unique = models.BooleanField(
verbose_name=_("unique"),
default=False,
)
error_message = models.TextField(
verbose_name=_("error message"),
null=True,
blank=True,
)
identifier = models.CharField(
max_length=60,
verbose_name=_("identifier"),
db_index=True,
unique=True,
validators=[validate_identifier],
)
fieldset_attributes = models.ManyToManyField(
"self",
symmetrical=False,
related_name="fieldsets",
through="FieldSetAttribute",
through_fields=("attribute_source", "attribute_target"),
)
help_text = models.TextField(verbose_name=_("Help text"), blank=True)
help_link = models.URLField(verbose_name=_("Help link"), blank=True, null=True)
broadcast_changes = models.BooleanField(default=False)
autofill_readonly = models.BooleanField(verbose_name=_("read-only autofill field"), null=True)
autofill_rule = models.JSONField(
verbose_name=_("autofill rule"),
default=dict,
blank=True,
null=True,
encoder=DjangoJSONEncoder,
)
updates_autofill = models.BooleanField(verbose_name=_("updates related autofill fields"), default=False)
highlight_group = models.ForeignKey(
Group,
verbose_name=_("highlight field for group"),
on_delete=models.PROTECT,
null=True,
blank=True,
)
owner_editable = models.BooleanField(
default=False,
verbose_name=_("owner can edit"),
)
owner_viewable = models.BooleanField(
default=True,
verbose_name=_("owner can view"),
)
view_privilege = models.CharField(
verbose_name=_("privilege for viewing"),
max_length=6,
choices=PRIVILEGE_LEVELS,
default="browse",
null=True,
blank=True,
)
edit_privilege = models.CharField(
verbose_name=_("privilege for editing"),
max_length=6,
choices=PRIVILEGE_LEVELS,
default=None,
null=True,
blank=True,
)
# attributes which are linked to static Project fields
static_property = models.CharField(max_length=255, blank=True, null=True)
# attributes whose data is fetched from an external source
data_source = models.CharField(
max_length=255,
verbose_name=_("external data source"),
choices=SOURCE_CHOICES,
null=True,
blank=True,
)
data_source_key = models.CharField(
verbose_name=_("field key for external data source"),
max_length=255,
null=True,
blank=True,
)
key_attribute = models.ForeignKey(
"Attribute",
verbose_name=_("key attribute for fetching external data"),
related_name="key_for_attributes",
null=True,
blank=True,
on_delete=models.PROTECT,
)
key_attribute_path = models.CharField(
max_length=255,
null=True,
blank=True,
)
ad_key_attribute = models.ForeignKey(
"Attribute",
verbose_name=_("key attribute for fetching AD user data"),
null=True,
blank=True,
on_delete=models.PROTECT,
)
ad_data_key = models.CharField(
verbose_name=_("AD user data key"),
max_length=7,
choices=AD_DATA_KEY_CHOICES,
null=True,
blank=True,
)
objects = AttributeQuerySet.as_manager()
class Meta:
verbose_name = _("attribute")
verbose_name_plural = _("attributes")
ordering = ("identifier",)
    def __str__(self):
        # Readable representation, e.g. "Area (decimal)".
        return f"{self.name} ({self.value_type})"
    @transaction.atomic
    def save(self, *args, **kwargs):
        """Save the attribute, enforcing that at most one geometry-type
        attribute exists.

        Runs inside a transaction so the save is rolled back when the
        uniqueness check below raises.
        """
        super().save(*args, **kwargs)
        if self.value_type == Attribute.TYPE_GEOMETRY:
            # Check *after* saving (and excluding self) so re-saving the
            # existing geometry attribute is allowed.
            if (
                Attribute.objects.exclude(id=self.id)
                .filter(value_type=Attribute.TYPE_GEOMETRY)
                .exists()
            ):
                raise NotImplementedError(
                    "Currently only one geometry type attribute at a time is supported."
                )
def clean(self):
if not len(self.calculations):
return
# Only allow for uneven arrays
if len(self.calculations) % 2 != 1:
raise ValidationError(
f"Calculations needs to be uneven in length and"
f"follow the style '(<attribtute> <operator> <attribute>)*n. "
f"Error in {self.identifier} with calculations {self.calculations}."
)
if self.calculations[-1] in self.ALLOWED_CALCULATION_OPERATORS:
raise ValidationError(
f"Calculation can not end with operator. "
f"Error in {self.identifier} with calculations {self.calculations}."
)
if not all(
operator in self.ALLOWED_CALCULATION_OPERATORS
for operator in self.calculation_operators
):
raise ValidationError(
f"Calculation operators can only be {self.ALLOWED_CALCULATION_OPERATORS}. "
f"Error in {self.identifier} with calculation {self.calculations}."
)
if (
len(self.calculation_operators)
!= len(self.calculation_attribute_identifiers) - 1
):
raise ValidationError(
f"There must be exactly one more attribute then operators"
f"Error in {self.identifier} with calculation {self.calculations}."
)
    def serialize_value(self, value):
        """Convert a Python value into a JSON-serializable form according to
        this attribute's ``value_type``.

        Raises ``Exception`` for value types with no serialization rule.
        """
        # Externally sourced values (FACTA/geoserver/AD) are never persisted
        # here — fieldsets are the exception since they may contain a mix.
        if self.value_type != Attribute.TYPE_FIELDSET \
        and (self.data_source or self.ad_data_key):
            return None
        if self.value_type == Attribute.TYPE_CHOICE:
            value_choices = self.value_choices.all()
        else:
            value_choices = None
        # Choice values are stored via their identifier strings.
        if value_choices and value_choices.exists():
            if self.multiple_choice and value is not None:
                return [v.identifier for v in value]
            else:
                return value.identifier if value else None
        elif self.value_type == Attribute.TYPE_INTEGER:
            if self.multiple_choice and value is not None:
                return [
                    int(v) if v is not None else None
                    for v in value
                ]
            else:
                return int(value) if value is not None else None
        elif self.value_type == Attribute.TYPE_DECIMAL:
            # Stored as a string to avoid float precision loss in JSON.
            return str(value) if value is not None else None
        elif self.value_type in (
            Attribute.TYPE_SHORT_STRING,
            Attribute.TYPE_LONG_STRING,
            Attribute.TYPE_LINK,
            Attribute.TYPE_CHOICE,
            Attribute.TYPE_PERSONNEL,
        ):
            if self.multiple_choice and value is not None:
                return [
                    str(v) if v else None
                    for v in value
                ]
            else:
                return str(value) if value else None
        elif self.value_type in (
            Attribute.TYPE_RICH_TEXT,
            Attribute.TYPE_RICH_TEXT_SHORT,
        ):
            # Rich text is stored verbatim (already JSON-compatible).
            if self.multiple_choice and value is not None:
                return [v for v in value]
            else:
                return value
        elif self.value_type == Attribute.TYPE_BOOLEAN:
            if self.multiple_choice and value is not None:
                return [
                    bool(v) if v is not None else None
                    for v in value
                ]
            else:
                return bool(value) if value is not None else None
        elif self.value_type == Attribute.TYPE_DATE:
            # Already-serialized strings pass through unchanged.
            if isinstance(value, str):
                return value
            return (
                datetime.datetime.strftime(
                    value, DATE_SERIALIZATION_FORMAT
                ) if value else None
            )
        elif self.value_type == Attribute.TYPE_USER:
            # allow saving non-existing users using their names (str) at least for now.
            # actual users are saved using their ids (int).
            if isinstance(value, get_user_model()):
                if self.multiple_choice and value is not None:
                    return [v.uuid for v in value]
                else:
                    return value.uuid
            else:
                if self.multiple_choice and value is not None:
                    return [v or None for v in value]
                else:
                    return value or None
        elif self.value_type == Attribute.TYPE_FIELDSET:
            return self._get_fieldset_serialization(value)
        elif self.value_type in (Attribute.TYPE_FILE, Attribute.TYPE_IMAGE):
            # Files are handled elsewhere; only None/"" markers are stored.
            if value is None:
                return None
            else:
                return ""
        else:
            raise Exception('Cannot serialize attribute type "%s".' % self.value_type)
    def deserialize_value(self, value):
        """Convert a serialized (JSON) value back into its Python form.

        Inverse of :meth:`serialize_value`; raises ``Exception`` for value
        types with no deserialization rule.
        """
        if self.value_type == Attribute.TYPE_CHOICE:
            value_choices = self.value_choices.all()
        else:
            value_choices = None
        # Choice identifiers map back to AttributeValueChoice instances;
        # unknown identifiers deserialize to None.
        if value_choices and value_choices.exists():
            if self.multiple_choice and value is not None:
                return [v for v in value_choices.filter(identifier__in=value)]
            else:
                try:
                    return value_choices.get(identifier=value)
                except AttributeValueChoice.DoesNotExist:
                    return None
        elif self.value_type in (
            Attribute.TYPE_INTEGER,
            Attribute.TYPE_DECIMAL,
            Attribute.TYPE_SHORT_STRING,
            Attribute.TYPE_LONG_STRING,
            Attribute.TYPE_BOOLEAN,
            Attribute.TYPE_LINK,
            Attribute.TYPE_CHOICE,
            Attribute.TYPE_PERSONNEL,
        ):
            # Stored form is already the usable Python form.
            return value
        elif self.value_type in (
            Attribute.TYPE_RICH_TEXT,
            Attribute.TYPE_RICH_TEXT_SHORT,
        ):
            return value
        elif self.value_type == Attribute.TYPE_DATE:
            return (
                datetime.datetime.strptime(
                    value, DATE_SERIALIZATION_FORMAT
                ).date() if value else None
            )
        elif self.value_type == Attribute.TYPE_USER:
            # Users are stored by uuid; unknown uuids deserialize to None.
            try:
                return get_user_model().objects.get(uuid=value)
            except get_user_model().DoesNotExist:
                return None
        elif self.value_type == Attribute.TYPE_FIELDSET:
            return self._get_fieldset_serialization(value, deserialize=True)
        else:
            raise Exception('Cannot deserialize attribute type "%s".' % self.value_type)
    def _get_fieldset_serialization(self, value: Sequence, deserialize: bool = False):
        """Recursively go through the fields in the fieldset and (de)serialize them.

        Accepts a single dict (wrapped into a list) or a sequence of dicts;
        returns a list of processed dicts, or None for non-sequence input.
        """
        if isinstance(value, OrderedDict):
            value = [value]
        elif not isinstance(value, Sequence):
            return None
        entities = []
        fieldset_attributes = self.fieldset_attributes.all()
        for i, listitem in enumerate(value):
            processed_entity = {}
            processed_entity_has_files = False
            for key, val in listitem.items():
                # "_deleted" is a soft-delete marker, copied through as-is.
                if key == "_deleted":
                    processed_entity[key] = val
                    continue
                for attr in fieldset_attributes:
                    # NOTE(review): any file/image attribute in the fieldset
                    # flips this flag regardless of whether it matches ``key``,
                    # and file values are never copied into the output —
                    # presumably handled elsewhere; verify against callers.
                    if attr.value_type in (
                        Attribute.TYPE_FILE, Attribute.TYPE_IMAGE
                    ):
                        # TODO If alternate file deletion method is needed,
                        # add if val is None check
                        processed_entity_has_files = True
                    elif attr.identifier == key:
                        if deserialize:
                            processed_value = attr.deserialize_value(
                                val
                            )
                        else:
                            processed_value = attr.serialize_value(val)
                        processed_entity[attr.identifier] = processed_value
                    else:
                        continue
            if processed_entity or processed_entity_has_files:
                entities.append(processed_entity)
        return entities
    def _get_single_display_value(self, value):
        """Format one stored value as a human-readable (Finnish UI) string.

        Returns None for missing values and geometry attributes; falls back
        to an HTML-escaped ``str(value)`` for unhandled types.
        """
        if value is None or self.value_type == Attribute.TYPE_GEOMETRY:
            return None
        # Rich text is Quill delta JSON: join the "insert" ops into plain text.
        if self.value_type in (
            Attribute.TYPE_RICH_TEXT,
            Attribute.TYPE_RICH_TEXT_SHORT,
        ) and value:
            try:
                return ("".join(
                    [item["insert"] for item in value["ops"]]
                ).strip())
            except TypeError:
                return None
        # remove checking type should pinonumero attribute (and possible others) be
        # fixed in the attribute excel
        elif self.value_type == Attribute.TYPE_INTEGER and isinstance(value, int):
            if self.display == Attribute.DISPLAY_SIMPLE_INTEGER:
                return str(value)
            else:
                # Thousand separator as a space ("12 345"), Finnish convention.
                return '{:,}'.format(value).replace(',', ' ')
        elif self.value_type == Attribute.TYPE_DECIMAL and self.unit in ["ha", "k-m2"]:
            return '{:,}'.format(int(float(value))).replace(',', ' ')
        elif self.value_type == Attribute.TYPE_DATE:
            date_value = datetime.datetime.strptime(value, "%Y-%m-%d")
            return '{d.day}.{d.month}.{d.year}'.format(d=date_value)
        elif self.value_type == Attribute.TYPE_CHOICE:
            try:
                return self.value_choices.get(identifier=value).value
            except AttributeValueChoice.DoesNotExist:
                return value
        elif isinstance(value, bool):
            # Finnish yes/no labels.
            return "Kyllä" if value else "Ei"
        elif self.value_type == Attribute.TYPE_PERSONNEL:
            try:
                return PersonnelSerializer(get_ad_user(value)).data.get("name")
            except AttributeError:
                return value
        elif self.value_type == Attribute.TYPE_USER:
            # Value may be a User instance or a stored uuid string.
            if not isinstance(value, User):
                try:
                    user = User.objects.get(uuid=value)
                except ValidationError:
                    return value
            else:
                user = value
            return user.get_full_name()
        else:
            return escape(str(value))
def get_attribute_display(self, value):
if isinstance(value, list):
if self.value_type == Attribute.TYPE_FIELDSET:
return [
{k: self._get_single_display_value(v) for k, v in item.items()}
for item in value
]
return [self._get_single_display_value(v) for v in value]
else:
return self._get_single_display_value(value)
@property
def calculation_attribute_identifiers(self):
if not self.calculations:
return []
return self.calculations[0::2]
@property
def calculation_operators(self):
if not self.calculations:
return []
return self.calculations[1::2]
class AttributeValueChoice(models.Model):
    """Single value choice for a single attribute."""
    attribute = models.ForeignKey(
        Attribute,
        verbose_name=_("attribute"),
        related_name="value_choices",
        on_delete=models.CASCADE,
    )
    # ``value`` is the display label; ``identifier`` is the stable key used
    # in serialized attribute data.
    value = models.TextField(verbose_name=_("value"))
    identifier = models.CharField(
        max_length=150,
        verbose_name=_("identifier"),
        db_index=True,
        validators=[validate_identifier],
    )
    # Ordering position within the attribute's choice list.
    index = models.PositiveIntegerField(verbose_name=_("index"), default=0)
    class Meta:
        verbose_name = _("attribute value choice")
        verbose_name_plural = _("attribute value choices")
        unique_together = (("attribute", "identifier"), ("attribute", "index"))
        ordering = ("index",)
    def __str__(self):
        return self.value
class AttributeAutoValue(models.Model):
    """
    Automatic, dynamic value that gets injected into attribute data based on another
    attribute value
    Differs from autofill rules in that these are not calculated in frontend,
    will update the contents of all projects' fields if database changes are made,
    and will not generate log entries whereas autofill rule based field contents only
    change when frontend sends updated values, and they do generate log entries.
    """
    value_attribute = models.OneToOneField(
        Attribute,
        on_delete=models.CASCADE,
        related_name="automatic_value",
        verbose_name=_("automatic attribute"),
    )
    key_attribute = models.ForeignKey(
        Attribute,
        on_delete=models.CASCADE,
        related_name="automatically_sets",
        verbose_name=_("key attribute"),
    )
    def get_value(self, key):
        """Look up the mapped value for ``key``; None if no mapping exists."""
        # Mappings are stored with string keys, so normalize first.
        key = str(key)
        try:
            return self.value_map.get(key_str=key).value
        except AttributeAutoValueMapping.DoesNotExist:
            return None
    def __str__(self):
        return f"{self.key_attribute.name} -> {self.value_attribute.name}"
class AttributeAutoValueMapping(models.Model):
    """A single key-value pair related to an auto value attribute pair"""
    auto_attr = models.ForeignKey(
        AttributeAutoValue,
        on_delete=models.CASCADE,
        related_name="value_map",
        verbose_name=_("attribute link"),
    )
    # Both sides are stored in serialized (string) form; the ``key``/``value``
    # properties below deserialize them via the linked attributes.
    key_str = models.TextField(verbose_name=_("key"))
    value_str = models.TextField(verbose_name=_("value"))
    @property
    def key(self):
        # Deserialized key, typed per the auto value's key attribute.
        return self.auto_attr.key_attribute.deserialize_value(self.key_str)
    @property
    def value(self):
        # Deserialized value, typed per the auto value's value attribute.
        return self.auto_attr.value_attribute.deserialize_value(self.value_str)
    class Meta:
        unique_together = ('auto_attr', 'key_str')
class FieldSetAttribute(models.Model):
    # Through-model linking a fieldset attribute (source) to one of its
    # member attributes (target); see Attribute.fieldset_attributes.
    attribute_source = models.ForeignKey(
        Attribute, on_delete=models.CASCADE, related_name="fieldset_attribute_source"
    )
    attribute_target = models.ForeignKey(
        Attribute, on_delete=models.CASCADE, related_name="fieldset_attribute_target"
    )
    # Per-phase ordering of this member within the fieldset.
    phase_indices = models.ManyToManyField(
        "ProjectPhase",
        symmetrical=False,
        related_name="fieldsets",
        through="ProjectPhaseFieldSetAttributeIndex",
    )
    class Meta:
        verbose_name = _("fieldset attribute")
        verbose_name_plural = _("fieldset attributes")
    def __str__(self):
        return f"{self.attribute_source} -> {self.attribute_target}"
class DocumentLinkSection(models.Model):
    """Defines a project card document link subsection"""
    name = models.CharField(
        max_length=255,
        verbose_name=_("name"),
    )
    # Display order of the section (ascending, see Meta.ordering).
    index = models.PositiveIntegerField(
        verbose_name=_("index"),
        default=0,
    )
    class Meta:
        verbose_name = _("document link section")
        verbose_name_plural = _("document link sections")
        ordering = ("index",)
    def __str__(self):
        return f"{self.name}"
class DocumentLinkFieldSet(models.Model):
    """Connects external document link and name within a fieldset"""
    section = models.ForeignKey(
        DocumentLinkSection,
        on_delete=models.CASCADE,
    )
    # The fieldset whose entries carry the document data.
    fieldset_attribute = models.ForeignKey(
        Attribute,
        on_delete=models.CASCADE,
        related_name="document_fieldsets",
        verbose_name=_("fieldset attribute"),
    )
    # Which member attributes of the fieldset provide the document's name
    # (predefined and custom) and its link URL.
    document_name_attribute = models.ForeignKey(
        Attribute,
        on_delete=models.CASCADE,
        related_name="name_in_document_fieldsets",
        verbose_name=_("document name attribute"),
        null=True,
        blank=True,
    )
    document_custom_name_attribute = models.ForeignKey(
        Attribute,
        on_delete=models.CASCADE,
        related_name="custom_name_in_document_fieldsets",
        verbose_name=_("document custom name attribute"),
        null=True,
        blank=True,
    )
    document_link_attribute = models.ForeignKey(
        Attribute,
        on_delete=models.CASCADE,
        related_name="link_in_document_fieldsets",
        verbose_name=_("document link attribute"),
    )
    class Meta:
        verbose_name = _("document link fieldset")
        verbose_name_plural = _("document link fieldsets")
    def __str__(self):
        return f"{self.fieldset_attribute.name}"
class OverviewFilter(models.Model):
    """Defines a filter on project overview views"""
    name = models.CharField(
        max_length=255,
        verbose_name=_("name"),
    )
    # Stable machine-readable key for the filter (validated identifier).
    identifier = models.CharField(
        max_length=20,
        verbose_name=_("identifier"),
        db_index=True,
        unique=True,
        validators=[validate_identifier],
    )
    class Meta:
        verbose_name = _("overview filter")
        verbose_name_plural = _("overview filters")
    def __str__(self):
        return f"{self.name}"
class OverviewFilterAttribute(models.Model):
    """Defines an attribute in a filter"""
    attribute = models.ForeignKey(
        Attribute,
        on_delete=models.CASCADE,
        verbose_name=_("attribute"),
    )
    overview_filter = models.ForeignKey(
        OverviewFilter,
        on_delete=models.CASCADE,
        verbose_name=_("overview filter"),
        related_name="attributes",
    )
    # Flags selecting which overview views this attribute filters.
    filters_by_subtype = models.BooleanField(
        verbose_name=_("filters project subtype overview"),
        default=False,
    )
    filters_floor_area = models.BooleanField(
        verbose_name=_("filters project floor area overview"),
        default=False,
    )
    filters_on_map = models.BooleanField(
        verbose_name=_("filters project map overview"),
        default=False,
    )
    class Meta:
        verbose_name = _("overview filter attribute")
        verbose_name_plural = _("overview filter attributes")
    def __str__(self):
        return f"{self.overview_filter.name}/{self.attribute.name}"
| [
"django.contrib.auth.get_user_model",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"datetime.datetime.strptime",
"projects.helpers.get_ad_user",
"django.utils.translation.gettext_lazy",
"django.db.models.ManyToManyField",
"django.core.exceptions.ValidationError",
"django.db.models.Bo... | [((8497, 8666), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""self"""'], {'symmetrical': '(False)', 'related_name': '"""fieldsets"""', 'through': '"""FieldSetAttribute"""', 'through_fields': "('attribute_source', 'attribute_target')"}), "('self', symmetrical=False, related_name='fieldsets',\n through='FieldSetAttribute', through_fields=('attribute_source',\n 'attribute_target'))\n", (8519, 8666), False, 'from django.db import models, transaction\n'), ((8888, 8922), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (8907, 8922), False, 'from django.db import models, transaction\n'), ((10232, 10287), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'null': '(True)'}), '(max_length=255, blank=True, null=True)\n', (10248, 10287), False, 'from django.db import models, transaction\n'), ((10983, 11038), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (10999, 11038), False, 'from django.db import models, transaction\n'), ((26143, 26244), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Attribute'], {'on_delete': 'models.CASCADE', 'related_name': '"""fieldset_attribute_source"""'}), "(Attribute, on_delete=models.CASCADE, related_name=\n 'fieldset_attribute_source')\n", (26160, 26244), False, 'from django.db import models, transaction\n'), ((26277, 26378), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Attribute'], {'on_delete': 'models.CASCADE', 'related_name': '"""fieldset_attribute_target"""'}), "(Attribute, on_delete=models.CASCADE, related_name=\n 'fieldset_attribute_target')\n", (26294, 26378), False, 'from django.db import models, transaction\n'), ((26408, 26542), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""ProjectPhase"""'], {'symmetrical': '(False)', 
'related_name': '"""fieldsets"""', 'through': '"""ProjectPhaseFieldSetAttributeIndex"""'}), "('ProjectPhase', symmetrical=False, related_name=\n 'fieldsets', through='ProjectPhaseFieldSetAttributeIndex')\n", (26430, 26542), False, 'from django.db import models, transaction\n'), ((27416, 27480), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DocumentLinkSection'], {'on_delete': 'models.CASCADE'}), '(DocumentLinkSection, on_delete=models.CASCADE)\n', (27433, 27480), False, 'from django.db import models, transaction\n'), ((7436, 7480), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)'}), '(max_length=255, blank=True)\n', (7452, 7480), False, 'from django.db import models, transaction\n'), ((7551, 7579), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (7567, 7579), False, 'from django.db import models, transaction\n'), ((11550, 11564), 'django.utils.translation.gettext_lazy', '_', (['"""attribute"""'], {}), "('attribute')\n", (11551, 11564), True, 'from django.utils.translation import gettext_lazy as _\n'), ((11595, 11610), 'django.utils.translation.gettext_lazy', '_', (['"""attributes"""'], {}), "('attributes')\n", (11596, 11610), True, 'from django.utils.translation import gettext_lazy as _\n'), ((23956, 23983), 'django.utils.translation.gettext_lazy', '_', (['"""attribute value choice"""'], {}), "('attribute value choice')\n", (23957, 23983), True, 'from django.utils.translation import gettext_lazy as _\n'), ((24014, 24042), 'django.utils.translation.gettext_lazy', '_', (['"""attribute value choices"""'], {}), "('attribute value choices')\n", (24015, 24042), True, 'from django.utils.translation import gettext_lazy as _\n'), ((26617, 26640), 'django.utils.translation.gettext_lazy', '_', (['"""fieldset attribute"""'], {}), "('fieldset attribute')\n", (26618, 26640), True, 'from django.utils.translation import gettext_lazy as _\n'), ((26671, 26695), 
'django.utils.translation.gettext_lazy', '_', (['"""fieldset attributes"""'], {}), "('fieldset attributes')\n", (26672, 26695), True, 'from django.utils.translation import gettext_lazy as _\n'), ((27120, 27146), 'django.utils.translation.gettext_lazy', '_', (['"""document link section"""'], {}), "('document link section')\n", (27121, 27146), True, 'from django.utils.translation import gettext_lazy as _\n'), ((27177, 27204), 'django.utils.translation.gettext_lazy', '_', (['"""document link sections"""'], {}), "('document link sections')\n", (27178, 27204), True, 'from django.utils.translation import gettext_lazy as _\n'), ((28465, 28492), 'django.utils.translation.gettext_lazy', '_', (['"""document link fieldset"""'], {}), "('document link fieldset')\n", (28466, 28492), True, 'from django.utils.translation import gettext_lazy as _\n'), ((28523, 28551), 'django.utils.translation.gettext_lazy', '_', (['"""document link fieldsets"""'], {}), "('document link fieldsets')\n", (28524, 28551), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29035, 29055), 'django.utils.translation.gettext_lazy', '_', (['"""overview filter"""'], {}), "('overview filter')\n", (29036, 29055), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29086, 29107), 'django.utils.translation.gettext_lazy', '_', (['"""overview filters"""'], {}), "('overview filters')\n", (29087, 29107), True, 'from django.utils.translation import gettext_lazy as _\n'), ((30006, 30036), 'django.utils.translation.gettext_lazy', '_', (['"""overview filter attribute"""'], {}), "('overview filter attribute')\n", (30007, 30036), True, 'from django.utils.translation import gettext_lazy as _\n'), ((30067, 30098), 'django.utils.translation.gettext_lazy', '_', (['"""overview filter attributes"""'], {}), "('overview filter attributes')\n", (30068, 30098), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1894, 1908), 'django.utils.translation.gettext_lazy', '_', 
(['"""permanent"""'], {}), "('permanent')\n", (1895, 1908), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1937, 1958), 'django.utils.translation.gettext_lazy', '_', (['"""while processing"""'], {}), "('while processing')\n", (1938, 1958), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1983, 2018), 'django.utils.translation.gettext_lazy', '_', (['"""custom duration after archival"""'], {}), "('custom duration after archival')\n", (1984, 2018), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2146, 2156), 'django.utils.translation.gettext_lazy', '_', (['"""years"""'], {}), "('years')\n", (2147, 2156), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2181, 2192), 'django.utils.translation.gettext_lazy', '_', (['"""months"""'], {}), "('months')\n", (2182, 2192), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2215, 2224), 'django.utils.translation.gettext_lazy', '_', (['"""days"""'], {}), "('days')\n", (2216, 2224), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2292, 2302), 'django.utils.translation.gettext_lazy', '_', (['"""label"""'], {}), "('label')\n", (2293, 2302), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2395, 2409), 'django.utils.translation.gettext_lazy', '_', (['"""plan type"""'], {}), "('plan type')\n", (2396, 2409), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2515, 2531), 'django.utils.translation.gettext_lazy', '_', (['"""custom time"""'], {}), "('custom time')\n", (2516, 2531), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2662, 2687), 'django.utils.translation.gettext_lazy', '_', (['"""unit for custom time"""'], {}), "('unit for custom time')\n", (2663, 2687), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3782, 3795), 'django.utils.translation.gettext_lazy', '_', (['"""fieldset"""'], {}), "('fieldset')\n", (3783, 3795), True, 'from 
django.utils.translation import gettext_lazy as _\n'), ((3821, 3833), 'django.utils.translation.gettext_lazy', '_', (['"""integer"""'], {}), "('integer')\n", (3822, 3833), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3859, 3871), 'django.utils.translation.gettext_lazy', '_', (['"""decimal"""'], {}), "('decimal')\n", (3860, 3871), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3902, 3919), 'django.utils.translation.gettext_lazy', '_', (['"""short string"""'], {}), "('short string')\n", (3903, 3919), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3949, 3965), 'django.utils.translation.gettext_lazy', '_', (['"""long string"""'], {}), "('long string')\n", (3950, 3965), True, 'from django.utils.translation import gettext_lazy as _\n'), ((3993, 4007), 'django.utils.translation.gettext_lazy', '_', (['"""rich text"""'], {}), "('rich text')\n", (3994, 4007), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4041, 4061), 'django.utils.translation.gettext_lazy', '_', (['"""short rich text"""'], {}), "('short rich text')\n", (4042, 4061), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4087, 4099), 'django.utils.translation.gettext_lazy', '_', (['"""boolean"""'], {}), "('boolean')\n", (4088, 4099), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4122, 4131), 'django.utils.translation.gettext_lazy', '_', (['"""date"""'], {}), "('date')\n", (4123, 4131), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4154, 4163), 'django.utils.translation.gettext_lazy', '_', (['"""user"""'], {}), "('user')\n", (4155, 4163), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4191, 4205), 'django.utils.translation.gettext_lazy', '_', (['"""personnel"""'], {}), "('personnel')\n", (4192, 4205), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4232, 4245), 'django.utils.translation.gettext_lazy', '_', 
(['"""geometry"""'], {}), "('geometry')\n", (4233, 4245), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4269, 4279), 'django.utils.translation.gettext_lazy', '_', (['"""image"""'], {}), "('image')\n", (4270, 4279), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4302, 4311), 'django.utils.translation.gettext_lazy', '_', (['"""file"""'], {}), "('file')\n", (4303, 4311), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4334, 4343), 'django.utils.translation.gettext_lazy', '_', (['"""link"""'], {}), "('link')\n", (4335, 4343), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4368, 4379), 'django.utils.translation.gettext_lazy', '_', (['"""choice"""'], {}), "('choice')\n", (4369, 4379), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4629, 4641), 'django.utils.translation.gettext_lazy', '_', (['"""default"""'], {}), "('default')\n", (4630, 4641), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4671, 4684), 'django.utils.translation.gettext_lazy', '_', (['"""dropdown"""'], {}), "('dropdown')\n", (4672, 4684), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4714, 4727), 'django.utils.translation.gettext_lazy', '_', (['"""checkbox"""'], {}), "('checkbox')\n", (4715, 4727), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4757, 4771), 'django.utils.translation.gettext_lazy', '_', (['"""read only"""'], {}), "('read only')\n", (4758, 4771), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4810, 4833), 'django.utils.translation.gettext_lazy', '_', (['"""read only checkbox"""'], {}), "('read only checkbox')\n", (4811, 4833), True, 'from django.utils.translation import gettext_lazy as _\n'), ((4869, 4908), 'django.utils.translation.gettext_lazy', '_', (['"""integer without thousand separator"""'], {}), "('integer without thousand separator')\n", (4870, 4908), True, 'from 
django.utils.translation import gettext_lazy as _\n'), ((5125, 5153), 'django.utils.translation.gettext_lazy', '_', (['"""Same as parent fieldset"""'], {}), "('Same as parent fieldset')\n", (5126, 5153), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5179, 5189), 'django.utils.translation.gettext_lazy', '_', (['"""FACTA"""'], {}), "('FACTA')\n", (5180, 5189), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5219, 5233), 'django.utils.translation.gettext_lazy', '_', (['"""Geoserver"""'], {}), "('Geoserver')\n", (5220, 5233), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5822, 5831), 'django.utils.translation.gettext_lazy', '_', (['"""name"""'], {}), "('name')\n", (5823, 5831), True, 'from django.utils.translation import gettext_lazy as _\n'), ((5904, 5919), 'django.utils.translation.gettext_lazy', '_', (['"""value type"""'], {}), "('value type')\n", (5905, 5919), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6024, 6042), 'django.utils.translation.gettext_lazy', '_', (['"""display style"""'], {}), "('display style')\n", (6025, 6042), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6425, 6450), 'django.utils.translation.gettext_lazy', '_', (['"""visibility condition"""'], {}), "('visibility condition')\n", (6426, 6450), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6772, 6791), 'django.utils.translation.gettext_lazy', '_', (['"""hide condition"""'], {}), "('hide condition')\n", (6773, 6791), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6904, 6913), 'django.utils.translation.gettext_lazy', '_', (['"""unit"""'], {}), "('unit')\n", (6905, 6913), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6989, 7012), 'django.utils.translation.gettext_lazy', '_', (['"""public information"""'], {}), "('public information')\n", (6990, 7012), True, 'from django.utils.translation import gettext_lazy as _\n'), 
((7079, 7100), 'django.utils.translation.gettext_lazy', '_', (['"""searchable field"""'], {}), "('searchable field')\n", (7080, 7100), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7166, 7180), 'django.utils.translation.gettext_lazy', '_', (['"""generated"""'], {}), "('generated')\n", (7167, 7180), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7292, 7316), 'django.utils.translation.gettext_lazy', '_', (['"""data retention plan"""'], {}), "('data retention plan')\n", (7293, 7316), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7657, 7670), 'django.utils.translation.gettext_lazy', '_', (['"""required"""'], {}), "('required')\n", (7658, 7670), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7751, 7771), 'django.utils.translation.gettext_lazy', '_', (['"""multiple choice"""'], {}), "('multiple choice')\n", (7752, 7771), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7865, 7885), 'django.utils.translation.gettext_lazy', '_', (['"""character limit"""'], {}), "('character limit')\n", (7866, 7885), True, 'from django.utils.translation import gettext_lazy as _\n'), ((7994, 8015), 'django.utils.translation.gettext_lazy', '_', (['"""placeholder text"""'], {}), "('placeholder text')\n", (7995, 8015), True, 'from django.utils.translation import gettext_lazy as _\n'), ((8117, 8128), 'django.utils.translation.gettext_lazy', '_', (['"""unique"""'], {}), "('unique')\n", (8118, 8128), True, 'from django.utils.translation import gettext_lazy as _\n'), ((8218, 8236), 'django.utils.translation.gettext_lazy', '_', (['"""error message"""'], {}), "('error message')\n", (8219, 8236), True, 'from django.utils.translation import gettext_lazy as _\n'), ((8362, 8377), 'django.utils.translation.gettext_lazy', '_', (['"""identifier"""'], {}), "('identifier')\n", (8363, 8377), True, 'from django.utils.translation import gettext_lazy as _\n'), ((8752, 8766), 
'django.utils.translation.gettext_lazy', '_', (['"""Help text"""'], {}), "('Help text')\n", (8753, 8766), True, 'from django.utils.translation import gettext_lazy as _\n'), ((8825, 8839), 'django.utils.translation.gettext_lazy', '_', (['"""Help link"""'], {}), "('Help link')\n", (8826, 8839), True, 'from django.utils.translation import gettext_lazy as _\n'), ((8980, 9009), 'django.utils.translation.gettext_lazy', '_', (['"""read-only autofill field"""'], {}), "('read-only autofill field')\n", (8981, 9009), True, 'from django.utils.translation import gettext_lazy as _\n'), ((9081, 9099), 'django.utils.translation.gettext_lazy', '_', (['"""autofill rule"""'], {}), "('autofill rule')\n", (9082, 9099), True, 'from django.utils.translation import gettext_lazy as _\n'), ((9259, 9295), 'django.utils.translation.gettext_lazy', '_', (['"""updates related autofill fields"""'], {}), "('updates related autofill fields')\n", (9260, 9295), True, 'from django.utils.translation import gettext_lazy as _\n'), ((9389, 9419), 'django.utils.translation.gettext_lazy', '_', (['"""highlight field for group"""'], {}), "('highlight field for group')\n", (9390, 9419), True, 'from django.utils.translation import gettext_lazy as _\n'), ((9586, 9605), 'django.utils.translation.gettext_lazy', '_', (['"""owner can edit"""'], {}), "('owner can edit')\n", (9587, 9605), True, 'from django.utils.translation import gettext_lazy as _\n'), ((9698, 9717), 'django.utils.translation.gettext_lazy', '_', (['"""owner can view"""'], {}), "('owner can view')\n", (9699, 9717), True, 'from django.utils.translation import gettext_lazy as _\n'), ((9785, 9811), 'django.utils.translation.gettext_lazy', '_', (['"""privilege for viewing"""'], {}), "('privilege for viewing')\n", (9786, 9811), True, 'from django.utils.translation import gettext_lazy as _\n'), ((10000, 10026), 'django.utils.translation.gettext_lazy', '_', (['"""privilege for editing"""'], {}), "('privilege for editing')\n", (10001, 10026), True, 'from 
django.utils.translation import gettext_lazy as _\n'), ((10433, 10458), 'django.utils.translation.gettext_lazy', '_', (['"""external data source"""'], {}), "('external data source')\n", (10434, 10458), True, 'from django.utils.translation import gettext_lazy as _\n'), ((10598, 10637), 'django.utils.translation.gettext_lazy', '_', (['"""field key for external data source"""'], {}), "('field key for external data source')\n", (10599, 10637), True, 'from django.utils.translation import gettext_lazy as _\n'), ((10789, 10834), 'django.utils.translation.gettext_lazy', '_', (['"""key attribute for fetching external data"""'], {}), "('key attribute for fetching external data')\n", (10790, 10834), True, 'from django.utils.translation import gettext_lazy as _\n'), ((11154, 11198), 'django.utils.translation.gettext_lazy', '_', (['"""key attribute for fetching AD user data"""'], {}), "('key attribute for fetching AD user data')\n", (11155, 11198), True, 'from django.utils.translation import gettext_lazy as _\n'), ((11336, 11357), 'django.utils.translation.gettext_lazy', '_', (['"""AD user data key"""'], {}), "('AD user data key')\n", (11337, 11357), True, 'from django.utils.translation import gettext_lazy as _\n'), ((12380, 12579), 'django.core.exceptions.ValidationError', 'ValidationError', (['f"""Calculations needs to be uneven in length andfollow the style \'(<attribtute> <operator> <attribute>)*n. Error in {self.identifier} with calculations {self.calculations}."""'], {}), '(\n f"Calculations needs to be uneven in length andfollow the style \'(<attribtute> <operator> <attribute>)*n. Error in {self.identifier} with calculations {self.calculations}."\n )\n', (12395, 12579), False, 'from django.core.exceptions import ValidationError\n'), ((12731, 12865), 'django.core.exceptions.ValidationError', 'ValidationError', (['f"""Calculation can not end with operator. 
Error in {self.identifier} with calculations {self.calculations}."""'], {}), "(\n f'Calculation can not end with operator. Error in {self.identifier} with calculations {self.calculations}.'\n )\n", (12746, 12865), False, 'from django.core.exceptions import ValidationError\n'), ((13070, 13236), 'django.core.exceptions.ValidationError', 'ValidationError', (['f"""Calculation operators can only be {self.ALLOWED_CALCULATION_OPERATORS}. Error in {self.identifier} with calculation {self.calculations}."""'], {}), "(\n f'Calculation operators can only be {self.ALLOWED_CALCULATION_OPERATORS}. Error in {self.identifier} with calculation {self.calculations}.'\n )\n", (13085, 13236), False, 'from django.core.exceptions import ValidationError\n'), ((13427, 13576), 'django.core.exceptions.ValidationError', 'ValidationError', (['f"""There must be exactly one more attribute then operatorsError in {self.identifier} with calculation {self.calculations}."""'], {}), "(\n f'There must be exactly one more attribute then operatorsError in {self.identifier} with calculation {self.calculations}.'\n )\n", (13442, 13576), False, 'from django.core.exceptions import ValidationError\n'), ((23524, 23538), 'django.utils.translation.gettext_lazy', '_', (['"""attribute"""'], {}), "('attribute')\n", (23525, 23538), True, 'from django.utils.translation import gettext_lazy as _\n'), ((23660, 23670), 'django.utils.translation.gettext_lazy', '_', (['"""value"""'], {}), "('value')\n", (23661, 23670), True, 'from django.utils.translation import gettext_lazy as _\n'), ((23752, 23767), 'django.utils.translation.gettext_lazy', '_', (['"""identifier"""'], {}), "('identifier')\n", (23753, 23767), True, 'from django.utils.translation import gettext_lazy as _\n'), ((23893, 23903), 'django.utils.translation.gettext_lazy', '_', (['"""index"""'], {}), "('index')\n", (23894, 23903), True, 'from django.utils.translation import gettext_lazy as _\n'), ((24854, 24878), 'django.utils.translation.gettext_lazy', '_', 
(['"""automatic attribute"""'], {}), "('automatic attribute')\n", (24855, 24878), True, 'from django.utils.translation import gettext_lazy as _\n'), ((25042, 25060), 'django.utils.translation.gettext_lazy', '_', (['"""key attribute"""'], {}), "('key attribute')\n", (25043, 25060), True, 'from django.utils.translation import gettext_lazy as _\n'), ((25645, 25664), 'django.utils.translation.gettext_lazy', '_', (['"""attribute link"""'], {}), "('attribute link')\n", (25646, 25664), True, 'from django.utils.translation import gettext_lazy as _\n'), ((25716, 25724), 'django.utils.translation.gettext_lazy', '_', (['"""key"""'], {}), "('key')\n", (25717, 25724), True, 'from django.utils.translation import gettext_lazy as _\n'), ((25772, 25782), 'django.utils.translation.gettext_lazy', '_', (['"""value"""'], {}), "('value')\n", (25773, 25782), True, 'from django.utils.translation import gettext_lazy as _\n'), ((26964, 26973), 'django.utils.translation.gettext_lazy', '_', (['"""name"""'], {}), "('name')\n", (26965, 26973), True, 'from django.utils.translation import gettext_lazy as _\n'), ((27043, 27053), 'django.utils.translation.gettext_lazy', '_', (['"""index"""'], {}), "('index')\n", (27044, 27053), True, 'from django.utils.translation import gettext_lazy as _\n'), ((27665, 27688), 'django.utils.translation.gettext_lazy', '_', (['"""fieldset attribute"""'], {}), "('fieldset attribute')\n", (27666, 27688), True, 'from django.utils.translation import gettext_lazy as _\n'), ((27870, 27898), 'django.utils.translation.gettext_lazy', '_', (['"""document name attribute"""'], {}), "('document name attribute')\n", (27871, 27898), True, 'from django.utils.translation import gettext_lazy as _\n'), ((28133, 28168), 'django.utils.translation.gettext_lazy', '_', (['"""document custom name attribute"""'], {}), "('document custom name attribute')\n", (28134, 28168), True, 'from django.utils.translation import gettext_lazy as _\n'), ((28389, 28417), 
'django.utils.translation.gettext_lazy', '_', (['"""document link attribute"""'], {}), "('document link attribute')\n", (28390, 28417), True, 'from django.utils.translation import gettext_lazy as _\n'), ((28790, 28799), 'django.utils.translation.gettext_lazy', '_', (['"""name"""'], {}), "('name')\n", (28791, 28799), True, 'from django.utils.translation import gettext_lazy as _\n'), ((28886, 28901), 'django.utils.translation.gettext_lazy', '_', (['"""identifier"""'], {}), "('identifier')\n", (28887, 28901), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29361, 29375), 'django.utils.translation.gettext_lazy', '_', (['"""attribute"""'], {}), "('attribute')\n", (29362, 29375), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29503, 29523), 'django.utils.translation.gettext_lazy', '_', (['"""overview filter"""'], {}), "('overview filter')\n", (29504, 29523), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29633, 29670), 'django.utils.translation.gettext_lazy', '_', (['"""filters project subtype overview"""'], {}), "('filters project subtype overview')\n", (29634, 29670), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29768, 29808), 'django.utils.translation.gettext_lazy', '_', (['"""filters project floor area overview"""'], {}), "('filters project floor area overview')\n", (29769, 29808), True, 'from django.utils.translation import gettext_lazy as _\n'), ((29902, 29935), 'django.utils.translation.gettext_lazy', '_', (['"""filters project map overview"""'], {}), "('filters project map overview')\n", (29903, 29935), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6235, 6279), 'django.utils.translation.gettext_lazy', '_', (['"""show attribute if any condition applies"""'], {}), "('show attribute if any condition applies')\n", (6236, 6279), True, 'from django.utils.translation import gettext_lazy as _\n'), ((6582, 6626), 'django.utils.translation.gettext_lazy', '_', 
(['"""hide attribute if any condition applies"""'], {}), "('hide attribute if any condition applies')\n", (6583, 6626), True, 'from django.utils.translation import gettext_lazy as _\n'), ((21568, 21613), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['value', '"""%Y-%m-%d"""'], {}), "(value, '%Y-%m-%d')\n", (21594, 21613), False, 'import datetime\n'), ((18221, 18281), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['value', 'DATE_SERIALIZATION_FORMAT'], {}), '(value, DATE_SERIALIZATION_FORMAT)\n', (18247, 18281), False, 'import datetime\n'), ((18513, 18529), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (18527, 18529), False, 'from django.contrib.auth import get_user_model\n'), ((15883, 15943), 'datetime.datetime.strftime', 'datetime.datetime.strftime', (['value', 'DATE_SERIALIZATION_FORMAT'], {}), '(value, DATE_SERIALIZATION_FORMAT)\n', (15909, 15943), False, 'import datetime\n'), ((16249, 16265), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (16263, 16265), False, 'from django.contrib.auth import get_user_model\n'), ((18453, 18469), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (18467, 18469), False, 'from django.contrib.auth import get_user_model\n'), ((22356, 22384), 'users.models.User.objects.get', 'User.objects.get', ([], {'uuid': 'value'}), '(uuid=value)\n', (22372, 22384), False, 'from users.models import User, PRIVILEGE_LEVELS\n'), ((22110, 22128), 'projects.helpers.get_ad_user', 'get_ad_user', (['value'], {}), '(value)\n', (22121, 22128), False, 'from projects.helpers import get_ad_user\n')] |
from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union
import attr
from ..models.billing_invoice import BillingInvoice
from ..types import UNSET, Unset
from ..util.serialization import is_not_none
T = TypeVar("T", bound="ListAccountBillingInvoicesResponse")
@attr.s(auto_attribs=True)
class ListAccountBillingInvoicesResponse:
"""
Attributes:
billing_invoices (List[BillingInvoice]):
next_page_token (Union[Unset, str]):
"""
billing_invoices: List[BillingInvoice]
next_page_token: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self, pick_by_predicate: Optional[Callable[[Any], bool]] = is_not_none) -> Dict[str, Any]:
billing_invoices = []
for billing_invoices_item_data in self.billing_invoices:
billing_invoices_item = billing_invoices_item_data.to_dict()
billing_invoices.append(billing_invoices_item)
next_page_token = self.next_page_token
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
{
"billingInvoices": billing_invoices,
}
)
if next_page_token is not UNSET:
field_dict["nextPageToken"] = next_page_token
field_dict = {k: v for k, v in field_dict.items() if v != UNSET}
if pick_by_predicate is not None:
field_dict = {k: v for k, v in field_dict.items() if pick_by_predicate(v)}
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
billing_invoices = []
_billing_invoices = d.pop("billingInvoices")
for billing_invoices_item_data in _billing_invoices:
billing_invoices_item = BillingInvoice.from_dict(billing_invoices_item_data)
billing_invoices.append(billing_invoices_item)
next_page_token = d.pop("nextPageToken", UNSET)
list_account_billing_invoices_response = cls(
billing_invoices=billing_invoices,
next_page_token=next_page_token,
)
list_account_billing_invoices_response.additional_properties = d
return list_account_billing_invoices_response
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties
| [
"attr.s",
"attr.ib",
"typing.TypeVar"
] | [((226, 282), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': '"""ListAccountBillingInvoicesResponse"""'}), "('T', bound='ListAccountBillingInvoicesResponse')\n", (233, 282), False, 'from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union\n'), ((286, 311), 'attr.s', 'attr.s', ([], {'auto_attribs': '(True)'}), '(auto_attribs=True)\n', (292, 311), False, 'import attr\n'), ((615, 648), 'attr.ib', 'attr.ib', ([], {'init': '(False)', 'factory': 'dict'}), '(init=False, factory=dict)\n', (622, 648), False, 'import attr\n')] |
import requests
from npt import log
# from npt import query
from npt import datasets
# Base URL of the USGS ODE (Orbital Data Explorer) REST interface.
API_URL = 'https://oderest.rsl.wustl.edu/live2'

# Per-instrument map: logical product-type key -> (metadata field, value)
# pair used to pick the matching file out of an ODE product's file listing
# (see `ODE.parse_products` / `find_product_file`).
# NOTE: 'ctx' defines no 'product_label' entry, so label lookups for CTX
# fall back to the product image URL.
DESCRIPTORS = {
    'ctx': {
        'product_image': ('Description','PRODUCT DATA FILE WITH LABEL'),
        'browse_image': ('Description','BROWSE IMAGE'),
        'browse_thumbnail': ('Description','THUMBNAIL IMAGE')
    },
    'hirise': {
        'product_image': ('Description', 'PRODUCT DATA FILE'),
        'product_label': ('Description', 'PRODUCT LABEL FILE'),
        'browse_image': ('Description', 'BROWSE'),
        'browse_thumbnail': ('Description', 'THUMBNAIL')
    },
    'hrsc': {
        'product_image': ('Description', 'PRODUCT DATA FILE'),
        'product_label': ('Description', 'PRODUCT LABEL FILE'),
        'browse_image': ('Description', 'BROWSE'),
        'browse_thumbnail': ('Description', 'THUMBNAIL')
    }
}

# Identifier of this backend in the local `datasets` database
# (used by `ODE.list_datasets`).
DB_ID = 'usgs_ode'
# class ODE(query.Query):
class ODE:
    """Client for bounding-box product queries against the USGS ODE REST API.

    An instance holds one dataset selection (target body, mission host,
    instrument, product type) and caches the raw JSON payload of the most
    recent bounding-box query in ``_result``.
    """

    # Raw JSON payload of the last `query_bbox` call (None until queried).
    _result = None

    def __init__(self, target, mission, instrument, product_type):
        """Bind this client to one dataset.

        Args:
            target: target body (e.g. 'mars').
            mission: mission/host identifier (e.g. 'mro').
            instrument: instrument identifier; must be a key of DESCRIPTORS
                for `parse_products` to work (e.g. 'ctx', 'hirise', 'hrsc').
            product_type: product type identifier (e.g. 'rdrv11').
        """
        super().__init__()
        self.set_dataset(target, mission, instrument, product_type)

    def list_datasets(self):
        """Return the rows registered for this backend in the local datasets DB.

        Bug fix: the query result was previously assigned but never
        returned, so the method always returned None.
        """
        # return datasets.ode.list()
        _datasets = datasets.db.query("""
        SELECT * FROM datasets WHERE db_id == '{db_id}'
        """.format(db_id=DB_ID))
        return _datasets

    def set_dataset(self, target, host, instr, ptype, dataset=None):
        """Define the dataset this client queries.

        Args:
            target: target body (e.g. 'mars').
            host: mission/host identifier (e.g. 'mro').
            instr: instrument identifier; should be a key of DESCRIPTORS.
            ptype: product type identifier.
            dataset: if given, skips the non-empty check on the other
                arguments. NOTE(review): the value itself is currently
                unused -- the attributes are always set from the other
                four arguments.

        Returns:
            ODE: self, to allow chaining.
        """
        if dataset is None:
            msg = "Either set 'dataset' or all the others."
            assert all([target, host, instr, ptype]), msg
        self.host = host
        self.instr = instr
        self.ptype = ptype
        self.target = target
        return self

    def query_bbox(self, bbox, contains=False):
        """Query ODE for products intersecting a lat/lon bounding box.

        Args:
            bbox: dict with keys 'minlat', 'maxlat', 'westlon', 'eastlon',
                e.g. {'minlat': -0.5, 'maxlat': 0.5,
                      'westlon': 359.5, 'eastlon': 0.5}.
            contains: forwarded to `request_products`; presumably restricts
                results to products fully inside the box -- TODO confirm.

        Returns:
            The raw HTTP response object; the parsed JSON is also cached
            in `self._result`.
        """
        assert all(k in bbox for k in ('minlat', 'maxlat', 'westlon', 'eastlon')), (
            "Expected 'bbox' with keys: 'minlat','maxlat','westlon','eastlon'"
        )
        req = request_products(bbox, self.target, self.host, self.instr,
                               self.ptype, contains=contains)
        result = req.json()
        self._result = result
        # The API wraps its payload under 'ODEResults'.
        status = result['ODEResults']['Status']
        if status.lower() != 'success':
            print('oops, request failed. check `result`')
        return req

    def count(self):
        """Return the product count of the last query.

        Returns:
            None if no query has been made yet, the cached 'Count' value
            otherwise, or 0 when it cannot be read.
        """
        if self._result is None:
            return None
        try:
            # NOTE(review): `query_bbox` shows the payload nested under
            # 'ODEResults'; a top-level 'Count' key may never exist -- verify.
            return self._result['Count']
        except (KeyError, TypeError):
            # Was a bare `except:`; narrowed to the lookup failures only.
            return 0

    def read_products(self, request):
        """Extract product records from a query response.

        Delegates to the module-level `read_products` helper.
        """
        products = read_products(request)
        return products

    def parse_products(self, products, schema):
        """Flatten raw ODE product records into plain dictionaries.

        Args:
            products: list of raw product records (from `read_products`).
            schema: currently unused; kept for interface compatibility.

        Returns:
            List of dicts with the product metadata plus 'geometry',
            'image_url' and 'label_url' keys, or None when `products`
            is empty.
        """
        if not products:
            print("No products found")
            return None
        products_output = []
        # The original `enumerate` index was unused; dropped.
        for product in products:
            _meta = readout_product_meta(product)
            _files = readout_product_files(product)
            _fprint = readout_product_footprint(product)
            _pfile = find_product_file(product_files=_files,
                                        product_type='product_image',
                                        descriptors=DESCRIPTORS[self.instr])
            _pfile = _pfile['URL']
            try:
                _lfile = find_product_file(product_files=_files,
                                            product_type='product_label',
                                            descriptors=DESCRIPTORS[self.instr])
                _lfile = _lfile['URL']
            except KeyError:
                # No separate label descriptor/file (e.g. 'ctx' defines
                # no 'product_label' entry): the image URL doubles as label.
                _lfile = _pfile
            _dout = _meta
            _dout['geometry'] = _fprint
            _dout['image_url'] = _pfile
            _dout['label_url'] = _lfile
            products_output.append(_dout)
        print("{} products found".format(len(products_output)))
        return products_output
# ODEQuery.read_products
def read_products(request):
    """Extract the product list from an ODE query response.

    Returns the list of product dicts, or None when the HTTP request
    failed, the ODE status is not 'success', or no products are present.
    """
    if request.status_code != 200:
        return None
    payload = request.json()['ODEResults']
    if payload['Status'].lower() != 'success':
        return None
    try:
        products = payload['Products']['Product']
        assert isinstance(products, list), "Was expecting 'list', got '{}' instead".format(type(products))
    except:
        log.info("No products were found")
        products = None
    return products
# USED by 'query_bbox'
def request_products(bbox, target=None, host=None, instr=None, ptype=None, contains=False):
    """Issue an ODE 'product' query for the given bounding box.

    bbox keys (degrees):
        'minlat'/'maxlat' in [-65:65], 'westlon'/'eastlon' in [0:360].
    'ptype' (e.g. "rdrv11") is only meaningful when 'instr' is also
    given (e.g. "hirise").  With contains=True only products fully
    inside the bbox are requested ('loc=o'); otherwise intersecting
    ones ('loc=f').

    Returns the raw HTTP response (JSON body).
    """
    params = {
        'query': 'product',
        'results': 'fmpc',
        'output': 'JSON',
        'loc': 'f',
        'minlat': bbox['minlat'],
        'maxlat': bbox['maxlat'],
        'westlon': bbox['westlon'],
        'eastlon': bbox['eastlon'],
    }
    # Optional dataset filters, only sent when truthy.
    for key, value in (('target', target), ('ihid', host),
                       ('iid', instr), ('pt', ptype)):
        if value:
            params[key] = value
    if contains:
        params['loc'] = 'o'
    #payload.update({'pretty':True})
    return requests.get(API_URL, params=params)
# USED by 'parse_products'
def readout_product_files(product_json):
    """Return the list of 'Product_file' entries of an ODE product record."""
    files_container = product_json['Product_files']
    return files_container['Product_file']
# USED by 'parse_products'
def readout_product_footprint(product_json):
    """Return the footprint geometry of an ODE product record.

    The 'Footprint_geometry' and 'Footprint_C0_geometry' fields may
    contain GEOMETRYCOLLECTION objects when the footprint crosses the
    meridian in the "c180"/"c0" frames, so the GL-frame geometry is
    used instead.
    """
    return product_json['Footprint_GL_geometry']
# USED by 'parse_products'
def readout_product_meta(product_json):
    """Collect identification, geometry and timing metadata for one
    ODE product record into a flat dict."""
    meta = {
        'id': product_json['pdsid'],      # e.g. ESP_011712_1820_COLOR
        'mission': product_json['ihid'],  # e.g. MRO
        'inst': product_json['iid'],      # e.g. HIRISE
        'type': product_json['pt'],       # e.g. RDRV11
    }
    passthrough = (
        'Target_name',
        'Footprints_cross_meridian',
        'Map_scale',
        'Center_latitude',
        'Center_longitude',
        'Easternmost_longitude',
        'Westernmost_longitude',
        'Minimum_latitude',
        'Maximum_latitude',
        'Emission_angle',
        'Incidence_angle',
        'Phase_angle',
        'Solar_longitude',
        'Observation_time',
        'Product_creation_time',
        'UTC_start_time',
        'UTC_stop_time',
    )
    meta.update((key, product_json[key]) for key in passthrough)
    return meta
# USED by 'parse_products'
def find_product_file(product_files, product_type, descriptors=DESCRIPTORS):
    """Pick the single file entry whose descriptor matches 'product_type'.

    'descriptors[product_type]' gives a (key, value) pair; a trailing
    '*' in the value switches from equality to substring matching.
    Raises AssertionError unless exactly one entry matches.
    """
    key, pattern = descriptors[product_type]
    wildcard = pattern.strip()[-1] == '*'
    token = pattern[:-1].strip()
    if wildcard:
        matches = [pf for pf in product_files if token in pf[key]]
    else:
        matches = [pf for pf in product_files if pf[key] == pattern]
    multiple_matches = "I was expecting only one Product matching ptype '{}' bu got '{}'."
    assert len(matches) == 1, multiple_matches.format(product_type, len(matches))
    return matches[0]
# USED by 'download.get_product'
def request_product(PRODUCTID, api_endpoint):
    """Query ODE for a single product by its PDS identifier.

    Returns the raw HTTP response (JSON body with footprint, metadata
    and product-file info).
    """
    params = {
        'query': 'product',
        'results': 'fmp',
        'output': 'JSON',
        'productid': PRODUCTID,
    }
    #payload.update({'pretty':True})
    return requests.get(api_endpoint, params=params)
| [
"requests.get",
"npt.log.info"
] | [((5526, 5568), 'requests.get', 'requests.get', (['api_endpoint'], {'params': 'payload'}), '(api_endpoint, params=payload)\n', (5538, 5568), False, 'import requests\n'), ((8060, 8102), 'requests.get', 'requests.get', (['api_endpoint'], {'params': 'payload'}), '(api_endpoint, params=payload)\n', (8072, 8102), False, 'import requests\n'), ((4507, 4541), 'npt.log.info', 'log.info', (['"""No products were found"""'], {}), "('No products were found')\n", (4515, 4541), False, 'from npt import log\n')] |
from typing import (List,
Tuple)
from hypothesis import given
from tests.port_tests.hints import (PortedOperation,
PortedSweepEvent)
from . import strategies
@given(strategies.non_empty_sweep_events_lists_with_indices_and_booleans_lists)
def test_basic(events_with_position_and_processed
               : Tuple[List[PortedSweepEvent], int, List[bool]]) -> None:
    """Property: ``to_next_position`` always returns an ``int``."""
    events, position, processed = events_with_position_and_processed
    result = PortedOperation.to_next_position(position, events, processed)
    assert isinstance(result, int)
@given(strategies.non_empty_sweep_events_lists_with_indices_and_booleans_lists)
def test_properties(events_with_position_and_processed
                    : Tuple[List[PortedSweepEvent], int, List[bool]]) -> None:
    """Property: the returned position is a valid index into ``events``."""
    events, position, processed = events_with_position_and_processed
    result = PortedOperation.to_next_position(position, events, processed)
    assert result in range(len(events))
| [
"hypothesis.given",
"tests.port_tests.hints.PortedOperation.to_next_position"
] | [((219, 297), 'hypothesis.given', 'given', (['strategies.non_empty_sweep_events_lists_with_indices_and_booleans_lists'], {}), '(strategies.non_empty_sweep_events_lists_with_indices_and_booleans_lists)\n', (224, 297), False, 'from hypothesis import given\n'), ((606, 684), 'hypothesis.given', 'given', (['strategies.non_empty_sweep_events_lists_with_indices_and_booleans_lists'], {}), '(strategies.non_empty_sweep_events_lists_with_indices_and_booleans_lists)\n', (611, 684), False, 'from hypothesis import given\n'), ((505, 566), 'tests.port_tests.hints.PortedOperation.to_next_position', 'PortedOperation.to_next_position', (['position', 'events', 'processed'], {}), '(position, events, processed)\n', (537, 566), False, 'from tests.port_tests.hints import PortedOperation, PortedSweepEvent\n'), ((902, 963), 'tests.port_tests.hints.PortedOperation.to_next_position', 'PortedOperation.to_next_position', (['position', 'events', 'processed'], {}), '(position, events, processed)\n', (934, 963), False, 'from tests.port_tests.hints import PortedOperation, PortedSweepEvent\n')] |
import unittest
import numpy as np
from revpy import fare_transformation
class FareTransformationTest(unittest.TestCase):
    """Unit tests for ``revpy.fare_transformation``."""
    def setUp(self):
        # example data from page 13 of research paper
        # "Optimization of Mixed Fare Structures: Theory and Applications"
        # by <NAME> al. (2010)
        self.fares = np.array([1200, 1000, 800, 600, 400, 200])
        self.demands = np.array([31.2, 10.9, 14.8, 19.9, 26.9, 36.3])
    def test_faretrafo_zero_demand(self):
        # With no demand only the highest fare survives; cheaper fare
        # classes come back as NaN (inefficient).
        demands = np.zeros(self.fares.shape)
        adjusted_fares, adjusted_demand = \
            fare_transformation.calc_fare_transformation(self.fares, demands)
        np.testing.assert_equal([1200, np.nan, np.nan, np.nan, np.nan, np.nan],
                                adjusted_fares)
        np.testing.assert_equal([0, np.nan, np.nan, np.nan, np.nan, np.nan],
                                adjusted_demand)
    def test_example1(self):
        # test example from above mentioned paper
        adjusted_fares, adjusted_demand = \
            fare_transformation.calc_fare_transformation(self.fares,
                                                         self.demands)
        np.testing.assert_almost_equal(adjusted_fares, [1200, 427, 231, 28,
                                                        np.nan, np.nan], 0)
    def test_example2(self):
        # example containing some zero demands
        demands = np.array([0, 15, 0, 30, 2, 60])
        adjusted_fares, adjusted_demand = \
            fare_transformation.calc_fare_transformation(self.fares, demands)
        np.testing.assert_almost_equal(adjusted_fares, [1200, 1000, np.nan,
                                                        400, np.nan, np.nan, ])
    def test_efficient_strategies(self):
        # Zero-demand fare classes (indices 2 and 3) must be excluded
        # from the efficient frontier.
        fares = np.array([69.5, 59.5, 48.5, 37.5, 29.])
        demands = np.array([3, 1, 0, 0, 10])
        Q = demands.cumsum()
        TR = Q*fares
        __, __, __, __, eff_indices = \
            fare_transformation.efficient_strategies(Q, TR, fares[0])
        self.assertEqual(eff_indices.tolist(), [0, 1, 4])
| [
"revpy.fare_transformation.calc_fare_transformation",
"revpy.fare_transformation.efficient_strategies",
"numpy.testing.assert_equal",
"numpy.array",
"numpy.zeros",
"numpy.testing.assert_almost_equal"
] | [((329, 371), 'numpy.array', 'np.array', (['[1200, 1000, 800, 600, 400, 200]'], {}), '([1200, 1000, 800, 600, 400, 200])\n', (337, 371), True, 'import numpy as np\n'), ((395, 441), 'numpy.array', 'np.array', (['[31.2, 10.9, 14.8, 19.9, 26.9, 36.3]'], {}), '([31.2, 10.9, 14.8, 19.9, 26.9, 36.3])\n', (403, 441), True, 'import numpy as np\n'), ((503, 529), 'numpy.zeros', 'np.zeros', (['self.fares.shape'], {}), '(self.fares.shape)\n', (511, 529), True, 'import numpy as np\n'), ((587, 652), 'revpy.fare_transformation.calc_fare_transformation', 'fare_transformation.calc_fare_transformation', (['self.fares', 'demands'], {}), '(self.fares, demands)\n', (631, 652), False, 'from revpy import fare_transformation\n'), ((662, 753), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['[1200, np.nan, np.nan, np.nan, np.nan, np.nan]', 'adjusted_fares'], {}), '([1200, np.nan, np.nan, np.nan, np.nan, np.nan],\n adjusted_fares)\n', (685, 753), True, 'import numpy as np\n'), ((790, 879), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['[0, np.nan, np.nan, np.nan, np.nan, np.nan]', 'adjusted_demand'], {}), '([0, np.nan, np.nan, np.nan, np.nan, np.nan],\n adjusted_demand)\n', (813, 879), True, 'import numpy as np\n'), ((1045, 1115), 'revpy.fare_transformation.calc_fare_transformation', 'fare_transformation.calc_fare_transformation', (['self.fares', 'self.demands'], {}), '(self.fares, self.demands)\n', (1089, 1115), False, 'from revpy import fare_transformation\n'), ((1182, 1273), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['adjusted_fares', '[1200, 427, 231, 28, np.nan, np.nan]', '(0)'], {}), '(adjusted_fares, [1200, 427, 231, 28, np.nan,\n np.nan], 0)\n', (1212, 1273), True, 'import numpy as np\n'), ((1421, 1452), 'numpy.array', 'np.array', (['[0, 15, 0, 30, 2, 60]'], {}), '([0, 15, 0, 30, 2, 60])\n', (1429, 1452), True, 'import numpy as np\n'), ((1510, 1575), 'revpy.fare_transformation.calc_fare_transformation', 
'fare_transformation.calc_fare_transformation', (['self.fares', 'demands'], {}), '(self.fares, demands)\n', (1554, 1575), False, 'from revpy import fare_transformation\n'), ((1585, 1679), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['adjusted_fares', '[1200, 1000, np.nan, 400, np.nan, np.nan]'], {}), '(adjusted_fares, [1200, 1000, np.nan, 400, np\n .nan, np.nan])\n', (1615, 1679), True, 'import numpy as np\n'), ((1791, 1831), 'numpy.array', 'np.array', (['[69.5, 59.5, 48.5, 37.5, 29.0]'], {}), '([69.5, 59.5, 48.5, 37.5, 29.0])\n', (1799, 1831), True, 'import numpy as np\n'), ((1853, 1879), 'numpy.array', 'np.array', (['[3, 1, 0, 0, 10]'], {}), '([3, 1, 0, 0, 10])\n', (1861, 1879), True, 'import numpy as np\n'), ((1983, 2040), 'revpy.fare_transformation.efficient_strategies', 'fare_transformation.efficient_strategies', (['Q', 'TR', 'fares[0]'], {}), '(Q, TR, fares[0])\n', (2023, 2040), False, 'from revpy import fare_transformation\n')] |
#
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#!/usr/bin/env python
"""Main test file for SSM document."""
import ConfigParser
import glob
import logging
import os
import sys
import unittest
import boto3
import demjson
DOC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
REPO_ROOT = os.path.dirname(DOC_DIR)
# Import shared testing code
sys.path.append(
os.path.join(
REPO_ROOT,
'Testing'
)
)
import ssm_testing # noqa pylint: disable=import-error,wrong-import-position
# Test configuration: defaults.cfg provides baseline values; local.cfg
# (optional, ignored if missing) lets a developer override them.
CONFIG = ConfigParser.ConfigParser()
CONFIG.readfp(open(os.path.join(REPO_ROOT, 'Testing', 'defaults.cfg')))
CONFIG.read([os.path.join(REPO_ROOT, 'Testing', 'local.cfg')])
REGION = CONFIG.get('general', 'region')
PREFIX = CONFIG.get('general', 'resource_prefix')  # prefix for all test resources
AMIID = CONFIG.get('linux', 'ami')
SERVICE_ROLE_NAME = CONFIG.get('general', 'automation_service_role_name')
INSTANCE_TYPE = CONFIG.get('linux', 'instance_type')
# Names of the SSM document and CloudFormation stack created by this test.
SSM_DOC_NAME = PREFIX + 'automation-stopinstance-with-approval'
INSTANCE_CFN_STACK_NAME = PREFIX + 'automation-stopinstance-with-approval'
# Map the configured log level onto the root logger.
if CONFIG.get('general', 'log_level') == 'warn':
    logging.basicConfig(level=logging.WARN)
elif CONFIG.get('general', 'log_level') == 'info':
    logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)
class TestCase(unittest.TestCase):
    """Main test class for SSM document.

    Integration-tests the 'stop EC2 instance with approval' automation
    document against live AWS resources (SSM, EC2, CloudFormation).
    """
    @staticmethod
    def test_json_linting():
        """Verify correct json syntax."""
        for i in glob.glob(os.path.join(DOC_DIR, 'Documents', '*.json')):
            assert demjson.jsonlint('jsonlint').main([i]) == 0, (
                'JSON documents are not well formed')
    @staticmethod
    def test_document():
        """Verify correct deployment and use of document."""
        cfn_client = boto3.client('cloudformation', region_name=REGION)
        ec2_client = boto3.client('ec2', region_name=REGION)
        ssm_client = boto3.client('ssm', region_name=REGION)
        # Wrapper that uploads/executes the automation document under test.
        ssm_doc = ssm_testing.SSMTester(
            ssm_client=ssm_client,
            doc_filename=os.path.join(DOC_DIR,
                                      'Documents',
                                      'aws-StopEC2InstanceWithApproval.json'),
            doc_name=SSM_DOC_NAME,
            doc_type='Automation'
        )
        # CloudFormation stack providing two test instances + SNS topic.
        test_cf_stack = ssm_testing.CFNTester(
            cfn_client=cfn_client,
            template_filename=os.path.join(DOC_DIR,
                                           'Tests',
                                           'CloudFormationTemplates',
                                           'TwoInstancesWithSNS.yml'),
            stack_name=INSTANCE_CFN_STACK_NAME
        )
        automation_role = ssm_doc.get_automation_role(
            boto3.client('sts', region_name=REGION),
            boto3.client('iam', region_name=REGION),
            SERVICE_ROLE_NAME
        )
        LOGGER.info('Starting 2 instances for testing')
        test_cf_stack.create_stack([
            {
                'ParameterKey': 'AMI',
                'ParameterValue': AMIID
            },
            {
                'ParameterKey': 'INSTANCETYPE',
                'ParameterValue': INSTANCE_TYPE
            }
        ])
        # Everything below is wrapped so the stack/document are always
        # torn down, even if an assertion fails.
        try:
            LOGGER.info('Creating automation document')
            assert ssm_doc.create_document() == 'Active', ('Document not '
                                                          'created '
                                                          'successfully')
            ec2_instance_ids = [
                test_cf_stack.stack_outputs['Instance0Id'],
                test_cf_stack.stack_outputs['Instance1Id']
            ]
            # Current caller doubles as the approver for the automation.
            user_arn = boto3.client('sts', region_name=REGION).get_caller_identity().get('Arn')
            sns_topic_arn = test_cf_stack.stack_outputs['SNSTopicArn']
            LOGGER.info("User ARN for approval: " + user_arn)
            LOGGER.info("SNS Topic ARN for approval: " + sns_topic_arn)
            LOGGER.info('Verifying all instances are running')
            describe_res = ec2_client.describe_instance_status(
                InstanceIds=ec2_instance_ids,
                IncludeAllInstances=True
            )
            assert all(d['InstanceState']['Name'] == 'running' for d in describe_res['InstanceStatuses']) is True, (  # noqa pylint: disable=line-too-long
                'Instances not started')
            LOGGER.info('Running automation to stop multiple instances '
                        '(using defined role)')
            ssm_doc_params = {'InstanceId': ec2_instance_ids,
                              'AutomationAssumeRole': [automation_role],
                              'Approvers': [user_arn],
                              'SNSTopicArn': [sns_topic_arn]}
            execution = ssm_doc.execute_automation(params=ssm_doc_params)
            LOGGER.info('Verifying automation executions have concluded '
                        'successfully')
            # since this automation requires approval to continue, the correct status at this point should be 'Waiting'
            assert ssm_doc.automation_execution_status(ssm_client, execution, False) == 'Waiting', \
                'Automation not waiting for approval'
            LOGGER.info('Approving continuation of execution')
            ssm_client.send_automation_signal(
                AutomationExecutionId=execution,
                SignalType='Approve'
            )
            # this will block until the automation is back in a running state
            assert ssm_doc.automation_execution_status(ssm_client, execution) == 'Success', \
                'Automation step unsuccessful'
            LOGGER.info('Verifying all instances are stopped')
            describe_res = ec2_client.describe_instance_status(
                InstanceIds=ec2_instance_ids,
                IncludeAllInstances=True
            )
            assert all(d['InstanceState']['Name'] == 'stopped' for d in describe_res['InstanceStatuses']) is True, (  # noqa pylint: disable=line-too-long
                'Instances not stopped')
        finally:
            # Always clean up AWS resources created for the test.
            test_cf_stack.delete_stack()
            ssm_doc.destroy()
# Allow running this module directly; executes all TestCase tests.
if __name__ == '__main__':
    unittest.main()
| [
"logging.getLogger",
"logging.basicConfig",
"boto3.client",
"os.path.join",
"demjson.jsonlint",
"ConfigParser.ConfigParser",
"os.path.realpath",
"os.path.dirname",
"unittest.main"
] | [((1220, 1244), 'os.path.dirname', 'os.path.dirname', (['DOC_DIR'], {}), '(DOC_DIR)\n', (1235, 1244), False, 'import os\n'), ((1443, 1470), 'ConfigParser.ConfigParser', 'ConfigParser.ConfigParser', ([], {}), '()\n', (1468, 1470), False, 'import ConfigParser\n'), ((2197, 2224), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2214, 2224), False, 'import logging\n'), ((1296, 1330), 'os.path.join', 'os.path.join', (['REPO_ROOT', '"""Testing"""'], {}), "(REPO_ROOT, 'Testing')\n", (1308, 1330), False, 'import os\n'), ((2053, 2092), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.WARN'}), '(level=logging.WARN)\n', (2072, 2092), False, 'import logging\n'), ((7148, 7163), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7161, 7163), False, 'import unittest\n'), ((1179, 1205), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1195, 1205), False, 'import os\n'), ((1490, 1540), 'os.path.join', 'os.path.join', (['REPO_ROOT', '"""Testing"""', '"""defaults.cfg"""'], {}), "(REPO_ROOT, 'Testing', 'defaults.cfg')\n", (1502, 1540), False, 'import os\n'), ((1556, 1603), 'os.path.join', 'os.path.join', (['REPO_ROOT', '"""Testing"""', '"""local.cfg"""'], {}), "(REPO_ROOT, 'Testing', 'local.cfg')\n", (1568, 1603), False, 'import os\n'), ((2148, 2187), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (2167, 2187), False, 'import logging\n'), ((2716, 2766), 'boto3.client', 'boto3.client', (['"""cloudformation"""'], {'region_name': 'REGION'}), "('cloudformation', region_name=REGION)\n", (2728, 2766), False, 'import boto3\n'), ((2788, 2827), 'boto3.client', 'boto3.client', (['"""ec2"""'], {'region_name': 'REGION'}), "('ec2', region_name=REGION)\n", (2800, 2827), False, 'import boto3\n'), ((2849, 2888), 'boto3.client', 'boto3.client', (['"""ssm"""'], {'region_name': 'REGION'}), "('ssm', region_name=REGION)\n", (2861, 2888), False, 'import boto3\n'), 
((2423, 2467), 'os.path.join', 'os.path.join', (['DOC_DIR', '"""Documents"""', '"""*.json"""'], {}), "(DOC_DIR, 'Documents', '*.json')\n", (2435, 2467), False, 'import os\n'), ((3675, 3714), 'boto3.client', 'boto3.client', (['"""sts"""'], {'region_name': 'REGION'}), "('sts', region_name=REGION)\n", (3687, 3714), False, 'import boto3\n'), ((3728, 3767), 'boto3.client', 'boto3.client', (['"""iam"""'], {'region_name': 'REGION'}), "('iam', region_name=REGION)\n", (3740, 3767), False, 'import boto3\n'), ((2991, 3065), 'os.path.join', 'os.path.join', (['DOC_DIR', '"""Documents"""', '"""aws-StopEC2InstanceWithApproval.json"""'], {}), "(DOC_DIR, 'Documents', 'aws-StopEC2InstanceWithApproval.json')\n", (3003, 3065), False, 'import os\n'), ((3335, 3423), 'os.path.join', 'os.path.join', (['DOC_DIR', '"""Tests"""', '"""CloudFormationTemplates"""', '"""TwoInstancesWithSNS.yml"""'], {}), "(DOC_DIR, 'Tests', 'CloudFormationTemplates',\n 'TwoInstancesWithSNS.yml')\n", (3347, 3423), False, 'import os\n'), ((2489, 2517), 'demjson.jsonlint', 'demjson.jsonlint', (['"""jsonlint"""'], {}), "('jsonlint')\n", (2505, 2517), False, 'import demjson\n'), ((4625, 4664), 'boto3.client', 'boto3.client', (['"""sts"""'], {'region_name': 'REGION'}), "('sts', region_name=REGION)\n", (4637, 4664), False, 'import boto3\n')] |
#!/usr/bin/env python
"""
Example of nested autocompletion.
"""
from prompt_toolkit.completion import NestedCompleter
from prompt_toolkit.shortcuts import PromptSession as Prompt
# Completion tree: each key is a token, nested dicts are sub-commands,
# None means the token has no further completions.
completer = NestedCompleter.from_nested_dict(
    {
        "show":
            {
                "version": None,
                "clock": None,
                "ip":
                    {
                        "interface":
                            {
                                "brief": None
                            }
                    }
            },
        "exit": None,
    }
)
session = Prompt(completer=completer)
def main():
    """Prompt for one command with nested completion and echo it back."""
    text = session.prompt("Type a command: ", completer=completer)
    print("You said: %s" % text)
if __name__ == "__main__":
    main()
| [
"prompt_toolkit.shortcuts.PromptSession",
"prompt_toolkit.completion.NestedCompleter.from_nested_dict"
] | [((192, 324), 'prompt_toolkit.completion.NestedCompleter.from_nested_dict', 'NestedCompleter.from_nested_dict', (["{'show': {'version': None, 'clock': None, 'ip': {'interface': {'brief':\n None}}}, 'exit': None}"], {}), "({'show': {'version': None, 'clock': None,\n 'ip': {'interface': {'brief': None}}}, 'exit': None})\n", (224, 324), False, 'from prompt_toolkit.completion import NestedCompleter\n'), ((531, 558), 'prompt_toolkit.shortcuts.PromptSession', 'Prompt', ([], {'completer': 'completer'}), '(completer=completer)\n', (537, 558), True, 'from prompt_toolkit.shortcuts import PromptSession as Prompt\n')] |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib.externals import six
from matplotlib.tri import Triangulation
import _tri as _tri
import numpy as np
class TriFinder(object):
    """
    Abstract base class for triangle-locating helpers.

    A TriFinder maps (x, y) point coordinates onto the triangles of a
    Triangulation containing them.  Subclasses implement
    ``__call__(x, y)`` where *x* and *y* are array_like coordinates of
    identical shape.

    Instances should normally be obtained through
    :func:`matplotlib.tri.Triangulation.get_trifinder` rather than by
    instantiating a derived class directly.
    """
    def __init__(self, triangulation):
        # Reject anything that is not a proper Triangulation up front.
        if isinstance(triangulation, Triangulation):
            self._triangulation = triangulation
        else:
            raise ValueError('Expected a Triangulation object')
class TrapezoidMapTriFinder(TriFinder):
    """
    :class:`~matplotlib.tri.TriFinder` class implemented using the trapezoid
    map algorithm from the book "Computational Geometry, Algorithms and
    Applications", second edition, by <NAME>, <NAME>, <NAME>
    and <NAME>.
    The triangulation must be valid, i.e. it must not have duplicate points,
    triangles formed from colinear points, or overlapping triangles.  The
    algorithm has some tolerance to triangles formed from colinear points, but
    this should not be relied upon.
    """
    def __init__(self, triangulation):
        TriFinder.__init__(self, triangulation)
        # All heavy lifting is delegated to the C++ implementation.
        self._cpp_trifinder = _tri.TrapezoidMapTriFinder(
            triangulation.get_cpp_triangulation())
        self._initialize()
    def __call__(self, x, y):
        """
        Return an array containing the indices of the triangles in which the
        specified x,y points lie, or -1 for points that do not lie within a
        triangle.
        *x*, *y* are array_like x and y coordinates of the same shape and any
        number of dimensions.
        Returns integer array with the same shape and *x* and *y*.
        """
        x = np.asarray(x, dtype=np.float64)
        y = np.asarray(y, dtype=np.float64)
        if x.shape != y.shape:
            raise ValueError("x and y must be array-like with the same shape")
        # C++ does the heavy lifting, and expects 1D arrays.
        indices = self._cpp_trifinder.find_many(x.ravel(), y.ravel())
        # Restore the caller's original array shape on the result.
        indices.shape = x.shape
        return indices
    def _get_tree_stats(self):
        """
        Return a python list containing the statistics about the node tree:
            0: number of nodes (tree size)
            1: number of unique nodes
            2: number of trapezoids (tree leaf nodes)
            3: number of unique trapezoids
            4: maximum parent count (max number of times a node is repeated in
               tree)
            5: maximum depth of tree (one more than the maximum number of
               comparisons needed to search through the tree)
            6: mean of all trapezoid depths (one more than the average number
               of comparisons needed to search through the tree)
        """
        return self._cpp_trifinder.get_tree_stats()
    def _initialize(self):
        """
        Initialize the underlying C++ object.  Can be called multiple times if,
        for example, the triangulation is modified.
        """
        self._cpp_trifinder.initialize()
    def _print_tree(self):
        """
        Print a text representation of the node tree, which is useful for
        debugging purposes.
        """
        self._cpp_trifinder.print_tree()
| [
"numpy.asarray"
] | [((2063, 2094), 'numpy.asarray', 'np.asarray', (['x'], {'dtype': 'np.float64'}), '(x, dtype=np.float64)\n', (2073, 2094), True, 'import numpy as np\n'), ((2107, 2138), 'numpy.asarray', 'np.asarray', (['y'], {'dtype': 'np.float64'}), '(y, dtype=np.float64)\n', (2117, 2138), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
"""Check a font family using Google Fonts QA tools.
Examples:
Check a local family against the same family hosted on Google Fonts:
`gftools qa -f [fonts.ttf] -gfb -a -o qa`
Check a local family against another local family and generate reports
for Font Diffenator only:
`gftools qa -f [fonts_a.ttf] -fb [fonts_b.ttf] --diffenator -o qa`
Check a local family against the same family hosted on Google Fonts and
generate reports for Diffbrowsers only:
`gftools qa -f [fonts.ttf] -gf --diffbrowsers -o qa
Compare a pull request against the same family hosted on Google Fonts:
`gftools qa -pr www.github.com/user/repo/pull/1 -gfb -a -o qa`
Compare a github folder of fonts against the same family hosted on Google
Fonts:
`gftools qa -gh www.github.com/user/repo/tree/fonts/ttf -gfb -a -o qa`
"""
from fontTools.ttLib import TTFont
import argparse
import shutil
import os
from glob import glob
import subprocess
import logging
from uuid import uuid4
import re
import requests
from io import BytesIO
import json
from zipfile import ZipFile
from gftools.utils import (
download_family_from_Google_Fonts,
download_files_in_github_pr,
download_files_in_github_dir,
download_file,
Google_Fonts_has_family,
load_Google_Fonts_api_key,
mkdir,
)
try:
from diffenator.diff import DiffFonts
from diffenator.font import DFont
from diffbrowsers.diffbrowsers import DiffBrowsers
from diffbrowsers.browsers import test_browsers
from diffbrowsers.utils import load_browserstack_credentials as bstack_creds
except ModuleNotFoundError:
raise ModuleNotFoundError(("gftools was installed without the QA "
"dependencies. To install the dependencies, see the ReadMe, "
"https://github.com/googlefonts/gftools#installation"))
__version__ = "2.1.3"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def load_browserstack_credentials():
    """Fetch the user's Browserstack credentials.

    Prefers the diffbrowsers credentials loader; falls back to the
    BSTACK_USERNAME / BSTACK_ACCESS_KEY environment variables.
    Returns a (username, access_key) pair (or the loader's value), or
    False when neither source is configured.
    """
    credentials = bstack_creds()
    if credentials:
        return credentials
    username = os.environ.get("BSTACK_USERNAME")
    access_key = os.environ.get("BSTACK_ACCESS_KEY")
    return (username, access_key) if username and access_key else False
class FontQA:
GFR_URL = "http://172.16.17.32/"
def __init__(self, fonts, fonts_before=None, out="out"):
self.fonts = fonts
self.fonts_before = fonts_before
self.instances = self._instances_in_fonts(self.fonts)
self.instances_before = self._instances_in_fonts(self.fonts_before)
self.matching_instances = self._matching_instances()
self._bstack_auth = load_browserstack_credentials()
self.out = out
def _instances_in_fonts(self, ttfonts):
"""Get all font instances from a collection of fonts.
This function works for both a static and variable font collections.
If a font is variable, it will retrieve the font's instances
using the fvar table. If a font is static, it will only return a
single instance by using the font's filename.
"""
if not ttfonts:
return None
results = {}
for ttfont in ttfonts:
if "fvar" in ttfont:
for instance in ttfont['fvar'].instances:
nameid = instance.subfamilyNameID
name = ttfont['name'].getName(nameid, 3, 1, 1033).toUnicode()
name = name.replace(" ", "")
results[name] = {
"coordinates": instance.coordinates,
"filename": ttfont.reader.file.name
}
else:
filename = os.path.basename(ttfont.reader.file.name)
name = filename.split("-")[1]
name = re.sub(".ttf|.otf", "", name)
results[name] = {
"coordinates": {"wght": ttfont['OS/2'].usWeightClass},
"filename": ttfont.reader.file.name
}
return results
def _matching_instances(self):
if not self.fonts_before:
logger.info(
"No regression checks possible since there are no previous fonts."
)
return None
shared = set(self.instances_before.keys()) & set(self.instances.keys())
new = set(self.instances.keys()) - set(self.instances_before.keys())
missing = set(self.instances_before.keys()) - set(self.instances.keys())
if new:
logger.warning("New fonts: {}".format(", ".join(new)))
if missing:
logger.warning("Missing fonts: {}".format(", ".join(missing)))
if not shared:
raise Exception(
(
"Cannot find matching fonts!\n"
"fonts: [{}]\nfonts_before: [{}]".format(
", ".join(set(self.instances.keys())),
", ".join(set(self.instances_before.keys()))
)
)
)
return shared
def diffenator(self, **kwargs):
logger.info("Running Diffenator")
dst = os.path.join(self.out, "Diffenator")
mkdir(dst)
for style in self.matching_instances:
font_before = DFont(self.instances_before[style]['filename'])
font_after = DFont(self.instances[style]['filename'])
out = os.path.join(dst, style)
if font_after.is_variable and not font_before.is_variable:
font_after.set_variations_from_static(font_before)
elif not font_after.is_variable and font_before.is_variable:
font_before.set_variations_from_static(font_after)
elif font_after.is_variable and font_before.is_variable:
coordinates = self.instances_before[style]['coordinates']
font_after.set_variations(coordinates)
font_before.set_variations(coordinates)
# TODO add settings
diff = DiffFonts(font_before, font_after, {"render_diffs": True})
diff.to_gifs(dst=out)
diff.to_txt(20, os.path.join(out, "report.txt"))
diff.to_md(20, os.path.join(out, "report.md"))
diff.to_html(20, os.path.join(out, "report.html"), image_dir=".")
@staticmethod
def chunkify(items, size):
return [items[i : i + size] for i in range(0, len(items), size)]
def diffbrowsers(self, **kwargs):
    """Test fonts on GFR regression and take screenshots using
    diffbrowsers. A browserstack account is required."""
    logger.info("Running Diffbrowsers")
    if not self._bstack_auth:
        logger.info("Skipping. No Browserstack credentials. "
                    "See https://github.com/googlefonts/"
                    "diffbrowsers#installation on how to add them.")
        return
    dst = os.path.join(self.out, "Diffbrowsers")
    mkdir(dst)
    browsers_to_test = test_browsers["vf_browsers"]
    # (style, before-path, after-path) for every style shared by both sets.
    fonts = [(k, self.instances_before[k]['filename'],
              self.instances[k]['filename']) for k in self.matching_instances]
    # Screenshot at most 4 styles per GFR session.
    font_groups = self.chunkify(sorted(fonts), 4)
    for group in font_groups:
        styles = [i[0] for i in group]
        dir_name = "_".join(styles)
        fonts_before = [i[1] for i in group]
        fonts_after = [i[2] for i in group]
        out = os.path.join(dst, dir_name)
        diff_browsers = DiffBrowsers(
            auth=self._bstack_auth,
            gfr_instance_url=self.GFR_URL,
            dst_dir=out,
            browsers=browsers_to_test,
        )
        diff_browsers.new_session(set(fonts_before), set(fonts_after))
        diff_browsers.diff_view("waterfall", styles=styles)
        info = os.path.join(out, "info.json")
        # NOTE(review): this open() handle is never closed — consider a
        # `with` block.
        json.dump(diff_browsers.stats, open(info, "w"))
        diff_browsers.diff_view("glyphs_all", pt=16, styles=styles)
def fontbakery(self):
    """Run the FontBakery check-googlefonts profile on the new fonts and
    write a markdown report to <out>/Fontbakery/report.md."""
    logger.info("Running Fontbakery")
    report_dir = os.path.join(self.out, "Fontbakery")
    mkdir(report_dir)
    cmd = ["fontbakery", "check-googlefonts", "-l", "WARN"]
    cmd.extend(font.reader.file.name for font in self.fonts)
    cmd.append("-C")
    cmd.extend(["--ghmarkdown", os.path.join(report_dir, "report.md")])
    subprocess.call(cmd)
def plot_glyphs(self):
    """Render every glyph of every font to png images under
    <out>/plot_glyphs; for variable fonts, one image per named instance."""
    logger.info("Running plot glyphs")
    out = os.path.join(self.out, "plot_glyphs")
    mkdir(out)
    fonts = [f.reader.file.name for f in self.fonts]
    for font in fonts:
        # Drop the 4-char extension (".ttf"/".otf") to get the base name.
        font_filename = os.path.basename(font)[:-4]
        dfont = DFont(font)
        if dfont.is_variable:
            for _, coords in dfont.instances_coordinates.items():
                dfont.set_variations(coords)
                img_out = os.path.join(
                    out,
                    "%s_%s.png"
                    % (font_filename, self._instance_coords_to_filename(coords)),
                )
                dfont.glyphs.to_png(img_out, limit=100000)
        else:
            img_out = os.path.join(out, font_filename + ".png")
            dfont.glyphs.to_png(dst=img_out)
def _instance_coords_to_filename(self, d):
name = ""
for k, v in d.items():
name += "{}_{}_".format(k, v)
return name[:-1]
def browser_previews(self, **kwargs):
    """Use GFR and diffbrowsers to take screenshots of how the fonts
    will look on different browsers. A Browserstack account is
    required."""
    logger.info("Running browser previews")
    if not self._bstack_auth:
        logger.info("Skipping. No Browserstack credentials. "
                    "See https://github.com/googlefonts/"
                    "diffbrowsers#installation on how to add them.")
        return
    out = os.path.join(self.out, "browser_previews")
    mkdir(out)
    browsers_to_test = test_browsers["vf_browsers"]
    # Screenshot at most 4 fonts per GFR session; name_groups and
    # font_groups stay aligned because both come from self.instances.
    font_groups = self.chunkify(list([i['filename'] for i in self.instances.values()]), 4)
    name_groups = self.chunkify(list(self.instances.keys()), 4)
    for name_group, font_group in zip(name_groups, font_groups):
        name = "_".join(sorted(name_group))
        diff_browsers = DiffBrowsers(
            auth=self._bstack_auth,
            gfr_instance_url=FontQA.GFR_URL,
            dst_dir=os.path.join(out, name),
            browsers=browsers_to_test,
            gfr_is_local=False,
        )
        # Same fonts on both sides of the session: we only want previews,
        # not an actual before/after diff.
        diff_browsers.new_session(font_group, font_group)
        diff_browsers.diff_view("waterfall", styles=name_group)
        diff_browsers.diff_view("glyphs_all", styles=name_group, pt=15)
def googlefonts_upgrade(self):
    """QA suite for a family that already exists on Google Fonts:
    FontBakery plus before/after regression checks."""
    for check in (self.fontbakery, self.diffenator, self.diffbrowsers):
        check()
def googlefonts_new(self):
    """QA suite for a family that is new to Google Fonts: FontBakery plus
    glyph plots and browser previews (no previous fonts to diff against)."""
    for check in (self.fontbakery, self.plot_glyphs, self.browser_previews):
        check()
def post_to_github(self, url):
    """Zip and post the check results as a comment to the github
    issue or pr."""
    # Archive the whole report dir and upload it to GF Regression so the
    # GitHub comment can link to it.
    report_zip = shutil.make_archive(self.out, "zip", self.out)
    uuid = str(uuid4())
    zip_url = self._post_media_to_gfr([report_zip], uuid)
    # url looks like https://github.com/<owner>/<repo>[/pull/<id>], so
    # indices 3 and 4 are owner and repo.
    url_split = url.split("/")
    repo_slug = "{}/{}".format(url_split[3], url_split[4])
    # NOTE(review): substring test — a url whose repo name merely contains
    # "pull" would also match; presumably urls here are canonical PR links.
    pull = url_split[-1] if "pull" in url else None
    # Prepend the FontBakery markdown report to the message when it exists.
    fontbakery_report = os.path.join(self.out, "Fontbakery", "report.md")
    if os.path.isfile(fontbakery_report):
        with open(fontbakery_report, "r") as fb:
            msg = "{}\n\n## Diff images: [{}]({})".format(
                fb.read(), os.path.basename(zip_url[0]), zip_url[0]
            )
    else:
        msg = "## Diff images: [{}]({})".format(
            os.path.basename(zip_url[0]), zip_url[0]
        )
    self._post_gh_msg(msg, repo_slug, pull)
def _post_media_to_gfr(self, paths, uuid):
    """Post images to GF Regression.

    Args:
        paths: iterable of local file paths to upload.
        uuid: session identifier the uploads are attached to.

    Returns:
        List of absolute GFR urls for the uploaded items.

    Requires a ``GFR_TOKEN`` environment variable.
    """
    url_endpoint = self.GFR_URL + "/api/upload-media"
    handles = [open(path, "rb") for path in paths]
    try:
        r = requests.post(
            url_endpoint,
            data={"uuid": uuid},
            files=[("files", fh) for fh in handles],
            headers={"Access-Token": os.environ["GFR_TOKEN"]},
        )
    finally:
        # Bug fix: the original left these file handles open forever.
        for fh in handles:
            fh.close()
    return [os.path.join(self.GFR_URL, i) for i in r.json()["items"]]
def _post_gh_msg(self, msg, repo_slug=None, pull_id=None):
    """Post *msg* to GitHub: as a comment on pull/issue ``pull_id`` when
    given, otherwise as a new issue titled "Google Fonts QA report" on
    ``repo_slug``.

    Requires a ``GH_TOKEN`` environment variable with repo access.
    """
    if pull_id:
        url = "https://api.github.com/repos/{}/issues/{}/comments".format(
            repo_slug, pull_id
        )
        payload = {"body": msg}
    else:
        url = "https://api.github.com/repos/{}/issues".format(repo_slug)
        payload = {"title": "Google Fonts QA report", "body": msg}
    # Fix: the original duplicated this call in both branches and bound the
    # response to an unused variable. The response is fire-and-forget.
    requests.post(
        url,
        data=json.dumps(payload),
        headers={"Authorization": "token {}".format(os.environ["GH_TOKEN"])},
    )
def family_name_from_fonts(fonts):
    """Return the single family name shared by *fonts*.

    Prefers the typographic family name (nameID 16) over the legacy
    family name (nameID 1), reading the Windows/Unicode-BMP/en-US record.
    Raises if a font has neither record or if the fonts disagree.
    """
    names = []
    for ttfont in fonts:
        name_table = ttfont["name"]
        legacy_record = name_table.getName(1, 3, 1, 1033)
        typo_record = name_table.getName(16, 3, 1, 1033)
        record = typo_record or legacy_record
        if not record:
            raise Exception(
                "Font: {} has no family name records".format(
                    os.path.basename(ttfont.reader.file.name)
                )
            )
        names.append(record.toUnicode())
    if len(set(names)) > 1:
        raise Exception("Multiple family names found: [{}]".format(", ".join(names)))
    return names[0]
def main():
    """Entry point: fetch the fonts under test (and optionally the previous
    fonts), run the selected QA checks, and optionally post the results to
    GitHub."""
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )
    # Exactly one source for the fonts under test.
    font_group = parser.add_argument_group(title="Fonts to qa")
    font_input_group = font_group.add_mutually_exclusive_group(required=True)
    font_input_group.add_argument("-f", "--fonts", nargs="+",
                                  help="Paths to fonts")
    font_input_group.add_argument("-pr", "--pull-request",
                                  help="Get fonts from a Github pull request")
    font_input_group.add_argument("-gh", "--github-dir",
                                  help="Get fonts from a Github directory")
    font_input_group.add_argument("-gf", "--googlefonts",
                                  help="Get fonts from Google Fonts")
    # At most one source for the previous ("before") fonts.
    font_before_group = parser.add_argument_group(title="Fonts before input")
    font_before_input_group = font_before_group.add_mutually_exclusive_group(
        required=False
    )
    font_before_input_group.add_argument(
        "-fb", "--fonts-before", nargs="+",
        help="Paths to previous fonts"
    )
    font_before_input_group.add_argument("-prb", "--pull-request-before",
                                         help="Get previous fonts from a Github pull request")
    font_before_input_group.add_argument("-ghb", "--github-dir-before",
                                         help="Get previous fonts from a Github dir")
    font_before_input_group.add_argument(
        "-gfb",
        "--googlefonts-before",
        action="store_true",
        help="Get previous fonts from Google Fonts",
    )
    check_group = parser.add_argument_group(title="QA checks")
    # NOTE(review): "against against" typo in the help text below — runtime
    # string left untouched here.
    check_group.add_argument(
        "-a",
        "--auto-qa",
        action="store_true",
        help="Check fonts against against the same fonts hosted on Google Fonts",
    )
    check_group.add_argument(
        "--diffenator", action="store_true", help="Run Fontdiffenator"
    )
    check_group.add_argument(
        "--diffbrowsers", action="store_true", help="Run Diffbrowsers"
    )
    check_group.add_argument(
        "--fontbakery", action="store_true", help="Run FontBakery"
    )
    check_group.add_argument(
        "--plot-glyphs",
        action="store_true",
        help="Gen images of full charset, useful for new familes",
    )
    check_group.add_argument(
        "--browser-previews",
        action="store_true",
        help="Gen images on diff browsers, useful for new families",
    )
    # NOTE(review): --diff-mode is parsed but never read below — presumably
    # consumed elsewhere or vestigial; confirm before removing.
    check_group.add_argument(
        "-dm", "--diff-mode", choices=("weak", "normal", "strict"), default="normal"
    )
    parser.add_argument("-re", "--filter-fonts", help="Filter fonts by regex")
    parser.add_argument(
        "-o", "--out", default="out", help="Output path for check results"
    )
    parser.add_argument(
        "-ogh",
        "--out-github",
        action="store_true",
        help=(
            "Post report data to either the pull request as a comment "
            "open a new issue. This can only be used if fonts have been "
            "fetched from either a pull request or github dir."
        ),
    )
    parser.add_argument(
        "--out-url",
        help=(
            "Post report data to a github pr. This can be used with any font "
            "fetching method."
        )
    )
    parser.add_argument("--version", action="version", version=__version__)
    args = parser.parse_args()
    # --out-github needs a github source to know where to post.
    if args.out_github and not any([args.pull_request, args.github_dir]):
        raise Exception(
            "Cannot upload results to a github issue or pr. "
            "Font input must either a github dir or a pull request"
        )
    # Refuse to run with nothing to do.
    if not any([args.auto_qa,
                args.fontbakery,
                args.plot_glyphs,
                args.diffbrowsers,
                args.diffenator,
                args.browser_previews]):
        raise Exception("Terminating. No checks selected. Run gftools qa "
                        "--help to see all possible commands.")
    # Retrieve fonts and store in out dir
    mkdir(args.out)
    fonts_dir = os.path.join(args.out, "fonts")
    mkdir(fonts_dir)
    if args.fonts:
        [shutil.copy(f, fonts_dir) for f in args.fonts]
        fonts = args.fonts
    elif args.pull_request:
        fonts = download_files_in_github_pr(
            args.pull_request,
            fonts_dir,
            ignore_static_dir=False,
        )
        if not fonts:
            logger.info("No fonts found in pull request. Skipping")
            return
    elif args.github_dir:
        fonts = download_files_in_github_dir(args.github_dir, fonts_dir)
        if not fonts:
            logger.info("No fonts found in github dir. Skipping")
            return
    elif args.googlefonts:
        fonts = download_family_from_Google_Fonts(args.googlefonts, fonts_dir)
    # Optional regex filter over the fetched font paths.
    if args.filter_fonts:
        re_filter = re.compile(args.filter_fonts)
        fonts = [f for f in fonts if re_filter.search(f)]
    # Parse only .ttf/.otf files, skipping anything in a "static" dir.
    ttfonts = [TTFont(f) for f in fonts if f.endswith((".ttf", ".otf"))
               and "static" not in f]
    family_name = family_name_from_fonts(ttfonts)
    family_on_gf = Google_Fonts_has_family(family_name)
    # Retrieve fonts_before and store in out dir
    fonts_before = None
    if any([args.fonts_before, args.pull_request_before, args.github_dir_before]) or \
            (args.googlefonts_before and family_on_gf):
        fonts_before_dir = os.path.join(args.out, "fonts_before")
        mkdir(fonts_before_dir, overwrite=False)
        if args.fonts_before:
            [shutil.copy(f, fonts_before_dir) for f in args.fonts_before]
            fonts_before = args.fonts_before
        elif args.pull_request_before:
            fonts_before = download_files_in_github_pr(
                args.pull_request_before,
                fonts_before_dir,
                ignore_static_dir=False
            )
        elif args.github_dir_before:
            fonts_before = download_files_in_github_dir(
                args.github_dir_before, fonts_before_dir
            )
        elif args.googlefonts_before and family_on_gf:
            fonts_before = download_family_from_Google_Fonts(
                family_name, fonts_before_dir
            )
    if fonts_before:
        ttfonts_before = [TTFont(f) for f in fonts_before if f.endswith((".ttf", ".otf"))
                          and "static" not in f]
        qa = FontQA(ttfonts, ttfonts_before, args.out)
    else:
        qa = FontQA(ttfonts, out=args.out)
    # --auto-qa picks the suite based on whether the family is already live.
    if args.auto_qa and family_on_gf:
        qa.googlefonts_upgrade()
    elif args.auto_qa and not family_on_gf:
        qa.googlefonts_new()
    # Individual checks can be stacked on top of (or instead of) --auto-qa.
    if args.plot_glyphs:
        qa.plot_glyphs()
    if args.browser_previews:
        qa.browser_previews()
    if args.fontbakery:
        qa.fontbakery()
    if args.diffenator:
        qa.diffenator()
    if args.diffbrowsers:
        qa.diffbrowsers()
    # Report destination: explicit url beats the inferred github source.
    if args.out_url:
        qa.post_to_github(args.out_url)
    elif args.out_github and args.pull_request:
        qa.post_to_github(args.pull_request)
    elif args.out_github and args.github_dir:
        qa.post_to_github(args.github_dir)
# Script entry point.
if __name__ == "__main__":
    main()
| [
"logging.getLogger",
"requests.post",
"diffenator.font.DFont",
"diffenator.diff.DiffFonts",
"gftools.utils.download_family_from_Google_Fonts",
"re.compile",
"gftools.utils.download_files_in_github_dir",
"gftools.utils.download_files_in_github_pr",
"diffbrowsers.diffbrowsers.DiffBrowsers",
"fontToo... | [((1825, 1852), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1842, 1852), False, 'import logging\n'), ((1993, 2007), 'diffbrowsers.utils.load_browserstack_credentials', 'bstack_creds', ([], {}), '()\n', (2005, 2007), True, 'from diffbrowsers.utils import load_browserstack_credentials as bstack_creds\n'), ((14024, 14127), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), '(description=__doc__, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n', (14047, 14127), False, 'import argparse\n'), ((17902, 17917), 'gftools.utils.mkdir', 'mkdir', (['args.out'], {}), '(args.out)\n', (17907, 17917), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((17934, 17965), 'os.path.join', 'os.path.join', (['args.out', '"""fonts"""'], {}), "(args.out, 'fonts')\n", (17946, 17965), False, 'import os\n'), ((17970, 17986), 'gftools.utils.mkdir', 'mkdir', (['fonts_dir'], {}), '(fonts_dir)\n', (17975, 17986), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((18999, 19035), 'gftools.utils.Google_Fonts_has_family', 'Google_Fonts_has_family', (['family_name'], {}), '(family_name)\n', (19022, 19035), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((2051, 2084), 'os.environ.get', 'os.environ.get', (['"""BSTACK_USERNAME"""'], {}), "('BSTACK_USERNAME')\n", (2065, 2084), False, 'import os\n'), ((2106, 2141), 'os.environ.get', 'os.environ.get', (['"""BSTACK_ACCESS_KEY"""'], {}), 
"('BSTACK_ACCESS_KEY')\n", (2120, 2141), False, 'import os\n'), ((5196, 5232), 'os.path.join', 'os.path.join', (['self.out', '"""Diffenator"""'], {}), "(self.out, 'Diffenator')\n", (5208, 5232), False, 'import os\n'), ((5241, 5251), 'gftools.utils.mkdir', 'mkdir', (['dst'], {}), '(dst)\n', (5246, 5251), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((6952, 6990), 'os.path.join', 'os.path.join', (['self.out', '"""Diffbrowsers"""'], {}), "(self.out, 'Diffbrowsers')\n", (6964, 6990), False, 'import os\n'), ((6999, 7009), 'gftools.utils.mkdir', 'mkdir', (['dst'], {}), '(dst)\n', (7004, 7009), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((8144, 8180), 'os.path.join', 'os.path.join', (['self.out', '"""Fontbakery"""'], {}), "(self.out, 'Fontbakery')\n", (8156, 8180), False, 'import os\n'), ((8189, 8199), 'gftools.utils.mkdir', 'mkdir', (['out'], {}), '(out)\n', (8194, 8199), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((8435, 8455), 'subprocess.call', 'subprocess.call', (['cmd'], {}), '(cmd)\n', (8450, 8455), False, 'import subprocess\n'), ((8541, 8578), 'os.path.join', 'os.path.join', (['self.out', '"""plot_glyphs"""'], {}), "(self.out, 'plot_glyphs')\n", (8553, 8578), False, 'import os\n'), ((8587, 8597), 'gftools.utils.mkdir', 'mkdir', (['out'], {}), '(out)\n', (8592, 8597), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((10014, 10056), 
'os.path.join', 'os.path.join', (['self.out', '"""browser_previews"""'], {}), "(self.out, 'browser_previews')\n", (10026, 10056), False, 'import os\n'), ((10065, 10075), 'gftools.utils.mkdir', 'mkdir', (['out'], {}), '(out)\n', (10070, 10075), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((11274, 11320), 'shutil.make_archive', 'shutil.make_archive', (['self.out', '"""zip"""', 'self.out'], {}), "(self.out, 'zip', self.out)\n", (11293, 11320), False, 'import shutil\n'), ((11595, 11644), 'os.path.join', 'os.path.join', (['self.out', '"""Fontbakery"""', '"""report.md"""'], {}), "(self.out, 'Fontbakery', 'report.md')\n", (11607, 11644), False, 'import os\n'), ((11656, 11689), 'os.path.isfile', 'os.path.isfile', (['fontbakery_report'], {}), '(fontbakery_report)\n', (11670, 11689), False, 'import os\n'), ((12310, 12429), 'requests.post', 'requests.post', (['url_endpoint'], {'data': "{'uuid': uuid}", 'files': 'payload', 'headers': "{'Access-Token': os.environ['GFR_TOKEN']}"}), "(url_endpoint, data={'uuid': uuid}, files=payload, headers={\n 'Access-Token': os.environ['GFR_TOKEN']})\n", (12323, 12429), False, 'import requests\n'), ((18731, 18760), 're.compile', 're.compile', (['args.filter_fonts'], {}), '(args.filter_fonts)\n', (18741, 18760), False, 'import re\n'), ((18835, 18844), 'fontTools.ttLib.TTFont', 'TTFont', (['f'], {}), '(f)\n', (18841, 18844), False, 'from fontTools.ttLib import TTFont\n'), ((19279, 19317), 'os.path.join', 'os.path.join', (['args.out', '"""fonts_before"""'], {}), "(args.out, 'fonts_before')\n", (19291, 19317), False, 'import os\n'), ((19326, 19366), 'gftools.utils.mkdir', 'mkdir', (['fonts_before_dir'], {'overwrite': '(False)'}), '(fonts_before_dir, overwrite=False)\n', (19331, 19366), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, 
download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((5324, 5371), 'diffenator.font.DFont', 'DFont', (["self.instances_before[style]['filename']"], {}), "(self.instances_before[style]['filename'])\n", (5329, 5371), False, 'from diffenator.font import DFont\n'), ((5397, 5437), 'diffenator.font.DFont', 'DFont', (["self.instances[style]['filename']"], {}), "(self.instances[style]['filename'])\n", (5402, 5437), False, 'from diffenator.font import DFont\n'), ((5456, 5480), 'os.path.join', 'os.path.join', (['dst', 'style'], {}), '(dst, style)\n', (5468, 5480), False, 'import os\n'), ((6067, 6125), 'diffenator.diff.DiffFonts', 'DiffFonts', (['font_before', 'font_after', "{'render_diffs': True}"], {}), "(font_before, font_after, {'render_diffs': True})\n", (6076, 6125), False, 'from diffenator.diff import DiffFonts\n'), ((7497, 7524), 'os.path.join', 'os.path.join', (['dst', 'dir_name'], {}), '(dst, dir_name)\n', (7509, 7524), False, 'import os\n'), ((7553, 7665), 'diffbrowsers.diffbrowsers.DiffBrowsers', 'DiffBrowsers', ([], {'auth': 'self._bstack_auth', 'gfr_instance_url': 'self.GFR_URL', 'dst_dir': 'out', 'browsers': 'browsers_to_test'}), '(auth=self._bstack_auth, gfr_instance_url=self.GFR_URL, dst_dir\n =out, browsers=browsers_to_test)\n', (7565, 7665), False, 'from diffbrowsers.diffbrowsers import DiffBrowsers\n'), ((7898, 7928), 'os.path.join', 'os.path.join', (['out', '"""info.json"""'], {}), "(out, 'info.json')\n", (7910, 7928), False, 'import os\n'), ((8758, 8769), 'diffenator.font.DFont', 'DFont', (['font'], {}), '(font)\n', (8763, 8769), False, 'from diffenator.font import DFont\n'), ((11340, 11347), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (11345, 11347), False, 'from uuid import uuid4\n'), ((12500, 12529), 'os.path.join', 'os.path.join', (['self.GFR_URL', 'i'], {}), '(self.GFR_URL, i)\n', (12512, 12529), False, 'import os\n'), ((18015, 18040), 'shutil.copy', 'shutil.copy', (['f', 'fonts_dir'], {}), '(f, 
fonts_dir)\n', (18026, 18040), False, 'import shutil\n'), ((18133, 18220), 'gftools.utils.download_files_in_github_pr', 'download_files_in_github_pr', (['args.pull_request', 'fonts_dir'], {'ignore_static_dir': '(False)'}), '(args.pull_request, fonts_dir, ignore_static_dir\n =False)\n', (18160, 18220), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((19402, 19434), 'shutil.copy', 'shutil.copy', (['f', 'fonts_before_dir'], {}), '(f, fonts_before_dir)\n', (19413, 19434), False, 'import shutil\n'), ((19562, 19662), 'gftools.utils.download_files_in_github_pr', 'download_files_in_github_pr', (['args.pull_request_before', 'fonts_before_dir'], {'ignore_static_dir': '(False)'}), '(args.pull_request_before, fonts_before_dir,\n ignore_static_dir=False)\n', (19589, 19662), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((20063, 20072), 'fontTools.ttLib.TTFont', 'TTFont', (['f'], {}), '(f)\n', (20069, 20072), False, 'from fontTools.ttLib import TTFont\n'), ((3731, 3772), 'os.path.basename', 'os.path.basename', (['ttfont.reader.file.name'], {}), '(ttfont.reader.file.name)\n', (3747, 3772), False, 'import os\n'), ((3842, 3871), 're.sub', 're.sub', (['""".ttf|.otf"""', '""""""', 'name'], {}), "('.ttf|.otf', '', name)\n", (3848, 3871), False, 'import re\n'), ((6188, 6219), 'os.path.join', 'os.path.join', (['out', '"""report.txt"""'], {}), "(out, 'report.txt')\n", (6200, 6219), False, 'import os\n'), ((6248, 6278), 'os.path.join', 'os.path.join', (['out', '"""report.md"""'], {}), "(out, 'report.md')\n", (6260, 6278), False, 'import os\n'), ((6309, 6341), 'os.path.join', 'os.path.join', (['out', '"""report.html"""'], {}), "(out, 'report.html')\n", (6321, 6341), False, 
'import os\n'), ((8385, 8415), 'os.path.join', 'os.path.join', (['out', '"""report.md"""'], {}), "(out, 'report.md')\n", (8397, 8415), False, 'import os\n'), ((8710, 8732), 'os.path.basename', 'os.path.basename', (['font'], {}), '(font)\n', (8726, 8732), False, 'import os\n'), ((9247, 9288), 'os.path.join', 'os.path.join', (['out', "(font_filename + '.png')"], {}), "(out, font_filename + '.png')\n", (9259, 9288), False, 'import os\n'), ((11980, 12008), 'os.path.basename', 'os.path.basename', (['zip_url[0]'], {}), '(zip_url[0])\n', (11996, 12008), False, 'import os\n'), ((18414, 18470), 'gftools.utils.download_files_in_github_dir', 'download_files_in_github_dir', (['args.github_dir', 'fonts_dir'], {}), '(args.github_dir, fonts_dir)\n', (18442, 18470), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((19761, 19831), 'gftools.utils.download_files_in_github_dir', 'download_files_in_github_dir', (['args.github_dir_before', 'fonts_before_dir'], {}), '(args.github_dir_before, fonts_before_dir)\n', (19789, 19831), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((10567, 10590), 'os.path.join', 'os.path.join', (['out', 'name'], {}), '(out, name)\n', (10579, 10590), False, 'import os\n'), ((11838, 11866), 'os.path.basename', 'os.path.basename', (['zip_url[0]'], {}), '(zip_url[0])\n', (11854, 11866), False, 'import os\n'), ((12843, 12868), 'json.dumps', 'json.dumps', (["{'body': msg}"], {}), "({'body': msg})\n", (12853, 12868), False, 'import json\n'), ((13134, 13194), 'json.dumps', 'json.dumps', (["{'title': 'Google Fonts QA report', 'body': msg}"], {}), "({'title': 'Google Fonts QA report', 'body': msg})\n", (13144, 13194), False, 'import json\n'), ((18621, 
18683), 'gftools.utils.download_family_from_Google_Fonts', 'download_family_from_Google_Fonts', (['args.googlefonts', 'fonts_dir'], {}), '(args.googlefonts, fonts_dir)\n', (18654, 18683), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((19928, 19992), 'gftools.utils.download_family_from_Google_Fonts', 'download_family_from_Google_Fonts', (['family_name', 'fonts_before_dir'], {}), '(family_name, fonts_before_dir)\n', (19961, 19992), False, 'from gftools.utils import download_family_from_Google_Fonts, download_files_in_github_pr, download_files_in_github_dir, download_file, Google_Fonts_has_family, load_Google_Fonts_api_key, mkdir\n'), ((13785, 13824), 'os.path.basename', 'os.path.basename', (['font.reader.file.name'], {}), '(font.reader.file.name)\n', (13801, 13824), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-07-16 13:27
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('visualizer', '0011_visualization_data_source'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('service_builder', '0024_auto_20190716_1627'),
('dashboard_builder', '0014_auto_20190716_1627'),
('aggregator', '0041_auto_20190716_1627'),
]
operations = [
migrations.CreateModel(
name='UniqueDashboardViewsView',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dashboard_id', models.IntegerField(default=1)),
('count', models.IntegerField(default=1)),
],
options={
'db_table': 'unique_dashboard_views_view',
'managed': False,
},
),
migrations.CreateModel(
name='UniqueDatasetPreview',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dataset_id', models.IntegerField(default=1)),
('count', models.IntegerField(default=1)),
],
options={
'db_table': 'unique_dataset_preview',
'managed': False,
},
),
migrations.CreateModel(
name='UniqueServiceUsesView',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('service_id', models.IntegerField(default=1)),
('count', models.IntegerField(default=1)),
],
options={
'db_table': 'unique_service_uses_view',
'managed': False,
},
),
migrations.CreateModel(
name='BDO_Plan',
fields=[
('plan_name', models.TextField(primary_key=True, serialize=False)),
('plan_title', models.TextField(default='Untitled Plan')),
('query_limit', models.IntegerField(default=120, null=True)),
('price', models.FloatField(default=0, null=True)),
('access_to_beta_services', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='DashboardDisplays',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dash_display_count', models.IntegerField(default=1)),
('dashboard', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dashboard_displays_dashboard', to='dashboard_builder.Dashboard')),
],
),
migrations.CreateModel(
name='DashboardUniqueViews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dash_display_count', models.IntegerField(default=1)),
('dashboard', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dashboard_unique_views_dashboard', to='dashboard_builder.Dashboard')),
('dashboard_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dashboard_unique_views_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='DatasetCombined',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('combination_count', models.IntegerField(default=1)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_combined_dataset', to='aggregator.Dataset')),
],
),
migrations.CreateModel(
name='DatasetExplored',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('exploration_count', models.IntegerField(default=1)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_explored_dataset', to='aggregator.Dataset')),
],
),
migrations.CreateModel(
name='DatasetPageViews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('preview_count', models.IntegerField(default=1)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_page_views_dataset', to='aggregator.Dataset')),
],
),
migrations.CreateModel(
name='DatasetUniqueViews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('preview_count', models.IntegerField(default=1)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_unique_views_dataset', to='aggregator.Dataset')),
('dataset_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_unique_views_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='DatasetUseInService',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('use_count', models.IntegerField(default=1)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_use_in_service_dataset', to='aggregator.Dataset')),
('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_use_in_service_service', to='service_builder.Service')),
],
),
migrations.CreateModel(
name='DatasetUseInVisualisation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('viz_use_count', models.IntegerField(default=1)),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_dataset_use_in_visualisation_dataset', to='aggregator.Dataset')),
],
),
migrations.CreateModel(
name='MareProtectionService',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('scenario', models.IntegerField(default=1)),
('simulation_length', models.IntegerField(default=24)),
('time_interval', models.IntegerField(default=2)),
('ocean_circulation_model', models.CharField(default='Poseidon High Resolution Aegean Model', max_length=100)),
('wave_model', models.CharField(default='Poseidon WAM Cycle 4 for the Aegean', max_length=100)),
('natura_layer', models.BooleanField(default=False)),
('ais_layer', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='ServicePerUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('service_runs', models.IntegerField(default=1)),
('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='service_per_user_service', to='service_builder.Service')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='service_per_user_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ServiceUse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('serv_use_count', models.IntegerField(default=1)),
('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_service_use_service', to='service_builder.Service')),
],
),
migrations.CreateModel(
name='ServiceUsers',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('serv_use_count', models.IntegerField(default=1)),
('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_service_users_service', to='service_builder.Service')),
('service_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_service_users_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserPlans',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_start', models.DateTimeField(auto_now_add=True)),
('date_end', models.DateTimeField(default=datetime.datetime(2019, 8, 15, 16, 27, 30, 138000))),
('active', models.BooleanField(default=True)),
('auto_renewal', models.BooleanField(default=True)),
('query_count', models.IntegerField(default=0)),
('plan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='plan_plan', to='website_analytics.BDO_Plan')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='plan_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='VisualisationTypeUses',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('viz_use_count', models.IntegerField(default=1)),
('visualisation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_visualisation_type_uses_visualisation', to='visualizer.Visualization')),
],
),
migrations.CreateModel(
name='WaveEnergyResourceAssessment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_nester_statistics_dataset', to='aggregator.Dataset')),
('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analytics_nester_statistics_service', to='service_builder.Service')),
],
),
]
| [
"datetime.datetime",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db... | [((384, 441), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (415, 441), False, 'from django.db import migrations, models\n'), ((755, 848), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (771, 848), False, 'from django.db import migrations, models\n'), ((880, 910), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (899, 910), False, 'from django.db import migrations, models\n'), ((939, 969), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (958, 969), False, 'from django.db import migrations, models\n'), ((1245, 1338), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1261, 1338), False, 'from django.db import migrations, models\n'), ((1368, 1398), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1387, 1398), False, 'from django.db import migrations, models\n'), ((1427, 1457), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1446, 1457), False, 'from django.db import migrations, models\n'), ((1729, 1822), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1745, 1822), False, 'from django.db import migrations, models\n'), ((1852, 1882), 'django.db.models.IntegerField', 
'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1871, 1882), False, 'from django.db import migrations, models\n'), ((1911, 1941), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1930, 1941), False, 'from django.db import migrations, models\n'), ((2209, 2260), 'django.db.models.TextField', 'models.TextField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (2225, 2260), False, 'from django.db import migrations, models\n'), ((2294, 2335), 'django.db.models.TextField', 'models.TextField', ([], {'default': '"""Untitled Plan"""'}), "(default='Untitled Plan')\n", (2310, 2335), False, 'from django.db import migrations, models\n'), ((2370, 2413), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(120)', 'null': '(True)'}), '(default=120, null=True)\n', (2389, 2413), False, 'from django.db import migrations, models\n'), ((2442, 2481), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0)', 'null': '(True)'}), '(default=0, null=True)\n', (2459, 2481), False, 'from django.db import migrations, models\n'), ((2528, 2561), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2547, 2561), False, 'from django.db import migrations, models\n'), ((2704, 2797), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2720, 2797), False, 'from django.db import migrations, models\n'), ((2835, 2865), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (2854, 2865), False, 'from django.db import migrations, models\n'), ((2898, 3059), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 
'related_name': '"""analytics_dashboard_displays_dashboard"""', 'to': '"""dashboard_builder.Dashboard"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dashboard_displays_dashboard', to='dashboard_builder.Dashboard'\n )\n", (2915, 3059), False, 'from django.db import migrations, models\n'), ((3195, 3288), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3211, 3288), False, 'from django.db import migrations, models\n'), ((3326, 3356), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (3345, 3356), False, 'from django.db import migrations, models\n'), ((3389, 3554), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dashboard_unique_views_dashboard"""', 'to': '"""dashboard_builder.Dashboard"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dashboard_unique_views_dashboard', to=\n 'dashboard_builder.Dashboard')\n", (3406, 3554), False, 'from django.db import migrations, models\n'), ((3582, 3732), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dashboard_unique_views_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dashboard_unique_views_user', to=settings.AUTH_USER_MODEL)\n", (3599, 3732), False, 'from django.db import migrations, models\n'), ((3868, 3961), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3884, 3961), False, 'from 
django.db import migrations, models\n'), ((3998, 4028), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (4017, 4028), False, 'from django.db import migrations, models\n'), ((4059, 4202), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_combined_dataset"""', 'to': '"""aggregator.Dataset"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_combined_dataset', to='aggregator.Dataset')\n", (4076, 4202), False, 'from django.db import migrations, models\n'), ((4338, 4431), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4354, 4431), False, 'from django.db import migrations, models\n'), ((4468, 4498), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (4487, 4498), False, 'from django.db import migrations, models\n'), ((4529, 4672), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_explored_dataset"""', 'to': '"""aggregator.Dataset"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_explored_dataset', to='aggregator.Dataset')\n", (4546, 4672), False, 'from django.db import migrations, models\n'), ((4809, 4902), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (4825, 4902), False, 'from django.db import migrations, models\n'), ((4935, 4965), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), 
'(default=1)\n', (4954, 4965), False, 'from django.db import migrations, models\n'), ((4996, 5141), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_page_views_dataset"""', 'to': '"""aggregator.Dataset"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_page_views_dataset', to='aggregator.Dataset')\n", (5013, 5141), False, 'from django.db import migrations, models\n'), ((5280, 5373), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5296, 5373), False, 'from django.db import migrations, models\n'), ((5406, 5436), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (5425, 5436), False, 'from django.db import migrations, models\n'), ((5467, 5614), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_unique_views_dataset"""', 'to': '"""aggregator.Dataset"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_unique_views_dataset', to='aggregator.Dataset')\n", (5484, 5614), False, 'from django.db import migrations, models\n'), ((5645, 5793), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_unique_views_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_unique_views_user', to=settings.AUTH_USER_MODEL)\n", (5662, 5793), False, 'from django.db import migrations, models\n'), ((5933, 6026), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': 
'(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (5949, 6026), False, 'from django.db import migrations, models\n'), ((6055, 6085), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (6074, 6085), False, 'from django.db import migrations, models\n'), ((6116, 6265), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_use_in_service_dataset"""', 'to': '"""aggregator.Dataset"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_use_in_service_dataset', to='aggregator.Dataset')\n", (6133, 6265), False, 'from django.db import migrations, models\n'), ((6291, 6445), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_use_in_service_service"""', 'to': '"""service_builder.Service"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_use_in_service_service', to='service_builder.Service')\n", (6308, 6445), False, 'from django.db import migrations, models\n'), ((6591, 6684), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (6607, 6684), False, 'from django.db import migrations, models\n'), ((6717, 6747), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (6736, 6747), False, 'from django.db import migrations, models\n'), ((6778, 6933), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_dataset_use_in_visualisation_dataset"""', 'to': '"""aggregator.Dataset"""'}), 
"(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_dataset_use_in_visualisation_dataset', to='aggregator.Dataset')\n", (6795, 6933), False, 'from django.db import migrations, models\n'), ((7075, 7168), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (7091, 7168), False, 'from django.db import migrations, models\n'), ((7196, 7226), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (7215, 7226), False, 'from django.db import migrations, models\n'), ((7267, 7298), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(24)'}), '(default=24)\n', (7286, 7298), False, 'from django.db import migrations, models\n'), ((7335, 7365), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(2)'}), '(default=2)\n', (7354, 7365), False, 'from django.db import migrations, models\n'), ((7412, 7497), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Poseidon High Resolution Aegean Model"""', 'max_length': '(100)'}), "(default='Poseidon High Resolution Aegean Model',\n max_length=100)\n", (7428, 7497), False, 'from django.db import migrations, models\n'), ((7527, 7606), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Poseidon WAM Cycle 4 for the Aegean"""', 'max_length': '(100)'}), "(default='Poseidon WAM Cycle 4 for the Aegean', max_length=100)\n", (7543, 7606), False, 'from django.db import migrations, models\n'), ((7642, 7676), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (7661, 7676), False, 'from django.db import migrations, models\n'), ((7709, 7743), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (7728, 7743), False, 
'from django.db import migrations, models\n'), ((7883, 7976), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (7899, 7976), False, 'from django.db import migrations, models\n'), ((8008, 8038), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (8027, 8038), False, 'from django.db import migrations, models\n'), ((8069, 8207), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""service_per_user_service"""', 'to': '"""service_builder.Service"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='service_per_user_service', to='service_builder.Service')\n", (8086, 8207), False, 'from django.db import migrations, models\n'), ((8230, 8364), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""service_per_user_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='service_per_user_user', to=settings.AUTH_USER_MODEL)\n", (8247, 8364), False, 'from django.db import migrations, models\n'), ((8495, 8588), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8511, 8588), False, 'from django.db import migrations, models\n'), ((8622, 8652), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (8641, 8652), False, 'from django.db import migrations, models\n'), ((8683, 8826), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 
'related_name': '"""analytics_service_use_service"""', 'to': '"""service_builder.Service"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_service_use_service', to='service_builder.Service')\n", (8700, 8826), False, 'from django.db import migrations, models\n'), ((8959, 9052), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (8975, 9052), False, 'from django.db import migrations, models\n'), ((9086, 9116), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (9105, 9116), False, 'from django.db import migrations, models\n'), ((9147, 9292), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_service_users_service"""', 'to': '"""service_builder.Service"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_service_users_service', to='service_builder.Service')\n", (9164, 9292), False, 'from django.db import migrations, models\n'), ((9323, 9464), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_service_users_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_service_users_user', to=settings.AUTH_USER_MODEL)\n", (9340, 9464), False, 'from django.db import migrations, models\n'), ((9594, 9687), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (9610, 9687), False, 'from django.db import migrations, models\n'), ((9717, 9756), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (9737, 9756), False, 'from django.db import migrations, models\n'), ((9898, 9931), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9917, 9931), False, 'from django.db import migrations, models\n'), ((9967, 10000), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9986, 10000), False, 'from django.db import migrations, models\n'), ((10035, 10065), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (10054, 10065), False, 'from django.db import migrations, models\n'), ((10093, 10219), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""plan_plan"""', 'to': '"""website_analytics.BDO_Plan"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='plan_plan', to='website_analytics.BDO_Plan')\n", (10110, 10219), False, 'from django.db import migrations, models\n'), ((10242, 10364), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""plan_user"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='plan_user', to=settings.AUTH_USER_MODEL)\n", (10259, 10364), False, 'from django.db import migrations, models\n'), ((10506, 10599), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (10522, 10599), False, 'from django.db import migrations, models\n'), ((10632, 10662), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (10651, 10662), False, 'from django.db 
import migrations, models\n'), ((10699, 10866), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_visualisation_type_uses_visualisation"""', 'to': '"""visualizer.Visualization"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_visualisation_type_uses_visualisation', to=\n 'visualizer.Visualization')\n", (10716, 10866), False, 'from django.db import migrations, models\n'), ((11010, 11103), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (11026, 11103), False, 'from django.db import migrations, models\n'), ((11130, 11274), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_nester_statistics_dataset"""', 'to': '"""aggregator.Dataset"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_nester_statistics_dataset', to='aggregator.Dataset')\n", (11147, 11274), False, 'from django.db import migrations, models\n'), ((11300, 11449), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""analytics_nester_statistics_service"""', 'to': '"""service_builder.Service"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='analytics_nester_statistics_service', to='service_builder.Service')\n", (11317, 11449), False, 'from django.db import migrations, models\n'), ((9817, 9867), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(8)', '(15)', '(16)', '(27)', '(30)', '(138000)'], {}), '(2019, 8, 15, 16, 27, 30, 138000)\n', (9834, 9867), False, 'import datetime\n')] |
# coding: utf-8
import json
import requests
from sentry.plugins.bases.notify import NotificationPlugin
import sentry_dingding
from .forms import DingDingOptionsForm
DingTalk_API = "https://oapi.dingtalk.com/robot/send?access_token={token}"
class DingDingPlugin(NotificationPlugin):
"""
Sentry plugin to send error counts to DingDing.
"""
author = 'y1024'
author_url = 'https://github.com/y1024/sentry-dingding'
version = sentry_dingding.VERSION
description = 'Send error counts to DingDing.'
resource_links = [
('Source', 'https://github.com/y1024/sentry-dingding'),
('Bug Tracker', 'https://github.com/y1024/sentry-dingding/issues'),
('README', 'https://github.com/y1024/sentry-dingding/blob/master/README.md'),
]
slug = 'DingDing'
title = 'DingDing'
conf_key = slug
conf_title = title
project_conf_form = DingDingOptionsForm
def is_configured(self, project):
"""
Check if plugin is configured.
"""
return bool(self.get_option('access_token', project))
def notify_users(self, group, event, *args, **kwargs):
self.post_process(group, event, *args, **kwargs)
def post_process(self, group, event, *args, **kwargs):
"""
Process error.
"""
if not self.is_configured(group.project):
return
if group.is_ignored():
return
access_token = self.get_option('access_token', group.project)
send_url = DingTalk_API.format(token=access_token)
title = u"New alert from {}".format(event.project.slug)
data = {
"msgtype": "markdown",
"markdown": {
"title": title,
"text": u"#### {title} \n > {message} \n\n [查看详情]({url})".format(
title=title,
# https://github.com/getsentry/sentry/pull/15759/commits/cfc474be32ba64dcd87994bd42584f007443ad6a
message=event.title,
url=u"{}events/{}/".format(group.get_absolute_url(), event.event_id),
)
}
}
requests.post(
url=send_url,
headers={"Content-Type": "application/json"},
data=json.dumps(data).encode("utf-8")
)
| [
"json.dumps"
] | [((2256, 2272), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2266, 2272), False, 'import json\n')] |
""" Generate BpForms for all of the proteins in PRO, verify
them, and calculate their properties
:Author: <NAME> <<EMAIL>>
:Date: 2019-06-24
:Copyright: 2019, Karr Lab
:License: MIT
"""
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from matplotlib import pyplot
from xml.etree import ElementTree
import bpforms
import copy
import csv
import matplotlib
import numpy
import os
import pickle
import re
import requests
import requests_cache
# Source of the PRO ontology (non-reasoned OBO release).
IN_URL = 'https://proconsortium.org/download/current/pro_nonreasoned.obo'
# Local copies of the downloaded/parsed ontology.
IN_OBO_FILENAME = os.path.join('examples', 'pro_nonreasoned.obo')
IN_PKL_FILENAME = os.path.join('examples', 'pro_nonreasoned.pkl')
# Curated list of PRO entries in TSV format (provided externally).
IN_TSV_FILENAME = os.path.join('examples', 'pro_input.in.tsv')
# Backward-compatible alias for the original misspelled constant name;
# existing callers (e.g. ``run``'s default argument) still reference it.
IN_TSV_FILELANE = IN_TSV_FILENAME
# Map from monomer ids used by PRO to their BpForms alphabet codes.
IN_MONOMERS_FILENAME = os.path.join('examples', 'pro.monomers.csv')
# UniProt REST endpoints for retrieving sequences/metadata by accession.
UNIPROT_SEQ_ENDPOINT = 'https://www.uniprot.org/uniprot/{}.fasta'
UNIPROT_XML_ENDPOINT = 'https://www.uniprot.org/uniprot/{}.xml'
# Output locations for the analysis results in various formats.
OUT_PICKLE_FILENAME = os.path.join('examples', 'pro_input.out.pkl')
OUT_PICKLE_FILENAME_2 = os.path.join('examples', 'pro_input.out.2.pkl')
OUT_TSV_FILENAME = os.path.join('examples', 'pro_input.out.tsv')
OUT_FASTA_FILENAME = os.path.join('examples', 'pro_input.fasta')
OUT_FIG_FILENAME = os.path.join('examples', 'pro_input.svg')
OUT_STRUCTURE_DIRNAME = os.path.join('examples', 'pro_input_structure')
OUT_VIZ_DIRNAME = os.path.join('examples', 'pro_input_viz')
# Cache all HTTP responses in a local SQLite database so repeated runs do not
# re-download the same UniProt records; entries never expire (expire_after=None).
cache_name = os.path.join('examples', 'pro')
session = requests_cache.core.CachedSession(cache_name, backend='sqlite', expire_after=None)
# Retry transient failures against the UniProt host up to 5 times.
session.mount('https://www.uniprot.org/', requests.adapters.HTTPAdapter(max_retries=5))
# Map from three-letter amino acid abbreviations (as used in PRO modification
# records) to the corresponding one-letter IUPAC codes. The two sequences below
# are aligned position-by-position: the i-th abbreviation maps to the i-th
# character of the code string.
AA_CHARS_TO_CODES = dict(zip(
    ('Ala', 'Arg', 'Asn', 'Asp', 'Cys', 'Glu', 'Gln', 'Gly', 'His', 'Ile',
     'Leu', 'Lys', 'Met', 'Phe', 'Pro', 'Ser', 'Thr', 'Trp', 'Tyr', 'Val'),
    'ARNDCEQGHILKMFPSTWYV',
))
def run(in_obo_filename=IN_OBO_FILENAME, in_pkl_filename=IN_PKL_FILENAME, in_tsv_filename=IN_TSV_FILELANE,
        in_monomers_filename=IN_MONOMERS_FILENAME,
        max_num_proteins=None,
        out_pickle_filename=OUT_PICKLE_FILENAME, out_pickle_filename_2=OUT_PICKLE_FILENAME_2,
        out_tsv_filename=OUT_TSV_FILENAME, out_fasta_filename=OUT_FASTA_FILENAME,
        out_fig_filename=OUT_FIG_FILENAME, out_structure_dirname=OUT_STRUCTURE_DIRNAME,
        out_viz_dirname=OUT_VIZ_DIRNAME):
    """ Download PRO ontology, generate proteoforms, and encode with BpForms

    Args:
        in_obo_filename (:obj:`str`, optional): path to save/read PRO ontology in OBO format
        in_pkl_filename (:obj:`str`, optional): path to save/read parsed content of PRO ontology
        in_tsv_filename (:obj:`str`, optional): path to read PRO entries in TSV format
        in_monomers_filename (:obj:`str`, optional): path to list of ids of monomeric forms used
            by PRO and their alphabet code in tab-separated format
        max_num_proteins (:obj:`int`, optional): maximum number of proteins to analyze
        out_pickle_filename (:obj:`str`, optional): path to save results in pickle format
        out_pickle_filename_2 (:obj:`str`, optional): path to save results in pickle format
        out_tsv_filename (:obj:`str`, optional): path to save results in tab-separated format
        out_fasta_filename (:obj:`str`, optional): path to save results in FASTA format
        out_fig_filename (:obj:`str`, optional): path to save plot of results
        out_structure_dirname (:obj:`str`, optional): path to save proteoforms in CML format
        out_viz_dirname (:obj:`str`, optional): path to save proteoforms in SVG format

    Returns:
        :obj:`list` of :obj:`dict`: proteoforms encoded with BpForms
    """
    # get the PRO ontology and extract the modified proteins from the ontology
    # proteins = get_pro_from_obo(obo_filename=in_obo_filename, pkl_filename=in_pkl_filename, max_num_proteins=max_num_proteins)
    proteins = get_pro_from_tsv(in_tsv_filename, max_num_proteins=max_num_proteins)
    # parse the modified proteins and retrieve their sequences;
    # results are cached in pickle format so that UniProt is only queried once
    if not os.path.isfile(out_pickle_filename):
        # parse the modified proteins and retrieve their sequences
        parsed_proteins = []
        for i_protein, protein in enumerate(proteins):
            if i_protein % 100 == 0:
                print('Parsing protein {} of {}'.format(i_protein + 1, len(proteins)))
            parsed_proteins.append(parse_protein(protein))
        # save the parsed proteins in pickle format
        with open(out_pickle_filename, 'wb') as file:
            pickle.dump(parsed_proteins, file)
    else:
        # load saved parsed proteins in pickle format
        with open(out_pickle_filename, 'rb') as file:
            parsed_proteins = pickle.load(file)
    # read list of monomers: maps each PRO monomer id to a BpForms monomer
    # ('mod') and the canonical residue code(s) it derives from ('origin')
    monomers = {}
    with open(in_monomers_filename, 'r') as file:
        reader = csv.DictReader(file, dialect='excel')
        for row in reader:
            monomers[row['PRO id']] = {
                'mod': bpforms.protein_alphabet.monomers.get(row['BpForms code'], None),
                'origin': [],
            }
            if row['Base monomer']:
                monomers[row['PRO id']]['origin'] = row['Base monomer'].split(', ')
    # generate list of modified monomeric forms: register placeholder
    # entries for monomer ids seen in PRO but absent from the mapping file
    for protein in parsed_proteins:
        for modification in protein['modifications']:
            if modification['monomer'] not in monomers:
                monomers[modification['monomer']] = {
                    'mod': None,
                    'origin': [],
                }
    # print list of unmapped monomers
    unmapped_monomers = []
    for monomer, code in monomers.items():
        if not code['mod']:
            unmapped_monomers.append(monomer)
    unmapped_monomers.sort()
    if unmapped_monomers:
        print('Several PRO monomeric forms have not been mapped to BpForms monomeric forms:\n {}'.format(
            '\n '.join(unmapped_monomers)))
    # check for inconsistencies between residue and modified monomeric form
    monomer_codes = {}
    for code, monomer in bpforms.protein_alphabet.monomers.items():
        monomer_codes[monomer] = code
    for protein in parsed_proteins:
        for modification in protein.get('modifications', []):
            if modification['residue'] and modification['monomer']:
                monomer = monomers.get(modification['monomer'], None)
                # warn when the BpForms monomer's canonical code and the
                # annotated residue disagree
                if (monomer['mod'] and monomer['mod'].get_canonical_code(monomer_codes) != modification['residue']) \
                        or (monomer['origin'] and modification['residue'] not in monomer['origin']):
                    codes = set(monomer['origin'])
                    if monomer['mod']:
                        codes.add(monomer['mod'].get_canonical_code(monomer_codes))
                    msg = 'Modified monomeric form {} potentially inconsistent with residue {} != {}'.format(
                        modification['monomer'], modification['residue'],
                        ', '.join(codes))
                    print(protein['id'] + ': ' + msg)
    # generate BpForms for each protein
    if not os.path.isdir(out_structure_dirname):
        os.mkdir(out_structure_dirname)
    if not os.path.isdir(out_viz_dirname):
        os.mkdir(out_viz_dirname)
    # second-stage cache: skip BpForm generation if results already pickled
    if not os.path.isfile(out_pickle_filename_2):
        for i_protein, protein in enumerate(parsed_proteins):
            if i_protein % 100 == 0:
                print('Generating BpForms {} of {}'.format(i_protein + 1, len(parsed_proteins)))
            protein['modified_seq'] = None
            # skip entries without a usable sequence or with parse errors
            if not protein['uniprot_id']:
                continue
            if not protein['seq']:
                continue
            if protein['pro_errors']:
                continue
            # processed (cleaved but unmodified) form
            processed_form = gen_bpform(protein, monomers, monomer_codes, apply_modifications=False)
            protein['processed_seq'] = str(processed_form)
            # validate() returns a list of errors; empty (falsy) means valid
            if not processed_form.validate():
                processed_formula = processed_form.get_formula()
                protein['processed_formula'] = str(processed_formula)
                protein['processed_mol_wt'] = processed_form.get_mol_wt()
                protein['processed_charge'] = processed_form.get_charge()
            if not protein['modifications']:
                continue
            # fully modified form, first without annotations (compact string)
            modified_form = gen_bpform(protein, monomers, monomer_codes, include_annotations=False)
            protein['modified_seq'] = str(modified_form)
            modified_form = gen_bpform(protein, monomers, monomer_codes)
            if not modified_form.validate():
                modified_formula = modified_form.get_formula()
                protein['modified_full_seq'] = str(modified_form)
                protein['modified_formula'] = str(modified_formula)
                protein['modified_mol_wt'] = modified_form.get_mol_wt()
                protein['modified_charge'] = modified_form.get_charge()
                # NOTE(review): if the processed form failed validation above,
                # 'processed_mol_wt'/'processed_charge' were never set and the
                # subtractions below raise KeyError/NameError — confirm whether
                # both validations can diverge for the same protein
                protein['modifications_formula'] = str(modified_formula - processed_formula)
                protein['modifications_mol_wt'] = protein['modified_mol_wt'] - protein['processed_mol_wt']
                protein['modifications_charge'] = protein['modified_charge'] - protein['processed_charge']
            # with open(os.path.join(out_structure_dirname, protein['id'] + '.cml'), 'w') as file:
            #     file.write(modified_form.export('cml'))
            # render an annotated, unprocessed form for the sequence diagram
            form = gen_bpform(protein, monomers, monomer_codes,
                              apply_processing=False, include_annotations=True)
            seq_features = []
            if protein['processing']:
                # highlight the regions removed by processing (gray)
                seq_features.append({
                    'label': 'Processed',
                    'color': '#cccccc',
                    'positions': [],
                })
                last = 0
                for p in protein['processing']:
                    seq_features[0]['positions'].append([last + 1, p['start'] - 1])
                    last = p['end']
                seq_features[0]['positions'].append([
                    protein['processing'][-1]['end'] + 1,
                    len(form.seq),
                ])
                # drop empty leading/trailing regions
                if protein['processing'][0]['start'] == 1:
                    seq_features[0]['positions'].pop(0)
                if protein['processing'][-1]['end'] == len(form.seq):
                    seq_features[0]['positions'].pop(len(seq_features[0]['positions']) - 1)
            with open(os.path.join(out_viz_dirname, protein['id'] + '.svg'), 'w') as file:
                file.write(form.get_genomic_image(seq_features, width=910))
            if modified_form.get_canonical_seq(monomer_codes) != protein['processed_seq']:
                protein['pro_errors'].append('Modified sequence for {} not compatible with the processed sequence'.format(
                    protein['id']))
        # save the parsed proteins in pickle format
        with open(out_pickle_filename_2, 'wb') as file:
            pickle.dump(parsed_proteins, file)
    else:
        with open(out_pickle_filename_2, 'rb') as file:
            parsed_proteins = pickle.load(file)
    # save the proteoforms in TSV format
    with open(out_tsv_filename, 'w') as file:
        writer = csv.writer(file, dialect='excel-tab')
        writer.writerow(['PRO id', 'UniProt id', 'Organism',
                         'Unmodified sequence (IUBMB)',
                         'Processing', 'Deletions', 'Processsed sequence (IUBMB)', 'Processsed formula',
                         'Processsed molecular weight', 'Processsed charge',
                         'Modifications', 'Crosslinks', 'Modified sequence (abbreviated BpForms)', 'Modified sequence (BpForms)',
                         'Is modified sequence concrete', 'Modified formula', 'Modified molecular weight', 'Modified charge',
                         'Modifications formula', 'Modifications molecular weight', 'Modifications charge',
                         'PRO issues', 'Monomeric form issues'])
        for parsed_protein in parsed_proteins:
            if parsed_protein.get('pro_errors', None):
                pro_errors = '. '.join(parsed_protein['pro_errors']) + '.'
            else:
                pro_errors = None
            if parsed_protein.get('modified_errors', None):
                modified_errors = '. '.join(parsed_protein['modified_errors']) + '.'
            else:
                modified_errors = None
            writer.writerow([
                parsed_protein['id'],
                parsed_protein.get('uniprot_id', None),
                parsed_protein.get('organism', None),
                parsed_protein.get('seq', None),
                ', '.join('{}-{}'.format(p['start'], p['end']) for p in parsed_protein['processing']),
                ', '.join('{}-{}'.format(deletion[0], deletion[1]) for deletion in parsed_protein.get('deletions', [])),
                parsed_protein.get('processed_seq', None),
                parsed_protein.get('processed_formula', None),
                parsed_protein.get('processed_mol_wt', None),
                parsed_protein.get('processed_charge', None),
                ', '.join('{} --> {} ({})'.format(m['residue'] or '?', m['monomer'], ', '.join(str(p) for p in m['positions']))
                          for m in parsed_protein['modifications']),
                ', '.join('{}{}-{}{}'.format(xlink[0][1], xlink[0][0], xlink[1][1], xlink[1][0])
                          for xlink in parsed_protein.get('crosslinks', [])),
                parsed_protein.get('modified_seq', None),
                parsed_protein.get('modified_full_seq', None),
                parsed_protein.get('modified_concrete', False),
                parsed_protein.get('modified_formula', None),
                parsed_protein.get('modified_mol_wt', None),
                parsed_protein.get('modified_charge', None),
                parsed_protein.get('modifications_formula', None),
                parsed_protein.get('modifications_mol_wt', None),
                parsed_protein.get('modifications_charge', None),
                pro_errors,
                modified_errors,
            ])
    # save the proteoforms in FASTA format (only entries with a modified sequence)
    seqs = (SeqRecord(id='{} | {}'.format(protein['id'], protein['uniprot_id']),
                      seq=Seq(protein['modified_seq']),
                      description='')
            for protein in parsed_proteins
            if protein['modified_seq'])
    SeqIO.write(seqs, out_fasta_filename, "fasta")
    # analyze frequency of modifications
    plot_modifications(parsed_proteins, fig_filename=out_fig_filename)
    # return proteins
    return proteins, parsed_proteins
def get_pro_from_obo(obo_filename=IN_OBO_FILENAME, pkl_filename=IN_PKL_FILENAME, max_num_proteins=None):
    """ Get the PRO ontology and extract the modified proteins from the ontology

    Downloads the OBO file on first use and caches the parsed result in
    pickle format; subsequent calls read from the caches.

    Args:
        obo_filename (:obj:`str`, optional): filename to save PRO in OBO format
        pkl_filename (:obj:`str`, optional): filename to save/read PRO from pickled file
        max_num_proteins (:obj:`int`, optional): maximum number of proteins to analyze

    Returns:
        :obj:`list` of :obj:`dict`: list of PRO ontology terms for modified proteins
    """
    # download PRO (only if not already on disk)
    if not os.path.isfile(obo_filename):
        response = requests.get(IN_URL)
        response.raise_for_status()
        with open(obo_filename, 'wb') as file:
            file.write(response.content)
    # parse PRO or read from cache
    if not os.path.isfile(pkl_filename):
        # parse PRO: accumulate key/value pairs per [Term] stanza; a term is
        # kept only when it carries an organism-modification comment
        proteins = []
        protein = None
        with open(obo_filename, 'r') as file:
            for line in file:
                line = line.rstrip('\n')
                if line.startswith('['):
                    if line.startswith('[Term]'):
                        # stop early once the requested number of proteins is collected
                        if max_num_proteins is not None and len(proteins) >= max_num_proteins:
                            break
                        protein = {}
                    else:
                        # non-Term stanza (e.g. [Typedef]); ignore its lines
                        protein = None
                elif line and protein is not None:
                    key, _, value = line.partition(': ')
                    if key not in protein:
                        protein[key] = []
                    protein[key].append(value)
                    if key == 'comment' and value.startswith('Category=organism-modification.'):
                        proteins.append(protein)
        # save PRO in pickle format
        with open(pkl_filename, 'wb') as file:
            pickle.dump(proteins, file)
    else:
        # load PRO from pickle format
        with open(pkl_filename, 'rb') as file:
            proteins = pickle.load(file)
        # apply the limit to cached results too
        if max_num_proteins is not None and max_num_proteins < len(proteins):
            proteins = proteins[0:max_num_proteins]
    # return PRO
    return proteins
def get_pro_from_tsv(filename, max_num_proteins=None):
    """ Extract PRO entries from a tab-separated file.

    Each row holds four columns (PRO id, category, synonym type, sequence).
    Rows are converted into dictionaries shaped like the terms produced by
    :func:`get_pro_from_obo` so downstream parsing can treat both sources
    uniformly.

    Args:
        filename (:obj:`str`): path to the TSV file to read
        max_num_proteins (:obj:`int`, optional): maximum number of proteins to analyze

    Returns:
        :obj:`list` of :obj:`dict`: list of PRO ontology terms for modified proteins
    """
    entries = []
    columns = ('id', 'category', 'synonym_type', 'seq')
    with open(filename, 'r') as tsv_file:
        for record in csv.DictReader(tsv_file, fieldnames=columns, dialect='excel-tab'):
            # reconstruct the synonym string in the format used by the OBO terms
            synonym = '"{}" {} PRO-proteoform-std'.format(record['seq'], record['synonym_type'])
            entries.append({
                'id': [record['id']],
                'category': [record['category']],
                'synonym': [synonym],
            })
            # stop once the requested number of entries has been collected
            if max_num_proteins is not None and len(entries) >= max_num_proteins:
                break
    return entries
def parse_protein(protein):
    """ Parse the modification information from a term for a modified protein

    Retrieves the organism name and canonical sequence for the term's
    UniProt accession (via the cached HTTP session), then parses the
    processing ranges and modification list out of the term's
    PRO-proteoform-std synonym.

    Args:
        protein (:obj:`dict`): term for a modified protein

    Returns:
        :obj:`dict` with PRO id, UniProt id, processing start position, processing end position, unmodified sequence, and modifications
    """
    assert len(protein['id']) == 1
    id = protein['id'][0]
    errors = []
    # find the synonym(s) that encode a modified sequence
    seq_synonyms = []
    for synonym in protein.get('synonym', []):
        if synonym.startswith('"UniProtKB:') and ' PRO-proteoform-std' in synonym:
            seq_synonyms.append(synonym)
    if not seq_synonyms:
        errors.append('No synonym which defines a modified sequence')
        return {
            'id': id,
            'uniprot_id': None,
            'processing': [],
            'modifications': [],
            'seq': None,
            'pro_errors': errors,
        }
    elif len(seq_synonyms) > 1:
        errors.append('Multiple synonyms which define modified sequences')
    synonym = seq_synonyms[0]
    # split '"UniProtKB:<acc>, <processing/modifications>" ...' into parts
    uniprot_id, _, processing_modifications_type = synonym.partition(', ')
    uniprot_id = uniprot_id.partition(':')[2]
    # fetch the scientific organism name from the UniProt XML record
    organism_name = None
    response = session.get(UNIPROT_XML_ENDPOINT.format(uniprot_id))
    response.raise_for_status()
    if response.content:
        xml_root = ElementTree.fromstring(response.content)
        entry = xml_root.find('{http://uniprot.org/uniprot}entry')
        organism = entry.find('{http://uniprot.org/uniprot}organism')
        names = organism.findall('{http://uniprot.org/uniprot}name')
        for name in names:
            if name.get('type') == 'scientific':
                organism_name = name.text
                break
    # fetch the canonical sequence (FASTA body with newlines stripped)
    response = session.get(UNIPROT_SEQ_ENDPOINT.format(uniprot_id))
    response.raise_for_status()
    seq = response.content.decode('utf-8').partition('\n')[2].replace('\n', '')
    if not seq:
        errors.append('No sequence for UniProt entry; entry may be deprecated')
    # parse processing ranges of the form 'start-end, ' ('?' = unknown)
    processing_modifications = processing_modifications_type.partition('"')[0]
    processing = []
    while True:
        match = re.match(r'^(\?|\d+)\-(\?|\d+)(, |$)', processing_modifications)
        if match:
            if match.group(1) == '?':
                start = None
                errors.append('Unknown processing start position')
            else:
                start = int(float(match.group(1)))
                if start <= 0 or start > len(seq):
                    errors.append('Start position must be within sequence')
            if match.group(2) == '?':
                end = None
                errors.append('Unknown processing end position')
            else:
                end = int(float(match.group(2)))
                if end <= 0 or end > len(seq):
                    errors.append('End position must be within sequence')
            if start and end and start > end:
                errors.append('End position must be after start position')
            processing.append({
                'start': start,
                'end': end,
            })
            # consume the matched range and continue with the remainder
            processing_modifications = processing_modifications[len(match.group(0)):]
        else:
            break
    # remaining text is a '|'-separated modification list, unless it uses
    # free-text or boolean constructs this parser cannot handle
    if processing_modifications.startswith('which') \
            or processing_modifications.startswith('with') \
            or 'MOD:00046 OR Thr-163, MOD:00047' in processing_modifications:
        modifications_str = []
        errors.append('Unable to parse sequence')
    elif processing_modifications:
        modifications_str = processing_modifications.split('|')
    else:
        modifications_str = []
    modifications = []
    for modification in modifications_str:
        if modification and modification[0] == '(' and modification[-1] == ')':
            modification = modification[1:-1]
        if ' or ' in modification or ' and/or ' in modification:
            errors.append('Boolean logic not supported')
            continue
        if ', ' in modification:
            # form: 'Res-pos[/Res-pos...], MONOMER_ID'
            residue_positions, _, monomer = modification.partition(', ')
            residue_codes = set()
            positions = []
            for residue_position in residue_positions.split('/'):
                residue_chars, _, position = residue_position.partition('-')
                residue_code = AA_CHARS_TO_CODES[residue_chars]
                position = int(float(position))
                # cross-check the annotated residue against the sequence
                if position > len(seq):
                    errors.append('Position {} is greater than the sequence length {}'.format(position, len(seq)))
                elif seq[position - 1] != residue_code:
                    errors.append('Position {} != {}'.format(position, residue_code))
                residue_codes.add(residue_code)
                positions.append(position)
            if len(residue_codes) != 1 and monomer != 'PR:000026291':
                residue_code = None
                errors.append('Residues {{{}}} annotated with the same modification {}'.format(
                    ', '.join(residue_codes), monomer))
        else:
            # form: bare 'MONOMER_ID' with no residue/position information
            residue_code = None
            positions = []
            monomer = modification
        if monomer == 'PR:000026291':
            # NOTE(review): if this branch is reached via the bare-monomer path
            # above, `residue_codes` is undefined (NameError) or stale from a
            # previous loop iteration — confirm whether PR:000026291 always
            # appears with residue/position annotations
            for residue_code in residue_codes:
                modifications.append({
                    'residue': residue_code,
                    'positions': [p for p in positions if seq[p - 1] == residue_code],
                    'monomer': monomer,
                })
        else:
            modifications.append({
                'residue': residue_code,
                'positions': positions,
                'monomer': monomer,
            })
    return {
        'id': id,
        'uniprot_id': uniprot_id,
        'organism': organism_name,
        'processing': processing,
        'modifications': modifications,
        'seq': seq,
        'pro_errors': errors,
    }
def gen_bpform(protein, pro_ids_to_bpform_monomers, monomer_codes,
               apply_processing=True, apply_modifications=True, include_annotations=True):
    """ Generate BpForm for a modified protein in PRO

    Args:
        protein (:obj:`dict`): term for modified protein
        pro_ids_to_bpform_monomers (:obj:`dict`): dictionary which maps ids of monomeric forms
            used by PRO to monomeric forms in the BpForms protein alphabet
        monomer_codes (:obj:`dict`): dictionary that maps monomers to their codes in the alphabet
        apply_processing (:obj:`bool`, optional): if :obj:`True`, include processing in proteoform
        apply_modifications (:obj:`bool`, optional): if :obj:`True`, include modifications in proteoform
        include_annotations (:obj:`bool`, optional): if :obj:`True`, include metadata about modified monomers

    Returns:
        :obj:`bpforms.ProteinForm`: BpForm for a term in PRO
    """
    form = bpforms.ProteinForm()
    monomers = bpforms.protein_alphabet.monomers
    # generate BpForm for unmodified sequence
    for base in protein['seq']:
        form.seq.append(monomers[base])
    # apply processing: keep only the retained ranges and remap each
    # modification position from full-sequence to processed-sequence coordinates
    modifications = copy.deepcopy(protein['modifications'])
    seq = protein['seq']
    if apply_processing and protein['processing']:
        procesed_seq = []
        seq = ''
        for processing in protein['processing']:
            procesed_seq.extend(form.seq[processing['start']-1:processing['end']])
            seq += protein['seq'][processing['start']-1:processing['end']]
        form.seq = procesed_seq
        for modification in modifications:
            modification['processed_positions'] = []
            for position in modification['positions']:
                seq_len = 0
                processed_position = None
                for processing in protein['processing']:
                    if position >= processing['start'] and position <= processing['end']:
                        processed_position = seq_len + position - processing['start'] + 1
                        break
                    seq_len += processing['end'] - processing['start'] + 1
                # positions falling in removed regions are silently dropped
                if processed_position is not None:
                    modification['processed_positions'].append(processed_position)
    else:
        for modification in modifications:
            modification['processed_positions'] = modification['positions']
    # apply modifications; `concrete` tracks whether every modification could
    # be placed at an exact position
    if apply_modifications:
        concrete = True
        protein['modified_errors'] = []
        for modification in modifications:
            monomer = pro_ids_to_bpform_monomers[modification['monomer']]['mod']
            origin = pro_ids_to_bpform_monomers[modification['monomer']]['origin']
            # identifier namespace from the monomer id prefix
            if modification['monomer'].startswith('CHEBI:'):
                mod_ns = 'chebi'
            elif modification['monomer'].startswith('MOD:'):
                mod_ns = 'mod'
            elif modification['monomer'].startswith('PR:'):
                mod_ns = 'pr'
            elif modification['monomer'].startswith('UniCarbKB:'):
                mod_ns = 'unicarbkb'
            else:
                raise ValueError('Unsupported identifier {}'.format(modification['monomer']))
            if modification['monomer'] == 'PR:000026291':
                # special case: clone the annotated residue's monomer and tag
                # it with the PRO identifier
                if include_annotations:
                    monomer = bpforms.Monomer().from_dict(
                        monomers[modification['residue']].to_dict(
                            alphabet=bpforms.protein_alphabet),
                        alphabet=bpforms.protein_alphabet)
                else:
                    monomer = bpforms.Monomer()
                monomer.id = None
                monomer.name = None
                monomer.synonyms = []
                monomer.identifiers = [bpforms.Identifier('pr', modification['monomer'])]
                monomer.comments = None
            elif modification['monomer'].startswith('CHEBI:'):
                if include_annotations:
                    monomer = bpforms.Monomer().from_dict(
                        monomers[modification['residue']].to_dict(
                            alphabet=bpforms.protein_alphabet),
                        alphabet=bpforms.protein_alphabet)
                else:
                    monomer = bpforms.Monomer()
                monomer.id = None
                monomer.name = None
                monomer.synonyms = []
                monomer.identifiers = [bpforms.Identifier('chebi', modification['monomer'])]
                monomer.comments = None
            elif monomer is None:
                # unmapped monomer: represent it only by its identifier
                concrete = False
                monomer = bpforms.Monomer(
                    identifiers=[bpforms.Identifier(mod_ns, modification['monomer'])])
            if modification['positions']:
                # exact positions known: substitute in place, but only if the
                # site still carries the expected canonical residue
                for position in modification['processed_positions']:
                    if form.seq[position - 1] == monomers[seq[position - 1]]:
                        if monomer not in bpforms.protein_alphabet.monomers.values():
                            monomer.base_monomers = [form.seq[position - 1]]
                        form.seq[position - 1] = monomer
                    else:
                        protein['modified_errors'].append('Unable to set monomeric form at position {}'.format(
                            position))
            elif modification['residue']:
                # residue known but position unknown: annotate a position range
                concrete = False
                if include_annotations:
                    monomer2 = bpforms.Monomer().from_dict(
                        monomer.to_dict(
                            alphabet=bpforms.protein_alphabet),
                        alphabet=bpforms.protein_alphabet)
                else:
                    monomer2 = bpforms.Monomer()
                monomer2.id = None
                monomer2.name = None
                monomer2.synonyms = []
                monomer2.identifiers = [bpforms.Identifier(mod_ns, modification['monomer'])]
                # NOTE(review): `modification['positions']` is a list, so this
                # alphabet lookup always yields None; likely meant
                # `modification['residue']` — confirm
                monomer2.base_monomers = [bpforms.protein_alphabet.monomers.get(modification['positions'])]
                monomer2.start_position = seq.find(modification['residue']) + 1
                monomer2.end_position = seq.rfind(modification['residue']) + 1
                set_monomer = False
                for i_monomer in range(monomer2.start_position, monomer2.end_position + 1):
                    if form.seq[i_monomer - 1] == monomers[seq[i_monomer - 1]]:
                        set_monomer = True
                        form.seq[i_monomer - 1] = monomer2
                        break
                if not set_monomer:
                    protein['modified_errors'].append('Unable to set monomeric form')
            else:
                # neither residue nor position known: infer a feasible range
                # from the monomer's canonical code or its origin residues
                concrete = False
                canonical_code = monomer.get_canonical_code(monomer_codes)
                if include_annotations:
                    monomer2 = bpforms.Monomer().from_dict(
                        monomer.to_dict(
                            alphabet=bpforms.protein_alphabet),
                        alphabet=bpforms.protein_alphabet)
                else:
                    monomer2 = bpforms.Monomer()
                monomer2.id = None
                monomer2.name = None
                monomer2.synonyms = []
                monomer2.identifiers = [bpforms.Identifier(mod_ns, modification['monomer'])]
                monomer2.monomers_position = [
                    bpforms.protein_alphabet.monomers.get(code) for code in origin]
                if canonical_code and canonical_code != '?':
                    start_position = seq.find(canonical_code) + 1
                    end_position = seq.rfind(canonical_code) + 1
                    if start_position == 0:
                        protein['modified_errors'].append('Sequence does not contain residue {} for modification {}'.format(
                            canonical_code, modification['monomer']))
                    else:
                        monomer2.start_position = start_position
                        monomer2.end_position = end_position
                elif origin:
                    # narrowest range covering any occurrence of an origin residue
                    start_position = float('inf')
                    end_position = -float('inf')
                    for base in origin:
                        start_pos = seq.find(base) + 1
                        if start_pos > 0:
                            start_position = min(start_position, start_pos)
                        end_pos = seq.rfind(base) + 1
                        if end_pos > 0:
                            end_position = max(end_position, end_pos)
                    if numpy.isinf(start_position):
                        protein['modified_errors'].append('Sequence does not contain residues {} for modification {}'.format(
                            ', '.join(origin), modification['monomer']))
                    else:
                        monomer2.start_position = start_position
                        monomer2.end_position = end_position
                else:
                    monomer2.start_position = 1
                    monomer2.end_position = len(seq)
                if monomer2.start_position:
                    set_monomer = False
                    for i_monomer in range(monomer2.start_position, monomer2.end_position + 1):
                        if form.seq[i_monomer - 1] == monomers[seq[i_monomer - 1]]:
                            monomer2.base_monomers = [bpforms.protein_alphabet.monomers.get(seq[i_monomer - 1])]
                            form.seq[i_monomer - 1] = monomer2
                            set_monomer = True
                            break
                    if not set_monomer:
                        protein['modified_errors'].append('Unable to set monomeric form')
    # crosslinks: adjacent processed ranges imply a (disulfide) crosslink;
    # non-adjacent ranges imply an internal deletion
    if protein['processing']:
        xlinks = []  # NOTE(review): assigned but never used — confirm it can be removed
        seq_len = 0
        protein['crosslinks'] = []
        protein['deletions'] = []
        for left, right in zip(protein['processing'][0:-1], protein['processing'][1:]):
            seq_len += left['end'] - left['start'] + 1
            i_left = seq_len
            i_right = i_left + 1
            if left['end'] + 1 == right['start']:
                protein['crosslinks'].append(((left['end'], protein['seq'][left['end']-1]),
                                              (right['start'], protein['seq'][right['start'] - 1])))
            else:
                protein['deletions'].append((left['end'] + 1, right['start'] - 1))
            if left['end'] + 1 != right['start']:
                continue
            # err = False
            # if protein['seq'][left['end'] - 1] != 'C' or form.seq[i_left - 1] != bpforms.protein_alphabet.monomers.C:
            #     err = True
            #     protein['modified_errors'].append('Disulfide bond site {}{} != C'.format(
            #         protein['seq'][left['end'] - 1], left['end']))
            # if protein['seq'][right['start'] - 1] != 'C' or form.seq[i_right - 1] != bpforms.protein_alphabet.monomers.C:
            #     err = True
            #     protein['modified_errors'].append('Disulfide bond site {}{} != C'.format(
            #         protein['seq'][right['start'] - 1], right['start']))
            #
            # if err:
            #     continue
            concrete = False
            i_left = '{}-{}'.format(seq_len - (left['end'] - left['start'] + 1) + 1, seq_len)
            i_right = '{}-{}'.format(seq_len + 1, seq_len + (right['end'] - right['start'] + 1))
            if apply_modifications:
                form.crosslinks.add(bpforms.Bond(
                    #l_bond_atoms=[bpforms.Atom(bpforms.Monomer, 'S', position=11, monomer=i_left)],
                    #r_bond_atoms=[bpforms.Atom(bpforms.Monomer, 'S', position=11, monomer=i_right)],
                    #l_displaced_atoms=[bpforms.Atom(bpforms.Monomer, 'H', position=11, monomer=i_left)],
                    #r_displaced_atoms=[bpforms.Atom(bpforms.Monomer, 'H', position=11, monomer=i_right)],
                    comments='The polymer contains a disulfide bond between the ranges {} and {}'.format(i_left, i_right),
                ))
    # validate
    if apply_modifications:
        protein['modified_concrete'] = concrete
        protein['modified_errors'].extend(form.validate())
    # return proteoform represented with BpForms
    return form
def plot_modifications(proteins, organism='Homo sapiens', fig_filename=OUT_FIG_FILENAME):
    """ Plot a summary of the modifications in PRO

    Args:
        proteins (:obj:`list` of :obj:`dict`): entries in PRO ontology
        organism (:obj:`str`, optional): organism to analyze
        fig_filename (:obj:`str`, optional): path to save analysis
    """
    # tally modifications per canonical residue and per modified monomeric
    # form, restricted to entries from the requested organism that have a
    # modified sequence
    canonical_code_freq = {}
    code_freq = {}
    for entry in proteins:
        organism_ok = organism is None or entry.get('organism', None) == organism
        if not (organism_ok and entry.get('modified_seq', None)):
            continue
        for mod in entry['modifications']:
            if not (mod['residue'] and mod['monomer']):
                continue
            # an annotation without explicit positions still counts once
            weight = max(1, len(mod['positions']))
            canonical_code_freq[mod['residue']] = canonical_code_freq.get(mod['residue'], 0) + weight
            code_freq[mod['monomer']] = code_freq.get(mod['monomer'], 0) + weight

    pyplot.style.use('ggplot')
    fig, axes = pyplot.subplots(nrows=1, ncols=2, gridspec_kw={'width_ratios': [1, 4]})
    fig.set_size_inches(9.3, 1.5)

    plot_codes(canonical_code_freq,
               'Frequency of modifications',
               axes[0], ignore_canonical=False)
    plot_codes(code_freq,
               'Frequency of modified monomeric forms',
               axes[1], ignore_canonical=True)

    fig.savefig(fig_filename, transparent=True,
                bbox_inches=matplotlib.transforms.Bbox([[0.69, -0.5], [8.35, 1.5]]))
    pyplot.close(fig)
def plot_codes(code_freq, title, axis, ignore_canonical=False):
    """ Render a bar chart of relative frequencies (%) onto a matplotlib axis.

    Args:
        code_freq (:obj:`dict`): map from code/id to its count
        title (:obj:`str`): axis title
        axis: matplotlib axis to draw on
        ignore_canonical (:obj:`bool`, optional): if :obj:`True`, skip the
            codes 'A', 'C', 'G', 'U'
    """
    # NOTE(review): 'A', 'C', 'G', 'U' are RNA canonical codes; for this
    # protein script the filter likely never matches the monomer ids (e.g.
    # 'MOD:...') it is applied to — confirm intent
    id_freqs = []
    for code, count in code_freq.items():
        if ignore_canonical and code in ['A', 'C', 'G', 'U']:
            continue
        id_freqs.append((code, count))
    id_freqs.sort()
    # bar positions and counts normalized to percentages
    y_pos = numpy.arange(len(id_freqs))
    freq = numpy.array([id_freq[-1] for id_freq in id_freqs])
    freq = freq / numpy.sum(freq) * 100.
    # NOTE(review): a dict is passed to set_xticklabels, which iterates its
    # keys (the ids) — works, but a list of ids would be clearer; `id` also
    # shadows the builtin
    x_tick_labels = {id: y_pos for y_pos, (id, _) in enumerate(id_freqs)}
    axis.bar(y_pos, freq, align='center', alpha=0.5)
    axis.set_xticks(y_pos)
    axis.set_xticklabels(x_tick_labels, rotation=270, fontsize=6, fontfamily='Raleway')
    axis.set_ylabel('Frequency (%)', fontdict={
        'fontsize': 10,
        'fontweight': 'regular',
        'fontfamily': 'Raleway',
    })
    axis.set_title(title, fontdict={
        'fontsize': 10,
        'fontweight': 'regular',
        'fontfamily': 'Raleway',
    })
    axis.set_xlim((-0.75, len(id_freqs) - 0.25))
| [
"csv.DictReader",
"Bio.Seq.Seq",
"numpy.array",
"bpforms.protein_alphabet.monomers.values",
"copy.deepcopy",
"requests_cache.core.CachedSession",
"matplotlib.pyplot.style.use",
"bpforms.protein_alphabet.monomers.items",
"matplotlib.pyplot.close",
"os.path.isdir",
"os.mkdir",
"Bio.SeqIO.write",... | [((568, 615), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_nonreasoned.obo"""'], {}), "('examples', 'pro_nonreasoned.obo')\n", (580, 615), False, 'import os\n'), ((634, 681), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_nonreasoned.pkl"""'], {}), "('examples', 'pro_nonreasoned.pkl')\n", (646, 681), False, 'import os\n'), ((700, 744), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input.in.tsv"""'], {}), "('examples', 'pro_input.in.tsv')\n", (712, 744), False, 'import os\n'), ((783, 827), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro.monomers.csv"""'], {}), "('examples', 'pro.monomers.csv')\n", (795, 827), False, 'import os\n'), ((981, 1026), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input.out.pkl"""'], {}), "('examples', 'pro_input.out.pkl')\n", (993, 1026), False, 'import os\n'), ((1051, 1098), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input.out.2.pkl"""'], {}), "('examples', 'pro_input.out.2.pkl')\n", (1063, 1098), False, 'import os\n'), ((1118, 1163), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input.out.tsv"""'], {}), "('examples', 'pro_input.out.tsv')\n", (1130, 1163), False, 'import os\n'), ((1185, 1228), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input.fasta"""'], {}), "('examples', 'pro_input.fasta')\n", (1197, 1228), False, 'import os\n'), ((1248, 1289), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input.svg"""'], {}), "('examples', 'pro_input.svg')\n", (1260, 1289), False, 'import os\n'), ((1314, 1361), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input_structure"""'], {}), "('examples', 'pro_input_structure')\n", (1326, 1361), False, 'import os\n'), ((1380, 1421), 'os.path.join', 'os.path.join', (['"""examples"""', '"""pro_input_viz"""'], {}), "('examples', 'pro_input_viz')\n", (1392, 1421), False, 'import os\n'), ((1436, 1467), 'os.path.join', 'os.path.join', 
(['"""examples"""', '"""pro"""'], {}), "('examples', 'pro')\n", (1448, 1467), False, 'import os\n'), ((1478, 1564), 'requests_cache.core.CachedSession', 'requests_cache.core.CachedSession', (['cache_name'], {'backend': '"""sqlite"""', 'expire_after': 'None'}), "(cache_name, backend='sqlite',\n expire_after=None)\n", (1511, 1564), False, 'import requests_cache\n'), ((1603, 1647), 'requests.adapters.HTTPAdapter', 'requests.adapters.HTTPAdapter', ([], {'max_retries': '(5)'}), '(max_retries=5)\n', (1632, 1647), False, 'import requests\n'), ((6201, 6242), 'bpforms.protein_alphabet.monomers.items', 'bpforms.protein_alphabet.monomers.items', ([], {}), '()\n', (6240, 6242), False, 'import bpforms\n'), ((14665, 14711), 'Bio.SeqIO.write', 'SeqIO.write', (['seqs', 'out_fasta_filename', '"""fasta"""'], {}), "(seqs, out_fasta_filename, 'fasta')\n", (14676, 14711), False, 'from Bio import SeqIO\n'), ((24825, 24846), 'bpforms.ProteinForm', 'bpforms.ProteinForm', ([], {}), '()\n', (24844, 24846), False, 'import bpforms\n'), ((25059, 25098), 'copy.deepcopy', 'copy.deepcopy', (["protein['modifications']"], {}), "(protein['modifications'])\n", (25072, 25098), False, 'import copy\n'), ((37373, 37399), 'matplotlib.pyplot.style.use', 'pyplot.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (37389, 37399), False, 'from matplotlib import pyplot\n'), ((37416, 37487), 'matplotlib.pyplot.subplots', 'pyplot.subplots', ([], {'nrows': '(1)', 'ncols': '(2)', 'gridspec_kw': "{'width_ratios': [1, 4]}"}), "(nrows=1, ncols=2, gridspec_kw={'width_ratios': [1, 4]})\n", (37431, 37487), False, 'from matplotlib import pyplot\n'), ((37917, 37934), 'matplotlib.pyplot.close', 'pyplot.close', (['fig'], {}), '(fig)\n', (37929, 37934), False, 'from matplotlib import pyplot\n'), ((38255, 38305), 'numpy.array', 'numpy.array', (['[id_freq[-1] for id_freq in id_freqs]'], {}), '([id_freq[-1] for id_freq in id_freqs])\n', (38266, 38305), False, 'import numpy\n'), ((4189, 4224), 'os.path.isfile', 'os.path.isfile', 
(['out_pickle_filename'], {}), '(out_pickle_filename)\n', (4203, 4224), False, 'import os\n'), ((4994, 5031), 'csv.DictReader', 'csv.DictReader', (['file'], {'dialect': '"""excel"""'}), "(file, dialect='excel')\n", (5008, 5031), False, 'import csv\n'), ((7244, 7280), 'os.path.isdir', 'os.path.isdir', (['out_structure_dirname'], {}), '(out_structure_dirname)\n', (7257, 7280), False, 'import os\n'), ((7290, 7321), 'os.mkdir', 'os.mkdir', (['out_structure_dirname'], {}), '(out_structure_dirname)\n', (7298, 7321), False, 'import os\n'), ((7334, 7364), 'os.path.isdir', 'os.path.isdir', (['out_viz_dirname'], {}), '(out_viz_dirname)\n', (7347, 7364), False, 'import os\n'), ((7374, 7399), 'os.mkdir', 'os.mkdir', (['out_viz_dirname'], {}), '(out_viz_dirname)\n', (7382, 7399), False, 'import os\n'), ((7412, 7449), 'os.path.isfile', 'os.path.isfile', (['out_pickle_filename_2'], {}), '(out_pickle_filename_2)\n', (7426, 7449), False, 'import os\n'), ((11446, 11483), 'csv.writer', 'csv.writer', (['file'], {'dialect': '"""excel-tab"""'}), "(file, dialect='excel-tab')\n", (11456, 11483), False, 'import csv\n'), ((15477, 15505), 'os.path.isfile', 'os.path.isfile', (['obo_filename'], {}), '(obo_filename)\n', (15491, 15505), False, 'import os\n'), ((15526, 15546), 'requests.get', 'requests.get', (['IN_URL'], {}), '(IN_URL)\n', (15538, 15546), False, 'import requests\n'), ((15718, 15746), 'os.path.isfile', 'os.path.isfile', (['pkl_filename'], {}), '(pkl_filename)\n', (15732, 15746), False, 'import os\n'), ((17525, 17624), 'csv.DictReader', 'csv.DictReader', (['file'], {'fieldnames': "('id', 'category', 'synonym_type', 'seq')", 'dialect': '"""excel-tab"""'}), "(file, fieldnames=('id', 'category', 'synonym_type', 'seq'),\n dialect='excel-tab')\n", (17539, 17624), False, 'import csv\n'), ((19335, 19375), 'xml.etree.ElementTree.fromstring', 'ElementTree.fromstring', (['response.content'], {}), '(response.content)\n', (19357, 19375), False, 'from xml.etree import ElementTree\n'), ((20131, 
20199), 're.match', 're.match', (['"""^(\\\\?|\\\\d+)\\\\-(\\\\?|\\\\d+)(, |$)"""', 'processing_modifications'], {}), "('^(\\\\?|\\\\d+)\\\\-(\\\\?|\\\\d+)(, |$)', processing_modifications)\n", (20139, 20199), False, 'import re\n'), ((4679, 4713), 'pickle.dump', 'pickle.dump', (['parsed_proteins', 'file'], {}), '(parsed_proteins, file)\n', (4690, 4713), False, 'import pickle\n'), ((4862, 4879), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (4873, 4879), False, 'import pickle\n'), ((11192, 11226), 'pickle.dump', 'pickle.dump', (['parsed_proteins', 'file'], {}), '(parsed_proteins, file)\n', (11203, 11226), False, 'import pickle\n'), ((11323, 11340), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (11334, 11340), False, 'import pickle\n'), ((16735, 16762), 'pickle.dump', 'pickle.dump', (['proteins', 'file'], {}), '(proteins, file)\n', (16746, 16762), False, 'import pickle\n'), ((16881, 16898), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (16892, 16898), False, 'import pickle\n'), ((37856, 37911), 'matplotlib.transforms.Bbox', 'matplotlib.transforms.Bbox', (['[[0.69, -0.5], [8.35, 1.5]]'], {}), '([[0.69, -0.5], [8.35, 1.5]])\n', (37882, 37911), False, 'import matplotlib\n'), ((38324, 38339), 'numpy.sum', 'numpy.sum', (['freq'], {}), '(freq)\n', (38333, 38339), False, 'import numpy\n'), ((5122, 5186), 'bpforms.protein_alphabet.monomers.get', 'bpforms.protein_alphabet.monomers.get', (["row['BpForms code']", 'None'], {}), "(row['BpForms code'], None)\n", (5159, 5186), False, 'import bpforms\n'), ((14510, 14538), 'Bio.Seq.Seq', 'Seq', (["protein['modified_seq']"], {}), "(protein['modified_seq'])\n", (14513, 14538), False, 'from Bio.Seq import Seq\n'), ((27504, 27521), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (27519, 27521), False, 'import bpforms\n'), ((27669, 27718), 'bpforms.Identifier', 'bpforms.Identifier', (['"""pr"""', "modification['monomer']"], {}), "('pr', modification['monomer'])\n", (27687, 27718), False, 'import 
bpforms\n'), ((10671, 10724), 'os.path.join', 'os.path.join', (['out_viz_dirname', "(protein['id'] + '.svg')"], {}), "(out_viz_dirname, protein['id'] + '.svg')\n", (10683, 10724), False, 'import os\n'), ((28165, 28182), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (28180, 28182), False, 'import bpforms\n'), ((28330, 28382), 'bpforms.Identifier', 'bpforms.Identifier', (['"""chebi"""', "modification['monomer']"], {}), "('chebi', modification['monomer'])\n", (28348, 28382), False, 'import bpforms\n'), ((29604, 29621), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (29619, 29621), False, 'import bpforms\n'), ((29773, 29824), 'bpforms.Identifier', 'bpforms.Identifier', (['mod_ns', "modification['monomer']"], {}), "(mod_ns, modification['monomer'])\n", (29791, 29824), False, 'import bpforms\n'), ((29868, 29932), 'bpforms.protein_alphabet.monomers.get', 'bpforms.protein_alphabet.monomers.get', (["modification['positions']"], {}), "(modification['positions'])\n", (29905, 29932), False, 'import bpforms\n'), ((31000, 31017), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (31015, 31017), False, 'import bpforms\n'), ((31169, 31220), 'bpforms.Identifier', 'bpforms.Identifier', (['mod_ns', "modification['monomer']"], {}), "(mod_ns, modification['monomer'])\n", (31187, 31220), False, 'import bpforms\n'), ((31289, 31332), 'bpforms.protein_alphabet.monomers.get', 'bpforms.protein_alphabet.monomers.get', (['code'], {}), '(code)\n', (31326, 31332), False, 'import bpforms\n'), ((27233, 27250), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (27248, 27250), False, 'import bpforms\n'), ((28855, 28897), 'bpforms.protein_alphabet.monomers.values', 'bpforms.protein_alphabet.monomers.values', ([], {}), '()\n', (28895, 28897), False, 'import bpforms\n'), ((32468, 32495), 'numpy.isinf', 'numpy.isinf', (['start_position'], {}), '(start_position)\n', (32479, 32495), False, 'import numpy\n'), ((27894, 27911), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), 
'()\n', (27909, 27911), False, 'import bpforms\n'), ((29358, 29375), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (29373, 29375), False, 'import bpforms\n'), ((30754, 30771), 'bpforms.Monomer', 'bpforms.Monomer', ([], {}), '()\n', (30769, 30771), False, 'import bpforms\n'), ((28569, 28620), 'bpforms.Identifier', 'bpforms.Identifier', (['mod_ns', "modification['monomer']"], {}), "(mod_ns, modification['monomer'])\n", (28587, 28620), False, 'import bpforms\n'), ((33291, 33348), 'bpforms.protein_alphabet.monomers.get', 'bpforms.protein_alphabet.monomers.get', (['seq[i_monomer - 1]'], {}), '(seq[i_monomer - 1])\n', (33328, 33348), False, 'import bpforms\n')] |
from obspy.core.event import Event
from obspy.geodetics import gps2dist_azimuth
from obspy.geodetics import kilometer2degrees
from obspy import UTCDateTime as utct
from obspy.taup import TauPyModel
import obspy
import instaseis
from SS_MTI import PhaseTracer
from SS_MTI import GreensFunctions
class EventObj:
    """Container for a synthetic (Mars) seismic event.

    Wraps an ObsPy :class:`~obspy.core.event.Event`, computes the
    source-receiver geometry on a planetary sphere/ellipsoid, and can attach
    theoretical phase picks (TauP) and instaseis synthetic waveforms.
    """

    @staticmethod
    def Get_location(la_s, lo_s, la_r, lo_r, radius=3389.5, flattening=0):
        """Return (epicentral distance [deg], azimuth [deg], back-azimuth [deg]).

        :param la_s: source latitude
        :param lo_s: source longitude
        :param la_r: receiver latitude
        :param lo_r: receiver longitude
        :param radius: planetary radius in km (default: Mars)
        :param flattening: planetary flattening (0 = spherical body)
        """
        dist, az, baz = gps2dist_azimuth(
            lat1=la_s, lon1=lo_s, lat2=la_r, lon2=lo_r, a=radius, f=flattening
        )
        epi = kilometer2degrees(dist, radius=radius)
        return epi, az, baz

    def __init__(
        self,
        or_time: utct = utct("2020-3-10T12:00:00"),
        lat_src: float = 10.99032013,
        lon_src: float = 170,
        lat_rec: float = 4.502384,
        lon_rec: float = 135.623447,
        depth: float = 45.0,
        name: str = "Test_Event",
    ):
        """
        Create a seismic event.

        :param or_time: origin time of the event
        :param lat_src: latitude of the source
        :param lon_src: longitude of the source
        :param lat_rec: latitude of the receiver
        :param lon_rec: longitude of the receiver
        :param depth: source depth in km
        :param name: event name
        """
        self.event = Event()
        self.event.latitude = lat_src
        self.event.longitude = lon_src
        self.event.depth = depth
        self.event.name = name
        self.lat_rec = lat_rec
        self.lon_rec = lon_rec

        # Source-receiver geometry derived from the coordinates above.
        epi, az, baz = EventObj.Get_location(
            self.event.latitude, self.event.longitude, self.lat_rec, self.lon_rec
        )
        self.event.distance = epi
        print(self.event.distance)  # debug output retained from original
        self.event.az = az
        self.event.baz = baz
        self.event.origin_time = or_time

    def add_picks(self, taup_model: TauPyModel, depth: float, phases: [str] = ("P", "S")):
        """Attach theoretical arrival-time picks to the event.

        :param taup_model: TauP velocity model used for the travel times
        :param depth: source depth in km
        :param phases: seismic phases to pick; a tuple default replaces the
            original mutable list default (shared-mutable-default pitfall)
        """
        self.event.picks = {}
        for phase in phases:
            self.event.picks[phase] = utct(
                self.event.origin_time
                + PhaseTracer.get_traveltime(
                    model=taup_model, phase=phase, depth=depth, distance=self.event.distance
                )
            )

    def add_waveforms(
        self,
        instaseis_db_path: str,
        focal_mech: [float],
        M0: float = None,
        dt: float = 0.05,
        components: str = "ZRT",
        kind: str = "displacement",
        noise: bool = False,
    ):
        """
        Add waveforms to event object using instaseis.

        :param instaseis_db_path: path or url to instaseis database
        :param focal_mech: strike,dip,rake or m_rr, m_tt, m_pp, m_rt, m_rp, m_tp
        :param M0: scalar moment, only necessary when focal_mech is strike,dip,rake
        :param dt: sampling interval of the synthetics in seconds
        :param components: components of the seismogram (ZRT, ZNE, LQT)
        :param kind: seismogram kind ("displacement", "velocity", ...)
        :param noise: if True, real Martian noise is added to the seismogram
        """
        assert (M0 is None and len(focal_mech) == 6) or (
            M0 is not None and len(focal_mech) == 3
        ), (
            "focal_mech length is incorrect. "
            "If you specify M0, focal_mech is [strike,dip,rake]. "
            "Otherwise focal_mech is [m_rr, m_tt, m_pp, m_rt, m_rp, m_tp]"
        )

        receiver = instaseis.Receiver(
            latitude=self.lat_rec,
            longitude=self.lon_rec,
            network="XB",
            station="ELYSE",
            location="02",
        )
        db = instaseis.open_db(instaseis_db_path)
        if len(focal_mech) == 3:
            # Convert strike/dip/rake + scalar moment into the six
            # moment-tensor elements expected by instaseis.
            focal_mech = GreensFunctions.convert_SDR(
                focal_mech[0], focal_mech[1], focal_mech[2], M0
            )

        m_rr = focal_mech[0]
        m_tt = focal_mech[1]
        m_pp = focal_mech[2]
        m_rt = focal_mech[3]
        m_rp = focal_mech[4]
        m_tp = focal_mech[5]
        print(m_rr, m_tt, m_pp, m_rt, m_rp, m_tp)  # debug output retained

        src = instaseis.Source(
            latitude=self.event.latitude,
            longitude=self.event.longitude,
            depth_in_m=self.event.depth * 1000,
            m_rr=m_rr,
            m_tt=m_tt,
            m_pp=m_pp,
            m_rt=m_rt,
            m_rp=m_rp,
            m_tp=m_tp,
            time_shift=None,
            sliprate=None,
            dt=None,
            origin_time=self.event.origin_time,
        )

        if components == "LQT":
            st_obs = db.get_seismograms(
                source=src, receiver=receiver, components=components, kind=kind, dt=dt
            )
            st_obs.rotate(method="RT->NE", back_azimuth=self.event.baz)
        else:
            st_obs = db.get_seismograms(
                source=src, receiver=receiver, components=components, kind=kind, dt=dt
            )

        # Rename instaseis channel codes (BXZ...) to SEIS-style (BHZ...).
        st_obs[0].stats.channel = st_obs[0].stats.channel.replace("X", "H")
        st_obs[1].stats.channel = st_obs[1].stats.channel.replace("X", "H")
        st_obs[2].stats.channel = st_obs[2].stats.channel.replace("X", "H")

        st_obs.trim(starttime=self.event.origin_time, endtime=self.event.origin_time + 800.0)

        if noise:
            # Path = "/home/nienke/Data_2020/Noise/"
            Path = "/home/nienke/Documents/Research/Data/Noise/"
            File_names = [
                "XB.02.ELYSE.BHE-2019.274T0809-2019.274T0920",
                "XB.02.ELYSE.BHN-2019.274T0809-2019.274T0920",
                "XB.02.ELYSE.BHZ-2019.274T0809-2019.274T0920",
            ]
            st_noise = obspy.Stream()
            for file in File_names:
                tr = obspy.read(Path + file)
                st_noise += tr

            if components == "LQT":
                raise ValueError("LQT orientation Not implemented yet")
                # TODO: implement LQT orientation
            else:
                st_noise.rotate(method="NE->RT", back_azimuth=self.event.baz)

            for trace in st_obs:
                chan = trace.stats.channel
                desired_npts = len(trace.data)
                noise_trace = st_noise.select(channel=chan)[0]
                # Skip the first 1200 s of the noise record, then take exactly
                # as many samples as the synthetic trace has. Renamed from
                # `noise` to stop shadowing the boolean parameter.
                noise_data = noise_trace.data[int(1200 / dt) : int(1200 / dt) + desired_npts]
                trace.data += noise_data

        self.event.waveforms_VBB = st_obs
| [
"obspy.read",
"obspy.Stream",
"obspy.geodetics.gps2dist_azimuth",
"instaseis.open_db",
"instaseis.Receiver",
"SS_MTI.PhaseTracer.get_traveltime",
"instaseis.Source",
"obspy.UTCDateTime",
"SS_MTI.GreensFunctions.convert_SDR",
"obspy.core.event.Event",
"obspy.geodetics.kilometer2degrees"
] | [((430, 519), 'obspy.geodetics.gps2dist_azimuth', 'gps2dist_azimuth', ([], {'lat1': 'la_s', 'lon1': 'lo_s', 'lat2': 'la_r', 'lon2': 'lo_r', 'a': 'radius', 'f': 'flattening'}), '(lat1=la_s, lon1=lo_s, lat2=la_r, lon2=lo_r, a=radius, f=\n flattening)\n', (446, 519), False, 'from obspy.geodetics import gps2dist_azimuth\n'), ((551, 589), 'obspy.geodetics.kilometer2degrees', 'kilometer2degrees', (['dist'], {'radius': 'radius'}), '(dist, radius=radius)\n', (568, 589), False, 'from obspy.geodetics import kilometer2degrees\n'), ((675, 701), 'obspy.UTCDateTime', 'utct', (['"""2020-3-10T12:00:00"""'], {}), "('2020-3-10T12:00:00')\n", (679, 701), True, 'from obspy import UTCDateTime as utct\n'), ((1058, 1065), 'obspy.core.event.Event', 'Event', ([], {}), '()\n', (1063, 1065), False, 'from obspy.core.event import Event\n'), ((3086, 3202), 'instaseis.Receiver', 'instaseis.Receiver', ([], {'latitude': 'self.lat_rec', 'longitude': 'self.lon_rec', 'network': '"""XB"""', 'station': '"""ELYSE"""', 'location': '"""02"""'}), "(latitude=self.lat_rec, longitude=self.lon_rec, network=\n 'XB', station='ELYSE', location='02')\n", (3104, 3202), False, 'import instaseis\n'), ((3283, 3319), 'instaseis.open_db', 'instaseis.open_db', (['instaseis_db_path'], {}), '(instaseis_db_path)\n', (3300, 3319), False, 'import instaseis\n'), ((3726, 3997), 'instaseis.Source', 'instaseis.Source', ([], {'latitude': 'self.event.latitude', 'longitude': 'self.event.longitude', 'depth_in_m': '(self.event.depth * 1000)', 'm_rr': 'm_rr', 'm_tt': 'm_tt', 'm_pp': 'm_pp', 'm_rt': 'm_rt', 'm_rp': 'm_rp', 'm_tp': 'm_tp', 'time_shift': 'None', 'sliprate': 'None', 'dt': 'None', 'origin_time': 'self.event.origin_time'}), '(latitude=self.event.latitude, longitude=self.event.\n longitude, depth_in_m=self.event.depth * 1000, m_rr=m_rr, m_tt=m_tt,\n m_pp=m_pp, m_rt=m_rt, m_rp=m_rp, m_tp=m_tp, time_shift=None, sliprate=\n None, dt=None, origin_time=self.event.origin_time)\n', (3742, 3997), False, 'import instaseis\n'), 
((3379, 3455), 'SS_MTI.GreensFunctions.convert_SDR', 'GreensFunctions.convert_SDR', (['focal_mech[0]', 'focal_mech[1]', 'focal_mech[2]', 'M0'], {}), '(focal_mech[0], focal_mech[1], focal_mech[2], M0)\n', (3406, 3455), False, 'from SS_MTI import GreensFunctions\n'), ((5268, 5282), 'obspy.Stream', 'obspy.Stream', ([], {}), '()\n', (5280, 5282), False, 'import obspy\n'), ((5341, 5364), 'obspy.read', 'obspy.read', (['(Path + file)'], {}), '(Path + file)\n', (5351, 5364), False, 'import obspy\n'), ((1828, 1932), 'SS_MTI.PhaseTracer.get_traveltime', 'PhaseTracer.get_traveltime', ([], {'model': 'taup_model', 'phase': 'phase', 'depth': 'depth', 'distance': 'self.event.distance'}), '(model=taup_model, phase=phase, depth=depth,\n distance=self.event.distance)\n', (1854, 1932), False, 'from SS_MTI import PhaseTracer\n')] |
from collections import defaultdict
import inspect
from typing import Callable
def api_overload(fn: Callable) -> Callable:
    """Register *fn* as an overload visible to the api generation scripts.

    Use this decorator instead of `typing.overload` when exposing overloads
    from `_impl`. You will need to suppress mypy errors using a
    `# type: ignore[no-redef]` comment.
    """
    module_dict = inspect.getmodule(fn).__dict__
    registry = module_dict.get("__overloads__")
    if registry is None:
        # First overload registered in this module: create the registry.
        registry = defaultdict(list)
        module_dict["__overloads__"] = registry
    registry[fn.__name__].append(fn)
    return fn
| [
"inspect.getmodule",
"collections.defaultdict"
] | [((397, 418), 'inspect.getmodule', 'inspect.getmodule', (['fn'], {}), '(fn)\n', (414, 418), False, 'import inspect\n'), ((546, 563), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (557, 563), False, 'from collections import defaultdict\n')] |
import argparse
import contextlib
import csv
import logging
import os
import random
import subprocess
import tempfile
from typing import Callable, Dict, Iterable, List
import numpy as np
import ray
from ray.experimental.raysort import constants
from ray.experimental.raysort import logging_utils
from ray.experimental.raysort import sortlib
from ray.experimental.raysort import tracing_utils
from ray.experimental.raysort.types import (
BlockInfo,
ByteCount,
RecordCount,
PartId,
PartInfo,
Path,
)
Args = argparse.Namespace
# ------------------------------------------------------------
# Parse Arguments
# ------------------------------------------------------------
def get_args(*args, **kwargs):
    """Parse command-line arguments and derive additional settings.

    Besides the raw flags, this computes:
    - ``input_part_size``: bytes handled by each map task,
    - ``num_rounds``: number of map/merge rounds,
    - ``mount_points``: local directories to spread data over,
    and enables all tasks when none was explicitly requested.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--ray_address",
        default="auto",
        type=str,
        help="if set to None, will launch a local Ray cluster",
    )
    parser.add_argument(
        "--total_data_size",
        default=1 * 1000 * 1024 * 1024 * 1024,
        type=ByteCount,
        help="total data size in bytes",
    )
    parser.add_argument(
        "--num_mappers",
        default=256,
        type=int,
        help="number of map tasks",
    )
    parser.add_argument(
        "--num_mappers_per_round",
        default=16,
        type=int,
        help="number of map tasks per first-stage merge tasks",
    )
    parser.add_argument(
        "--num_reducers",
        default=16,
        type=int,
        help="number of second-stage reduce tasks",
    )
    parser.add_argument(
        "--num_concurrent_rounds",
        default=4,
        type=int,
        help="max number of rounds of map/merge tasks in flight",
    )
    parser.add_argument(
        "--reducer_input_chunk",
        default=100 * 1024 * 1024,
        type=ByteCount,
        help="bytes to read from each file in reduce tasks",
    )
    parser.add_argument(
        "--skip_sorting",
        default=False,
        action="store_true",
        help="if set, no sorting is actually performed",
    )
    parser.add_argument(
        "--skip_input",
        default=False,
        action="store_true",
        help="if set, mappers will not read data from disk",
    )
    parser.add_argument(
        "--skip_output",
        default=False,
        action="store_true",
        help="if set, reducers will not write out results to disk",
    )
    # Which tasks to run?
    tasks_group = parser.add_argument_group(
        "tasks to run", "if no task is specified, will run all tasks"
    )
    tasks = ["generate_input", "sort", "validate_output"]
    for task in tasks:
        tasks_group.add_argument(f"--{task}", action="store_true")
    args = parser.parse_args(*args, **kwargs)
    # Derive additional arguments.
    args.input_part_size = ByteCount(args.total_data_size / args.num_mappers)
    # Mappers are submitted in equally sized rounds, so the counts must divide.
    assert args.num_mappers % args.num_mappers_per_round == 0
    args.num_rounds = int(args.num_mappers / args.num_mappers_per_round)
    args.mount_points = _get_mount_points()
    # If no tasks are specified, run all tasks.
    args_dict = vars(args)
    if not any(args_dict[task] for task in tasks):
        for task in tasks:
            args_dict[task] = True
    return args
def _get_mount_points():
default_ret = [tempfile.gettempdir()]
mnt = "/mnt"
if os.path.exists(mnt):
ret = [os.path.join(mnt, d) for d in os.listdir(mnt)]
if len(ret) > 0:
return ret
return default_ret
# ------------------------------------------------------------
# Generate Input
# ------------------------------------------------------------
def _part_info(args: Args, part_id: PartId, kind="input") -> PartInfo:
    """Describe partition *part_id* on this node, placed on a random mount point."""
    ip_address = ray.worker.global_worker.node_ip_address
    # Spread partitions across the available disks.
    mount = random.choice(args.mount_points)
    return PartInfo(part_id, ip_address, _get_part_path(mount, part_id, kind))
def _get_part_path(mnt: Path, part_id: PartId, kind="input") -> Path:
    """Return the on-disk path for a partition, creating its directory."""
    assert kind in {"input", "output", "temp"}
    directory = constants.DATA_DIR_FMT[kind].format(mnt=mnt)
    os.makedirs(directory, exist_ok=True)
    filename = constants.FILENAME_FMT[kind].format(part_id=part_id)
    return os.path.join(directory, filename)
@ray.remote
def generate_part(
    args: Args, part_id: PartId, size: RecordCount, offset: RecordCount
) -> PartInfo:
    """Run gensort to create one input partition on local disk.

    ``-b{offset}`` sets the starting record number, so all partitions
    together form one contiguous keyspace.
    """
    logging_utils.init()
    pinfo = _part_info(args, part_id)
    gensort_cmd = [constants.GENSORT_PATH, f"-b{offset}", f"{size}", pinfo.path]
    subprocess.run(gensort_cmd, check=True)
    logging.info(f"Generated input {pinfo}")
    return pinfo
def generate_input(args: Args):
    """Generate all input partitions in parallel and write the input manifest."""
    if args.skip_input:
        return
    size = constants.bytes_to_records(args.input_part_size)
    # Partition i starts at record offset i * size.
    tasks = [
        generate_part.remote(args, part_id, size, part_id * size)
        for part_id in range(args.num_mappers)
    ]
    # Sanity check: the partitions must exactly tile the total data size.
    assert args.num_mappers * size == constants.bytes_to_records(args.total_data_size), args
    logging.info(f"Generating {len(tasks)} partitions")
    parts = ray.get(tasks)
    with open(constants.INPUT_MANIFEST_FILE, "w") as fout:
        csv.writer(fout).writerows(parts)
# ------------------------------------------------------------
# Sort
# ------------------------------------------------------------
def _load_manifest(args: Args, path: Path) -> List[PartInfo]:
    """Read a manifest CSV into PartInfo records (dummy entries when input is skipped)."""
    if args.skip_input:
        return [PartInfo(i, None, None) for i in range(args.num_mappers)]
    with open(path) as fin:
        rows = csv.reader(fin)
        return [
            PartInfo(int(part_id), node, part_path) for part_id, node, part_path in rows
        ]
def _load_partition(args: Args, path: Path) -> np.ndarray:
    """Load one input partition as a uint8 array (random bytes when input is skipped)."""
    if not args.skip_input:
        return np.fromfile(path, dtype=np.uint8)
    random_bytes = np.random.bytes(args.input_part_size)
    # frombuffer returns a read-only view over the bytes object; copy so the
    # in-place sort can mutate the array.
    return np.frombuffer(random_bytes, dtype=np.uint8).copy()
def _dummy_sort_and_partition(
    part: np.ndarray, boundaries: List[int]
) -> List[BlockInfo]:
    """Split *part* into len(boundaries) equal-sized (offset, size) blocks
    without sorting — used when --skip_sorting is set.

    The last block may nominally extend past the end of *part*; downstream
    slicing clamps it.
    """
    num_blocks = len(boundaries)
    block_size = int(np.ceil(part.size / num_blocks))
    return [(i * block_size, block_size) for i in range(num_blocks)]
@ray.remote
@tracing_utils.timeit("map")
def mapper(
    args: Args, mapper_id: PartId, boundaries: List[int], path: Path
) -> List[np.ndarray]:
    """Load one input partition, sort it, and slice it into one block per reducer."""
    logging_utils.init()
    part = _load_partition(args, path)
    if args.skip_sorting:
        block_infos = _dummy_sort_and_partition(part, boundaries)
    else:
        block_infos = sortlib.sort_and_partition(part, boundaries)
    return [part[start : start + length] for start, length in block_infos]
def _dummy_merge(
num_blocks: int, _n: int, get_block: Callable[[int, int], np.ndarray]
) -> Iterable[np.ndarray]:
blocks = [((i, 0), get_block(i, 0)) for i in range(num_blocks)]
while len(blocks) > 0:
(m, d), block = blocks.pop(random.randrange(len(blocks)))
yield block
d_ = d + 1
block = get_block(m, d_)
if block is None:
continue
blocks.append(((m, d_), block))
def _merge_impl(
    args: Args,
    M: int,
    pinfo: PartInfo,
    get_block: Callable[[int, int], np.ndarray],
    skip_output=False,
):
    """Merge M sorted streams (exposed via *get_block*) into pinfo.path.

    :param args: parsed program arguments
    :param M: number of input streams
    :param pinfo: output partition descriptor; data is written to pinfo.path
    :param get_block: get_block(m, d) returns the d-th chunk of stream m,
        or None when stream m is exhausted
    :param skip_output: if True, consume the merged stream without writing
    :return: pinfo, for the caller's convenience
    """
    if args.skip_sorting:
        # BUG FIX: _dummy_merge takes (num_blocks, _n, get_block). The old
        # code called merge_fn(M, get_block) for both implementations, which
        # raised TypeError whenever --skip_sorting was set.
        merger = _dummy_merge(M, 0, get_block)
    else:
        merger = sortlib.merge_partitions(M, get_block)
    if skip_output:
        # Drain the merger to exercise the pipeline without any disk I/O.
        for datachunk in merger:
            del datachunk
    else:
        with open(pinfo.path, "wb") as fout:
            for datachunk in merger:
                fout.write(datachunk)
    return pinfo
# See worker_placement_groups() for why `num_cpus=0`.
@ray.remote(num_cpus=0, resources={"worker": 1})
@tracing_utils.timeit("merge")
def merge_mapper_blocks(
    args: Args, reducer_id: PartId, mapper_id: PartId, *blocks: List[np.ndarray]
) -> PartInfo:
    """First-stage merge: combine one round of mapper blocks into a temp file."""
    part_id = constants.merge_part_ids(reducer_id, mapper_id)
    pinfo = _part_info(args, part_id, kind="temp")
    num_blocks = len(blocks)

    def get_block(i, d):
        # Each mapper block is a single in-memory chunk: valid only at depth 0.
        if i < num_blocks and d == 0:
            return blocks[i]
        return None

    return _merge_impl(args, num_blocks, pinfo, get_block)
# See worker_placement_groups() for why `num_cpus=0`.
@ray.remote(num_cpus=0, resources={"worker": 1})
@tracing_utils.timeit("reduce")
def final_merge(
    args: Args, reducer_id: PartId, *merged_parts: List[PartInfo]
) -> PartInfo:
    """Second-stage merge: stream this reducer's temp file from every round
    into one sorted output partition."""
    num_parts = len(merged_parts)
    chunk_bytes = args.reducer_input_chunk

    def get_block(i, d):
        # Read the d-th fixed-size chunk of the i-th merged file; an empty
        # read signals end-of-file.
        chunk = np.fromfile(
            merged_parts[i].path,
            dtype=np.uint8,
            count=chunk_bytes,
            offset=d * chunk_bytes,
        )
        return chunk if chunk.size > 0 else None

    out_info = _part_info(args, reducer_id, "output")
    return _merge_impl(args, num_parts, out_info, get_block, args.skip_output)
def _node_res(node: str) -> Dict[str, float]:
return {"resources": {f"node:{node}": 1e-3}}
@contextlib.contextmanager
def worker_placement_groups(args: Args) -> List[ray.PlacementGroupID]:
    """Yield one placement group per reducer, each reserving a `worker` slot.

    To run tasks in a group, use
    `@ray.remote(num_cpus=0, resources={"worker": 1})`. Ray does not
    automatically reserve CPU resources, so tasks must specify `num_cpus=0`
    in order to run in a placement group. The groups are removed on exit.
    """
    groups = [
        ray.util.placement_group([{"worker": 1}]) for _ in range(args.num_reducers)
    ]
    # Block until every group is actually scheduled before yielding control.
    ray.get([group.ready() for group in groups])
    try:
        yield groups
    finally:
        for group in groups:
            ray.util.remove_placement_group(group)
@tracing_utils.timeit("sort", report_time=True)
def sort_main(args: Args):
    """Run the distributed sort: pipelined map/merge rounds, then final reduce.

    Each round maps `num_mappers_per_round` partitions and immediately merges
    their blocks per reducer; at most `num_concurrent_rounds` rounds are in
    flight at once. After all rounds, each reducer merges its per-round temp
    files into one output partition.
    """
    parts = _load_manifest(args, constants.INPUT_MANIFEST_FILE)
    assert len(parts) == args.num_mappers
    boundaries = sortlib.get_boundaries(args.num_reducers)
    mapper_opt = {
        "num_returns": args.num_reducers,
        "num_cpus": os.cpu_count() / args.num_concurrent_rounds,
    }  # Load balance across worker nodes by setting `num_cpus`.
    # merge_results[round, reducer] holds the ObjectRef of that round's merge.
    merge_results = np.empty((args.num_rounds, args.num_reducers), dtype=object)
    part_id = 0
    with worker_placement_groups(args) as pgs:
        for round in range(args.num_rounds):
            # Limit the number of in-flight rounds.
            num_extra_rounds = round - args.num_concurrent_rounds + 1
            if num_extra_rounds > 0:
                ray.wait(
                    [f for f in merge_results.flatten() if f is not None],
                    num_returns=num_extra_rounds * args.num_reducers,
                )
            # Submit map tasks.
            mapper_results = np.empty(
                (args.num_mappers_per_round, args.num_reducers), dtype=object
            )
            for _ in range(args.num_mappers_per_round):
                _, node, path = parts[part_id]
                m = part_id % args.num_mappers_per_round
                mapper_results[m, :] = mapper.options(**mapper_opt).remote(
                    args, part_id, boundaries, path
                )
                part_id += 1
            # Submit merge tasks.
            merge_results[round, :] = [
                merge_mapper_blocks.options(placement_group=pgs[r]).remote(
                    args, r, round, *mapper_results[:, r].tolist()
                )
                for r in range(args.num_reducers)
            ]
            # Delete local references to mapper results.
            mapper_results = None
        # Submit second-stage reduce tasks.
        reducer_results = [
            final_merge.options(placement_group=pgs[r]).remote(
                args, r, *merge_results[:, r].tolist()
            )
            for r in range(args.num_reducers)
        ]
        # Wait inside the `with` so the placement groups stay alive until
        # every reducer has finished.
        reducer_results = ray.get(reducer_results)
    if not args.skip_output:
        with open(constants.OUTPUT_MANIFEST_FILE, "w") as fout:
            writer = csv.writer(fout)
            writer.writerows(reducer_results)
    logging.info(ray.internal.internal_api.memory_summary(stats_only=True))
# ------------------------------------------------------------
# Validate Output
# ------------------------------------------------------------
def _run_valsort(args: List[str]):
    """Invoke valsort with *args*; raise RuntimeError if validation fails."""
    proc = subprocess.run([constants.VALSORT_PATH] + args, capture_output=True)
    if proc.returncode == 0:
        return
    logging.critical("\n" + proc.stderr.decode("ascii"))
    raise RuntimeError(f"Validation failed: {args}")
@ray.remote
def validate_part(path: Path):
    """Validate one output partition; return (size_in_bytes, checksum_bytes)."""
    logging_utils.init()
    checksum_path = path + ".sum"
    _run_valsort(["-o", checksum_path, path])
    logging.info(f"Validated output {path}")
    size = os.path.getsize(path)
    with open(checksum_path, "rb") as fin:
        checksum = fin.read()
    return size, checksum
def validate_output(args: Args):
    """Validate every output partition and the global sort order."""
    if args.skip_sorting or args.skip_output:
        return
    partitions = _load_manifest(args, constants.OUTPUT_MANIFEST_FILE)
    # Run each validation task on the node that owns the partition.
    tasks = [
        validate_part.options(**_node_res(node)).remote(path)
        for _, node, path in partitions
    ]
    logging.info(f"Validating {len(tasks)} partitions")
    results = ray.get(tasks)
    total = sum(size for size, _ in results)
    assert total == args.total_data_size, total - args.total_data_size
    all_checksum = b"".join(checksum for _, checksum in results)
    # valsort -s verifies that the concatenated per-partition checksums
    # describe one globally sorted dataset.
    with tempfile.NamedTemporaryFile() as fout:
        fout.write(all_checksum)
        fout.flush()
        _run_valsort(["-s", fout.name])
    logging.info("All OK!")
# ------------------------------------------------------------
# Main
# ------------------------------------------------------------
def init(args: Args):
    """Connect to (or launch) Ray, set up logging and working dirs, and
    return the progress-tracker actor handle."""
    if args.ray_address:
        ray.init(address=args.ray_address)
    else:
        # No address given: start a local cluster with one "worker" slot per CPU.
        ray.init(resources={"worker": os.cpu_count()})
    logging_utils.init()
    logging.info(args)
    os.makedirs(constants.WORK_DIR, exist_ok=True)
    resources = ray.cluster_resources()
    logging.info(resources)
    args.num_workers = resources["worker"]
    return tracing_utils.create_progress_tracker(args)
def main(args: Args):
    """Run the requested stages: input generation, sort, output validation."""
    # Keep the actor handle in scope for the duration of the program.
    _progress_tracker = init(args)  # noqa F841
    stages = [
        (args.generate_input, generate_input),
        (args.sort, sort_main),
        (args.validate_output, validate_output),
    ]
    for enabled, stage in stages:
        if enabled:
            stage(args)
if __name__ == "__main__":
    # Entry point: parse CLI flags, then run the requested stages.
    main(get_args())
| [
"numpy.fromfile",
"numpy.random.bytes",
"ray.cluster_resources",
"ray.experimental.raysort.types.PartInfo",
"os.cpu_count",
"ray.init",
"logging.info",
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"ray.experimental.raysort.logging_utils.init",
"subprocess.run",
"ray.util.remove... | [((6260, 6287), 'ray.experimental.raysort.tracing_utils.timeit', 'tracing_utils.timeit', (['"""map"""'], {}), "('map')\n", (6280, 6287), False, 'from ray.experimental.raysort import tracing_utils\n'), ((7656, 7703), 'ray.remote', 'ray.remote', ([], {'num_cpus': '(0)', 'resources': "{'worker': 1}"}), "(num_cpus=0, resources={'worker': 1})\n", (7666, 7703), False, 'import ray\n'), ((7705, 7734), 'ray.experimental.raysort.tracing_utils.timeit', 'tracing_utils.timeit', (['"""merge"""'], {}), "('merge')\n", (7725, 7734), False, 'from ray.experimental.raysort import tracing_utils\n'), ((8200, 8247), 'ray.remote', 'ray.remote', ([], {'num_cpus': '(0)', 'resources': "{'worker': 1}"}), "(num_cpus=0, resources={'worker': 1})\n", (8210, 8247), False, 'import ray\n'), ((8249, 8279), 'ray.experimental.raysort.tracing_utils.timeit', 'tracing_utils.timeit', (['"""reduce"""'], {}), "('reduce')\n", (8269, 8279), False, 'from ray.experimental.raysort import tracing_utils\n'), ((9675, 9721), 'ray.experimental.raysort.tracing_utils.timeit', 'tracing_utils.timeit', (['"""sort"""'], {'report_time': '(True)'}), "('sort', report_time=True)\n", (9695, 9721), False, 'from ray.experimental.raysort import tracing_utils\n'), ((746, 771), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (769, 771), False, 'import argparse\n'), ((2819, 2869), 'ray.experimental.raysort.types.ByteCount', 'ByteCount', (['(args.total_data_size / args.num_mappers)'], {}), '(args.total_data_size / args.num_mappers)\n', (2828, 2869), False, 'from ray.experimental.raysort.types import BlockInfo, ByteCount, RecordCount, PartId, PartInfo, Path\n'), ((3346, 3365), 'os.path.exists', 'os.path.exists', (['mnt'], {}), '(mnt)\n', (3360, 3365), False, 'import os\n'), ((3784, 3816), 'random.choice', 'random.choice', (['args.mount_points'], {}), '(args.mount_points)\n', (3797, 3816), False, 'import random\n'), ((3878, 3911), 'ray.experimental.raysort.types.PartInfo', 'PartInfo', 
(['part_id', 'node', 'filepath'], {}), '(part_id, node, filepath)\n', (3886, 3911), False, 'from ray.experimental.raysort.types import BlockInfo, ByteCount, RecordCount, PartId, PartInfo, Path\n'), ((4116, 4151), 'os.makedirs', 'os.makedirs', (['dirpath'], {'exist_ok': '(True)'}), '(dirpath, exist_ok=True)\n', (4127, 4151), False, 'import os\n'), ((4267, 4298), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (4279, 4298), False, 'import os\n'), ((4443, 4463), 'ray.experimental.raysort.logging_utils.init', 'logging_utils.init', ([], {}), '()\n', (4461, 4463), False, 'from ray.experimental.raysort import logging_utils\n'), ((4506, 4601), 'subprocess.run', 'subprocess.run', (["[constants.GENSORT_PATH, f'-b{offset}', f'{size}', pinfo.path]"], {'check': '(True)'}), "([constants.GENSORT_PATH, f'-b{offset}', f'{size}', pinfo.\n path], check=True)\n", (4520, 4601), False, 'import subprocess\n'), ((4615, 4655), 'logging.info', 'logging.info', (['f"""Generated input {pinfo}"""'], {}), "(f'Generated input {pinfo}')\n", (4627, 4655), False, 'import logging\n'), ((4757, 4805), 'ray.experimental.raysort.constants.bytes_to_records', 'constants.bytes_to_records', (['args.input_part_size'], {}), '(args.input_part_size)\n', (4783, 4805), False, 'from ray.experimental.raysort import constants\n'), ((5119, 5133), 'ray.get', 'ray.get', (['tasks'], {}), '(tasks)\n', (5126, 5133), False, 'import ray\n'), ((5916, 5949), 'numpy.fromfile', 'np.fromfile', (['path'], {'dtype': 'np.uint8'}), '(path, dtype=np.uint8)\n', (5927, 5949), True, 'import numpy as np\n'), ((6396, 6416), 'ray.experimental.raysort.logging_utils.init', 'logging_utils.init', ([], {}), '()\n', (6414, 6416), False, 'from ray.experimental.raysort import logging_utils\n'), ((7870, 7917), 'ray.experimental.raysort.constants.merge_part_ids', 'constants.merge_part_ids', (['reducer_id', 'mapper_id'], {}), '(reducer_id, mapper_id)\n', (7894, 7917), False, 'from ray.experimental.raysort import 
constants\n'), ((9872, 9913), 'ray.experimental.raysort.sortlib.get_boundaries', 'sortlib.get_boundaries', (['args.num_reducers'], {}), '(args.num_reducers)\n', (9894, 9913), False, 'from ray.experimental.raysort import sortlib\n'), ((10126, 10186), 'numpy.empty', 'np.empty', (['(args.num_rounds, args.num_reducers)'], {'dtype': 'object'}), '((args.num_rounds, args.num_reducers), dtype=object)\n', (10134, 10186), True, 'import numpy as np\n'), ((12301, 12369), 'subprocess.run', 'subprocess.run', (['([constants.VALSORT_PATH] + args)'], {'capture_output': '(True)'}), '([constants.VALSORT_PATH] + args, capture_output=True)\n', (12315, 12369), False, 'import subprocess\n'), ((12566, 12586), 'ray.experimental.raysort.logging_utils.init', 'logging_utils.init', ([], {}), '()\n', (12584, 12586), False, 'from ray.experimental.raysort import logging_utils\n'), ((12661, 12701), 'logging.info', 'logging.info', (['f"""Validated output {path}"""'], {}), "(f'Validated output {path}')\n", (12673, 12701), False, 'import logging\n'), ((13159, 13175), 'ray.get', 'ray.get', (['results'], {}), '(results)\n', (13166, 13175), False, 'import ray\n'), ((13483, 13506), 'logging.info', 'logging.info', (['"""All OK!"""'], {}), "('All OK!')\n", (13495, 13506), False, 'import logging\n'), ((13811, 13831), 'ray.experimental.raysort.logging_utils.init', 'logging_utils.init', ([], {}), '()\n', (13829, 13831), False, 'from ray.experimental.raysort import logging_utils\n'), ((13836, 13854), 'logging.info', 'logging.info', (['args'], {}), '(args)\n', (13848, 13854), False, 'import logging\n'), ((13859, 13905), 'os.makedirs', 'os.makedirs', (['constants.WORK_DIR'], {'exist_ok': '(True)'}), '(constants.WORK_DIR, exist_ok=True)\n', (13870, 13905), False, 'import os\n'), ((13922, 13945), 'ray.cluster_resources', 'ray.cluster_resources', ([], {}), '()\n', (13943, 13945), False, 'import ray\n'), ((13950, 13973), 'logging.info', 'logging.info', (['resources'], {}), '(resources)\n', (13962, 13973), False, 
'import logging\n'), ((14040, 14083), 'ray.experimental.raysort.tracing_utils.create_progress_tracker', 'tracing_utils.create_progress_tracker', (['args'], {}), '(args)\n', (14077, 14083), False, 'from ray.experimental.raysort import tracing_utils\n'), ((3299, 3320), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3318, 3320), False, 'import tempfile\n'), ((4996, 5044), 'ray.experimental.raysort.constants.bytes_to_records', 'constants.bytes_to_records', (['args.total_data_size'], {}), '(args.total_data_size)\n', (5022, 5044), False, 'from ray.experimental.raysort import constants\n'), ((5210, 5226), 'csv.writer', 'csv.writer', (['fout'], {}), '(fout)\n', (5220, 5226), False, 'import csv\n'), ((5605, 5620), 'csv.reader', 'csv.reader', (['fin'], {}), '(fin)\n', (5615, 5620), False, 'import csv\n'), ((6103, 6125), 'numpy.ceil', 'np.ceil', (['(part.size / N)'], {}), '(part.size / N)\n', (6110, 6125), True, 'import numpy as np\n'), ((8486, 8598), 'numpy.fromfile', 'np.fromfile', (['pinfo.path'], {'dtype': 'np.uint8', 'count': 'args.reducer_input_chunk', 'offset': '(d * args.reducer_input_chunk)'}), '(pinfo.path, dtype=np.uint8, count=args.reducer_input_chunk,\n offset=d * args.reducer_input_chunk)\n', (8497, 8598), True, 'import numpy as np\n'), ((9444, 9485), 'ray.util.placement_group', 'ray.util.placement_group', (["[{'worker': 1}]"], {}), "([{'worker': 1}])\n", (9468, 9485), False, 'import ray\n'), ((11823, 11847), 'ray.get', 'ray.get', (['reducer_results'], {}), '(reducer_results)\n', (11830, 11847), False, 'import ray\n'), ((12044, 12101), 'ray.internal.internal_api.memory_summary', 'ray.internal.internal_api.memory_summary', ([], {'stats_only': '(True)'}), '(stats_only=True)\n', (12084, 12101), False, 'import ray\n'), ((13346, 13375), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (13373, 13375), False, 'import tempfile\n'), ((13772, 13806), 'ray.init', 'ray.init', ([], {'address': 'args.ray_address'}), 
'(address=args.ray_address)\n', (13780, 13806), False, 'import ray\n'), ((3382, 3402), 'os.path.join', 'os.path.join', (['mnt', 'd'], {}), '(mnt, d)\n', (3394, 3402), False, 'import os\n'), ((5502, 5525), 'ray.experimental.raysort.types.PartInfo', 'PartInfo', (['i', 'None', 'None'], {}), '(i, None, None)\n', (5510, 5525), False, 'from ray.experimental.raysort.types import BlockInfo, ByteCount, RecordCount, PartId, PartInfo, Path\n'), ((9636, 9671), 'ray.util.remove_placement_group', 'ray.util.remove_placement_group', (['pg'], {}), '(pg)\n', (9667, 9671), False, 'import ray\n'), ((9996, 10010), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (10008, 10010), False, 'import os\n'), ((10706, 10777), 'numpy.empty', 'np.empty', (['(args.num_mappers_per_round, args.num_reducers)'], {'dtype': 'object'}), '((args.num_mappers_per_round, args.num_reducers), dtype=object)\n', (10714, 10777), True, 'import numpy as np\n'), ((11963, 11979), 'csv.writer', 'csv.writer', (['fout'], {}), '(fout)\n', (11973, 11979), False, 'import csv\n'), ((12755, 12776), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (12770, 12776), False, 'import os\n'), ((3412, 3427), 'os.listdir', 'os.listdir', (['mnt'], {}), '(mnt)\n', (3422, 3427), False, 'import os\n'), ((5834, 5871), 'numpy.random.bytes', 'np.random.bytes', (['args.input_part_size'], {}), '(args.input_part_size)\n', (5849, 5871), True, 'import numpy as np\n'), ((13737, 13751), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (13749, 13751), False, 'import os\n')] |
# Generated by Django 3.2.12 on 2022-02-24 09:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add JSON list fields 'proprietary_ids' and 'uris' to the Title model."""

    dependencies = [
        ('publications', '0027_fill_platform_name'),
    ]
    operations = [
        migrations.AddField(
            # default=list gives each existing row an empty JSON array
            model_name='title', name='proprietary_ids', field=models.JSONField(default=list),
        ),
        migrations.AddField(model_name='title', name='uris', field=models.JSONField(default=list)),
    ]
| [
"django.db.models.JSONField"
] | [((324, 354), 'django.db.models.JSONField', 'models.JSONField', ([], {'default': 'list'}), '(default=list)\n', (340, 354), False, 'from django.db import migrations, models\n'), ((434, 464), 'django.db.models.JSONField', 'models.JSONField', ([], {'default': 'list'}), '(default=list)\n', (450, 464), False, 'from django.db import migrations, models\n')] |
"""
A simple webhook event handler for Sendgrid
"""
import os
import json
import flask
from flask import request, jsonify
import flask_sqlalchemy
from sqlalchemy.ext.hybrid import hybrid_property
# Flask application and database wiring; all settings come from the
# environment so the app can run unchanged across deployments.
app = flask.Flask(__name__)
app.config['BASE_URL'] = os.environ['BASE_URL']
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['SQLALCHEMY_DATABASE_URI']
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
# username/pass to POST (HTTP basic auth credentials the webhook must present)
post_user, post_pass = os.environ['POST_USERNAME'], os.environ['POST_PASSWORD']
db = flask_sqlalchemy.SQLAlchemy()
db.init_app(app)
# Based on
# https://sendgrid.com/docs/for-developers/tracking-events/event/#event-objects
# These are other rarer(?) possibilities:
# asm_group_id, unique_args, marketing_campaign_id, marketing_campaign_name, pool
class Event(db.Model):
    """One Sendgrid webhook event, keyed by Sendgrid's sg_event_id.

    Columns mirror the fields of Sendgrid's event objects; any payload
    keys without a matching column are stored JSON-encoded in ``other``.
    """
    email = db.Column(db.Text)
    timestamp = db.Column(db.Integer)  # DateTime)
    event = db.Column(db.Text)
    smtp_id = db.Column(db.Text)  # sg key is 'smtp-id'
    useragent = db.Column(db.Text)
    ip = db.Column(db.Text)
    sg_event_id = db.Column(db.String(100), primary_key=True)
    sg_message_id = db.Column(db.Text)
    reason = db.Column(db.Text)
    status = db.Column(db.Text)
    response = db.Column(db.Text)
    tls = db.Column(db.Text)
    url = db.Column(db.Text)
    urloffset = db.Column(db.Text)
    attempt = db.Column(db.Text)
    category = db.Column(db.Text)
    type_ = db.Column(db.Text)
    # JSON-encoded dict of unrecognized payload keys; see the hybrid below.
    _other = db.Column('other', db.Text, default='[]')
    @hybrid_property
    def other(self):
        # Decode the stored JSON on access.
        return json.loads(self._other)
    @other.setter
    def other(self, lst):
        # Encode to JSON on assignment.
        self._other = json.dumps(lst)
# Column names of the Event model (private '_'-prefixed columns excluded,
# trailing underscores stripped so payload key 'type' maps to column 'type_').
event_keys = [k.strip('_')
              for k in flask_sqlalchemy.inspect(Event).columns.keys()
              if not k.startswith('_')]
@app.route('/', methods=['POST'])
def home():
    """Sendgrid event-webhook endpoint.

    Requires HTTP basic auth matching POST_USERNAME/POST_PASSWORD and a
    JSON array of event objects; each event is upserted as an Event row.
    Returns an empty body on success, 401 JSON on bad credentials.
    """
    auth = request.authorization
    # request.authorization is None when no (or a malformed) Authorization
    # header is sent; the previous code raised AttributeError and answered
    # 500 instead of 401 in that case.
    if auth is None or auth["username"] != post_user or \
            auth["password"] != post_pass:
        return jsonify({"message": "Unauthorized"}), 401
    # No data, just return
    if not request.json:
        return ""
    for item in request.json:
        # fix name mangling: Sendgrid sends 'smtp-id', the column is smtp_id
        if 'smtp-id' in item:
            item['smtp_id'] = item.pop('smtp-id')
        # collect keys not in model into the JSON 'other' catch-all
        other = {}
        for k in list(item.keys()):
            if k not in event_keys:
                other[k] = str(item.pop(k))
        obj = Event(**item)
        obj.other = other
        # merge = insert-or-update on the sg_event_id primary key
        db.session.merge(obj)
    db.session.commit()
    return ""
@app.cli.command("initdb")
def init_db():
db.create_all()
db.session.commit()
return
if __name__ == "__main__":
app.run(debug=True, threaded=True, use_reloader=True)
| [
"json.loads",
"flask_sqlalchemy.inspect",
"flask.Flask",
"json.dumps",
"flask_sqlalchemy.SQLAlchemy",
"flask.jsonify"
] | [((207, 228), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (218, 228), False, 'import flask\n'), ((519, 548), 'flask_sqlalchemy.SQLAlchemy', 'flask_sqlalchemy.SQLAlchemy', ([], {}), '()\n', (546, 548), False, 'import flask_sqlalchemy\n'), ((1687, 1710), 'json.loads', 'json.loads', (['self._other'], {}), '(self._other)\n', (1697, 1710), False, 'import json\n'), ((1778, 1793), 'json.dumps', 'json.dumps', (['lst'], {}), '(lst)\n', (1788, 1793), False, 'import json\n'), ((2092, 2128), 'flask.jsonify', 'jsonify', (["{'message': 'Unauthorized'}"], {}), "({'message': 'Unauthorized'})\n", (2099, 2128), False, 'from flask import request, jsonify\n'), ((1835, 1866), 'flask_sqlalchemy.inspect', 'flask_sqlalchemy.inspect', (['Event'], {}), '(Event)\n', (1859, 1866), False, 'import flask_sqlalchemy\n')] |
#!/usr/bin/env python
"""
zumoco worker
Called by AWS Lambda to discover service instances
which are then added to AWS CloudWatch and monitored.
Copyright 2019 zulily, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import json
import logging
from time import strftime
import boto3
from botocore import exceptions
import parsedatetime as pdt
# Module-level logger and AWS service clients shared by every function below.
Logger = logging.getLogger()
Logger.setLevel(logging.INFO)
CW_C = boto3.client('cloudwatch')
SNS_C = boto3.client('sns')
S3_C = boto3.client('s3')
# services zumoco has permission to describe
SERVICE_LIST = ['ec2', 'cloudwatch', 'lambda', 'sns', 'rds', 'autoscaling']
DEFS_PATH = 'monitordefs/'
TEAM_FILEPATH = DEFS_PATH + 'team.json'
# CloudWatch dashboard layout limits used when placing chart widgets.
DASHBOARD_MAX_WIDTH = 16
DASHBOARD_MAX_WIDGET = 50
# SNS publish() message size limit (256 KiB).
MAX_SNS_MESSAGE = 1024 * 256
def load_monitor_file(file_name):
    """Read a JSON monitor-definition file and return the parsed object.

    On an I/O failure the error is logged and an empty string is
    returned (the historical failure sentinel for this module).
    """
    try:
        with open(file_name, 'r') as monfile:
            return json.load(monfile)
    except IOError as error:
        Logger.warning('Failed to load ' + file_name)
        Logger.critical('Critical Error: ' + str(error))
        return ""
def load_instances(bucket, filename):
    """Load the previously-saved instance list (JSON) from S3.

    Returns an empty list when the object does not exist yet (first run);
    any other S3 client error is re-raised.
    """
    try:
        obj = S3_C.get_object(Bucket=bucket, Key=filename)
        last_str = obj['Body'].read().decode('utf-8')
        insts = json.loads(last_str)
    except exceptions.ClientError as err:
        if err.response['Error']['Code'] == "NoSuchKey":
            # First run for this service: no snapshot saved yet.
            Logger.warning('No file found:' + filename)
            insts = []
        else:
            raise
    return insts
def save_instances(inst_list, bucket, filename):
    """Serialize the instance list to JSON and upload it to S3.

    Returns the HTTP status code of the put (200 on success), or 0 when
    the upload fails so callers can keep testing ``status != 200``.
    """
    try:
        out = S3_C.put_object(Bucket=bucket, Key=filename,
                              Body=json.dumps(inst_list, ensure_ascii=False,
                                              default=dateconverter))
    except exceptions.ClientError as err:
        # str(err): concatenating the exception object itself raised
        # TypeError; and falling through to the return below hit an
        # unbound 'out' (NameError) and masked the real failure.
        Logger.error('Issue writing file:' + filename + ':' + str(err))
        return 0
    return out['ResponseMetadata']['HTTPStatusCode']
def dateconverter(date_obj):
    """``json.dumps`` default hook: stringify datetimes, otherwise return
    None (which json serializes as null)."""
    return str(date_obj) if isinstance(date_obj, datetime.datetime) else None
def determine_deltas(my_insts, my_last_insts):
    """Diff current vs. previous instances by their 'myname' key.

    Returns (deleted, created).  With no previous snapshot the deleted
    list is None and every current instance counts as created.
    """
    if not my_last_insts:
        return None, my_insts
    current = {inst['myname']: inst for inst in my_insts}
    previous = {inst['myname']: inst for inst in my_last_insts}
    created = [current[name] for name in set(current) - set(previous)]
    deleted = [previous[name] for name in set(previous) - set(current)]
    return deleted, created
def format_report(count, new_inst, del_inst, svc_info):
    """Build the email body describing new/deleted instances for a service.

    Returns None when neither list has entries (nothing to report).
    """
    parts = []
    if new_inst:
        parts.append('\n\n New Instances: ')
        parts.extend('\n    ' + inst['myname'] for inst in new_inst)
    else:
        parts.append('\n\n No new instances.')
    if del_inst:
        parts.append('\n\n Deleted Instances: ')
        parts.extend('\n    ' + inst['myname'] for inst in del_inst)
    else:
        parts.append('\n\n No deleted instances.')
    if not (new_inst or del_inst):
        return None
    header = ('Service: ' + svc_info['Service'] +
              '\n Total Instances: ' + str(count) + '\n\n')
    return header + ''.join(parts)
def send_report(report_text, svc_info, now_str):
    """Publish the instance report to the service's SNS ReportARN.

    Note: publish takes a max of 256KB, so over-long reports are
    truncated (with 20 bytes of slack for the truncation marker).
    """
    overage = len(report_text) - MAX_SNS_MESSAGE
    if overage > 0:
        report_text = report_text[:-overage - 20] + '\n<message truncated/>'
    resp = SNS_C.publish(TopicArn=svc_info['ReportARN'],
                          Message=report_text,
                          Subject='New/Deleted Instance Report for ' + now_str)
    return resp
def get_notify_targets(a_dest):
    """Filter the configured alarm destinations down to SNS ARNs that
    actually exist in this account/region.

    NOTE(review): list_topics returns one page; accounts with more than
    ~100 topics may need pagination here -- confirm against usage.
    """
    topics = SNS_C.list_topics()['Topics']
    arns = [i['TopicArn'] for i in topics]
    return {a: a_dest[a] for a in a_dest if a_dest[a] in arns}
def get_service_instance_tag_value(inst, svc_client, svc_info, tag_name):
    """Return the value of ``tag_name`` on the given instance, or "".

    If the service's monitor definition names a DiscoverTags API call,
    that call is made first to fetch the tag list for the instance.
    """
    # Requires another API call
    if svc_info['DiscoverTags']:
        # SECURITY NOTE(review): this builds and eval()s a method-call
        # expression from the monitor-definition file.  The definitions
        # shipped with the Lambda are trusted input; never feed this
        # function user-controlled config.
        cmd = 'svc_client.' + svc_info['DiscoverTags']
        if svc_info['DiscoverTagsInstParm']:
            cmd += 'inst["' + svc_info['DiscoverTagsInstParm'] + '"]'
        cmd += ')'
        inst = eval(cmd)
    # turn tag list into dictionary (tags with empty values are dropped)
    tagl = {i['Key']:i['Value'] for i in inst[svc_info['TagsKey']] if i['Value']}
    try:
        value = tagl[tag_name]
    except KeyError:
        value = ""
    return value
def create_friendly_name(instance, svc_client, svc_info):
    """Compose the alarm/display name for one service instance.

    The name starts with '<AlarmPrefix>_<Service>'; a friendly tag value
    is appended when configured and present, and the instance's unique
    dimension value is appended when required for uniqueness (or when no
    friendly tag was found).
    """
    max_len = 253
    name = '{0}_{1}'.format(svc_info['AlarmPrefix'], svc_info['Service'])
    friendly = None
    if svc_info['FriendlyName']:
        friendly = get_service_instance_tag_value(
            instance, svc_client, svc_info, svc_info['FriendlyName'])
        if friendly:
            # Leave room for the dimension suffix within the length cap.
            budget = max_len - (len(name) + len(svc_info['AlarmDimName']))
            name = name + '_' + friendly[:budget]
    if not friendly or svc_info['EnsureUniqueName']:
        name = name + '_' + instance[svc_info['AlarmDimName']]
    return name
def create_service_alarms(svc_inst, svc_client, svc_info):
    """Create the configured CloudWatch alarms for each new instance.

    Alarm definitions come from the service's monitor file; a KeyError
    (bad destination or dimension name) is logged and skipped so one bad
    definition does not abort the run.  Returns all alarms now existing
    under this prefix/service.
    """
    alarms = svc_info['Alarms']
    alm_tgt = get_notify_targets(svc_info['AlarmDestinations'])
    for instance in svc_inst:
        for alarm in alarms:
            alarmname = instance['myname'] + '_' + alarm
            cfg = alarms[alarm]
            try:
                # The two original branches duplicated every keyword
                # argument and differed only in OKActions; build the
                # call once and add OKActions conditionally.
                kwargs = dict(
                    AlarmName=alarmname,
                    MetricName=cfg['MetricName'],
                    Namespace=cfg['Namespace'],
                    AlarmDescription=cfg['AlarmDescription'],
                    Statistic=cfg['Statistic'],
                    Period=cfg['Period'],
                    Threshold=cfg['Threshold'],
                    ComparisonOperator=cfg['ComparisonOperator'],
                    EvaluationPeriods=cfg['EvaluationPeriods'],
                    AlarmActions=[alm_tgt[cfg['AlarmAction']]],
                    Dimensions=[{'Name': svc_info['AlarmDimName'],
                                 'Value': instance[svc_info['AlarmDimName']]}])
                if cfg['send_ok']:
                    # Also notify the same endpoint on return-to-OK.
                    kwargs['OKActions'] = [alm_tgt[cfg['AlarmAction']]]
                CW_C.put_metric_alarm(**kwargs)
            except KeyError:
                Logger.warning('Failed to create alarm: ' + alarmname)
                Logger.warning('Ensure valid AlarmDestinations / AlarmDimName')
                Logger.warning('in monitor definitions:' + svc_info['Service'])
    return get_service_alarms(svc_info['AlarmPrefix'], svc_info['Service'],
                              alarm_list=['All'])
def get_service_alarms(prefix, service, alarm_list):
    """Describe CloudWatch alarms for '<prefix>_<service>'.

    alarm_list controls scope: ['All'] fetches every alarm under the
    prefix, a list of instance dicts fetches per-instance alarms, and
    None returns an empty list.
    """
    alarms = []
    alarmprefix = prefix + '_' + service
    paginator = CW_C.get_paginator('describe_alarms')
    if alarm_list is not None:
        if alarm_list == ['All']:
            alarminst = alarmprefix
            for response in paginator.paginate(AlarmNamePrefix=alarminst):
                alarms.extend(response['MetricAlarms'])
        else:
            # One paginated query per instance name.
            for inst in alarm_list:
                alarminst = alarmprefix + '_' + inst['myname']
                for response in paginator.paginate(AlarmNamePrefix=alarminst):
                    alarms.extend(response['MetricAlarms'])
    return alarms
def delete_service_alarms(alarm_list):
    """Delete the given CloudWatch alarms in batches.

    delete_alarms accepts at most 100 names per request, so the pending
    batch is flushed once it grows past 90 entries.
    """
    pending = []
    for alarm in alarm_list:
        pending.append(alarm['AlarmName'])
        if len(pending) > 90:
            CW_C.delete_alarms(AlarmNames=pending)
            pending = []
    if pending:
        CW_C.delete_alarms(AlarmNames=pending)
def format_widget_props(svc_info, cht_name, chart, inst, alarms):
    """Build the 'properties' dict for one CloudWatch dashboard widget.

    Alarm charts get an 'annotations' block referencing the first
    matching alarm ARN; metric charts get a 'metrics' list with the
    instance's dimension value appended to each metric tuple.
    """
    props = {}
    if chart['is_alarm']:
        matched = []
        for alarm in alarms:
            if (alarm['MetricName'] in chart['metric_list'] and
                    alarm['Dimensions'][0]['Value'] == inst[svc_info['AlarmDimName']]):
                matched.append(alarm['AlarmArn'])
                break  # currently AWS renders a single alarm annotation
        props['annotations'] = {'alarms': matched}
    else:
        props['metrics'] = [list(mts) + [inst[svc_info['AlarmDimName']]]
                            for mts in chart['metric_list']]
    # NOTE(review): eval() of a config-supplied index expression -- the
    # monitor definitions must be trusted input.
    avail_zone = eval('inst' + chart['avail'])
    props['region'] = avail_zone[:-1]  # AZ minus trailing letter == region
    props['stat'] = chart['stat']
    props['period'] = chart['period']
    props['view'] = chart['view']
    props['stacked'] = chart['stacked']
    props['title'] = cht_name
    return props
def build_dashboard_widgets(svc_inst, alarms, svc_info):
    """Create a dashboard chart widget for every instance/chart pair.

    Widgets are laid out left-to-right in 6x4 cells, wrapping to a new
    row once the running x offset exceeds DASHBOARD_MAX_WIDTH.
    'singleValue' charts are rendered at half width.
    """
    widgets = []
    x_val = 0
    y_val = 0
    width = 6
    height = 4
    chts = svc_info['Charts']
    for inst in svc_inst:
        for cht in chts:
            # build a chart widget
            widg = {}
            cht_name = inst['myname'] + ' ' + cht
            widg['properties'] = format_widget_props(svc_info, cht_name,
                                                     chts[cht], inst, alarms)
            widg['type'] = chts[cht]['ch_type']
            # position graph
            widg['x'] = x_val
            widg['y'] = y_val
            widg['height'] = height
            widg['width'] = width
            # go small if singleValue chart
            if widg['properties']['view'] == 'singleValue':
                # Integer division: '/= 2' produced a float width under
                # Python 3, which is invalid CloudWatch widget geometry.
                widg['width'] //= 2
            widgets.append(widg)
            # wrap to next line, if necessary
            x_val += widg['width']
            if x_val > DASHBOARD_MAX_WIDTH:
                x_val = 0
                y_val += height
    return widgets
def generate_dashboard(name, chart_j):
    """Split the widget list into CloudWatch dashboards and publish them.

    A dashboard holds at most DASHBOARD_MAX_WIDGET widgets, so the list
    is chunked and each chunk is put as '<name>_<n>'.  Returns the
    dashboard entries now existing under the given name prefix.
    """
    widgets = chart_j['widgets']
    # ceil(len/MAX), but always at least one dashboard.  The original
    # int(n / MAX + 1) published a spurious empty dashboard whenever the
    # widget count was an exact multiple of DASHBOARD_MAX_WIDGET.
    dashcount = max(1, -(-len(widgets) // DASHBOARD_MAX_WIDGET))
    for dash in range(dashcount):
        dname = name + '_' + str(dash + 1)
        chunk = widgets[dash * DASHBOARD_MAX_WIDGET:
                        (dash + 1) * DASHBOARD_MAX_WIDGET]
        CW_C.put_dashboard(DashboardName=dname,
                           DashboardBody=json.dumps({'widgets': chunk}))
    return get_dashboards(name)
def get_dashboards(prefix):
    """Return CloudWatch dashboard entries whose names start with prefix."""
    dashboards = CW_C.list_dashboards(DashboardNamePrefix=prefix)
    return dashboards['DashboardEntries']
def delete_dashboards(dashboard_list):
    """Remove every CloudWatch dashboard named in the given entries."""
    names = [entry['DashboardName'] for entry in dashboard_list]
    if names:
        CW_C.delete_dashboards(DashboardNames=names)
def parse_service_response(svc_client, svc_info, response):
    """Flatten one page of an AWS describe/list response into instance
    dicts, tagging each with its 'myname' display name.

    InstanceIterator1/InstanceIterator2 in the monitor definition select
    how deeply the page is nested (e.g. Reservations -> Instances).
    """
    key1 = svc_info['InstanceIterator1']
    key2 = svc_info['InstanceIterator2']
    if key2:
        # Two levels of lists
        raw = [item for outer in response[key1] for item in outer[key2]]
    elif key1:
        raw = list(response[key1])
    else:
        raw = list(response)
    instances = []
    for item in raw:
        item['myname'] = create_friendly_name(item, svc_client, svc_info)
        instances.append(item)
    return instances
def get_service_instances(svc_client, svc_info):
    """Retrieve all instances for the given service via its paginated
    describe/list call, flattening the AWS response structure.

    InstanceFilters, when set in the monitor definition, are passed
    straight through to the paginator.
    """
    instances = []
    paginator = svc_client.get_paginator(svc_info['DiscoverInstance'])
    if svc_info['InstanceFilters']:
        for response in paginator.paginate(Filters=svc_info['InstanceFilters']):
            instances.extend(parse_service_response(svc_client, svc_info, response))
    else:
        for response in paginator.paginate():
            instances.extend(parse_service_response(svc_client, svc_info, response))
    return instances
def main(event, context):
    """Lambda entry point: discover instances per service, reconcile
    alarms against the previous S3 snapshot, email a delta report, and
    (re)build the per-service and per-team dashboards.

    ``event``/``context`` are the standard Lambda arguments; neither is
    used directly.
    """
    all_widgets = []
    # Human-readable timestamp for the report subject line.
    cons = pdt.Constants()
    cons.YearParseStyle = 0
    pdtcal = pdt.Calendar(cons)
    now_tm = pdtcal.parse("now")
    now_str = strftime('%c', now_tm[0])
    ##### PROGRAM FLOW #####
    # Load team file
    team_info = load_monitor_file(TEAM_FILEPATH)
    # For each service file in MonitorDefs,
    for svc in team_info['MonitorDefs']:
        # Load service file
        svc_info = load_monitor_file(DEFS_PATH + svc)
        # Ensure API exists for service
        try:
            svc_client = boto3.client(svc_info['Service'])
        except exceptions.UnknownServiceError:
            # NOTE(review): execution continues after this log line; if the
            # service name is also in SERVICE_LIST, svc_client may be unbound
            # (or stale from the previous iteration) below -- confirm intent.
            Logger.critical('Service unknown to AWS API:' + svc_info['Service'])
        if svc_info['Service'] in SERVICE_LIST:
            # Get new instances.
            instances = get_service_instances(svc_client, svc_info)
            instfile = svc_info['Service'] + '_' + svc_info['S3Suffix'] + '.json'
            # Get old instances
            old_inst = load_instances(team_info['Bucket'], instfile)
            # Determine what's new and deleted.
            del_inst, new_inst = determine_deltas(list(instances), old_inst)
            # Cleanup any old instance alarms.
            delete_service_alarms(get_service_alarms(svc_info['AlarmPrefix'],
                                                     svc_info['Service'],
                                                     alarm_list=del_inst))
            # Persist the current snapshot for the next run's diff.
            http_status = save_instances(instances, team_info['Bucket'],
                                         instfile)
            if http_status != 200:
                Logger.error('Unable to write instances file:' + instfile)
            report_text = format_report(len(instances), new_inst, del_inst, svc_info)
            if team_info['SendStatusUpdates'] and report_text:
                send_report(report_text, svc_info, now_str)
            # Create instance alarms for new instances.
            alarms = create_service_alarms(new_inst, svc_client, svc_info)
            dash_j = build_dashboard_widgets(instances, alarms, svc_info)
            all_widgets.extend(dash_j)
            # If service dashboard is requested, create one.
            if svc_info['CreateServiceDashboard']:
                name = svc_info['AlarmPrefix'] + '_' + svc_info['Service']
                name += '_' + svc_info['S3Suffix']
                chart_j = {'widgets' : dash_j}
                # Rebuild from scratch so stale widgets disappear.
                delete_dashboards(get_dashboards(name))
                generate_dashboard(name, chart_j)
        else:
            Logger.warning('No permissions for listing instances. Service: ')
            Logger.warning(svc_info['Service'])
    # If team dashboard is requested, create one
    if team_info['CreateTeamDashboard']:
        name = svc_info['AlarmPrefix'] + '_' + team_info['Team']
        chart_j = {'widgets' : all_widgets}
        delete_dashboards(get_dashboards(name))
        generate_dashboard(name, chart_j)
#main('foo', 'bar')
| [
"logging.getLogger",
"json.loads",
"boto3.client",
"time.strftime",
"json.dumps",
"parsedatetime.Calendar",
"json.load",
"parsedatetime.Constants"
] | [((892, 911), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (909, 911), False, 'import logging\n'), ((950, 976), 'boto3.client', 'boto3.client', (['"""cloudwatch"""'], {}), "('cloudwatch')\n", (962, 976), False, 'import boto3\n'), ((985, 1004), 'boto3.client', 'boto3.client', (['"""sns"""'], {}), "('sns')\n", (997, 1004), False, 'import boto3\n'), ((1012, 1030), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1024, 1030), False, 'import boto3\n'), ((15316, 15331), 'parsedatetime.Constants', 'pdt.Constants', ([], {}), '()\n', (15329, 15331), True, 'import parsedatetime as pdt\n'), ((15373, 15391), 'parsedatetime.Calendar', 'pdt.Calendar', (['cons'], {}), '(cons)\n', (15385, 15391), True, 'import parsedatetime as pdt\n'), ((15439, 15464), 'time.strftime', 'strftime', (['"""%c"""', 'now_tm[0]'], {}), "('%c', now_tm[0])\n", (15447, 15464), False, 'from time import strftime\n'), ((1876, 1896), 'json.loads', 'json.loads', (['last_str'], {}), '(last_str)\n', (1886, 1896), False, 'import json\n'), ((1448, 1466), 'json.load', 'json.load', (['monfile'], {}), '(monfile)\n', (1457, 1466), False, 'import json\n'), ((15817, 15850), 'boto3.client', 'boto3.client', (["svc_info['Service']"], {}), "(svc_info['Service'])\n", (15829, 15850), False, 'import boto3\n'), ((2325, 2389), 'json.dumps', 'json.dumps', (['inst_list'], {'ensure_ascii': '(False)', 'default': 'dateconverter'}), '(inst_list, ensure_ascii=False, default=dateconverter)\n', (2335, 2389), False, 'import json\n'), ((13195, 13215), 'json.dumps', 'json.dumps', (['dwidgets'], {}), '(dwidgets)\n', (13205, 13215), False, 'import json\n')] |
# -*- coding: utf-8 -*-
# Copyright 2015-2019 grafana-dashboard-builder contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import pytest
from mock import patch, MagicMock
from grafana_dashboards.exporter import ProjectProcessor, FileExporter
__author__ = '<NAME> <<EMAIL>>'
def test_project_processor():
    """process_projects should expand each project's contexts and hand the
    rendered dashboard JSON to every registered dashboard processor."""
    dashboard_processor = MagicMock()
    processor = ProjectProcessor([dashboard_processor])
    project = MagicMock()
    context = MagicMock()
    dashboard = MagicMock()
    # One context and one dashboard per project.
    project.get_contexts.return_value = [context]
    project.get_dashboards.return_value = [dashboard]
    parent_context = MagicMock()
    # noinspection PyTypeChecker
    processor.process_projects([project], parent_context)
    project.get_contexts.assert_called_once_with(parent_context)
    dashboard.gen_json.assert_called_with(context)
    context.expand_placeholders.assert_called_with(dashboard.name)
    dashboard_processor.process_dashboard.assert_called_once_with(project.name, context.expand_placeholders(),
                                                                  dashboard.gen_json())
# patch decorators apply bottom-up, so the parameter order below is the
# reverse of the decorator stack (exists, isdir, makedirs, dump, open).
@patch('grafana_dashboards.exporter.open', create=True)
@patch('json.dump')
@patch('os.makedirs', return_value=True)
@patch('os.path.isdir', return_value=True)
@patch('os.path.exists', return_value=True)
def test_file_exporter(patch_exists, path_isdir, makedirs, json_dump, mock_file):
    """Happy path: the dashboard dict is dumped as pretty-printed JSON."""
    exporter = FileExporter('output_folder')
    dashboard_data = {'some_key': 'some_value'}
    exporter.process_dashboard('project_name', 'dashboard_name', dashboard_data)
    json_dump.assert_called_once_with(dashboard_data, mock_file().__enter__(), sort_keys=True, indent=2,
                                      separators=(',', ': '))
# First makedirs call (constructor) succeeds, the second (per-dashboard
# directory) raises, so the failure surfaces from process_dashboard.
@patch('os.makedirs', side_effect=[True, OSError('testing')])
@patch('os.path.isdir', return_value=True)
@patch('os.path.exists', return_value=False)
def test_file_exporter_path_not_exist(patch_exists, path_isdir, makedirs):
    """A makedirs failure during export must propagate to the caller."""
    exporter = FileExporter('output_folder')
    dashboard_data = {'some_key': 'some_value'}
    with pytest.raises(Exception) as e:
        exporter.process_dashboard('project_name', 'dashboard_name', dashboard_data)
    assert 'testing' in str(e.value)
@patch('os.makedirs', return_value=True)
@patch('os.path.isdir', return_value=False)
@patch('os.path.exists', return_value=False)
def test_file_exporter_output_not_dir(patch_exists, path_isdir, makedirs):
    """Constructing FileExporter must fail when the output path is not a
    directory."""
    with pytest.raises(Exception) as e:
        FileExporter('output_folder')
    assert "'output_folder' must be a directory" in str(e.value)
| [
"mock.patch",
"grafana_dashboards.exporter.ProjectProcessor",
"grafana_dashboards.exporter.FileExporter",
"pytest.raises",
"mock.MagicMock"
] | [((1642, 1696), 'mock.patch', 'patch', (['"""grafana_dashboards.exporter.open"""'], {'create': '(True)'}), "('grafana_dashboards.exporter.open', create=True)\n", (1647, 1696), False, 'from mock import patch, MagicMock\n'), ((1698, 1716), 'mock.patch', 'patch', (['"""json.dump"""'], {}), "('json.dump')\n", (1703, 1716), False, 'from mock import patch, MagicMock\n'), ((1718, 1757), 'mock.patch', 'patch', (['"""os.makedirs"""'], {'return_value': '(True)'}), "('os.makedirs', return_value=True)\n", (1723, 1757), False, 'from mock import patch, MagicMock\n'), ((1759, 1800), 'mock.patch', 'patch', (['"""os.path.isdir"""'], {'return_value': '(True)'}), "('os.path.isdir', return_value=True)\n", (1764, 1800), False, 'from mock import patch, MagicMock\n'), ((1802, 1844), 'mock.patch', 'patch', (['"""os.path.exists"""'], {'return_value': '(True)'}), "('os.path.exists', return_value=True)\n", (1807, 1844), False, 'from mock import patch, MagicMock\n'), ((2335, 2376), 'mock.patch', 'patch', (['"""os.path.isdir"""'], {'return_value': '(True)'}), "('os.path.isdir', return_value=True)\n", (2340, 2376), False, 'from mock import patch, MagicMock\n'), ((2378, 2421), 'mock.patch', 'patch', (['"""os.path.exists"""'], {'return_value': '(False)'}), "('os.path.exists', return_value=False)\n", (2383, 2421), False, 'from mock import patch, MagicMock\n'), ((2756, 2795), 'mock.patch', 'patch', (['"""os.makedirs"""'], {'return_value': '(True)'}), "('os.makedirs', return_value=True)\n", (2761, 2795), False, 'from mock import patch, MagicMock\n'), ((2797, 2839), 'mock.patch', 'patch', (['"""os.path.isdir"""'], {'return_value': '(False)'}), "('os.path.isdir', return_value=False)\n", (2802, 2839), False, 'from mock import patch, MagicMock\n'), ((2841, 2884), 'mock.patch', 'patch', (['"""os.path.exists"""'], {'return_value': '(False)'}), "('os.path.exists', return_value=False)\n", (2846, 2884), False, 'from mock import patch, MagicMock\n'), ((879, 890), 'mock.MagicMock', 'MagicMock', ([], {}), 
'()\n', (888, 890), False, 'from mock import patch, MagicMock\n'), ((907, 946), 'grafana_dashboards.exporter.ProjectProcessor', 'ProjectProcessor', (['[dashboard_processor]'], {}), '([dashboard_processor])\n', (923, 946), False, 'from grafana_dashboards.exporter import ProjectProcessor, FileExporter\n'), ((961, 972), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (970, 972), False, 'from mock import patch, MagicMock\n'), ((987, 998), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (996, 998), False, 'from mock import patch, MagicMock\n'), ((1015, 1026), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1024, 1026), False, 'from mock import patch, MagicMock\n'), ((1152, 1163), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1161, 1163), False, 'from mock import patch, MagicMock\n'), ((1942, 1971), 'grafana_dashboards.exporter.FileExporter', 'FileExporter', (['"""output_folder"""'], {}), "('output_folder')\n", (1954, 1971), False, 'from grafana_dashboards.exporter import ProjectProcessor, FileExporter\n'), ((2512, 2541), 'grafana_dashboards.exporter.FileExporter', 'FileExporter', (['"""output_folder"""'], {}), "('output_folder')\n", (2524, 2541), False, 'from grafana_dashboards.exporter import ProjectProcessor, FileExporter\n'), ((2600, 2624), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (2613, 2624), False, 'import pytest\n'), ((2969, 2993), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (2982, 2993), False, 'import pytest\n'), ((3008, 3037), 'grafana_dashboards.exporter.FileExporter', 'FileExporter', (['"""output_folder"""'], {}), "('output_folder')\n", (3020, 3037), False, 'from grafana_dashboards.exporter import ProjectProcessor, FileExporter\n')] |
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from neutron._i18n import _
# Defaults for the agent options registered below.
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_INTERFACE_MAPPINGS = []
DEFAULT_VXLAN_GROUP = '192.168.127.12'
DEFAULT_KERNEL_HZ_VALUE = 250 # [Hz]
DEFAULT_TC_TBF_LATENCY = 50 # [ms]
# VXLAN tunnelling options; registered under the [VXLAN] section at the
# bottom of this module.
vxlan_opts = [
    cfg.BoolOpt('enable_vxlan', default=True,
                help=_("Enable VXLAN on the agent. Can be enabled when "
                       "agent is managed by ml2 plugin using linuxbridge "
                       "mechanism driver")),
    cfg.IntOpt('ttl',
               help=_("TTL for vxlan interface protocol packets.")),
    cfg.IntOpt('tos',
               help=_("TOS for vxlan interface protocol packets.")),
    cfg.StrOpt('vxlan_group', default=DEFAULT_VXLAN_GROUP,
               help=_("Multicast group(s) for vxlan interface. A range of "
                      "group addresses may be specified by using CIDR "
                      "notation. Specifying a range allows different VNIs to "
                      "use different group addresses, reducing or eliminating "
                      "spurious broadcast traffic to the tunnel endpoints. "
                      "To reserve a unique group for each possible "
                      "(24-bit) VNI, use a /8 such as 172.16.31.10/8. This "
                      "setting must be the same on all the agents.")),
    cfg.IPOpt('local_ip', help=_("Local IP address of the VXLAN endpoints.")),
    cfg.BoolOpt('l2_population', default=False,
                help=_("Extension to use alongside ml2 plugin's l2population "
                       "mechanism driver. It enables the plugin to populate "
                       "VXLAN forwarding table.")),
    cfg.BoolOpt('arp_responder', default=False,
                help=_("Enable local ARP responder which provides local "
                       "responses instead of performing ARP broadcast into "
                       "the overlay. Enabling local ARP responder is not "
                       "fully compatible with the allowed-address-pairs "
                       "extension.")
                ),
]
# Physical interface / bridge wiring options; registered under [LINUX_BRIDGE].
bridge_opts = [
    cfg.ListOpt('physical_interface_mappings',
                default=DEFAULT_INTERFACE_MAPPINGS,
                help=_("Comma-separated list of "
                       "<physical_network>:<physical_interface> tuples "
                       "mapping physical network names to the agent's "
                       "node-specific physical network interfaces to be used "
                       "for flat and VLAN networks. All physical networks "
                       "listed in network_vlan_ranges on the server should "
                       "have mappings to appropriate interfaces on each "
                       "agent.")),
    cfg.ListOpt('bridge_mappings',
                default=DEFAULT_BRIDGE_MAPPINGS,
                help=_("List of <physical_network>:<physical_bridge>")),
]
# Traffic-shaping (QoS) tuning knobs; registered under [QOS].
qos_options = [
    cfg.IntOpt('kernel_hz', default=DEFAULT_KERNEL_HZ_VALUE,
               help=_("Value of host kernel tick rate (hz) for calculating "
                      "minimum burst value in bandwidth limit rules for "
                      "a port with QoS. See kernel configuration file for "
                      "HZ value and tc-tbf manual for more information.")),
    cfg.IntOpt('tbf_latency', default=DEFAULT_TC_TBF_LATENCY,
               help=_("Value of latency (ms) for calculating size of queue "
                      "for a port with QoS. See tc-tbf manual for more "
                      "information."))
]
# Make the option groups available on the global oslo.config object.
cfg.CONF.register_opts(vxlan_opts, "VXLAN")
cfg.CONF.register_opts(bridge_opts, "LINUX_BRIDGE")
cfg.CONF.register_opts(qos_options, "QOS")
| [
"neutron._i18n._",
"oslo_config.cfg.CONF.register_opts"
] | [((4152, 4195), 'oslo_config.cfg.CONF.register_opts', 'cfg.CONF.register_opts', (['vxlan_opts', '"""VXLAN"""'], {}), "(vxlan_opts, 'VXLAN')\n", (4174, 4195), False, 'from oslo_config import cfg\n'), ((4196, 4247), 'oslo_config.cfg.CONF.register_opts', 'cfg.CONF.register_opts', (['bridge_opts', '"""LINUX_BRIDGE"""'], {}), "(bridge_opts, 'LINUX_BRIDGE')\n", (4218, 4247), False, 'from oslo_config import cfg\n'), ((4248, 4290), 'oslo_config.cfg.CONF.register_opts', 'cfg.CONF.register_opts', (['qos_options', '"""QOS"""'], {}), "(qos_options, 'QOS')\n", (4270, 4290), False, 'from oslo_config import cfg\n'), ((949, 1071), 'neutron._i18n._', '_', (['"""Enable VXLAN on the agent. Can be enabled when agent is managed by ml2 plugin using linuxbridge mechanism driver"""'], {}), "('Enable VXLAN on the agent. Can be enabled when agent is managed by ml2 plugin using linuxbridge mechanism driver'\n )\n", (950, 1071), False, 'from neutron._i18n import _\n'), ((1163, 1209), 'neutron._i18n._', '_', (['"""TTL for vxlan interface protocol packets."""'], {}), "('TTL for vxlan interface protocol packets.')\n", (1164, 1209), False, 'from neutron._i18n import _\n'), ((1254, 1300), 'neutron._i18n._', '_', (['"""TOS for vxlan interface protocol packets."""'], {}), "('TOS for vxlan interface protocol packets.')\n", (1255, 1300), False, 'from neutron._i18n import _\n'), ((1382, 1790), 'neutron._i18n._', '_', (['"""Multicast group(s) for vxlan interface. A range of group addresses may be specified by using CIDR notation. Specifying a range allows different VNIs to use different group addresses, reducing or eliminating spurious broadcast traffic to the tunnel endpoints. To reserve a unique group for each possible (24-bit) VNI, use a /8 such as 172.16.31.10/8. This setting must be the same on all the agents."""'], {}), "('Multicast group(s) for vxlan interface. A range of group addresses may be specified by using CIDR notation. 
Specifying a range allows different VNIs to use different group addresses, reducing or eliminating spurious broadcast traffic to the tunnel endpoints. To reserve a unique group for each possible (24-bit) VNI, use a /8 such as 172.16.31.10/8. This setting must be the same on all the agents.'\n )\n", (1383, 1790), False, 'from neutron._i18n import _\n'), ((1994, 2039), 'neutron._i18n._', '_', (['"""Local IP address of the VXLAN endpoints."""'], {}), "('Local IP address of the VXLAN endpoints.')\n", (1995, 2039), False, 'from neutron._i18n import _\n'), ((2111, 2249), 'neutron._i18n._', '_', (['"""Extension to use alongside ml2 plugin\'s l2population mechanism driver. It enables the plugin to populate VXLAN forwarding table."""'], {}), '("Extension to use alongside ml2 plugin\'s l2population mechanism driver. It enables the plugin to populate VXLAN forwarding table."\n )\n', (2112, 2249), False, 'from neutron._i18n import _\n'), ((2368, 2584), 'neutron._i18n._', '_', (['"""Enable local ARP responder which provides local responses instead of performing ARP broadcast into the overlay. Enabling local ARP responder is not fully compatible with the allowed-address-pairs extension."""'], {}), "('Enable local ARP responder which provides local responses instead of performing ARP broadcast into the overlay. Enabling local ARP responder is not fully compatible with the allowed-address-pairs extension.'\n )\n", (2369, 2584), False, 'from neutron._i18n import _\n'), ((2842, 3177), 'neutron._i18n._', '_', (['"""Comma-separated list of <physical_network>:<physical_interface> tuples mapping physical network names to the agent\'s node-specific physical network interfaces to be used for flat and VLAN networks. 
All physical networks listed in network_vlan_ranges on the server should have mappings to appropriate interfaces on each agent."""'], {}), '("Comma-separated list of <physical_network>:<physical_interface> tuples mapping physical network names to the agent\'s node-specific physical network interfaces to be used for flat and VLAN networks. All physical networks listed in network_vlan_ranges on the server should have mappings to appropriate interfaces on each agent."\n )\n', (2843, 3177), False, 'from neutron._i18n import _\n'), ((3462, 3511), 'neutron._i18n._', '_', (['"""List of <physical_network>:<physical_bridge>"""'], {}), "('List of <physical_network>:<physical_bridge>')\n", (3463, 3511), False, 'from neutron._i18n import _\n'), ((3614, 3824), 'neutron._i18n._', '_', (['"""Value of host kernel tick rate (hz) for calculating minimum burst value in bandwidth limit rules for a port with QoS. See kernel configuration file for HZ value and tc-tbf manual for more information."""'], {}), "('Value of host kernel tick rate (hz) for calculating minimum burst value in bandwidth limit rules for a port with QoS. See kernel configuration file for HZ value and tc-tbf manual for more information.'\n )\n", (3615, 3824), False, 'from neutron._i18n import _\n'), ((3979, 4101), 'neutron._i18n._', '_', (['"""Value of latency (ms) for calculating size of queue for a port with QoS. See tc-tbf manual for more information."""'], {}), "('Value of latency (ms) for calculating size of queue for a port with QoS. See tc-tbf manual for more information.'\n )\n", (3980, 4101), False, 'from neutron._i18n import _\n')] |
#!/usr/bin/python
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pn_port_cos_rate_setting
author: "Pluribus Networks (@rajaspachipulusu17)"
short_description: CLI command to modify port-cos-rate-setting
description:
- This modules can be used to update the port cos rate limit.
options:
pn_cliswitch:
description:
- Target switch to run the CLI on.
required: false
type: str
state:
description:
- State the action to perform. Use C(update) to modify
the port-cos-rate-setting.
required: true
type: str
choices: ['update']
pn_cos0_rate:
description:
- cos0 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos1_rate:
description:
- cos1 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos2_rate:
description:
- cos2 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos3_rate:
description:
- cos3 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos4_rate:
description:
- cos4 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos5_rate:
description:
- cos5 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos6_rate:
description:
- cos6 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos7_rate:
description:
- cos7 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_port:
description:
- port.
required: false
type: str
choices: ['control-port', 'data-port', 'span-ports']
'''
EXAMPLES = """
- name: port cos rate modify
pn_port_cos_rate_setting:
pn_cliswitch: "sw01"
state: "update"
pn_port: "control-port"
pn_cos1_rate: "1000"
pn_cos5_rate: "1000"
pn_cos2_rate: "1000"
pn_cos0_rate: "1000"
- name: port cos rate modify
pn_port_cos_rate_setting:
pn_cliswitch: "sw01"
state: "update"
pn_port: "data-port"
pn_cos1_rate: "2000"
pn_cos5_rate: "2000"
pn_cos2_rate: "2000"
pn_cos0_rate: "2000"
"""
RETURN = """
command:
description: the CLI command run on the target node.
returned: always
type: str
stdout:
description: set of responses from the port-cos-rate-setting command.
returned: always
type: list
stderr:
description: set of error responses from the port-cos-rate-setting command.
returned: on error
type: list
changed:
description: indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli
def main():
    """Parse module arguments, build the port-cos-rate-setting CLI string and run it."""
    state_map = dict(
        update='port-cos-rate-setting-modify'
    )

    module = AnsibleModule(
        argument_spec=dict(
            pn_cliswitch=dict(required=False, type='str'),
            state=dict(required=True, type='str',
                       choices=state_map.keys()),
            pn_cos1_rate=dict(required=False, type='str'),
            pn_cos5_rate=dict(required=False, type='str'),
            pn_cos2_rate=dict(required=False, type='str'),
            pn_cos0_rate=dict(required=False, type='str'),
            pn_cos6_rate=dict(required=False, type='str'),
            pn_cos3_rate=dict(required=False, type='str'),
            pn_cos4_rate=dict(required=False, type='str'),
            pn_cos7_rate=dict(required=False, type='str'),
            pn_port=dict(required=False, type='str',
                         choices=['control-port', 'data-port', 'span-ports']),
        ),
        required_if=(
            ['state', 'update', ['pn_port']],
        ),
        required_one_of=[['pn_cos0_rate',
                          'pn_cos1_rate',
                          'pn_cos2_rate',
                          'pn_cos3_rate',
                          'pn_cos4_rate',
                          'pn_cos5_rate',
                          'pn_cos6_rate',
                          'pn_cos7_rate']],
    )

    # Pull out the arguments we need to build the command.
    cliswitch = module.params['pn_cliswitch']
    state = module.params['state']
    port = module.params['pn_port']

    command = state_map[state]

    # Assemble the CLI command string.
    cli = pn_cli(module, cliswitch)

    if command == 'port-cos-rate-setting-modify':
        cli += ' %s ' % command
        # (CLI flag, value) pairs, appended in the same order the
        # original hand-written chain emitted them.
        rate_flags = (
            ('cos1-rate', module.params['pn_cos1_rate']),
            ('cos5-rate', module.params['pn_cos5_rate']),
            ('cos2-rate', module.params['pn_cos2_rate']),
            ('cos0-rate', module.params['pn_cos0_rate']),
            ('cos6-rate', module.params['pn_cos6_rate']),
            ('cos3-rate', module.params['pn_cos3_rate']),
            ('cos4-rate', module.params['pn_cos4_rate']),
            ('cos7-rate', module.params['pn_cos7_rate']),
        )
        for flag, value in rate_flags:
            if value:
                cli += ' ' + flag + ' ' + value
        if port:
            cli += ' port ' + port

    run_cli(module, cli, state_map)


if __name__ == '__main__':
    main()
| [
"ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos.run_cli",
"ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos.pn_cli"
] | [((5153, 5178), 'ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos.pn_cli', 'pn_cli', (['module', 'cliswitch'], {}), '(module, cliswitch)\n', (5159, 5178), False, 'from ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli\n'), ((5855, 5886), 'ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos.run_cli', 'run_cli', (['module', 'cli', 'state_map'], {}), '(module, cli, state_map)\n', (5862, 5886), False, 'from ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli\n')] |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import os
import json
import time
import pdb
import platform
import string
import random
# This script and create_script.sh are maintained by the algorithm team;
# bump this version number whenever the code changes.
code_version="1.0"
def create_hccl_mindspore():
    """Assemble the MindSpore rank-table file (hccl_ms.json) from per-worker npu_<i>.info files.

    Polls the job's .npu directory until one info file per worker exists,
    collects the device id/ip pairs and host ip from each, and writes the
    aggregated rank table as JSON.
    """
    done = 0
    rank_id = 0
    hccl_data = {}
    # for test only
    #os.environ['DLWS_WORKER_NUM'] = "2"
    #os.environ['DLWS_JOB_ID'] = "test_npu_device"
    #os.environ['DLWS_USER_NAME'] = "bifeng.peng"
    #
    ## Single-node job; checking DLWS_PS_NUM=0 would be the more reliable test
    if "DLWS_WORKER_NUM" not in os.environ:
        os.environ['DLWS_WORKER_NUM'] = "1"
    else:
        pass
    worker_num = int(os.environ['DLWS_WORKER_NUM'])
    job_id = os.environ['DLWS_JOB_ID']
    user_name = os.environ['DLWS_USER_NAME']
    # 1) the hccl file and related scripts all live in this directory
    # 2) the files are job-specific; different jobs get isolated storage
    npu_dir = '/home/%s/.npu/%s/' % (user_name, job_id)
    # The following fields are fixed values.
    hccl_data["board_id"] = "0x0020"
    hccl_data["chip_info"] = "910"
    hccl_data["deploy_mode"] = "lab"
    hccl_data["group_count"] = "1"
    hccl_data["para_plane_nic_location"] = "device"
    hccl_data["para_plane_nic_name"] = [
        "eth0",
        "eth1",
        "eth2",
        "eth3",
        "eth4",
        "eth5",
        "eth6",
        "eth7"
    ]
    hccl_data["para_plane_nic_num"] = "8"
    hccl_data["status"] = "completed"
    hccl_data["group_list"] = []
    group = {}
    group["device_num"] = str(worker_num * 8)
    group["server_num"] = str(worker_num)
    group["group_name"] = "test"
    group["instance_count"] = group["device_num"]
    group["instance_list"] = []
    ## Wait for the npu_<i>.info files to appear;
    ## there is exactly one file per worker.
    while True:
        PATH = npu_dir + ('/npu_%d.info' % (done))
        if os.path.isfile(PATH) and os.access(PATH, os.R_OK):
            with open(PATH, "r") as f:
                ips = ""
                host_ip = ""
                # file format:
                # ip=id1:ip1,id2:ip2
                # host=xxx
                for line in f:
                    print(line)
                    if "ip=" in line:
                        _, ips = line.strip().split("=")
                    elif "host=" in line:
                        _, host_ip = line.strip().split("=")
                ip_list = ips.split(",")
                ip_list = sorted(ip_list)
                for ip_elem in ip_list:
                    # device id and device ip
                    device_id, device_ip = ip_elem.split(":")
                    ## set up group list
                    device_item = {} # item of instance list
                    device_item["devices"] = [{
                        "device_id" : device_id,
                        "device_ip" : device_ip
                    }]
                    device_item["rank_id"] = str(rank_id)
                    device_item["server_id"] = str(host_ip)
                    #pdb.set_trace()
                    rank_id = rank_id + 1
                    group["instance_list"].append(device_item)
                f.close()
            done = done + 1
        else:
            pass
        if done == worker_num:
            break
        else:
            pass
        time.sleep(1)
    group["instance_count"] = group["device_num"] = str(len(group["instance_list"]))
    print("succ!")
    hccl_data["group_list"].append(group)
    # dump to json file
    with open(npu_dir + '/hccl_ms.json', 'w') as fp:
        json.dump(hccl_data, fp)
    return
def create_hccl_tensorflow():
    """Assemble the TensorFlow rank-table file (hccl_tf.json) from per-worker npu_<i>.info files.

    Same polling scheme as create_hccl_mindspore, but the JSON layout groups
    devices per pod instance instead of one entry per device.
    """
    done = 0 # worker node to process
    rank_id = 0 # equals to device count
    hccl_data = {}
    # for test only
    #os.environ['DLWS_WORKER_NUM'] = "2"
    #os.environ['DLWS_JOB_ID'] = "test_npu_device"
    #os.environ['DLWS_USER_NAME'] = "bifeng.peng"
    #
    ## non distributed job
    if "DLWS_WORKER_NUM" not in os.environ:
        os.environ['DLWS_WORKER_NUM'] = "1"
    else:
        pass
    worker_num = int(os.environ['DLWS_WORKER_NUM'])
    job_id = os.environ['DLWS_JOB_ID']
    pod_name = os.environ['POD_NAME']
    user_name = os.environ['DLWS_USER_NAME']
    distributing_job= False
    if "DLWS_NUM_PS" in os.environ:
        if int(os.environ["DLWS_NUM_PS"]) > 0:
            distributing_job = True
        else:
            pass
    else:
        pass
    # 1) the hccl file and related scripts all live in this directory
    # 2) the files are job-specific; different jobs get isolated storage
    npu_dir = '/home/%s/.npu/%s/' % (user_name, job_id)
    hccl_data["group_count"] = "1"
    hccl_data["status"] = "completed"
    hccl_data["group_list"] = []
    group = {}
    #group["device_count"] = worker_num * 8
    group["instance_count"] = str(worker_num)
    group["group_name"] = "test"
    group["instance_list"] = []
    ## Wait for the npu_<i>.info files to appear;
    ## there is exactly one file per worker.
    while True:
        PATH = npu_dir + ('/npu_%d.info' % (done))
        if os.path.isfile(PATH) and os.access(PATH, os.R_OK):
            with open(PATH, "r") as f:
                ips = ""
                host_ip = ""
                # file format:
                # ip=id1:ip1,id2:ip2
                # host=xxx
                for line in f:
                    print(line)
                    if "ip=" in line:
                        _, ips = line.strip().split("=")
                    elif "host=" in line:
                        _, host_ip = line.strip().split("=")
                instance_item = {} # item of instance list
                if distributing_job is True:
                    instance_item["pod_name"] = job_id + "-worker-" + str(done)
                else:
                    instance_item["pod_name"] = pod_name
                instance_item["server_id"] = host_ip
                instance_item["devices"] = []
                # parse string to get all device ips
                ip_list = ips.split(",")
                ip_list = sorted(ip_list)
                for ip_elem in ip_list:
                    # one device
                    device_id, device_ip = ip_elem.split(":")
                    ## set up group list
                    device_item = {
                        "device_id" : device_id,
                        "device_ip" : device_ip
                    }
                    # append to instance list
                    rank_id = rank_id + 1
                    instance_item["devices"].append(device_item)
                #pdb.set_trace()
                group["instance_list"].append(instance_item)
                f.close()
            done = done + 1
        else:
            pass
        if done == worker_num:
            break
        else:
            pass
        time.sleep(1)
    group["device_count"] = str(rank_id)
    group["instance_count"] = str(len(group["instance_list"]))
    hccl_data["group_list"].append(group)
    print("succ!")
    # dump to json file
    with open(npu_dir + '/hccl_tf.json', 'w') as fp:
        json.dump(hccl_data, fp)
    return
# Load environment variables from an env file such as /pod.env.
def load_env(file_path):
    """Parse a file of ``export KEY=VALUE`` (or plain ``KEY=VALUE``) lines into a dict.

    Fixes over the previous version:
    - ``line is not ""`` compared identity with a literal (always true);
    - ``lstrip("export")`` strips a *character set*, which mangled keys that
      start with those letters (e.g. ``osflag`` became ``sflag``);
    - ``split("=")`` truncated values that themselves contain ``=``.

    :param file_path: path of the env file to read.
    :return: dict mapping variable names to their (full) values.
    """
    envs = {}
    with open(file_path, "r") as f:
        for line in f:
            line = line.strip()
            # Drop only the literal "export " keyword prefix, if present.
            if line.startswith("export "):
                line = line[len("export "):].strip()
            if line and "=" in line:
                # Split on the first '=' only, so values may contain '='.
                key, _, value = line.partition("=")
                envs[key] = value
    return envs
# Write environment variables into an env file such as /pod.env.
# A variable that already exists in the file is overwritten.
def add_env(path, envs):
    """Merge *envs* into the env file at *path* and rewrite it as export lines."""
    # Keys already present keep their position; duplicate keys take the
    # new value, keys only present in the file are preserved.
    merged = load_env(path)
    merged.update(envs)
    with open(path, "w") as f:
        for key, value in merged.items():
            f.write("export %s=%s\n" % (key, value))
    return
def get_os_flag():
    """Return the arch token used in the AI toolkit paths: 'arm64' or 'x86_64'."""
    if platform.machine() == "aarch64":
        return "arm64"
    return "x86_64"
# The GNU toolchain install dirs use a different arch token than the
# algorithm components, so this case is handled separately.
def get_gnu_arch_flag():
    """Return the arch token used in GNU library paths: 'aarch64' or 'x86_64'."""
    if platform.machine() == "aarch64":
        return "aarch64"
    return "x86_64"
def get_random_num(length):
    """Return a string of *length* random decimal digits."""
    digits = []
    for _ in range(length):
        digits.append(random.choice(string.digits))
    return "".join(digits)
# Make interactive shells of the given user pick up environment-variable
# updates by sourcing /pod.env from .bashrc.
def set_bashrc(username):
    """Append a snippet to the user's .bashrc that sources /pod.env when it exists."""
    if username == "root":
        bashrc = "/root/.bashrc"
    else:
        bashrc = "/home/" + username + "/.bashrc"
    # NOTE: this snippet is written verbatim into .bashrc.
    snippet = '''
    if [ -f "/pod.env" ]; then
      . /pod.env
    fi
        '''
    with open(bashrc, "a") as f:
        f.write(snippet + "\n")
    return
# Prepare the MindSpore runtime environment:
# 1) assemble environment variables and write them into /pod.env
# 2) create the training shell script required by the algorithm side
# 3) create the hccl rank-table file required by the algorithm side
def handle_mindspore():
    path = "/pod.env"
    envs = load_env(path) # env vars already created during platform bootstrap
    envs_to_add= {}
    envs_to_add["DEVICE_ID"] = "0"
    # parse the GPU/NPU device ids
    if "VISIBLE_IDS" in envs:
        envs["VISIBLE_IDS"] = envs["VISIBLE_IDS"].replace("\\","")
        envs_to_add["VISIBLE_IDS"] = envs["VISIBLE_IDS"]
    else:
        pass
    # parse the NPU device IPs
    if "NPU_IPS" in envs:
        envs["NPU_IPS"] = envs["NPU_IPS"].replace("\\","")
        envs_to_add["NPU_IPS"] = envs["NPU_IPS"]
    else:
        pass
    ## merge the variables already in /pod.env with the current
    ## process environment; the union is collected in envs
    for k, v in os.environ.items():
        if k not in envs:
            envs[k] = v
        else:
            pass
    ## no device-id parsing needed for MindSpore
    ## set the random parameter required by the algorithm side
    envs["RANDOM"] = get_random_num(6)
    envs["osflag"] = get_os_flag()
    envs["gnu_arch"] = get_gnu_arch_flag()
    # MindSpore environment-variable templates (rendered below with
    # string.Template against the merged envs)
    mindspore_envs = [
        "PYTHONPATH=/usr/local/lib/python3.7/site-packages/mindspore/lib:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/opp/op_impl/built-in/ai_core/tbe:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/pyACL/python/site-packages/acl:${PYTHONPATH}",
        "LD_LIBRARY_PATH=/usr/lib/${gnu_arch}-linux-gnu/hdf5/serial:/usr/local/Ascend/add-ons/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/fwkacllib/lib64:/usr/local/Ascend/add-ons:/home/HwHiAiUser/Ascend/nnae/latest/fwkacllib/lib64:/usr/local/Ascend/driver/lib64/common/:/usr/local/Ascend/driver/lib64/driver/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/atc/lib64:/usr/local/Ascend/fwkacllib/lib64/:/usr/local/lib/python3.7/site-packages/mindspore/lib/:/usr/local/lib/python3.7/site-packages/torch/lib:/usr/local/lib:/home/clang+llvm/lib/:$LD_LIBRARY_PATH",
        "TBE_IMPL_PATH=/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/opp/op_impl/built-in/ai_core/tbe:/usr/local/Ascend/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe",
        "PATH=$PATH:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/fwkacllib/ccec_compiler/bin/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:/home/clang+llvm/bin/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/atc/bin",
        "ASCEND_OPP_PATH=/home/HwHiAiUser/Ascend/ascend-toolkit/latest/opp",
        "LLVM_CONFIG=/home/clang+llvm/bin/llvm-config",
        "SOC_VERSION=Ascend910",
        "POD_NAME=${DLWS_JOB_ID}",
        "JOB_ID=${RANDOM}",
        "RANK_SIZE=1",
        "ASCEND_GLOBAL_LOG_LEVEL=3",
        "ASCEND_GLOBAL_EVENT_ENABLE=0"
    ]
    # render the templates
    for item in mindspore_envs:
        tpl = string.Template(item)
        new_item = tpl.safe_substitute(envs)
        if "=" in new_item:
            key_val = new_item.strip().split("=")
            k = key_val[0]
            v = key_val[1]
            envs_to_add[k] = v
        else:
            pass
    # 1) update /pod.env with the rendered environment variables
    add_env(path, envs_to_add)
    # 2) generate the training shell script
    pod_cmd = os.environ["DLWS_LAUNCH_CMD"]
    npu_info_dir = "/home/" + os.environ["DLWS_USER_NAME"] + "/.npu/" + os.environ["DLWS_JOB_ID"] + "/train.sh"
    cmd = 'python /pod/scripts/create_script.py --type mindspore --command "%s" --out %s'% (pod_cmd, npu_info_dir)
    os.system(cmd)
    os.system("chmod 777 " + npu_info_dir)
    # write the env-var updates into root's shell startup file
    set_bashrc("root")
    ## 3) generate the hccl rank-table file (hccl_ms.json for MindSpore)
    if need_create_hccl() is True:
        create_hccl_mindspore()
    else:
        pass
    # 4) distributed job: synchronize environment setup across pods
    if is_distributed_job() is True and is_ps_pod() is True:
        notify()
    elif is_distributed_job() is True and is_worker_pod() is True:
        wait()
    else:
        pass
    return
# Prepare the TensorFlow runtime environment:
# 1) assemble environment variables and write them into /pod.env
# 2) create the training shell script required by the algorithm side
# 3) create the hccl rank-table file required by the algorithm side
def handle_tensorflow():
    # 1) assemble env vars and write them into /pod.env
    path = "/pod.env"
    envs = load_env(path) # env vars already created during platform bootstrap
    envs_to_add= {}
    # parse the GPU/NPU device ids
    if "VISIBLE_IDS" in envs:
        envs["VISIBLE_IDS"] = envs["VISIBLE_IDS"].replace("\\","")
        envs_to_add["VISIBLE_IDS"] = envs["VISIBLE_IDS"]
    else:
        pass
    if "NPU_IPS" in envs:
        envs["NPU_IPS"] = envs["NPU_IPS"].replace("\\","")
        envs_to_add["NPU_IPS"] = envs["NPU_IPS"]
    else:
        pass
    ## merge the variables already in /pod.env with the current
    ## process environment; the union is collected in envs
    for k, v in os.environ.items():
        if k not in envs:
            envs[k] = v
        else:
            pass
    ## first device id (TensorFlow pins DEVICE_ID/DEVICE_INDEX to it)
    device_id="0"
    device_index="0"
    if "VISIBLE_IDS" in envs:
        devid = envs["VISIBLE_IDS"].split(",")[0].strip()
        if len(devid) > 0:
            device_id = devid
        else:
            pass
    else:
        pass
    device_index = device_id
    ## set the random parameter required by the algorithm side
    envs["RANDOM"] = get_random_num(6)
    envs["osflag"] = get_os_flag()
    envs["gnu_arch"] = get_gnu_arch_flag()
    # TensorFlow environment-variable templates (rendered below with
    # string.Template against the merged envs)
    tensorflow_envs = [
        "PYTHONPATH=/usr/local/lib/python3.7/site-packages/mindspore/lib:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/opp/op_impl/built-in/ai_core/tbe:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/pyACL/python/site-packages/acl:${PYTHONPATH}",
        "LD_LIBRARY_PATH=/usr/lib/${gnu_arch}-linux-gnu/hdf5/serial:/usr/local/Ascend/add-ons/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/fwkacllib/lib64:/usr/local/Ascend/add-ons:/home/HwHiAiUser/Ascend/nnae/latest/fwkacllib/lib64:/usr/local/Ascend/driver/lib64/common/:/usr/local/Ascend/driver/lib64/driver/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe/op_tiling:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/atc/lib64:/usr/local/Ascend/fwkacllib/lib64/:/usr/local/lib/python3.7/site-packages/mindspore/lib/:/usr/local/lib/python3.7/site-packages/torch/lib:/usr/local/lib:/home/clang+llvm/lib/:$LD_LIBRARY_PATH",
        "TBE_IMPL_PATH=/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/opp/op_impl/built-in/ai_core/tbe:/usr/local/Ascend/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe",
        "PATH=$PATH:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/${osflag}-linux/fwkacllib/ccec_compiler/bin/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin/:/home/clang+llvm/bin/:/home/HwHiAiUser/Ascend/ascend-toolkit/latest/atc/bin",
        "ASCEND_OPP_PATH=/home/HwHiAiUser/Ascend/ascend-toolkit/latest/opp",
        "LLVM_CONFIG=/home/clang+llvm/bin/llvm-config",
        "SOC_VERSION=Ascend910",
        "POD_NAME=${DLWS_JOB_ID}",
        "JOB_ID=${RANDOM}",
        "RANK_SIZE=1",
        "ASCEND_GLOBAL_LOG_LEVEL=3",
        "ASCEND_GLOBAL_EVENT_ENABLE=0"
    ]
    envs_to_add["DEVICE_ID"] = device_id
    envs_to_add["DEVICE_INDEX"] = device_index
    # render the templates
    for item in tensorflow_envs:
        tpl = string.Template(item)
        new_item = tpl.safe_substitute(envs)
        if "=" in new_item:
            key_val = new_item.strip().split("=")
            k = key_val[0]
            v = key_val[1]
            envs_to_add[k] = v
        else:
            pass
    # 1) update /pod.env with the rendered environment variables
    add_env(path, envs_to_add)
    ## 2) generate the training shell script
    pod_cmd = os.environ["DLWS_LAUNCH_CMD"]
    npu_info_dir = "/home/" + os.environ["DLWS_USER_NAME"] + "/.npu/" + os.environ["DLWS_JOB_ID"] + "/train.sh"
    cmd = 'python /pod/scripts/create_script.py --type tensorflow --command "%s" --out %s'% (pod_cmd, npu_info_dir)
    print(cmd, "==========================")
    os.system(cmd)
    os.system("chmod 777 " + npu_info_dir)
    # write the env-var updates into root's shell startup file
    set_bashrc("root")
    # 3) generate hccl_tf.json
    if need_create_hccl() is True:
        create_hccl_tensorflow()
    else:
        pass
    # 4) distributed job: synchronize environment setup across pods
    if is_distributed_job() is True and is_ps_pod() is True:
        notify()
    elif is_distributed_job() is True and is_worker_pod() is True:
        wait()
    else:
        pass
    return
# Whether this is a distributed (multi-node) training job.
def is_distributed_job():
    """Return True when DLWS_NUM_PS is set to a positive integer."""
    num_ps = os.environ.get("DLWS_NUM_PS", "").strip().lower()
    if num_ps and int(num_ps) > 0:
        print("is_distributed_job return true")
        return True
    return False
# Whether this pod is the master (ps) node.
def is_ps_pod():
    """Return True when this pod runs the 'ps' role of a multi-node job."""
    ## 'ps' marks the multi-node ps pod
    role = os.environ.get("DLWS_ROLE_NAME", "").strip().lower()
    return role == "ps"
# Whether this pod is a worker node.
def is_worker_pod():
    """Return True when this pod runs the 'worker' role of a multi-node job."""
    ## 'worker' marks a multi-node worker pod
    role = os.environ.get("DLWS_ROLE_NAME", "").strip().lower()
    return role == "worker"
# Distributed training: after environment preparation finishes, the ps
# node creates the setup_environment_done file as a "setup complete" marker.
def notify():
    """Signal (from the ps pod) that environment setup has completed."""
    # single-machine jobs have only one pod, so no coordination is needed
    if is_distributed_job() is False:
        return
    done_flag = "/home/" + os.environ["DLWS_USER_NAME"] + "/.npu/" + os.environ["DLWS_JOB_ID"] + "/setup_environment_done"
    # multi-node training: the ps pod prepares the environment
    if not os.path.exists(done_flag):
        open(done_flag, 'a').close()
    return
# Distributed training: worker nodes poll for the setup_environment_done
# file to learn when environment preparation has finished.
def wait():
    """Block (on a worker pod) until the ps pod signals setup completion."""
    # single-machine jobs have only one pod, so no waiting is needed
    if is_distributed_job() is False:
        return
    done_flag = "/home/" + os.environ["DLWS_USER_NAME"] + "/.npu/" + os.environ["DLWS_JOB_ID"] + "/setup_environment_done"
    # multi-node training: poll until the ps pod has prepared the environment
    while not os.path.exists(done_flag):
        print("===========", done_flag, " not found. wait")
        time.sleep(1)
    return
# 1) single-node training needs the hccl file created locally;
# 2) in multi-node training the ps pod creates it and the worker pods
#    read the shared file.
def need_create_hccl():
    """Return True when this pod is responsible for creating the hccl file."""
    ## 'master' marks a single-node pod; 'ps' marks the multi-node ps pod
    role = os.environ.get("DLWS_ROLE_NAME", "").strip().lower()
    return role in ("ps", "master")
if __name__ == "__main__":
    # 1) The training framework type is supplied by the frontend; based on
    #    this field the script prepares framework-specific environment
    #    parameters (hccl files, environment variables, ...).
    # 2) The script is invoked by the platform's bootstrap.sh and only runs
    #    on single-node jobs or on the PS pod of a distributed job.
    if "aiframework" in os.environ:
        framework = os.environ["aiframework"].strip().lower()
        if framework == "tensorflow":
            handle_tensorflow()
        elif framework == "mindspore":
            handle_mindspore()
        else:
            handle_tensorflow()
    else:
        # backwards compatibility with platform versions < v1.3.0
        create_hccl_mindspore()
        create_hccl_tensorflow()
        pass
| [
"os.path.exists",
"random.choice",
"string.Template",
"os.access",
"time.sleep",
"os.environ.items",
"os.path.isfile",
"platform.machine",
"os.system",
"json.dump"
] | [((9460, 9478), 'os.environ.items', 'os.environ.items', ([], {}), '()\n', (9476, 9478), False, 'import os\n'), ((12144, 12158), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (12153, 12158), False, 'import os\n'), ((12163, 12201), 'os.system', 'os.system', (["('chmod 777 ' + npu_info_dir)"], {}), "('chmod 777 ' + npu_info_dir)\n", (12172, 12201), False, 'import os\n'), ((13240, 13258), 'os.environ.items', 'os.environ.items', ([], {}), '()\n', (13256, 13258), False, 'import os\n'), ((16299, 16313), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (16308, 16313), False, 'import os\n'), ((16318, 16356), 'os.system', 'os.system', (["('chmod 777 ' + npu_info_dir)"], {}), "('chmod 777 ' + npu_info_dir)\n", (16327, 16356), False, 'import os\n'), ((3421, 3434), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3431, 3434), False, 'import time\n'), ((3668, 3692), 'json.dump', 'json.dump', (['hccl_data', 'fp'], {}), '(hccl_data, fp)\n', (3677, 3692), False, 'import json\n'), ((6830, 6843), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (6840, 6843), False, 'import time\n'), ((7097, 7121), 'json.dump', 'json.dump', (['hccl_data', 'fp'], {}), '(hccl_data, fp)\n', (7106, 7121), False, 'import json\n'), ((8006, 8024), 'platform.machine', 'platform.machine', ([], {}), '()\n', (8022, 8024), False, 'import platform\n'), ((8191, 8209), 'platform.machine', 'platform.machine', ([], {}), '()\n', (8207, 8209), False, 'import platform\n'), ((11523, 11544), 'string.Template', 'string.Template', (['item'], {}), '(item)\n', (11538, 11544), False, 'import string\n'), ((15644, 15665), 'string.Template', 'string.Template', (['item'], {}), '(item)\n', (15659, 15665), False, 'import string\n'), ((17863, 17901), 'os.path.exists', 'os.path.exists', (['setup_environment_done'], {}), '(setup_environment_done)\n', (17877, 17901), False, 'import os\n'), ((1978, 1998), 'os.path.isfile', 'os.path.isfile', (['PATH'], {}), '(PATH)\n', (1992, 1998), False, 'import os\n'), ((2003, 2027), 
'os.access', 'os.access', (['PATH', 'os.R_OK'], {}), '(PATH, os.R_OK)\n', (2012, 2027), False, 'import os\n'), ((5062, 5082), 'os.path.isfile', 'os.path.isfile', (['PATH'], {}), '(PATH)\n', (5076, 5082), False, 'import os\n'), ((5087, 5111), 'os.access', 'os.access', (['PATH', 'os.R_OK'], {}), '(PATH, os.R_OK)\n', (5096, 5111), False, 'import os\n'), ((8342, 8370), 'random.choice', 'random.choice', (['string.digits'], {}), '(string.digits)\n', (8355, 8370), False, 'import random\n'), ((18315, 18353), 'os.path.exists', 'os.path.exists', (['setup_environment_done'], {}), '(setup_environment_done)\n', (18329, 18353), False, 'import os\n'), ((18444, 18457), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (18454, 18457), False, 'import time\n')] |
from typing import List
import torch
from torch.utils.data.dataset import Dataset
def noise(outlier_classes: List[int], generated_noise: torch.Tensor, norm: torch.Tensor,
          nom_class: int, train_set: Dataset, gt: bool = False) -> Dataset:
    """
    Replaces the training data of `train_set` with nominal samples plus pure-noise anomalies.

    :param outlier_classes: list of all outlier class indices; the first entry labels the noise samples.
    :param generated_noise: tensor of noise images (might also be Outlier Exposure based noise) (n x c x h x w).
    :param norm: tensor of nominal images (n x c x h x w).
    :param nom_class: index of the class that is considered nominal.
    :param train_set: training dataset whose `data` and `targets` are overwritten in place.
    :param gt: ground-truth maps are not available for pure noise; passing True raises.
    :return: the modified dataset (the same object as `train_set`).
    """
    if gt:
        raise ValueError('No GT mode for pure noise available!')
    # Noise images are used directly as anomalies, converted to byte images.
    anomalies = generated_noise.clamp(0, 255).byte()
    nominal_labels = torch.ones(norm.size(0)) * nom_class
    anomalous_labels = torch.ones(anomalies.size(0)) * outlier_classes[0]
    train_set.data = torch.cat((norm, anomalies))
    train_set.targets = torch.cat((nominal_labels, anomalous_labels))
    return train_set
def malformed_normal(outlier_classes: List[int], generated_noise: torch.Tensor, norm: torch.Tensor, nom_class: int,
                     train_set: Dataset, gt: bool = False, brightness_threshold: float = 0.11*255) -> Dataset:
    """
    Replaces the training data of `train_set` with nominal samples and "malformed normal"
    anomalies, i.e. nominal images with generated noise added onto them.

    :param outlier_classes: list of all outlier class indices; the first entry labels the anomalies.
    :param generated_noise: tensor of noise images (might also be Outlier Exposure based noise),
        same shape as `norm`.
    :param norm: tensor of nominal images, either (n x c x h x w) or (n x h x w).
    :param nom_class: index of the class that is considered nominal.
    :param train_set: training dataset whose `data` and `targets` are overwritten in place.
    :param gt: whether to also return pixel-wise ground-truth maps (0 nominal, 1 anomalous).
    :param brightness_threshold: if the average brightness (over color channels) of a pixel
        exceeds this threshold, the noise is subtracted instead of added, so bright pixels
        are still visibly perturbed.
    :return: the modified dataset; with `gt`, a (dataset, gtmaps) pair.
    """
    assert (norm.dim() == 4 or norm.dim() == 3) and generated_noise.shape == norm.shape
    channel_missing = norm.dim() == 3
    if channel_missing:  # insert a channel dimension so the code below is uniform
        norm, generated_noise = norm.unsqueeze(1), generated_noise.unsqueeze(1)
    generated_noise = generated_noise.int()
    # Pixels whose summed channel brightness exceeds the threshold get the noise
    # sign flipped (subtracted rather than added) on every channel.
    bright_regions = norm.sum(1) > brightness_threshold * norm.shape[1]
    generated_noise = torch.where(bright_regions.unsqueeze(1), -generated_noise, generated_noise)
    anom = (norm.clone().int() + generated_noise).clamp(0, 255).byte()
    data = torch.cat((norm, anom))
    targets = torch.cat(
        (torch.ones(norm.size(0)) * nom_class,
         torch.ones(anom.size(0)) * outlier_classes[0])
    )
    if channel_missing:
        data = data.squeeze(1)
    train_set.data = data
    train_set.targets = targets
    if not gt:
        return train_set
    nominal_maps = torch.zeros_like(norm)[:, 0].float()             # 0 for nominal
    anomalous_maps = (norm != anom).max(1)[0].clone().float()   # 1 for anomalous
    gtmaps = torch.cat((nominal_maps, anomalous_maps))
    if not channel_missing:
        gtmaps = gtmaps.unsqueeze(1)
    return train_set, gtmaps
| [
"torch.zeros_like",
"torch.cat"
] | [((1057, 1080), 'torch.cat', 'torch.cat', (['(norm, anom)'], {}), '((norm, anom))\n', (1066, 1080), False, 'import torch\n'), ((3371, 3394), 'torch.cat', 'torch.cat', (['(norm, anom)'], {}), '((norm, anom))\n', (3380, 3394), False, 'import torch\n'), ((3692, 3714), 'torch.zeros_like', 'torch.zeros_like', (['norm'], {}), '(norm)\n', (3708, 3714), False, 'import torch\n')] |
from collections import deque
from typing import List
class AtCoderBase:
    """Harness for AtCoder solutions: serves canned stdin lines via input()
    and records everything "printed" for later inspection."""
    def __init__(self, all_input: List[str]):
        # Pending input lines; input() consumes them from the right end.
        self.all_input = deque(all_input)
        self.ret_str = ""
        self.msg = []  # every value handed to print(), stringified, in order
    def input(self):
        """Return (and consume) the next canned input line."""
        return self.all_input.pop()
    def print(self, data):
        """Record ``data`` as a string instead of writing to stdout."""
        self.msg.append(str(data))
    def process(self):
        """Solve the problem; subclasses must override this."""
        raise NotImplementedError
| [
"collections.deque"
] | [((146, 162), 'collections.deque', 'deque', (['all_input'], {}), '(all_input)\n', (151, 162), False, 'from collections import deque\n')] |
import pandas as pd
import pyproj
from preprocess.load_data.data_loader import load_hotel_reserve
customer_tb, hotel_tb, reserve_tb = load_hotel_reserve()
# Define a function that converts the minutes/seconds parts to decimal degrees
def convert_to_continuous(x):
    """Convert a DD.MMSS-encoded coordinate into decimal degrees.

    The input packs degrees, minutes and seconds into a single float as
    ``DD.MMSS`` (e.g. ``35.3025`` means 35 deg 30 min 25 sec).
    Note: this float arithmetic introduces a tiny rounding error; for exact
    values, convert to a string and parse the degree/minute/second digits.

    Fix: the original local names were swapped (``x_min`` actually held the
    seconds and ``x_sec`` the minutes); the returned value is unchanged.
    """
    seconds = (x * 100 - int(x * 100)) * 100         # the SS digits
    minutes = (x - int(x) - seconds / 10000) * 100   # the MM digits
    return int(x) + minutes / 60 + seconds / 60 / 60
# Convert the minutes/seconds parts into decimal degrees
customer_tb['home_latitude'] = customer_tb['home_latitude'] \
  .apply(lambda x: convert_to_continuous(x))
customer_tb['home_longitude'] = customer_tb['home_longitude'] \
  .apply(lambda x: convert_to_continuous(x))
# Obtain the world geodetic system (EPSG code 4326 is identical to WGS84)
epsg_world = pyproj.Proj('+init=EPSG:4326')
# Obtain the Tokyo datum
epsg_japan = pyproj.Proj('+init=EPSG:4301')
# Convert coordinates from the Tokyo datum to the world geodetic system
home_position = customer_tb[['home_longitude', 'home_latitude']] \
  .apply(lambda x:
         pyproj.transform(epsg_japan, epsg_world, x[0], x[1]), axis=1)
# Update customer_tb's latitude/longitude values to the world geodetic system
customer_tb['home_longitude'] = [x[0] for x in home_position]
customer_tb['home_latitude'] = [x[1] for x in home_position]
# From here on: the code as published in the book
# Load the libraries for handling latitude/longitude data in Python
import math
import pyproj
# Load the libraries for computing distances
from geopy.distance import great_circle, vincenty
# ... datum conversion code omitted ...
# Join the customer table and the hotel table onto the reservation table
reserve_tb = \
  pd.merge(reserve_tb, customer_tb, on='customer_id', how='inner')
reserve_tb = pd.merge(reserve_tb, hotel_tb, on='hotel_id', how='inner')
# Extract the latitude/longitude of the home and the hotel
home_and_hotel_points = reserve_tb \
  .loc[:, ['home_longitude', 'home_latitude',
           'hotel_longitude', 'hotel_latitude']]
# Use the WGS84 ellipsoid (sets the equatorial radius accordingly)
g = pyproj.Geod(ellps='WGS84')
# Forward azimuth, back azimuth and Vincenty distance computed in one call
home_to_hotel = home_and_hotel_points \
  .apply(lambda x: g.inv(x[0], x[1], x[2], x[3]), axis=1)
# Extract the forward azimuth
[x[0] for x in home_to_hotel]
# Extract the Vincenty distance
[x[2] for x in home_to_hotel]
# Distance with the Haversine (great-circle) formula
home_and_hotel_points.apply(
  lambda x: great_circle((x[1], x[0]), (x[3], x[2])).meters, axis=1)
# Distance with the Vincenty formula
home_and_hotel_points.apply(
  lambda x: vincenty((x[1], x[0]), (x[3], x[2])).meters, axis=1)
# Define the Hubeny formula as a function
def hubeny(lon1, lat1, lon2, lat2, a=6378137, b=6356752.314245):
    """Approximate distance in metres between two points via the Hubeny formula.

    Inputs are decimal degrees; ``a`` and ``b`` default to the WGS84
    semi-major and semi-minor axes.
    """
    e2 = (a ** 2 - b ** 2) / a ** 2  # first eccentricity squared
    # Degrees -> radians.
    lon1, lat1, lon2, lat2 = [v * (2 * math.pi) / 360
                              for v in (lon1, lat1, lon2, lat2)]
    mid_lat = (lat1 + lat2) / 2
    w = 1 - e2 * math.sin(mid_lat) ** 2
    meridian_term = (b ** 2 / w ** 3) * (lat1 - lat2) ** 2
    parallel_term = (a ** 2 / w) * math.cos(mid_lat) ** 2 * (lon1 - lon2) ** 2
    return math.sqrt(meridian_term + parallel_term)
# Distance with the Hubeny formula
home_and_hotel_points \
  .apply(lambda x: hubeny(x[0], x[1], x[2], x[3]), axis=1)
| [
"pyproj.Geod",
"pandas.merge",
"math.sqrt",
"pyproj.transform",
"math.cos",
"geopy.distance.great_circle",
"geopy.distance.vincenty",
"pyproj.Proj",
"preprocess.load_data.data_loader.load_hotel_reserve",
"math.sin"
] | [((134, 154), 'preprocess.load_data.data_loader.load_hotel_reserve', 'load_hotel_reserve', ([], {}), '()\n', (152, 154), False, 'from preprocess.load_data.data_loader import load_hotel_reserve\n'), ((717, 747), 'pyproj.Proj', 'pyproj.Proj', (['"""+init=EPSG:4326"""'], {}), "('+init=EPSG:4326')\n", (728, 747), False, 'import pyproj\n'), ((774, 804), 'pyproj.Proj', 'pyproj.Proj', (['"""+init=EPSG:4301"""'], {}), "('+init=EPSG:4301')\n", (785, 804), False, 'import pyproj\n'), ((1367, 1431), 'pandas.merge', 'pd.merge', (['reserve_tb', 'customer_tb'], {'on': '"""customer_id"""', 'how': '"""inner"""'}), "(reserve_tb, customer_tb, on='customer_id', how='inner')\n", (1375, 1431), True, 'import pandas as pd\n'), ((1445, 1503), 'pandas.merge', 'pd.merge', (['reserve_tb', 'hotel_tb'], {'on': '"""hotel_id"""', 'how': '"""inner"""'}), "(reserve_tb, hotel_tb, on='hotel_id', how='inner')\n", (1453, 1503), True, 'import pandas as pd\n'), ((1686, 1712), 'pyproj.Geod', 'pyproj.Geod', ([], {'ellps': '"""WGS84"""'}), "(ellps='WGS84')\n", (1697, 1712), False, 'import pyproj\n'), ((2504, 2594), 'math.sqrt', 'math.sqrt', (['(b ** 2 / w ** 3 * (lat1 - lat2) ** 2 + a ** 2 / w * c2 * (lon1 - lon2) ** 2)'], {}), '(b ** 2 / w ** 3 * (lat1 - lat2) ** 2 + a ** 2 / w * c2 * (lon1 -\n lon2) ** 2)\n', (2513, 2594), False, 'import math\n'), ((922, 974), 'pyproj.transform', 'pyproj.transform', (['epsg_japan', 'epsg_world', 'x[0]', 'x[1]'], {}), '(epsg_japan, epsg_world, x[0], x[1])\n', (938, 974), False, 'import pyproj\n'), ((2460, 2487), 'math.cos', 'math.cos', (['((lat1 + lat2) / 2)'], {}), '((lat1 + lat2) / 2)\n', (2468, 2487), False, 'import math\n'), ((2007, 2047), 'geopy.distance.great_circle', 'great_circle', (['(x[1], x[0])', '(x[3], x[2])'], {}), '((x[1], x[0]), (x[3], x[2]))\n', (2019, 2047), False, 'from geopy.distance import great_circle, vincenty\n'), ((2129, 2165), 'geopy.distance.vincenty', 'vincenty', (['(x[1], x[0])', '(x[3], x[2])'], {}), '((x[1], x[0]), (x[3], x[2]))\n', 
(2137, 2165), False, 'from geopy.distance import great_circle, vincenty\n'), ((2418, 2445), 'math.sin', 'math.sin', (['((lat1 + lat2) / 2)'], {}), '((lat1 + lat2) / 2)\n', (2426, 2445), False, 'import math\n')] |
"""Management command to attempt to add an HLS video to edX via API call"""
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from ui.api import post_hls_to_edx
from ui.models import VideoFile
from ui.encodings import EncodingNames
from ui.utils import get_error_response_summary_dict
# Resolve the project's configured user model at import time.
# NOTE(review): ``User`` is not referenced anywhere in this module's visible
# code — confirm whether it is still needed.
User = get_user_model()
class Command(BaseCommand):
    """Attempts to add an HLS video to edX via API call"""
    # What ``manage.py help`` displays for this command. Inside the class body
    # ``__doc__`` resolves to the class docstring above, which is already bound
    # in the class namespace at this point.
    help = __doc__
    def add_arguments(self, parser):
        """Registers the command-line options for this command."""
        # --video-file-id and --edx-course-id are mutually exclusive ways of
        # selecting videos. --video-title sits outside the group so it may be
        # combined with --edx-course-id; combining it with --video-file-id is
        # rejected explicitly in handle().
        group = parser.add_mutually_exclusive_group()
        group.add_argument(
            "--video-file-id",
            type=int,
            help="The id of the VideoFile that you want to add to edX",
        )
        group.add_argument(
            "--edx-course-id",
            type=str,
            help="The edx_course_id value for the Collection that the video file belongs to",
        )
        parser.add_argument(
            "--video-title",
            type=str,
            help="The video title of the video file you want to add to edX",
        )
    def handle(self, *args, **options):
        """Finds matching HLS-encoded VideoFiles and posts each one to edX.

        Raises CommandError when the option combination is invalid or when no
        VideoFile matches the resulting filters.
        """
        if not options["video_file_id"] and not any(
            (options["edx_course_id"], options["video_title"])
        ):
            raise CommandError(
                "Please provide --video-file-id or at least one of --edx-course-id and --video-title"
            )
        if options["video_file_id"] and options["video_title"]:
            raise CommandError(
                "Please provide --video-file-id or --video-title, not both"
            )
        # Only HLS-encoded files are eligible; narrow further by id, or by
        # course id and/or title.
        filters = dict(encoding=EncodingNames.HLS)
        if options["video_file_id"]:
            filters["pk"] = options["video_file_id"]
        else:
            if options["edx_course_id"]:
                filters["video__collection__edx_course_id"] = options["edx_course_id"]
            if options["video_title"]:
                filters["video__title"] = options["video_title"]
        video_files = list(VideoFile.objects.filter(**filters).all())
        if not video_files:
            raise CommandError(
                "No HLS-encoded VideoFiles found that match the given parameters ({})".format(
                    filters
                )
            )
        self.stdout.write("Attempting to post video(s) to edX...")
        for video_file in video_files:
            # Maps each edX endpoint to its response; a response may be absent
            # or falsy on failure (hence the defensive getattr below).
            response_dict = post_hls_to_edx(video_file)
            # Successful responses: a response object exists and resp.ok is truthy.
            good_responses = {
                endpoint: resp
                for endpoint, resp in response_dict.items()
                if getattr(resp, "ok", None)
            }
            bad_responses = {
                endpoint: resp
                for endpoint, resp in response_dict.items()
                if endpoint not in good_responses
            }
            for _, resp in good_responses.items():
                self.stdout.write(
                    self.style.SUCCESS(
                        "Video successfully added to edX – VideoFile: {} ({}), edX url: {}".format(
                            video_file.video.title,
                            video_file.pk,
                            resp.url,
                        )
                    )
                )
            for edx_endpoint, resp in bad_responses.items():
                # resp can be None when the request itself failed.
                resp_summary = (
                    None if resp is None else get_error_response_summary_dict(resp)
                )
                self.stdout.write(
                    self.style.ERROR(
                        "Request to add HLS video to edX failed – "
                        "VideoFile: {} ({}), edX url: {}, API response: {}".format(
                            video_file.video.title,
                            video_file.pk,
                            edx_endpoint.full_api_url,
                            resp_summary,
                        )
                    )
                )
| [
"django.contrib.auth.get_user_model",
"ui.api.post_hls_to_edx",
"ui.models.VideoFile.objects.filter",
"ui.utils.get_error_response_summary_dict",
"django.core.management.base.CommandError"
] | [((357, 373), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (371, 373), False, 'from django.contrib.auth import get_user_model\n'), ((1277, 1386), 'django.core.management.base.CommandError', 'CommandError', (['"""Please provide --video-file-id or at least one of --edx-course-id and --video-title"""'], {}), "(\n 'Please provide --video-file-id or at least one of --edx-course-id and --video-title'\n )\n", (1289, 1386), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((1489, 1562), 'django.core.management.base.CommandError', 'CommandError', (['"""Please provide --video-file-id or --video-title, not both"""'], {}), "('Please provide --video-file-id or --video-title, not both')\n", (1501, 1562), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((2401, 2428), 'ui.api.post_hls_to_edx', 'post_hls_to_edx', (['video_file'], {}), '(video_file)\n', (2416, 2428), False, 'from ui.api import post_hls_to_edx\n'), ((2008, 2043), 'ui.models.VideoFile.objects.filter', 'VideoFile.objects.filter', ([], {}), '(**filters)\n', (2032, 2043), False, 'from ui.models import VideoFile\n'), ((3360, 3397), 'ui.utils.get_error_response_summary_dict', 'get_error_response_summary_dict', (['resp'], {}), '(resp)\n', (3391, 3397), False, 'from ui.utils import get_error_response_summary_dict\n')] |
__author__ = 'sibirrer'  # original module author
from lenstronomy.LensModel.Profiles.flexion import Flexion
from lenstronomy.LensModel.lens_model import LensModel
import numpy as np
import numpy.testing as npt
import pytest
# NOTE(review): the class name says "ExternalShear" but every test below
# exercises the Flexion profile — looks like a copy-paste from another test
# module. The name is kept so pytest discovery is unchanged.
class TestExternalShear(object):
    """
    Tests for the Flexion lens model profile (flexion coefficients g1..g4).
    """
    def setup(self):
        # Fresh Flexion instance and a fixed coefficient set shared by all tests.
        self.flex = Flexion()
        g1, g2, g3, g4 = 0.01, 0.02, 0.03, 0.04
        self.kwargs_lens = {'g1': g1, 'g2': g2, 'g3': g3, 'g4': g4}
    def test_function(self):
        """Lensing potential at single points and at arrays of points."""
        x = np.array([1])
        y = np.array([2])
        values = self.flex.function(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0], 0.135, decimal=5)
        x = np.array([0])
        y = np.array([0])
        values = self.flex.function(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0], 0, decimal=5)
        x = np.array([2, 3, 4])
        y = np.array([1, 1, 1])
        values = self.flex.function(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0], 0.09, decimal=5)
        npt.assert_almost_equal(values[1], 0.18666666666666668, decimal=5)
    def test_derivatives(self):
        """Deflection angles (first derivatives of the potential)."""
        x = np.array([1])
        y = np.array([2])
        f_x, f_y = self.flex.derivatives(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(f_x[0], 0.105, decimal=5)
        npt.assert_almost_equal(f_y[0], 0.15, decimal=5)
        x = np.array([1, 3, 4])
        y = np.array([2, 1, 1])
        values = self.flex.derivatives(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0][0], 0.105, decimal=5)
        npt.assert_almost_equal(values[1][0], 0.15, decimal=5)
    def test_hessian(self):
        """Second derivatives; also checks the Hessian is symmetric (f_xy == f_yx)."""
        x = np.array(1)
        y = np.array(2)
        f_xx, f_xy, f_yx, f_yy = self.flex.hessian(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(f_xx, 0.05, decimal=5)
        npt.assert_almost_equal(f_yy, 0.11, decimal=5)
        npt.assert_almost_equal(f_xy, 0.08, decimal=5)
        npt.assert_almost_equal(f_xy, f_yx, decimal=8)
        x = np.array([1,3,4])
        y = np.array([2,1,1])
        values = self.flex.hessian(x, y, **self.kwargs_lens)
        npt.assert_almost_equal(values[0][0], 0.05, decimal=5)
        npt.assert_almost_equal(values[3][0], 0.11, decimal=5)
        npt.assert_almost_equal(values[2][0], 0.08, decimal=5)
        npt.assert_almost_equal(values[1][0], 0.08, decimal=5)
    def test_flexion(self):
        """Third derivatives must reproduce the input flexion coefficients."""
        x = np.array(0)
        y = np.array(2)
        flex = LensModel(['FLEXION'])
        f_xxx, f_xxy, f_xyy, f_yyy = flex.flexion(x, y, [self.kwargs_lens])
        npt.assert_almost_equal(f_xxx, self.kwargs_lens['g1'], decimal=9)
        npt.assert_almost_equal(f_xxy, self.kwargs_lens['g2'], decimal=9)
        npt.assert_almost_equal(f_xyy, self.kwargs_lens['g3'], decimal=9)
        npt.assert_almost_equal(f_yyy, self.kwargs_lens['g4'], decimal=9)
    def test_magnification(self):
        """Magnification at the profile's own center (ra_0, dec_0) must be 1."""
        ra_0, dec_0 = 1, -1
        flex = LensModel(['FLEXION'])
        g1, g2, g3, g4 = 0.01, 0.02, 0.03, 0.04
        kwargs = {'g1': g1, 'g2': g2, 'g3': g3, 'g4': g4, 'ra_0': ra_0, 'dec_0': dec_0}
        mag = flex.magnification(ra_0, dec_0, [kwargs])
        npt.assert_almost_equal(mag, 1, decimal=8)
if __name__ == '__main__':
    # Allow running this test module directly, outside a pytest invocation.
    pytest.main()
| [
"lenstronomy.LensModel.lens_model.LensModel",
"pytest.main",
"numpy.testing.assert_almost_equal",
"numpy.array",
"lenstronomy.LensModel.Profiles.flexion.Flexion"
] | [((3229, 3242), 'pytest.main', 'pytest.main', ([], {}), '()\n', (3240, 3242), False, 'import pytest\n'), ((325, 334), 'lenstronomy.LensModel.Profiles.flexion.Flexion', 'Flexion', ([], {}), '()\n', (332, 334), False, 'from lenstronomy.LensModel.Profiles.flexion import Flexion\n'), ((494, 507), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (502, 507), True, 'import numpy as np\n'), ((520, 533), 'numpy.array', 'np.array', (['[2]'], {}), '([2])\n', (528, 533), True, 'import numpy as np\n'), ((604, 656), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[0]', '(0.135)'], {'decimal': '(5)'}), '(values[0], 0.135, decimal=5)\n', (627, 656), True, 'import numpy.testing as npt\n'), ((669, 682), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (677, 682), True, 'import numpy as np\n'), ((695, 708), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (703, 708), True, 'import numpy as np\n'), ((779, 827), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[0]', '(0)'], {'decimal': '(5)'}), '(values[0], 0, decimal=5)\n', (802, 827), True, 'import numpy.testing as npt\n'), ((841, 860), 'numpy.array', 'np.array', (['[2, 3, 4]'], {}), '([2, 3, 4])\n', (849, 860), True, 'import numpy as np\n'), ((873, 892), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (881, 892), True, 'import numpy as np\n'), ((963, 1014), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[0]', '(0.09)'], {'decimal': '(5)'}), '(values[0], 0.09, decimal=5)\n', (986, 1014), True, 'import numpy.testing as npt\n'), ((1024, 1090), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[1]', '(0.18666666666666668)'], {'decimal': '(5)'}), '(values[1], 0.18666666666666668, decimal=5)\n', (1047, 1090), True, 'import numpy.testing as npt\n'), ((1136, 1149), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (1144, 1149), True, 'import numpy as np\n'), ((1162, 1175), 'numpy.array', 'np.array', (['[2]'], 
{}), '([2])\n', (1170, 1175), True, 'import numpy as np\n'), ((1251, 1300), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_x[0]', '(0.105)'], {'decimal': '(5)'}), '(f_x[0], 0.105, decimal=5)\n', (1274, 1300), True, 'import numpy.testing as npt\n'), ((1309, 1357), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_y[0]', '(0.15)'], {'decimal': '(5)'}), '(f_y[0], 0.15, decimal=5)\n', (1332, 1357), True, 'import numpy.testing as npt\n'), ((1371, 1390), 'numpy.array', 'np.array', (['[1, 3, 4]'], {}), '([1, 3, 4])\n', (1379, 1390), True, 'import numpy as np\n'), ((1403, 1422), 'numpy.array', 'np.array', (['[2, 1, 1]'], {}), '([2, 1, 1])\n', (1411, 1422), True, 'import numpy as np\n'), ((1496, 1551), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[0][0]', '(0.105)'], {'decimal': '(5)'}), '(values[0][0], 0.105, decimal=5)\n', (1519, 1551), True, 'import numpy.testing as npt\n'), ((1560, 1614), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[1][0]', '(0.15)'], {'decimal': '(5)'}), '(values[1][0], 0.15, decimal=5)\n', (1583, 1614), True, 'import numpy.testing as npt\n'), ((1656, 1667), 'numpy.array', 'np.array', (['(1)'], {}), '(1)\n', (1664, 1667), True, 'import numpy as np\n'), ((1680, 1691), 'numpy.array', 'np.array', (['(2)'], {}), '(2)\n', (1688, 1691), True, 'import numpy as np\n'), ((1777, 1823), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_xx', '(0.05)'], {'decimal': '(5)'}), '(f_xx, 0.05, decimal=5)\n', (1800, 1823), True, 'import numpy.testing as npt\n'), ((1832, 1878), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_yy', '(0.11)'], {'decimal': '(5)'}), '(f_yy, 0.11, decimal=5)\n', (1855, 1878), True, 'import numpy.testing as npt\n'), ((1887, 1933), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_xy', '(0.08)'], {'decimal': '(5)'}), '(f_xy, 0.08, decimal=5)\n', (1910, 1933), True, 'import numpy.testing as 
npt\n'), ((1942, 1988), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_xy', 'f_yx'], {'decimal': '(8)'}), '(f_xy, f_yx, decimal=8)\n', (1965, 1988), True, 'import numpy.testing as npt\n'), ((2002, 2021), 'numpy.array', 'np.array', (['[1, 3, 4]'], {}), '([1, 3, 4])\n', (2010, 2021), True, 'import numpy as np\n'), ((2032, 2051), 'numpy.array', 'np.array', (['[2, 1, 1]'], {}), '([2, 1, 1])\n', (2040, 2051), True, 'import numpy as np\n'), ((2119, 2173), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[0][0]', '(0.05)'], {'decimal': '(5)'}), '(values[0][0], 0.05, decimal=5)\n', (2142, 2173), True, 'import numpy.testing as npt\n'), ((2182, 2236), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[3][0]', '(0.11)'], {'decimal': '(5)'}), '(values[3][0], 0.11, decimal=5)\n', (2205, 2236), True, 'import numpy.testing as npt\n'), ((2245, 2299), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[2][0]', '(0.08)'], {'decimal': '(5)'}), '(values[2][0], 0.08, decimal=5)\n', (2268, 2299), True, 'import numpy.testing as npt\n'), ((2308, 2362), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['values[1][0]', '(0.08)'], {'decimal': '(5)'}), '(values[1][0], 0.08, decimal=5)\n', (2331, 2362), True, 'import numpy.testing as npt\n'), ((2404, 2415), 'numpy.array', 'np.array', (['(0)'], {}), '(0)\n', (2412, 2415), True, 'import numpy as np\n'), ((2428, 2439), 'numpy.array', 'np.array', (['(2)'], {}), '(2)\n', (2436, 2439), True, 'import numpy as np\n'), ((2455, 2477), 'lenstronomy.LensModel.lens_model.LensModel', 'LensModel', (["['FLEXION']"], {}), "(['FLEXION'])\n", (2464, 2477), False, 'from lenstronomy.LensModel.lens_model import LensModel\n'), ((2562, 2627), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_xxx', "self.kwargs_lens['g1']"], {'decimal': '(9)'}), "(f_xxx, self.kwargs_lens['g1'], decimal=9)\n", (2585, 2627), True, 'import numpy.testing as 
npt\n'), ((2636, 2701), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_xxy', "self.kwargs_lens['g2']"], {'decimal': '(9)'}), "(f_xxy, self.kwargs_lens['g2'], decimal=9)\n", (2659, 2701), True, 'import numpy.testing as npt\n'), ((2710, 2775), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_xyy', "self.kwargs_lens['g3']"], {'decimal': '(9)'}), "(f_xyy, self.kwargs_lens['g3'], decimal=9)\n", (2733, 2775), True, 'import numpy.testing as npt\n'), ((2784, 2849), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['f_yyy', "self.kwargs_lens['g4']"], {'decimal': '(9)'}), "(f_yyy, self.kwargs_lens['g4'], decimal=9)\n", (2807, 2849), True, 'import numpy.testing as npt\n'), ((2929, 2951), 'lenstronomy.LensModel.lens_model.LensModel', 'LensModel', (["['FLEXION']"], {}), "(['FLEXION'])\n", (2938, 2951), False, 'from lenstronomy.LensModel.lens_model import LensModel\n'), ((3153, 3195), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['mag', '(1)'], {'decimal': '(8)'}), '(mag, 1, decimal=8)\n', (3176, 3195), True, 'import numpy.testing as npt\n')] |
from unittest.mock import Mock, PropertyMock, MagicMock, patch
import numpy as np
import gym_connect4
from test_fixtures import Connect4Task
import regym
from regym.environments import EnvType
from regym.rl_algorithms import build_MCTS_Agent
from regym.rl_algorithms.agents import Agent, build_Deterministic_Agent, DeterministicAgent
from regym.rl_loops import Trajectory
from regym.rl_algorithms import build_Deterministic_Agent, build_MCTS_Agent
from regym.rl_loops.multiagent_loops.vectorenv_sequential_action_rl_loop import async_run_episode
from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience
def test_sequential_trajectories_feature_agent_predictions_single_env(Connect4Task):
    """Single-env episode: each timestep must record the acting agent's prediction."""
    col0_agent = build_Deterministic_Agent(
        Connect4Task, {'action': 0}, 'Col-0-DeterministicAgent')
    col0_agent.requires_opponents_prediction = True  # Required!
    # NOTE(review): the name says Col-0 but this agent always plays column 1 —
    # looks like a copy-paste; preserved to keep behaviour identical.
    col1_agent = build_Deterministic_Agent(
        Connect4Task, {'action': 1}, 'Col-0-DeterministicAgent')

    trajectory = Connect4Task.run_episode([col0_agent, col1_agent], training=False)

    expected_predictions = [
        {'a': 0, 'probs': [[1., 0., 0., 0., 0., 0., 0.]]},
        {'a': 1, 'probs': [[0., 1., 0., 0., 0., 0., 0.]]},
    ]
    compare_trajectory_extra_info_against_expected(trajectory, expected_predictions)
def test_sequential_trajectories_feature_agent_predictions_multienv(Connect4Task):
    """Vectorized episodes: every trajectory must carry the acting agents' predictions."""
    col0_agent = build_Deterministic_Agent(
        Connect4Task, {'action': 0}, 'Col-0-DeterministicAgent')
    col0_agent.requires_opponents_prediction = True  # Required!
    col1_agent = build_Deterministic_Agent(
        Connect4Task, {'action': 1}, 'Col-0-DeterministicAgent')

    trajectories = Connect4Task.run_episodes(
        [col0_agent, col1_agent], training=False, num_envs=2, num_episodes=2)

    # NOTE: on single-agent loops there's a batch dimension in 'probs', but not
    # on multiagent_loops. Does this matter?
    expected_predictions = [
        {'a': 0, 'probs': [1., 0., 0., 0., 0., 0., 0.]},
        {'a': 1, 'probs': [0., 1., 0., 0., 0., 0., 0.]},
    ]
    for trajectory in trajectories:
        compare_trajectory_extra_info_against_expected(trajectory, expected_predictions)
def test_agents_in_sequential_environments_handle_experiences_with_extra_info_single_env(Connect4Task):
    '''
    Ensures that when agents process experiences via
    `Agent.handle_experience(...)` calls, they are able to observe the
    `predictions` of other agents.
    There are 2 cases to consider:
    - Handling an experience in the middle of a trajectory
    - Handling an experience when the episode just finished and some agents
      need to process the last (terminal) timestep
    '''
    # Agent 0 is the only one that requests opponent predictions.
    mock_agent_1 = Mock(spec=DeterministicAgent)
    mock_agent_2 = Mock(spec=DeterministicAgent)
    agent_vector = [mock_agent_1, mock_agent_2]
    mock_agent_1.requires_opponents_prediction = True
    mock_agent_1.training = True
    mock_agent_2.requires_opponents_prediction = False
    mock_agent_2.training = True
    prediction_1 = {'a': 0, 'probs': [1., 0., 0., 0., 0., 0., 0.]}
    prediction_2 = {'a': 1, 'probs': [0., 1., 0., 0., 0., 0., 0.]}
    predictions = [prediction_1, prediction_2]
    '''
    Creates a trajectory for the game of Connect4 that looks like this.
    Total timesteps 7. P1 (x) actions: 4. P2 (o) actions: 3.
    Board: |              |
           |              |
           |x             |
           |x  o          |
           |x  o          |
           |x  o  .  .  .  .  .  |
           |--------------|
    '''
    sample_trajectory = Trajectory(
        env_type=EnvType.MULTIAGENT_SEQUENTIAL_ACTION, num_agents=2)
    # Dummy observation / action / reward / successor / done placeholders;
    # only the acting_agents and extra_info bookkeeping matters here.
    o, a, r, succ_o, done = [None, None], None, [0, 0], [None, None], False
    sample_trajectory.add_timestep(
        o, a, r, succ_o, done, acting_agents=[0],
        extra_info={0: predictions[0]})
    sample_trajectory.add_timestep(
        o, a, r, succ_o, done, acting_agents=[1],
        extra_info={1: predictions[1]})
    # Update agent 0
    propagate_experience(agent_vector, sample_trajectory)
    sample_trajectory.add_timestep(
        o, a, r, succ_o, done, acting_agents=[0],
        extra_info={0: predictions[0]})
    # Update agent 1
    propagate_experience(agent_vector, sample_trajectory)
    sample_trajectory.add_timestep(
        o, a, r, succ_o, done, acting_agents=[1],
        extra_info={1: predictions[1]})
    # Update agent 0
    propagate_experience(agent_vector, sample_trajectory)
    sample_trajectory.add_timestep(
        o, a, r, succ_o, done, acting_agents=[0],
        extra_info={0: predictions[0]})
    # Update agent 1
    propagate_experience(agent_vector, sample_trajectory)
    sample_trajectory.add_timestep(
        o, a, r, succ_o, done, acting_agents=[1],
        extra_info={1: predictions[1]})
    # Update agent 0
    propagate_experience(agent_vector, sample_trajectory)
    # Final (winning) move by player 0: episode ends with rewards [1, -1].
    done = True
    sample_trajectory.add_timestep(
        o, a, [1, -1], succ_o, done, acting_agents=[0],
        extra_info={0: predictions[0]})
    # Update player 1
    propagate_experience(agent_vector, sample_trajectory)
    # Episode termination
    # Update player 0 (After done flag)
    propagate_last_experience(agent_vector, sample_trajectory)
def compare_trajectory_extra_info_against_expected(trajectory, expected_predictions):
    """Asserts that every timestep stores the acting agent's prediction and that
    it matches the expected prediction for that agent ('a' and 'probs' keys)."""
    for timestep in trajectory:
        # Connect4 is sequential: exactly one agent acts per timestep.
        acting_agent = timestep.acting_agents[0]
        prediction = timestep.extra_info[acting_agent]
        expected = expected_predictions[acting_agent]
        assert 'a' in prediction
        assert 'probs' in prediction
        assert prediction['a'] == expected['a']
        np.testing.assert_array_equal(prediction['probs'], expected['probs'])
| [
"unittest.mock.Mock",
"regym.rl_algorithms.build_Deterministic_Agent",
"regym.rl_loops.Trajectory",
"test_fixtures.Connect4Task.run_episodes",
"regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_last_experience",
"regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experie... | [((771, 857), 'regym.rl_algorithms.build_Deterministic_Agent', 'build_Deterministic_Agent', (['Connect4Task', "{'action': 0}", '"""Col-0-DeterministicAgent"""'], {}), "(Connect4Task, {'action': 0},\n 'Col-0-DeterministicAgent')\n", (796, 857), False, 'from regym.rl_algorithms import build_Deterministic_Agent, build_MCTS_Agent\n'), ((939, 1025), 'regym.rl_algorithms.build_Deterministic_Agent', 'build_Deterministic_Agent', (['Connect4Task', "{'action': 1}", '"""Col-0-DeterministicAgent"""'], {}), "(Connect4Task, {'action': 1},\n 'Col-0-DeterministicAgent')\n", (964, 1025), False, 'from regym.rl_algorithms import build_Deterministic_Agent, build_MCTS_Agent\n'), ((1049, 1109), 'test_fixtures.Connect4Task.run_episode', 'Connect4Task.run_episode', (['[agent_1, agent_2]'], {'training': '(False)'}), '([agent_1, agent_2], training=False)\n', (1073, 1109), False, 'from test_fixtures import Connect4Task\n'), ((1554, 1640), 'regym.rl_algorithms.build_Deterministic_Agent', 'build_Deterministic_Agent', (['Connect4Task', "{'action': 0}", '"""Col-0-DeterministicAgent"""'], {}), "(Connect4Task, {'action': 0},\n 'Col-0-DeterministicAgent')\n", (1579, 1640), False, 'from regym.rl_algorithms import build_Deterministic_Agent, build_MCTS_Agent\n'), ((1722, 1808), 'regym.rl_algorithms.build_Deterministic_Agent', 'build_Deterministic_Agent', (['Connect4Task', "{'action': 1}", '"""Col-0-DeterministicAgent"""'], {}), "(Connect4Task, {'action': 1},\n 'Col-0-DeterministicAgent')\n", (1747, 1808), False, 'from regym.rl_algorithms import build_Deterministic_Agent, build_MCTS_Agent\n'), ((1834, 1927), 'test_fixtures.Connect4Task.run_episodes', 'Connect4Task.run_episodes', (['[agent_1, agent_2]'], {'training': '(False)', 'num_envs': '(2)', 'num_episodes': '(2)'}), '([agent_1, agent_2], training=False, num_envs=2,\n num_episodes=2)\n', (1859, 1927), False, 'from test_fixtures import Connect4Task\n'), ((3002, 3031), 
'unittest.mock.Mock', 'Mock', ([], {'spec': 'DeterministicAgent'}), '(spec=DeterministicAgent)\n', (3006, 3031), False, 'from unittest.mock import Mock, PropertyMock, MagicMock, patch\n'), ((3051, 3080), 'unittest.mock.Mock', 'Mock', ([], {'spec': 'DeterministicAgent'}), '(spec=DeterministicAgent)\n', (3055, 3080), False, 'from unittest.mock import Mock, PropertyMock, MagicMock, patch\n'), ((3885, 3956), 'regym.rl_loops.Trajectory', 'Trajectory', ([], {'env_type': 'EnvType.MULTIAGENT_SEQUENTIAL_ACTION', 'num_agents': '(2)'}), '(env_type=EnvType.MULTIAGENT_SEQUENTIAL_ACTION, num_agents=2)\n', (3895, 3956), False, 'from regym.rl_loops import Trajectory\n'), ((4323, 4376), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experience', 'propagate_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (4343, 4376), False, 'from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((4530, 4583), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experience', 'propagate_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (4550, 4583), False, 'from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((4737, 4790), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experience', 'propagate_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (4757, 4790), False, 'from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((4944, 4997), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experience', 'propagate_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (4964, 4997), False, 'from 
regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((5151, 5204), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experience', 'propagate_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (5171, 5204), False, 'from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((5381, 5434), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_experience', 'propagate_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (5401, 5434), False, 'from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((5506, 5564), 'regym.rl_loops.multiagent_loops.sequential_action_rl_loop.propagate_last_experience', 'propagate_last_experience', (['agent_vector', 'sample_trajectory'], {}), '(agent_vector, sample_trajectory)\n', (5531, 5564), False, 'from regym.rl_loops.multiagent_loops.sequential_action_rl_loop import propagate_experience, propagate_last_experience\n'), ((5995, 6092), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (["actual_prediction['probs']", "expected_predictions[a_i]['probs']"], {}), "(actual_prediction['probs'],\n expected_predictions[a_i]['probs'])\n", (6024, 6092), True, 'import numpy as np\n')] |
from sympy.core.function import expand_func
from sympy.core import EulerGamma
from sympy.core.numbers import (I, Rational, nan, oo, pi, zoo)
from sympy.core.singleton import S
from sympy.core.symbol import (Dummy, Symbol)
from sympy.functions.combinatorial.factorials import factorial
from sympy.functions.combinatorial.numbers import harmonic
from sympy.functions.elementary.complexes import (Abs, conjugate, im, re)
from sympy.functions.elementary.exponential import (exp, exp_polar, log)
from sympy.functions.elementary.hyperbolic import tanh
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.elementary.trigonometric import (cos, sin)
from sympy.functions.special.error_functions import (Ei, erf, erfc)
from sympy.functions.special.gamma_functions import (digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma)
from sympy.functions.special.zeta_functions import zeta
from sympy.series.order import O
from sympy.core.expr import unchanged
from sympy.core.function import ArgumentIndexError
from sympy.testing.pytest import raises
from sympy.core.random import (test_derivative_numerically as td,
random_complex_number as randcplx,
verify_numerically as tn)
# Shared symbols used throughout the tests below; ``n`` is an integer
# symbol and ``w`` a real symbol, while ``x``/``y`` carry no assumptions.
x = Symbol('x')
y = Symbol('y')
n = Symbol('n', integer=True)
w = Symbol('w', real=True)
def test_gamma():
    """Special values, half-integer values, func-expansion, derivatives
    and realness assumptions of the gamma function."""
    assert gamma(nan) is nan
    assert gamma(oo) is oo
    # Poles at nonpositive integers (including float representations).
    assert gamma(-100) is zoo
    assert gamma(0) is zoo
    assert gamma(-100.0) is zoo
    assert gamma(1) == 1
    assert gamma(2) == 1
    assert gamma(3) == 2
    assert gamma(102) == factorial(101)
    # Half-integer arguments reduce to rational multiples of sqrt(pi).
    assert gamma(S.Half) == sqrt(pi)
    assert gamma(Rational(3, 2)) == sqrt(pi)*S.Half
    assert gamma(Rational(5, 2)) == sqrt(pi)*Rational(3, 4)
    assert gamma(Rational(7, 2)) == sqrt(pi)*Rational(15, 8)
    assert gamma(Rational(-1, 2)) == -2*sqrt(pi)
    assert gamma(Rational(-3, 2)) == sqrt(pi)*Rational(4, 3)
    assert gamma(Rational(-5, 2)) == sqrt(pi)*Rational(-8, 15)
    assert gamma(Rational(-15, 2)) == sqrt(pi)*Rational(256, 2027025)
    # expand(func=True) applies the recurrence gamma(z+1) = z*gamma(z).
    assert gamma(Rational(
        -11, 8)).expand(func=True) == Rational(64, 33)*gamma(Rational(5, 8))
    assert gamma(Rational(
        -10, 3)).expand(func=True) == Rational(81, 280)*gamma(Rational(2, 3))
    assert gamma(Rational(
        14, 3)).expand(func=True) == Rational(880, 81)*gamma(Rational(2, 3))
    assert gamma(Rational(
        17, 7)).expand(func=True) == Rational(30, 49)*gamma(Rational(3, 7))
    assert gamma(Rational(
        19, 8)).expand(func=True) == Rational(33, 64)*gamma(Rational(3, 8))
    assert gamma(x).diff(x) == gamma(x)*polygamma(0, x)
    assert gamma(x - 1).expand(func=True) == gamma(x)/(x - 1)
    assert gamma(x + 2).expand(func=True, mul=False) == x*(x + 1)*gamma(x)
    assert conjugate(gamma(x)) == gamma(conjugate(x))
    assert expand_func(gamma(x + Rational(3, 2))) == \
        (x + S.Half)*gamma(x + S.Half)
    assert expand_func(gamma(x - S.Half)) == \
        gamma(S.Half + x)/(x - S.Half)
    # Test a bug:
    assert expand_func(gamma(x + Rational(3, 4))) == gamma(x + Rational(3, 4))
    # XXX: Not sure about these tests. I can fix them by defining e.g.
    # exp_polar.is_integer but I'm not sure if that makes sense.
    assert gamma(3*exp_polar(I*pi)/4).is_nonnegative is False
    assert gamma(3*exp_polar(I*pi)/4).is_extended_nonpositive is True
    # Realness depends on the assumptions on the argument symbol.
    y = Symbol('y', nonpositive=True, integer=True)
    assert gamma(y).is_real == False
    y = Symbol('y', positive=True, noninteger=True)
    assert gamma(y).is_real == True
    assert gamma(-1.0, evaluate=False).is_real == False
    assert gamma(0, evaluate=False).is_real == False
    assert gamma(-2, evaluate=False).is_real == False
def test_gamma_rewrite():
    """gamma(n) rewritten via factorial gives factorial(n - 1)."""
    rewritten = gamma(n).rewrite(factorial)
    assert rewritten == factorial(n - 1)
def test_gamma_series():
    """Series of gamma around 0 (via gamma(x + 1)) and the Laurent
    expansion around the pole at x = -1."""
    assert gamma(x + 1).series(x, 0, 3) == \
        1 - EulerGamma*x + x**2*(EulerGamma**2/2 + pi**2/12) + O(x**3)
    assert gamma(x).series(x, -1, 3) == \
        -1/(x + 1) + EulerGamma - 1 + (x + 1)*(-1 - pi**2/12 - EulerGamma**2/2 + \
       EulerGamma) + (x + 1)**2*(-1 - pi**2/12 - EulerGamma**2/2 + EulerGamma**3/6 - \
       polygamma(2, 1)/6 + EulerGamma*pi**2/12 + EulerGamma) + O((x + 1)**3, (x, -1))
def tn_branch(s, func):
    """Numerically verify the branch cut of ``func(s, z)`` on the negative
    real axis: the jump computed with polar arguments must match the jump
    between points just above and below the axis, to within 1e-10."""
    from sympy.core.random import uniform
    radius = uniform(1, 5)
    jump_polar = func(s, radius*exp_polar(I*pi)) - func(s, radius*exp_polar(-I*pi))
    eps = 1e-15
    jump_limit = func(s + eps, -radius + eps*I) - func(s + eps, -radius - eps*I)
    return abs(jump_polar.n() - jump_limit.n()).n() < 1e-10
def test_lowergamma():
    """Lower incomplete gamma: special values, derivatives, branch cut
    behaviour, meromorphicity and rewrites (expint/uppergamma)."""
    from sympy.functions.special.error_functions import expint
    from sympy.functions.special.hyper import meijerg
    assert lowergamma(x, 0) == 0
    assert lowergamma(x, y).diff(y) == y**(x - 1)*exp(-y)
    assert td(lowergamma(randcplx(), y), y)
    assert td(lowergamma(x, randcplx()), x)
    assert lowergamma(x, y).diff(x) == \
        gamma(x)*digamma(x) - uppergamma(x, y)*log(y) \
        - meijerg([], [1, 1], [0, 0, x], [], y)
    # Half-integer orders reduce to erf; only unevaluated forms keep lowergamma.
    assert lowergamma(S.Half, x) == sqrt(pi)*erf(sqrt(x))
    assert not lowergamma(S.Half - 3, x).has(lowergamma)
    assert not lowergamma(S.Half + 3, x).has(lowergamma)
    assert lowergamma(S.Half, x, evaluate=False).has(lowergamma)
    assert tn(lowergamma(S.Half + 3, x, evaluate=False),
              lowergamma(S.Half + 3, x), x)
    assert tn(lowergamma(S.Half - 3, x, evaluate=False),
              lowergamma(S.Half - 3, x), x)
    # Branch cut along the negative real axis (see tn_branch).
    assert tn_branch(-3, lowergamma)
    assert tn_branch(-4, lowergamma)
    assert tn_branch(Rational(1, 3), lowergamma)
    assert tn_branch(pi, lowergamma)
    assert lowergamma(3, exp_polar(4*pi*I)*x) == lowergamma(3, x)
    assert lowergamma(y, exp_polar(5*pi*I)*x) == \
        exp(4*I*pi*y)*lowergamma(y, x*exp_polar(pi*I))
    assert lowergamma(-2, exp_polar(5*pi*I)*x) == \
        lowergamma(-2, x*exp_polar(I*pi)) + 2*pi*I
    assert conjugate(lowergamma(x, y)) == lowergamma(conjugate(x), conjugate(y))
    assert conjugate(lowergamma(x, 0)) == 0
    assert unchanged(conjugate, lowergamma(x, -oo))
    # Meromorphicity in the second argument, probed at x = 0.
    assert lowergamma(0, x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(S(1)/3, x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(1, x, evaluate=False)._eval_is_meromorphic(x, 0) == True
    assert lowergamma(x, x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(x + 1, x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(1/x, x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(0, x + 1)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(S(1)/3, x + 1)._eval_is_meromorphic(x, 0) == True
    assert lowergamma(1, x + 1, evaluate=False)._eval_is_meromorphic(x, 0) == True
    assert lowergamma(x, x + 1)._eval_is_meromorphic(x, 0) == True
    assert lowergamma(x + 1, x + 1)._eval_is_meromorphic(x, 0) == True
    assert lowergamma(1/x, x + 1)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(0, 1/x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(S(1)/3, 1/x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(1, 1/x, evaluate=False)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(x, 1/x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(x + 1, 1/x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(1/x, 1/x)._eval_is_meromorphic(x, 0) == False
    assert lowergamma(x, 2).series(x, oo, 3) == \
        2**x*(1 + 2/(x + 1))*exp(-2)/x + O(exp(x*log(2))/x**3, (x, oo))
    # Rewrite via expint depends on the assumptions on the first argument.
    assert lowergamma(
        x, y).rewrite(expint) == -y**x*expint(-x + 1, y) + gamma(x)
    k = Symbol('k', integer=True)
    assert lowergamma(
        k, y).rewrite(expint) == -y**k*expint(-k + 1, y) + gamma(k)
    k = Symbol('k', integer=True, positive=False)
    assert lowergamma(k, y).rewrite(expint) == lowergamma(k, y)
    assert lowergamma(x, y).rewrite(uppergamma) == gamma(x) - uppergamma(x, y)
    assert lowergamma(70, 6) == factorial(69) - 69035724522603011058660187038367026272747334489677105069435923032634389419656200387949342530805432320 * exp(-6)
    # NOTE(review): no abs() around the difference — only checks the upper
    # bound of the error; presumably intentional, matching upstream.
    assert (lowergamma(S(77) / 2, 6) - lowergamma(S(77) / 2, 6, evaluate=False)).evalf() < 1e-16
    assert (lowergamma(-S(77) / 2, 6) - lowergamma(-S(77) / 2, 6, evaluate=False)).evalf() < 1e-16
def test_uppergamma():
    """Upper incomplete gamma: special values, derivatives, branch cut
    behaviour and rewrites (expint/lowergamma)."""
    from sympy.functions.special.error_functions import expint
    from sympy.functions.special.hyper import meijerg
    assert uppergamma(4, 0) == 6
    assert uppergamma(x, y).diff(y) == -y**(x - 1)*exp(-y)
    assert td(uppergamma(randcplx(), y), y)
    assert uppergamma(x, y).diff(x) == \
        uppergamma(x, y)*log(y) + meijerg([], [1, 1], [0, 0, x], [], y)
    assert td(uppergamma(x, randcplx()), x)
    p = Symbol('p', positive=True)
    assert uppergamma(0, p) == -Ei(-p)
    assert uppergamma(p, 0) == gamma(p)
    # Half-integer orders reduce to erfc; only unevaluated forms keep uppergamma.
    assert uppergamma(S.Half, x) == sqrt(pi)*erfc(sqrt(x))
    assert not uppergamma(S.Half - 3, x).has(uppergamma)
    assert not uppergamma(S.Half + 3, x).has(uppergamma)
    assert uppergamma(S.Half, x, evaluate=False).has(uppergamma)
    assert tn(uppergamma(S.Half + 3, x, evaluate=False),
              uppergamma(S.Half + 3, x), x)
    assert tn(uppergamma(S.Half - 3, x, evaluate=False),
              uppergamma(S.Half - 3, x), x)
    assert unchanged(uppergamma, x, -oo)
    assert unchanged(uppergamma, x, 0)
    # Branch cut along the negative real axis (see tn_branch).
    assert tn_branch(-3, uppergamma)
    assert tn_branch(-4, uppergamma)
    assert tn_branch(Rational(1, 3), uppergamma)
    assert tn_branch(pi, uppergamma)
    assert uppergamma(3, exp_polar(4*pi*I)*x) == uppergamma(3, x)
    assert uppergamma(y, exp_polar(5*pi*I)*x) == \
        exp(4*I*pi*y)*uppergamma(y, x*exp_polar(pi*I)) + \
        gamma(y)*(1 - exp(4*pi*I*y))
    assert uppergamma(-2, exp_polar(5*pi*I)*x) == \
        uppergamma(-2, x*exp_polar(I*pi)) - 2*pi*I
    assert uppergamma(-2, x) == expint(3, x)/x**2
    assert conjugate(uppergamma(x, y)) == uppergamma(conjugate(x), conjugate(y))
    assert unchanged(conjugate, uppergamma(x, -oo))
    assert uppergamma(x, y).rewrite(expint) == y**x*expint(-x + 1, y)
    assert uppergamma(x, y).rewrite(lowergamma) == gamma(x) - lowergamma(x, y)
    assert uppergamma(70, 6) == 69035724522603011058660187038367026272747334489677105069435923032634389419656200387949342530805432320*exp(-6)
    # NOTE(review): no abs() around the difference — only checks the upper
    # bound of the error; presumably intentional, matching upstream.
    assert (uppergamma(S(77) / 2, 6) - uppergamma(S(77) / 2, 6, evaluate=False)).evalf() < 1e-16
    assert (uppergamma(-S(77) / 2, 6) - uppergamma(-S(77) / 2, 6, evaluate=False)).evalf() < 1e-16
def test_polygamma():
    """polygamma(k, z): special values, zeta/harmonic rewrites, branching,
    negative orders, sign assumptions and complex-order handling."""
    assert polygamma(n, nan) is nan
    assert polygamma(0, oo) is oo
    assert polygamma(0, -oo) is oo
    assert polygamma(0, I*oo) is oo
    assert polygamma(0, -I*oo) is oo
    assert polygamma(1, oo) == 0
    assert polygamma(5, oo) == 0
    # Poles at nonpositive integers.
    assert polygamma(0, -9) is zoo
    assert polygamma(0, -9) is zoo
    assert polygamma(0, -1) is zoo
    assert polygamma(0, 0) is zoo
    assert polygamma(0, 1) == -EulerGamma
    assert polygamma(0, 7) == Rational(49, 20) - EulerGamma
    assert polygamma(1, 1) == pi**2/6
    assert polygamma(1, 2) == pi**2/6 - 1
    assert polygamma(1, 3) == pi**2/6 - Rational(5, 4)
    assert polygamma(3, 1) == pi**4 / 15
    assert polygamma(3, 5) == 6*(Rational(-22369, 20736) + pi**4/90)
    assert polygamma(5, 1) == 8 * pi**6 / 63
    assert polygamma(1, S.Half) == pi**2 / 2
    assert polygamma(2, S.Half) == -14*zeta(3)
    assert polygamma(11, S.Half) == 176896*pi**12
    def t(m, n):
        # Numerically verify the closed form for polygamma(0, m/n).
        x = S(m)/n
        r = polygamma(0, x)
        if r.has(polygamma):
            return False
        return abs(polygamma(0, x.n()).n() - r.n()).n() < 1e-10
    assert t(1, 2)
    assert t(3, 2)
    assert t(-1, 2)
    assert t(1, 4)
    assert t(-3, 4)
    assert t(1, 3)
    assert t(4, 3)
    assert t(3, 4)
    assert t(2, 3)
    assert t(123, 5)
    # Rewrite via zeta only fires for known positive-integer order.
    assert polygamma(0, x).rewrite(zeta) == polygamma(0, x)
    assert polygamma(1, x).rewrite(zeta) == zeta(2, x)
    assert polygamma(2, x).rewrite(zeta) == -2*zeta(3, x)
    assert polygamma(I, 2).rewrite(zeta) == polygamma(I, 2)
    n1 = Symbol('n1')
    n2 = Symbol('n2', real=True)
    n3 = Symbol('n3', integer=True)
    n4 = Symbol('n4', positive=True)
    n5 = Symbol('n5', positive=True, integer=True)
    assert polygamma(n1, x).rewrite(zeta) == polygamma(n1, x)
    assert polygamma(n2, x).rewrite(zeta) == polygamma(n2, x)
    assert polygamma(n3, x).rewrite(zeta) == polygamma(n3, x)
    assert polygamma(n4, x).rewrite(zeta) == polygamma(n4, x)
    assert polygamma(n5, x).rewrite(zeta) == (-1)**(n5 + 1) * factorial(n5) * zeta(n5 + 1, x)
    assert polygamma(3, 7*x).diff(x) == 7*polygamma(4, 7*x)
    assert polygamma(0, x).rewrite(harmonic) == harmonic(x - 1) - EulerGamma
    assert polygamma(2, x).rewrite(harmonic) == 2*harmonic(x - 1, 3) - 2*zeta(3)
    ni = Symbol("n", integer=True)
    assert polygamma(ni, x).rewrite(harmonic) == (-1)**(ni + 1)*(-harmonic(x - 1, ni + 1)
                                                                 + zeta(ni + 1))*factorial(ni)
    # Polygamma of non-negative integer order is unbranched:
    k = Symbol('n', integer=True, nonnegative=True)
    assert polygamma(k, exp_polar(2*I*pi)*x) == polygamma(k, x)
    # but negative integers are branched!
    k = Symbol('n', integer=True)
    assert polygamma(k, exp_polar(2*I*pi)*x).args == (k, exp_polar(2*I*pi)*x)
    # Polygamma of order -1 is loggamma:
    assert polygamma(-1, x) == loggamma(x)
    # But smaller orders are iterated integrals and don't have a special name
    assert polygamma(-2, x).func is polygamma
    # Test a bug
    assert polygamma(0, -x).expand(func=True) == polygamma(0, -x)
    # Sign of polygamma at non-integral real points.
    assert polygamma(2, 2.5).is_positive == False
    assert polygamma(2, -2.5).is_positive == False
    assert polygamma(3, 2.5).is_positive == True
    assert polygamma(3, -2.5).is_positive is True
    assert polygamma(-2, -2.5).is_positive is None
    assert polygamma(-3, -2.5).is_positive is None
    assert polygamma(2, 2.5).is_negative == True
    assert polygamma(3, 2.5).is_negative == False
    assert polygamma(3, -2.5).is_negative == False
    assert polygamma(2, -2.5).is_negative is True
    assert polygamma(-2, -2.5).is_negative is None
    assert polygamma(-3, -2.5).is_negative is None
    assert polygamma(I, 2).is_positive is None
    assert polygamma(I, 3).is_negative is None
    # issue 17350
    assert polygamma(pi, 3).evalf() == polygamma(pi, 3)
    assert (I*polygamma(I, pi)).as_real_imag() == \
           (-im(polygamma(I, pi)), re(polygamma(I, pi)))
    assert (tanh(polygamma(I, 1))).rewrite(exp) == \
           (exp(polygamma(I, 1)) - exp(-polygamma(I, 1)))/(exp(polygamma(I, 1)) + exp(-polygamma(I, 1)))
    assert (I / polygamma(I, 4)).rewrite(exp) == \
           I*sqrt(re(polygamma(I, 4))**2 + im(polygamma(I, 4))**2)\
           /((re(polygamma(I, 4)) + I*im(polygamma(I, 4)))*Abs(polygamma(I, 4)))
    assert unchanged(polygamma, 2.3, 1.0)
    # issue 12569
    assert unchanged(im, polygamma(0, I))
    assert polygamma(Symbol('a', positive=True), Symbol('b', positive=True)).is_real is True
    assert polygamma(0, I).is_real is None
def test_polygamma_expand_func():
    """Duplication/recurrence expansions of polygamma via expand(func=True)."""
    assert polygamma(0, x).expand(func=True) == polygamma(0, x)
    # Duplication formula for the argument 2*x.
    assert polygamma(0, 2*x).expand(func=True) == \
        polygamma(0, x)/2 + polygamma(0, S.Half + x)/2 + log(2)
    assert polygamma(1, 2*x).expand(func=True) == \
        polygamma(1, x)/4 + polygamma(1, S.Half + x)/4
    assert polygamma(2, x).expand(func=True) == \
        polygamma(2, x)
    # Recurrence: shifting the argument by integers adds rational terms.
    assert polygamma(0, -1 + x).expand(func=True) == \
        polygamma(0, x) - 1/(x - 1)
    assert polygamma(0, 1 + x).expand(func=True) == \
        1/x + polygamma(0, x )
    assert polygamma(0, 2 + x).expand(func=True) == \
        1/x + 1/(1 + x) + polygamma(0, x)
    assert polygamma(0, 3 + x).expand(func=True) == \
        polygamma(0, x) + 1/x + 1/(1 + x) + 1/(2 + x)
    assert polygamma(0, 4 + x).expand(func=True) == \
        polygamma(0, x) + 1/x + 1/(1 + x) + 1/(2 + x) + 1/(3 + x)
    assert polygamma(1, 1 + x).expand(func=True) == \
        polygamma(1, x) - 1/x**2
    assert polygamma(1, 2 + x).expand(func=True, multinomial=False) == \
        polygamma(1, x) - 1/x**2 - 1/(1 + x)**2
    assert polygamma(1, 3 + x).expand(func=True, multinomial=False) == \
        polygamma(1, x) - 1/x**2 - 1/(1 + x)**2 - 1/(2 + x)**2
    assert polygamma(1, 4 + x).expand(func=True, multinomial=False) == \
        polygamma(1, x) - 1/x**2 - 1/(1 + x)**2 - \
        1/(2 + x)**2 - 1/(3 + x)**2
    # Non-integer shifts stay unexpanded.
    assert polygamma(0, x + y).expand(func=True) == \
        polygamma(0, x + y)
    assert polygamma(1, x + y).expand(func=True) == \
        polygamma(1, x + y)
    assert polygamma(1, 3 + 4*x + y).expand(func=True, multinomial=False) == \
        polygamma(1, y + 4*x) - 1/(y + 4*x)**2 - \
        1/(1 + y + 4*x)**2 - 1/(2 + y + 4*x)**2
    assert polygamma(3, 3 + 4*x + y).expand(func=True, multinomial=False) == \
        polygamma(3, y + 4*x) - 6/(y + 4*x)**4 - \
        6/(1 + y + 4*x)**4 - 6/(2 + y + 4*x)**4
    assert polygamma(3, 4*x + y + 1).expand(func=True, multinomial=False) == \
        polygamma(3, y + 4*x) - 6/(y + 4*x)**4
    e = polygamma(3, 4*x + y + Rational(3, 2))
    assert e.expand(func=True) == e
    e = polygamma(3, x + y + Rational(3, 4))
    assert e.expand(func=True, basic=False) == e
def test_digamma():
    """digamma: special values, rewrites and unevaluated-form properties."""
    assert digamma(nan) == nan
    assert digamma(oo) == oo
    assert digamma(-oo) == oo
    assert digamma(I*oo) == oo
    assert digamma(-I*oo) == oo
    # Poles at nonpositive integers.
    assert digamma(-9) == zoo
    assert digamma(-9) == zoo
    assert digamma(-1) == zoo
    assert digamma(0) == zoo
    assert digamma(1) == -EulerGamma
    assert digamma(7) == Rational(49, 20) - EulerGamma
    def t(m, n):
        # Numerically verify the closed form for digamma(m/n).
        x = S(m)/n
        r = digamma(x)
        if r.has(digamma):
            return False
        return abs(digamma(x.n()).n() - r.n()).n() < 1e-10
    assert t(1, 2)
    assert t(3, 2)
    assert t(-1, 2)
    assert t(1, 4)
    assert t(-3, 4)
    assert t(1, 3)
    assert t(4, 3)
    assert t(3, 4)
    assert t(2, 3)
    assert t(123, 5)
    assert digamma(x).rewrite(zeta) == polygamma(0, x)
    assert digamma(x).rewrite(harmonic) == harmonic(x - 1) - EulerGamma
    assert digamma(I).is_real is None
    # Unevaluated digamma still differentiates and rewrites correctly.
    assert digamma(x,evaluate=False).fdiff() == polygamma(1, x)
    assert digamma(x,evaluate=False).is_real is None
    assert digamma(x,evaluate=False).is_positive is None
    assert digamma(x,evaluate=False).is_negative is None
    assert digamma(x,evaluate=False).rewrite(polygamma) == polygamma(0, x)
def test_digamma_expand_func():
    """Duplication/recurrence expansions of digamma via expand(func=True)."""
    assert digamma(x).expand(func=True) == polygamma(0, x)
    # Duplication formula for the argument 2*x.
    assert digamma(2*x).expand(func=True) == \
        polygamma(0, x)/2 + polygamma(0, Rational(1, 2) + x)/2 + log(2)
    # Recurrence: shifting the argument by integers adds rational terms.
    assert digamma(-1 + x).expand(func=True) == \
        polygamma(0, x) - 1/(x - 1)
    assert digamma(1 + x).expand(func=True) == \
        1/x + polygamma(0, x )
    assert digamma(2 + x).expand(func=True) == \
        1/x + 1/(1 + x) + polygamma(0, x)
    assert digamma(3 + x).expand(func=True) == \
        polygamma(0, x) + 1/x + 1/(1 + x) + 1/(2 + x)
    assert digamma(4 + x).expand(func=True) == \
        polygamma(0, x) + 1/x + 1/(1 + x) + 1/(2 + x) + 1/(3 + x)
    assert digamma(x + y).expand(func=True) == \
        polygamma(0, x + y)
def test_trigamma():
    """trigamma: special values, rewrites and unevaluated-form properties."""
    assert trigamma(nan) == nan
    assert trigamma(oo) == 0
    assert trigamma(1) == pi**2/6
    assert trigamma(2) == pi**2/6 - 1
    assert trigamma(3) == pi**2/6 - Rational(5, 4)
    assert trigamma(x, evaluate=False).rewrite(zeta) == zeta(2, x)
    assert trigamma(x, evaluate=False).rewrite(harmonic) == \
        trigamma(x).rewrite(polygamma).rewrite(harmonic)
    # Unevaluated trigamma still differentiates and rewrites correctly.
    assert trigamma(x,evaluate=False).fdiff() == polygamma(2, x)
    assert trigamma(x,evaluate=False).is_real is None
    assert trigamma(x,evaluate=False).is_positive is None
    assert trigamma(x,evaluate=False).is_negative is None
    assert trigamma(x,evaluate=False).rewrite(polygamma) == polygamma(1, x)
def test_trigamma_expand_func():
    """Duplication/recurrence expansions of trigamma via expand(func=True)."""
    # Duplication formula for the argument 2*x.
    assert trigamma(2*x).expand(func=True) == \
        polygamma(1, x)/4 + polygamma(1, Rational(1, 2) + x)/4
    # Recurrence: shifting the argument by integers adds rational terms.
    assert trigamma(1 + x).expand(func=True) == \
        polygamma(1, x) - 1/x**2
    assert trigamma(2 + x).expand(func=True, multinomial=False) == \
        polygamma(1, x) - 1/x**2 - 1/(1 + x)**2
    assert trigamma(3 + x).expand(func=True, multinomial=False) == \
        polygamma(1, x) - 1/x**2 - 1/(1 + x)**2 - 1/(2 + x)**2
    assert trigamma(4 + x).expand(func=True, multinomial=False) == \
        polygamma(1, x) - 1/x**2 - 1/(1 + x)**2 - \
        1/(2 + x)**2 - 1/(3 + x)**2
    # Non-integer shifts stay unexpanded.
    assert trigamma(x + y).expand(func=True) == \
        polygamma(1, x + y)
    assert trigamma(3 + 4*x + y).expand(func=True, multinomial=False) == \
        polygamma(1, y + 4*x) - 1/(y + 4*x)**2 - \
        1/(1 + y + 4*x)**2 - 1/(2 + y + 4*x)**2
def test_loggamma():
    """loggamma: argument validation, special values, rational-argument
    expansions, series, conjugation and realness assumptions."""
    raises(TypeError, lambda: loggamma(2, 3))
    raises(ArgumentIndexError, lambda: loggamma(x).fdiff(2))
    assert loggamma(-1) is oo
    assert loggamma(-2) is oo
    assert loggamma(0) is oo
    assert loggamma(1) == 0
    assert loggamma(2) == 0
    assert loggamma(3) == log(2)
    assert loggamma(4) == log(6)
    n = Symbol("n", integer=True, positive=True)
    assert loggamma(n) == log(gamma(n))
    assert loggamma(-n) is oo
    assert loggamma(n/2) == log(2**(-n + 1)*sqrt(pi)*gamma(n)/gamma(n/2 + S.Half))
    assert loggamma(oo) is oo
    assert loggamma(-oo) is zoo
    assert loggamma(I*oo) is zoo
    assert loggamma(-I*oo) is zoo
    assert loggamma(zoo) is zoo
    assert loggamma(nan) is nan
    # expand_func at rational arguments applies the recurrence, keeping
    # the residual loggamma of an argument in (0, 1).
    L = loggamma(Rational(16, 3))
    E = -5*log(3) + loggamma(Rational(1, 3)) + log(4) + log(7) + log(10) + log(13)
    assert expand_func(L).doit() == E
    assert L.n() == E.n()
    L = loggamma(Rational(19, 4))
    E = -4*log(4) + loggamma(Rational(3, 4)) + log(3) + log(7) + log(11) + log(15)
    assert expand_func(L).doit() == E
    assert L.n() == E.n()
    L = loggamma(Rational(23, 7))
    E = -3*log(7) + log(2) + loggamma(Rational(2, 7)) + log(9) + log(16)
    assert expand_func(L).doit() == E
    assert L.n() == E.n()
    # Negative rational arguments pick up imaginary multiples of pi.
    L = loggamma(Rational(19, 4) - 7)
    E = -log(9) - log(5) + loggamma(Rational(3, 4)) + 3*log(4) - 3*I*pi
    assert expand_func(L).doit() == E
    assert L.n() == E.n()
    L = loggamma(Rational(23, 7) - 6)
    E = -log(19) - log(12) - log(5) + loggamma(Rational(2, 7)) + 3*log(7) - 3*I*pi
    assert expand_func(L).doit() == E
    assert L.n() == E.n()
    assert loggamma(x).diff(x) == polygamma(0, x)
    s1 = loggamma(1/(x + sin(x)) + cos(x)).nseries(x, n=4)
    s2 = (-log(2*x) - 1)/(2*x) - log(x/pi)/2 + (4 - log(2*x))*x/24 + O(x**2) + \
        log(x)*x**2/2
    assert (s1 - s2).expand(force=True).removeO() == 0
    # Stirling-type expansion of loggamma(1/x) for small x.
    s1 = loggamma(1/x).series(x)
    s2 = (1/x - S.Half)*log(1/x) - 1/x + log(2*pi)/2 + \
        x/12 - x**3/360 + x**5/1260 + O(x**7)
    assert ((s1 - s2).expand(force=True)).removeO() == 0
    assert loggamma(x).rewrite('intractable') == log(gamma(x))
    s1 = loggamma(x).series(x).cancel()
    assert s1 == -log(x) - EulerGamma*x + pi**2*x**2/12 + x**3*polygamma(2, 1)/6 + \
        pi**4*x**4/360 + x**5*polygamma(4, 1)/120 + O(x**6)
    assert s1 == loggamma(x).rewrite('intractable').series(x).cancel()
    assert conjugate(loggamma(x)) == loggamma(conjugate(x))
    assert conjugate(loggamma(0)) is oo
    assert conjugate(loggamma(1)) == loggamma(conjugate(1))
    assert conjugate(loggamma(-oo)) == conjugate(zoo)
    # Realness depends on the assumptions on the argument symbol.
    assert loggamma(Symbol('v', positive=True)).is_real is True
    assert loggamma(Symbol('v', zero=True)).is_real is False
    assert loggamma(Symbol('v', negative=True)).is_real is False
    assert loggamma(Symbol('v', nonpositive=True)).is_real is False
    assert loggamma(Symbol('v', nonnegative=True)).is_real is None
    assert loggamma(Symbol('v', imaginary=True)).is_real is None
    assert loggamma(Symbol('v', real=True)).is_real is None
    assert loggamma(Symbol('v')).is_real is None
    assert loggamma(S.Half).is_real is True
    assert loggamma(0).is_real is False
    assert loggamma(Rational(-1, 2)).is_real is False
    assert loggamma(I).is_real is None
    assert loggamma(2 + 3*I).is_real is None
    def tN(N, M):
        # nseries of loggamma(1/x) requested to order N reports order M.
        assert loggamma(1/x)._eval_nseries(x, n=N).getn() == M
    tN(0, 0)
    tN(1, 1)
    tN(2, 2)
    tN(3, 3)
    tN(4, 4)
    tN(5, 5)
def test_polygamma_expansion():
    """Asymptotic expansions of polygamma(k, 1/x) for small x."""
    # A. & S., pa. 259 and 260
    assert polygamma(0, 1/x).nseries(x, n=3) == \
        -log(x) - x/2 - x**2/12 + O(x**3)
    assert polygamma(1, 1/x).series(x, n=5) == \
        x + x**2/2 + x**3/6 + O(x**5)
    assert polygamma(3, 1/x).nseries(x, n=11) == \
        2*x**3 + 3*x**4 + 2*x**5 - x**7 + 4*x**9/3 + O(x**11)
def test_issue_8657():
    """gamma(z).is_real under various assumptions on z (issue 8657)."""
    cases = [
        (Symbol('n', negative=True, integer=True), False),  # pole: not real
        (Symbol('m', integer=True), None),                  # may hit a pole
        (Symbol('o', positive=True), True),
        (Symbol('p', negative=True, integer=False), True),  # avoids the poles
        (w, None),                                          # merely real: unknown
    ]
    for sym, expected in cases:
        assert gamma(sym).is_real is expected
def test_issue_8524():
    """gamma(z).is_positive under various assumptions on z (issue 8524)."""
    assert gamma(Symbol('x', positive=True)).is_positive is True
    # Anything that may be a nonpositive real leaves positivity undecided.
    undecided = [
        Symbol('y', negative=True),
        Symbol('z', positive=False),
        Symbol('p', negative=False),
        Symbol('q', integer=True),
        Symbol('r', integer=False),
    ]
    for sym in undecided:
        assert gamma(sym).is_positive is None
    # With e a negative even integer, e + 1/2 falls in an interval where
    # gamma is positive and e - 1/2 in one where it is negative.
    e = Symbol('e', even=True, negative=True)
    assert gamma(e + S.Half).is_positive is True
    assert gamma(e - S.Half).is_positive is False
def test_issue_14450():
    """Incomplete gammas with a free symbol survive evalf unevaluated, and
    numeric values match external references (issue 14450)."""
    a = Rational(3, 8)
    # With a free symbol present, evalf must return the expression as-is.
    assert uppergamma(a, x).evalf() == uppergamma(a, x)
    assert lowergamma(x, a).evalf() == lowergamma(x, a)
    # some values from Wolfram Alpha for comparison
    assert abs(uppergamma(a, 2).evalf() - 0.07105675881) < 1e-9
    assert abs(lowergamma(a, 2).evalf() - 2.2993794256) < 1e-9
def test_issue_14528():
    """gamma of a nonpositive-integer *symbol* stays an unevaluated gamma
    instead of collapsing to zoo (issue 14528)."""
    k = Symbol('k', integer=True, nonpositive=True)
    result = gamma(k)
    assert isinstance(result, gamma)
def test_multigamma():
    """Multivariate gamma: product definition, conjugation, special values,
    derivatives, func-expansions, factorial rewrites and realness."""
    from sympy.concrete.products import Product
    p = Symbol('p')
    _k = Dummy('_k')
    # Definition: pi**(p*(p-1)/4) * prod_{k=1}^{p} gamma(x + (1 - k)/2).
    assert multigamma(x, p).dummy_eq(pi**(p*(p - 1)/4)*\
        Product(gamma(x + (1 - _k)/2), (_k, 1, p)))
    assert conjugate(multigamma(x, p)).dummy_eq(pi**((conjugate(p) - 1)*\
        conjugate(p)/4)*Product(gamma(conjugate(x) + (1-conjugate(_k))/2), (_k, 1, p)))
    assert conjugate(multigamma(x, 1)) == gamma(conjugate(x))
    p = Symbol('p', positive=True)
    assert conjugate(multigamma(x, p)).dummy_eq(pi**((p - 1)*p/4)*\
        Product(gamma(conjugate(x) + (1-conjugate(_k))/2), (_k, 1, p)))
    assert multigamma(nan, 1) is nan
    assert multigamma(oo, 1).doit() is oo
    # p = 1 reduces to the ordinary gamma function.
    assert multigamma(1, 1) == 1
    assert multigamma(2, 1) == 1
    assert multigamma(3, 1) == 2
    assert multigamma(102, 1) == factorial(101)
    assert multigamma(S.Half, 1) == sqrt(pi)
    assert multigamma(1, 2) == pi
    assert multigamma(2, 2) == pi/2
    assert multigamma(1, 3) is zoo
    assert multigamma(2, 3) == pi**2/2
    assert multigamma(3, 3) == 3*pi**2/2
    assert multigamma(x, 1).diff(x) == gamma(x)*polygamma(0, x)
    assert multigamma(x, 2).diff(x) == sqrt(pi)*gamma(x)*gamma(x - S.Half)*\
        polygamma(0, x) + sqrt(pi)*gamma(x)*gamma(x - S.Half)*polygamma(0, x - S.Half)
    assert multigamma(x - 1, 1).expand(func=True) == gamma(x)/(x - 1)
    assert multigamma(x + 2, 1).expand(func=True, mul=False) == x*(x + 1)*\
        gamma(x)
    assert multigamma(x - 1, 2).expand(func=True) == sqrt(pi)*gamma(x)*\
        gamma(x + S.Half)/(x**3 - 3*x**2 + x*Rational(11, 4) - Rational(3, 4))
    assert multigamma(x - 1, 3).expand(func=True) == pi**Rational(3, 2)*gamma(x)**2*\
        gamma(x + S.Half)/(x**5 - 6*x**4 + 55*x**3/4 - 15*x**2 + x*Rational(31, 4) - Rational(3, 2))
    assert multigamma(n, 1).rewrite(factorial) == factorial(n - 1)
    assert multigamma(n, 2).rewrite(factorial) == sqrt(pi)*\
        factorial(n - Rational(3, 2))*factorial(n - 1)
    assert multigamma(n, 3).rewrite(factorial) == pi**Rational(3, 2)*\
        factorial(n - 2)*factorial(n - Rational(3, 2))*factorial(n - 1)
    # Realness of the unevaluated form for various concrete arguments.
    assert multigamma(Rational(-1, 2), 3, evaluate=False).is_real == False
    assert multigamma(S.Half, 3, evaluate=False).is_real == False
    assert multigamma(0, 1, evaluate=False).is_real == False
    assert multigamma(1, 3, evaluate=False).is_real == False
    assert multigamma(-1.0, 3, evaluate=False).is_real == False
    assert multigamma(0.7, 3, evaluate=False).is_real == True
    assert multigamma(3, 3, evaluate=False).is_real == True
def test_gamma_as_leading_term():
assert gamma(x).as_leading_term(x) == 1/x
assert gamma(2 + x).as_leading_term(x) == S(1)
assert gamma(cos(x)).as_leading_term(x) == S(1)
assert gamma(sin(x)).as_leading_term(x) == 1/x
| [
"sympy.functions.special.gamma_functions.digamma",
"sympy.functions.elementary.miscellaneous.sqrt",
"sympy.functions.special.gamma_functions.uppergamma",
"sympy.core.symbol.Symbol",
"sympy.functions.special.error_functions.expint",
"sympy.core.function.expand_func",
"sympy.functions.elementary.trigonome... | [((1299, 1310), 'sympy.core.symbol.Symbol', 'Symbol', (['"""x"""'], {}), "('x')\n", (1305, 1310), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((1315, 1326), 'sympy.core.symbol.Symbol', 'Symbol', (['"""y"""'], {}), "('y')\n", (1321, 1326), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((1331, 1356), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n"""'], {'integer': '(True)'}), "('n', integer=True)\n", (1337, 1356), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((1361, 1383), 'sympy.core.symbol.Symbol', 'Symbol', (['"""w"""'], {'real': '(True)'}), "('w', real=True)\n", (1367, 1383), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((3451, 3494), 'sympy.core.symbol.Symbol', 'Symbol', (['"""y"""'], {'nonpositive': '(True)', 'integer': '(True)'}), "('y', nonpositive=True, integer=True)\n", (3457, 3494), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((3540, 3583), 'sympy.core.symbol.Symbol', 'Symbol', (['"""y"""'], {'positive': '(True)', 'noninteger': '(True)'}), "('y', positive=True, noninteger=True)\n", (3546, 3583), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((4388, 4401), 'sympy.core.random.uniform', 'uniform', (['(1)', '(5)'], {}), '(1, 5)\n', (4395, 4401), False, 'from sympy.core.random import uniform\n'), ((7609, 7634), 'sympy.core.symbol.Symbol', 'Symbol', (['"""k"""'], {'integer': '(True)'}), "('k', integer=True)\n", (7615, 7634), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((7734, 7775), 'sympy.core.symbol.Symbol', 'Symbol', (['"""k"""'], {'integer': '(True)', 'positive': '(False)'}), "('k', integer=True, positive=False)\n", (7740, 7775), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((8720, 8746), 'sympy.core.symbol.Symbol', 'Symbol', (['"""p"""'], {'positive': '(True)'}), "('p', positive=True)\n", (8726, 8746), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((9278, 9307), 'sympy.core.expr.unchanged', 'unchanged', 
(['uppergamma', 'x', '(-oo)'], {}), '(uppergamma, x, -oo)\n', (9287, 9307), False, 'from sympy.core.expr import unchanged\n'), ((9319, 9346), 'sympy.core.expr.unchanged', 'unchanged', (['uppergamma', 'x', '(0)'], {}), '(uppergamma, x, 0)\n', (9328, 9346), False, 'from sympy.core.expr import unchanged\n'), ((12066, 12078), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n1"""'], {}), "('n1')\n", (12072, 12078), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((12088, 12111), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n2"""'], {'real': '(True)'}), "('n2', real=True)\n", (12094, 12111), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((12121, 12147), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n3"""'], {'integer': '(True)'}), "('n3', integer=True)\n", (12127, 12147), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((12157, 12184), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n4"""'], {'positive': '(True)'}), "('n4', positive=True)\n", (12163, 12184), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((12194, 12235), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n5"""'], {'positive': '(True)', 'integer': '(True)'}), "('n5', positive=True, integer=True)\n", (12200, 12235), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((12807, 12832), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n"""'], {'integer': '(True)'}), "('n', integer=True)\n", (12813, 12832), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((13088, 13131), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n"""'], {'integer': '(True)', 'nonnegative': '(True)'}), "('n', integer=True, nonnegative=True)\n", (13094, 13131), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((13247, 13272), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n"""'], {'integer': '(True)'}), "('n', integer=True)\n", (13253, 13272), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((14899, 14929), 'sympy.core.expr.unchanged', 'unchanged', (['polygamma', '(2.3)', '(1.0)'], {}), '(polygamma, 
2.3, 1.0)\n', (14908, 14929), False, 'from sympy.core.expr import unchanged\n'), ((21297, 21337), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n"""'], {'integer': '(True)', 'positive': '(True)'}), "('n', integer=True, positive=True)\n", (21303, 21337), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((24852, 24892), 'sympy.core.symbol.Symbol', 'Symbol', (['"""n"""'], {'negative': '(True)', 'integer': '(True)'}), "('n', negative=True, integer=True)\n", (24858, 24892), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((24901, 24926), 'sympy.core.symbol.Symbol', 'Symbol', (['"""m"""'], {'integer': '(True)'}), "('m', integer=True)\n", (24907, 24926), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((24935, 24961), 'sympy.core.symbol.Symbol', 'Symbol', (['"""o"""'], {'positive': '(True)'}), "('o', positive=True)\n", (24941, 24961), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((24970, 25011), 'sympy.core.symbol.Symbol', 'Symbol', (['"""p"""'], {'negative': '(True)', 'integer': '(False)'}), "('p', negative=True, integer=False)\n", (24976, 25011), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25226, 25252), 'sympy.core.symbol.Symbol', 'Symbol', (['"""x"""'], {'positive': '(True)'}), "('x', positive=True)\n", (25232, 25252), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25261, 25287), 'sympy.core.symbol.Symbol', 'Symbol', (['"""y"""'], {'negative': '(True)'}), "('y', negative=True)\n", (25267, 25287), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25296, 25323), 'sympy.core.symbol.Symbol', 'Symbol', (['"""z"""'], {'positive': '(False)'}), "('z', positive=False)\n", (25302, 25323), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25332, 25359), 'sympy.core.symbol.Symbol', 'Symbol', (['"""p"""'], {'negative': '(False)'}), "('p', negative=False)\n", (25338, 25359), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25368, 25393), 'sympy.core.symbol.Symbol', 'Symbol', (['"""q"""'], 
{'integer': '(True)'}), "('q', integer=True)\n", (25374, 25393), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25402, 25428), 'sympy.core.symbol.Symbol', 'Symbol', (['"""r"""'], {'integer': '(False)'}), "('r', integer=False)\n", (25408, 25428), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((25437, 25474), 'sympy.core.symbol.Symbol', 'Symbol', (['"""e"""'], {'even': '(True)', 'negative': '(True)'}), "('e', even=True, negative=True)\n", (25443, 25474), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((26241, 26284), 'sympy.core.symbol.Symbol', 'Symbol', (['"""k"""'], {'integer': '(True)', 'nonpositive': '(True)'}), "('k', integer=True, nonpositive=True)\n", (26247, 26284), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((26404, 26415), 'sympy.core.symbol.Symbol', 'Symbol', (['"""p"""'], {}), "('p')\n", (26410, 26415), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((26425, 26436), 'sympy.core.symbol.Dummy', 'Dummy', (['"""_k"""'], {}), "('_k')\n", (26430, 26436), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((26781, 26807), 'sympy.core.symbol.Symbol', 'Symbol', (['"""p"""'], {'positive': '(True)'}), "('p', positive=True)\n", (26787, 26807), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((1414, 1424), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['nan'], {}), '(nan)\n', (1419, 1424), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1443, 1452), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['oo'], {}), '(oo)\n', (1448, 1452), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1471, 1482), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(-100)'], {}), '(-100)\n', (1476, 1482), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, 
loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1501, 1509), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(0)'], {}), '(0)\n', (1506, 1509), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1528, 1541), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(-100.0)'], {}), '(-100.0)\n', (1533, 1541), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1561, 1569), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(1)'], {}), '(1)\n', (1566, 1569), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1586, 1594), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(2)'], {}), '(2)\n', (1591, 1594), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1611, 1619), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(3)'], {}), '(3)\n', (1616, 1619), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1637, 1647), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(102)'], {}), '(102)\n', (1642, 1647), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1651, 1665), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(101)'], {}), '(101)\n', (1660, 1665), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((1678, 1691), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['S.Half'], {}), '(S.Half)\n', (1683, 1691), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((1695, 1703), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (1699, 1703), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((3854, 3870), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(n - 1)'], {}), '(n - 1)\n', (3863, 3870), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((4754, 4770), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', '(0)'], {}), '(x, 0)\n', (4764, 4770), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5079, 5100), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['S.Half', 'x'], {}), '(S.Half, x)\n', (5089, 5100), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5319, 5360), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(S.Half + 3)', 'x'], {'evaluate': '(False)'}), '(S.Half + 3, x, evaluate=False)\n', (5329, 5360), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5376, 5401), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(S.Half + 3)', 'x'], {}), '(S.Half + 3, x)\n', (5386, 5401), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5420, 5461), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(S.Half - 3)', 'x'], {'evaluate': '(False)'}), '(S.Half - 3, x, evaluate=False)\n', (5430, 5461), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, 
polygamma, trigamma, uppergamma\n'), ((5477, 5502), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(S.Half - 3)', 'x'], {}), '(S.Half - 3, x)\n', (5487, 5502), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5603, 5617), 'sympy.core.numbers.Rational', 'Rational', (['(1)', '(3)'], {}), '(1, 3)\n', (5611, 5617), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((5717, 5733), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(3)', 'x'], {}), '(3, x)\n', (5727, 5733), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6101, 6119), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', '(-oo)'], {}), '(x, -oo)\n', (6111, 6119), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7823, 7839), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['k', 'y'], {}), '(k, y)\n', (7833, 7839), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7931, 7948), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(70)', '(6)'], {}), '(70, 6)\n', (7941, 7948), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8429, 8445), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(4)', '(0)'], {}), '(4, 0)\n', (8439, 8445), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8758, 8774), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(0)', 'p'], 
{}), '(0, p)\n', (8768, 8774), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8797, 8813), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['p', '(0)'], {}), '(p, 0)\n', (8807, 8813), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8817, 8825), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['p'], {}), '(p)\n', (8822, 8825), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8837, 8858), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['S.Half', 'x'], {}), '(S.Half, x)\n', (8847, 8858), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9078, 9119), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(S.Half + 3)', 'x'], {'evaluate': '(False)'}), '(S.Half + 3, x, evaluate=False)\n', (9088, 9119), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9135, 9160), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(S.Half + 3)', 'x'], {}), '(S.Half + 3, x)\n', (9145, 9160), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9179, 9220), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(S.Half - 3)', 'x'], {'evaluate': '(False)'}), '(S.Half - 3, x, evaluate=False)\n', (9189, 9220), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9236, 9261), 
'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(S.Half - 3)', 'x'], {}), '(S.Half - 3, x)\n', (9246, 9261), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9443, 9457), 'sympy.core.numbers.Rational', 'Rational', (['(1)', '(3)'], {}), '(1, 3)\n', (9451, 9457), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((9557, 9573), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(3)', 'x'], {}), '(3, x)\n', (9567, 9573), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9836, 9853), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(-2)', 'x'], {}), '(-2, x)\n', (9846, 9853), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9989, 10007), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', '(-oo)'], {}), '(x, -oo)\n', (9999, 10007), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10171, 10188), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(70)', '(6)'], {}), '(70, 6)\n', (10181, 10188), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10533, 10550), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n', 'nan'], {}), '(n, nan)\n', (10542, 10550), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10570, 10586), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'oo'], {}), '(0, oo)\n', (10579, 10586), False, 
'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10604, 10621), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-oo)'], {}), '(0, -oo)\n', (10613, 10621), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10639, 10659), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(I * oo)'], {}), '(0, I * oo)\n', (10648, 10659), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10675, 10696), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-I * oo)'], {}), '(0, -I * oo)\n', (10684, 10696), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10712, 10728), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'oo'], {}), '(1, oo)\n', (10721, 10728), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10745, 10761), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(5)', 'oo'], {}), '(5, oo)\n', (10754, 10761), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10779, 10795), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-9)'], {}), '(0, -9)\n', (10788, 10795), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10815, 10831), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-9)'], {}), '(0, -9)\n', (10824, 10831), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10850, 10866), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-1)'], {}), '(0, -1)\n', (10859, 10866), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10886, 10901), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(0)'], {}), '(0, 0)\n', (10895, 10901), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10921, 10936), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(1)'], {}), '(0, 1)\n', (10930, 10936), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10963, 10978), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(7)'], {}), '(0, 7)\n', (10972, 10978), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11024, 11039), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(1)'], {}), '(1, 1)\n', (11033, 11039), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11062, 11077), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(2)'], {}), '(1, 2)\n', (11071, 11077), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11104, 11119), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(3)'], {}), '(1, 3)\n', (11113, 11119), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11159, 11174), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(1)'], {}), '(3, 1)\n', (11168, 11174), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11200, 11215), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(5)'], {}), '(3, 5)\n', (11209, 11215), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11269, 11284), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(5)', '(1)'], {}), '(5, 1)\n', (11278, 11284), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11315, 11335), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'S.Half'], {}), '(1, S.Half)\n', (11324, 11335), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11360, 11380), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', 'S.Half'], {}), '(2, S.Half)\n', (11369, 11380), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11407, 11428), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(11)', 'S.Half'], {}), '(11, S.Half)\n', (11416, 11428), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11495, 11510), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (11504, 11510), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11868, 11883), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (11877, 11883), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11928, 11938), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(2)', 'x'], {}), '(2, x)\n', (11932, 11938), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((12041, 12056), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(2)'], {}), '(I, 2)\n', (12050, 12056), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12281, 12297), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n1', 'x'], {}), '(n1, x)\n', (12290, 12297), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12343, 12359), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n2', 'x'], {}), '(n2, x)\n', (12352, 12359), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12405, 12421), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n3', 'x'], {}), '(n3, x)\n', (12414, 12421), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12467, 12483), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n4', 'x'], {}), '(n4, x)\n', (12476, 12483), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13180, 
13195), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['k', 'x'], {}), '(k, x)\n', (13189, 13195), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13404, 13420), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(-1)', 'x'], {}), '(-1, x)\n', (13413, 13420), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13424, 13435), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (13432, 13435), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13628, 13644), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-x)'], {}), '(0, -x)\n', (13637, 13644), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14404, 14420), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['pi', '(3)'], {}), '(pi, 3)\n', (14413, 14420), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14974, 14989), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'I'], {}), '(0, I)\n', (14983, 14989), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15211, 15226), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15220, 15226), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15508, 15523), 
'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', 'x'], {}), '(2, x)\n', (15517, 15523), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16591, 16610), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(x + y)'], {}), '(0, x + y)\n', (16600, 16610), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16673, 16692), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(x + y)'], {}), '(1, x + y)\n', (16682, 16692), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17384, 17396), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['nan'], {}), '(nan)\n', (17391, 17396), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17416, 17427), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['oo'], {}), '(oo)\n', (17423, 17427), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17445, 17457), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(-oo)'], {}), '(-oo)\n', (17452, 17457), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17475, 17490), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(I * oo)'], {}), '(I * oo)\n', (17482, 17490), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17506, 17522), 'sympy.functions.special.gamma_functions.digamma', 'digamma', 
(['(-I * oo)'], {}), '(-I * oo)\n', (17513, 17522), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17539, 17550), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(-9)'], {}), '(-9)\n', (17546, 17550), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17570, 17581), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(-9)'], {}), '(-9)\n', (17577, 17581), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17600, 17611), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(-1)'], {}), '(-1)\n', (17607, 17611), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17631, 17641), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(0)'], {}), '(0)\n', (17638, 17641), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17661, 17671), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(1)'], {}), '(1)\n', (17668, 17671), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17698, 17708), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(7)'], {}), '(7)\n', (17705, 17708), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17791, 17801), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {}), '(x)\n', (17798, 17801), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, 
loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18147, 18162), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (18156, 18162), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18324, 18339), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (18333, 18339), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18570, 18585), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (18579, 18585), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18663, 18678), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (18672, 18678), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19330, 19349), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(x + y)'], {}), '(0, x + y)\n', (19339, 19349), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19383, 19396), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['nan'], {}), '(nan)\n', (19391, 19396), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19416, 19428), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['oo'], {}), '(oo)\n', (19424, 19428), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, 
uppergamma\n'), ((19446, 19457), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(1)'], {}), '(1)\n', (19454, 19457), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19480, 19491), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(2)'], {}), '(2)\n', (19488, 19491), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19518, 19529), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(3)'], {}), '(3)\n', (19526, 19529), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19615, 19625), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(2)', 'x'], {}), '(2, x)\n', (19619, 19625), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((19795, 19810), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', 'x'], {}), '(2, x)\n', (19804, 19810), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20045, 20060), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (20054, 20060), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20753, 20772), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(x + y)'], {}), '(1, x + y)\n', (20762, 20772), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21088, 21100), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(-1)'], {}), '(-1)\n', (21096, 21100), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21118, 21130), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(-2)'], {}), '(-2)\n', (21126, 21130), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21148, 21159), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(0)'], {}), '(0)\n', (21156, 21159), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21177, 21188), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(1)'], {}), '(1)\n', (21185, 21188), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21205, 21216), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(2)'], {}), '(2)\n', (21213, 21216), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21233, 21244), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(3)'], {}), '(3)\n', (21241, 21244), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21248, 21254), 'sympy.functions.elementary.exponential.log', 'log', (['(2)'], {}), '(2)\n', (21251, 21254), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21266, 21277), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(4)'], {}), '(4)\n', (21274, 21277), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21281, 21287), 
'sympy.functions.elementary.exponential.log', 'log', (['(6)'], {}), '(6)\n', (21284, 21287), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21349, 21360), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['n'], {}), '(n)\n', (21357, 21360), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21389, 21401), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(-n)'], {}), '(-n)\n', (21397, 21401), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21419, 21434), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(n / 2)'], {}), '(n / 2)\n', (21427, 21434), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21503, 21515), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['oo'], {}), '(oo)\n', (21511, 21515), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21533, 21546), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(-oo)'], {}), '(-oo)\n', (21541, 21546), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21565, 21581), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(I * oo)'], {}), '(I * oo)\n', (21573, 21581), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21598, 21615), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(-I * oo)'], {}), '(-I * oo)\n', (21606, 21615), False, 'from sympy.functions.special.gamma_functions import 
digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21632, 21645), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['zoo'], {}), '(zoo)\n', (21640, 21645), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21664, 21677), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['nan'], {}), '(nan)\n', (21672, 21677), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21703, 21718), 'sympy.core.numbers.Rational', 'Rational', (['(16)', '(3)'], {}), '(16, 3)\n', (21711, 21718), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((21795, 21802), 'sympy.functions.elementary.exponential.log', 'log', (['(13)'], {}), '(13)\n', (21798, 21802), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21885, 21900), 'sympy.core.numbers.Rational', 'Rational', (['(19)', '(4)'], {}), '(19, 4)\n', (21893, 21900), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((21977, 21984), 'sympy.functions.elementary.exponential.log', 'log', (['(15)'], {}), '(15)\n', (21980, 21984), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22067, 22082), 'sympy.core.numbers.Rational', 'Rational', (['(23)', '(7)'], {}), '(23, 7)\n', (22075, 22082), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22149, 22156), 'sympy.functions.elementary.exponential.log', 'log', (['(16)'], {}), '(16)\n', (22152, 22156), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22617, 22632), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (22626, 22632), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, 
lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((22978, 22987), 'sympy.series.order.O', 'O', (['(x ** 7)'], {}), '(x ** 7)\n', (22979, 22987), False, 'from sympy.series.order import O\n'), ((23564, 23578), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['zoo'], {}), '(zoo)\n', (23573, 23578), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((26307, 26315), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['k'], {}), '(k)\n', (26312, 26315), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26960, 26978), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['nan', '(1)'], {}), '(nan, 1)\n', (26970, 26978), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27040, 27056), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(1)', '(1)'], {}), '(1, 1)\n', (27050, 27056), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27073, 27089), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(2)', '(1)'], {}), '(2, 1)\n', (27083, 27089), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27106, 27122), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(3)', '(1)'], {}), '(3, 1)\n', (27116, 27122), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27140, 27158), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(102)', '(1)'], {}), '(102, 1)\n', (27150, 27158), False, 'from sympy.functions.special.gamma_functions import 
digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27162, 27176), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(101)'], {}), '(101)\n', (27171, 27176), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((27188, 27209), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['S.Half', '(1)'], {}), '(S.Half, 1)\n', (27198, 27209), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27213, 27221), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (27217, 27221), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((27234, 27250), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(1)', '(2)'], {}), '(1, 2)\n', (27244, 27250), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27268, 27284), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(2)', '(2)'], {}), '(2, 2)\n', (27278, 27284), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27305, 27321), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(1)', '(3)'], {}), '(1, 3)\n', (27315, 27321), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27340, 27356), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(2)', '(3)'], {}), '(2, 3)\n', (27350, 27356), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27379, 27395), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(3)', '(3)'], 
{}), '(3, 3)\n', (27389, 27395), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28192, 28208), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(n - 1)'], {}), '(n - 1)\n', (28201, 28208), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((29045, 29049), 'sympy.core.singleton.S', 'S', (['(1)'], {}), '(1)\n', (29046, 29049), False, 'from sympy.core.singleton import S\n'), ((29097, 29101), 'sympy.core.singleton.S', 'S', (['(1)'], {}), '(1)\n', (29098, 29101), False, 'from sympy.core.singleton import S\n'), ((1722, 1736), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (1730, 1736), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1741, 1749), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (1745, 1749), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((1774, 1788), 'sympy.core.numbers.Rational', 'Rational', (['(5)', '(2)'], {}), '(5, 2)\n', (1782, 1788), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1793, 1801), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (1797, 1801), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((1802, 1816), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (1810, 1816), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1834, 1848), 'sympy.core.numbers.Rational', 'Rational', (['(7)', '(2)'], {}), '(7, 2)\n', (1842, 1848), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1853, 1861), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (1857, 1861), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((1862, 1877), 'sympy.core.numbers.Rational', 'Rational', (['(15)', '(8)'], 
{}), '(15, 8)\n', (1870, 1877), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1896, 1911), 'sympy.core.numbers.Rational', 'Rational', (['(-1)', '(2)'], {}), '(-1, 2)\n', (1904, 1911), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1919, 1927), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (1923, 1927), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((1945, 1960), 'sympy.core.numbers.Rational', 'Rational', (['(-3)', '(2)'], {}), '(-3, 2)\n', (1953, 1960), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((1965, 1973), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (1969, 1973), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((1974, 1988), 'sympy.core.numbers.Rational', 'Rational', (['(4)', '(3)'], {}), '(4, 3)\n', (1982, 1988), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2006, 2021), 'sympy.core.numbers.Rational', 'Rational', (['(-5)', '(2)'], {}), '(-5, 2)\n', (2014, 2021), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2026, 2034), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (2030, 2034), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((2035, 2051), 'sympy.core.numbers.Rational', 'Rational', (['(-8)', '(15)'], {}), '(-8, 15)\n', (2043, 2051), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2070, 2086), 'sympy.core.numbers.Rational', 'Rational', (['(-15)', '(2)'], {}), '(-15, 2)\n', (2078, 2086), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2091, 2099), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (2095, 2099), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((2100, 2122), 'sympy.core.numbers.Rational', 'Rational', (['(256)', 
'(2027025)'], {}), '(256, 2027025)\n', (2108, 2122), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2189, 2205), 'sympy.core.numbers.Rational', 'Rational', (['(64)', '(33)'], {}), '(64, 33)\n', (2197, 2205), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2293, 2310), 'sympy.core.numbers.Rational', 'Rational', (['(81)', '(280)'], {}), '(81, 280)\n', (2301, 2310), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2397, 2414), 'sympy.core.numbers.Rational', 'Rational', (['(880)', '(81)'], {}), '(880, 81)\n', (2405, 2414), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2501, 2517), 'sympy.core.numbers.Rational', 'Rational', (['(30)', '(49)'], {}), '(30, 49)\n', (2509, 2517), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2604, 2620), 'sympy.core.numbers.Rational', 'Rational', (['(33)', '(64)'], {}), '(33, 64)\n', (2612, 2620), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2675, 2683), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (2680, 2683), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2684, 2699), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (2693, 2699), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2746, 2754), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (2751, 2754), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2829, 2837), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (2834, 2837), False, 'from sympy.functions.special.gamma_functions 
import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2860, 2868), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (2865, 2868), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2879, 2891), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], {}), '(x)\n', (2888, 2891), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((2970, 2987), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x + S.Half)'], {}), '(x + S.Half)\n', (2975, 2987), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3012, 3029), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x - S.Half)'], {}), '(x - S.Half)\n', (3017, 3029), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3044, 3061), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(S.Half + x)'], {}), '(S.Half + x)\n', (3049, 3061), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3506, 3514), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['y'], {}), '(y)\n', (3511, 3514), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3595, 3603), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['y'], {}), '(y)\n', (3600, 3603), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3632, 3659), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(-1.0)'], {'evaluate': 
'(False)'}), '(-1.0, evaluate=False)\n', (3637, 3659), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3688, 3712), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(0)'], {'evaluate': '(False)'}), '(0, evaluate=False)\n', (3693, 3712), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3741, 3766), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(-2)'], {'evaluate': '(False)'}), '(-2, evaluate=False)\n', (3746, 3766), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((4006, 4015), 'sympy.series.order.O', 'O', (['(x ** 3)'], {}), '(x ** 3)\n', (4007, 4015), False, 'from sympy.series.order import O\n'), ((4289, 4313), 'sympy.series.order.O', 'O', (['((x + 1) ** 3)', '(x, -1)'], {}), '((x + 1) ** 3, (x, -1))\n', (4290, 4313), False, 'from sympy.series.order import O\n'), ((4826, 4833), 'sympy.functions.elementary.exponential.exp', 'exp', (['(-y)'], {}), '(-y)\n', (4829, 4833), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((4859, 4869), 'sympy.core.random.random_complex_number', 'randcplx', ([], {}), '()\n', (4867, 4869), True, 'from sympy.core.random import test_derivative_numerically as td, random_complex_number as randcplx, verify_numerically as tn\n'), ((4906, 4916), 'sympy.core.random.random_complex_number', 'randcplx', ([], {}), '()\n', (4914, 4916), True, 'from sympy.core.random import test_derivative_numerically as td, random_complex_number as randcplx, verify_numerically as tn\n'), ((5029, 5066), 'sympy.functions.special.hyper.meijerg', 'meijerg', (['[]', '[1, 1]', '[0, 0, x]', '[]', 'y'], {}), '([], [1, 1], [0, 0, x], [], y)\n', (5036, 5066), False, 'from sympy.functions.special.hyper import meijerg\n'), 
((5104, 5112), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (5108, 5112), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((5251, 5288), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['S.Half', 'x'], {'evaluate': '(False)'}), '(S.Half, x, evaluate=False)\n', (5261, 5288), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5793, 5812), 'sympy.functions.elementary.exponential.exp', 'exp', (['(4 * I * pi * y)'], {}), '(4 * I * pi * y)\n', (5796, 5812), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((5965, 5981), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'y'], {}), '(x, y)\n', (5975, 5981), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5997, 6009), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], {}), '(x)\n', (6006, 6009), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((6011, 6023), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['y'], {}), '(y)\n', (6020, 6023), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((6046, 6062), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', '(0)'], {}), '(x, 0)\n', (6056, 6062), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7592, 7600), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (7597, 7600), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7717, 7725), 'sympy.functions.special.gamma_functions.gamma', 'gamma', 
(['k'], {}), '(k)\n', (7722, 7725), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7891, 7899), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (7896, 7899), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7902, 7918), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (7912, 7918), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7952, 7965), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(69)'], {}), '(69)\n', (7961, 7965), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((8502, 8509), 'sympy.functions.elementary.exponential.exp', 'exp', (['(-y)'], {}), '(-y)\n', (8505, 8509), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((8535, 8545), 'sympy.core.random.random_complex_number', 'randcplx', ([], {}), '()\n', (8543, 8545), True, 'from sympy.core.random import test_derivative_numerically as td, random_complex_number as randcplx, verify_numerically as tn\n'), ((8629, 8666), 'sympy.functions.special.hyper.meijerg', 'meijerg', (['[]', '[1, 1]', '[0, 0, x]', '[]', 'y'], {}), '([], [1, 1], [0, 0, x], [], y)\n', (8636, 8666), False, 'from sympy.functions.special.hyper import meijerg\n'), ((8695, 8705), 'sympy.core.random.random_complex_number', 'randcplx', ([], {}), '()\n', (8703, 8705), True, 'from sympy.core.random import test_derivative_numerically as td, random_complex_number as randcplx, verify_numerically as tn\n'), ((8779, 8785), 'sympy.functions.special.error_functions.Ei', 'Ei', (['(-p)'], {}), '(-p)\n', (8781, 8785), False, 'from sympy.functions.special.error_functions import Ei, erf, erfc\n'), 
((8862, 8870), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (8866, 8870), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((9010, 9047), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['S.Half', 'x'], {'evaluate': '(False)'}), '(S.Half, x, evaluate=False)\n', (9020, 9047), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9857, 9869), 'sympy.functions.special.error_functions.expint', 'expint', (['(3)', 'x'], {}), '(3, x)\n', (9863, 9869), False, 'from sympy.functions.special.error_functions import expint\n'), ((9897, 9913), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (9907, 9913), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9929, 9941), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], {}), '(x)\n', (9938, 9941), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((9943, 9955), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['y'], {}), '(y)\n', (9952, 9955), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((10062, 10079), 'sympy.functions.special.error_functions.expint', 'expint', (['(-x + 1)', 'y'], {}), '(-x + 1, y)\n', (10068, 10079), False, 'from sympy.functions.special.error_functions import expint\n'), ((10131, 10139), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (10136, 10139), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10142, 10158), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'y'], {}), '(x, y)\n', (10152, 10158), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10294, 10301), 'sympy.functions.elementary.exponential.exp', 'exp', (['(-6)'], {}), '(-6)\n', (10297, 10301), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((10982, 10998), 'sympy.core.numbers.Rational', 'Rational', (['(49)', '(20)'], {}), '(49, 20)\n', (10990, 10998), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((11133, 11147), 'sympy.core.numbers.Rational', 'Rational', (['(5)', '(4)'], {}), '(5, 4)\n', (11141, 11147), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((11388, 11395), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(3)'], {}), '(3)\n', (11392, 11395), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((11476, 11480), 'sympy.core.singleton.S', 'S', (['m'], {}), '(m)\n', (11477, 11480), False, 'from sympy.core.singleton import S\n'), ((11986, 11996), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(3)', 'x'], {}), '(3, x)\n', (11990, 11996), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((12562, 12577), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(n5 + 1)', 'x'], {}), '(n5 + 1, x)\n', (12566, 12577), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((12621, 12640), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(4)', '(7 * x)'], {}), '(4, 7 * x)\n', (12630, 12640), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12688, 12703), 'sympy.functions.combinatorial.numbers.harmonic', 'harmonic', (['(x - 1)'], {}), '(x - 1)\n', (12696, 12703), False, 'from sympy.functions.combinatorial.numbers import harmonic\n'), ((13004, 13017), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['ni'], {}), '(ni)\n', 
(13013, 13017), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((13526, 13542), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(-2)', 'x'], {}), '(-2, x)\n', (13535, 13542), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13657, 13674), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', '(2.5)'], {}), '(2, 2.5)\n', (13666, 13674), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13707, 13725), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', '(-2.5)'], {}), '(2, -2.5)\n', (13716, 13725), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13758, 13775), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(2.5)'], {}), '(3, 2.5)\n', (13767, 13775), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13807, 13825), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(-2.5)'], {}), '(3, -2.5)\n', (13816, 13825), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13857, 13876), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(-2)', '(-2.5)'], {}), '(-2, -2.5)\n', (13866, 13876), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13908, 13927), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(-3)', '(-2.5)'], {}), '(-3, -2.5)\n', (13917, 13927), False, 'from sympy.functions.special.gamma_functions 
import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13960, 13977), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', '(2.5)'], {}), '(2, 2.5)\n', (13969, 13977), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14009, 14026), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(2.5)'], {}), '(3, 2.5)\n', (14018, 14026), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14059, 14077), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(-2.5)'], {}), '(3, -2.5)\n', (14068, 14077), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14110, 14128), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', '(-2.5)'], {}), '(2, -2.5)\n', (14119, 14128), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14160, 14179), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(-2)', '(-2.5)'], {}), '(-2, -2.5)\n', (14169, 14179), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14211, 14230), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(-3)', '(-2.5)'], {}), '(-3, -2.5)\n', (14220, 14230), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14263, 14278), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(2)'], {}), '(I, 2)\n', (14272, 14278), False, 'from sympy.functions.special.gamma_functions 
import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14310, 14325), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(3)'], {}), '(I, 3)\n', (14319, 14325), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15095, 15110), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'I'], {}), '(0, I)\n', (15104, 15110), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15336, 15342), 'sympy.functions.elementary.exponential.log', 'log', (['(2)'], {}), '(2)\n', (15339, 15342), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((15587, 15602), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15596, 15602), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15683, 15698), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15692, 15698), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15780, 15795), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15789, 15795), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16086, 16101), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (16095, 16101), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17136, 17159), 
'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(y + 4 * x)'], {}), '(3, y + 4 * x)\n', (17145, 17159), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17206, 17220), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (17214, 17220), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((17287, 17301), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (17295, 17301), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((17712, 17728), 'sympy.core.numbers.Rational', 'Rational', (['(49)', '(20)'], {}), '(49, 20)\n', (17720, 17728), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((17772, 17776), 'sympy.core.singleton.S', 'S', (['m'], {}), '(m)\n', (17773, 17776), False, 'from sympy.core.singleton import S\n'), ((18207, 18222), 'sympy.functions.combinatorial.numbers.harmonic', 'harmonic', (['(x - 1)'], {}), '(x - 1)\n', (18215, 18222), False, 'from sympy.functions.combinatorial.numbers import harmonic\n'), ((18248, 18258), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['I'], {}), '(I)\n', (18255, 18258), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18352, 18378), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (18359, 18378), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18406, 18432), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (18413, 18432), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, 
polygamma, trigamma, uppergamma\n'), ((18464, 18490), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (18471, 18490), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18791, 18797), 'sympy.functions.elementary.exponential.log', 'log', (['(2)'], {}), '(2)\n', (18794, 18797), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((18856, 18871), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (18865, 18871), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18947, 18962), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (18956, 18962), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19039, 19054), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (19048, 19054), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19543, 19557), 'sympy.core.numbers.Rational', 'Rational', (['(5)', '(4)'], {}), '(5, 4)\n', (19551, 19557), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((19823, 19850), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (19831, 19850), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19878, 19905), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', 
(19886, 19905), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19937, 19964), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (19945, 19964), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20264, 20279), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (20273, 20279), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20999, 21013), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(2)', '(3)'], {}), '(2, 3)\n', (21007, 21013), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21368, 21376), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['n'], {}), '(n)\n', (21373, 21376), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21785, 21792), 'sympy.functions.elementary.exponential.log', 'log', (['(10)'], {}), '(10)\n', (21788, 21792), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21967, 21974), 'sympy.functions.elementary.exponential.log', 'log', (['(11)'], {}), '(11)\n', (21970, 21974), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22140, 22146), 'sympy.functions.elementary.exponential.log', 'log', (['(9)'], {}), '(9)\n', (22143, 22146), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22239, 22254), 'sympy.core.numbers.Rational', 'Rational', (['(19)', '(4)'], {}), '(19, 4)\n', (22247, 22254), False, 'from 
sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22414, 22429), 'sympy.core.numbers.Rational', 'Rational', (['(23)', '(7)'], {}), '(23, 7)\n', (22422, 22429), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22761, 22770), 'sympy.series.order.O', 'O', (['(x ** 2)'], {}), '(x ** 2)\n', (22762, 22770), False, 'from sympy.series.order import O\n'), ((22859, 22874), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(1 / x)'], {}), '(1 / x)\n', (22867, 22874), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23097, 23105), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (23102, 23105), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23285, 23294), 'sympy.series.order.O', 'O', (['(x ** 6)'], {}), '(x ** 6)\n', (23286, 23294), False, 'from sympy.series.order import O\n'), ((23386, 23397), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (23394, 23397), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23411, 23423), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], {}), '(x)\n', (23420, 23423), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((23446, 23457), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(0)'], {}), '(0)\n', (23454, 23457), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23486, 23497), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(1)'], {}), '(1)\n', (23494, 23497), False, 'from sympy.functions.special.gamma_functions import digamma, 
gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23511, 23523), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['(1)'], {}), '(1)\n', (23520, 23523), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((23546, 23559), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(-oo)'], {}), '(-oo)\n', (23554, 23559), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24091, 24107), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['S.Half'], {}), '(S.Half)\n', (24099, 24107), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24135, 24146), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(0)'], {}), '(0)\n', (24143, 24146), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24229, 24240), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['I'], {}), '(I)\n', (24237, 24240), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24268, 24287), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(2 + 3 * I)'], {}), '(2 + 3 * I)\n', (24276, 24287), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24611, 24620), 'sympy.series.order.O', 'O', (['(x ** 3)'], {}), '(x ** 3)\n', (24612, 24620), False, 'from sympy.series.order import O\n'), ((24698, 24707), 'sympy.series.order.O', 'O', (['(x ** 5)'], {}), '(x ** 5)\n', (24699, 24707), False, 'from sympy.series.order import O\n'), ((24810, 24820), 'sympy.series.order.O', 'O', (['(x ** 11)'], {}), 
'(x ** 11)\n', (24811, 24820), False, 'from sympy.series.order import O\n'), ((25023, 25031), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['n'], {}), '(n)\n', (25028, 25031), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25060, 25068), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['m'], {}), '(m)\n', (25065, 25068), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25096, 25104), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['o'], {}), '(o)\n', (25101, 25104), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25132, 25140), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['p'], {}), '(p)\n', (25137, 25140), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25168, 25176), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['w'], {}), '(w)\n', (25173, 25176), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25486, 25494), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (25491, 25494), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25526, 25534), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['y'], {}), '(y)\n', (25531, 25534), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25566, 25574), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['z'], {}), '(z)\n', 
(25571, 25574), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25606, 25614), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['p'], {}), '(p)\n', (25611, 25614), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25646, 25654), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['q'], {}), '(q)\n', (25651, 25654), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25686, 25694), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['r'], {}), '(r)\n', (25691, 25694), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25726, 25743), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(e + S.Half)'], {}), '(e + S.Half)\n', (25731, 25743), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25775, 25792), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(e - S.Half)'], {}), '(e - S.Half)\n', (25780, 25792), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25902, 25916), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (25910, 25916), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((25987, 26001), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (25995, 26001), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((26449, 26465), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['x', 'p'], {}), '(x, p)\n', (26459, 
26465), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26731, 26747), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['x', '(1)'], {}), '(x, 1)\n', (26741, 26747), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26758, 26770), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], {}), '(x)\n', (26767, 26770), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((27449, 27457), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (27454, 27457), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27458, 27473), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (27467, 27473), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27692, 27700), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (27697, 27700), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27793, 27801), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (27798, 27801), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28308, 28324), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(n - 1)'], {}), '(n - 1)\n', (28317, 28324), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((28451, 28467), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(n - 
1)'], {}), '(n - 1)\n', (28460, 28467), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((28555, 28592), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['S.Half', '(3)'], {'evaluate': '(False)'}), '(S.Half, 3, evaluate=False)\n', (28565, 28592), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28621, 28653), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(0)', '(1)'], {'evaluate': '(False)'}), '(0, 1, evaluate=False)\n', (28631, 28653), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28682, 28714), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(1)', '(3)'], {'evaluate': '(False)'}), '(1, 3, evaluate=False)\n', (28692, 28714), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28743, 28778), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(-1.0)', '(3)'], {'evaluate': '(False)'}), '(-1.0, 3, evaluate=False)\n', (28753, 28778), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28807, 28841), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(0.7)', '(3)'], {'evaluate': '(False)'}), '(0.7, 3, evaluate=False)\n', (28817, 28841), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28869, 28901), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(3)', '(3)'], {'evaluate': '(False)'}), '(3, 3, evaluate=False)\n', (28879, 28901), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, 
multigamma, polygamma, trigamma, uppergamma\n'), ((2212, 2226), 'sympy.core.numbers.Rational', 'Rational', (['(5)', '(8)'], {}), '(5, 8)\n', (2220, 2226), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2317, 2331), 'sympy.core.numbers.Rational', 'Rational', (['(2)', '(3)'], {}), '(2, 3)\n', (2325, 2331), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2421, 2435), 'sympy.core.numbers.Rational', 'Rational', (['(2)', '(3)'], {}), '(2, 3)\n', (2429, 2435), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2524, 2538), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(7)'], {}), '(3, 7)\n', (2532, 2538), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2627, 2641), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (2635, 2641), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2655, 2663), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (2660, 2663), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2712, 2724), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x - 1)'], {}), '(x - 1)\n', (2717, 2724), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2774, 2786), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x + 2)'], {}), '(x + 2)\n', (2779, 2786), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3157, 3171), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (3165, 3171), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((3823, 3831), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['n'], 
{}), '(n)\n', (3828, 3831), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((3909, 3921), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x + 1)'], {}), '(x + 1)\n', (3914, 3921), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((4025, 4033), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (4030, 4033), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((4423, 4440), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(I * pi)'], {}), '(I * pi)\n', (4432, 4440), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((4452, 4470), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(-I * pi)'], {}), '(-I * pi)\n', (4461, 4470), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((4787, 4803), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'y'], {}), '(x, y)\n', (4797, 4803), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((4933, 4949), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'y'], {}), '(x, y)\n', (4943, 4949), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5117, 5124), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['x'], {}), '(x)\n', (5121, 5124), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((5141, 5166), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(S.Half - 3)', 'x'], {}), '(S.Half - 3, x)\n', (5151, 5166), 
False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5198, 5223), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(S.Half + 3)', 'x'], {}), '(S.Half + 3, x)\n', (5208, 5223), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5693, 5714), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(4 * pi * I)'], {}), '(4 * pi * I)\n', (5702, 5714), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((5759, 5780), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(5 * pi * I)'], {}), '(5 * pi * I)\n', (5768, 5780), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((5866, 5887), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(5 * pi * I)'], {}), '(5 * pi * I)\n', (5875, 5887), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((6133, 6149), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(0)', 'x'], {}), '(0, x)\n', (6143, 6149), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6266, 6298), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(1)', 'x'], {'evaluate': '(False)'}), '(1, x, evaluate=False)\n', (6276, 6298), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6345, 6361), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'x'], {}), '(x, x)\n', (6355, 6361), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6409, 6429), 
'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(x + 1)', 'x'], {}), '(x + 1, x)\n', (6419, 6429), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6477, 6497), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(1 / x)', 'x'], {}), '(1 / x, x)\n', (6487, 6497), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6543, 6563), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(0)', '(x + 1)'], {}), '(0, x + 1)\n', (6553, 6563), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6683, 6719), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(1)', '(x + 1)'], {'evaluate': '(False)'}), '(1, x + 1, evaluate=False)\n', (6693, 6719), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6766, 6786), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', '(x + 1)'], {}), '(x, x + 1)\n', (6776, 6786), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6833, 6857), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(x + 1)', '(x + 1)'], {}), '(x + 1, x + 1)\n', (6843, 6857), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((6904, 6928), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(1 / x)', '(x + 1)'], {}), '(1 / x, x + 1)\n', (6914, 6928), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, 
polygamma, trigamma, uppergamma\n'), ((6974, 6994), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(0)', '(1 / x)'], {}), '(0, 1 / x)\n', (6984, 6994), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7111, 7147), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(1)', '(1 / x)'], {'evaluate': '(False)'}), '(1, 1 / x, evaluate=False)\n', (7121, 7147), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7193, 7213), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', '(1 / x)'], {}), '(x, 1 / x)\n', (7203, 7213), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7259, 7283), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(x + 1)', '(1 / x)'], {}), '(x + 1, 1 / x)\n', (7269, 7283), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7329, 7353), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['(1 / x)', '(1 / x)'], {}), '(1 / x, 1 / x)\n', (7339, 7353), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7398, 7414), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', '(2)'], {}), '(x, 2)\n', (7408, 7414), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7521, 7537), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'y'], {}), '(x, y)\n', (7531, 7537), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, 
loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7572, 7589), 'sympy.functions.special.error_functions.expint', 'expint', (['(-x + 1)', 'y'], {}), '(-x + 1, y)\n', (7578, 7589), False, 'from sympy.functions.special.error_functions import expint\n'), ((7646, 7662), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['k', 'y'], {}), '(k, y)\n', (7656, 7662), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7697, 7714), 'sympy.functions.special.error_functions.expint', 'expint', (['(-k + 1)', 'y'], {}), '(-k + 1, y)\n', (7703, 7714), False, 'from sympy.functions.special.error_functions import expint\n'), ((7787, 7803), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['k', 'y'], {}), '(k, y)\n', (7797, 7803), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((7851, 7867), 'sympy.functions.special.gamma_functions.lowergamma', 'lowergamma', (['x', 'y'], {}), '(x, y)\n', (7861, 7867), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8072, 8079), 'sympy.functions.elementary.exponential.exp', 'exp', (['(-6)'], {}), '(-6)\n', (8075, 8079), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((8462, 8478), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (8472, 8478), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8565, 8581), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (8575, 8581), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, 
multigamma, polygamma, trigamma, uppergamma\n'), ((8603, 8619), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (8613, 8619), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8620, 8626), 'sympy.functions.elementary.exponential.log', 'log', (['y'], {}), '(y)\n', (8623, 8626), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((8876, 8883), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['x'], {}), '(x)\n', (8880, 8883), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((8900, 8925), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(S.Half - 3)', 'x'], {}), '(S.Half - 3, x)\n', (8910, 8925), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((8957, 8982), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['(S.Half + 3)', 'x'], {}), '(S.Half + 3, x)\n', (8967, 8982), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9533, 9554), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(4 * pi * I)'], {}), '(4 * pi * I)\n', (9542, 9554), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((9599, 9620), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(5 * pi * I)'], {}), '(5 * pi * I)\n', (9608, 9620), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((9633, 9652), 'sympy.functions.elementary.exponential.exp', 'exp', (['(4 * I * pi * y)'], {}), '(4 * I * pi * y)\n', (9636, 9652), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((9692, 9700), 'sympy.functions.special.gamma_functions.gamma', 
'gamma', (['y'], {}), '(y)\n', (9697, 9700), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((9747, 9768), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(5 * pi * I)'], {}), '(5 * pi * I)\n', (9756, 9768), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((10021, 10037), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (10031, 10037), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((10091, 10107), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (10101, 10107), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11222, 11245), 'sympy.core.numbers.Rational', 'Rational', (['(-22369)', '(20736)'], {}), '(-22369, 20736)\n', (11230, 11245), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((11835, 11850), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (11844, 11850), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11895, 11910), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (11904, 11910), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((11950, 11965), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', 'x'], {}), '(2, x)\n', (11959, 11965), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, 
uppergamma\n'), ((12008, 12023), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(2)'], {}), '(I, 2)\n', (12017, 12023), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12247, 12263), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n1', 'x'], {}), '(n1, x)\n', (12256, 12263), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12309, 12325), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n2', 'x'], {}), '(n2, x)\n', (12318, 12325), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12371, 12387), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n3', 'x'], {}), '(n3, x)\n', (12380, 12387), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12433, 12449), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n4', 'x'], {}), '(n4, x)\n', (12442, 12449), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12495, 12511), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['n5', 'x'], {}), '(n5, x)\n', (12504, 12511), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12546, 12559), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['n5'], {}), '(n5)\n', (12555, 12559), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((12590, 12609), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(7 * x)'], {}), '(3, 7 
* x)\n', (12599, 12609), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12651, 12666), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (12660, 12666), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12728, 12743), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', 'x'], {}), '(2, x)\n', (12737, 12743), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((12767, 12785), 'sympy.functions.combinatorial.numbers.harmonic', 'harmonic', (['(x - 1)', '(3)'], {}), '(x - 1, 3)\n', (12775, 12785), False, 'from sympy.functions.combinatorial.numbers import harmonic\n'), ((12790, 12797), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(3)'], {}), '(3)\n', (12794, 12797), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((12844, 12860), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['ni', 'x'], {}), '(ni, x)\n', (12853, 12860), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((13156, 13177), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(2 * I * pi)'], {}), '(2 * I * pi)\n', (13165, 13177), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((13330, 13351), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(2 * I * pi)'], {}), '(2 * I * pi)\n', (13339, 13351), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((13590, 13606), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-x)'], {}), '(0, -x)\n', (13599, 13606), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14376, 14392), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['pi', '(3)'], {}), '(pi, 3)\n', (14385, 14392), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14511, 14527), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', 'pi'], {}), '(I, pi)\n', (14520, 14527), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15012, 15038), 'sympy.core.symbol.Symbol', 'Symbol', (['"""a"""'], {'positive': '(True)'}), "('a', positive=True)\n", (15018, 15038), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((15040, 15066), 'sympy.core.symbol.Symbol', 'Symbol', (['"""b"""'], {'positive': '(True)'}), "('b', positive=True)\n", (15046, 15066), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((15174, 15189), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15183, 15189), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15238, 15257), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(2 * x)'], {}), '(0, 2 * x)\n', (15247, 15257), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15354, 15373), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(2 * x)'], {}), '(1, 2 * x)\n', (15363, 15373), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15403, 15418), 
'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (15412, 15418), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15423, 15447), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(S.Half + x)'], {}), '(1, S.Half + x)\n', (15432, 15447), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15461, 15476), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', 'x'], {}), '(2, x)\n', (15470, 15476), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15535, 15555), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(-1 + x)'], {}), '(0, -1 + x)\n', (15544, 15555), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15626, 15645), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(1 + x)'], {}), '(0, 1 + x)\n', (15635, 15645), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15711, 15730), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(2 + x)'], {}), '(0, 2 + x)\n', (15720, 15730), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15807, 15826), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(3 + x)'], {}), '(0, 3 + x)\n', (15816, 15826), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15915, 
15934), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(4 + x)'], {}), '(0, 4 + x)\n', (15924, 15934), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16035, 16054), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(1 + x)'], {}), '(1, 1 + x)\n', (16044, 16054), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16122, 16141), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(2 + x)'], {}), '(1, 2 + x)\n', (16131, 16141), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16192, 16207), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (16201, 16207), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16243, 16262), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(3 + x)'], {}), '(1, 3 + x)\n', (16252, 16262), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16379, 16398), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(4 + x)'], {}), '(1, 4 + x)\n', (16388, 16398), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16540, 16559), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(x + y)'], {}), '(0, x + y)\n', (16549, 16559), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), 
((16622, 16641), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(x + y)'], {}), '(1, x + y)\n', (16631, 16641), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16704, 16731), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(3 + 4 * x + y)'], {}), '(1, 3 + 4 * x + y)\n', (16713, 16731), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16882, 16909), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(3 + 4 * x + y)'], {}), '(3, 3 + 4 * x + y)\n', (16891, 16909), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((17060, 17087), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(4 * x + y + 1)'], {}), '(3, 4 * x + y + 1)\n', (17069, 17087), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18119, 18129), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {}), '(x)\n', (18126, 18129), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18175, 18185), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {}), '(x)\n', (18182, 18185), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18287, 18313), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (18294, 18313), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, 
polygamma, trigamma, uppergamma\n'), ((18522, 18548), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (18529, 18548), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18631, 18641), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {}), '(x)\n', (18638, 18641), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18690, 18704), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(2 * x)'], {}), '(2 * x)\n', (18697, 18704), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18809, 18824), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(-1 + x)'], {}), '(-1 + x)\n', (18816, 18824), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18895, 18909), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(1 + x)'], {}), '(1 + x)\n', (18902, 18909), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18975, 18989), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(2 + x)'], {}), '(2 + x)\n', (18982, 18989), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19066, 19080), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(3 + x)'], {}), '(3 + x)\n', (19073, 19080), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19169, 19183), 
'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(4 + x)'], {}), '(4 + x)\n', (19176, 19183), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19284, 19298), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['(x + y)'], {}), '(x + y)\n', (19291, 19298), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19570, 19597), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (19578, 19597), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19637, 19664), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (19645, 19664), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19757, 19784), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (19765, 19784), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19996, 20023), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {'evaluate': '(False)'}), '(x, evaluate=False)\n', (20004, 20023), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20106, 20121), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(2 * x)'], {}), '(2 * x)\n', (20114, 20121), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, 
trigamma, uppergamma\n'), ((20151, 20166), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (20160, 20166), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20217, 20232), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(1 + x)'], {}), '(1 + x)\n', (20225, 20232), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20300, 20315), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(2 + x)'], {}), '(2 + x)\n', (20308, 20315), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20366, 20381), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (20375, 20381), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20417, 20432), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(3 + x)'], {}), '(3 + x)\n', (20425, 20432), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20549, 20564), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(4 + x)'], {}), '(4 + x)\n', (20557, 20564), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20706, 20721), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(x + y)'], {}), '(x + y)\n', (20714, 20721), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20784, 20807), 
'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['(3 + 4 * x + y)'], {}), '(3 + 4 * x + y)\n', (20792, 20807), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21470, 21491), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(n / 2 + S.Half)'], {}), '(n / 2 + S.Half)\n', (21475, 21491), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21776, 21782), 'sympy.functions.elementary.exponential.log', 'log', (['(7)'], {}), '(7)\n', (21779, 21782), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21814, 21828), 'sympy.core.function.expand_func', 'expand_func', (['L'], {}), '(L)\n', (21825, 21828), False, 'from sympy.core.function import expand_func\n'), ((21958, 21964), 'sympy.functions.elementary.exponential.log', 'log', (['(7)'], {}), '(7)\n', (21961, 21964), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21996, 22010), 'sympy.core.function.expand_func', 'expand_func', (['L'], {}), '(L)\n', (22007, 22010), False, 'from sympy.core.function import expand_func\n'), ((22168, 22182), 'sympy.core.function.expand_func', 'expand_func', (['L'], {}), '(L)\n', (22179, 22182), False, 'from sympy.core.function import expand_func\n'), ((22316, 22322), 'sympy.functions.elementary.exponential.log', 'log', (['(4)'], {}), '(4)\n', (22319, 22322), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22343, 22357), 'sympy.core.function.expand_func', 'expand_func', (['L'], {}), '(L)\n', (22354, 22357), False, 'from sympy.core.function import expand_func\n'), ((22502, 22508), 'sympy.functions.elementary.exponential.log', 'log', (['(7)'], {}), '(7)\n', (22505, 22508), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22529, 
22543), 'sympy.core.function.expand_func', 'expand_func', (['L'], {}), '(L)\n', (22540, 22543), False, 'from sympy.core.function import expand_func\n'), ((22594, 22605), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (22602, 22605), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((22781, 22787), 'sympy.functions.elementary.exponential.log', 'log', (['x'], {}), '(x)\n', (22784, 22787), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((23055, 23066), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (23063, 23066), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23600, 23626), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'positive': '(True)'}), "('v', positive=True)\n", (23606, 23626), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((23664, 23686), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'zero': '(True)'}), "('v', zero=True)\n", (23670, 23686), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((23725, 23751), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'negative': '(True)'}), "('v', negative=True)\n", (23731, 23751), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((23790, 23819), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'nonpositive': '(True)'}), "('v', nonpositive=True)\n", (23796, 23819), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((23858, 23887), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'nonnegative': '(True)'}), "('v', nonnegative=True)\n", (23864, 23887), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((23925, 23952), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'imaginary': '(True)'}), "('v', imaginary=True)\n", (23931, 23952), False, 'from 
sympy.core.symbol import Dummy, Symbol\n'), ((23990, 24012), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {'real': '(True)'}), "('v', real=True)\n", (23996, 24012), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((24050, 24061), 'sympy.core.symbol.Symbol', 'Symbol', (['"""v"""'], {}), "('v')\n", (24056, 24061), False, 'from sympy.core.symbol import Dummy, Symbol\n'), ((24184, 24199), 'sympy.core.numbers.Rational', 'Rational', (['(-1)', '(2)'], {}), '(-1, 2)\n', (24192, 24199), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((24538, 24557), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(1 / x)'], {}), '(0, 1 / x)\n', (24547, 24557), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24630, 24649), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(1 / x)'], {}), '(1, 1 / x)\n', (24639, 24649), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24717, 24736), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(1 / x)'], {}), '(3, 1 / x)\n', (24726, 24736), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26511, 26534), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x + (1 - _k) / 2)'], {}), '(x + (1 - _k) / 2)\n', (26516, 26534), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26569, 26585), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['x', 'p'], {}), '(x, p)\n', (26579, 26585), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, 
uppergamma\n'), ((26829, 26845), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['x', 'p'], {}), '(x, p)\n', (26839, 26845), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26997, 27014), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['oo', '(1)'], {}), '(oo, 1)\n', (27007, 27014), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27421, 27437), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['x', '(1)'], {}), '(x, 1)\n', (27431, 27437), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27485, 27501), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['x', '(2)'], {}), '(x, 2)\n', (27495, 27501), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27559, 27574), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (27568, 27574), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27613, 27637), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(x - S.Half)'], {}), '(0, x - S.Half)\n', (27622, 27637), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27650, 27670), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(x - 1)', '(1)'], {}), '(x - 1, 1)\n', (27660, 27670), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, 
uppergamma\n'), ((27720, 27740), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(x + 2)', '(1)'], {}), '(x + 2, 1)\n', (27730, 27740), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27813, 27833), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(x - 1)', '(2)'], {}), '(x - 1, 2)\n', (27823, 27833), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27883, 27900), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x + S.Half)'], {}), '(x + S.Half)\n', (27888, 27900), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27938, 27952), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (27946, 27952), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((27965, 27985), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['(x - 1)', '(3)'], {}), '(x - 1, 3)\n', (27975, 27985), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28048, 28065), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x + S.Half)'], {}), '(x + S.Half)\n', (28053, 28065), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28125, 28139), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (28133, 28139), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((28153, 28169), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['n', '(1)'], {}), '(n, 1)\n', (28163, 28169), False, 'from sympy.functions.special.gamma_functions 
import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28220, 28236), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['n', '(2)'], {}), '(n, 2)\n', (28230, 28236), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28259, 28267), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (28263, 28267), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((28336, 28352), 'sympy.functions.special.gamma_functions.multigamma', 'multigamma', (['n', '(3)'], {}), '(n, 3)\n', (28346, 28352), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28491, 28506), 'sympy.core.numbers.Rational', 'Rational', (['(-1)', '(2)'], {}), '(-1, 2)\n', (28499, 28506), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((28964, 28972), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (28969, 28972), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((29010, 29022), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(2 + x)'], {}), '(2 + x)\n', (29015, 29022), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((2141, 2157), 'sympy.core.numbers.Rational', 'Rational', (['(-11)', '(8)'], {}), '(-11, 8)\n', (2149, 2157), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2245, 2261), 'sympy.core.numbers.Rational', 'Rational', (['(-10)', '(3)'], {}), '(-10, 3)\n', (2253, 2261), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2350, 2365), 'sympy.core.numbers.Rational', 'Rational', (['(14)', '(3)'], 
{}), '(14, 3)\n', (2358, 2365), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2454, 2469), 'sympy.core.numbers.Rational', 'Rational', (['(17)', '(7)'], {}), '(17, 7)\n', (2462, 2469), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2557, 2572), 'sympy.core.numbers.Rational', 'Rational', (['(19)', '(8)'], {}), '(19, 8)\n', (2565, 2572), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((2927, 2941), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (2935, 2941), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((3127, 3141), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (3135, 3141), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((4971, 4979), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (4976, 4979), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((4980, 4990), 'sympy.functions.special.gamma_functions.digamma', 'digamma', (['x'], {}), '(x)\n', (4987, 4990), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((4993, 5009), 'sympy.functions.special.gamma_functions.uppergamma', 'uppergamma', (['x', 'y'], {}), '(x, y)\n', (5003, 5009), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((5010, 5016), 'sympy.functions.elementary.exponential.log', 'log', (['y'], {}), '(y)\n', (5013, 5016), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((5823, 5840), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(pi * I)'], {}), '(pi * I)\n', (5832, 5840), False, 'from sympy.functions.elementary.exponential import exp, 
exp_polar, log\n'), ((5917, 5934), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(I * pi)'], {}), '(I * pi)\n', (5926, 5934), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((7466, 7473), 'sympy.functions.elementary.exponential.exp', 'exp', (['(-2)'], {}), '(-2)\n', (7469, 7473), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((9706, 9725), 'sympy.functions.elementary.exponential.exp', 'exp', (['(4 * pi * I * y)'], {}), '(4 * pi * I * y)\n', (9709, 9725), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((9798, 9815), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(I * pi)'], {}), '(I * pi)\n', (9807, 9815), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((12990, 13002), 'sympy.functions.special.zeta_functions.zeta', 'zeta', (['(ni + 1)'], {}), '(ni + 1)\n', (12994, 13002), False, 'from sympy.functions.special.zeta_functions import zeta\n'), ((13297, 13318), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(2 * I * pi)'], {}), '(2 * I * pi)\n', (13306, 13318), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((14435, 14451), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', 'pi'], {}), '(I, pi)\n', (14444, 14451), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14489, 14505), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', 'pi'], {}), '(I, pi)\n', (14498, 14505), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14547, 14562), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(1)'], {}), '(I, 1)\n', (14556, 14562), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14599, 14614), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(1)'], {}), '(I, 1)\n', (14608, 14614), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14646, 14661), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(1)'], {}), '(I, 1)\n', (14655, 14661), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14704, 14719), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(4)'], {}), '(I, 4)\n', (14713, 14719), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14870, 14885), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(4)'], {}), '(I, 4)\n', (14879, 14885), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15287, 15302), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15296, 15302), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15307, 15331), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', '(S.Half + x)'], {}), '(0, S.Half + x)\n', (15316, 15331), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15858, 15873), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15867, 15873), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16313, 16328), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (16322, 16328), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16780, 16803), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(y + 4 * x)'], {}), '(1, y + 4 * x)\n', (16789, 16803), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16958, 16981), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(3)', '(y + 4 * x)'], {}), '(3, y + 4 * x)\n', (16967, 16981), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18734, 18749), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (18743, 18749), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19112, 19127), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (19121, 19127), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20483, 20498), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (20492, 20498), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20856, 20879), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', '(y + 4 * x)'], {}), '(1, y + 4 * x)\n', (20865, 20879), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21054, 21065), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (21062, 21065), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21461, 21469), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['n'], {}), '(n)\n', (21466, 21469), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21767, 21773), 'sympy.functions.elementary.exponential.log', 'log', (['(4)'], {}), '(4)\n', (21770, 21773), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21949, 21955), 'sympy.functions.elementary.exponential.log', 'log', (['(3)'], {}), '(3)\n', (21952, 21955), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22104, 22110), 'sympy.functions.elementary.exponential.log', 'log', (['(2)'], {}), '(2)\n', (22107, 22110), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22122, 22136), 'sympy.core.numbers.Rational', 'Rational', (['(2)', '(7)'], {}), '(2, 7)\n', (22130, 22136), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22278, 22284), 'sympy.functions.elementary.exponential.log', 'log', (['(5)'], {}), '(5)\n', (22281, 22284), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22296, 22310), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (22304, 22310), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22464, 22470), 'sympy.functions.elementary.exponential.log', 'log', (['(5)'], {}), '(5)\n', (22467, 22470), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), 
((22482, 22496), 'sympy.core.numbers.Rational', 'Rational', (['(2)', '(7)'], {}), '(2, 7)\n', (22490, 22496), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22668, 22674), 'sympy.functions.elementary.trigonometric.cos', 'cos', (['x'], {}), '(x)\n', (22671, 22674), False, 'from sympy.functions.elementary.trigonometric import cos, sin\n'), ((23117, 23128), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (23125, 23128), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((25861, 25875), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (25869, 25875), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((25946, 25960), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (25954, 25960), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((27531, 27548), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x - S.Half)'], {}), '(x - S.Half)\n', (27536, 27548), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27595, 27612), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['(x - S.Half)'], {}), '(x - S.Half)\n', (27600, 27612), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27855, 27863), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (27859, 27863), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((27864, 27872), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (27869, 27872), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, 
uppergamma\n'), ((28404, 28420), 'sympy.functions.combinatorial.factorials.factorial', 'factorial', (['(n - 2)'], {}), '(n - 2)\n', (28413, 28420), False, 'from sympy.functions.combinatorial.factorials import factorial\n'), ((29067, 29073), 'sympy.functions.elementary.trigonometric.cos', 'cos', (['x'], {}), '(x)\n', (29070, 29073), False, 'from sympy.functions.elementary.trigonometric import cos, sin\n'), ((29119, 29125), 'sympy.functions.elementary.trigonometric.sin', 'sin', (['x'], {}), '(x)\n', (29122, 29125), False, 'from sympy.functions.elementary.trigonometric import cos, sin\n'), ((3329, 3346), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(I * pi)'], {}), '(I * pi)\n', (3338, 3346), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((3391, 3408), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(I * pi)'], {}), '(I * pi)\n', (3400, 3408), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((6208, 6212), 'sympy.core.singleton.S', 'S', (['(1)'], {}), '(1)\n', (6209, 6212), False, 'from sympy.core.singleton import S\n'), ((6622, 6626), 'sympy.core.singleton.S', 'S', (['(1)'], {}), '(1)\n', (6623, 6626), False, 'from sympy.core.singleton import S\n'), ((7051, 7055), 'sympy.core.singleton.S', 'S', (['(1)'], {}), '(1)\n', (7052, 7055), False, 'from sympy.core.singleton import S\n'), ((9663, 9680), 'sympy.functions.elementary.exponential.exp_polar', 'exp_polar', (['(pi * I)'], {}), '(pi * I)\n', (9672, 9680), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((12899, 12922), 'sympy.functions.combinatorial.numbers.harmonic', 'harmonic', (['(x - 1)', '(ni + 1)'], {}), '(x - 1, ni + 1)\n', (12907, 12922), False, 'from sympy.functions.combinatorial.numbers import harmonic\n'), ((14623, 14638), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(1)'], {}), '(I, 1)\n', (14632, 14638), False, 'from 
sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14670, 14685), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(1)'], {}), '(I, 1)\n', (14679, 14685), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14824, 14839), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(4)'], {}), '(I, 4)\n', (14833, 14839), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((15966, 15981), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (15975, 15981), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((16449, 16464), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (16458, 16464), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19215, 19230), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(0)', 'x'], {}), '(0, x)\n', (19224, 19230), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((19696, 19707), 'sympy.functions.special.gamma_functions.trigamma', 'trigamma', (['x'], {}), '(x)\n', (19704, 19707), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((20184, 20198), 'sympy.core.numbers.Rational', 'Rational', (['(1)', '(2)'], {}), '(1, 2)\n', (20192, 20198), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((20615, 20630), 
'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(1)', 'x'], {}), '(1, x)\n', (20624, 20630), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((21452, 21460), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (21456, 21460), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((22095, 22101), 'sympy.functions.elementary.exponential.log', 'log', (['(7)'], {}), '(7)\n', (22098, 22101), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22269, 22275), 'sympy.functions.elementary.exponential.log', 'log', (['(9)'], {}), '(9)\n', (22272, 22275), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22454, 22461), 'sympy.functions.elementary.exponential.log', 'log', (['(12)'], {}), '(12)\n', (22457, 22461), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22725, 22736), 'sympy.functions.elementary.exponential.log', 'log', (['(x / pi)'], {}), '(x / pi)\n', (22728, 22736), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((23263, 23278), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(4)', '(1)'], {}), '(4, 1)\n', (23272, 23278), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((24586, 24592), 'sympy.functions.elementary.exponential.log', 'log', (['x'], {}), '(x)\n', (24589, 24592), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((26630, 26642), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['p'], {}), '(p)\n', (26639, 26642), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((26660, 26672), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], 
{}), '(x)\n', (26669, 26672), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((26898, 26910), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['x'], {}), '(x)\n', (26907, 26910), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((27513, 27521), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (27517, 27521), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((27522, 27530), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (27527, 27530), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27577, 27585), 'sympy.functions.elementary.miscellaneous.sqrt', 'sqrt', (['pi'], {}), '(pi)\n', (27581, 27585), False, 'from sympy.functions.elementary.miscellaneous import sqrt\n'), ((27586, 27594), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (27591, 27594), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((27920, 27935), 'sympy.core.numbers.Rational', 'Rational', (['(11)', '(4)'], {}), '(11, 4)\n', (27928, 27935), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((28011, 28025), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (28019, 28025), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((28026, 28034), 'sympy.functions.special.gamma_functions.gamma', 'gamma', (['x'], {}), '(x)\n', (28031, 28034), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((28107, 28122), 'sympy.core.numbers.Rational', 'Rational', (['(31)', '(4)'], {}), '(31, 4)\n', (28115, 28122), False, 'from sympy.core.numbers import I, 
Rational, nan, oo, pi, zoo\n'), ((28292, 28306), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (28300, 28306), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((28379, 28393), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (28387, 28393), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((28435, 28449), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(2)'], {}), '(3, 2)\n', (28443, 28449), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((7486, 7492), 'sympy.functions.elementary.exponential.log', 'log', (['(2)'], {}), '(2)\n', (7489, 7492), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((8103, 8108), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (8104, 8108), False, 'from sympy.core.singleton import S\n'), ((8130, 8135), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (8131, 8135), False, 'from sympy.core.singleton import S\n'), ((10325, 10330), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (10326, 10330), False, 'from sympy.core.singleton import S\n'), ((10352, 10357), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (10353, 10357), False, 'from sympy.core.singleton import S\n'), ((14848, 14863), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(4)'], {}), '(I, 4)\n', (14857, 14863), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((18767, 18781), 'sympy.core.numbers.Rational', 'Rational', (['(1)', '(2)'], {}), '(1, 2)\n', (18775, 18781), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((21731, 21737), 'sympy.functions.elementary.exponential.log', 'log', (['(3)'], {}), '(3)\n', (21734, 21737), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21749, 21763), 
'sympy.core.numbers.Rational', 'Rational', (['(1)', '(3)'], {}), '(1, 3)\n', (21757, 21763), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((21913, 21919), 'sympy.functions.elementary.exponential.log', 'log', (['(4)'], {}), '(4)\n', (21916, 21919), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((21931, 21945), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(4)'], {}), '(3, 4)\n', (21939, 21945), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((22444, 22451), 'sympy.functions.elementary.exponential.log', 'log', (['(19)'], {}), '(19)\n', (22447, 22451), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22658, 22664), 'sympy.functions.elementary.trigonometric.sin', 'sin', (['x'], {}), '(x)\n', (22661, 22664), False, 'from sympy.functions.elementary.trigonometric import cos, sin\n'), ((22744, 22754), 'sympy.functions.elementary.exponential.log', 'log', (['(2 * x)'], {}), '(2 * x)\n', (22747, 22754), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22924, 22935), 'sympy.functions.elementary.exponential.log', 'log', (['(2 * pi)'], {}), '(2 * pi)\n', (22927, 22935), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((24336, 24351), 'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['(1 / x)'], {}), '(1 / x)\n', (24344, 24351), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26081, 26095), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (26089, 26095), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((26158, 26172), 'sympy.core.numbers.Rational', 'Rational', (['(3)', '(8)'], {}), '(3, 8)\n', (26166, 26172), False, 'from sympy.core.numbers import I, Rational, nan, oo, pi, zoo\n'), ((26602, 26614), 
'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['p'], {}), '(p)\n', (26611, 26614), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((8201, 8206), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (8202, 8206), False, 'from sympy.core.singleton import S\n'), ((8229, 8234), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (8230, 8234), False, 'from sympy.core.singleton import S\n'), ((10423, 10428), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (10424, 10428), False, 'from sympy.core.singleton import S\n'), ((10451, 10456), 'sympy.core.singleton.S', 'S', (['(77)'], {}), '(77)\n', (10452, 10456), False, 'from sympy.core.singleton import S\n'), ((14760, 14775), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(4)'], {}), '(I, 4)\n', (14769, 14775), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((14785, 14800), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['I', '(4)'], {}), '(I, 4)\n', (14794, 14800), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((22703, 22713), 'sympy.functions.elementary.exponential.log', 'log', (['(2 * x)'], {}), '(2 * x)\n', (22706, 22713), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((22907, 22917), 'sympy.functions.elementary.exponential.log', 'log', (['(1 / x)'], {}), '(1 / x)\n', (22910, 22917), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n'), ((23211, 23226), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', '(1)'], {}), '(2, 1)\n', (23220, 23226), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23310, 23321), 
'sympy.functions.special.gamma_functions.loggamma', 'loggamma', (['x'], {}), '(x)\n', (23318, 23321), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((26678, 26691), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['_k'], {}), '(_k)\n', (26687, 26691), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((26916, 26929), 'sympy.functions.elementary.complexes.conjugate', 'conjugate', (['_k'], {}), '(_k)\n', (26925, 26929), False, 'from sympy.functions.elementary.complexes import Abs, conjugate, im, re\n'), ((4233, 4248), 'sympy.functions.special.gamma_functions.polygamma', 'polygamma', (['(2)', '(1)'], {}), '(2, 1)\n', (4242, 4248), False, 'from sympy.functions.special.gamma_functions import digamma, gamma, loggamma, lowergamma, multigamma, polygamma, trigamma, uppergamma\n'), ((23166, 23172), 'sympy.functions.elementary.exponential.log', 'log', (['x'], {}), '(x)\n', (23169, 23172), False, 'from sympy.functions.elementary.exponential import exp, exp_polar, log\n')] |
import importlib
def cli():
    """Register the experiment modules, then dispatch to zookeeper's CLI.

    The larq_zoo experiment modules are imported purely for their side
    effects (they register themselves with zookeeper on import).
    """
    experiment_modules = (
        "larq_zoo.training.basic_experiments",
        "larq_zoo.training.multi_stage_experiments",
    )
    for module_name in experiment_modules:
        importlib.import_module(module_name)
    # Import under an alias so the local name does not shadow this function.
    from zookeeper import cli as zookeeper_cli
    zookeeper_cli()
# Script entry point: run the CLI when this module is executed directly.
if __name__ == "__main__":
    cli()
| [
"zookeeper.cli",
"importlib.import_module"
] | [((253, 258), 'zookeeper.cli', 'cli', ([], {}), '()\n', (256, 258), False, 'from zookeeper import cli\n'), ((292, 297), 'zookeeper.cli', 'cli', ([], {}), '()\n', (295, 297), False, 'from zookeeper import cli\n'), ((175, 216), 'importlib.import_module', 'importlib.import_module', (['experiments_file'], {}), '(experiments_file)\n', (198, 216), False, 'import importlib\n')] |
"""
Module responsible for translating g2p data into GA4GH native
objects.
"""
import re
import bisect
import rdflib
from rdflib import RDF
import candig.server.exceptions as exceptions
import candig.server.datamodel.sequence_annotations as sequence_annotations
import candig.server.datamodel.genotype_phenotype as g2p
import candig.schemas.protocol as protocol
# annotation keys
# Fully qualified RDF predicate/class URIs used when reading annotations
# out of the triple store.
TYPE = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'
LABEL = 'http://www.w3.org/2000/01/rdf-schema#label'
# OBO "has quality" relation (BFO_0000159).
HAS_QUALITY = 'http://purl.obolibrary.org/obo/BFO_0000159'
# FALDO vocabulary terms describing genomic feature locations.
FALDO_LOCATION = "http://biohackathon.org/resource/faldo#location"
FALDO_BEGIN = "http://biohackathon.org/resource/faldo#begin"
FALDO_END = "http://biohackathon.org/resource/faldo#end"
FALDO_POSITION = "http://biohackathon.org/resource/faldo#position"
FALDO_REFERENCE = "http://biohackathon.org/resource/faldo#reference"
# OBO "member of" relation (RO_0002350).
MEMBER_OF = 'http://purl.obolibrary.org/obo/RO_0002350'
# OBAN association vocabulary linking features to phenotype evidence.
ASSOCIATION = "http://purl.org/oban/association"
HAS_SUBJECT = "http://purl.org/oban/association_has_subject"
class PhenotypeAssociationFeatureSet(
        g2p.G2PUtility, sequence_annotations.Gff3DbFeatureSet):
    """
    An rdf object store. The cancer genome database
    [Clinical Genomics Knowledge Base]
    (http://nif-crawler.neuinfo.org/monarch/ttl/cgd.ttl),
    published by the Monarch project, was the source of Evidence.

    Features live in an RDF graph and are retrieved with SPARQL queries;
    their genomic coordinates (FALDO ontology) are flattened into an
    in-memory cache by _initializeLocationCache at load time.
    """

    def __init__(self, parentContainer, localId):
        super(PhenotypeAssociationFeatureSet, self).__init__(
            parentContainer, localId)

    # mimic featureset
    def populateFromRow(self, featureSetRecord):
        """
        Populates the instance variables of this FeatureSet from the specified
        DB row.
        """
        self._dbFilePath = featureSetRecord.dataurl
        self.setAttributesJson(featureSetRecord.attributes)
        self.populateFromFile(self._dbFilePath)

    def populateFromFile(self, dataUrl):
        """
        Populates the instance variables of this FeatureSet from the specified
        data URL.
        Initialize dataset, using the passed dict of sources
        [{source,format}] see rdflib.parse() for more
        If path is set, this backend will load itself
        """
        self._dbFilePath = dataUrl
        # initialize graph
        self._rdfGraph = rdflib.ConjunctiveGraph()
        # save the path
        self._dataUrl = dataUrl
        self._scanDataFiles(self._dataUrl, ['*.ttl'])
        # extract version from the owl:versionInfo triple, when present
        cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl")
        versionInfo = rdflib.URIRef(
            'http://www.w3.org/2002/07/owl#versionInfo')
        self._version = None
        for _, _, obj in self._rdfGraph.triples((cgdTTL, versionInfo, None)):
            self._version = obj.toPython()
        # setup location cache
        self._initializeLocationCache()

    # mimic featureset
    def getFeature(self, compoundId):
        """
        find a feature and return candig representation, use compoundId as
        featureId
        """
        feature = self._getFeatureById(compoundId.featureId)
        feature.id = str(compoundId)
        return feature

    def _getFeatureById(self, featureId):
        """
        find a feature and return candig representation, use 'native' id as
        featureId
        """
        featureRef = rdflib.URIRef(featureId)
        featureDetails = self._detailTuples([featureRef])
        # collect predicate -> [objects] for this feature
        feature = {}
        for detail in featureDetails:
            feature[detail['predicate']] = []
        for detail in featureDetails:
            feature[detail['predicate']].append(detail['object'])
        pbFeature = protocol.Feature()
        term = protocol.OntologyTerm()
        # Schema for feature only supports one type of `type`
        # here we default to first OBO defined
        for featureType in sorted(feature[TYPE]):
            if "obolibrary" in featureType:
                term.term = self._featureTypeLabel(featureType)
                term.term_id = featureType
                pbFeature.feature_type.MergeFrom(term)
                break
        pbFeature.id = featureId
        # Schema for feature only supports one type of `name` `symbol`
        # here we default to shortest for symbol and longest for name
        feature[LABEL].sort(key=len)
        pbFeature.gene_symbol = feature[LABEL][0]
        pbFeature.name = feature[LABEL][-1]
        pbFeature.attributes.MergeFrom(protocol.Attributes())
        for key in feature:
            for val in sorted(feature[key]):
                pbFeature.attributes.attr[key].values.add().string_value = val
        # attach genomic coordinates when the location cache knows them
        if featureId in self._locationMap:
            location = self._locationMap[featureId]
            pbFeature.reference_name = location["chromosome"]
            pbFeature.start = location["begin"]
            pbFeature.end = location["end"]
        return pbFeature

    # mimic featureset
    def getFeatures(self, referenceName=None, start=None, end=None,
                    startIndex=None, maxResults=None,
                    featureTypes=None, parentId=None,
                    name=None, geneSymbol=None, numFeatures=10):
        """
        Generator over features matching the given filters; yields protocol
        Feature objects, skipping the first startIndex matches.
        """
        # query to do search
        query = self._filterSearchFeaturesRequest(
            referenceName, geneSymbol, name, start, end)
        featuresResults = self._rdfGraph.query(query)
        featureIds = set()
        try:
            for row in featuresResults.bindings:
                featureIds.add(row['feature'].toPython())
        except re.error:
            raise exceptions.BadFeatureSetSearchRequestRegularExpression()
        if startIndex:
            startPosition = int(startIndex)
        else:
            startPosition = 0
        for i, featureId in enumerate(featureIds):
            if i < startPosition:
                continue
            feature = self._getFeatureById(featureId)
            # _getFeatureById returns native id, cast to compound
            feature.id = self.getCompoundIdForFeatureId(feature.id)
            yield feature

    def _baseQuery(self):
        # SPARQL skeleton; `#%FILTER%` is replaced with the concrete filter
        # clause by _filterSearchFeaturesRequest.
        return """
            PREFIX OBAN: <http://purl.org/oban/>
            PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
            SELECT DISTINCT
                ?feature
                ?feature_label
            WHERE {
                ?association a OBAN:association .
                ?association OBAN:association_has_subject ?feature .
                ?feature rdfs:label ?feature_label .
                #%FILTER%
            }
            ORDER BY ?feature
            """

    def _filterSearchFeaturesRequest(self, reference_name, gene_symbol, name,
                                     start, end):
        """
        formulate a sparql query string based on parameters
        """
        query = self._baseQuery()
        filters = []
        location = self._findLocation(reference_name, start, end)
        if location:
            filters.append("?feature = <{}>".format(location))
        if gene_symbol:
            # FIX: the symbol was previously never interpolated, so the regex
            # matched the literal string "{}" instead of the gene symbol.
            filters.append('regex(?feature_label, "{}")'.format(gene_symbol))
        if name:
            filters.append(
                'regex(?feature_label, "{}")'.format(name))
        # apply filters (local renamed from `filter`, which shadowed the builtin)
        filter_clause = "FILTER ({})".format(' && '.join(filters))
        if len(filters) == 0:
            filter_clause = ""
        query = query.replace("#%FILTER%", filter_clause)
        return query

    def _findLocation(self, reference_name, start, end):
        """
        return a location key form the locationMap
        """
        try:
            # TODO - sequence_annotations does not have build?
            return self._locationMap['hg19'][reference_name][start][end]
        except (KeyError, TypeError):
            # narrowed from a bare `except:` -- a missing/invalid key simply
            # means "no cached location"; other errors should propagate
            return None

    def _initializeLocationCache(self):
        """
        CGD uses Faldo ontology for locations, it's a bit complicated.
        This function sets up an in memory cache of all locations, which
        can be queried via:
        locationMap[build][chromosome][begin][end] = location["_id"]
        """
        # cache of locations
        self._locationMap = {}
        locationMap = self._locationMap
        triples = self._rdfGraph.triples
        Ref = rdflib.URIRef
        # gather every OBAN association subject ...
        associations = []
        for subj, _, _ in triples((None, RDF.type, Ref(ASSOCIATION))):
            associations.append(subj.toPython())
        # ... then the subject (feature/location id) of each association
        locationIds = []
        for association in associations:
            for _, _, obj in triples((Ref(association),
                                      Ref(HAS_SUBJECT), None)):
                locationIds.append(obj.toPython())
        locations = []
        for _id in locationIds:
            location = {}
            location["_id"] = _id
            # collect every predicate of the location, object lists kept sorted
            for subj, predicate, obj in triples((Ref(location["_id"]),
                                                 None, None)):
                if not predicate.toPython() in location:
                    location[predicate.toPython()] = []
                bisect.insort(location[predicate.toPython()], obj.toPython())
            if FALDO_LOCATION in location:
                locations.append(location)
        for location in locations:
            for _id in location[FALDO_LOCATION]:
                # lookup faldo region, ensure positions are sorted
                faldoLocation = {}
                faldoLocation["_id"] = _id
                for subj, predicate, obj in triples((Ref(faldoLocation["_id"]),
                                                     None, None)):
                    if not predicate.toPython() in faldoLocation:
                        faldoLocation[predicate.toPython()] = []
                    bisect.insort(faldoLocation[predicate.toPython()],
                                  obj.toPython())
                faldoBegins = []
                for _id in faldoLocation[FALDO_BEGIN]:
                    faldoBegin = {}
                    faldoBegin["_id"] = _id
                    for subj, predicate, obj in triples(
                            (Ref(faldoBegin["_id"]),
                                None, None)):
                        faldoBegin[predicate.toPython()] = obj.toPython()
                    faldoBegins.append(faldoBegin)
                faldoReferences = []
                # NOTE(review): this loop iterates FALDO_BEGIN and reads the
                # reference off the *last* faldoBegin from the loop above --
                # apparently relying on all begins of a location sharing one
                # reference in CGD data; confirm against the source TTL.
                for _id in faldoLocation[FALDO_BEGIN]:
                    faldoReference = {}
                    faldoReference["_id"] = faldoBegin[FALDO_REFERENCE]
                    for subj, predicate, obj in triples(
                            (Ref(faldoReference["_id"]),
                                None, None)):
                        faldoReference[predicate.toPython()] = obj.toPython()
                    faldoReferences.append(faldoReference)
                faldoEnds = []
                for _id in faldoLocation[FALDO_END]:
                    faldoEnd = {}
                    faldoEnd["_id"] = _id
                    for subj, predicate, obj in triples((Ref(faldoEnd["_id"]),
                                                         None, None)):
                        faldoEnd[predicate.toPython()] = obj.toPython()
                    faldoEnds.append(faldoEnd)
                for idx, faldoReference in enumerate(faldoReferences):
                    if MEMBER_OF in faldoReference:
                        build = faldoReference[MEMBER_OF].split('/')[-1]
                        chromosome = faldoReference[LABEL].split(' ')[0]
                        begin = faldoBegins[idx][FALDO_POSITION]
                        end = faldoEnds[idx][FALDO_POSITION]
                        # create nested dict levels on demand
                        if build not in locationMap:
                            locationMap[build] = {}
                        if chromosome not in locationMap[build]:
                            locationMap[build][chromosome] = {}
                        if begin not in locationMap[build][chromosome]:
                            locationMap[build][chromosome][begin] = {}
                        if end not in locationMap[build][chromosome][begin]:
                            locationMap[build][chromosome][begin][end] = {}
                        locationMap[build][chromosome][begin][end] = \
                            location["_id"]
                        # reverse mapping: feature id -> coordinates, used by
                        # _getFeatureById
                        locationMap[location["_id"]] = {
                            "build": build,
                            "chromosome": chromosome,
                            "begin": begin,
                            "end": end,
                        }
| [
"candig.server.exceptions.BadFeatureSetSearchRequestRegularExpression",
"candig.schemas.protocol.Attributes",
"candig.schemas.protocol.Feature",
"rdflib.ConjunctiveGraph",
"candig.schemas.protocol.OntologyTerm",
"rdflib.URIRef"
] | [((2298, 2323), 'rdflib.ConjunctiveGraph', 'rdflib.ConjunctiveGraph', ([], {}), '()\n', (2321, 2323), False, 'import rdflib\n'), ((2478, 2540), 'rdflib.URIRef', 'rdflib.URIRef', (['"""http://data.monarchinitiative.org/ttl/cgd.ttl"""'], {}), "('http://data.monarchinitiative.org/ttl/cgd.ttl')\n", (2491, 2540), False, 'import rdflib\n'), ((2563, 2621), 'rdflib.URIRef', 'rdflib.URIRef', (['"""http://www.w3.org/2002/07/owl#versionInfo"""'], {}), "('http://www.w3.org/2002/07/owl#versionInfo')\n", (2576, 2621), False, 'import rdflib\n'), ((3339, 3363), 'rdflib.URIRef', 'rdflib.URIRef', (['featureId'], {}), '(featureId)\n', (3352, 3363), False, 'import rdflib\n'), ((3653, 3671), 'candig.schemas.protocol.Feature', 'protocol.Feature', ([], {}), '()\n', (3669, 3671), True, 'import candig.schemas.protocol as protocol\n'), ((3688, 3711), 'candig.schemas.protocol.OntologyTerm', 'protocol.OntologyTerm', ([], {}), '()\n', (3709, 3711), True, 'import candig.schemas.protocol as protocol\n'), ((4445, 4466), 'candig.schemas.protocol.Attributes', 'protocol.Attributes', ([], {}), '()\n', (4464, 4466), True, 'import candig.schemas.protocol as protocol\n'), ((5543, 5599), 'candig.server.exceptions.BadFeatureSetSearchRequestRegularExpression', 'exceptions.BadFeatureSetSearchRequestRegularExpression', ([], {}), '()\n', (5597, 5599), True, 'import candig.server.exceptions as exceptions\n')] |
import numpy as np
# import os
# current_directory = os.path.dirname(os.path.abspath(__file__)).replace('\\','/')
# from ctypes import *
# bro = cdll.LoadLibrary(current_directory+"/broken.so")
# bro.broken_frame.argtypes = [np.ctypeslib.ndpointer(dtype=np.int16, ndim=1, flags="C_CONTIGUOUS"),
# c_int,
# np.ctypeslib.ndpointer(dtype=np.float, ndim=1), #
# c_int]
# bro.broken_frame.restype = c_int
def detect_broken_frame(wdata, framerate):
    '''
    Detect "broken" (suddenly silent) frames in an audio signal.

    The signal is scanned in non-overlapping 5 ms windows.  For each window
    the log10 of the summed absolute amplitude is taken.  A broken frame is
    a sudden drop of at least AMP_THRESHOLD (log10 units) relative to both
    preceding windows, followed within 400 windows (~2 s) by a matching
    rise; the start time of the drop is recorded.

    Parameters
    ----------
    wdata: wave data Type:[array]
    framerate: sample rate. Type:[int]

    Returns
    ----------
    bf: list of broken-frame start times in seconds (rounded to 1 ms), or
        the int 0 when none were found (kept for backward compatibility
        with existing callers).
    '''
    # frame length: 5ms
    FRAME_LENGTH = 0.005
    AMP_THRESHOLD = 4  # minimum log10-amplitude jump that counts as an edge
    w = int(framerate * FRAME_LENGTH)  # samples per window
    up_edge = False    # True while inside a suspected broken region
    amp0 = amp1 = 0    # log-amplitudes of the two previous windows
    bf = []
    bft = 0.0          # start time of the current candidate region
    n = 0              # windows elapsed since the drop was seen
    # // and range() keep this working on Python 3 (was xrange / true division)
    for i in range(len(wdata) // w):
        tem = np.sum(np.abs(wdata[i * w:(i + 1) * w]))
        if tem != 0:
            amp = np.log10(tem)  # log amplitude of the current window
        else:
            amp = 0
        if up_edge is False:
            # Drop detection: both previous windows much louder than current.
            if (amp1 - amp >= AMP_THRESHOLD) and (amp0 - amp >= AMP_THRESHOLD):
                bft = round((i * w) / float(framerate), 3)
                up_edge = True
                n = 0
        else:
            n += 1
            # Recovery detection: current and previous windows much louder
            # than the window before them.
            if (amp1 - amp0 >= AMP_THRESHOLD) and (amp - amp0 >= AMP_THRESHOLD):
                up_edge = False
                n = 0
                bf.append(bft)
            # if we saw a drop but no recovery within 400 windows (~2 s),
            # reset the flag and keep scanning
            elif n % 400 == 0:
                n = 0
                up_edge = False
        # Slide the two-window amplitude history.
        amp0 = amp1
        amp1 = amp
    if len(bf) == 0:
        return 0
    else:
        return bf
"numpy.abs",
"numpy.log10"
] | [((1532, 1564), 'numpy.abs', 'np.abs', (['wdata[i * w:(i + 1) * w]'], {}), '(wdata[i * w:(i + 1) * w])\n', (1538, 1564), True, 'import numpy as np\n'), ((1584, 1597), 'numpy.log10', 'np.log10', (['tem'], {}), '(tem)\n', (1592, 1597), True, 'import numpy as np\n')] |
import torch
from typing import Sequence
from torch.utils.data import (DataLoader, RandomSampler, SequentialSampler, TensorDataset)
import utils
from utils import CQAExample, SMExample, NLIExample
from utils import truncate_seq_pair, detok_batch
class T5Input:
    """
    Container for the tensors fed to a T5 model: encoder ids/mask, decoder
    ids/mask/labels, and optional choice labels / context / explanation ids.
    Attribute names mirror the constructor arguments one-to-one.
    """

    def __init__(self, encoder_inputs, encoder_masks, decoder_inputs, decoder_masks, decoder_labels, choice_labels=None,
                 context_ids=None, explanation_ids=None):
        self.encoder_inputs = encoder_inputs
        self.encoder_masks = encoder_masks
        self.decoder_inputs = decoder_inputs
        self.decoder_masks = decoder_masks
        self.decoder_labels = decoder_labels
        self.choice_labels = choice_labels
        self.context_ids = context_ids
        self.explanation_ids = explanation_ids

    def to_device(self, device):
        """Move every non-None tensor attribute to *device*, in place."""
        for name in list(vars(self)):
            tensor = getattr(self, name)
            if tensor is not None:
                setattr(self, name, tensor.to(device))
class T5Output:
    """
    Bundle of values produced by a T5 forward pass: encoder hidden states,
    the loss, decoder logits, and optional decoded predictions / metrics.
    Attribute names mirror the constructor arguments one-to-one.
    """

    def __init__(self, encoder_hidden_states, loss, decoder_logits, predictions=None, acc_sum=None, bleu=None,
                 choices_loss=None):
        # Bind every constructor argument as an identically named attribute.
        init_args = dict(locals())
        del init_args['self']
        self.__dict__.update(init_args)
def make_t5_dataloader(args, tokenizer, sequential, do_test):
    """
    Build train/eval(/test) DataLoaders for the dataset named by args.dataset.

    Parameters
    ----------
    args : namespace providing dataset, *_data_file paths, small_data,
        train/eval batch sizes, and the flags consumed by make_t5_*_inputs.
    tokenizer : T5 tokenizer used to turn examples into tensors.
    sequential : when True, the training sampler iterates in order instead
        of shuffling.
    do_test : when True, also build a DataLoader over args.test_data_file.

    Returns
    -------
    (train_dataloader, eval_dataloader, test_dataloader); test_dataloader
    is None unless do_test is True.

    Raises
    ------
    ValueError if args.dataset is not one of 'sm', 'cqa', 'nli'.
    """
    if args.dataset == 'sm':
        read_func = utils.read_sm_examples
        # BUG FIX: this was utils.read_sm_examples, which would re-read the
        # file instead of building tensors (cf. the cqa/nli branches).
        make_input_func = make_t5_sm_inputs
    elif args.dataset == 'cqa':
        read_func = utils.read_cqa_examples
        make_input_func = make_t5_cqa_inputs
    elif args.dataset == 'nli':
        read_func = utils.read_nli_examples
        make_input_func = make_t5_nli_inputs
    else:
        # fail fast instead of an UnboundLocalError below
        raise ValueError(f'Unknown dataset: {args.dataset!r}')
    train_examples = read_func(args.train_data_file)
    eval_examples = read_func(args.eval_data_file)
    # small data for debugging purposes
    if args.small_data > 0:
        train_examples = train_examples[:args.small_data]
        eval_examples = eval_examples[:args.small_data]
    # convert examples to lists of tensors, and put into TensorDatasets then dataloaders.
    train_tensors = make_input_func(args, tokenizer, train_examples)
    train_data = TensorDataset(*train_tensors)
    train_sampler = RandomSampler(train_data) if not sequential else SequentialSampler(train_data)
    train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size,
                                  num_workers=4, pin_memory=True)
    eval_tensors = make_input_func(args, tokenizer, eval_examples)
    eval_data = TensorDataset(*eval_tensors)
    eval_sampler = SequentialSampler(eval_data)
    eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=args.eval_batch_size,
                                 num_workers=4, pin_memory=True)
    test_dataloader = None
    if do_test:
        test_examples = read_func(args.test_data_file)
        if args.small_data > 0:
            test_examples = test_examples[:args.small_data]
        test_tensors = make_input_func(args, tokenizer, test_examples)
        test_data = TensorDataset(*test_tensors)
        test_sampler = SequentialSampler(test_data)
        test_dataloader = DataLoader(test_data, sampler=test_sampler, batch_size=args.eval_batch_size,
                                    num_workers=4, pin_memory=True)
    return train_dataloader, eval_dataloader, test_dataloader
def make_t5_sm_inputs(args, tokenizer, examples):
    """
    Build the tensors for the SM (sense-making) dataset in the multi-task
    T5 format.

    For each example, two prompts are produced:
      * a 'task: ...' prompt whose decoder target names the chosen statement,
        plus per-choice decoder sequences used to score every statement;
      * an 'explain: ...' prompt whose decoder target is the explanation.

    args.label_to_use / args.explanation_to_use select between gold labels
    and model ('t5') predictions; args.condition_on_explanation appends the
    explanation to the task prompt.  Returns the list of tensors consumed
    positionally by make_t5_dataloader.
    """
    qa_encoder_input_strs = []
    qa_decoder_answer_input_strs = []
    qa_decoder_answer_label_strs = []
    qa_decoder_choices_input_strs = []
    qa_decoder_choices_label_strs = []
    exp_encoder_input_strs = []
    exp_decoder_input_strs = []
    exp_decoder_label_strs = []
    exp_context_strs = []
    exp_explanation_strs = []
    # First pass: build the prompt/target strings for every example.
    for idx, example in enumerate(examples):
        qa_prefix = 'task: '
        exp_prefix = 'explain: '
        question_str = f'{example.statements[0]} [SEP] {example.statements[1]}'
        if args.label_to_use == 't5':
            answer_str = example.statements[int(example.input_dict['t5_prediction'])]
        else:
            answer_str = example.statements[example.statement_label]
        if args.explanation_to_use == 't5':
            explanation_str = example.input_dict['t5_explanation']
        else:
            explanation_str = example.human_explanation
        if not args.condition_on_explanation:
            qa_input_str = f'[CLS] {question_str} [SEP]'
        else:
            qa_input_str = f'[CLS] {question_str} [SEP] {explanation_str}'
        exp_input_str = f'[CLS] {question_str} [SEP]'
        qa_encoder_input_str = qa_prefix + qa_input_str
        qa_decoder_answer_input_str = f'The answer is: {answer_str}'
        qa_decoder_answer_label_str = qa_decoder_answer_input_str
        qa_decoder_choices_input_str = [f'The answer is: {statement}' for statement in example.statements]
        qa_decoder_choices_label_str = qa_decoder_choices_input_str
        exp_encoder_input_str = exp_prefix + exp_input_str
        exp_decoder_input_str = f'My common sense tells me {explanation_str}'
        exp_decoder_label_str = exp_decoder_input_str
        # per-choice generation contexts for sampling explanations
        exp_context_str = ['My common sense tells me ' for statement in example.statements]
        exp_explanation_str = explanation_str
        qa_encoder_input_strs.append(qa_encoder_input_str)
        qa_decoder_answer_input_strs.append(qa_decoder_answer_input_str)
        qa_decoder_answer_label_strs.append(qa_decoder_answer_label_str)
        qa_decoder_choices_input_strs.append(qa_decoder_choices_input_str)
        qa_decoder_choices_label_strs.append(qa_decoder_choices_label_str)
        exp_encoder_input_strs.append(exp_encoder_input_str)
        exp_decoder_input_strs.append(exp_decoder_input_str)
        exp_decoder_label_strs.append(exp_decoder_label_str)
        exp_context_strs.append(exp_context_str)
        exp_explanation_strs.append(exp_explanation_str)
    input_padding_id = tokenizer.pad_token_id
    # -100 pads label tensors so padded positions can be excluded from the
    # loss (presumably the cross-entropy ignore_index -- confirm in the model)
    label_padding_id = -100
    # Second pass: tokenize every string list into fixed-width tensors.
    qa_encoder_inputs, qa_encoder_masks = make_t5_tensor(tokenizer, qa_encoder_input_strs, input_padding_id,
                                                         args.max_seq_len, add_eos=False, make_mask=True)
    qa_decoder_answer_inputs, qa_decoder_answer_masks = make_t5_tensor(tokenizer, qa_decoder_answer_input_strs,
                                                                       input_padding_id,
                                                                       args.max_seq_len, add_eos=False,
                                                                       make_mask=True)
    qa_decoder_answer_labels = make_t5_tensor(tokenizer, qa_decoder_answer_label_strs, label_padding_id,
                                              args.max_seq_len, add_eos=False, make_mask=False)
    qa_decoder_choices_inputs, qa_decoder_choices_masks = make_t5_tensor(tokenizer, qa_decoder_choices_input_strs,
                                                                         input_padding_id,
                                                                         args.max_seq_len, add_eos=False,
                                                                         make_mask=True)
    qa_decoder_choices_labels = make_t5_tensor(tokenizer, qa_decoder_choices_label_strs, label_padding_id,
                                               args.max_seq_len, add_eos=False, make_mask=False)
    if args.label_to_use == 't5':
        qa_choice_label_list = [int(example.input_dict['t5_prediction']) for example in examples]
    else:
        qa_choice_label_list = [example.statement_label for example in examples]
    qa_choice_labels = torch.tensor(qa_choice_label_list, dtype=torch.long)
    exp_encoder_inputs, exp_encoder_masks = make_t5_tensor(tokenizer, exp_encoder_input_strs,
                                                           input_padding_id, args.max_seq_len, add_eos=False,
                                                           make_mask=True)
    exp_decoder_inputs, exp_decoder_masks = make_t5_tensor(tokenizer, exp_decoder_input_strs,
                                                           input_padding_id, args.max_seq_len, add_eos=True,
                                                           make_mask=True)
    exp_decoder_labels = make_t5_tensor(tokenizer, exp_decoder_label_strs, label_padding_id, args.max_seq_len,
                                        add_eos=True, make_mask=False)
    exp_context_ids = make_t5_tensor(tokenizer, exp_context_strs, input_padding_id, args.max_seq_len,
                                     add_eos=False, make_mask=False)
    exp_explanation_ids = make_t5_tensor(tokenizer, exp_explanation_strs, input_padding_id, args.max_seq_len,
                                         add_eos=True, make_mask=False)
    # Positional order must match the indexing used by the training loop.
    return [qa_encoder_inputs, qa_encoder_masks,
            qa_decoder_answer_inputs, qa_decoder_answer_masks, qa_decoder_answer_labels,
            qa_decoder_choices_inputs, qa_decoder_choices_masks, qa_decoder_choices_labels,
            qa_choice_labels,
            exp_encoder_inputs, exp_encoder_masks,
            exp_decoder_inputs, exp_decoder_masks, exp_decoder_labels,
            exp_context_ids, exp_explanation_ids]
def make_t5_cqa_inputs(args, tokenizer, examples: Sequence[CQAExample]):
    """
    Build the tensors for the CQA (CommonsenseQA, 3-choice) dataset in the
    multi-task T5 format.

    In addition to the task/explain prompt pairs (see make_t5_sm_inputs),
    this also builds per-example x/e masks over the task encoder input:
    the x-mask keeps only the question+choices span, the e-mask keeps only
    the explanation span (plus the shared prefix).  args.explanation_only,
    args.condition_on_explanation and args.rationalize control the prompt
    variants.  Returns the list of tensors consumed positionally by
    make_t5_dataloader.
    """
    qa_encoder_input_strs = []
    qa_decoder_answer_input_strs = []
    qa_decoder_answer_label_strs = []
    qa_decoder_choices_input_strs = []
    qa_decoder_choices_label_strs = []
    exp_encoder_input_strs = []
    exp_decoder_input_strs = []
    exp_decoder_label_strs = []
    exp_context_strs = []
    exp_explanation_strs = []
    qa_encoder_x_masks = []  # e masked as 0
    qa_encoder_e_masks = []  # x masked as 0
    # First pass: build the prompt/target strings for every example.
    for idx, example in enumerate(examples):
        question_str = f'{example.question}'
        choices_str = f'The choices are {example.choices[0]}, {example.choices[1]} and {example.choices[2]}'
        # truncate question str if necessary
        question_str = truncate_question_str(args, tokenizer, question_str, choices_str)
        if args.label_to_use == 't5':
            answer_str = example.choices[int(example.input_dict['t5_prediction'])]
        else:
            # label < 0 marks an unlabeled example -> empty answer target
            answer_str = example.choices[example.label] if example.label >= 0 else ''
        if args.explanation_to_use == 't5':
            explanation_str = example.input_dict['t5_explanation']
        else:
            explanation_str = example.human_explanation
        if args.explanation_only:
            qa_encoder_input_str = f'task: [CLS] {choices_str} [SEP] My commonsense tells me {explanation_str}'
        elif args.condition_on_explanation:
            qa_encoder_input_str = f'task: [CLS] {question_str} {choices_str} [SEP] My commonsense tells me {explanation_str}'
        else:
            qa_encoder_input_str = f'task: [CLS] {question_str} {choices_str} [SEP]'
        exp_encoder_input_str = f'explain: [CLS] {question_str} {choices_str} [SEP]'
        # x,e masks
        x_len = len(tokenizer.encode(f'task: [CLS] {question_str} {choices_str} [SEP] '))
        qa_encoder_x_mask = [1] * x_len + [0] * (args.max_seq_len - x_len)
        qa_encoder_x_masks.append(qa_encoder_x_mask)
        start_len = len(tokenizer.encode('task: [CLS] '))
        que_len = len(tokenizer.encode(f'task: [CLS] {question_str} '))
        qa_encoder_e_mask = [1] * start_len + [0] * (que_len - start_len) + [1] * (args.max_seq_len - que_len)
        qa_encoder_e_masks.append(qa_encoder_e_mask)
        qa_decoder_answer_input_str = f'The answer is: {answer_str}'
        qa_decoder_answer_label_str = qa_decoder_answer_input_str
        qa_decoder_choices_input_str = [f'The answer is: {choice}' for choice in example.choices]
        qa_decoder_choices_label_str = qa_decoder_choices_input_str
        exp_decoder_input_str = f'My commonsense tells me {explanation_str}'
        exp_decoder_label_str = exp_decoder_input_str
        # rationalize: condition each sampled explanation on a candidate answer
        if args.rationalize:
            exp_context_str = [f'The answer is {choice} because ' for choice in example.choices]
        else:
            exp_context_str = ['My commonsense tells me ' for choice in example.choices]
        exp_explanation_str = explanation_str
        qa_encoder_input_strs.append(qa_encoder_input_str)
        qa_decoder_answer_input_strs.append(qa_decoder_answer_input_str)
        qa_decoder_answer_label_strs.append(qa_decoder_answer_label_str)
        qa_decoder_choices_input_strs.append(qa_decoder_choices_input_str)
        qa_decoder_choices_label_strs.append(qa_decoder_choices_label_str)
        exp_encoder_input_strs.append(exp_encoder_input_str)
        exp_decoder_input_strs.append(exp_decoder_input_str)
        exp_decoder_label_strs.append(exp_decoder_label_str)
        exp_context_strs.append(exp_context_str)
        exp_explanation_strs.append(exp_explanation_str)
    qa_encoder_x_masks = torch.tensor(qa_encoder_x_masks, dtype=torch.long)
    qa_encoder_e_masks = torch.tensor(qa_encoder_e_masks, dtype=torch.long)
    input_padding_id = tokenizer.pad_token_id
    # -100 pads label tensors so padded positions can be excluded from the
    # loss (presumably the cross-entropy ignore_index -- confirm in the model)
    label_padding_id = -100
    # Second pass: tokenize every string list into fixed-width tensors.
    qa_encoder_inputs, qa_encoder_masks = make_t5_tensor(tokenizer, qa_encoder_input_strs, input_padding_id,
                                                         args.max_seq_len, add_eos=False, make_mask=True)
    qa_decoder_answer_inputs, qa_decoder_answer_masks = make_t5_tensor(tokenizer, qa_decoder_answer_input_strs,
                                                                       input_padding_id,
                                                                       args.max_seq_len, add_eos=False,
                                                                       make_mask=True)
    qa_decoder_answer_labels = make_t5_tensor(tokenizer, qa_decoder_answer_label_strs, label_padding_id,
                                              args.max_seq_len, add_eos=False, make_mask=False)
    qa_decoder_choices_inputs, qa_decoder_choices_masks = make_t5_tensor(tokenizer, qa_decoder_choices_input_strs,
                                                                         input_padding_id,
                                                                         args.max_seq_len, add_eos=False,
                                                                         make_mask=True)
    qa_decoder_choices_labels = make_t5_tensor(tokenizer, qa_decoder_choices_label_strs, label_padding_id,
                                               args.max_seq_len, add_eos=False, make_mask=False)
    if args.label_to_use == 't5':
        qa_choice_label_list = [int(example.input_dict['t5_prediction']) for example in examples]
    else:
        qa_choice_label_list = [example.label for example in examples]
    qa_choice_labels = torch.tensor(qa_choice_label_list, dtype=torch.long)
    exp_encoder_inputs, exp_encoder_masks = make_t5_tensor(tokenizer, exp_encoder_input_strs,
                                                           input_padding_id, args.max_seq_len, add_eos=False,
                                                           make_mask=True)
    exp_decoder_inputs, exp_decoder_masks = make_t5_tensor(tokenizer, exp_decoder_input_strs,
                                                           input_padding_id, args.max_seq_len, add_eos=True,
                                                           make_mask=True)
    exp_decoder_labels = make_t5_tensor(tokenizer, exp_decoder_label_strs, label_padding_id, args.max_seq_len,
                                        add_eos=True, make_mask=False)
    exp_context_ids = make_t5_tensor(tokenizer, exp_context_strs, input_padding_id, args.max_seq_len,
                                     add_eos=False, make_mask=False)
    exp_explanation_ids = make_t5_tensor(tokenizer, exp_explanation_strs, input_padding_id, args.max_seq_len,
                                         add_eos=True, make_mask=False)
    # Positional order must match the indexing used by the training loop.
    return [qa_encoder_inputs, qa_encoder_masks, qa_encoder_x_masks, qa_encoder_e_masks,
            qa_decoder_answer_inputs, qa_decoder_answer_masks, qa_decoder_answer_labels,
            qa_decoder_choices_inputs, qa_decoder_choices_masks, qa_decoder_choices_labels,
            qa_choice_labels,
            exp_encoder_inputs, exp_encoder_masks,
            exp_decoder_inputs, exp_decoder_masks, exp_decoder_labels,
            exp_context_ids, exp_explanation_ids]
def make_t5_nli_inputs(args, tokenizer, examples: Sequence[NLIExample]):
    """
    Build the tensors for the NLI dataset in the multi-task T5 format.

    Mirrors make_t5_sm_inputs, with NLI-specific prompt templates built
    from premise/hypothesis and a shorter 'answer <label>' decoder target.
    args.label_to_use / args.explanation_to_use select gold labels vs.
    model ('t5') predictions; args.condition_on_explanation appends the
    explanation to the task prompt.  Returns the list of tensors consumed
    positionally by make_t5_dataloader.
    """
    qa_encoder_input_strs = []
    qa_decoder_answer_input_strs = []
    qa_decoder_answer_label_strs = []
    qa_decoder_choices_input_strs = []
    qa_decoder_choices_label_strs = []
    exp_encoder_input_strs = []
    exp_decoder_input_strs = []
    exp_decoder_label_strs = []
    exp_context_strs = []
    exp_explanation_strs = []
    # First pass: build the prompt/target strings for every example.
    for idx, example in enumerate(examples):
        premise_str = example.premise
        hypothesis_str = example.hypothesis
        if args.label_to_use == 't5':
            answer_str = example.choices[int(example.input_dict['t5_prediction'])]
        else:
            answer_str = example.choices[int(example.label)]
        if args.explanation_to_use == 't5':
            explanation_str = example.input_dict['t5_explanation']
        else:
            explanation_str = example.human_explanation
        qa_encoder_input_str = f'task: nli premise: [CLS] {premise_str} [SEP] hypothesis: {hypothesis_str} [SEP]'
        if args.condition_on_explanation:
            qa_encoder_input_str = f'{qa_encoder_input_str} My commonsense tells me {explanation_str}'
        exp_encoder_input_str = f'explain: nli premise: [CLS] {premise_str} [SEP] hypothesis: {hypothesis_str} [SEP]'
        qa_decoder_answer_input_str = f'answer {answer_str}'
        qa_decoder_answer_label_str = qa_decoder_answer_input_str
        qa_decoder_choices_input_str = [f'answer {choice}' for choice in example.choices]
        qa_decoder_choices_label_str = qa_decoder_choices_input_str
        exp_decoder_input_str = f'My commonsense tells me {explanation_str}'
        exp_decoder_label_str = exp_decoder_input_str
        # per-choice generation contexts for sampling explanations
        exp_context_str = ['My commonsense tells me ' for choice in example.choices]
        exp_explanation_str = explanation_str
        qa_encoder_input_strs.append(qa_encoder_input_str)
        qa_decoder_answer_input_strs.append(qa_decoder_answer_input_str)
        qa_decoder_answer_label_strs.append(qa_decoder_answer_label_str)
        qa_decoder_choices_input_strs.append(qa_decoder_choices_input_str)
        qa_decoder_choices_label_strs.append(qa_decoder_choices_label_str)
        exp_encoder_input_strs.append(exp_encoder_input_str)
        exp_decoder_input_strs.append(exp_decoder_input_str)
        exp_decoder_label_strs.append(exp_decoder_label_str)
        exp_context_strs.append(exp_context_str)
        exp_explanation_strs.append(exp_explanation_str)
    input_padding_id = tokenizer.pad_token_id
    # -100 pads label tensors so padded positions can be excluded from the
    # loss (presumably the cross-entropy ignore_index -- confirm in the model)
    label_padding_id = -100
    # Second pass: tokenize every string list into fixed-width tensors.
    qa_encoder_inputs, qa_encoder_masks = make_t5_tensor(tokenizer, qa_encoder_input_strs, input_padding_id,
                                                         args.max_seq_len, add_eos=False, make_mask=True)
    qa_decoder_answer_inputs, qa_decoder_answer_masks = make_t5_tensor(tokenizer, qa_decoder_answer_input_strs,
                                                                       input_padding_id,
                                                                       args.max_seq_len, add_eos=False,
                                                                       make_mask=True)
    qa_decoder_answer_labels = make_t5_tensor(tokenizer, qa_decoder_answer_label_strs, label_padding_id,
                                              args.max_seq_len, add_eos=False, make_mask=False)
    qa_decoder_choices_inputs, qa_decoder_choices_masks = make_t5_tensor(tokenizer, qa_decoder_choices_input_strs,
                                                                         input_padding_id,
                                                                         args.max_seq_len, add_eos=False,
                                                                         make_mask=True)
    qa_decoder_choices_labels = make_t5_tensor(tokenizer, qa_decoder_choices_label_strs, label_padding_id,
                                               args.max_seq_len, add_eos=False, make_mask=False)
    if args.label_to_use == 't5':
        qa_choice_label_list = [int(example.input_dict['t5_prediction']) for example in examples]
    else:
        qa_choice_label_list = [example.label for example in examples]
    qa_choice_labels = torch.tensor(qa_choice_label_list, dtype=torch.long)
    exp_encoder_inputs, exp_encoder_masks = make_t5_tensor(tokenizer, exp_encoder_input_strs,
                                                           input_padding_id, args.max_seq_len, add_eos=False,
                                                           make_mask=True)
    exp_decoder_inputs, exp_decoder_masks = make_t5_tensor(tokenizer, exp_decoder_input_strs,
                                                           input_padding_id, args.max_seq_len, add_eos=True,
                                                           make_mask=True)
    exp_decoder_labels = make_t5_tensor(tokenizer, exp_decoder_label_strs, label_padding_id, args.max_seq_len,
                                        add_eos=True, make_mask=False)
    exp_context_ids = make_t5_tensor(tokenizer, exp_context_strs, input_padding_id, args.max_seq_len,
                                     add_eos=False, make_mask=False)
    exp_explanation_ids = make_t5_tensor(tokenizer, exp_explanation_strs, input_padding_id, args.max_seq_len,
                                         add_eos=True, make_mask=False)
    # Positional order must match the indexing used by the training loop.
    return [qa_encoder_inputs, qa_encoder_masks,
            qa_decoder_answer_inputs, qa_decoder_answer_masks, qa_decoder_answer_labels,
            qa_decoder_choices_inputs, qa_decoder_choices_masks, qa_decoder_choices_labels,
            qa_choice_labels,
            exp_encoder_inputs, exp_encoder_masks,
            exp_decoder_inputs, exp_decoder_masks, exp_decoder_labels,
            exp_context_ids, exp_explanation_ids]
def make_t5_tensor(tokenizer, input_strs, pad_token_id, max_seq_len, add_eos: bool, make_mask: bool):
    """
    Tokenize a batch of strings into a fixed-width LongTensor.

    Each entry of input_strs is either a single string (one row of ids) or
    a sequence of per-choice strings (one row per choice, producing a 3-D
    tensor).  Every sequence is truncated to max_seq_len, optionally
    terminated with the EOS id, and right-padded with pad_token_id.

    Returns the id tensor alone, or (tensor, mask) when make_mask is True,
    where mask is 1.0 wherever the id differs from pad_token_id.
    """
    def _encode(text):
        # One string -> fixed-length list of token ids.
        ids = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(text))
        if add_eos:
            ids += [tokenizer.eos_token_id]
        truncate_seq_pair(ids, [], max_seq_len)
        ids.extend([pad_token_id] * (max_seq_len - len(ids)))  # right-pad
        return ids

    all_input_ids = []
    for entry in input_strs:
        if isinstance(entry, str):
            all_input_ids.append(_encode(entry))
        else:
            # a list of answer choices: one row per choice
            all_input_ids.append([_encode(choice) for choice in entry])
    tensor = torch.tensor(all_input_ids, dtype=torch.long)
    if not make_mask:
        return tensor
    return tensor, (tensor != pad_token_id).float()
def truncate_question_str(args, tokenizer, question_str, choices_str):
    """
    Shorten question_str so that the full task prompt (question + choices
    plus the 'task: ' prefix and room for a sampled explanation of up to
    args.max_sample_len tokens) fits within args.max_seq_len.

    When truncation is needed, the question is cut from the right (one
    extra token beyond the strict overflow) and a '?' is appended.
    """
    full_len = len(tokenizer.encode(f'[CLS] {question_str} {choices_str} [SEP]'))
    reserved = (len(tokenizer.encode('My commonsense tells me '))
                + args.max_sample_len
                + len(tokenizer.encode('task: ')))
    budget = args.max_seq_len - reserved
    if full_len <= budget:
        return question_str
    excess = full_len - budget
    tokens = tokenizer.encode(question_str)
    kept = tokens[:len(tokens) - excess - 1]
    return tokenizer.decode(kept) + '?'
def print_t5_input(args, tokenizer, input: T5Input, msg='T5Input'):
    """
    Debug helper: detokenize and print every populated field of a T5Input.

    Always prints the encoder/decoder strings; raw id/mask tensors are only
    printed when args.verbose is set.  Pad and '[UNK]' tokens are stripped
    from the detokenized strings.
    """
    ignore_tokens_list = [tokenizer.pad_token, '[UNK]']
    encoder_input_strs = detok_batch(tokenizer, input.encoder_inputs, ignore_tokens=ignore_tokens_list,
                                     eos_token=tokenizer.eos_token)
    decoder_input_strs = detok_batch(tokenizer, input.decoder_inputs, ignore_tokens=ignore_tokens_list,
                                     eos_token=tokenizer.eos_token)
    decoder_label_strs = detok_batch(tokenizer, input.decoder_labels, ignore_tokens=ignore_tokens_list,
                                     eos_token=tokenizer.eos_token)
    print(f'\n----{msg}----\n')
    print(f'encoder_input_strs: {encoder_input_strs}')
    print(f'encoder_inputs[0]: {input.encoder_inputs[0]}')
    print(f'encoder_masks[0]: {input.encoder_masks[0]}')
    print(f'decoder_input_strs: {decoder_input_strs}')
    print(f'decoder_label_strs: {decoder_label_strs}')
    if args.verbose:
        print(f'decoder_inputs[0]: {input.decoder_inputs[0]}')
        print(f'decoder_masks[0]: {input.decoder_masks[0]}')
        print(f'decoder_labels[0]: {input.decoder_labels[0]}')
    if input.choice_labels is not None:
        print(f'choice_labels: {input.choice_labels}')
    # context/explanation ids are optional -- only print what is populated
    if input.context_ids is not None:
        context_strs = detok_batch(tokenizer, input.context_ids, ignore_tokens=ignore_tokens_list,
                                   eos_token=tokenizer.eos_token)
        if args.verbose:
            print(f'context_ids[0]: {input.context_ids[0]}')
        print(f'context_strs: {context_strs}')
    if input.explanation_ids is not None:
        explanation_strs = detok_batch(tokenizer, input.explanation_ids, ignore_tokens=ignore_tokens_list,
                                       eos_token=tokenizer.eos_token)
        if args.verbose:
            print(f'explanation_ids[0]: {input.explanation_ids[0]}')
        print(f'explanation_strs: {explanation_strs}')
    print('')
def print_t5_output(args, tokenizer, output: T5Output, msg='T5Output'):
    """Dump a T5Output batch to stdout for debugging.

    Prints losses and encoder hidden-state shapes, detokenizes predictions
    when they are token-id lists (pad tokens stripped), and reports the
    optional accuracy / BLEU fields when present. args.verbose adds the raw
    tensors for the first batch element.
    """
    skip_tokens = [tokenizer.pad_token]
    print(f'\n----{msg}----\n')
    print(f'encoder_hidden_states.size(): {output.encoder_hidden_states.size()}')
    if args.verbose:
        print(f'encoder_hidden_states: {output.encoder_hidden_states}')
    print(f'loss.size(): {output.loss.size()}')
    print(f'loss: {output.loss}')
    if output.choices_loss is not None:
        print(f'choices_loss: {output.choices_loss}')
    preds = output.predictions
    if preds is not None:  # predictions can be either (batch_size, 1) or (batch_size, max_seq_len)
        if isinstance(preds[0], list):
            # Token-id sequences: decode them back to readable strings.
            prediction_strs = detok_batch(tokenizer, preds, ignore_tokens=skip_tokens,
                                          eos_token=tokenizer.eos_token)
            if args.verbose:
                print(f'prediction_ids[0]: {preds[0]}')
            print(f'prediction_strs: {prediction_strs}')
        else:
            print(f'predictions: {preds}')
    if output.acc_sum is not None:
        print(f'accuracy_sum: {output.acc_sum}')
    if output.bleu is not None:
        print(f'bleu: {output.bleu}')
    print('')
def sample_batched(model, context_ids, tokenizer, max_sample_len, model_name='T5',
                   input_ids=None, input_masks=None, encoder_hidden_states=None,
                   sampling_strategy='argmax', pad_prefix=True):
    '''
    Uses model to sample based on context_ids, until max_sample_len is hit, with the expectation that decoding will stop at a specified [end] token
    This function is batched, meaning predictions are placed at the end of each running sequence within a tensor of shape (batch_size x num_choices x max_seq_len)
    Before returning samples, the original contexts in running_contexts are set to the pad_token_id

    Args:
        model: decoder-only ('gpt'-style) or encoder-decoder ('T5'-style) callable.
        context_ids: (batch_size, num_choices, max_seq_len) tensor of decoding
            prompts, right-padded with the tokenizer's pad token.
        tokenizer: tokenizer; len(tokenizer) supplies the effective vocab size.
        max_sample_len: number of decoding steps to run.
        model_name: 'T5' (default) or any name containing 'gpt'.
        input_ids / input_masks: encoder inputs; used for T5 only, and input_ids
            only when encoder_hidden_states is not supplied.
        encoder_hidden_states: optionally precomputed T5 encoder states.
        sampling_strategy: 'argmax' for greedy decoding; any other value samples
            from the softmax distribution via torch.multinomial.
        pad_prefix: when True, the original context tokens in the result are
            overwritten with pad_token_id so only the sampled continuation remains.

    Returns:
        (batch_size, num_choices, max_seq_len) tensor of sampled sequences.
    '''
    batch_size = context_ids.size(0)
    num_choices = context_ids.size(1)
    vocab_size = len(tokenizer) # NOT tokenizer.vocab_size, this attr does not update when tokens are added
    # Fall back to 0 when the tokenizer defines no pad token.
    pad_token_id = tokenizer.pad_token_id if tokenizer.pad_token_id is not None else 0
    # Clone so the caller's context_ids tensor is never mutated in place.
    running_contexts = context_ids.clone()
    device = context_ids.device
    if model_name == 'T5':
        if encoder_hidden_states is None:
            # Run the encoder once up front; its states are reused at every step.
            encoder_outputs = model(input_ids=input_ids,
                                    attention_mask=input_masks)
            encoder_hidden_states = encoder_outputs[1]
        if input_masks.shape != context_ids.shape:
            # Broadcast per-question encoder masks/states across the choices dim.
            input_masks = input_masks.unsqueeze(1).expand_as(context_ids)
            expand_shape = list(encoder_hidden_states.shape)
            expand_shape.insert(1, context_ids.size(1))
            encoder_hidden_states = encoder_hidden_states.unsqueeze(1).expand(expand_shape)
        # flatten for T5.forward
        batch_size_by_num_choices = list(encoder_hidden_states.shape[:2])  # NOTE(review): unused below
        seq_len = encoder_hidden_states.size(2)
        embed_dim = encoder_hidden_states.size(3)
        encoder_hidden_states = encoder_hidden_states.reshape(-1, seq_len, embed_dim)
        input_masks = input_masks.reshape(-1, seq_len)
    # BEGIN SAMPLING
    for k in range(max_sample_len):
        attention_mask = (running_contexts != pad_token_id).float()
        # get locations of last non-pad tokens in each sequence for purposes of: getting predictions from logits, and updating running_contexts
        # NOTE(review): .index(pad_token_id) raises ValueError when a sequence has
        # no padding left (i.e. it already fills max_seq_len) -- confirm callers
        # always leave room for max_sample_len extra tokens.
        # print(running_contexts)
        where_last_tokens = [[question[choice_id].index(pad_token_id) - 1 for choice_id in range(num_choices)] for
                             question in running_contexts.tolist()]
        # One-hot selector (over the seq dim) for each sequence's last real token.
        mask = torch.zeros(batch_size, num_choices, context_ids.size(2), vocab_size)
        mask = mask.to(device).float()
        for i in range(running_contexts.size(0)):
            for j in range(num_choices):
                last_token_index = where_last_tokens[i][j]
                mask[i, j, last_token_index, :] = 1
        # hold onto the starting point of sampling for each context
        if k == 0: init_where_last_tokens = where_last_tokens
        with torch.no_grad():
            if 'gpt' in model_name:
                outputs = model(running_contexts, attention_mask=attention_mask)
            elif 'T5' == model_name:
                # T5.forward expects 2-D (batch*choices, seq_len) decoder inputs.
                running_contexts = running_contexts.view(-1, seq_len)
                attention_mask = attention_mask.view(-1, seq_len)
                outputs = model(encoder_hidden_states=encoder_hidden_states,
                                encoder_attention_mask=input_masks,
                                decoder_input_ids=running_contexts,
                                decoder_attention_mask=attention_mask)
        logits = outputs[0]
        # unflatten for T5
        if 'T5' == model_name:
            running_contexts = running_contexts.view(batch_size, num_choices, seq_len)
            logits = logits.view(batch_size, num_choices, seq_len, vocab_size)
        # get logits corresponding to last tokens in each sequence
        logits = logits * mask
        logits = torch.sum(logits, dim=2) # (batch_size, num_choices, vocab_size)
        if sampling_strategy == 'argmax':
            preds = torch.argmax(logits, dim=-1)
        else:
            # NOTE(review): squeeze(1)/dim=1 implies num_choices == 1 on the
            # multinomial path -- confirm before using with multiple choices.
            probs = torch.nn.functional.softmax(logits.squeeze(1), dim=1) # (batch_size, vocab_size)
            preds = torch.multinomial(probs, num_samples=1)
        # assign preds to the first pad location in each running_contexts[i,j,:] sequence
        for i in range(batch_size):
            for j in range(num_choices):
                last_token_index = where_last_tokens[i][j]
                running_contexts[i, j, last_token_index + 1] = preds[i, j].item()
    samples = running_contexts
    if pad_prefix:
        # Blank out the original prompt tokens so only generated text remains.
        for i in range(batch_size):
            for j in range(num_choices):
                end_of_context_index = init_where_last_tokens[i][j]
                samples[i, j, :(end_of_context_index + 1)] = pad_token_id
    return samples
def sample(device, model, prompts, encoder_hidden_states, input_masks, max_seq_length, tokenizer, decoder_masks=None,
           sampling_strategy='argmax'):
    '''
    Batched decoding driven by an attention-mask frontier.

    prompts has shape (batch_size, num_choices, seq_len). decoder_masks marks
    the positions already fixed (context plus tokens committed so far); each
    loop iteration predicts logits for every position, commits one more token
    per sequence, and extends the mask. Decoding stops once every sequence has
    emitted tokenizer.eos_token_id or the masks are completely full.
    '''
    if decoder_masks is None:
        # Default: everything that is not padding counts as given context.
        decoder_masks = (prompts!=tokenizer.pad_token_id).int()
    context_lens = decoder_masks.sum(dim=-1)  # NOTE(review): unused below
    batch_size, num_choices, seq_len = list(decoder_masks.shape)
    # Per-sequence flag, set to 1 once the sequence has produced EOS.
    finished = torch.zeros(batch_size, num_choices, dtype=torch.int32).to(device)
    vocab_size = len(tokenizer)
    while finished.sum().item() != batch_size*num_choices and decoder_masks.sum().item() != batch_size * num_choices * max_seq_length:
        prompts = prompts.view(-1, seq_len)
        input_masks = input_masks.view(-1, seq_len)
        with torch.no_grad():
            # NOTE(review): decoder_masks is still 3-D here while the other
            # decoder args are flattened to 2-D -- confirm the model accepts this.
            outputs = model(encoder_hidden_states = encoder_hidden_states,
                            encoder_attention_mask = input_masks,
                            decoder_input_ids = prompts,
                            decoder_attention_mask = decoder_masks)
        logits = outputs[0]
        prompts = prompts.view(batch_size, num_choices, seq_len)
        logits = logits.view(batch_size, num_choices, seq_len, vocab_size)
        if sampling_strategy == 'argmax':
            pred = torch.argmax(logits, dim=-1)
        elif sampling_strategy == 'multinomial':
            prob = torch.nn.functional.softmax(logits, dim=-1).view(-1, vocab_size)
            pred = torch.multinomial(prob, num_samples=1).view(batch_size, num_choices, seq_len)
        # Shift predictions right by one: position t is predicted from logits at t-1.
        pred = torch.cat((torch.zeros((batch_size, num_choices, 1), dtype=torch.long).to(device), pred[..., :-1]), dim=2)
        # Keep already-fixed tokens, fill the remaining slots with predictions.
        prompts = decoder_masks * prompts + (1 - decoder_masks) * pred
        # Shift the mask right with a leading 1; for a left-aligned prefix mask
        # this extends the fixed prefix by one position.
        new_masks = torch.cat((torch.ones((batch_size, num_choices, 1), dtype=torch.int32).to(device), decoder_masks[..., :-1]), dim=2)
        # Tokens newly committed this step (zero at every other position).
        new_tokens = (1 - decoder_masks) * new_masks * prompts
        # Flag sequences whose newly committed token is EOS; the (ones - finished)
        # factor keeps already-finished sequences from being counted twice.
        finished += (torch.ones(batch_size, num_choices, dtype=torch.int32).to(device) - finished) * \
                    (new_tokens.sum(dim=2) == tokenizer.eos_token_id).int()
        decoder_masks = new_masks
return prompts | [
"torch.nn.functional.softmax",
"utils.truncate_seq_pair",
"torch.ones",
"torch.multinomial",
"torch.utils.data.SequentialSampler",
"torch.utils.data.TensorDataset",
"torch.utils.data.RandomSampler",
"torch.tensor",
"utils.detok_batch",
"torch.sum",
"torch.utils.data.DataLoader",
"torch.no_grad... | [((2358, 2387), 'torch.utils.data.TensorDataset', 'TensorDataset', (['*train_tensors'], {}), '(*train_tensors)\n', (2371, 2387), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((2510, 2626), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'sampler': 'train_sampler', 'batch_size': 'args.train_batch_size', 'num_workers': '(4)', 'pin_memory': '(True)'}), '(train_data, sampler=train_sampler, batch_size=args.\n train_batch_size, num_workers=4, pin_memory=True)\n', (2520, 2626), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((2740, 2768), 'torch.utils.data.TensorDataset', 'TensorDataset', (['*eval_tensors'], {}), '(*eval_tensors)\n', (2753, 2768), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((2788, 2816), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['eval_data'], {}), '(eval_data)\n', (2805, 2816), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((2839, 2951), 'torch.utils.data.DataLoader', 'DataLoader', (['eval_data'], {'sampler': 'eval_sampler', 'batch_size': 'args.eval_batch_size', 'num_workers': '(4)', 'pin_memory': '(True)'}), '(eval_data, sampler=eval_sampler, batch_size=args.eval_batch_size,\n num_workers=4, pin_memory=True)\n', (2849, 2951), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((7869, 7921), 'torch.tensor', 'torch.tensor', (['qa_choice_label_list'], {'dtype': 'torch.long'}), '(qa_choice_label_list, dtype=torch.long)\n', (7881, 7921), False, 'import torch\n'), ((13104, 13154), 'torch.tensor', 'torch.tensor', (['qa_encoder_x_masks'], {'dtype': 'torch.long'}), '(qa_encoder_x_masks, dtype=torch.long)\n', (13116, 13154), False, 'import torch\n'), ((13180, 13230), 'torch.tensor', 'torch.tensor', (['qa_encoder_e_masks'], {'dtype': 
'torch.long'}), '(qa_encoder_e_masks, dtype=torch.long)\n', (13192, 13230), False, 'import torch\n'), ((14955, 15007), 'torch.tensor', 'torch.tensor', (['qa_choice_label_list'], {'dtype': 'torch.long'}), '(qa_choice_label_list, dtype=torch.long)\n', (14967, 15007), False, 'import torch\n'), ((20788, 20840), 'torch.tensor', 'torch.tensor', (['qa_choice_label_list'], {'dtype': 'torch.long'}), '(qa_choice_label_list, dtype=torch.long)\n', (20800, 20840), False, 'import torch\n'), ((23412, 23457), 'torch.tensor', 'torch.tensor', (['all_input_ids'], {'dtype': 'torch.long'}), '(all_input_ids, dtype=torch.long)\n', (23424, 23457), False, 'import torch\n'), ((24415, 24529), 'utils.detok_batch', 'detok_batch', (['tokenizer', 'input.encoder_inputs'], {'ignore_tokens': 'ignore_tokens_list', 'eos_token': 'tokenizer.eos_token'}), '(tokenizer, input.encoder_inputs, ignore_tokens=\n ignore_tokens_list, eos_token=tokenizer.eos_token)\n', (24426, 24529), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((24587, 24701), 'utils.detok_batch', 'detok_batch', (['tokenizer', 'input.decoder_inputs'], {'ignore_tokens': 'ignore_tokens_list', 'eos_token': 'tokenizer.eos_token'}), '(tokenizer, input.decoder_inputs, ignore_tokens=\n ignore_tokens_list, eos_token=tokenizer.eos_token)\n', (24598, 24701), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((24759, 24873), 'utils.detok_batch', 'detok_batch', (['tokenizer', 'input.decoder_labels'], {'ignore_tokens': 'ignore_tokens_list', 'eos_token': 'tokenizer.eos_token'}), '(tokenizer, input.decoder_labels, ignore_tokens=\n ignore_tokens_list, eos_token=tokenizer.eos_token)\n', (24770, 24873), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((2408, 2433), 'torch.utils.data.RandomSampler', 'RandomSampler', (['train_data'], {}), '(train_data)\n', (2421, 2433), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((2457, 2486), 
'torch.utils.data.SequentialSampler', 'SequentialSampler', (['train_data'], {}), '(train_data)\n', (2474, 2486), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((3263, 3291), 'torch.utils.data.TensorDataset', 'TensorDataset', (['*test_tensors'], {}), '(*test_tensors)\n', (3276, 3291), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((3315, 3343), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['test_data'], {}), '(test_data)\n', (3332, 3343), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((3370, 3482), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data'], {'sampler': 'test_sampler', 'batch_size': 'args.eval_batch_size', 'num_workers': '(4)', 'pin_memory': '(True)'}), '(test_data, sampler=test_sampler, batch_size=args.eval_batch_size,\n num_workers=4, pin_memory=True)\n', (3380, 3482), False, 'from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n'), ((25583, 25693), 'utils.detok_batch', 'detok_batch', (['tokenizer', 'input.context_ids'], {'ignore_tokens': 'ignore_tokens_list', 'eos_token': 'tokenizer.eos_token'}), '(tokenizer, input.context_ids, ignore_tokens=ignore_tokens_list,\n eos_token=tokenizer.eos_token)\n', (25594, 25693), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((25927, 26042), 'utils.detok_batch', 'detok_batch', (['tokenizer', 'input.explanation_ids'], {'ignore_tokens': 'ignore_tokens_list', 'eos_token': 'tokenizer.eos_token'}), '(tokenizer, input.explanation_ids, ignore_tokens=\n ignore_tokens_list, eos_token=tokenizer.eos_token)\n', (25938, 26042), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((22733, 22778), 'utils.truncate_seq_pair', 'truncate_seq_pair', (['input_ids', '[]', 'max_seq_len'], {}), '(input_ids, [], max_seq_len)\n', (22750, 22778), False, 'from utils import truncate_seq_pair, 
detok_batch\n'), ((26939, 27050), 'utils.detok_batch', 'detok_batch', (['tokenizer', 'output.predictions'], {'ignore_tokens': 'ignore_tokens_list', 'eos_token': 'tokenizer.eos_token'}), '(tokenizer, output.predictions, ignore_tokens=ignore_tokens_list,\n eos_token=tokenizer.eos_token)\n', (26950, 27050), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((30377, 30392), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (30390, 30392), False, 'import torch\n'), ((31370, 31394), 'torch.sum', 'torch.sum', (['logits'], {'dim': '(2)'}), '(logits, dim=2)\n', (31379, 31394), False, 'import torch\n'), ((32702, 32757), 'torch.zeros', 'torch.zeros', (['batch_size', 'num_choices'], {'dtype': 'torch.int32'}), '(batch_size, num_choices, dtype=torch.int32)\n', (32713, 32757), False, 'import torch\n'), ((33045, 33060), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (33058, 33060), False, 'import torch\n'), ((33557, 33585), 'torch.argmax', 'torch.argmax', (['logits'], {'dim': '(-1)'}), '(logits, dim=-1)\n', (33569, 33585), False, 'import torch\n'), ((23172, 23218), 'utils.truncate_seq_pair', 'truncate_seq_pair', (['choice_ids', '[]', 'max_seq_len'], {}), '(choice_ids, [], max_seq_len)\n', (23189, 23218), False, 'from utils import truncate_seq_pair, detok_batch\n'), ((31507, 31535), 'torch.argmax', 'torch.argmax', (['logits'], {'dim': '(-1)'}), '(logits, dim=-1)\n', (31519, 31535), False, 'import torch\n'), ((31684, 31723), 'torch.multinomial', 'torch.multinomial', (['probs'], {'num_samples': '(1)'}), '(probs, num_samples=1)\n', (31701, 31723), False, 'import torch\n'), ((33654, 33697), 'torch.nn.functional.softmax', 'torch.nn.functional.softmax', (['logits'], {'dim': '(-1)'}), '(logits, dim=-1)\n', (33681, 33697), False, 'import torch\n'), ((33738, 33776), 'torch.multinomial', 'torch.multinomial', (['prob'], {'num_samples': '(1)'}), '(prob, num_samples=1)\n', (33755, 33776), False, 'import torch\n'), ((33842, 33901), 'torch.zeros', 'torch.zeros', (['(batch_size, 
num_choices, 1)'], {'dtype': 'torch.long'}), '((batch_size, num_choices, 1), dtype=torch.long)\n', (33853, 33901), False, 'import torch\n'), ((34040, 34099), 'torch.ones', 'torch.ones', (['(batch_size, num_choices, 1)'], {'dtype': 'torch.int32'}), '((batch_size, num_choices, 1), dtype=torch.int32)\n', (34050, 34099), False, 'import torch\n'), ((34229, 34283), 'torch.ones', 'torch.ones', (['batch_size', 'num_choices'], {'dtype': 'torch.int32'}), '(batch_size, num_choices, dtype=torch.int32)\n', (34239, 34283), False, 'import torch\n')] |
##
## @cond ___LICENSE___
##
## Copyright (c) 2016-2018 Zefiros Software.
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
## THE SOFTWARE.
##
## @endcond
##
import subprocess
import re
import os
import glob
import yaml
def convert(word):
    """Collapse underscore-separated pieces of *word* into one identifier.

    Splitting on '_' and joining the non-empty pieces removes single
    underscores entirely; each empty piece (produced by a run of consecutive
    underscores, or an empty input) is kept as a single '_' placeholder.
    """
    pieces = word.split('_')
    return ''.join(piece if piece else '_' for piece in pieces)
def run(cmd):
    """Run *cmd* as a subprocess and return its (stdout, stderr) bytes.

    All three standard streams are piped; the call blocks until the
    process exits (via communicate()).
    """
    proc = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE,
    )
    return proc.communicate()
# Accumulated zpm settings tree, dumped as YAML at the end of the script.
tree = {
    'settings': {
    }
}
# Build the bcp tool first if no previously built binary can be found.
# (bcp.exe: this script targets a Windows Boost checkout.)
if len(list(glob.iglob('bin.v2/**/bcp.exe', recursive=True))) == 0:
    run("b2 tools/bcp")
bcp = list(glob.iglob('bin.v2/**/bcp.exe', recursive=True))[0]
print("tree = {")
for i in os.listdir("libs/"):
    # Ask bcp which files Boost library <i> depends on.
    out = run( "{0} --list {1}".format(bcp, i) )[0].decode('ascii')
    # Capture the library name out of Windows paths like "libs\<name>\...".
    p = re.compile(r"libs\\(\w*)\\")
    mods = set([])
    for line in out.splitlines():
        if line.startswith( "libs" ) and "docs" not in line:
            if p.search(line) is not None:
                # Only record dependencies that are compiled (have a src dir).
                if os.path.isdir( "libs/{0}/src".format(p.match(line).group(1)) ):
                    mods.add( p.match(line).group(1) )
    if len( mods ) > 0:
        print(" {} = {},".format(convert(i), mods))
        #fm.write( "if zpm.option(\"{0}\") then\n".format(convert(i)) )
        #fm.write( "\taddModules( {0} )\n".format(mods) )
        #fm.write( "end\n\n" )
        # Register the library as an opt-in build setting.
        tree['settings'][convert(i)] = {
            'default': False,
            'reduce': 'anyTrue'
        }
print("}")
print("\n\n\n\n")
print(yaml.dump(tree,default_flow_style=False)) | [
"os.listdir",
"glob.iglob",
"yaml.dump",
"re.compile",
"subprocess.Popen"
] | [((1747, 1766), 'os.listdir', 'os.listdir', (['"""libs/"""'], {}), "('libs/')\n", (1757, 1766), False, 'import os\n'), ((1849, 1881), 're.compile', 're.compile', (['"""libs\\\\\\\\(\\\\w*)\\\\\\\\"""'], {}), "('libs\\\\\\\\(\\\\w*)\\\\\\\\')\n", (1859, 1881), False, 'import re\n'), ((2569, 2610), 'yaml.dump', 'yaml.dump', (['tree'], {'default_flow_style': '(False)'}), '(tree, default_flow_style=False)\n', (2578, 2610), False, 'import yaml\n'), ((1667, 1714), 'glob.iglob', 'glob.iglob', (['"""bin.v2/**/bcp.exe"""'], {'recursive': '(True)'}), "('bin.v2/**/bcp.exe', recursive=True)\n", (1677, 1714), False, 'import glob\n'), ((1329, 1426), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'stdin': 'subprocess.PIPE'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin\n =subprocess.PIPE)\n', (1345, 1426), False, 'import subprocess\n'), ((1576, 1623), 'glob.iglob', 'glob.iglob', (['"""bin.v2/**/bcp.exe"""'], {'recursive': '(True)'}), "('bin.v2/**/bcp.exe', recursive=True)\n", (1586, 1623), False, 'import glob\n')] |
# -*- coding: utf-8 -*-
# Copyright © 2021 <NAME>. All rights reserved.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Any company and/or product names found in this book may be trademarks or
# registered trademarks of their owners.
# API - Application Programming Interface
# REST - Representational State Transfer
# JSON - JavaScript Object Notation
# URL - <scheme>://<host>:<port>/<path>
#
# It might be you need to install 'requests' and 'requests_cache' modules for this section
#
# pip3 install requests
# pip3 install requests_cache
# JSON
# https://www.json.org
import json # the standard-library module for (de)serializing JSON
json_string = """
{
 "key1": "va1",
 "key2": "val2"
}
"""
d = json.loads(json_string) # parse JSON text into a Python dict
print(json_string)
print(d)
print(d['key1'])
#                   ┌───── keys inside JSON will be sorted alphabetically
#                   │
#                   │               ┌── defines indentation length inside serialized JSON
#                   │               │
#                   │               │
print(json.dumps(d, sort_keys=True, indent=2)) # note that format of output string is different
# from one used as input for json.loads - here we have 2 spaces
import requests
import json # already imported above; repeated so this snippet is self-contained
p = requests.get("https://api.datamuse.com/words?rel_rhy=Funny")
print(p.text) # content of http response
print(p.url) # gives you actual url that was used to make a call
             # this is useful for debugging as you can use this url
             # together with 'curl' application
print(p.json()) # shortcut for json.loads(page.text)
print(json.loads(p.text)) # another way of generating a JSON object
print(p.status_code) # status code of your request - take a look below for full list
# of HTTP status codes
# https://en.wikipedia.org/wiki/List_of_HTTP_status_codes
# HTCPCP/1.0 - https://www.ietf.org/rfc/rfc2324.txt
print(p.headers) # header of the response - sometimes, you need to reach
# inside header in order to get some info
json.loads(p.text) # NOTE(review): result is discarded -- parses again purely as a demo
# If you want to have more control over JSON data, you can do it with
# JSONDocker - https://github.com/mkowsiak/JSONDocker
p = requests.get("http://localhost:80/") # if you want to use this code, make sure to run
print(p.text)                              # JSONDocker. You can find it here
print(p.status_code)                       # https://github.com/mkowsiak/JSONDocker
# you can customize JSON content
# by changing index.php file
print(json.dumps(p.json(), sort_keys=True, indent=2))
# caching responses
import requests
import requests_cache # read more here: https://requests-cache.readthedocs.io/en/latest/
requests_cache.clear()
requests_cache.install_cache('memory', backend='memory') # create new cache
for i in range(10):                  # first call will take quite some time
    print("Call {}".format(i))       # note how further calls are way faster
    requests.get("http://localhost:80/slow.php") # get data - it will take 5 sec without cache
requests_cache.clear()
# APIs you might need
#
# https://affiliate.itunes.apple.com/resources/documentation/itunes-store-web-service-search-api/
# https://www.flickr.com/services/api/flickr.photos.search.html
# https://maps.googleapis.com/maps/api/geocode/json
# http://python-data.dr-chuck.net
# https://tastedive.com/read/api
# https://www.omdbapi.com
# http://www.datamuse.com/api/
| [
"json.loads",
"requests_cache.clear",
"json.dumps",
"requests_cache.install_cache",
"requests.get"
] | [((1214, 1237), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (1224, 1237), False, 'import json\n'), ((1806, 1866), 'requests.get', 'requests.get', (['"""https://api.datamuse.com/words?rel_rhy=Funny"""'], {}), "('https://api.datamuse.com/words?rel_rhy=Funny')\n", (1818, 1866), False, 'import requests\n'), ((2985, 3003), 'json.loads', 'json.loads', (['p.text'], {}), '(p.text)\n', (2995, 3003), False, 'import json\n'), ((3134, 3170), 'requests.get', 'requests.get', (['"""http://localhost:80/"""'], {}), "('http://localhost:80/')\n", (3146, 3170), False, 'import requests\n'), ((3771, 3793), 'requests_cache.clear', 'requests_cache.clear', ([], {}), '()\n', (3791, 3793), False, 'import requests_cache\n'), ((3794, 3850), 'requests_cache.install_cache', 'requests_cache.install_cache', (['"""memory"""'], {'backend': '"""memory"""'}), "('memory', backend='memory')\n", (3822, 3850), False, 'import requests_cache\n'), ((4179, 4201), 'requests_cache.clear', 'requests_cache.clear', ([], {}), '()\n', (4199, 4201), False, 'import requests_cache\n'), ((1570, 1609), 'json.dumps', 'json.dumps', (['d'], {'sort_keys': '(True)', 'indent': '(2)'}), '(d, sort_keys=True, indent=2)\n', (1580, 1609), False, 'import json\n'), ((2320, 2338), 'json.loads', 'json.loads', (['p.text'], {}), '(p.text)\n', (2330, 2338), False, 'import json\n'), ((4076, 4120), 'requests.get', 'requests.get', (['"""http://localhost:80/slow.php"""'], {}), "('http://localhost:80/slow.php')\n", (4088, 4120), False, 'import requests\n')] |
#!/usr/bin/python
import RPi.GPIO as GPIO
class MAX31855(object):
    '''Python driver for [MAX38155 Cold-Junction Compensated Thermocouple-to-Digital Converter](http://www.maximintegrated.com/datasheet/index.mvp/id/7273)
    Requires:
    - The [GPIO Library](https://code.google.com/p/raspberry-gpio-python/) (Already on most Raspberry Pi OS builds)
    - A [Raspberry Pi](http://www.raspberrypi.org/)

    The chip is read over a software (bit-banged) SPI bus: this driver drives
    CS and SCLK itself and samples the data pin, so any three GPIOs work.
    '''
    def __init__(self, cs_pin, clock_pin, data_pin, units = "c", board = GPIO.BCM):
        '''Initialize Soft (Bitbang) SPI bus
        Parameters:
        - cs_pin:    Chip Select (CS) / Slave Select (SS) pin (Any GPIO)
        - clock_pin: Clock (SCLK / SCK) pin (Any GPIO)
        - data_pin:  Data input (SO / MOSI) pin (Any GPIO)
        - units:     (optional) unit of measurement to return. ("c" (default) | "k" | "f")
        - board:     (optional) pin numbering method as per RPi.GPIO library (GPIO.BCM (default) | GPIO.BOARD)
        '''
        self.cs_pin = cs_pin        # chip-select GPIO (output, driven low to select)
        self.clock_pin = clock_pin  # serial clock GPIO (output)
        self.data_pin = data_pin    # serial data GPIO (input)
        self.units = units          # "c" | "k" | "f"; selects the to_* converter used by get()/get_rj()
        self.data = None            # last raw 32-bit frame read from the chip
        self.board = board
        # Initialize needed GPIO
        GPIO.setmode(self.board)
        GPIO.setup(self.cs_pin, GPIO.OUT)
        GPIO.setup(self.clock_pin, GPIO.OUT)
        GPIO.setup(self.data_pin, GPIO.IN)
        # Pull chip select high to make chip inactive
        GPIO.output(self.cs_pin, GPIO.HIGH)
    def get(self):
        '''Reads SPI bus and returns current value of thermocouple.'''
        self.read()
        self.checkErrors()
        # getattr dispatches to to_c / to_k / to_f based on self.units.
        return getattr(self, "to_" + self.units)(self.data_to_tc_temperature())
    def get_rj(self):
        '''Reads SPI bus and returns current value of reference junction.'''
        self.read()
        return getattr(self, "to_" + self.units)(self.data_to_rj_temperature())
    def read(self):
        '''Reads 32 bits of the SPI bus & stores as an integer in self.data.'''
        bytesin = 0
        # Select the chip
        GPIO.output(self.cs_pin, GPIO.LOW)
        # Read in 32 bits
        for i in range(32):
            # Most-significant bit first: drop the clock, shift previous bits
            # up, sample the data pin, raise the clock again.
            GPIO.output(self.clock_pin, GPIO.LOW)
            bytesin = bytesin << 1
            if (GPIO.input(self.data_pin)):
                bytesin = bytesin | 1
            GPIO.output(self.clock_pin, GPIO.HIGH)
        # Unselect the chip
        GPIO.output(self.cs_pin, GPIO.HIGH)
        # Save data
        self.data = bytesin
    def checkErrors(self, data_32 = None):
        '''Checks error bits to see if there are any SCV, SCG, or OC faults'''
        if data_32 is None:
            data_32 = self.data
        anyErrors = (data_32 & 0x10000) != 0    # Fault bit, D16
        noConnection = (data_32 & 1) != 0       # OC bit, D0
        shortToGround = (data_32 & 2) != 0      # SCG bit, D1
        shortToVCC = (data_32 & 4) != 0         # SCV bit, D2
        if anyErrors:
            if noConnection:
                raise MAX31855Error("No Connection")
            elif shortToGround:
                raise MAX31855Error("Thermocouple short to ground")
            elif shortToVCC:
                raise MAX31855Error("Thermocouple short to VCC")
            else:
                # Perhaps another SPI device is trying to send data?
                # Did you remember to initialize all other SPI devices?
                raise MAX31855Error("Unknown Error")
    def data_to_tc_temperature(self, data_32 = None):
        '''Takes an integer and returns a thermocouple temperature in celsius.'''
        if data_32 is None:
            data_32 = self.data
        # Thermocouple reading occupies the top 14 bits (D31..D18).
        tc_data = ((data_32 >> 18) & 0x3FFF)
        return self.convert_tc_data(tc_data)
    def data_to_rj_temperature(self, data_32 = None):
        '''Takes an integer and returns a reference junction temperature in celsius.'''
        if data_32 is None:
            data_32 = self.data
        # Reference-junction reading occupies 12 bits (D15..D4).
        rj_data = ((data_32 >> 4) & 0xFFF)
        return self.convert_rj_data(rj_data)
    def convert_tc_data(self, tc_data):
        '''Convert thermocouple data to a useful number (celsius).'''
        if tc_data & 0x2000:
            # two's complement: bit 0x2000 is the sign of the 14-bit value
            without_resolution = ~tc_data & 0x1FFF
            without_resolution += 1
            without_resolution *= -1
        else:
            without_resolution = tc_data & 0x1FFF
        # Each count is 0.25 degrees C.
        return without_resolution * 0.25
    def convert_rj_data(self, rj_data):
        '''Convert reference junction data to a useful number (celsius).'''
        if rj_data & 0x800:
            # two's complement: bit 0x800 is the sign of the 12-bit value
            without_resolution = ~rj_data & 0x7FF
            without_resolution += 1
            without_resolution *= -1
        else:
            without_resolution = rj_data & 0x7FF
        # Each count is 0.0625 degrees C.
        return without_resolution * 0.0625
    def to_c(self, celsius):
        '''Celsius passthrough for generic to_* method.'''
        return celsius
    def to_k(self, celsius):
        '''Convert celsius to kelvin.'''
        return celsius + 273.15
    def to_f(self, celsius):
        '''Convert celsius to fahrenheit.'''
        return celsius * 9.0/5.0 + 32
    def cleanup(self):
        '''Selective GPIO cleanup'''
        # Only release the pins this instance configured; GPIO.cleanup() would
        # reset every channel, including ones owned by other code.
        GPIO.setup(self.cs_pin, GPIO.IN)
        GPIO.setup(self.clock_pin, GPIO.IN)
class MAX31855Error(Exception):
    """Raised when the MAX31855 reports a thermocouple fault."""

    def __init__(self, value):
        # Human-readable description of the fault condition.
        self.value = value

    def __str__(self):
        return "{!r}".format(self.value)
if __name__ == "__main__":
    # Multi-chip example: poll each chip-select pin once per second and print
    # "<thermocouple temp>,<reference junction temp>" CSV lines until Ctrl-C,
    # or until a thermocouple fault stops the loop.
    import time
    cs_pins = [24]
    clock_pin = 25
    data_pin = 18
    units = "c"
    thermocouples = []
    for cs_pin in cs_pins:
        thermocouples.append(MAX31855(cs_pin, clock_pin, data_pin, units))
    running = True
    while(running):
        try:
            for thermocouple in thermocouples:
                rj = thermocouple.get_rj()
                try:
                    tc = thermocouple.get()
                except MAX31855Error as e:
                    # Report the fault text in place of a reading and stop polling.
                    tc = "Error: "+ e.value
                    running = False
                #print("tc: {} and rj: {}".format(tc, rj))
                print("{},{}".format(tc, rj))
            time.sleep(1)
        except KeyboardInterrupt:
            running = False
    for thermocouple in thermocouples:
        # Return the GPIO pins this driver configured to inputs.
        thermocouple.cleanup()
| [
"RPi.GPIO.setup",
"RPi.GPIO.output",
"time.sleep",
"RPi.GPIO.input",
"RPi.GPIO.setmode"
] | [((1189, 1213), 'RPi.GPIO.setmode', 'GPIO.setmode', (['self.board'], {}), '(self.board)\n', (1201, 1213), True, 'import RPi.GPIO as GPIO\n'), ((1222, 1255), 'RPi.GPIO.setup', 'GPIO.setup', (['self.cs_pin', 'GPIO.OUT'], {}), '(self.cs_pin, GPIO.OUT)\n', (1232, 1255), True, 'import RPi.GPIO as GPIO\n'), ((1264, 1300), 'RPi.GPIO.setup', 'GPIO.setup', (['self.clock_pin', 'GPIO.OUT'], {}), '(self.clock_pin, GPIO.OUT)\n', (1274, 1300), True, 'import RPi.GPIO as GPIO\n'), ((1309, 1343), 'RPi.GPIO.setup', 'GPIO.setup', (['self.data_pin', 'GPIO.IN'], {}), '(self.data_pin, GPIO.IN)\n', (1319, 1343), True, 'import RPi.GPIO as GPIO\n'), ((1407, 1442), 'RPi.GPIO.output', 'GPIO.output', (['self.cs_pin', 'GPIO.HIGH'], {}), '(self.cs_pin, GPIO.HIGH)\n', (1418, 1442), True, 'import RPi.GPIO as GPIO\n'), ((2016, 2050), 'RPi.GPIO.output', 'GPIO.output', (['self.cs_pin', 'GPIO.LOW'], {}), '(self.cs_pin, GPIO.LOW)\n', (2027, 2050), True, 'import RPi.GPIO as GPIO\n'), ((2359, 2394), 'RPi.GPIO.output', 'GPIO.output', (['self.cs_pin', 'GPIO.HIGH'], {}), '(self.cs_pin, GPIO.HIGH)\n', (2370, 2394), True, 'import RPi.GPIO as GPIO\n'), ((5133, 5165), 'RPi.GPIO.setup', 'GPIO.setup', (['self.cs_pin', 'GPIO.IN'], {}), '(self.cs_pin, GPIO.IN)\n', (5143, 5165), True, 'import RPi.GPIO as GPIO\n'), ((5174, 5209), 'RPi.GPIO.setup', 'GPIO.setup', (['self.clock_pin', 'GPIO.IN'], {}), '(self.clock_pin, GPIO.IN)\n', (5184, 5209), True, 'import RPi.GPIO as GPIO\n'), ((2117, 2154), 'RPi.GPIO.output', 'GPIO.output', (['self.clock_pin', 'GPIO.LOW'], {}), '(self.clock_pin, GPIO.LOW)\n', (2128, 2154), True, 'import RPi.GPIO as GPIO\n'), ((2206, 2231), 'RPi.GPIO.input', 'GPIO.input', (['self.data_pin'], {}), '(self.data_pin)\n', (2216, 2231), True, 'import RPi.GPIO as GPIO\n'), ((2284, 2322), 'RPi.GPIO.output', 'GPIO.output', (['self.clock_pin', 'GPIO.HIGH'], {}), '(self.clock_pin, GPIO.HIGH)\n', (2295, 2322), True, 'import RPi.GPIO as GPIO\n'), ((6074, 6087), 'time.sleep', 'time.sleep', (['(1)'], {}), 
'(1)\n', (6084, 6087), False, 'import time\n')] |
from os import listdir
from os.path import isfile
from PIL import Image
from tqdm import tqdm
import numpy as np
import imgaug.augmenters as iaa
import os
import random
from os.path import join
import matplotlib.pyplot as plt
# Degrade a random sample of dataset images with heavy JPEG compression and
# show each original next to its augmented version.
DATA_DIR = 'DATA DIR'
os.chdir(DATA_DIR)
IMAGE_DIR = join(DATA_DIR, 'dataset\\PascalVOC-OG-flipped\\JPEGImages')
ANN_DIR = join(DATA_DIR, 'dataset\\PascalVOC-OG-flipped\\Annotations')
NEW_IMAGE_DIR = join(DATA_DIR, 'dataset\\PascalVOC-OG-all\\JPEGImages')
NEW_ANN_DIR = join(DATA_DIR, 'dataset\\PascalVOC-OG-all\\Annotations')
NEW_IMAGE_SETS_DIR = join(DATA_DIR, 'dataset\\PascalVOC-OG-all\\ImageSets\\Main')
# Maximum number of images to process.
MAX = 2
# Truncate (or create) the image-set listing file.
with open(join(NEW_IMAGE_SETS_DIR, "pipe-augmented-degrade.txt"), 'w+') as f:
    pass
image_files = [f for f in listdir(IMAGE_DIR) if isfile(join(IMAGE_DIR, f))]
# BUG FIX: the original sampled the list twice; a single full-length sample
# already returns the files in random order, so shuffle once and keep MAX.
shuffled_image_files = random.sample(image_files, len(image_files))[:MAX]
seq = iaa.Sequential([
    iaa.JpegCompression(compression=(99, 99))
])
for image in tqdm(shuffled_image_files):
    # listdir never yields empty names, but keep the guard from the original.
    if len(image) > 0:
        # Open the image and convert to a uint8 array, e.g. (H, W, 3).
        im = Image.open(join(IMAGE_DIR, image))
        im = np.asarray(im).astype(np.uint8)
        # iaa expects a 4D batch, so add a leading batch axis -> (1, H, W, 3).
        im_expand = np.expand_dims(im, 0)
        # Augment the batch.
        augmented_image_array = seq(images=im_expand)
        # Drop the batch axis again -> (H, W, 3).
        augmented_image_array = np.squeeze(augmented_image_array, axis=0)
        # Back to PIL images.
        augmented_image = Image.fromarray(augmented_image_array)
        im = Image.fromarray(im)
        im.save('im1.jpeg')
        augmented_image.save('im2.jpeg')
        # Plot original and augmented side by side.
        fig, ax = plt.subplots(nrows=1, ncols=2)
        plt.subplot(1, 2, 1)
        plt.imshow(im)
        plt.subplot(1, 2, 2)
        plt.imshow(augmented_image)
        plt.show()
| [
"matplotlib.pyplot.imshow",
"PIL.Image.fromarray",
"os.listdir",
"tqdm.tqdm",
"os.path.join",
"numpy.asarray",
"numpy.squeeze",
"os.chdir",
"numpy.expand_dims",
"imgaug.augmenters.JpegCompression",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((265, 283), 'os.chdir', 'os.chdir', (['DATA_DIR'], {}), '(DATA_DIR)\n', (273, 283), False, 'import os\n'), ((301, 360), 'os.path.join', 'join', (['DATA_DIR', '"""dataset\\\\PascalVOC-OG-flipped\\\\JPEGImages"""'], {}), "(DATA_DIR, 'dataset\\\\PascalVOC-OG-flipped\\\\JPEGImages')\n", (305, 360), False, 'from os.path import join\n'), ((372, 432), 'os.path.join', 'join', (['DATA_DIR', '"""dataset\\\\PascalVOC-OG-flipped\\\\Annotations"""'], {}), "(DATA_DIR, 'dataset\\\\PascalVOC-OG-flipped\\\\Annotations')\n", (376, 432), False, 'from os.path import join\n'), ((452, 507), 'os.path.join', 'join', (['DATA_DIR', '"""dataset\\\\PascalVOC-OG-all\\\\JPEGImages"""'], {}), "(DATA_DIR, 'dataset\\\\PascalVOC-OG-all\\\\JPEGImages')\n", (456, 507), False, 'from os.path import join\n'), ((523, 579), 'os.path.join', 'join', (['DATA_DIR', '"""dataset\\\\PascalVOC-OG-all\\\\Annotations"""'], {}), "(DATA_DIR, 'dataset\\\\PascalVOC-OG-all\\\\Annotations')\n", (527, 579), False, 'from os.path import join\n'), ((604, 664), 'os.path.join', 'join', (['DATA_DIR', '"""dataset\\\\PascalVOC-OG-all\\\\ImageSets\\\\Main"""'], {}), "(DATA_DIR, 'dataset\\\\PascalVOC-OG-all\\\\ImageSets\\\\Main')\n", (608, 664), False, 'from os.path import join\n'), ((1095, 1121), 'tqdm.tqdm', 'tqdm', (['shuffled_image_files'], {}), '(shuffled_image_files)\n', (1099, 1121), False, 'from tqdm import tqdm\n'), ((689, 744), 'os.path.join', 'join', (['NEW_IMAGE_SETS_DIR', 'f"""pipe-augmented-degrade.txt"""'], {}), "(NEW_IMAGE_SETS_DIR, f'pipe-augmented-degrade.txt')\n", (693, 744), False, 'from os.path import join\n'), ((797, 815), 'os.listdir', 'listdir', (['IMAGE_DIR'], {}), '(IMAGE_DIR)\n', (804, 815), False, 'from os import listdir\n'), ((1031, 1072), 'imgaug.augmenters.JpegCompression', 'iaa.JpegCompression', ([], {'compression': '(99, 99)'}), '(compression=(99, 99))\n', (1050, 1072), True, 'import imgaug.augmenters as iaa\n'), ((1462, 1483), 'numpy.expand_dims', 'np.expand_dims', (['im', '(0)'], {}), '(im, 
0)\n', (1476, 1483), True, 'import numpy as np\n'), ((1702, 1743), 'numpy.squeeze', 'np.squeeze', (['augmented_image_array'], {'axis': '(0)'}), '(augmented_image_array, axis=0)\n', (1712, 1743), True, 'import numpy as np\n'), ((1810, 1848), 'PIL.Image.fromarray', 'Image.fromarray', (['augmented_image_array'], {}), '(augmented_image_array)\n', (1825, 1848), False, 'from PIL import Image\n'), ((1918, 1937), 'PIL.Image.fromarray', 'Image.fromarray', (['im'], {}), '(im)\n', (1933, 1937), False, 'from PIL import Image\n'), ((2028, 2058), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(2)'}), '(nrows=1, ncols=2)\n', (2040, 2058), True, 'import matplotlib.pyplot as plt\n'), ((2090, 2110), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (2101, 2110), True, 'import matplotlib.pyplot as plt\n'), ((2120, 2134), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (2130, 2134), True, 'import matplotlib.pyplot as plt\n'), ((2146, 2166), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (2157, 2166), True, 'import matplotlib.pyplot as plt\n'), ((2176, 2203), 'matplotlib.pyplot.imshow', 'plt.imshow', (['augmented_image'], {}), '(augmented_image)\n', (2186, 2203), True, 'import matplotlib.pyplot as plt\n'), ((2213, 2223), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2221, 2223), True, 'import matplotlib.pyplot as plt\n'), ((826, 844), 'os.path.join', 'join', (['IMAGE_DIR', 'f'], {}), '(IMAGE_DIR, f)\n', (830, 844), False, 'from os.path import join\n'), ((1195, 1217), 'os.path.join', 'join', (['IMAGE_DIR', 'image'], {}), '(IMAGE_DIR, image)\n', (1199, 1217), False, 'from os.path import join\n'), ((1297, 1311), 'numpy.asarray', 'np.asarray', (['im'], {}), '(im)\n', (1307, 1311), True, 'import numpy as np\n')] |
"""Command line interface for chalice.
Contains commands for deploying chalice.
"""
import logging
import os
import platform
import sys
import tempfile
import shutil
import traceback
import functools
import json
import botocore.exceptions
import click
from typing import Dict, Any, Optional # noqa
from chalice import __version__ as chalice_version
from chalice.app import Chalice # noqa
from chalice.awsclient import TypedAWSClient
from chalice.awsclient import ReadTimeout
from chalice.cli.factory import CLIFactory
from chalice.cli.factory import NoSuchFunctionError
from chalice.config import Config # noqa
from chalice.logs import display_logs
from chalice.utils import create_zip_file
from chalice.deploy.validate import validate_routes, validate_python_version
from chalice.deploy.validate import ExperimentalFeatureError
from chalice.utils import getting_started_prompt, UI, serialize_to_json
from chalice.constants import CONFIG_VERSION, TEMPLATE_APP, GITIGNORE
from chalice.constants import DEFAULT_STAGE_NAME
from chalice.constants import DEFAULT_APIGATEWAY_STAGE_NAME
from chalice.local import LocalDevServer # noqa
from chalice.constants import DEFAULT_HANDLER_NAME
from chalice.invoke import UnhandledLambdaError
from chalice.deploy.swagger import TemplatedSwaggerGenerator
from chalice.deploy.planner import PlanEncoder
def _configure_logging(level, format_string=None):
    # type: (int, Optional[str]) -> None
    """Attach a stream handler at ``level`` to the root logger.

    ``logging.StreamHandler()`` with no argument writes to stderr.
    """
    if format_string is None:
        format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
    root = logging.getLogger('')
    root.setLevel(level)
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(logging.Formatter(format_string))
    root.addHandler(stream_handler)
def create_new_project_skeleton(project_name, profile=None):
    # type: (str, Optional[str]) -> None
    """Write the starter files for a brand-new chalice project.

    Creates ``.chalice/config.json`` with a single default stage, an empty
    ``requirements.txt``, a templated ``app.py`` and a ``.gitignore``.
    """
    chalice_dir = os.path.join(project_name, '.chalice')
    os.makedirs(chalice_dir)
    stage_config = {
        'version': CONFIG_VERSION,
        'app_name': project_name,
        'stages': {
            DEFAULT_STAGE_NAME: {
                'api_gateway_stage': DEFAULT_APIGATEWAY_STAGE_NAME,
            }
        }
    }
    if profile is not None:
        stage_config['profile'] = profile
    with open(os.path.join(chalice_dir, 'config.json'), 'w') as f:
        f.write(serialize_to_json(stage_config))
    # requirements.txt starts out empty on purpose.
    with open(os.path.join(project_name, 'requirements.txt'), 'w'):
        pass
    with open(os.path.join(project_name, 'app.py'), 'w') as f:
        f.write(TEMPLATE_APP % project_name)
    with open(os.path.join(project_name, '.gitignore'), 'w') as f:
        f.write(GITIGNORE)
def get_system_info():
    # type: () -> str
    """Return a short "python x.y.z, <system> <release>" description."""
    major, minor, micro = sys.version_info[:3]
    python_info = "python {}.{}.{}".format(major, minor, micro)
    platform_info = "{} {}".format(platform.system().lower(),
                                   platform.release())
    return "{}, {}".format(python_info, platform_info)
@click.group()
@click.version_option(version=chalice_version,
                      message='%(prog)s %(version)s, {}'
                      .format(get_system_info()))
@click.option('--project-dir',
              help='The project directory path (absolute or relative).'
              'Defaults to CWD')
@click.option('--debug/--no-debug',
              default=False,
              help='Print debug logs to stderr.')
@click.pass_context
def cli(ctx, project_dir, debug=False):
    # type: (click.Context, str, bool) -> None
    # Root command group: resolve the project directory, optionally enable
    # debug logging, and stash shared state (project dir, debug flag, a
    # CLIFactory) on the click context for every subcommand.
    # (No docstring on purpose: click would surface it as the group help.)
    if project_dir is None:
        project_dir = os.getcwd()
    elif not os.path.isabs(project_dir):
        project_dir = os.path.abspath(project_dir)
    if debug is True:
        _configure_logging(logging.DEBUG)
    ctx.obj['project_dir'] = project_dir
    ctx.obj['debug'] = debug
    ctx.obj['factory'] = CLIFactory(project_dir, debug, environ=os.environ)
    # All subcommands run with CWD set to the project directory.
    os.chdir(project_dir)
@cli.command()
@click.option('--host', default='127.0.0.1')
@click.option('--port', default=8000, type=click.INT)
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage for the local server to use.')
@click.option('--autoreload/--no-autoreload',
              default=True,
              help='Automatically restart server when code changes.')
@click.pass_context
def local(ctx, host='127.0.0.1', port=8000, stage=DEFAULT_STAGE_NAME,
          autoreload=True):
    # type: (click.Context, str, int, str, bool) -> None
    # Run a local HTTP dev server for the app, optionally restarting the
    # worker process whenever a file in the project directory changes.
    factory = ctx.obj['factory']  # type: CLIFactory
    from chalice.cli import reloader
    # We don't create the server here because that will bind the
    # socket and we only want to do this in the worker process.
    server_factory = functools.partial(
        create_local_server, factory, host, port, stage)
    # When running `chalice local`, a stdout logger is configured
    # so you'll see the same stdout logging as you would when
    # running in lambda. This is configuring the root logger.
    # The app-specific logger (app.log) will still continue
    # to work.
    logging.basicConfig(
        stream=sys.stdout, level=logging.INFO, format='%(message)s')
    if autoreload:
        project_dir = factory.create_config_obj(
            chalice_stage_name=stage).project_dir
        rc = reloader.run_with_reloader(
            server_factory, os.environ, project_dir)
        # Click doesn't sys.exit() with the RC of this function. The
        # recommended way to do this is to use sys.exit() directly,
        # see: https://github.com/pallets/click/issues/747
        sys.exit(rc)
    # No autoreload: serve directly in this process (blocks forever).
    run_local_server(factory, host, port, stage)
def create_local_server(factory, host, port, stage):
    # type: (CLIFactory, str, int, str) -> LocalDevServer
    """Build (but do not start) a LocalDevServer for the given stage."""
    config = factory.create_config_obj(chalice_stage_name=stage)
    app_obj = config.chalice_app
    # Validate up front: `chalice deploy` would reject bad routes, so
    # there is no point in testing them locally either.
    validate_routes(app_obj.routes)
    return factory.create_local_server(app_obj, config, host, port)
def run_local_server(factory, host, port, stage):
    # type: (CLIFactory, str, int, str) -> None
    """Create a local dev server and block serving requests forever."""
    create_local_server(factory, host, port, stage).serve_forever()
@cli.command()
@click.option('--autogen-policy/--no-autogen-policy',
              default=None,
              help='Automatically generate IAM policy for app code.')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--api-gateway-stage',
              help='Name of the API gateway stage to deploy to.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.option('--connection-timeout',
              type=int,
              help=('Overrides the default botocore connection '
                    'timeout.'))
@click.pass_context
def deploy(ctx, autogen_policy, profile, api_gateway_stage, stage,
           connection_timeout):
    # type: (click.Context, Optional[bool], str, str, str, int) -> None
    # Deploy the app with the default deployer and print a report of the
    # deployed resources.
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(
        chalice_stage_name=stage, autogen_policy=autogen_policy,
        api_gateway_stage=api_gateway_stage,
    )
    session = factory.create_botocore_session(
        connection_timeout=connection_timeout)
    ui = UI()
    d = factory.create_default_deployer(session=session,
                                       config=config,
                                       ui=ui)
    deployed_values = d.deploy(config, chalice_stage_name=stage)
    reporter = factory.create_deployment_reporter(ui=ui)
    reporter.display_report(deployed_values)
@cli.command()
@click.option('--autogen-policy/--no-autogen-policy',
              default=None,
              help='Automatically generate IAM policy for app code.')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--api-gateway-stage',
              help='Name of the API gateway stage to deploy to.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.option('--connection-timeout',
              type=int,
              help=('Overrides the default botocore connection '
                    'timeout.'))
@click.pass_context
def tbtdeploy(ctx, autogen_policy, profile, api_gateway_stage, stage,
              connection_timeout):
    # type: (click.Context, Optional[bool], str, str, str, int) -> None
    # Same flow as `deploy` above, but builds the deployer with
    # factory.create_tbt_deployer instead of create_default_deployer.
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(
        chalice_stage_name=stage, autogen_policy=autogen_policy,
        api_gateway_stage=api_gateway_stage,
    )
    session = factory.create_botocore_session(
        connection_timeout=connection_timeout)
    ui = UI()
    d = factory.create_tbt_deployer(session=session,
                                   config=config,
                                   ui=ui)
    deployed_values = d.deploy(config, chalice_stage_name=stage)
    reporter = factory.create_deployment_reporter(ui=ui)
    reporter.display_report(deployed_values)
@cli.command('invoke')
@click.option('-n', '--name', metavar='NAME', required=True,
              help=('The name of the function to invoke. '
                    'This is the logical name of the function. If the '
                    'function is decorated by app.route use the name '
                    'api_handler instead.'))
@click.option('--profile', metavar='PROFILE',
              help='Override profile at deploy time.')
@click.option('--stage', metavar='STAGE', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.pass_context
def invoke(ctx, name, profile, stage):
    # type: (click.Context, str, str, str) -> None
    """Invoke the deployed lambda function NAME.
    Reads payload from STDIN.
    """
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    try:
        invoke_handler = factory.create_lambda_invoke_handler(name, stage)
        payload = factory.create_stdin_reader().read()
        invoke_handler.invoke(payload)
    except NoSuchFunctionError as e:
        # Exit code 2 distinguishes "bad function name" from lambda errors.
        err = click.ClickException(
            "could not find a lambda function named %s." % e.name)
        err.exit_code = 2
        raise err
    except botocore.exceptions.ClientError as e:
        # Surface the AWS-side error code and message to the user.
        error = e.response['Error']
        err = click.ClickException(
            "got '%s' exception back from Lambda\n%s"
            % (error['Code'], error['Message']))
        err.exit_code = 1
        raise err
    except UnhandledLambdaError:
        err = click.ClickException(
            "Unhandled exception in Lambda function, details above.")
        err.exit_code = 1
        raise err
    except ReadTimeout as e:
        # NOTE(review): relies on ReadTimeout exposing a `.message`
        # attribute -- confirm; `str(e)` would be the safer spelling.
        err = click.ClickException(e.message)
        err.exit_code = 1
        raise err
@cli.command('delete')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to delete.')
@click.pass_context
def delete(ctx, profile, stage):
    # type: (click.Context, str, str) -> None
    # Tear down the resources deployed for the given chalice stage.
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(chalice_stage_name=stage)
    deployer = factory.create_deletion_deployer(
        session=factory.create_botocore_session(), ui=UI())
    deployer.deploy(config, chalice_stage_name=stage)
@cli.command()
@click.option('--num-entries', default=None, type=int,
              help='Max number of log entries to show.')
@click.option('--include-lambda-messages/--no-include-lambda-messages',
              default=False,
              help='Controls whether or not lambda log messages are included.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to get logs for.')
@click.option('-n', '--name',
              help='The name of the lambda function to retrieve logs from.',
              default=DEFAULT_HANDLER_NAME)
@click.option('--profile', help='The profile to use for fetching logs.')
@click.pass_context
def logs(ctx, num_entries, include_lambda_messages, stage, name, profile):
    # type: (click.Context, int, bool, str, str, str) -> None
    # Fetch and display the logs for one deployed lambda function.
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(stage, False)
    deployed = config.deployed_resources(stage)
    # NOTE(review): when `name` is not among the deployed resources this
    # command silently prints nothing -- consider emitting an error.
    if name in deployed.resource_names():
        lambda_arn = deployed.resource_values(name)['lambda_arn']
        session = factory.create_botocore_session()
        retriever = factory.create_log_retriever(
            session, lambda_arn)
        display_logs(retriever, num_entries, include_lambda_messages,
                     sys.stdout)
@cli.command('gen-policy')
@click.option('--filename',
              help='The filename to analyze. Otherwise app.py is assumed.')
@click.pass_context
def gen_policy(ctx, filename):
    # type: (click.Context, str) -> None
    # Print an auto-generated IAM policy derived from the app source code.
    from chalice import policy
    if filename is None:
        filename = os.path.join(ctx.obj['project_dir'], 'app.py')
    if not os.path.isfile(filename):
        click.echo("App file does not exist: %s" % filename, err=True)
        raise click.Abort()
    with open(filename) as f:
        source = f.read()
    generated = policy.policy_from_source_code(source)
    click.echo(serialize_to_json(generated))
@cli.command('new-project')
@click.argument('project_name', required=False)
@click.option('--profile', required=False)
def new_project(project_name, profile):
    # type: (str, str) -> None
    # Create a fresh project skeleton; prompts for a name when omitted.
    if project_name is None:
        project_name = getting_started_prompt(click)
    if os.path.isdir(project_name):
        click.echo("Directory already exists: %s" % project_name, err=True)
        raise click.Abort()
    create_new_project_skeleton(project_name, profile)
    # NOTE(review): presumably flags unsupported Python versions -- see
    # chalice.deploy.validate.validate_python_version for the behavior.
    validate_python_version(Config.create())
@cli.command('url')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to get the deployed URL for.')
@click.pass_context
def url(ctx, stage):
    # type: (click.Context, str) -> None
    # Print the deployed Rest API URL, or fail with exit code 2.
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    deployed = config.deployed_resources(stage)
    if deployed is None or 'rest_api' not in deployed.resource_names():
        err = click.ClickException(
            "Could not find a record of a Rest API in chalice stage: '%s'"
            % stage)
        err.exit_code = 2
        raise err
    click.echo(deployed.resource_values('rest_api')['rest_api_url'])
@cli.command('generate-sdk')
@click.option('--sdk-type', default='javascript',
              type=click.Choice(['javascript']))
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to generate an SDK for.')
@click.argument('outdir')
@click.pass_context
def generate_sdk(ctx, sdk_type, stage, outdir):
    # type: (click.Context, str, str, str) -> None
    # Download a client SDK for the deployed Rest API into `outdir`.
    # Only 'javascript' is accepted by the option's Choice list.
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    session = factory.create_botocore_session()
    client = TypedAWSClient(session)
    deployed = config.deployed_resources(stage)
    if deployed is not None and 'rest_api' in deployed.resource_names():
        rest_api_id = deployed.resource_values('rest_api')['rest_api_id']
        api_gateway_stage = config.api_gateway_stage
        client.download_sdk(rest_api_id, outdir,
                            api_gateway_stage=api_gateway_stage,
                            sdk_type=sdk_type)
    else:
        click.echo("Could not find API ID, has this application "
                   "been deployed?", err=True)
        raise click.Abort()
@cli.command('generate-models')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help="Chalice Stage for which to generate models.")
@click.pass_context
def generate_models(ctx, stage):
    # type: (click.Context, str) -> None
    """Generate a model from Chalice routes.
    Currently only supports generating Swagger 2.0 models.
    """
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    if not config.chalice_app.routes:
        click.echo('No REST API found to generate model from.')
        raise click.Abort()
    swagger_generator = TemplatedSwaggerGenerator()
    model = swagger_generator.generate_swagger(
        config.chalice_app,
    )
    ui = UI()
    # NOTE(review): PlanEncoder presumably serializes planner-specific
    # values in the swagger doc -- confirm in chalice.deploy.planner.
    ui.write(json.dumps(model, indent=4, cls=PlanEncoder))
    ui.write('\n')
@cli.command('package')
@click.option('--pkg-format', default='cloudformation',
              help=('Specify the provisioning engine to use for '
                    'template output. Chalice supports both '
                    'CloudFormation and Terraform. Default '
                    'is CloudFormation.'),
              type=click.Choice(['cloudformation', 'terraform']))
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help="Chalice Stage to package.")
@click.option('--single-file', is_flag=True,
              default=False,
              help=("Create a single packaged file. "
                    "By default, the 'out' argument "
                    "specifies a directory in which the "
                    "package assets will be placed. If "
                    "this argument is specified, a single "
                    "zip file will be created instead. CloudFormation Only."))
@click.option('--merge-template',
              help=('Specify a JSON template to be merged '
                    'into the generated template. This is useful '
                    'for adding resources to a Chalice template or '
                    'modify values in the template. CloudFormation Only.'))
@click.argument('out')
@click.pass_context
def package(ctx, single_file, stage, merge_template,
            out, pkg_format):
    # type: (click.Context, bool, str, str, str, str) -> None
    # Package the app into a deployable artifact (CloudFormation or
    # Terraform template plus application bundle).
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    packager = factory.create_app_packager(config, pkg_format, merge_template)
    # merge-template and single-file only apply to the CloudFormation format.
    if pkg_format == 'terraform' and (merge_template or single_file):
        click.echo((
            "Terraform format does not support "
            "merge-template or single-file options"))
        raise click.Abort()
    if single_file:
        # Package into a temp dir, zip it to `out`, and always clean up.
        dirname = tempfile.mkdtemp()
        try:
            packager.package_app(config, dirname, stage)
            create_zip_file(source_dir=dirname, outfile=out)
        finally:
            shutil.rmtree(dirname)
    else:
        packager.package_app(config, out, stage)
@cli.command('generate-pipeline')
@click.option('-i', '--codebuild-image',
              help=("Specify default codebuild image to use. "
                    "This option must be provided when using a python "
                    "version besides 2.7."))
@click.option('-s', '--source', default='codecommit',
              type=click.Choice(['codecommit', 'github']),
              help=("Specify the input source. The default value of "
                    "'codecommit' will create a CodeCommit repository "
                    "for you. The 'github' value allows you to "
                    "reference an existing GitHub repository."))
@click.option('-b', '--buildspec-file',
              help=("Specify path for buildspec.yml file. "
                    "By default, the build steps are included in the "
                    "generated cloudformation template. If this option "
                    "is provided, a buildspec.yml will be generated "
                    "as a separate file and not included in the cfn "
                    "template. This allows you to make changes to how "
                    "the project is built without having to redeploy "
                    "a CloudFormation template. This file should be "
                    "named 'buildspec.yml' and placed in the root "
                    "directory of your app."))
@click.argument('filename')
@click.pass_context
def generate_pipeline(ctx, codebuild_image, source, buildspec_file, filename):
    # type: (click.Context, str, str, str, str) -> None
    """Generate a cloudformation template for a starter CD pipeline.
    This command will write a starter cloudformation template to
    the filename you provide.  It contains a CodeCommit repo,
    a CodeBuild stage for packaging your chalice app, and a
    CodePipeline stage to deploy your application using cloudformation.
    You can use any AWS SDK or the AWS CLI to deploy this stack.
    Here's an example using the AWS CLI:
        \b
        $ chalice generate-pipeline pipeline.json
        $ aws cloudformation deploy --stack-name mystack \b
          --template-file pipeline.json --capabilities CAPABILITY_IAM
    """
    from chalice import pipeline
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj()
    p = pipeline.CreatePipelineTemplate()
    params = pipeline.PipelineParameters(
        app_name=config.app_name,
        lambda_python_version=config.lambda_python_version,
        codebuild_image=codebuild_image,
        code_source=source,
    )
    output = p.create_template(params)
    if buildspec_file:
        # Pull the build steps out of the template into a standalone
        # buildspec.yml so they can change without redeploying the stack.
        extractor = pipeline.BuildSpecExtractor()
        buildspec_contents = extractor.extract_buildspec(output)
        with open(buildspec_file, 'w') as f:
            f.write(buildspec_contents)
    with open(filename, 'w') as f:
        f.write(serialize_to_json(output))
def main():
    # type: () -> int
    """CLI entry point; returns a process exit code (0 on success, 2 on error)."""
    # click's dynamic attrs will allow us to pass through
    # 'obj' via the context object, so we're ignoring
    # these error messages from pylint because we know it's ok.
    # pylint: disable=unexpected-keyword-arg,no-value-for-parameter
    try:
        return cli(obj={})
    except botocore.exceptions.NoRegionError:
        # BUG FIX: the message previously read "in our ~/.aws/config file";
        # the file belongs to the user.
        click.echo("No region configured. "
                   "Either export the AWS_DEFAULT_REGION "
                   "environment variable or set the "
                   "region value in your ~/.aws/config file.", err=True)
        return 2
    except ExperimentalFeatureError as e:
        click.echo(str(e))
        return 2
    except Exception:
        # Top-level catch-all boundary: print the traceback to stderr
        # ourselves and exit nonzero instead of letting the interpreter dump.
        click.echo(traceback.format_exc(), err=True)
        return 2
| [
"logging.getLogger",
"click.Choice",
"logging.StreamHandler",
"platform.release",
"click.echo",
"chalice.pipeline.PipelineParameters",
"click.ClickException",
"chalice.utils.serialize_to_json",
"sys.exit",
"chalice.cli.factory.CLIFactory",
"chalice.pipeline.CreatePipelineTemplate",
"chalice.aw... | [((3150, 3163), 'click.group', 'click.group', ([], {}), '()\n', (3161, 3163), False, 'import click\n'), ((3319, 3427), 'click.option', 'click.option', (['"""--project-dir"""'], {'help': '"""The project directory path (absolute or relative).Defaults to CWD"""'}), "('--project-dir', help=\n 'The project directory path (absolute or relative).Defaults to CWD')\n", (3331, 3427), False, 'import click\n'), ((3460, 3550), 'click.option', 'click.option', (['"""--debug/--no-debug"""'], {'default': '(False)', 'help': '"""Print debug logs to stderr."""'}), "('--debug/--no-debug', default=False, help=\n 'Print debug logs to stderr.')\n", (3472, 3550), False, 'import click\n'), ((4089, 4132), 'click.option', 'click.option', (['"""--host"""'], {'default': '"""127.0.0.1"""'}), "('--host', default='127.0.0.1')\n", (4101, 4132), False, 'import click\n'), ((4134, 4186), 'click.option', 'click.option', (['"""--port"""'], {'default': '(8000)', 'type': 'click.INT'}), "('--port', default=8000, type=click.INT)\n", (4146, 4186), False, 'import click\n'), ((4188, 4307), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage for the local server to use."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage for the local server to use.')\n", (4200, 4307), False, 'import click\n'), ((4318, 4437), 'click.option', 'click.option', (['"""--autoreload/--no-autoreload"""'], {'default': '(True)', 'help': '"""Automatically restart server when code changes."""'}), "('--autoreload/--no-autoreload', default=True, help=\n 'Automatically restart server when code changes.')\n", (4330, 4437), False, 'import click\n'), ((6496, 6623), 'click.option', 'click.option', (['"""--autogen-policy/--no-autogen-policy"""'], {'default': 'None', 'help': '"""Automatically generate IAM policy for app code."""'}), "('--autogen-policy/--no-autogen-policy', default=None, help=\n 'Automatically generate IAM policy 
for app code.')\n", (6508, 6623), False, 'import click\n'), ((6648, 6714), 'click.option', 'click.option', (['"""--profile"""'], {'help': '"""Override profile at deploy time."""'}), "('--profile', help='Override profile at deploy time.')\n", (6660, 6714), False, 'import click\n'), ((6716, 6808), 'click.option', 'click.option', (['"""--api-gateway-stage"""'], {'help': '"""Name of the API gateway stage to deploy to."""'}), "('--api-gateway-stage', help=\n 'Name of the API gateway stage to deploy to.')\n", (6728, 6808), False, 'import click\n'), ((6819, 7009), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to deploy to. Specifying a new chalice stage will create an entirely new set of AWS resources."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to deploy to. Specifying a new chalice stage will create an entirely new set of AWS resources.'\n )\n", (6831, 7009), False, 'import click\n'), ((7063, 7173), 'click.option', 'click.option', (['"""--connection-timeout"""'], {'type': 'int', 'help': '"""Overrides the default botocore connection timeout."""'}), "('--connection-timeout', type=int, help=\n 'Overrides the default botocore connection timeout.')\n", (7075, 7173), False, 'import click\n'), ((8104, 8231), 'click.option', 'click.option', (['"""--autogen-policy/--no-autogen-policy"""'], {'default': 'None', 'help': '"""Automatically generate IAM policy for app code."""'}), "('--autogen-policy/--no-autogen-policy', default=None, help=\n 'Automatically generate IAM policy for app code.')\n", (8116, 8231), False, 'import click\n'), ((8256, 8322), 'click.option', 'click.option', (['"""--profile"""'], {'help': '"""Override profile at deploy time."""'}), "('--profile', help='Override profile at deploy time.')\n", (8268, 8322), False, 'import click\n'), ((8324, 8416), 'click.option', 'click.option', (['"""--api-gateway-stage"""'], {'help': '"""Name of the API gateway 
stage to deploy to."""'}), "('--api-gateway-stage', help=\n 'Name of the API gateway stage to deploy to.')\n", (8336, 8416), False, 'import click\n'), ((8427, 8617), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to deploy to. Specifying a new chalice stage will create an entirely new set of AWS resources."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to deploy to. Specifying a new chalice stage will create an entirely new set of AWS resources.'\n )\n", (8439, 8617), False, 'import click\n'), ((8671, 8781), 'click.option', 'click.option', (['"""--connection-timeout"""'], {'type': 'int', 'help': '"""Overrides the default botocore connection timeout."""'}), "('--connection-timeout', type=int, help=\n 'Overrides the default botocore connection timeout.')\n", (8683, 8781), False, 'import click\n'), ((9720, 9951), 'click.option', 'click.option', (['"""-n"""', '"""--name"""'], {'metavar': '"""NAME"""', 'required': '(True)', 'help': '"""The name of the function to invoke. This is the logical name of the function. If the function is decorated by app.route use the name api_handler instead."""'}), "('-n', '--name', metavar='NAME', required=True, help=\n 'The name of the function to invoke. This is the logical name of the function. If the function is decorated by app.route use the name api_handler instead.'\n )\n", (9732, 9951), False, 'import click\n'), ((10028, 10118), 'click.option', 'click.option', (['"""--profile"""'], {'metavar': '"""PROFILE"""', 'help': '"""Override profile at deploy time."""'}), "('--profile', metavar='PROFILE', help=\n 'Override profile at deploy time.')\n", (10040, 10118), False, 'import click\n'), ((10129, 10336), 'click.option', 'click.option', (['"""--stage"""'], {'metavar': '"""STAGE"""', 'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to deploy to. 
Specifying a new chalice stage will create an entirely new set of AWS resources."""'}), "('--stage', metavar='STAGE', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to deploy to. Specifying a new chalice stage will create an entirely new set of AWS resources.'\n )\n", (10141, 10336), False, 'import click\n'), ((11629, 11695), 'click.option', 'click.option', (['"""--profile"""'], {'help': '"""Override profile at deploy time."""'}), "('--profile', help='Override profile at deploy time.')\n", (11641, 11695), False, 'import click\n'), ((11697, 11798), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to delete."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to delete.')\n", (11709, 11798), False, 'import click\n'), ((12235, 12336), 'click.option', 'click.option', (['"""--num-entries"""'], {'default': 'None', 'type': 'int', 'help': '"""Max number of log entries to show."""'}), "('--num-entries', default=None, type=int, help=\n 'Max number of log entries to show.')\n", (12247, 12336), False, 'import click\n'), ((12347, 12507), 'click.option', 'click.option', (['"""--include-lambda-messages/--no-include-lambda-messages"""'], {'default': '(False)', 'help': '"""Controls whether or not lambda log messages are included."""'}), "('--include-lambda-messages/--no-include-lambda-messages',\n default=False, help=\n 'Controls whether or not lambda log messages are included.')\n", (12359, 12507), False, 'import click\n'), ((12528, 12635), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to get logs for."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to get logs for.')\n", (12540, 12635), False, 'import click\n'), ((12646, 12777), 'click.option', 'click.option', (['"""-n"""', '"""--name"""'], {'help': '"""The name of the lambda function to retrieve 
logs from."""', 'default': 'DEFAULT_HANDLER_NAME'}), "('-n', '--name', help=\n 'The name of the lambda function to retrieve logs from.', default=\n DEFAULT_HANDLER_NAME)\n", (12658, 12777), False, 'import click\n'), ((12797, 12868), 'click.option', 'click.option', (['"""--profile"""'], {'help': '"""The profile to use for fetching logs."""'}), "('--profile', help='The profile to use for fetching logs.')\n", (12809, 12868), False, 'import click\n'), ((13586, 13680), 'click.option', 'click.option', (['"""--filename"""'], {'help': '"""The filename to analyze. Otherwise app.py is assumed."""'}), "('--filename', help=\n 'The filename to analyze. Otherwise app.py is assumed.')\n", (13598, 13680), False, 'import click\n'), ((14239, 14285), 'click.argument', 'click.argument', (['"""project_name"""'], {'required': '(False)'}), "('project_name', required=False)\n", (14253, 14285), False, 'import click\n'), ((14287, 14328), 'click.option', 'click.option', (['"""--profile"""'], {'required': '(False)'}), "('--profile', required=False)\n", (14299, 14328), False, 'import click\n'), ((14745, 14864), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to get the deployed URL for."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to get the deployed URL for.')\n", (14757, 14864), False, 'import click\n'), ((15560, 15674), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Name of the Chalice stage to generate an SDK for."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Name of the Chalice stage to generate an SDK for.')\n", (15572, 15674), False, 'import click\n'), ((15685, 15709), 'click.argument', 'click.argument', (['"""outdir"""'], {}), "('outdir')\n", (15699, 15709), False, 'import click\n'), ((16608, 16716), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Chalice Stage 
for which to generate models."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Chalice Stage for which to generate models.')\n", (16620, 16716), False, 'import click\n'), ((17769, 17859), 'click.option', 'click.option', (['"""--stage"""'], {'default': 'DEFAULT_STAGE_NAME', 'help': '"""Chalice Stage to package."""'}), "('--stage', default=DEFAULT_STAGE_NAME, help=\n 'Chalice Stage to package.')\n", (17781, 17859), False, 'import click\n'), ((17870, 18170), 'click.option', 'click.option', (['"""--single-file"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""Create a single packaged file. By default, the \'out\' argument specifies a directory in which the package assets will be placed. If this argument is specified, a single zip file will be created instead. CloudFormation Only."""'}), '(\'--single-file\', is_flag=True, default=False, help=\n "Create a single packaged file. By default, the \'out\' argument specifies a directory in which the package assets will be placed. If this argument is specified, a single zip file will be created instead. CloudFormation Only."\n )\n', (17882, 18170), False, 'import click\n'), ((18307, 18536), 'click.option', 'click.option', (['"""--merge-template"""'], {'help': '"""Specify a JSON template to be merged into the generated template. This is useful for adding resources to a Chalice template or modify values in the template. CloudFormation Only."""'}), "('--merge-template', help=\n 'Specify a JSON template to be merged into the generated template. This is useful for adding resources to a Chalice template or modify values in the template. CloudFormation Only.'\n )\n", (18319, 18536), False, 'import click\n'), ((18613, 18634), 'click.argument', 'click.argument', (['"""out"""'], {}), "('out')\n", (18627, 18634), False, 'import click\n'), ((19537, 19705), 'click.option', 'click.option', (['"""-i"""', '"""--codebuild-image"""'], {'help': '"""Specify default codebuild image to use. 
This option must be provided when using a python version besides 2.7."""'}), "('-i', '--codebuild-image', help=\n 'Specify default codebuild image to use. This option must be provided when using a python version besides 2.7.'\n )\n", (19549, 19705), False, 'import click\n'), ((20146, 20645), 'click.option', 'click.option', (['"""-b"""', '"""--buildspec-file"""'], {'help': '"""Specify path for buildspec.yml file. By default, the build steps are included in the generated cloudformation template. If this option is provided, a buildspec.yml will be generated as a separate file and not included in the cfn template. This allows you to make changes to how the project is built without having to redeploy a CloudFormation template. This file should be named \'buildspec.yml\' and placed in the root directory of your app."""'}), '(\'-b\', \'--buildspec-file\', help=\n "Specify path for buildspec.yml file. By default, the build steps are included in the generated cloudformation template. If this option is provided, a buildspec.yml will be generated as a separate file and not included in the cfn template. This allows you to make changes to how the project is built without having to redeploy a CloudFormation template. 
This file should be named \'buildspec.yml\' and placed in the root directory of your app."\n )\n', (20158, 20645), False, 'import click\n'), ((20860, 20886), 'click.argument', 'click.argument', (['"""filename"""'], {}), "('filename')\n", (20874, 20886), False, 'import click\n'), ((1555, 1576), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (1572, 1576), False, 'import logging\n'), ((1618, 1641), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1639, 1641), False, 'import logging\n'), ((1686, 1718), 'logging.Formatter', 'logging.Formatter', (['format_string'], {}), '(format_string)\n', (1703, 1718), False, 'import logging\n'), ((1908, 1946), 'os.path.join', 'os.path.join', (['project_name', '""".chalice"""'], {}), "(project_name, '.chalice')\n", (1920, 1946), False, 'import os\n'), ((1951, 1975), 'os.makedirs', 'os.makedirs', (['chalice_dir'], {}), '(chalice_dir)\n', (1962, 1975), False, 'import os\n'), ((1989, 2042), 'os.path.join', 'os.path.join', (['project_name', '""".chalice"""', '"""config.json"""'], {}), "(project_name, '.chalice', 'config.json')\n", (2001, 2042), False, 'import os\n'), ((3003, 3021), 'platform.release', 'platform.release', ([], {}), '()\n', (3019, 3021), False, 'import platform\n'), ((3994, 4044), 'chalice.cli.factory.CLIFactory', 'CLIFactory', (['project_dir', 'debug'], {'environ': 'os.environ'}), '(project_dir, debug, environ=os.environ)\n', (4004, 4044), False, 'from chalice.cli.factory import CLIFactory\n'), ((4049, 4070), 'os.chdir', 'os.chdir', (['project_dir'], {}), '(project_dir)\n', (4057, 4070), False, 'import os\n'), ((4876, 4942), 'functools.partial', 'functools.partial', (['create_local_server', 'factory', 'host', 'port', 'stage'], {}), '(create_local_server, factory, host, port, stage)\n', (4893, 4942), False, 'import functools\n'), ((5222, 5307), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'level': 'logging.INFO', 'format': '"""%(message)s"""'}), 
"(stream=sys.stdout, level=logging.INFO, format='%(message)s'\n )\n", (5241, 5307), False, 'import logging\n'), ((6178, 6201), 'chalice.deploy.validate.validate_routes', 'validate_routes', (['routes'], {}), '(routes)\n', (6193, 6201), False, 'from chalice.deploy.validate import validate_routes, validate_python_version\n'), ((7755, 7759), 'chalice.utils.UI', 'UI', ([], {}), '()\n', (7757, 7759), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((9367, 9371), 'chalice.utils.UI', 'UI', ([], {}), '()\n', (9369, 9371), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((14489, 14516), 'os.path.isdir', 'os.path.isdir', (['project_name'], {}), '(project_name)\n', (14502, 14516), False, 'import os\n'), ((15989, 16012), 'chalice.awsclient.TypedAWSClient', 'TypedAWSClient', (['session'], {}), '(session)\n', (16003, 16012), False, 'from chalice.awsclient import TypedAWSClient\n'), ((17186, 17213), 'chalice.deploy.swagger.TemplatedSwaggerGenerator', 'TemplatedSwaggerGenerator', ([], {}), '()\n', (17211, 17213), False, 'from chalice.deploy.swagger import TemplatedSwaggerGenerator\n'), ((17305, 17309), 'chalice.utils.UI', 'UI', ([], {}), '()\n', (17307, 17309), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((21815, 21848), 'chalice.pipeline.CreatePipelineTemplate', 'pipeline.CreatePipelineTemplate', ([], {}), '()\n', (21846, 21848), False, 'from chalice import pipeline\n'), ((21862, 22029), 'chalice.pipeline.PipelineParameters', 'pipeline.PipelineParameters', ([], {'app_name': 'config.app_name', 'lambda_python_version': 'config.lambda_python_version', 'codebuild_image': 'codebuild_image', 'code_source': 'source'}), '(app_name=config.app_name, lambda_python_version\n =config.lambda_python_version, codebuild_image=codebuild_image,\n code_source=source)\n', (21889, 22029), False, 'from chalice import pipeline\n'), ((3731, 3742), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', 
(3740, 3742), False, 'import os\n'), ((5443, 5510), 'chalice.cli.reloader.run_with_reloader', 'reloader.run_with_reloader', (['server_factory', 'os.environ', 'project_dir'], {}), '(server_factory, os.environ, project_dir)\n', (5469, 5510), False, 'from chalice.cli import reloader\n'), ((5726, 5738), 'sys.exit', 'sys.exit', (['rc'], {}), '(rc)\n', (5734, 5738), False, 'import sys\n'), ((13461, 13534), 'chalice.logs.display_logs', 'display_logs', (['retriever', 'num_entries', 'include_lambda_messages', 'sys.stdout'], {}), '(retriever, num_entries, include_lambda_messages, sys.stdout)\n', (13473, 13534), False, 'from chalice.logs import display_logs\n'), ((13857, 13903), 'os.path.join', 'os.path.join', (["ctx.obj['project_dir']", '"""app.py"""'], {}), "(ctx.obj['project_dir'], 'app.py')\n", (13869, 13903), False, 'import os\n'), ((13915, 13939), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (13929, 13939), False, 'import os\n'), ((13949, 14011), 'click.echo', 'click.echo', (["('App file does not exist: %s' % filename)"], {'err': '(True)'}), "('App file does not exist: %s' % filename, err=True)\n", (13959, 14011), False, 'import click\n'), ((14026, 14039), 'click.Abort', 'click.Abort', ([], {}), '()\n', (14037, 14039), False, 'import click\n'), ((14118, 14158), 'chalice.policy.policy_from_source_code', 'policy.policy_from_source_code', (['contents'], {}), '(contents)\n', (14148, 14158), False, 'from chalice import policy\n'), ((14452, 14481), 'chalice.utils.getting_started_prompt', 'getting_started_prompt', (['click'], {}), '(click)\n', (14474, 14481), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((14526, 14593), 'click.echo', 'click.echo', (["('Directory already exists: %s' % project_name)"], {'err': '(True)'}), "('Directory already exists: %s' % project_name, err=True)\n", (14536, 14593), False, 'import click\n'), ((14608, 14621), 'click.Abort', 'click.Abort', ([], {}), '()\n', (14619, 14621), False, 
'import click\n'), ((14705, 14720), 'chalice.config.Config.create', 'Config.create', ([], {}), '()\n', (14718, 14720), False, 'from chalice.config import Config\n'), ((15271, 15368), 'click.ClickException', 'click.ClickException', (['("Could not find a record of a Rest API in chalice stage: \'%s\'" % stage)'], {}), '(\n "Could not find a record of a Rest API in chalice stage: \'%s\'" % stage)\n', (15291, 15368), False, 'import click\n'), ((16440, 16526), 'click.echo', 'click.echo', (['"""Could not find API ID, has this application been deployed?"""'], {'err': '(True)'}), "('Could not find API ID, has this application been deployed?',\n err=True)\n", (16450, 16526), False, 'import click\n'), ((16559, 16572), 'click.Abort', 'click.Abort', ([], {}), '()\n', (16570, 16572), False, 'import click\n'), ((15529, 15557), 'click.Choice', 'click.Choice', (["['javascript']"], {}), "(['javascript'])\n", (15541, 15557), False, 'import click\n'), ((17078, 17133), 'click.echo', 'click.echo', (['"""No REST API found to generate model from."""'], {}), "('No REST API found to generate model from.')\n", (17088, 17133), False, 'import click\n'), ((17148, 17161), 'click.Abort', 'click.Abort', ([], {}), '()\n', (17159, 17161), False, 'import click\n'), ((17323, 17367), 'json.dumps', 'json.dumps', (['model'], {'indent': '(4)', 'cls': 'PlanEncoder'}), '(model, indent=4, cls=PlanEncoder)\n', (17333, 17367), False, 'import json\n'), ((19056, 19146), 'click.echo', 'click.echo', (['"""Terraform format does not support merge-template or single-file options"""'], {}), "(\n 'Terraform format does not support merge-template or single-file options')\n", (19066, 19146), False, 'import click\n'), ((19186, 19199), 'click.Abort', 'click.Abort', ([], {}), '()\n', (19197, 19199), False, 'import click\n'), ((19239, 19257), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (19255, 19257), False, 'import tempfile\n'), ((17721, 17766), 'click.Choice', 'click.Choice', (["['cloudformation', 
'terraform']"], {}), "(['cloudformation', 'terraform'])\n", (17733, 17766), False, 'import click\n'), ((22142, 22171), 'chalice.pipeline.BuildSpecExtractor', 'pipeline.BuildSpecExtractor', ([], {}), '()\n', (22169, 22171), False, 'from chalice import pipeline\n'), ((19831, 19869), 'click.Choice', 'click.Choice', (["['codecommit', 'github']"], {}), "(['codecommit', 'github'])\n", (19843, 19869), False, 'import click\n'), ((2386, 2408), 'chalice.utils.serialize_to_json', 'serialize_to_json', (['cfg'], {}), '(cfg)\n', (2403, 2408), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((2424, 2470), 'os.path.join', 'os.path.join', (['project_name', '"""requirements.txt"""'], {}), "(project_name, 'requirements.txt')\n", (2436, 2470), False, 'import os\n'), ((2505, 2541), 'os.path.join', 'os.path.join', (['project_name', '"""app.py"""'], {}), "(project_name, 'app.py')\n", (2517, 2541), False, 'import os\n'), ((2613, 2653), 'os.path.join', 'os.path.join', (['project_name', '""".gitignore"""'], {}), "(project_name, '.gitignore')\n", (2625, 2653), False, 'import os\n'), ((2954, 2971), 'platform.system', 'platform.system', ([], {}), '()\n', (2969, 2971), False, 'import platform\n'), ((3756, 3782), 'os.path.isabs', 'os.path.isabs', (['project_dir'], {}), '(project_dir)\n', (3769, 3782), False, 'import os\n'), ((3806, 3834), 'os.path.abspath', 'os.path.abspath', (['project_dir'], {}), '(project_dir)\n', (3821, 3834), False, 'import os\n'), ((10900, 10975), 'click.ClickException', 'click.ClickException', (["('could not find a lambda function named %s.' % e.name)"], {}), "('could not find a lambda function named %s.' 
% e.name)\n", (10920, 10975), False, 'import click\n'), ((11132, 11239), 'click.ClickException', 'click.ClickException', (['("""got \'%s\' exception back from Lambda\n%s""" % (error[\'Code\'], error[\n \'Message\']))'], {}), '("""got \'%s\' exception back from Lambda\n%s""" % (error[\n \'Code\'], error[\'Message\']))\n', (11152, 11239), False, 'import click\n'), ((11348, 11426), 'click.ClickException', 'click.ClickException', (['"""Unhandled exception in Lambda function, details above."""'], {}), "('Unhandled exception in Lambda function, details above.')\n", (11368, 11426), False, 'import click\n'), ((11527, 11558), 'click.ClickException', 'click.ClickException', (['e.message'], {}), '(e.message)\n', (11547, 11558), False, 'import click\n'), ((12164, 12168), 'chalice.utils.UI', 'UI', ([], {}), '()\n', (12166, 12168), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((14178, 14206), 'chalice.utils.serialize_to_json', 'serialize_to_json', (['generated'], {}), '(generated)\n', (14195, 14206), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((19340, 19388), 'chalice.utils.create_zip_file', 'create_zip_file', ([], {'source_dir': 'dirname', 'outfile': 'out'}), '(source_dir=dirname, outfile=out)\n', (19355, 19388), False, 'from chalice.utils import create_zip_file\n'), ((19418, 19440), 'shutil.rmtree', 'shutil.rmtree', (['dirname'], {}), '(dirname)\n', (19431, 19440), False, 'import shutil\n'), ((22373, 22398), 'chalice.utils.serialize_to_json', 'serialize_to_json', (['output'], {}), '(output)\n', (22390, 22398), False, 'from chalice.utils import getting_started_prompt, UI, serialize_to_json\n'), ((22770, 22934), 'click.echo', 'click.echo', (['"""No region configured. Either export the AWS_DEFAULT_REGION environment variable or set the region value in our ~/.aws/config file."""'], {'err': '(True)'}), "(\n 'No region configured. 
Either export the AWS_DEFAULT_REGION environment variable or set the region value in our ~/.aws/config file.'\n , err=True)\n", (22780, 22934), False, 'import click\n'), ((23135, 23157), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (23155, 23157), False, 'import traceback\n')] |
from typing import Dict
import numpy as np
def buffer_from_example(example: Dict[str, np.ndarray],
                        leading_dims) -> Dict[str, np.ndarray]:
    """Build a dict of zeroed arrays shaped like *example*, with extra leading dims.

    Each entry keeps its original dtype; its shape becomes
    ``leading_dims + original_shape`` (``leading_dims`` must be a tuple).
    """
    return {name: np.zeros(leading_dims + arr.shape, dtype=arr.dtype)
            for name, arr in example.items()}
def get_leading_dims(dictionary, n_dims=1):
    """Return the shared leading ``n_dims`` dimensions of every array in *dictionary*.

    Args:
        dictionary: mapping of names to arrays (anything with a ``.shape``).
        n_dims: how many leading dimensions must agree across all entries.

    Returns:
        The tuple of the first ``n_dims`` dimensions.

    Raises:
        ValueError: if *dictionary* is empty, or if any array disagrees on the
            leading dimensions (the first offending key/shape is reported).
    """
    if not dictionary:
        # The original raised a bare StopIteration from next(); give a real error.
        raise ValueError('Cannot get leading dims of an empty dictionary')
    values = iter(dictionary.values())
    leading_dims = next(values).shape[:n_dims]
    if not all(leading_dims == value.shape[:n_dims] for value in values):
        # Find the first mismatching entry for the error message.
        key, shape = next((key, value.shape[:n_dims])
                          for key, value in dictionary.items()
                          if leading_dims != value.shape[:n_dims])
        raise ValueError((f'Dimensions do not match: {leading_dims} vs. '
                          f'{shape} (for key `{key}`)'))
    return leading_dims
| [
"numpy.zeros"
] | [((237, 292), 'numpy.zeros', 'np.zeros', (['(leading_dims + value.shape)'], {'dtype': 'value.dtype'}), '(leading_dims + value.shape, dtype=value.dtype)\n', (245, 292), True, 'import numpy as np\n')] |
import numpy as np
import cv2
# Sharpen a photo with a 3x3 high-boost kernel and show it next to the original.
image = cv2.imread('images/unsharp_bird.jpg')

# Centre weight 5, 4-neighbours -1: amplifies local contrast.
sharpen_kernel = np.array([
    [0, -1, 0],
    [-1, 5, -1],
    [0, -1, 0]
])
sharpened = cv2.filter2D(image, -1, sharpen_kernel)

cv2.imshow("original image", image)
cv2.imshow("sharpen image", sharpened)
cv2.waitKey(0)
cv2.destroyAllWindows() | [
"cv2.filter2D",
"cv2.imshow",
"numpy.array",
"cv2.destroyAllWindows",
"cv2.waitKey",
"cv2.imread"
] | [((39, 76), 'cv2.imread', 'cv2.imread', (['"""images/unsharp_bird.jpg"""'], {}), "('images/unsharp_bird.jpg')\n", (49, 76), False, 'import cv2\n'), ((87, 134), 'numpy.array', 'np.array', (['[[0, -1, 0], [-1, 5, -1], [0, -1, 0]]'], {}), '([[0, -1, 0], [-1, 5, -1], [0, -1, 0]])\n', (95, 134), True, 'import numpy as np\n'), ((200, 231), 'cv2.filter2D', 'cv2.filter2D', (['image', '(-1)', 'kernel'], {}), '(image, -1, kernel)\n', (212, 231), False, 'import cv2\n'), ((233, 268), 'cv2.imshow', 'cv2.imshow', (['"""original image"""', 'image'], {}), "('original image', image)\n", (243, 268), False, 'import cv2\n'), ((269, 311), 'cv2.imshow', 'cv2.imshow', (['"""sharpen image"""', 'sharpen_iamge'], {}), "('sharpen image', sharpen_iamge)\n", (279, 311), False, 'import cv2\n'), ((312, 326), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (323, 326), False, 'import cv2\n'), ((327, 350), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (348, 350), False, 'import cv2\n')] |
#!/usr/bin/env python3
import pkg_resources
# Sphinx extensions enabled for the AnyIO documentation build.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx_autodoc_typehints'
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'AnyIO'
author = '<NAME>'
copyright = '2018, ' + author
# Derive the version strings from the installed "anyio" distribution so the
# docs always match the package metadata.
# NOTE(review): pkg_resources is deprecated in newer setuptools;
# importlib.metadata is the modern equivalent -- confirm before migrating.
v = pkg_resources.get_distribution('anyio').parsed_version
version = v.base_version
release = v.public
language = None
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# Have autodoc document members and show base classes by default.
autodoc_default_options = {
    'members': True,
    'show-inheritance': True
}
todo_include_todos = False
# HTML output: Read the Docs theme.
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
htmlhelp_basename = 'anyiodoc'
# Cross-reference the CPython documentation via intersphinx.
intersphinx_mapping = {'python': ('https://docs.python.org/3/', None)}
| [
"pkg_resources.get_distribution"
] | [((297, 336), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""anyio"""'], {}), "('anyio')\n", (327, 336), False, 'import pkg_resources\n')] |
from django import urls
import pytest
from surveys.tests.test_views.helpers import create_user
def get_login_url():
    """Resolve the URL for the ``user_login`` route."""
    login_url = urls.reverse("user_login")
    return login_url
@pytest.mark.django_db
def test_login_only_post(client):
    """The login endpoint must reject non-POST requests with 405."""
    # Leftover debug print(response.content) removed.
    response = client.get(get_login_url())
    assert response.status_code == 405
@pytest.mark.django_db
def test_successful_login(client):
    """Posting credentials that match an existing user yields HTTP 200."""
    create_user(login="TestUser", password="<PASSWORD>")
    credentials = {"login": "TestUser", "password": "<PASSWORD>"}
    response = client.post(
        get_login_url(), credentials, content_type="application/json"
    )
    assert response.status_code == 200
@pytest.mark.django_db
def test_unsuccessful_login(client):
    """A login attempt when no such user exists yields HTTP 404."""
    payload = {"login": "TestUser", "password": "<PASSWORD>"}
    response = client.post(
        get_login_url(), payload, content_type="application/json"
    )
    assert response.status_code == 404
@pytest.mark.django_db
def test_login_wrong_login(client):
    """A login attempt with an unknown username yields HTTP 404."""
    create_user(login="TestUser")
    # Leftover debug print(response.content) removed.
    response = client.post(
        get_login_url(),
        {"login": "WrongTestUser", "password": "<PASSWORD>"},
        content_type="application/json",
    )
    assert response.status_code == 404
| [
"surveys.tests.test_views.helpers.create_user",
"django.urls.reverse"
] | [((130, 156), 'django.urls.reverse', 'urls.reverse', (['"""user_login"""'], {}), "('user_login')\n", (142, 156), False, 'from django import urls\n'), ((390, 442), 'surveys.tests.test_views.helpers.create_user', 'create_user', ([], {'login': '"""TestUser"""', 'password': '"""<PASSWORD>"""'}), "(login='TestUser', password='<PASSWORD>')\n", (401, 442), False, 'from surveys.tests.test_views.helpers import create_user\n'), ((962, 991), 'surveys.tests.test_views.helpers.create_user', 'create_user', ([], {'login': '"""TestUser"""'}), "(login='TestUser')\n", (973, 991), False, 'from surveys.tests.test_views.helpers import create_user\n')] |
import tensorflow as tf
# Bipolar encoding for an AND-style truth table: +1 = true, -1 = false.
y=1.
n=-1.
bias=1.0
# Four input rows: two inputs plus a constant bias term.
x=[[y,y,bias],
   [y,n,bias],
   [n,y,bias],
   [n,n,bias]]
# Target outputs: only the (true, true) row maps to +1.
out=[[n],[n],[n],[y]]
# Trainable weight column vector (2 inputs + bias -> 3x1), randomly initialised.
w=tf.Variable(tf.random_normal([3,1]))
def step(x):
    """Bipolar hard-threshold activation: +1 where x > 0, otherwise -1."""
    positive_mask = tf.to_float(tf.greater(x, 0))
    # Map {0, 1} onto {-1, +1}: 2 * mask - 1.
    return tf.subtract(tf.multiply(positive_mask, 2), 1)
# Forward pass: thresholded linear combination of inputs and weights.
output=step(tf.matmul(x,w))
error=tf.subtract(out,output)
mse=tf.reduce_mean(tf.square(error))
# Perceptron delta: x^T * error accumulates a correction per weight.
delta=tf.matmul(x,error,transpose_a=True)
# Training op: in-place weight update w <- w + delta.
train=tf.assign(w,tf.add(w,delta))
# NOTE(review): TF1-era graph/session API; tf.initialize_all_variables was
# long deprecated in favour of tf.global_variables_initializer -- confirm
# the targeted TensorFlow version before modernising.
sess=tf.Session()
sess.run(tf.initialize_all_variables())
# Train until the error reaches the target or the epoch budget runs out.
err,target = 1,0
epoch,max_epoch=0,10
while(err>target and epoch < max_epoch):
    epoch+=1
    # Evaluating `train` applies the weight update as a side effect.
    err,_=sess.run([mse,train])
    print('epoch:', epoch, 'mse:',err)
| [
"tensorflow.initialize_all_variables",
"tensorflow.random_normal",
"tensorflow.to_float",
"tensorflow.Session",
"tensorflow.multiply",
"tensorflow.add",
"tensorflow.matmul",
"tensorflow.greater",
"tensorflow.subtract",
"tensorflow.square"
] | [((375, 399), 'tensorflow.subtract', 'tf.subtract', (['out', 'output'], {}), '(out, output)\n', (386, 399), True, 'import tensorflow as tf\n'), ((446, 483), 'tensorflow.matmul', 'tf.matmul', (['x', 'error'], {'transpose_a': '(True)'}), '(x, error, transpose_a=True)\n', (455, 483), True, 'import tensorflow as tf\n'), ((526, 538), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (536, 538), True, 'import tensorflow as tf\n'), ((156, 180), 'tensorflow.random_normal', 'tf.random_normal', (['[3, 1]'], {}), '([3, 1])\n', (172, 180), True, 'import tensorflow as tf\n'), ((213, 229), 'tensorflow.greater', 'tf.greater', (['x', '(0)'], {}), '(x, 0)\n', (223, 229), True, 'import tensorflow as tf\n'), ((243, 266), 'tensorflow.to_float', 'tf.to_float', (['is_greater'], {}), '(is_greater)\n', (254, 266), True, 'import tensorflow as tf\n'), ((280, 304), 'tensorflow.multiply', 'tf.multiply', (['as_float', '(2)'], {}), '(as_float, 2)\n', (291, 304), True, 'import tensorflow as tf\n'), ((316, 339), 'tensorflow.subtract', 'tf.subtract', (['doubled', '(1)'], {}), '(doubled, 1)\n', (327, 339), True, 'import tensorflow as tf\n'), ((352, 367), 'tensorflow.matmul', 'tf.matmul', (['x', 'w'], {}), '(x, w)\n', (361, 367), True, 'import tensorflow as tf\n'), ((419, 435), 'tensorflow.square', 'tf.square', (['error'], {}), '(error)\n', (428, 435), True, 'import tensorflow as tf\n'), ((501, 517), 'tensorflow.add', 'tf.add', (['w', 'delta'], {}), '(w, delta)\n', (507, 517), True, 'import tensorflow as tf\n'), ((549, 578), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ([], {}), '()\n', (576, 578), True, 'import tensorflow as tf\n')] |
# script to count chars, tokens, paragraphs of the corpus files
import nltk as k
def _corpus_stats(path):
    """Return (joined text, paragraph count) for a corpus file.

    Non-blank lines are treated as paragraphs; each trailing newline is
    stripped and the paragraphs are joined with single spaces.
    """
    # "with" closes the handle -- the original left two files open.
    with open(path, "r") as corpus_file:
        paragraphs = [line[:-1] for line in corpus_file if line != "\n"]
    return " ".join(paragraphs), len(paragraphs)

# First corpus: characters, paragraphs, sentences and word tokens.
ts, npars = _corpus_stats("../corpora/corpora.txt")
nchars = len(ts)
nsents = len(k.tokenize.sent_tokenize(ts))
ntoks = len(k.tokenize.wordpunct_tokenize(ts))

# Second corpus, same statistics (previously duplicated inline).
ts2, npars2 = _corpus_stats("../corpora/corpora2.txt")
nchars2 = len(ts2)
nsents2 = len(k.tokenize.sent_tokenize(ts2))
ntoks2 = len(k.tokenize.wordpunct_tokenize(ts2))
| [
"nltk.tokenize.sent_tokenize",
"nltk.tokenize.wordpunct_tokenize"
] | [((304, 332), 'nltk.tokenize.sent_tokenize', 'k.tokenize.sent_tokenize', (['ts'], {}), '(ts)\n', (328, 332), True, 'import nltk as k\n'), ((346, 379), 'nltk.tokenize.wordpunct_tokenize', 'k.tokenize.wordpunct_tokenize', (['ts'], {}), '(ts)\n', (375, 379), True, 'import nltk as k\n'), ((618, 647), 'nltk.tokenize.sent_tokenize', 'k.tokenize.sent_tokenize', (['ts2'], {}), '(ts2)\n', (642, 647), True, 'import nltk as k\n'), ((662, 696), 'nltk.tokenize.wordpunct_tokenize', 'k.tokenize.wordpunct_tokenize', (['ts2'], {}), '(ts2)\n', (691, 696), True, 'import nltk as k\n')] |
#!/usr/bin/python
import argparse
import distutils.version
import json
import os
import plistlib
import subprocess
import sys
import tempfile
import urllib
import webbrowser
def export_bundle(bundle_path):
    """Export the committed copy of *bundle_path* into a temp dir via git.

    Pipes `git ls-files` into `git checkout-index` (NUL-separated paths) so
    only tracked files are exported; returns the absolute path of the
    exported bundle inside the temporary directory.
    """
    toplevel_path = subprocess.check_output(
        ['git', 'rev-parse', '--show-toplevel']).rstrip()
    git = ['git', '-C', toplevel_path]
    dest_path = tempfile.mkdtemp()
    # -z on both ends: NUL-delimited filenames survive spaces in paths.
    ls_files = subprocess.Popen(git +
        ['ls-files', '-cz', bundle_path], stdout=subprocess.PIPE)
    checkout_index = subprocess.Popen(git +
        ['checkout-index', '--prefix=%s/'% dest_path, '--stdin', '-z'],
        stdin=ls_files.stdout)
    # Close our copy of the pipe so checkout-index sees EOF when ls-files exits.
    ls_files.stdout.close()
    checkout_index.communicate()
    return os.path.abspath(os.path.join(dest_path, bundle_path))
def expand_url_template(url_template, *args, **query):
    """Fill a %s-style URL template and append an urlencoded query string.

    Positional *args* are percent-escaped and substituted into
    *url_template*; keyword *query* pairs become the ?key=value suffix.
    """
    # NOTE(review): urllib.quote/urlencode are the Python 2 locations
    # (Python 3 moved them to urllib.parse) -- this script appears to
    # target Python 2; confirm before porting.
    url = url_template
    if args:
        url = url % tuple(map(urllib.quote, args))
    if query:
        url += '?' + urllib.urlencode(query)
    return url
def archive_dir_name(bundle_path, version):
    """Split *bundle_path* and return (directory, versioned archive filename)."""
    dir_path, bundle_filename = os.path.split(bundle_path)
    bundle_name, bundle_ext = os.path.splitext(bundle_filename)
    # GitHub will replace spaces with periods; dashes look better
    safe_name = bundle_name.replace(' ', '-')
    archive_filename = '{}-{}{}'.format(safe_name, version, bundle_ext)
    return dir_path, archive_filename
def tag_for_version(version):
    """Return the git tag for *version* (e.g. '1.0' -> 'v1.0')."""
    return 'v{}'.format(version)
def create_virtualenv(bundle_path, requirements_path):
    """Embed a relocatable virtualenv with pinned deps inside the bundle.

    Creates Contents/Scripts as a site-packages-enabled virtualenv,
    installs *requirements_path* into it, then byte-compiles the top level.
    """
    # Imported lazily so the rest of the script works without these installed.
    import compileall
    import pip
    import virtualenv
    scripts_path = os.path.join(bundle_path, 'Contents', 'Scripts')
    virtualenv.create_environment(scripts_path, site_packages=True)
    # Relocatable: the bundle may be installed anywhere on the user's disk.
    virtualenv.make_environment_relocatable(scripts_path)
    pip.main(['install', '--prefix', scripts_path, '-r', requirements_path])
    # maxlevels=0: only compile the top-level directory, not subpackages.
    compileall.compile_dir(scripts_path, maxlevels=0)
def update_bundle_info(bundle_path, version, repo):
    """Rewrite the bundle's Info.plist with the release version and download URL.

    The download URL points at the GitHub release asset that
    archive_bundle() will later produce for this version.
    """
    info_plist_path = os.path.join(bundle_path, 'Contents', 'Info.plist')
    # NOTE(review): plistlib.readPlist/writePlist are the legacy API
    # (removed in Python 3.9; load/dump replace them) -- confirm the
    # targeted Python version before modernising.
    info_plist = plistlib.readPlist(info_plist_path)
    info_plist['CFBundleVersion'] = version
    info_plist['LBDescription']['LBDownloadURL'] = expand_url_template(
        'https://github.com/%s/releases/download/%s/%s', repo,
        tag_for_version(version), archive_dir_name(bundle_path, version)[1])
    plistlib.writePlist(info_plist, info_plist_path)
def sign_bundle(bundle_path):
    """Codesign the bundle (force re-sign) with the Developer ID identity."""
    codesign_cmd = ['/usr/bin/codesign', '-fs',
                    'Developer ID Application: <NAME>',
                    bundle_path]
    subprocess.check_call(codesign_cmd)
def archive_bundle(bundle_path, version):
    """Zip the bundle with ditto (keeping the parent dir); return the archive path."""
    dir_path, archive_filename = archive_dir_name(bundle_path, version)
    archive_path = os.path.join(dir_path, archive_filename)
    ditto_cmd = ['/usr/bin/ditto', '--keepParent', '-ck',
                 bundle_path, archive_path]
    subprocess.check_call(ditto_cmd)
    return archive_path
def upload_release(repo, version, archive_path, github_access_token):
    """Create a draft GitHub release for *version* and attach the archive.

    POSTs the release JSON to the GitHub releases API via curl, then uploads
    *archive_path* as a release asset. Returns the release's HTML URL.
    """
    strict_version = distutils.version.StrictVersion(version)
    releases_url = expand_url_template(
        'https://api.github.com/repos/%s/releases', repo,
        access_token=github_access_token)
    release_name = tag_for_version(version)
    # Draft release; marked prerelease when the version has a pre tag (a1, b2...).
    release_json = dict(tag_name=release_name, target_commitish='master',
                        name=release_name, body='', draft=True,
                        prerelease=bool(strict_version.prerelease))
    # '--data @-' makes curl read the request body from stdin.
    releases_api = subprocess.Popen(
        ['/usr/bin/curl', '--data', '@-', releases_url],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    release_json_data, _ = releases_api.communicate(json.dumps(release_json))
    release_json = json.loads(release_json_data)
    html_url = release_json['html_url']
    # upload_url is an RFC 6570 template; strip the {?name,label} suffix.
    upload_url = release_json['upload_url'].split('{', 1)[0]
    upload_url = expand_url_template(upload_url,
        name=os.path.basename(archive_path), access_token=github_access_token)
    subprocess.check_call(
        ['/usr/bin/curl', '-H', 'Content-Type: application/zip',
         '--data-binary', '@' + archive_path, upload_url])
    return html_url
def release(version, github_access_token):
    """Build, sign, archive and publish the action bundle to GitHub.

    Exports a clean copy of the committed bundle, stamps the version into
    both the export and the working copy, optionally bakes in a
    virtualenv, signs and zips the export, uploads it as a GitHub release,
    and opens the release page in a browser.
    """
    repo = 'nriley/LBHermes'
    project_dir = os.path.join(os.path.dirname(__file__), '..')
    action_bundle = os.path.join(project_dir, 'Hermes Stations.lbaction')
    # The export is a pristine copy of the committed bundle ...
    exported = export_bundle(action_bundle)
    # ... except for the version/download-URL stamp, which also goes into
    # the working copy so it can be committed.
    update_bundle_info(exported, version, repo)
    update_bundle_info(action_bundle, version, repo)
    reqs_path = os.path.join(project_dir, 'requirements.txt')
    if os.path.exists(reqs_path):
        create_virtualenv(exported, reqs_path)
    sign_bundle(exported)
    archive = archive_bundle(exported, version)
    release_url = upload_release(repo, version, archive,
                                 github_access_token)
    webbrowser.open(release_url)
if __name__ == '__main__':
    # CLI entry point: release <version> <github_access_token>
    arg_parser = argparse.ArgumentParser(description='Release to GitHub.')
    arg_parser.add_argument('version')
    arg_parser.add_argument('github_access_token')
    options = arg_parser.parse_args()
    release(options.version, options.github_access_token)
| [
"webbrowser.open",
"plistlib.readPlist",
"urllib.urlencode",
"pip.main",
"os.path.exists",
"argparse.ArgumentParser",
"subprocess.Popen",
"json.dumps",
"os.path.split",
"plistlib.writePlist",
"virtualenv.make_environment_relocatable",
"subprocess.check_output",
"json.loads",
"subprocess.ch... | [((366, 384), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (382, 384), False, 'import tempfile\n'), ((400, 485), 'subprocess.Popen', 'subprocess.Popen', (["(git + ['ls-files', '-cz', bundle_path])"], {'stdout': 'subprocess.PIPE'}), "(git + ['ls-files', '-cz', bundle_path], stdout=subprocess.PIPE\n )\n", (416, 485), False, 'import subprocess\n'), ((510, 624), 'subprocess.Popen', 'subprocess.Popen', (["(git + ['checkout-index', '--prefix=%s/' % dest_path, '--stdin', '-z'])"], {'stdin': 'ls_files.stdout'}), "(git + ['checkout-index', '--prefix=%s/' % dest_path,\n '--stdin', '-z'], stdin=ls_files.stdout)\n", (526, 624), False, 'import subprocess\n'), ((1056, 1082), 'os.path.split', 'os.path.split', (['bundle_path'], {}), '(bundle_path)\n', (1069, 1082), False, 'import os\n'), ((1113, 1146), 'os.path.splitext', 'os.path.splitext', (['bundle_filename'], {}), '(bundle_filename)\n', (1129, 1146), False, 'import os\n'), ((1519, 1567), 'os.path.join', 'os.path.join', (['bundle_path', '"""Contents"""', '"""Scripts"""'], {}), "(bundle_path, 'Contents', 'Scripts')\n", (1531, 1567), False, 'import os\n'), ((1572, 1635), 'virtualenv.create_environment', 'virtualenv.create_environment', (['scripts_path'], {'site_packages': '(True)'}), '(scripts_path, site_packages=True)\n', (1601, 1635), False, 'import virtualenv\n'), ((1640, 1693), 'virtualenv.make_environment_relocatable', 'virtualenv.make_environment_relocatable', (['scripts_path'], {}), '(scripts_path)\n', (1679, 1693), False, 'import virtualenv\n'), ((1698, 1770), 'pip.main', 'pip.main', (["['install', '--prefix', scripts_path, '-r', requirements_path]"], {}), "(['install', '--prefix', scripts_path, '-r', requirements_path])\n", (1706, 1770), False, 'import pip\n'), ((1775, 1824), 'compileall.compile_dir', 'compileall.compile_dir', (['scripts_path'], {'maxlevels': '(0)'}), '(scripts_path, maxlevels=0)\n', (1797, 1824), False, 'import compileall\n'), ((1900, 1951), 'os.path.join', 
'os.path.join', (['bundle_path', '"""Contents"""', '"""Info.plist"""'], {}), "(bundle_path, 'Contents', 'Info.plist')\n", (1912, 1951), False, 'import os\n'), ((1969, 2004), 'plistlib.readPlist', 'plistlib.readPlist', (['info_plist_path'], {}), '(info_plist_path)\n', (1987, 2004), False, 'import plistlib\n'), ((2266, 2314), 'plistlib.writePlist', 'plistlib.writePlist', (['info_plist', 'info_plist_path'], {}), '(info_plist, info_plist_path)\n', (2285, 2314), False, 'import plistlib\n'), ((2350, 2454), 'subprocess.check_call', 'subprocess.check_call', (["['/usr/bin/codesign', '-fs', 'Developer ID Application: <NAME>', bundle_path]"], {}), "(['/usr/bin/codesign', '-fs',\n 'Developer ID Application: <NAME>', bundle_path])\n", (2371, 2454), False, 'import subprocess\n'), ((2625, 2720), 'subprocess.check_call', 'subprocess.check_call', (["['/usr/bin/ditto', '--keepParent', '-ck', bundle_path, archive_path]"], {}), "(['/usr/bin/ditto', '--keepParent', '-ck', bundle_path,\n archive_path])\n", (2646, 2720), False, 'import subprocess\n'), ((3314, 3431), 'subprocess.Popen', 'subprocess.Popen', (["['/usr/bin/curl', '--data', '@-', releases_url]"], {'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE'}), "(['/usr/bin/curl', '--data', '@-', releases_url], stdin=\n subprocess.PIPE, stdout=subprocess.PIPE)\n", (3330, 3431), False, 'import subprocess\n'), ((3541, 3570), 'json.loads', 'json.loads', (['release_json_data'], {}), '(release_json_data)\n', (3551, 3570), False, 'import json\n'), ((3805, 3941), 'subprocess.check_call', 'subprocess.check_call', (["['/usr/bin/curl', '-H', 'Content-Type: application/zip', '--data-binary', \n '@' + archive_path, upload_url]"], {}), "(['/usr/bin/curl', '-H',\n 'Content-Type: application/zip', '--data-binary', '@' + archive_path,\n upload_url])\n", (3826, 3941), False, 'import subprocess\n'), ((4129, 4183), 'os.path.join', 'os.path.join', (['project_path', '"""Hermes Stations.lbaction"""'], {}), "(project_path, 'Hermes Stations.lbaction')\n", 
(4141, 4183), False, 'import os\n'), ((4535, 4581), 'os.path.join', 'os.path.join', (['project_path', '"""requirements.txt"""'], {}), "(project_path, 'requirements.txt')\n", (4547, 4581), False, 'import os\n'), ((4589, 4622), 'os.path.exists', 'os.path.exists', (['requirements_path'], {}), '(requirements_path)\n', (4603, 4622), False, 'import os\n'), ((4854, 4879), 'webbrowser.open', 'webbrowser.open', (['html_url'], {}), '(html_url)\n', (4869, 4879), False, 'import webbrowser\n'), ((4921, 4978), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Release to GitHub."""'}), "(description='Release to GitHub.')\n", (4944, 4978), False, 'import argparse\n'), ((724, 760), 'os.path.join', 'os.path.join', (['dest_path', 'bundle_path'], {}), '(dest_path, bundle_path)\n', (736, 760), False, 'import os\n'), ((3496, 3520), 'json.dumps', 'json.dumps', (['release_json'], {}), '(release_json)\n', (3506, 3520), False, 'import json\n'), ((4078, 4103), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4093, 4103), False, 'import os\n'), ((228, 292), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'rev-parse', '--show-toplevel']"], {}), "(['git', 'rev-parse', '--show-toplevel'])\n", (251, 292), False, 'import subprocess\n'), ((940, 963), 'urllib.urlencode', 'urllib.urlencode', (['query'], {}), '(query)\n', (956, 963), False, 'import urllib\n'), ((3735, 3765), 'os.path.basename', 'os.path.basename', (['archive_path'], {}), '(archive_path)\n', (3751, 3765), False, 'import os\n')] |