code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
#!/usr/bin/env python
import nltk
from nltk.corpus import brown
import numpy as np
from math import log
from config import *
"""
convert word list file to a map from word to id
"""
def word2map(filename):
    """Load a word-list file (one word per line) into a word -> id map.

    Ids are assigned in order of first appearance. A duplicated word keeps
    its first id; the previous code re-assigned it ``len(word2idx)``, which
    collides with the next new word's id.

    :param filename: path to the word list, one word per line
    :return: dict mapping word -> integer id in [0, number of unique words)
    """
    word2idx = {}
    with open(filename) as f:
        for line in f:
            # setdefault keeps the first id on duplicates
            word2idx.setdefault(line.strip('\n'), len(word2idx))
    return word2idx
if __name__ == "__main__":
    # Add the nltk search path so the brown corpus can be located.
    nltk.data.path.append(DATA_HOME)

    # Get the brown corpus as a lowercased, alphabetic-only token stream.
    print("getting text stream...")
    brown_text = [w for w in (x.lower() for x in brown.words()) if w.isalpha()]
    M = len(brown_text)

    # Map vocabulary (V) and context (C) words to indices.
    print("generating word map...")
    V2id = word2map(DATA_HOME + "V.txt")
    C2id = word2map(DATA_HOME + "C.txt")
    print(V2id)
    print(C2id)

    # Count co-occurrences for Pr(c) and Pr(c|w).
    # Start from ones to apply Laplace smoothing.
    print("counting context appearance...")
    window_count = np.ones((V_SIZE, C_SIZE))
    core_count = np.ones((1, C_SIZE))
    for i, w in enumerate(brown_text):
        if w not in V2id:
            continue
        wid = V2id[w]
        # Scan the symmetric context window around position i,
        # clamped to the corpus bounds and skipping the center word.
        for j in range(max(0, i - HALF_WINDOW), min(M, i + HALF_WINDOW + 1)):
            if j == i:
                continue
            c = brown_text[j]
            if c not in C2id:
                continue
            cid = C2id[c]
            window_count[wid, cid] += 1
            core_count[0, cid] += 1

    # Normalize counts into probabilities (vectorized; replaces the
    # per-row Python loop).
    print("calculating probability...")
    pcw = window_count / window_count.sum(axis=1, keepdims=True)
    pc = core_count / core_count.sum()

    # Positive pointwise mutual information: max(0, log(Pr(c|w) / Pr(c))).
    # pc broadcasts over the vocabulary axis.
    phi = np.maximum(0.0, np.log(pcw / pc))

    # Save the representation matrix to file.
    print("saving representation...")
    np.save("representation-" + str(C_SIZE) + ".npy", phi)
|
[
"nltk.corpus.brown.words",
"nltk.data.path.append",
"numpy.zeros",
"numpy.ones",
"math.log"
] |
[((416, 448), 'nltk.data.path.append', 'nltk.data.path.append', (['DATA_HOME'], {}), '(DATA_HOME)\n', (437, 448), False, 'import nltk\n'), ((985, 1010), 'numpy.ones', 'np.ones', (['(V_SIZE, C_SIZE)'], {}), '((V_SIZE, C_SIZE))\n', (992, 1010), True, 'import numpy as np\n'), ((1029, 1049), 'numpy.ones', 'np.ones', (['(1, C_SIZE)'], {}), '((1, C_SIZE))\n', (1036, 1049), True, 'import numpy as np\n'), ((1822, 1848), 'numpy.zeros', 'np.zeros', (['(V_SIZE, C_SIZE)'], {}), '((V_SIZE, C_SIZE))\n', (1830, 1848), True, 'import numpy as np\n'), ((593, 606), 'nltk.corpus.brown.words', 'brown.words', ([], {}), '()\n', (604, 606), False, 'from nltk.corpus import brown\n'), ((1941, 1966), 'math.log', 'log', (['(pcw[i][j] / pc[0][j])'], {}), '(pcw[i][j] / pc[0][j])\n', (1944, 1966), False, 'from math import log\n')]
|
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 <NAME> <http://sixpinetrees.blogspot.com/>,
# 2013-2014 <NAME> <<EMAIL>>
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Confront a CoAP over TCP server with a client that speaks so bad protocol it
is easier to mock with sending byte sequences than with aiocoap"""
import asyncio
import unittest
import aiocoap
from .test_server import WithTestServer, precise_warnings, no_warnings, asynctest
from .common import tcp_disabled
@unittest.skipIf(tcp_disabled, "TCP disabled in environment")
class TestNoncoapTCPClient(WithTestServer):
    """Drive the CoAP-over-TCP server with raw byte sequences that are too
    malformed to express through aiocoap itself, and assert that the server
    either aborts the connection or keeps idling, as appropriate."""

    def setUp(self):
        super().setUp()

        # Open a plain TCP connection to the test server; the reader/writer
        # pair acts as the mock (non-CoAP-speaking) client.
        self.mock_r, self.mock_w = self.loop.run_until_complete(
            asyncio.open_connection(
                self.serveraddress,
                aiocoap.COAP_PORT))

    def tearDown(self):
        self.mock_w.close()

        super().tearDown()

    @staticmethod
    def _read_as_messages(encoded: bytes):
        """Process the encoded data into CoAP-over-TCP messages, return them as
        a list and trailing (unrecognized / incomplete) data."""
        messages = []
        while True:
            size = aiocoap.transports.tcp._extract_message_size(encoded)
            if size is not None:
                size = sum(size)
            if size is None or size > len(encoded):
                # Length not parsable or message still incomplete: stop and
                # hand back whatever is left over.
                return messages, encoded

            messages.append(aiocoap.transports.tcp._decode_message(encoded[:size]))
            encoded = encoded[size:]

    async def should_abort_early(self, request: bytes):
        """Send request bytes, expect that the server closes the connection
        after having sent possibly a CSM and an abort"""
        self.mock_w.write(request)
        r = await self.mock_r.read()  # timing out would be a typical failure case here too
        parsed, trail = self._read_as_messages(r)
        self.assertEqual(trail, b"", "Leftover data after closing message")
        if parsed[0].code == aiocoap.CSM:
            # don't discard the CSM unconditionally: the server might have
            # read the request data before sending its own initial CSM.
            parsed.pop(0)
        self.assertEqual(len(parsed), 1, "Not exactly one (presumably abort) message received")
        self.assertEqual(parsed[0].code, aiocoap.ABORT, "Received message is not an abort message")

    async def should_idle(self, request: bytes, timeout=0.1):
        """Send request bytes, expect that the server sends CSM and does not
        close the connection, awaiting more from the client.

        Returns all messages received until the timeout."""
        self.mock_w.write(request)

        triggered_eof = False

        async def kill_read():
            """After a timeout, synthesize an end-of-file condition into the
            reader, hoping this doesn't break too much."""
            nonlocal triggered_eof
            await asyncio.sleep(timeout)
            triggered_eof = True
            self.mock_r.feed_eof()
        self.loop.create_task(kill_read())
        r = await self.mock_r.read()  # timing out would be a typical failure case here too

        self.assertEqual(triggered_eof, True, "Server closed connection prematurely")

        parsed, trail = self._read_as_messages(r)
        # if this happens, the server is either sending garbage (announcing
        # something long and not following up), or the timeout should be
        # increased
        self.assertEqual(trail, b"", "Leftover data after reading timeout")
        if parsed[0].code == aiocoap.CSM:
            # don't discard the CSM unconditionally: the server might have
            # read the request data before sending its own initial CSM.
            parsed.pop(0)
        return parsed

    async def should_idle_quietly(self, request: bytes, timeout=0.1):
        """should_idle, but assert that no messages were returned"""
        messages = await self.should_idle(request, timeout)
        # it's not a per-spec wrong thing to do, but highly unusual
        self.assertEqual(messages, [], "Server sent messages on its own")

    @precise_warnings(["Aborting connection: Failed to parse message"])
    @asynctest
    async def test_http_get(self):
        # an HTTP request is not a CoAP-over-TCP message at all
        await self.should_abort_early(b'GET /.well-known/core HTTP/1.0')

    @precise_warnings(["Aborting connection: No CSM received"])
    @asynctest
    async def test_early_get(self):
        # a message arriving before the mandatory initial CSM
        await self.should_abort_early(b'\0\x01')

    @no_warnings
    @asynctest
    async def test_incomplete_small(self):
        await self.should_idle_quietly(b'\0')

    @no_warnings
    @asynctest
    async def test_incomplete_large1(self):
        # announcing but not sending 1 bytes extlen
        await self.should_idle_quietly(b'\xd0')

    @no_warnings
    @asynctest
    async def test_incomplete_large2(self):
        # sending one out of four bytes extlen
        # a server could in theory reject this on grounds of "no matter what
        # you say next, my buffer ain't large enough"
        await self.should_idle_quietly(b'\xf0\0')

    @no_warnings
    @asynctest
    async def test_incomplete_large3(self):
        # announcing a 269 byte long message, but not even sending the code
        await self.should_idle_quietly(b'\xe0\0\0')

    @precise_warnings(['Aborting connection: Overly large message announced'])
    @asynctest
    async def test_incomplete_large4(self):
        # announcing the longest possible message, this should exceed
        # everyone's max-message-size.
        #
        # blocking to read more would be acceptable behavior as well.
        await self.should_abort_early(b'\xf0\xff\xff\xff\xff')

    @precise_warnings(['Aborting connection: Failed to parse message'])
    @asynctest
    async def test_wrong_tkl(self):
        # send an unspecified token length of 15.
        # the rest of the message is an empty CSM, so if the server were to
        # extrapolate from the meaning of tkl 0..8, it'd read it as OK.
        await self.should_abort_early(b'\x0fxxxxxxxxxxxxxxx\xe1')

    # Fun inside the CSM

    @no_warnings
    @asynctest
    async def test_exotic_elective_csm_option(self):
        # send option number something-even (something-odd plus 269) as an empty option
        await self.should_idle_quietly(b'\x30\xe1\xe0\xf1\xf1')

    @precise_warnings(['Aborting connection: Option not supported'])
    @asynctest
    async def test_exotic_compulsory_csm_option(self):
        # send option number something-odd (something-even plus 269) as an empty option
        await self.should_abort_early(b'\x30\xe1\xe0\xf2\xf2')

    @precise_warnings(['Aborting connection: Option not supported'])
    @asynctest
    async def test_exotic_compulsory_csm_option_late(self):
        # send an empty CSM, and after that the one from compulsory_csm_option
        await self.should_abort_early(b'\0\xe1\x30\xe1\xe0\xf2\xf2')
|
[
"unittest.skipIf",
"aiocoap.transports.tcp._decode_message",
"asyncio.sleep",
"aiocoap.transports.tcp._extract_message_size",
"asyncio.open_connection"
] |
[((611, 671), 'unittest.skipIf', 'unittest.skipIf', (['tcp_disabled', '"""TCP disabled in environment"""'], {}), "(tcp_disabled, 'TCP disabled in environment')\n", (626, 671), False, 'import unittest\n'), ((843, 905), 'asyncio.open_connection', 'asyncio.open_connection', (['self.serveraddress', 'aiocoap.COAP_PORT'], {}), '(self.serveraddress, aiocoap.COAP_PORT)\n', (866, 905), False, 'import asyncio\n'), ((1297, 1350), 'aiocoap.transports.tcp._extract_message_size', 'aiocoap.transports.tcp._extract_message_size', (['encoded'], {}), '(encoded)\n', (1341, 1350), False, 'import aiocoap\n'), ((1539, 1593), 'aiocoap.transports.tcp._decode_message', 'aiocoap.transports.tcp._decode_message', (['encoded[:size]'], {}), '(encoded[:size])\n', (1577, 1593), False, 'import aiocoap\n'), ((3032, 3054), 'asyncio.sleep', 'asyncio.sleep', (['timeout'], {}), '(timeout)\n', (3045, 3054), False, 'import asyncio\n')]
|
#!/usr/bin/python3
# <NAME>
# audio to speech using google speech api
# 11/7/19
# Mac speech_recognition library installation
# pip3 install SpeechRecognition
# brew install portaudio
# pip3 install pyaudio
# pip3 install pydub
# Testing speech_recognization
# python3 -m speech_recognition
#Program usage
#usage: python3 ./audio2text.py audio.wav
#import library
import speech_recognition as sr
import sys
import os
from pydub import AudioSegment
from pydub.silence import split_on_silence
from textblob import TextBlob
# a function that splits the audio file into chunks
# and applies speech recognition
def silence_based_conversion(path):
    """Split a wav file on silence, run each chunk through the Google
    speech API, and append the recognized text to ``output.txt``.

    Fixes over the previous version: the output file handle is now closed
    (``with``), ``file`` no longer shadows the builtin, the working
    directory is restored even on error (``finally``), and the temporary
    chunk directory is removed portably with ``shutil.rmtree`` instead of
    ``os.system('rm -rf ...')``.

    :param path: path to the input .wav file
    """
    import shutil

    # Load the audio file stored on the local filesystem.
    song = AudioSegment.from_wav(path)

    # output.txt is created in the current working directory,
    # BEFORE chdir-ing into the chunk directory.
    with open("output.txt", "w+") as fh:
        # Split the track where silence lasts 400 ms or more.
        chunks = split_on_silence(
            song,
            # Must be silent for at least 400 ms. Increase this value if
            # the speaker pauses longer; decrease otherwise.
            min_silence_len=400,
            # Consider it silent if quieter than -16 dBFS;
            # adjust per requirement.
            silence_thresh=-16,
        )

        # Directory that temporarily holds the exported audio chunks.
        os.makedirs('audio_chunks', exist_ok=True)
        os.chdir('audio_chunks')
        try:
            # One recognizer is enough; no need to rebuild it per chunk.
            recognizer = sr.Recognizer()
            for i, chunk in enumerate(chunks):
                # Export the chunk into the chunk directory.
                chunk_name = "chunk{0}.wav".format(i)
                chunk.export(chunk_name, format="wav")
                with sr.AudioFile(chunk_name) as source:
                    audio = recognizer.record(source)
                try:
                    # Try converting the chunk to text.
                    rec = recognizer.recognize_google(audio)
                    fh.write(rec + ". ")
                except sr.UnknownValueError:
                    print("Could not understand audio")
                except sr.RequestError:
                    print("Could not request results. check your internet connection")
        finally:
            # Always return to the original directory and clean up.
            os.chdir('..')
            shutil.rmtree('audio_chunks', ignore_errors=True)
def textAnalysis(filename='output.txt'):
    """Score the recognized text with TextBlob sentiment and print a
    0-10 "essay score" (polarity * 5 + subjectivity * 5).

    Fixes: file handles are now closed (``with``), and the echo of the
    text honors the ``filename`` parameter (it previously re-opened the
    hard-coded "output.txt" regardless).

    :param filename: path of the text file to analyse
    """
    with open(filename) as f:
        text = f.read()
    blob = TextBlob(text)
    # sentiment[0] is polarity in [-1, 1]; sentiment[1] is subjectivity
    # in [0, 1] (per the textblob API).
    result = blob.sentiment[0] * 5 + blob.sentiment[1] * 5
    print("The Response: ")
    with open(filename, "r") as log:
        for line in log:
            print(line)
    print("\n\nThe essay score out of 10: ")
    print(result)
# the main driver program
def main():
    """Entry point: transcribe the wav file given as the first command-line
    argument, then score the resulting transcript."""
    wav_path = sys.argv[1]
    silence_based_conversion(wav_path)
    textAnalysis()


if __name__ == '__main__':
    main()
|
[
"os.mkdir",
"os.system",
"pydub.AudioSegment.from_wav",
"textblob.TextBlob",
"pydub.silence.split_on_silence",
"speech_recognition.AudioFile",
"os.chdir",
"speech_recognition.Recognizer"
] |
[((744, 771), 'pydub.AudioSegment.from_wav', 'AudioSegment.from_wav', (['path'], {}), '(path)\n', (765, 771), False, 'from pydub import AudioSegment\n'), ((998, 1061), 'pydub.silence.split_on_silence', 'split_on_silence', (['song'], {'min_silence_len': '(400)', 'silence_thresh': '(-16)'}), '(song, min_silence_len=400, silence_thresh=-16)\n', (1014, 1061), False, 'from pydub.silence import split_on_silence\n'), ((1627, 1651), 'os.chdir', 'os.chdir', (['"""audio_chunks"""'], {}), "('audio_chunks')\n", (1635, 1651), False, 'import os\n'), ((2688, 2702), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (2696, 2702), False, 'import os\n'), ((2707, 2740), 'os.system', 'os.system', (['"""rm -rf audio_chunks/"""'], {}), "('rm -rf audio_chunks/')\n", (2716, 2740), False, 'import os\n'), ((2860, 2871), 'textblob.TextBlob', 'TextBlob', (['t'], {}), '(t)\n', (2868, 2871), False, 'from textblob import TextBlob\n'), ((1487, 1511), 'os.mkdir', 'os.mkdir', (['"""audio_chunks"""'], {}), "('audio_chunks')\n", (1495, 1511), False, 'import os\n'), ((2113, 2128), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (2126, 2128), True, 'import speech_recognition as sr\n'), ((2177, 2195), 'speech_recognition.AudioFile', 'sr.AudioFile', (['file'], {}), '(file)\n', (2189, 2195), True, 'import speech_recognition as sr\n')]
|
#!/usr/bin/env python3
#
# Copyright (c) 2020 JinTian.
#
# This file is part of alfred
# (see http://jinfagang.github.io).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
main entrance of Alfred
"""
import os
import sys
import argparse
from colorama import Fore, Back, Style
import traceback
from .modules.vision.video_extractor import VideoExtractor
from .modules.scrap.image_scraper import ImageScraper
from .modules.vision.to_video import VideoCombiner
from .modules.vision.video_reducer import VideoReducer
from .modules.data.view_voc import vis_voc
from .modules.data.view_coco import vis_coco
from .modules.data.view_txt import vis_det_txt
from .modules.data.gather_voclabels import gather_labels
from .modules.data.voc2coco import convert
from .modules.data.eval_voc import eval_voc
from .modules.cabinet.count_file import count_file
from .modules.cabinet.split_txt import split_txt_file
from .modules.cabinet.license import apply_license
from .modules.cabinet.stack_imgs import stack_imgs
from alfred.utils.log import logger as logging
# Package metadata shown by `alfred --version` / the welcome banner.
__VERSION__ = '2.7.1'
__AUTHOR__ = '<NAME>'
# Fixed typo: the date previously read '20202.10.01' (five-digit year).
__DATE__ = '2020.10.01, since 2019.11.11'
__LOC__ = 'Shenzhen, China'
__git__ = 'http://github.com/jinfagang/alfred'
def arg_parse():
    """Build and parse alfred's command-line interface.

    The CLI is organised as ``<module> <action>`` subcommands (vision,
    text, scrap, cab, data); each action parser stores a ``which`` default
    of the form ``<module>-<action>`` that main() dispatches on.

    Several copy-pasted help strings were corrected (``cab count``,
    ``--type``, ``cocoview``, ``txtview``).

    :return: parsed argparse.Namespace
    """
    parser = argparse.ArgumentParser(prog="alfred")
    parser.add_argument('--version', '-v', action="store_true", help='show version info.')

    # vision, text, scrap
    main_sub_parser = parser.add_subparsers()

    # =============== vision part ================
    vision_parser = main_sub_parser.add_parser('vision', help='vision related commands.')
    vision_sub_parser = vision_parser.add_subparsers()

    vision_extract_parser = vision_sub_parser.add_parser('extract', help='extract image from video: alfred vision '
                                                                         'extract -v tt.mp4')
    vision_extract_parser.set_defaults(which='vision-extract')
    vision_extract_parser.add_argument('--video', '-v', help='video to extract')
    vision_extract_parser.add_argument('--jumps', '-j', help='jump frames for wide extract')

    vision_reduce_parser = vision_sub_parser.add_parser('reduce', help='reduce video by drop frames'
                                                                      '\nalfred vision reduce -v a.mp4 -j 10')
    vision_reduce_parser.set_defaults(which='vision-reduce')
    vision_reduce_parser.add_argument('--video', '-v', help='video to extract')
    vision_reduce_parser.add_argument('--jumps', '-j', help='jump frames for wide extract')

    vision_2video_parser = vision_sub_parser.add_parser('2video', help='combine into a video: alfred vision '
                                                                      '2video -d ./images')
    vision_2video_parser.set_defaults(which='vision-2video')
    vision_2video_parser.add_argument('--dir', '-d', help='dir contains image sequences.')

    vision_clean_parser = vision_sub_parser.add_parser('clean', help='clean images in a dir.')
    vision_clean_parser.set_defaults(which='vision-clean')
    vision_clean_parser.add_argument('--dir', '-d', help='dir contains images.')

    vision_getface_parser = vision_sub_parser.add_parser('getface', help='get all faces inside an image and save it.')
    vision_getface_parser.set_defaults(which='vision-getface')
    vision_getface_parser.add_argument('--dir', '-d', help='dir contains images to extract faces.')

    # =============== text part ================
    text_parser = main_sub_parser.add_parser('text', help='text related commands.')
    text_sub_parser = text_parser.add_subparsers()

    text_clean_parser = text_sub_parser.add_parser('clean', help='clean text.')
    text_clean_parser.set_defaults(which='text-clean')
    text_clean_parser.add_argument('--file', '-f', help='file to clean')

    text_translate_parser = text_sub_parser.add_parser('translate', help='translate')
    text_translate_parser.set_defaults(which='text-translate')
    text_translate_parser.add_argument('--file', '-f', help='translate a words to target language')

    # =============== scrap part ================
    scrap_parser = main_sub_parser.add_parser('scrap', help='scrap related commands.')
    scrap_sub_parser = scrap_parser.add_subparsers()

    scrap_image_parser = scrap_sub_parser.add_parser('image', help='scrap images.')
    scrap_image_parser.set_defaults(which='scrap-image')
    scrap_image_parser.add_argument('--query', '-q', help='query words.')

    # =============== cabinet part ================
    cabinet_parser = main_sub_parser.add_parser('cab', help='cabinet related commands.')
    cabinet_sub_parser = cabinet_parser.add_subparsers()

    # help fixed: previously said 'scrap images.' (copy-paste error)
    count_file_parser = cabinet_sub_parser.add_parser('count', help='count files in a dir.')
    count_file_parser.set_defaults(which='cab-count')
    count_file_parser.add_argument('--dir', '-d', default='./', help='dir to count.')
    # help fixed: previously said 'dir to count.'
    count_file_parser.add_argument('--type', '-t', help='file type (extension) to count.')

    split_txt_parser = cabinet_sub_parser.add_parser('split', help='split txt file.')
    split_txt_parser.set_defaults(which='cab-split')
    split_txt_parser.add_argument('--file', '-f', required=True, help='file to split.')
    split_txt_parser.add_argument('--ratios', '-r', help='ratios.')
    split_txt_parser.add_argument('--names', '-n', help='names.')

    stackimgs_parser = cabinet_sub_parser.add_parser('stackimgs', help='stack images into one')
    stackimgs_parser.set_defaults(which='cab-stackimgs')
    stackimgs_parser.add_argument('--imgs', '-i', required=True, nargs='+', help='images list.')
    stackimgs_parser.add_argument('--dim', '-d', help='dims like 2x3.')

    apply_license_parser = cabinet_sub_parser.add_parser('license', help='automatically add/update license.')
    apply_license_parser.set_defaults(which='cab-license')
    apply_license_parser.add_argument('--owner', '-o', required=True, help='owner of license.')
    apply_license_parser.add_argument('--name', '-n', help='project name.')
    apply_license_parser.add_argument('--year', '-y', help='project year: 2016-2020.')
    apply_license_parser.add_argument('--url', '-u', default='manaai.cn', help='your website url.')
    apply_license_parser.add_argument('--dir', '-d', default='./', help='to apply license dir.')
    apply_license_parser.add_argument('--except', '-e', help='except extensions: xml,cc,h')

    # =============== data part ================
    data_parser = main_sub_parser.add_parser('data', help='data related commands.')
    data_sub_parser = data_parser.add_subparsers()

    view_voc_parser = data_sub_parser.add_parser('vocview', help='view voc.')
    view_voc_parser.set_defaults(which='data-vocview')
    view_voc_parser.add_argument('--image_dir', '-i', help='Root path of VOC image.')
    view_voc_parser.add_argument('--label_dir', '-l', help='Root path of VOC label.')

    # help fixed: previously said 'view voc.'
    view_txt_parser = data_sub_parser.add_parser('txtview', help='view txt detection labels.')
    view_txt_parser.set_defaults(which='data-txtview')
    view_txt_parser.add_argument('--image_dir', '-i', help='Root path of VOC image.')
    view_txt_parser.add_argument('--label_dir', '-l', help='Root path of VOC label.')

    # help fixed: previously said 'view voc.'
    view_coco_parser = data_sub_parser.add_parser('cocoview', help='view coco.')
    view_coco_parser.set_defaults(which='data-cocoview')
    view_coco_parser.add_argument('--image_dir', '-i', help='Root path of COCO images.')
    view_coco_parser.add_argument('--json', '-j', help='Root path of COCO annotations.json .')

    voc_label_parser = data_sub_parser.add_parser('voclabel', help='gather labels from annotations dir.')
    voc_label_parser.set_defaults(which='data-voclabel')
    voc_label_parser.add_argument('--anno_dir', '-d', help='dir to annotations.')

    split_voc_parser = data_sub_parser.add_parser('splitvoc', help='split VOC to train and val.')
    split_voc_parser.set_defaults(which='data-splitvoc')
    split_voc_parser.add_argument('--image_dir', '-i', help='Root path of VOC image.')
    split_voc_parser.add_argument('--label_dir', '-l', help='Root path of VOC label.')

    labelone2voc_parser = data_sub_parser.add_parser('labelone2voc', help='convert labelone to VOC.')
    labelone2voc_parser.set_defaults(which='data-labelone2voc')
    labelone2voc_parser.add_argument('--json_dir', '-j', help='Root of labelone json dir.')

    voc2coco_parser = data_sub_parser.add_parser('voc2coco', help='convert VOC to coco.')
    voc2coco_parser.set_defaults(which='data-voc2coco')
    voc2coco_parser.add_argument('--xml_dir', '-d', help='Root of xmls dir (Annotations/).')

    evalvoc_parser = data_sub_parser.add_parser('evalvoc', help='evaluation on VOC.')
    evalvoc_parser.set_defaults(which='data-evalvoc')
    evalvoc_parser.add_argument('-g', '--gt_dir', type=str, required=True, help="Ground truth path (can be xml dir or txt dir, coco json will support soon)")
    evalvoc_parser.add_argument('-d', '--det_dir', type=str, required=True, help="Detection result (should saved into txt format)")
    evalvoc_parser.add_argument('-im', '--images_dir', type=str, default='images', help="Raw images dir for animation.")
    evalvoc_parser.add_argument('-na', '--no-animation', help="no animation is shown.", action="store_true")
    evalvoc_parser.add_argument('-np', '--no-plot', help="no plot is shown.", action="store_true")
    evalvoc_parser.add_argument('-q', '--quiet', help="minimalistic console output.", action="store_true")
    evalvoc_parser.add_argument('--min_overlap', type=float, default=0.5, help="min overlap, default is 0.5")
    evalvoc_parser.add_argument('-i', '--ignore', nargs='+', type=str, help="ignore a list of classes.")
    evalvoc_parser.add_argument('--set-class-iou', nargs='+', type=str, help="set IoU for a specific class.")
    return parser.parse_args()
def print_welcome_msg():
    """Print the colored Alfred banner followed by author/version metadata."""
    banner = (Fore.BLUE + Style.BRIGHT + 'Alfred ' + Style.RESET_ALL +
              Fore.WHITE + '- Valet of Artificial Intelligence.' + Style.RESET_ALL)
    print(banner)
    # Each metadata line shares the same label + red-bright-value layout.
    for label, value in (('Author: ', __AUTHOR__),
                         ('At : ', __DATE__),
                         ('Loc : ', __LOC__),
                         ('Star : ', __git__),
                         ('Ver. : ', __VERSION__)):
        print(label + Fore.RED + Style.BRIGHT + value + Style.RESET_ALL)
def main(args=None):
    """alfred CLI entry point: parse the command line and dispatch to the
    requested ``<module>-<action>`` handler.

    Fixes: the text-translate branch previously read ``args.v``, an
    attribute that does not exist (now ``args_dict['file']``), and
    ``--version`` printed a spurious ``None`` via
    ``print(print_welcome_msg())``.

    :param args: unused; kept for interface compatibility (arguments are
        always re-parsed from sys.argv by arg_parse()).
    """
    args = arg_parse()
    if args.version:
        print_welcome_msg()
        exit(0)
    else:
        args_dict = vars(args)
        print_welcome_msg()
        try:
            # 'which' is set by the chosen sub-parser as '<module>-<action>';
            # a missing subcommand raises KeyError, caught below.
            module = args_dict['which'].split('-')[0]
            action = args_dict['which'].split('-')[1]
            print(Fore.GREEN + Style.BRIGHT)
            print('=> Module: ' + Fore.WHITE + Style.BRIGHT + module + Fore.GREEN + Style.BRIGHT)
            print('=> Action: ' + Fore.WHITE + Style.BRIGHT + action)
            if module == 'vision':
                if action == 'extract':
                    v_f = args_dict['video']
                    j = args_dict['jumps']
                    print(Fore.BLUE + Style.BRIGHT + 'Extracting from {}'.format(v_f))
                    video_extractor = VideoExtractor(jump_frames=j)
                    video_extractor.extract(v_f)
                elif action == 'reduce':
                    v_f = args_dict['video']
                    j = args_dict['jumps']
                    print(Fore.BLUE + Style.BRIGHT + 'Reduce from {}, jumps: {}'.format(v_f, j))
                    video_reducer = VideoReducer(jump_frames=j)
                    video_reducer.act(v_f)
                elif action == '2video':
                    d = args_dict['dir']
                    combiner = VideoCombiner(img_dir=d)
                    print(Fore.BLUE + Style.BRIGHT + 'Combine video from {}'.format(d))
                    print(Fore.BLUE + Style.BRIGHT + 'What the hell.. {}'.format(d))
                    combiner.combine()
                elif action == 'clean':
                    d = args_dict['dir']
                    print(Fore.BLUE + Style.BRIGHT + 'Cleaning from {}'.format(d))
                elif action == 'getface':
                    try:
                        # dlib is an optional dependency; import lazily.
                        from .modules.vision.face_extractor import FaceExtractor
                        import dlib
                        d = args_dict['dir']
                        print(Fore.BLUE + Style.BRIGHT + 'Extract faces from {}'.format(d))
                        face_extractor = FaceExtractor()
                        face_extractor.get_faces(d)
                    except ImportError:
                        print('This action needs to install dlib first. http://dlib.net')
            elif module == 'text':
                if action == 'clean':
                    f = args_dict['file']
                    print(Fore.BLUE + Style.BRIGHT + 'Cleaning from {}'.format(f))
                elif action == 'translate':
                    # bug fix: was `args.v`, which does not exist on the Namespace
                    f = args_dict['file']
                    print(Fore.BLUE + Style.BRIGHT + 'Translate from {}'.format(f))
            elif module == 'scrap':
                if action == 'image':
                    q = args_dict['query']
                    q_list = q.split(',')
                    q_list = [i.replace(' ', '') for i in q_list]
                    image_scraper = ImageScraper()
                    image_scraper.scrap(q_list)
            elif module == 'cab':
                if action == 'count':
                    d = args_dict['dir']
                    t = args_dict['type']
                    logging.info('dir: {}, types: {}'.format(d, t))
                    count_file(d, t)
                elif action == 'split':
                    f = args_dict['file']
                    r = args_dict['ratios']
                    n = args_dict['names']
                    logging.info('files: {}, ratios: {}, names: {}'.format(f, r, n))
                    split_txt_file(f, r, n)
                elif action == 'stackimgs':
                    f = args_dict['imgs']
                    r = args_dict['dim']
                    logging.info('files: {}, dim: {}'.format(f, r))
                    stack_imgs(f, r)
                elif action == 'license':
                    owner = args_dict['owner']
                    project_name = args_dict['name']
                    year = args_dict['year']
                    url = args_dict['url']
                    d = args_dict['dir']
                    apply_license(owner, project_name, year, url, d)
            elif module == 'data':
                if action == 'vocview':
                    image_dir = args_dict['image_dir']
                    label_dir = args_dict['label_dir']
                    vis_voc(img_root=image_dir, label_root=label_dir)
                elif action == 'cocoview':
                    img_d = args_dict['image_dir']
                    json_f = args_dict['json']
                    vis_coco(img_d, json_f)
                elif action == 'txtview':
                    image_dir = args_dict['image_dir']
                    label_dir = args_dict['label_dir']
                    vis_det_txt(img_root=image_dir, label_root=label_dir)
                elif action == 'voclabel':
                    anno_dir = args_dict['anno_dir']
                    gather_labels(anno_dir)
                elif action == 'splitvoc':
                    logging.info('split VOC to train and val not implement yet.')
                elif action == 'labelone2voc':
                    logging.info('labelone2voc not implement yet.')
                elif action == 'voc2coco':
                    logging.info('start convert VOC to coco... Annotations root: {}'.format(args_dict['xml_dir']))
                    convert(args_dict['xml_dir'])
                elif action == 'evalvoc':
                    logging.info('start eval on VOC dataset..')
                    eval_voc(args)
        except Exception as e:
            traceback.print_exc()
            print(Fore.RED, 'parse args error, type -h to see help. msg: {}'.format(e))


if __name__ == '__main__':
    main()
|
[
"alfred.utils.log.logger.info",
"traceback.print_exc",
"argparse.ArgumentParser"
] |
[((2037, 2075), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""alfred"""'}), "(prog='alfred')\n", (2060, 2075), False, 'import argparse\n'), ((16740, 16761), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (16759, 16761), False, 'import traceback\n'), ((16120, 16181), 'alfred.utils.log.logger.info', 'logging.info', (['"""split VOC to train and val not implement yet."""'], {}), "('split VOC to train and val not implement yet.')\n", (16132, 16181), True, 'from alfred.utils.log import logger as logging\n'), ((16274, 16321), 'alfred.utils.log.logger.info', 'logging.info', (['"""labelone2voc not implement yet."""'], {}), "('labelone2voc not implement yet.')\n", (16286, 16321), True, 'from alfred.utils.log import logger as logging\n'), ((16617, 16660), 'alfred.utils.log.logger.info', 'logging.info', (['"""start eval on VOC dataset.."""'], {}), "('start eval on VOC dataset..')\n", (16629, 16660), True, 'from alfred.utils.log import logger as logging\n')]
|
# Generated by Django 2.2.20 on 2021-05-21 15:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an 'offer' CharField to the Image model and order images by
    publisher/offer/name/sku."""

    dependencies = [
        ('waldur_azure', '0018_drop_spl'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='image', options={'ordering': ['publisher', 'offer', 'name', 'sku']},
        ),
        migrations.AddField(
            model_name='image',
            name='offer',
            # the default backfills existing rows; preserve_default=False
            # drops it from the model state afterwards
            field=models.CharField(default='offer', max_length=255),
            preserve_default=False,
        ),
    ]
|
[
"django.db.models.CharField",
"django.db.migrations.AlterModelOptions"
] |
[((231, 339), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""image"""', 'options': "{'ordering': ['publisher', 'offer', 'name', 'sku']}"}), "(name='image', options={'ordering': [\n 'publisher', 'offer', 'name', 'sku']})\n", (259, 339), False, 'from django.db import migrations, models\n'), ((464, 513), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""offer"""', 'max_length': '(255)'}), "(default='offer', max_length=255)\n", (480, 513), False, 'from django.db import migrations, models\n')]
|
import os
import csv
import collections
import numpy as np
class StatsTracker(collections.defaultdict):
    """Accumulate running statistics across update() calls.

    Keys ending in '_min' / '_max' track the running minimum / maximum of
    their values; every other key tracks a running mean of the per-call
    means, averaged over the number of update() calls seen so far.
    """

    def __init__(self):
        # Missing keys default to 0.0; step counts update() calls (1-based).
        super().__init__(float)
        self.step = 1

    def update(self, data):
        """Fold one batch of {key: value-or-array} into the tracked stats."""
        for key, values in data.items():
            if key.endswith('_min'):
                candidate = np.min(values)
                self[key] = min(self.get(key, candidate), candidate)
            elif key.endswith('_max'):
                candidate = np.max(values)
                self[key] = max(self.get(key, candidate), candidate)
            else:
                # Incremental mean over update() calls.
                batch_mean = np.mean(values)
                self[key] += (batch_mean - self[key]) / self.step
        self.step += 1
class CSVWriter:
    """Write dict-style rows to a CSV stream, flushing after every row.

    The header row (from ``fields``) is written immediately on
    construction.
    """

    def __init__(self, fields, fileobj):
        # fileobj: any writable text file-like object; kept so write()
        # can flush it after each row.
        self.fileobj = fileobj
        self.writer = csv.DictWriter(fileobj, fieldnames=fields)
        self.writer.writeheader()

    def write(self, **kwargs):
        """Write one row; keyword names must match ``fields``."""
        self.writer.writerow(kwargs)
        # flush per row so progress survives a crash / is visible while tailing
        self.fileobj.flush()
def ensure_dir(filepath):
    """Create the parent directory of *filepath* if it does not exist.

    Handles a bare filename gracefully: os.path.dirname then returns '',
    which would previously make os.makedirs raise FileNotFoundError.

    :param filepath: path whose parent directory should exist afterwards
    """
    dirpath = os.path.dirname(filepath)
    if dirpath:
        os.makedirs(dirpath, exist_ok=True)
|
[
"os.makedirs",
"os.path.dirname",
"numpy.max",
"numpy.min",
"numpy.mean",
"csv.DictWriter"
] |
[((1032, 1057), 'os.path.dirname', 'os.path.dirname', (['filepath'], {}), '(filepath)\n', (1047, 1057), False, 'import os\n'), ((1062, 1097), 'os.makedirs', 'os.makedirs', (['dirpath'], {'exist_ok': '(True)'}), '(dirpath, exist_ok=True)\n', (1073, 1097), False, 'import os\n'), ((815, 857), 'csv.DictWriter', 'csv.DictWriter', (['fileobj'], {'fieldnames': 'fields'}), '(fileobj, fieldnames=fields)\n', (829, 857), False, 'import csv\n'), ((347, 358), 'numpy.min', 'np.min', (['val'], {}), '(val)\n', (353, 358), True, 'import numpy as np\n'), ((477, 488), 'numpy.max', 'np.max', (['val'], {}), '(val)\n', (483, 488), True, 'import numpy as np\n'), ((586, 598), 'numpy.mean', 'np.mean', (['val'], {}), '(val)\n', (593, 598), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
"""
Daemon to watch over Zabbix
"""
from pyzabbix import ZabbixAPI
from aiohttp import web
from os import getenv
import logging
# Zabbix server endpoint; credentials are injected via the environment.
zabbix_srv = 'https://zabbix.company.ru'
zabbix_user = getenv('secret_zabbix_user')
zabbix_pass = getenv('secret_zabbix_pass')
# Only hosts belonging to these host groups are inspected.
zabbix_groups = ['Production']
def get_all_versions():
    """Query Zabbix for service version items and tally versions per app.

    Returns:
        dict: ``{app_name: {version_string: host_count}}`` built from the
        last values of ``service_ping[*,service,version]`` items on hosts
        in ``zabbix_groups``. Items whose last value is ``'0'`` (no version
        reported) are skipped.

    Cleanup: the original used bare string literals as pseudo-comments
    (no-op expression statements); they are real comments now.
    """
    zapi = ZabbixAPI(zabbix_srv)
    zapi.login(zabbix_user, zabbix_pass)
    logging.info('Connected to Zabbix API Version %s' % zapi.api_version())
    # Resolve the IDs of the monitored host groups.
    group_ids = [group['groupid']
                 for group in zapi.hostgroup.get(output=['itemid', 'name'])
                 if group['name'] in zabbix_groups]
    # Fetch all version items from those groups.
    items = zapi.item.get(
        groupids=group_ids,
        search={'key_': 'service_ping[*,service,version]'},
        searchWildcardsEnabled=True,
        output=['name', 'lastvalue'])
    output = {}
    for item in items:
        # A last value of '0' means the service did not report a version.
        if item['lastvalue'] == '0':
            continue
        # Item names look like: ping "<app>" ... -> the app is in quotes.
        app = item['name'].split('"')[1]
        ver = item['lastvalue']
        output.setdefault(app, {})
        output[app][ver] = output[app].get(ver, 0) + 1
    return output
def get_current_versions(data):
    """Reduce per-app version tallies to the dominant version per app.

    Args:
        data: mapping ``{app: {version: count}}``.

    Returns:
        dict with two keys:
          * ``'most'``: ``{app: most_common_version}`` for every app;
          * ``'multi'``: ``{app: [versions sorted by count, descending]}``,
            present only for apps reporting more than one version.
    """
    output = {'multi': {}, 'most': {}}
    for app, versions in data.items():
        if len(versions) == 1:
            # A single version is trivially the most common one.
            (output['most'][app],) = versions
        else:
            ranked = sorted(versions, key=versions.get, reverse=True)
            output['most'][app] = ranked[0]
            output['multi'][app] = ranked
    return output
async def get_it(request):
    """HTTP handler returning service version information as JSON.

    :param request: incoming request; the optional ``data`` match segment
        names a single application to look up
    :type request: aiohttp.web_request.Request
    :return: all current versions, or ``{'version': ...}`` for a single app
    :rtype: aiohttp.web.Response
    """
    app = request.match_info.get('data', None)
    logging.info('incoming: %s' % app)
    output = get_current_versions(get_all_versions())
    if app:
        if app not in output['most']:
            logging.error('app not found: %s' % app)
            version = 'N/A'
        else:
            version = output['most'][app]
        output = {'version': version}
    logging.info('output: %s' % output)
    return web.json_response(output)
if __name__ == "__main__":
    'Setup logging'
    logging.basicConfig(format='xerxes_overwatch - %(levelname)s - %(message)s', level=logging.WARNING)
    app = web.Application()
    # Both '/' (all apps) and '/<app>' (one app) are served by get_it.
    app.add_routes([
        web.get('/{data}', get_it),
        web.get('/', get_it)])
    web.run_app(app, port=8080)
|
[
"logging.error",
"logging.basicConfig",
"pyzabbix.ZabbixAPI",
"aiohttp.web.Application",
"aiohttp.web.json_response",
"logging.info",
"aiohttp.web.get",
"aiohttp.web.run_app",
"os.getenv"
] |
[((209, 237), 'os.getenv', 'getenv', (['"""secret_zabbix_user"""'], {}), "('secret_zabbix_user')\n", (215, 237), False, 'from os import getenv\n'), ((252, 280), 'os.getenv', 'getenv', (['"""secret_zabbix_pass"""'], {}), "('secret_zabbix_pass')\n", (258, 280), False, 'from os import getenv\n'), ((349, 370), 'pyzabbix.ZabbixAPI', 'ZabbixAPI', (['zabbix_srv'], {}), '(zabbix_srv)\n', (358, 370), False, 'from pyzabbix import ZabbixAPI\n'), ((2237, 2271), 'logging.info', 'logging.info', (["('incoming: %s' % app)"], {}), "('incoming: %s' % app)\n", (2249, 2271), False, 'import logging\n'), ((2572, 2607), 'logging.info', 'logging.info', (["('output: %s' % output)"], {}), "('output: %s' % output)\n", (2584, 2607), False, 'import logging\n'), ((2619, 2644), 'aiohttp.web.json_response', 'web.json_response', (['output'], {}), '(output)\n', (2636, 2644), False, 'from aiohttp import web\n'), ((2698, 2801), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""xerxes_overwatch - %(levelname)s - %(message)s"""', 'level': 'logging.WARNING'}), "(format='xerxes_overwatch - %(levelname)s - %(message)s',\n level=logging.WARNING)\n", (2717, 2801), False, 'import logging\n'), ((2809, 2826), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (2824, 2826), False, 'from aiohttp import web\n'), ((2920, 2947), 'aiohttp.web.run_app', 'web.run_app', (['app'], {'port': '(8080)'}), '(app, port=8080)\n', (2931, 2947), False, 'from aiohttp import web\n'), ((2487, 2527), 'logging.error', 'logging.error', (["('app not found: %s' % app)"], {}), "('app not found: %s' % app)\n", (2500, 2527), False, 'import logging\n'), ((2856, 2882), 'aiohttp.web.get', 'web.get', (['"""/{data}"""', 'get_it'], {}), "('/{data}', get_it)\n", (2863, 2882), False, 'from aiohttp import web\n'), ((2892, 2912), 'aiohttp.web.get', 'web.get', (['"""/"""', 'get_it'], {}), "('/', get_it)\n", (2899, 2912), False, 'from aiohttp import web\n')]
|
#
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Config plugin."""
import gettext
import os
from otopi import plugin, util
from ovirt_engine_setup.engine import constants as oenginecons
def _(m):
    """Translate *m* through the 'ovirt-engine-setup' gettext domain."""
    return gettext.dgettext(domain='ovirt-engine-setup', message=m)
@util.export
class Plugin(plugin.PluginBase):
    """Config plugin.

    Ensures the engine DB backup directory is configured and exists before
    setup proceeds.
    """
    def __init__(self, context):
        super(Plugin, self).__init__(context=context)
    @plugin.event(
        stage=plugin.Stages.STAGE_INIT,
    )
    def _init(self):
        # Fall back to the packaged default backup directory when the
        # environment does not already define one.
        self.environment.setdefault(
            oenginecons.ConfigEnv.OVIRT_ENGINE_DB_BACKUP_DIR,
            oenginecons.FileLocations.OVIRT_ENGINE_DEFAULT_DB_BACKUP_DIR
        )
    @plugin.event(
        stage=plugin.Stages.STAGE_VALIDATION,
        condition=lambda self: self.environment[oenginecons.CoreEnv.ENABLE],
    )
    def _validation(self):
        # Abort setup early (RuntimeError) if the backup destination is missing.
        path = self.environment[
            oenginecons.ConfigEnv.OVIRT_ENGINE_DB_BACKUP_DIR
        ]
        if not os.path.exists(path):
            raise RuntimeError(
                _(
                    'Backup path {path} not found'
                ).format(
                    path=path,
                )
            )
# vim: expandtab tabstop=4 shiftwidth=4
|
[
"gettext.dgettext",
"os.path.exists",
"otopi.plugin.event"
] |
[((797, 853), 'gettext.dgettext', 'gettext.dgettext', ([], {'message': 'm', 'domain': '"""ovirt-engine-setup"""'}), "(message=m, domain='ovirt-engine-setup')\n", (813, 853), False, 'import gettext\n'), ((1021, 1065), 'otopi.plugin.event', 'plugin.event', ([], {'stage': 'plugin.Stages.STAGE_INIT'}), '(stage=plugin.Stages.STAGE_INIT)\n', (1033, 1065), False, 'from otopi import plugin, util\n'), ((1290, 1413), 'otopi.plugin.event', 'plugin.event', ([], {'stage': 'plugin.Stages.STAGE_VALIDATION', 'condition': '(lambda self: self.environment[oenginecons.CoreEnv.ENABLE])'}), '(stage=plugin.Stages.STAGE_VALIDATION, condition=lambda self:\n self.environment[oenginecons.CoreEnv.ENABLE])\n', (1302, 1413), False, 'from otopi import plugin, util\n'), ((1579, 1599), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1593, 1599), False, 'import os\n')]
|
"""
Test management commands
"""
from io import StringIO
from sga.management.commands.createmockdata import CreateMockDataCommand
from sga.tests.common import SGATestCase
class ManagementTest(SGATestCase):
    """Tests for the project's management commands."""

    def test_create_mock_data(self):
        """The createmockdata command reports success on its stdout."""
        captured = StringIO()
        cmd = CreateMockDataCommand()
        cmd.execute(stdout=captured)
        self.assertIn("Successfully created mock data.", captured.getvalue())
[
"io.StringIO",
"sga.management.commands.createmockdata.CreateMockDataCommand"
] |
[((369, 379), 'io.StringIO', 'StringIO', ([], {}), '()\n', (377, 379), False, 'from io import StringIO\n'), ((398, 421), 'sga.management.commands.createmockdata.CreateMockDataCommand', 'CreateMockDataCommand', ([], {}), '()\n', (419, 421), False, 'from sga.management.commands.createmockdata import CreateMockDataCommand\n')]
|
#Name: Blackjack
#Version: v.010
#Authour: dp
#Date: Aug2019
import sys
import random
import time
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
class Card(object):
    """A single playing card: a face value and a suit."""
    def __init__(self, value, suit):
        self.value = value
        self.suit = suit
    def __str__(self):
        return '%s of %s' % (self.value, self.suit)
class Deck(object):
    """A standard 52-card deck that reshuffles itself when exhausted."""
    def __init__(self):
        self.cards = []
        self.build()
    def build(self):
        """Fill the deck with a freshly shuffled set of 52 cards."""
        suits = ["spade","club","heart","diamond"]
        faces = [2,3,4,5,6,7,8,9,10,"jack","queen","king","ace"]
        for suit in suits:
            for face in faces:
                self.cards.append(Card(face, suit))
        self.shuffle()
    def add_card(self, card):
        self.cards.append(card)
    def pop_card(self, i=-1):
        # NOTE: cards are handed out as display strings, not Card objects;
        # Hand.total() relies on that representation ("10 of spade"[0] == '1').
        return str(self.cards.pop(i))
    def move_card(self, hand, num):
        """Deal ``num`` cards from this deck into ``hand``.

        Bug fix: the original tested the module-global ``deck`` for
        emptiness; we now check ``self.cards`` so any Deck instance
        correctly rebuilds itself when it runs out of cards.
        """
        for _ in range(num):
            if not self.cards:
                self.build()
            hand.add_card(self.pop_card())
    def __str__(self):
        return '\n'.join(str(card) for card in self.cards)
    def shuffle(self):
        random.shuffle(self.cards)
class Hand(Deck):
    """A player's or dealer's hand; starts empty and draws from a Deck."""
    def __init__(self, label=''):
        self.label = label
        self.cards = []
    def total(self):
        """Blackjack value of the hand.

        Bug fix: the original demoted at most ONE ace from 11 to 1, so a
        hand like three aces was valued 23 instead of 13; aces are now
        demoted one at a time while the hand busts.
        """
        rank_values = {'2':2, '3':3, '4':4, '5':5, '6':6, '7':7, '8':8,'9':9, '1':10, 'j':10, 'q':10, 'k':10, 'a':11}
        hand_total = 0
        ace_counter = 0
        for card in self.cards:
            rank = card[0]
            hand_total += rank_values[rank]
            if rank == 'a':
                ace_counter += 1
        while ace_counter > 0 and hand_total > 21:
            hand_total -= 10
            ace_counter -= 1
        return hand_total
# Module-level game state shared by all GUI callbacks.
player_hand = Hand()
dealer_hand = Hand()
deck = Deck()
# Next insertion row for the info listbox ('global' at module scope is a no-op).
global info_i
info_i = 0
def update_info_gui(text):
    """Append *text* as the next row of the info listbox."""
    global info_i
    top.info_listbox.insert(info_i,text)
    info_i += 1
def update_player_gui():
    """Redraw the player's cards and hand total from player_hand."""
    top.player_listbox.delete(0,100)
    top.player_hand_total_lbl.configure(text=player_hand.total())
    for i in range(len(player_hand.cards)):
        top.player_listbox.insert(i,player_hand.cards[i])
def update_dealer_gui(show):
    """Redraw the dealer's cards.

    show=1 hides the hole card (shows 'Hidden' plus the up card);
    show=2 reveals the full hand and its total.
    """
    top.dealer_listbox.delete(0,100)
    if show == 1:
        top.dealer_hand_total_lbl.configure(text='-')
        top.dealer_listbox.insert(0,'Hidden')
        top.dealer_listbox.insert(1,dealer_hand.cards[1])
    if show == 2:
        top.dealer_hand_total_lbl.configure(text=dealer_hand.total())
        for i in range(len(dealer_hand.cards)):
            top.dealer_listbox.insert(i,dealer_hand.cards[i])
def deal_button_action():
    """Start a new round: clear the log, deal two cards each, refresh the GUI."""
    top.info_listbox.delete(0,100)
    #top.player_listbox2.place_forget()
    # NOTE(review): this rebinds a LOCAL; a 'global info_i' statement seems to
    # be missing, so the info-row counter is never actually reset -- confirm.
    info_i = 0
    update_info_gui('Dealer Deals a new hand.')
    player_hand.cards = []
    dealer_hand.cards = []
    # Alternate cards between player and dealer, as at a real table.
    deck.move_card(player_hand, 1)
    deck.move_card(dealer_hand, 1)
    deck.move_card(player_hand, 1)
    deck.move_card(dealer_hand, 1)
    update_player_gui()
    time.sleep(.3)
    update_dealer_gui(1)
    if player_hand.total() == 21:
        update_info_gui('BLACKJACK!!!')
    if dealer_hand.cards[1][0] == 'a':
        pass
        #print("Dealer is showing an Ace")
        #print("but we are not betting so it does not matter.")
    #does player have doubles to split.
    #double down - take one card and stay.
    sys.stdout.flush()
def hit_button_action():
    """Deal the player one more card; announce a bust over 21."""
    update_info_gui('---Player Hits---')
    deck.move_card(player_hand, 1)
    update_player_gui()
    if player_hand.total() > 21:
        update_info_gui("BUST!")
    sys.stdout.flush()
def stand_button_action():
    """Play out the dealer's hand (hit below 17) and announce the outcome."""
    while dealer_hand.total() < 17:
        update_info_gui("---Dealer Hits---")
        deck.move_card(dealer_hand, 1)
        update_dealer_gui(2)
        if dealer_hand.total() > 21:
            update_info_gui("Dealer BUST!")
    update_dealer_gui(2)
    if (player_hand.total() > dealer_hand.total() or dealer_hand.total() > 21):
        update_info_gui("Player Wins!")
    elif player_hand.total() == dealer_hand.total():
        update_info_gui("Push!")
    else:
        update_info_gui("Dealer Wins!")
    sys.stdout.flush()
def split_button_action():
    """Unfinished split feature: currently just shows and resets a spare listbox."""
    pass
    #top.player_listbox2 = tk.Listbox(top)
    top.player_listbox2.place(relx=0.400, rely=0.549, relheight=0.324
            , relwidth=0.352)
    top.player_listbox2.configure(background="white")
    top.player_listbox2.configure(font="TkFixedFont")
    top.player_listbox2.configure(selectbackground="#c4c4c4")
    top.player_listbox2.configure(width=124)
    #move one card to this list.
    #play hand one
    #play hand two
    #win/lose
    #clean up
    #time.sleep(1)
    top.player_listbox2.place_forget()
    top.player_listbox2 = tk.Listbox(top)
    sys.stdout.flush()
def init(top, gui, *args, **kwargs):
    """Record the root window and the GUI object in module globals."""
    global w, top_level, root
    w = gui
    top_level = top
    root = top
def destroy_window():
    """Destroy the top-level window and clear the global reference."""
    global top_level
    top_level.destroy()
    top_level = None
def vp_start_gui():
    '''Starting point when module is the main routine.'''
    global val, w, root, top
    root = tk.Tk()
    top = Toplevel1 (root)
    init(root, top)
    root.mainloop()
    print(top)
w = None
def create_Toplevel1(root, *args, **kwargs):
    '''Starting point when module is imported by another program.'''
    global w, w_win, rt, top
    rt = root
    w = tk.Toplevel (root)
    top = Toplevel1 (w)
    init(w, top, *args, **kwargs)
    return (w, top)
def destroy_Toplevel1():
    """Tear down the imported-mode top-level window."""
    global w
    w.destroy()
    w = None
class Toplevel1:
    def __init__(self, top=None):
        '''This class configures and populates the toplevel window.
           top is the toplevel containing window.'''
        # Color constants emitted by the GUI builder; kept for reference.
        _bgcolor = '#d9d9d9' # X11 color: 'gray85'
        _fgcolor = '#000000' # X11 color: 'black'
        _compcolor = '#d9d9d9' # X11 color: 'gray85'
        _ana1color = '#d9d9d9' # X11 color: 'gray85'
        _ana2color = '#ececec' # Closest X11 color: 'gray92'
        font9 = "-family gothic -size 15 -weight normal -slant roman " \
            "-underline 0 -overstrike 0"
        top.geometry("352x346+2220+7")
        top.title("Blackjack")
        top.configure(highlightcolor="black")
        # Player's cards (bottom-left).
        self.player_listbox = tk.Listbox(top)
        self.player_listbox.place(relx=0.057, rely=0.549, relheight=0.324
                , relwidth=0.352)
        self.player_listbox.configure(background="white")
        self.player_listbox.configure(font="TkFixedFont")
        self.player_listbox.configure(selectbackground="#c4c4c4")
        self.player_listbox.configure(width=124)
        # Spare listbox used by the (unfinished) split feature.
        self.player_listbox2 = tk.Listbox(top)
        self.Label1 = tk.Label(top)
        self.Label1.place(relx=0.028, rely=0.029, height=15, width=109)
        self.Label1.configure(activebackground="#f9f9f9")
        self.Label1.configure(text='''Dealers Hand''')
        self.Label2 = tk.Label(top)
        self.Label2.place(relx=0.028, rely=0.491, height=15, width=109)
        self.Label2.configure(activebackground="#f9f9f9")
        self.Label2.configure(text='''Player Hand''')
        # Dealer's cards (top-left).
        self.dealer_listbox = tk.Listbox(top)
        self.dealer_listbox.place(relx=0.057, rely=0.087, relheight=0.353
                , relwidth=0.352)
        self.dealer_listbox.configure(background="white")
        self.dealer_listbox.configure(font="TkFixedFont")
        self.dealer_listbox.configure(selectbackground="#c4c4c4")
        self.dealer_listbox.configure(width=124)
        # Game log / messages (top-right).
        self.info_listbox = tk.Listbox(top)
        self.info_listbox.place(relx=0.450, rely=0.087, relheight=0.353
                , relwidth=0.500)
        self.info_listbox.configure(background="white")
        self.info_listbox.configure(font="TkFixedFont")
        self.info_listbox.configure(selectbackground="#c4c4c4")
        self.info_listbox.configure(width=124)
        # Action buttons along the bottom row.
        self.deal_button = tk.Button(top)
        self.deal_button.place(relx=0.057, rely=0.896, height=25, width=56)
        self.deal_button.configure(activebackground="#f9f9f9")
        self.deal_button.configure(command=deal_button_action)
        self.deal_button.configure(text='''Deal''')
        self.hit_button = tk.Button(top)
        self.hit_button.place(relx=0.199, rely=0.896, height=25, width=49)
        self.hit_button.configure(activebackground="#f9f9f9")
        self.hit_button.configure(command=hit_button_action)
        self.hit_button.configure(text='''Hit''')
        self.stand_button = tk.Button(top)
        self.stand_button.place(relx=0.313, rely=0.896, height=25, width=63)
        self.stand_button.configure(activebackground="#f9f9f9")
        self.stand_button.configure(command=stand_button_action)
        self.stand_button.configure(text='''Stand''')
        self.split_button = tk.Button(top)
        self.split_button.place(relx=0.483, rely=0.896, height=25, width=63)
        self.split_button.configure(activebackground="#f9f9f9")
        self.split_button.configure(command=split_button_action)
        self.split_button.configure(text='''Split''')
        # Double Down is laid out but has no command wired up yet.
        self.Button5 = tk.Button(top)
        self.Button5.place(relx=0.653, rely=0.896, height=25, width=105)
        self.Button5.configure(activebackground="#f9f9f9")
        self.Button5.configure(text='''Double Down''')
        self.dealer_hand_total_lbl = tk.Label(top)
        self.dealer_hand_total_lbl.place(relx=0.313, rely=0.015, height=22
                , width=19)
        self.dealer_hand_total_lbl.configure(activebackground="#f9f9f9")
        self.dealer_hand_total_lbl.configure(font=font9)
        #self.dealer_hand_total_lbl.configure(text='''0''')
        self.player_hand_total_lbl = tk.Label(top)
        self.player_hand_total_lbl.place(relx=0.313, rely=0.477, height=22
                , width=20)
        self.player_hand_total_lbl.configure(activebackground="#f9f9f9")
        self.player_hand_total_lbl.configure(font=font9)
        self.player_hand_total_lbl.configure(text='''0''')
if __name__ == '__main__':
    vp_start_gui()
|
[
"tkinter.Button",
"random.shuffle",
"tkinter.Listbox",
"time.sleep",
"tkinter.Toplevel",
"sys.stdout.flush",
"tkinter.Label",
"tkinter.Tk"
] |
[((3249, 3264), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (3259, 3264), False, 'import time\n'), ((3609, 3627), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3625, 3627), False, 'import sys\n'), ((3825, 3843), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3841, 3843), False, 'import sys\n'), ((4389, 4407), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4405, 4407), False, 'import sys\n'), ((4986, 5001), 'tkinter.Listbox', 'tk.Listbox', (['top'], {}), '(top)\n', (4996, 5001), True, 'import tkinter as tk\n'), ((5006, 5024), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5022, 5024), False, 'import sys\n'), ((5348, 5355), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (5353, 5355), True, 'import tkinter as tk\n'), ((5613, 5630), 'tkinter.Toplevel', 'tk.Toplevel', (['root'], {}), '(root)\n', (5624, 5630), True, 'import tkinter as tk\n'), ((1345, 1371), 'random.shuffle', 'random.shuffle', (['self.cards'], {}), '(self.cards)\n', (1359, 1371), False, 'import random\n'), ((6483, 6498), 'tkinter.Listbox', 'tk.Listbox', (['top'], {}), '(top)\n', (6493, 6498), True, 'import tkinter as tk\n'), ((6870, 6885), 'tkinter.Listbox', 'tk.Listbox', (['top'], {}), '(top)\n', (6880, 6885), True, 'import tkinter as tk\n'), ((6909, 6922), 'tkinter.Label', 'tk.Label', (['top'], {}), '(top)\n', (6917, 6922), True, 'import tkinter as tk\n'), ((7131, 7144), 'tkinter.Label', 'tk.Label', (['top'], {}), '(top)\n', (7139, 7144), True, 'import tkinter as tk\n'), ((7360, 7375), 'tkinter.Listbox', 'tk.Listbox', (['top'], {}), '(top)\n', (7370, 7375), True, 'import tkinter as tk\n'), ((7744, 7759), 'tkinter.Listbox', 'tk.Listbox', (['top'], {}), '(top)\n', (7754, 7759), True, 'import tkinter as tk\n'), ((8117, 8131), 'tkinter.Button', 'tk.Button', (['top'], {}), '(top)\n', (8126, 8131), True, 'import tkinter as tk\n'), ((8413, 8427), 'tkinter.Button', 'tk.Button', (['top'], {}), '(top)\n', (8422, 8427), True, 'import tkinter as tk\n'), ((8705, 
8719), 'tkinter.Button', 'tk.Button', (['top'], {}), '(top)\n', (8714, 8719), True, 'import tkinter as tk\n'), ((9009, 9023), 'tkinter.Button', 'tk.Button', (['top'], {}), '(top)\n', (9018, 9023), True, 'import tkinter as tk\n'), ((9308, 9322), 'tkinter.Button', 'tk.Button', (['top'], {}), '(top)\n', (9317, 9322), True, 'import tkinter as tk\n'), ((9548, 9561), 'tkinter.Label', 'tk.Label', (['top'], {}), '(top)\n', (9556, 9561), True, 'import tkinter as tk\n'), ((9893, 9906), 'tkinter.Label', 'tk.Label', (['top'], {}), '(top)\n', (9901, 9906), True, 'import tkinter as tk\n')]
|
from django import forms
from jobboard.models import Job
class FormControl(forms.ModelForm):
    """ModelForm that tags every visible widget with Bootstrap's form-control class."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for visible in self.visible_fields():
            visible.field.widget.attrs['class'] = 'form-control'
class CreateNewJobForm(FormControl):
    """Job-creation form exposing the user-editable Job fields."""
    class Meta:
        model = Job
        fields = ['title', 'job_type', 'major', 'work_from', 'description', 'city', 'address', 'title_keywords']
        # Keep the description textarea compact.
        widgets = {'description': forms.Textarea(attrs={'rows': '5'}), }
|
[
"django.forms.Textarea"
] |
[((517, 552), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'rows': '5'}"}), "(attrs={'rows': '5'})\n", (531, 552), False, 'from django import forms\n')]
|
import pytest
from jwkest.jwt import JWT, b2s_conv
__author__ = 'roland'
def _eq(l1, l2):
    """True when both iterables hold the same elements, ignoring order and dupes."""
    return set(l2) == set(l1)
def test_pack_jwt():
    """A packed unsigned JWT consists of three dot-separated segments."""
    token = JWT(**{"alg": "none", "cty": "jwt"}).pack(
        parts=[{"iss": "joe", "exp": 1300819380,
                "http://example.com/is_root": True}, ""])
    assert len(token.split('.')) == 3


def test_unpack_pack():
    """Unpacking a token and re-packing it reproduces the original token."""
    claims = {"iss": "joe", "exp": 1300819380,
              "http://example.com/is_root": True}
    token = JWT(**{"alg": "none"}).pack(parts=[claims, ""])
    assert token == JWT().unpack(token).pack()


def test_pack_unpack():
    """A round-tripped payload keeps all of its claims intact."""
    claims = {"iss": "joe", "exp": 1300819380,
              "http://example.com/is_root": True}
    token = JWT(**{"alg": "none"}).pack(parts=[claims, ""])
    unpacked = JWT().unpack(token)
    assert unpacked
    payload = unpacked.payload()
    assert _eq(payload.keys(), ["iss", "exp", "http://example.com/is_root"])
    for claim in ("iss", "exp", "http://example.com/is_root"):
        assert payload[claim] == claims[claim]


def test_pack_with_headers():
    """Extra headers passed to pack() survive an unpack round trip."""
    token = JWT().pack(parts=["", ""], headers={"foo": "bar"})
    assert JWT().unpack(token).headers["foo"] == "bar"


def test_unpack_str():
    """Unpacking a freshly packed token yields a payload object."""
    claims = {"iss": "joe", "exp": 1300819380,
              "http://example.com/is_root": True}
    token = JWT(**{"alg": "none"}).pack(parts=[claims, ""])
    unpacked = JWT().unpack(token)
    assert unpacked
    out_payload = unpacked.payload()


def test_b2s_conv_raise_exception_on_bad_value():
    """b2s_conv rejects objects it cannot convert to a string."""
    with pytest.raises(ValueError):
        b2s_conv(object())


if __name__ == "__main__":
    test_unpack_str()
|
[
"pytest.raises",
"jwkest.jwt.JWT"
] |
[((158, 194), 'jwkest.jwt.JWT', 'JWT', ([], {}), "(**{'alg': 'none', 'cty': 'jwt'})\n", (161, 194), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((410, 432), 'jwkest.jwt.JWT', 'JWT', ([], {}), "(**{'alg': 'none'})\n", (413, 432), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((678, 700), 'jwkest.jwt.JWT', 'JWT', ([], {}), "(**{'alg': 'none'})\n", (681, 700), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((1247, 1252), 'jwkest.jwt.JWT', 'JWT', ([], {}), '()\n', (1250, 1252), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((1402, 1424), 'jwkest.jwt.JWT', 'JWT', ([], {}), "(**{'alg': 'none'})\n", (1405, 1424), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((1708, 1733), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1721, 1733), False, 'import pytest\n'), ((854, 859), 'jwkest.jwt.JWT', 'JWT', ([], {}), '()\n', (857, 859), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((1578, 1583), 'jwkest.jwt.JWT', 'JWT', ([], {}), '()\n', (1581, 1583), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((588, 593), 'jwkest.jwt.JWT', 'JWT', ([], {}), '()\n', (591, 593), False, 'from jwkest.jwt import JWT, b2s_conv\n'), ((1324, 1329), 'jwkest.jwt.JWT', 'JWT', ([], {}), '()\n', (1327, 1329), False, 'from jwkest.jwt import JWT, b2s_conv\n')]
|
"""
Copyright (C) 2022 <NAME>
This work is released under the MIT License.
See the file LICENSE for details
Utility functions
"""
from math import sqrt
from typing import List
import numpy as np
import carla
import io
def loc_dist(a, b):
    """Euclidean distance between two objects exposing x/y/z attributes."""
    dx, dy, dz = a.x - b.x, a.y - b.y, a.z - b.z
    return sqrt(dx * dx + dy * dy + dz * dz)
def vector_normalize(v:carla.Vector3D):
    """Return *v* scaled to unit length.

    Bug fix: the original divided by the *squared* norm
    (``x**2 + y**2 + z**2``), which does not yield a unit vector; we divide
    by the actual Euclidean norm. A zero vector still raises
    ZeroDivisionError, exactly as before.
    """
    norm = sqrt(v.x**2 + v.y**2 + v.z**2)
    return carla.Vector3D(x=v.x/norm, y=v.y/norm, z=v.z/norm)
def vector_from_to(a:carla.Vector3D, b:carla.Vector3D):
    """Displacement vector pointing from *a* to *b*."""
    return carla.Vector3D(b.x - a.x, b.y - a.y, b.z - a.z)
def scalar_product(a:carla.Vector3D, b:carla.Vector3D):
    """Dot product of two 3D vectors."""
    return sum(p * q for p, q in ((a.x, b.x), (a.y, b.y), (a.z, b.z)))
def vector_dist(a, b):
    """Euclidean distance between two numpy vectors."""
    difference = a - b
    return np.linalg.norm(difference)
def normalize_numpy_vector(x: np.ndarray):
    """Return *x* scaled to unit length, or None when its norm is (near) zero."""
    magnitude = np.linalg.norm(x)
    # Guard against division by a vanishing norm.
    return x / magnitude if magnitude > 0.00001 else None
# long_str(2) -> '0002'
# long_str(42, 3) -> '042'
def long_str(i:int, N:int=4, padding='0'):
    """Left-pad str(i) with *padding* up to at least N characters.

    long_str(2) -> '0002'; long_str(42, 3) -> '042'.
    """
    text = str(i)
    if len(text) >= N:
        return text
    return padding * (N - len(text)) + text
# Removes 'intro' from left part of 'text', raises error if not found
def good_lstrip(text, intro):
    """Remove *intro* from the start of *text*; assert it really is a prefix."""
    assert(len(intro) <= len(text))
    assert(text.startswith(intro))
    return text[len(intro):]
def intr(x):
    """Round *x* (anything float() accepts) to the nearest int (banker's rounding)."""
    as_float = float(x)
    return int(round(as_float))
# Projective flattening, scales homogeneous coordinates so that last coordinate is always one
def pflat(x):
    """Projectively normalize homogeneous coordinates IN PLACE (last coord -> 1).

    Works on a single vector (1-D) or on column-stacked points (2-D);
    returns the same (mutated) array.
    """
    if x.ndim == 1:
        x /= x[-1]
    else:
        x /= x[-1, :]
    return x
def print_table(row_names:List[str], col_names:List[str], matrix:np.ndarray,
                decimals=2):
    """Pretty-print *matrix* with row and column labels, values rounded.

    Every cell is right-aligned to the width of the longest entry.
    """
    rounded = np.around(matrix, decimals=decimals)
    label_col = np.array(row_names, dtype=str).reshape((len(row_names), 1))
    body = np.hstack([label_col, rounded])
    header = np.array(['', *col_names], dtype=str)
    header = header.reshape((1, len(col_names) + 1))
    table = np.vstack([header, body])
    width = max(len(cell) for cell in table.flatten())
    for r in range(table.shape[0]):
        for c in range(table.shape[1]):
            table[r, c] = long_str(table[r, c], width, padding=' ')
    print(np.array2string(table, max_line_width=200))
|
[
"math.sqrt",
"numpy.array2string",
"numpy.hstack",
"numpy.around",
"numpy.linalg.norm",
"numpy.array",
"numpy.vstack",
"carla.Vector3D"
] |
[((272, 332), 'math.sqrt', 'sqrt', (['((a.x - b.x) ** 2 + (a.y - b.y) ** 2 + (a.z - b.z) ** 2)'], {}), '((a.x - b.x) ** 2 + (a.y - b.y) ** 2 + (a.z - b.z) ** 2)\n', (276, 332), False, 'from math import sqrt\n'), ((414, 470), 'carla.Vector3D', 'carla.Vector3D', ([], {'x': '(v.x / norm)', 'y': '(v.y / norm)', 'z': '(v.z / norm)'}), '(x=v.x / norm, y=v.y / norm, z=v.z / norm)\n', (428, 470), False, 'import carla\n'), ((606, 632), 'carla.Vector3D', 'carla.Vector3D', (['dx', 'dy', 'dz'], {}), '(dx, dy, dz)\n', (620, 632), False, 'import carla\n'), ((764, 785), 'numpy.linalg.norm', 'np.linalg.norm', (['(a - b)'], {}), '(a - b)\n', (778, 785), True, 'import numpy as np\n'), ((836, 853), 'numpy.linalg.norm', 'np.linalg.norm', (['x'], {}), '(x)\n', (850, 853), True, 'import numpy as np\n'), ((1711, 1747), 'numpy.around', 'np.around', (['matrix'], {'decimals': 'decimals'}), '(matrix, decimals=decimals)\n', (1720, 1747), True, 'import numpy as np\n'), ((1842, 1872), 'numpy.hstack', 'np.hstack', (['[row_names, matrix]'], {}), '([row_names, matrix])\n', (1851, 1872), True, 'import numpy as np\n'), ((1889, 1926), 'numpy.array', 'np.array', (["['', *col_names]"], {'dtype': 'str'}), "(['', *col_names], dtype=str)\n", (1897, 1926), True, 'import numpy as np\n'), ((1995, 2025), 'numpy.vstack', 'np.vstack', (['[col_names, matrix]'], {}), '([col_names, matrix])\n', (2004, 2025), True, 'import numpy as np\n'), ((2263, 2306), 'numpy.array2string', 'np.array2string', (['matrix'], {'max_line_width': '(200)'}), '(matrix, max_line_width=200)\n', (2278, 2306), True, 'import numpy as np\n'), ((1769, 1799), 'numpy.array', 'np.array', (['row_names'], {'dtype': 'str'}), '(row_names, dtype=str)\n', (1777, 1799), True, 'import numpy as np\n')]
|
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Sequential Monte Carlo."""
from __future__ import print_function
import collections
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.experimental.mcmc import weighted_resampling
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import samplers
from tensorflow_probability.python.mcmc import kernel as kernel_base
__all__ = [
'SequentialMonteCarlo',
'SequentialMonteCarloResults',
'WeightedParticles',
'ess_below_threshold',
]
# SequentialMonteCarlo `state` structure.
class WeightedParticles(collections.namedtuple(
    'WeightedParticles', ['particles', 'log_weights'])):
  """A population of particles together with their log importance weights.

  This named tuple is the `state` consumed and produced by the
  `SequentialMonteCarlo` transition kernel.

  Elements:
    particles: a (structure of) Tensor(s) each of shape
      `concat([[num_particles, b1, ..., bN], event_shape])`, where
      `event_shape` may differ across component `Tensor`s.
    log_weights: `float` `Tensor` of shape `[num_particles, b1, ..., bN]`
      holding one log importance weight per particle, typically normalized
      so that `exp(reduce_logsumexp(log_weights, axis=0)) == 1.`. Weights
      must be used together with `particles` to compute expectations under
      the target distribution.

  When particles are stacked across multiple inference steps, every
  `Tensor` shape gains a leading dimension of size `num_steps`.
  """
# SequentialMonteCarlo `kernel_results` structure.
class SequentialMonteCarloResults(collections.namedtuple(
    'SequentialMonteCarloResults',
    ['steps',
     'parent_indices',
     'incremental_log_marginal_likelihood',
     # Track both incremental and accumulated likelihoods so that users can get
     # the accumulated likelihood without needing to trace every step.
     'accumulated_log_marginal_likelihood',
     'seed',
    ])):
  """Auxiliary results from a Sequential Monte Carlo step.

  This structure is the `kernel_results` of the `SequentialMonteCarlo`
  transition kernel.

  Elements:
    steps: scalar int `Tensor`; number of inference steps completed so far.
    parent_indices: `int` `Tensor` of shape `[num_particles, b1, ..., bN]`;
      `parent_indices[k]` gives the index of the particle at the previous
      step from which the `k`th current particle is immediately descended.
      See also `tfp.experimental.mcmc.reconstruct_trajectories`.
    incremental_log_marginal_likelihood: float `Tensor` of shape
      `[b1, ..., bN]`; the log of an unbiased estimate of the ratio of
      normalizing constants incurred in the most recent step (typically
      the likelihood of observed data). By [Jensen's inequality](
      https://en.wikipedia.org/wiki/Jensen%27s_inequality) this is
      *smaller* in expectation than the true log ratio.
    accumulated_log_marginal_likelihood: float `Tensor` of shape
      `[b1, ..., bN]`; same estimate accumulated since the initial step,
      with the same Jensen caveat.
    seed: The seed used in one_step.

  When results are stacked across multiple inference steps, every `Tensor`
  shape gains a leading dimension of size `num_steps`.
  """
  __slots__ = ()
def _dummy_indices_like(indices):
  """Returns dummy indices ([0, 1, 2, ...]) with batch shape like `indices`."""
  indices_shape = ps.shape(indices)
  num_particles = indices_shape[0]
  # Build [0, ..., num_particles - 1], reshape it to [num_particles, 1, ..., 1]
  # (matching the rank of `indices`), then broadcast to the full batch shape.
  return tf.broadcast_to(
      ps.reshape(
          ps.range(num_particles),
          ps.pad([num_particles],
                 paddings=[[0, ps.rank_from_shape(indices_shape) - 1]],
                 constant_values=1)),
      indices_shape)
def ess_below_threshold(weighted_particles, threshold=0.5):
  """Determines if the effective sample size is much less than num_particles."""
  with tf.name_scope('ess_below_threshold'):
    num_particles = ps.size0(weighted_particles.log_weights)
    # Effective sample size is 1 / sum(w_i^2) for normalized weights w_i;
    # computed in log space for numerical stability.
    log_weights = tf.math.log_softmax(weighted_particles.log_weights, axis=0)
    log_ess = -tf.math.reduce_logsumexp(2 * log_weights, axis=0)
    # Trigger resampling when ESS < threshold * num_particles.
    return log_ess < (ps.log(num_particles) +
                      ps.log(threshold))
class SequentialMonteCarlo(kernel_base.TransitionKernel):
"""Sequential Monte Carlo transition kernel.
Sequential Monte Carlo maintains a population of weighted particles
representing samples from a sequence of target distributions. It is
*not* a calibrated MCMC kernel: the transitions step through a sequence of
target distributions, rather than trying to maintain a stationary
distribution.
"""
def __init__(self,
propose_and_update_log_weights_fn,
resample_fn=weighted_resampling.resample_systematic,
resample_criterion_fn=ess_below_threshold,
name=None):
"""Initializes a sequential Monte Carlo transition kernel.
Args:
propose_and_update_log_weights_fn: Python `callable` with signature
`new_weighted_particles = propose_and_update_log_weights_fn(step,
weighted_particles, seed=None)`. Its input is a
`tfp.experimental.mcmc.WeightedParticles` structure representing
weighted samples (with normalized weights) from the `step`th
target distribution, and it returns another such structure representing
unnormalized weighted samples from the next (`step + 1`th) target
distribution. This will typically include particles
sampled from a proposal distribution `q(x[step + 1] | x[step])`, and
weights that account for some or all of: the proposal density,
a transition density `p(x[step + 1] | x[step]),
observation weights `p(y[step + 1] | x[step + 1])`, and/or a backwards
or 'L'-kernel `L(x[step] | x[step + 1])`. The (log) normalization
constant of the weights is interpreted as the incremental (log) marginal
likelihood.
resample_fn: Resampling scheme specified as a `callable` with signature
`indices = resample_fn(log_probs, event_size, sample_shape, seed)`,
where `log_probs` is a `Tensor` of the same shape as `state.log_weights`
containing a normalized log-probability for every current
particle, `event_size` is the number of new particle indices to
generate, `sample_shape` is the number of independent index sets to
return, and the return value `indices` is an `int` Tensor of shape
`concat([sample_shape, [event_size, B1, ..., BN])`. Typically one of
`tfp.experimental.mcmc.resample_deterministic_minimum_error`,
`tfp.experimental.mcmc.resample_independent`,
`tfp.experimental.mcmc.resample_stratified`, or
`tfp.experimental.mcmc.resample_systematic`.
Default value: `tfp.experimental.mcmc.resample_systematic`.
resample_criterion_fn: optional Python `callable` with signature
`do_resample = resample_criterion_fn(weighted_particles)`,
passed an instance of `tfp.experimental.mcmc.WeightedParticles`. The
return value `do_resample`
determines whether particles are resampled at the current step. The
default behavior is to resample particles when the effective
sample size falls below half of the total number of particles.
Default value: `tfp.experimental.mcmc.ess_below_threshold`.
name: Python `str` name for ops created by this kernel.
"""
self._propose_and_update_log_weights_fn = propose_and_update_log_weights_fn
self._resample_fn = resample_fn
self._resample_criterion_fn = resample_criterion_fn
self._name = name or 'SequentialMonteCarlo'
@property
def is_calibrated(self):
return False
@property
def name(self):
return self._name
@property
def propose_and_update_log_weights_fn(self):
return self._propose_and_update_log_weights_fn
@property
def resample_criterion_fn(self):
return self._resample_criterion_fn
@property
def resample_fn(self):
return self._resample_fn
def one_step(self, state, kernel_results, seed=None):
"""Takes one Sequential Monte Carlo inference step.
Args:
state: instance of `tfp.experimental.mcmc.WeightedParticles` representing
the current particles with (log) weights. The `log_weights` must be
a float `Tensor` of shape `[num_particles, b1, ..., bN]`. The
`particles` may be any structure of `Tensor`s, each of which
must have shape `concat([log_weights.shape, event_shape])` for some
`event_shape`, which may vary across components.
kernel_results: instance of
`tfp.experimental.mcmc.SequentialMonteCarloResults` representing results
from a previous step.
seed: Optional seed for reproducible sampling.
Returns:
state: instance of `tfp.experimental.mcmc.WeightedParticles` representing
new particles with (log) weights.
kernel_results: instance of
`tfp.experimental.mcmc.SequentialMonteCarloResults`.
"""
with tf.name_scope(self.name):
with tf.name_scope('one_step'):
seed = samplers.sanitize_seed(seed)
proposal_seed, resample_seed = samplers.split_seed(seed)
state = WeightedParticles(*state) # Canonicalize.
num_particles = ps.size0(state.log_weights)
# Propose new particles and update weights for this step, unless it's
# the initial step, in which case, use the user-provided initial
# particles and weights.
proposed_state = self.propose_and_update_log_weights_fn(
# Propose state[t] from state[t - 1].
ps.maximum(0, kernel_results.steps - 1),
state,
seed=proposal_seed)
is_initial_step = ps.equal(kernel_results.steps, 0)
# TODO(davmre): this `where` assumes the state size didn't change.
state = tf.nest.map_structure(
lambda a, b: tf.where(is_initial_step, a, b), state, proposed_state)
normalized_log_weights = tf.nn.log_softmax(state.log_weights, axis=0)
# Every entry of `log_weights` differs from `normalized_log_weights`
# by the same normalizing constant. We extract that constant by
# examining an arbitrary entry.
incremental_log_marginal_likelihood = (state.log_weights[0] -
normalized_log_weights[0])
do_resample = self.resample_criterion_fn(state)
# Some batch elements may require resampling and others not, so
# we first do the resampling for all elements, then select whether to
# use the resampled values for each batch element according to
# `do_resample`. If there were no batching, we might prefer to use
# `tf.cond` to avoid the resampling computation on steps where it's not
# needed---but we're ultimately interested in adaptive resampling
# for statistical (not computational) purposes, so this isn't a
# dealbreaker.
resampled_particles, resample_indices = weighted_resampling.resample(
state.particles,
state.log_weights,
self.resample_fn,
seed=resample_seed)
uniform_weights = tf.fill(
ps.shape(state.log_weights),
value=-tf.math.log(tf.cast(num_particles, state.log_weights.dtype)))
(resampled_particles,
resample_indices,
log_weights) = tf.nest.map_structure(
lambda r, p: ps.where(do_resample, r, p),
(resampled_particles, resample_indices, uniform_weights),
(state.particles, _dummy_indices_like(resample_indices),
normalized_log_weights))
return (WeightedParticles(particles=resampled_particles,
log_weights=log_weights),
SequentialMonteCarloResults(
steps=kernel_results.steps + 1,
parent_indices=resample_indices,
incremental_log_marginal_likelihood=(
incremental_log_marginal_likelihood),
accumulated_log_marginal_likelihood=(
kernel_results.accumulated_log_marginal_likelihood +
incremental_log_marginal_likelihood),
seed=seed))
def bootstrap_results(self, init_state):
with tf.name_scope(self.name):
with tf.name_scope('bootstrap_results'):
init_state = WeightedParticles(*init_state)
batch_zeros = tf.zeros(
ps.shape(init_state.log_weights)[1:],
dtype=init_state.log_weights.dtype)
return SequentialMonteCarloResults(
steps=0,
parent_indices=_dummy_indices_like(init_state.log_weights),
incremental_log_marginal_likelihood=batch_zeros,
accumulated_log_marginal_likelihood=batch_zeros,
seed=samplers.zeros_seed())
|
[
"tensorflow_probability.python.internal.prefer_static.maximum",
"tensorflow.compat.v2.math.log_softmax",
"tensorflow_probability.python.internal.prefer_static.shape",
"tensorflow_probability.python.internal.samplers.sanitize_seed",
"tensorflow_probability.python.internal.prefer_static.range",
"tensorflow_probability.python.internal.samplers.zeros_seed",
"tensorflow_probability.python.internal.prefer_static.rank_from_shape",
"tensorflow_probability.python.internal.prefer_static.size0",
"tensorflow_probability.python.internal.prefer_static.where",
"tensorflow.compat.v2.where",
"tensorflow.compat.v2.nn.log_softmax",
"tensorflow.compat.v2.cast",
"tensorflow_probability.python.internal.prefer_static.equal",
"tensorflow.compat.v2.math.reduce_logsumexp",
"tensorflow_probability.python.internal.prefer_static.log",
"tensorflow_probability.python.internal.samplers.split_seed",
"tensorflow.compat.v2.name_scope",
"collections.namedtuple",
"tensorflow_probability.python.experimental.mcmc.weighted_resampling.resample"
] |
[((1280, 1353), 'collections.namedtuple', 'collections.namedtuple', (['"""WeightedParticles"""', "['particles', 'log_weights']"], {}), "('WeightedParticles', ['particles', 'log_weights'])\n", (1302, 1353), False, 'import collections\n'), ((2315, 2491), 'collections.namedtuple', 'collections.namedtuple', (['"""SequentialMonteCarloResults"""', "['steps', 'parent_indices', 'incremental_log_marginal_likelihood',\n 'accumulated_log_marginal_likelihood', 'seed']"], {}), "('SequentialMonteCarloResults', ['steps',\n 'parent_indices', 'incremental_log_marginal_likelihood',\n 'accumulated_log_marginal_likelihood', 'seed'])\n", (2337, 2491), False, 'import collections\n'), ((4497, 4514), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['indices'], {}), '(indices)\n', (4505, 4514), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((4944, 4980), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['"""ess_below_threshold"""'], {}), "('ess_below_threshold')\n", (4957, 4980), True, 'import tensorflow.compat.v2 as tf\n'), ((5002, 5042), 'tensorflow_probability.python.internal.prefer_static.size0', 'ps.size0', (['weighted_particles.log_weights'], {}), '(weighted_particles.log_weights)\n', (5010, 5042), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((5061, 5120), 'tensorflow.compat.v2.math.log_softmax', 'tf.math.log_softmax', (['weighted_particles.log_weights'], {'axis': '(0)'}), '(weighted_particles.log_weights, axis=0)\n', (5080, 5120), True, 'import tensorflow.compat.v2 as tf\n'), ((4604, 4627), 'tensorflow_probability.python.internal.prefer_static.range', 'ps.range', (['num_particles'], {}), '(num_particles)\n', (4612, 4627), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((5136, 5185), 'tensorflow.compat.v2.math.reduce_logsumexp', 'tf.math.reduce_logsumexp', (['(2 * log_weights)'], {'axis': '(0)'}), '(2 * log_weights, axis=0)\n', (5160, 5185), 
True, 'import tensorflow.compat.v2 as tf\n'), ((10106, 10130), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['self.name'], {}), '(self.name)\n', (10119, 10130), True, 'import tensorflow.compat.v2 as tf\n'), ((13417, 13441), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['self.name'], {}), '(self.name)\n', (13430, 13441), True, 'import tensorflow.compat.v2 as tf\n'), ((5208, 5229), 'tensorflow_probability.python.internal.prefer_static.log', 'ps.log', (['num_particles'], {}), '(num_particles)\n', (5214, 5229), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((5254, 5271), 'tensorflow_probability.python.internal.prefer_static.log', 'ps.log', (['threshold'], {}), '(threshold)\n', (5260, 5271), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((10143, 10168), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['"""one_step"""'], {}), "('one_step')\n", (10156, 10168), True, 'import tensorflow.compat.v2 as tf\n'), ((10185, 10213), 'tensorflow_probability.python.internal.samplers.sanitize_seed', 'samplers.sanitize_seed', (['seed'], {}), '(seed)\n', (10207, 10213), False, 'from tensorflow_probability.python.internal import samplers\n'), ((10253, 10278), 'tensorflow_probability.python.internal.samplers.split_seed', 'samplers.split_seed', (['seed'], {}), '(seed)\n', (10272, 10278), False, 'from tensorflow_probability.python.internal import samplers\n'), ((10363, 10390), 'tensorflow_probability.python.internal.prefer_static.size0', 'ps.size0', (['state.log_weights'], {}), '(state.log_weights)\n', (10371, 10390), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((10821, 10854), 'tensorflow_probability.python.internal.prefer_static.equal', 'ps.equal', (['kernel_results.steps', '(0)'], {}), '(kernel_results.steps, 0)\n', (10829, 10854), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((11084, 11128), 
'tensorflow.compat.v2.nn.log_softmax', 'tf.nn.log_softmax', (['state.log_weights'], {'axis': '(0)'}), '(state.log_weights, axis=0)\n', (11101, 11128), True, 'import tensorflow.compat.v2 as tf\n'), ((12113, 12220), 'tensorflow_probability.python.experimental.mcmc.weighted_resampling.resample', 'weighted_resampling.resample', (['state.particles', 'state.log_weights', 'self.resample_fn'], {'seed': 'resample_seed'}), '(state.particles, state.log_weights, self.\n resample_fn, seed=resample_seed)\n', (12141, 12220), False, 'from tensorflow_probability.python.experimental.mcmc import weighted_resampling\n'), ((13454, 13488), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['"""bootstrap_results"""'], {}), "('bootstrap_results')\n", (13467, 13488), True, 'import tensorflow.compat.v2 as tf\n'), ((10703, 10742), 'tensorflow_probability.python.internal.prefer_static.maximum', 'ps.maximum', (['(0)', '(kernel_results.steps - 1)'], {}), '(0, kernel_results.steps - 1)\n', (10713, 10742), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((12312, 12339), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['state.log_weights'], {}), '(state.log_weights)\n', (12320, 12339), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((10994, 11025), 'tensorflow.compat.v2.where', 'tf.where', (['is_initial_step', 'a', 'b'], {}), '(is_initial_step, a, b)\n', (11002, 11025), True, 'import tensorflow.compat.v2 as tf\n'), ((12552, 12579), 'tensorflow_probability.python.internal.prefer_static.where', 'ps.where', (['do_resample', 'r', 'p'], {}), '(do_resample, r, p)\n', (12560, 12579), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((13587, 13619), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['init_state.log_weights'], {}), '(init_state.log_weights)\n', (13595, 13619), True, 'from tensorflow_probability.python.internal import prefer_static 
as ps\n'), ((13950, 13971), 'tensorflow_probability.python.internal.samplers.zeros_seed', 'samplers.zeros_seed', ([], {}), '()\n', (13969, 13971), False, 'from tensorflow_probability.python.internal import samplers\n'), ((4694, 4727), 'tensorflow_probability.python.internal.prefer_static.rank_from_shape', 'ps.rank_from_shape', (['indices_shape'], {}), '(indices_shape)\n', (4712, 4727), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((12372, 12419), 'tensorflow.compat.v2.cast', 'tf.cast', (['num_particles', 'state.log_weights.dtype'], {}), '(num_particles, state.log_weights.dtype)\n', (12379, 12419), True, 'import tensorflow.compat.v2 as tf\n')]
|
# -*- coding: utf-8 -*-
## @package ivf.batch.initial_normal
#
# ivf.batch.initial_normal utility package.
# @author tody
# @date 2016/02/19
import numpy as np
import cv2
import matplotlib.pyplot as plt
from ivf.batch.batch import DatasetBatch
from ivf.io_util.image import loadNormal, saveNormal
from ivf.core.sfs import amg_constraints
from ivf.core.solver import amg_solver
from ivf.core.sfs.lumo import computeNz
from ivf.cv.normal import normalizeImage
from ivf.np.norm import normalizeVectors
class InitialNormalBatch(DatasetBatch):
def __init__(self, name="InitialNormal", dataset_name="3dmodel"):
super(InitialNormalBatch, self).__init__(name, dataset_name)
def _runImp(self):
normal_data = loadNormal(self._data_file)
if normal_data is None:
return
N0_32F, A_8U = normal_data
h, w = A_8U.shape[:2]
A_c, b_c = amg_constraints.silhouetteConstraints(A_8U, is_flat=True)
A_L = amg_constraints.laplacianMatrix((h, w), num_elements=3)
A = A_c + A_L
b = b_c
N = amg_solver.solve(A, b).reshape(-1, 3)
N = computeNz(N)
N = normalizeVectors(N)
N_32F = N.reshape(h, w, 3)
file_path = self.resultFile(self._data_file_name)
saveNormal(file_path, N_32F, A_8U)
if __name__ == '__main__':
InitialNormalBatch().run()
|
[
"ivf.np.norm.normalizeVectors",
"ivf.io_util.image.loadNormal",
"ivf.core.sfs.amg_constraints.silhouetteConstraints",
"ivf.core.sfs.lumo.computeNz",
"ivf.io_util.image.saveNormal",
"ivf.core.sfs.amg_constraints.laplacianMatrix",
"ivf.core.solver.amg_solver.solve"
] |
[((743, 770), 'ivf.io_util.image.loadNormal', 'loadNormal', (['self._data_file'], {}), '(self._data_file)\n', (753, 770), False, 'from ivf.io_util.image import loadNormal, saveNormal\n'), ((909, 966), 'ivf.core.sfs.amg_constraints.silhouetteConstraints', 'amg_constraints.silhouetteConstraints', (['A_8U'], {'is_flat': '(True)'}), '(A_8U, is_flat=True)\n', (946, 966), False, 'from ivf.core.sfs import amg_constraints\n'), ((982, 1037), 'ivf.core.sfs.amg_constraints.laplacianMatrix', 'amg_constraints.laplacianMatrix', (['(h, w)'], {'num_elements': '(3)'}), '((h, w), num_elements=3)\n', (1013, 1037), False, 'from ivf.core.sfs import amg_constraints\n'), ((1139, 1151), 'ivf.core.sfs.lumo.computeNz', 'computeNz', (['N'], {}), '(N)\n', (1148, 1151), False, 'from ivf.core.sfs.lumo import computeNz\n'), ((1164, 1183), 'ivf.np.norm.normalizeVectors', 'normalizeVectors', (['N'], {}), '(N)\n', (1180, 1183), False, 'from ivf.np.norm import normalizeVectors\n'), ((1286, 1320), 'ivf.io_util.image.saveNormal', 'saveNormal', (['file_path', 'N_32F', 'A_8U'], {}), '(file_path, N_32F, A_8U)\n', (1296, 1320), False, 'from ivf.io_util.image import loadNormal, saveNormal\n'), ((1089, 1111), 'ivf.core.solver.amg_solver.solve', 'amg_solver.solve', (['A', 'b'], {}), '(A, b)\n', (1105, 1111), False, 'from ivf.core.solver import amg_solver\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from torch import optim
import copy
class Extragradient(optim.Optimizer):
def __init__(self, optimizer, params):
super(Extragradient, self).__init__(params, optimizer.defaults)
self.params_copy = []
self.optimizer = optimizer
self.extrapolation_flag = False
def step(self, closure=None):
loss = None
if closure is not None:
loss = closure()
if self.extrapolation_flag is False:
for group in self.param_groups:
group["params_copy"] = copy.deepcopy(group["params"])
self.optimizer.step()
self.extrapolation_flag = True
else:
for group in self.param_groups:
for p, p_copy in zip(group["params"], group["params_copy"]):
p.data = p_copy.data
self.optimizer.step()
self.extrapolation_flag = False
return loss
|
[
"copy.deepcopy"
] |
[((719, 749), 'copy.deepcopy', 'copy.deepcopy', (["group['params']"], {}), "(group['params'])\n", (732, 749), False, 'import copy\n')]
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
"""
This class provides GWS Group related methods
"""
import logging
from uw_trumba.models import TrumbaCalendar
from accountsynchr.models import (
UwcalGroup, EDITOR, SHOWON, new_editor_group, new_showon_group)
from accountsynchr.dao.gws import Gws
logger = logging.getLogger(__name__)
class GroupManager:
def __init__(self):
self.gws = Gws()
# {campus_code: {group-id: UwcalGroup}}
self.campus_editor_groups = {}
self.campus_showon_groups = {}
for choice in TrumbaCalendar.CAMPUS_CHOICES:
campus_code = choice[0]
result = self.gws.get_campus_groups(campus_code)
campus_editor_groups = result[EDITOR]
self.campus_editor_groups[campus_code] = campus_editor_groups
self.campus_showon_groups[campus_code] = result[SHOWON]
def get_all_editors(self):
return self.gws.all_editors
def get_campus_editor_groups(self, campus_code):
"""
:return: the list of UwcalGroup object in the given campus
"""
return self.campus_editor_groups[campus_code].values()
def get_campus_showon_groups(self, campus_code):
"""
:return: the list of UwcalGroup object in the given campus
"""
return self.campus_showon_groups[campus_code].values()
def get_editor_group(self, trumba_cal):
"""
:return: the UwcalGroup object of the corresponding
editor group for the given TrumbaCalendar object
"""
return self.campus_editor_groups[trumba_cal.campus].get(
trumba_cal.get_group_name(EDITOR))
def get_showon_group(self, trumba_cal):
"""
:return: the UwcalGroup object of the corresponding
showon group for the given TrumbaCalendar object
"""
return self.campus_showon_groups[trumba_cal.campus].get(
trumba_cal.get_group_name(SHOWON))
def has_editor_group(self, trumba_cal):
"""
:param trumba_cal: a TrumbaCalendar object
:return: True if the corresponding editor UwcalGroup exists
"""
return self.get_editor_group(trumba_cal) is not None
def has_showon_group(self, trumba_cal):
"""
:param trumba_cal: a TrumbaCalendar object
:return: True if the corresponding showon UwcalGroup exists
"""
return self.get_showon_group(trumba_cal) is not None
def put_editor_group(self, trumba_cal):
"""
Create or update the editor group for the trumba calendar
:param trumba_cal: a TrumbaCalendar object
:return: the UwcalGroup object created, None is failed
"""
uwcal_group = self.get_editor_group(trumba_cal)
if uwcal_group is not None:
if uwcal_group.same_name(trumba_cal):
return uwcal_group
uwcal_group.set_calendar_name(trumba_cal.name)
else:
uwcal_group = new_editor_group(trumba_cal)
return self._execute_put(uwcal_group)
def put_showon_group(self, trumba_cal):
"""
Create or update the showon group for the trumba calendar
:param trumba_cal: a TrumbaCalendar object
:return: the UwcalGroup object created, None is failed
"""
uwcal_group = self.get_showon_group(trumba_cal)
if uwcal_group is not None:
if uwcal_group.same_name(trumba_cal):
return uwcal_group
uwcal_group.set_calendar_name(trumba_cal.name)
else:
uwcal_group = new_showon_group(trumba_cal)
return self._execute_put(uwcal_group)
def _execute_put(self, uwcal_group):
gwsgroup = self.gws.put_group(uwcal_group)
if (gwsgroup is not None and
gwsgroup.name == uwcal_group.get_group_name()):
# group id match
uwcal_group.group_ref = gwsgroup
return uwcal_group
return None
|
[
"accountsynchr.models.new_editor_group",
"accountsynchr.models.new_showon_group",
"accountsynchr.dao.gws.Gws",
"logging.getLogger"
] |
[((352, 379), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (369, 379), False, 'import logging\n'), ((446, 451), 'accountsynchr.dao.gws.Gws', 'Gws', ([], {}), '()\n', (449, 451), False, 'from accountsynchr.dao.gws import Gws\n'), ((3026, 3054), 'accountsynchr.models.new_editor_group', 'new_editor_group', (['trumba_cal'], {}), '(trumba_cal)\n', (3042, 3054), False, 'from accountsynchr.models import UwcalGroup, EDITOR, SHOWON, new_editor_group, new_showon_group\n'), ((3626, 3654), 'accountsynchr.models.new_showon_group', 'new_showon_group', (['trumba_cal'], {}), '(trumba_cal)\n', (3642, 3654), False, 'from accountsynchr.models import UwcalGroup, EDITOR, SHOWON, new_editor_group, new_showon_group\n')]
|
import os
import imp
from setuptools import setup, find_packages
dirname = os.path.dirname(__file__)
path_version = os.path.join(dirname, 'vaex_gql_schema/_version.py')
version = imp.load_source('version', path_version)
name = 'vaex-gql-schema'
author = '<NAME>'
author_email= '<EMAIL>'
license = 'MIT'
version = version.__version__
url = 'https://www.github.com/gmcbretas/vaex-graphql'
install_requires_graphql = ['vaex-core>=4.1.0,<5', 'graphene>=3.0b7,<4', 'vaex>=4.1.0,<5', 'pandas>=1.2.4,<2']
setup(
name=name,
version=version,
description='GraphQL support for accessing vaex DataFrame',
url=url,
author=author,
author_email=author_email,
install_requires=install_requires_graphql,
license=license,
packages=find_packages(exclude=['tests*']),
zip_safe=False,
entry_points={
'vaex.dataframe.accessor': ['graphql = vaex_gql_schema:DataFrameAccessorGraphQL'],
},
)
|
[
"imp.load_source",
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((76, 101), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (91, 101), False, 'import os\n'), ((117, 169), 'os.path.join', 'os.path.join', (['dirname', '"""vaex_gql_schema/_version.py"""'], {}), "(dirname, 'vaex_gql_schema/_version.py')\n", (129, 169), False, 'import os\n'), ((180, 220), 'imp.load_source', 'imp.load_source', (['"""version"""', 'path_version'], {}), "('version', path_version)\n", (195, 220), False, 'import imp\n'), ((780, 813), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests*']"}), "(exclude=['tests*'])\n", (793, 813), False, 'from setuptools import setup, find_packages\n')]
|
# -*- coding: utf-8 -*-
"""
Copyright © 2017, <NAME>
Contributed by <NAME> (<EMAIL>)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import os
from channels.asgi import get_channel_layer
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ci.settings")
channel_layer = get_channel_layer()
|
[
"channels.asgi.get_channel_layer",
"os.environ.setdefault"
] |
[((226, 288), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""ci.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'ci.settings')\n", (247, 288), False, 'import os\n'), ((306, 325), 'channels.asgi.get_channel_layer', 'get_channel_layer', ([], {}), '()\n', (323, 325), False, 'from channels.asgi import get_channel_layer\n')]
|
from fastapi.testclient import TestClient
from main import *
client = TestClient(app)
def test_index():
response = client.get("/")
assert response.status_code == 200
assert response.json() == {"msg": "Hello World"}
def test_health():
response = client.get("/health")
assert response.status_code == 200
assert response.json() == {"status": "ok"}
def test_jokes():
response = client.get("/jokes")
assert response.status_code == 200
assert response.json() == data
def test_random_jokes():
response = client.get("/jokes/random")
contains = response.json() in data["jokes"]
assert contains is True
def test_same_random_jokes():
response1 = client.get("/jokes")
response2 = client.get("/jokes")
assert response1 != response2
|
[
"fastapi.testclient.TestClient"
] |
[((72, 87), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (82, 87), False, 'from fastapi.testclient import TestClient\n')]
|
# coding: utf-8
'''
Pages.
'''
import re
import canvas as cv
from canvas.plugins import users
@cv.alter_root_page_view
def alter_root_page_view(PageView):
class CustomPageView(PageView):
def setup(self):
self.assets = ('site.js', 'site.css', *self.assets, 'decor.js')
if self.title is None:
self.title = 'canvas | modern web apps'
else:
title = self.title.lower()
if re.match(r'[0-9]{3}\s', title):
title = title[3:]
self.title = ' | '.join((title, 'canvas'))
return CustomPageView
@cv.page('/', title=None, assets=('home.js', 'home.css'))
class Homepage: pass
@cv.page('/login', title='log in', assets=('login.js',))
class LoginPage: pass
@cv.page('/new-plugin', title='register a plugin', assets=('plugins.js',))
class PluginRegisterPage:
@users.require_user
def on_get(self, context):
return super().on_get(context)
@cv.page('/dashboard', title='my dashboard', assets=('dash.js',))
class DashboardPage:
@users.require_user
def on_get(self, context):
return super().on_get(context)
@cv.page('/plugins', title='plugins', assets=('plugins.js', 'plugins.css'))
class PluginPage: pass
|
[
"re.match",
"canvas.page"
] |
[((521, 577), 'canvas.page', 'cv.page', (['"""/"""'], {'title': 'None', 'assets': "('home.js', 'home.css')"}), "('/', title=None, assets=('home.js', 'home.css'))\n", (528, 577), True, 'import canvas as cv\n'), ((601, 656), 'canvas.page', 'cv.page', (['"""/login"""'], {'title': '"""log in"""', 'assets': "('login.js',)"}), "('/login', title='log in', assets=('login.js',))\n", (608, 656), True, 'import canvas as cv\n'), ((681, 754), 'canvas.page', 'cv.page', (['"""/new-plugin"""'], {'title': '"""register a plugin"""', 'assets': "('plugins.js',)"}), "('/new-plugin', title='register a plugin', assets=('plugins.js',))\n", (688, 754), True, 'import canvas as cv\n'), ((866, 930), 'canvas.page', 'cv.page', (['"""/dashboard"""'], {'title': '"""my dashboard"""', 'assets': "('dash.js',)"}), "('/dashboard', title='my dashboard', assets=('dash.js',))\n", (873, 930), True, 'import canvas as cv\n'), ((1037, 1111), 'canvas.page', 'cv.page', (['"""/plugins"""'], {'title': '"""plugins"""', 'assets': "('plugins.js', 'plugins.css')"}), "('/plugins', title='plugins', assets=('plugins.js', 'plugins.css'))\n", (1044, 1111), True, 'import canvas as cv\n'), ((394, 424), 're.match', 're.match', (['"""[0-9]{3}\\\\s"""', 'title'], {}), "('[0-9]{3}\\\\s', title)\n", (402, 424), False, 'import re\n')]
|
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# Licensed under the Apache License, Version 2.0 https://aws.amazon.com/apache-2-0/
import boto3
import time
import os
from botocore.exceptions import ClientError
from boto3.dynamodb.conditions import Key, Attr
from utils.performance_tracker import EventsCounter, performance_tracker_initializer
from utils.state_table_common import *
from utils import grid_error_logger as errlog
from api.queue_manager import queue_manager
region = os.environ["REGION"]
perf_tracker = performance_tracker_initializer(
os.environ["METRICS_ARE_ENABLED"],
os.environ["METRICS_TTL_CHECKER_LAMBDA_CONNECTION_STRING"],
os.environ["METRICS_GRAFANA_PRIVATE_IP"])
# dynamodb = boto3.resource('dynamodb')
# table = dynamodb.Table(os.environ['TASKS_STATUS_TABLE_NAME'])
from api.state_table_manager import state_table_manager
state_table = state_table_manager(
os.environ['TASKS_STATUS_TABLE_SERVICE'],
os.environ['TASKS_STATUS_TABLE_CONFIG'],
os.environ['TASKS_STATUS_TABLE_NAME'],
os.environ['DYNAMODB_ENDPOINT_URL'])
# sqs_res = boto3.resource('sqs', region_name=region, endpoint_url=os.environ['SQS_PORT'])
# sqs_cli = boto3.client('sqs', endpoint_url=os.environ['SQS_PORT'])
# queue = sqs_res.get_queue_by_name(QueueName=os.environ['TASKS_QUEUE_NAME'])
# dlq = sqs_res.get_queue_by_name(QueueName=os.environ['TASKS_QUEUE_DLQ_NAME'])
queue = queue_manager(
grid_queue_service=os.environ['GRID_QUEUE_SERVICE'],
grid_queue_config=os.environ['GRID_QUEUE_CONFIG'],
endpoint_url=os.environ["SQS_ENDPOINT_URL"],
queue_name=os.environ['TASKS_QUEUE_NAME'],
region=region)
dlq = queue_manager(
grid_queue_service="SQS", # TODO extend parameters to configure this queue.
grid_queue_config=os.environ['GRID_QUEUE_CONFIG'],
endpoint_url=os.environ["SQS_ENDPOINT_URL"],
queue_name=os.environ['TASKS_QUEUE_DLQ_NAME'],
region=region)
MAX_RETRIES = 5
RETRIEVE_EXPIRED_TASKS_LIMIT = 200
# TODO: implement archival after 10 days in S3
def lambda_handler(event, context):
"""Handler called by AWS Lambda runtime
Args:
event(dict): a CloudWatch Event generated every minute
context:
Returns:
"""
stats_obj = {'01_invocation_tstmp': {"label": "None", "tstmp": int(round(time.time() * 1000))}}
event_counter = EventsCounter(
["counter_expired_tasks", "counter_failed_to_acquire",
"counter_failed_tasks", "counter_released_tasks", "counter_inconsistent_state", "counter_tasks_queue_size"])
for expired_tasks in state_table.query_expired_tasks():
event_counter.increment("counter_expired_tasks", len(expired_tasks))
event_counter.increment("counter_tasks_queue_size", queue.get_queue_length())
for item in expired_tasks:
print("Processing expired task: {}".format(item))
task_id = item.get('task_id')
owner_id = item.get('task_owner')
current_heartbeat_timestamp = item.get('heartbeat_expiration_timestamp')
try:
is_acquired = state_table.acquire_task_for_ttl_lambda(
task_id, owner_id, current_heartbeat_timestamp)
if not is_acquired:
# task has been updated at the very last second...
event_counter.increment("counter_failed_to_acquire")
continue
# retreive current number of retries and SQS_handler
retries, sqs_handler_id, task_priority = retreive_retries_and_sqs_handler_and_priority(task_id)
print("Number of retires for task[{}]: {} Priority: {}".format(task_id, retries, task_priority))
print("Last owner for task [{}]: {}".format(task_id, owner_id))
# TODO: MAX_RETRIES should be extracted from task definition... Store in DDB?
if retries == MAX_RETRIES:
print("Failing task {} after {} retries".format(task_id, retries))
event_counter.increment("counter_failed_tasks")
fail_task(task_id, sqs_handler_id, task_priority)
continue
event_counter.increment("counter_released_tasks")
# else
state_table.retry_task(task_id, retries + 1)
try:
# Task can be acquired by an agent from this point
reset_sqs_vto(sqs_handler_id, task_priority)
print("SUCCESS FIX for {}".format(task_id))
except ClientError:
try:
errlog.log('Failed to reset VTO trying to delete: {} '.format(task_id))
delete_message_from_queue(sqs_handler_id)
except ClientError:
errlog.log('Inconsistent task: {} sending do DLQ'.format(task_id))
event_counter.increment("counter_inconsistent_state")
set_task_inconsistent(task_id)
send_to_dlq(item)
except ClientError as e:
errlog.log('Lambda ttl error: {}'.format(e.response['Error']['Message']))
print("Cannot process task {} : {}".format(task_id, e))
print("Sending task {} to DLQ...".format(task_id))
send_to_dlq(item)
except Exception as e:
print("Cannot process task {} : {}".format(task_id, e))
print("Sending task {} to DLQ...".format(task_id))
errlog.log('Lambda ttl error: {}'.format(e))
send_to_dlq(item)
stats_obj['02_completion_tstmp'] = {"label": "ttl_execution_time", "tstmp": int(round(time.time() * 1000))}
perf_tracker.add_metric_sample(
stats_obj,
event_counter=event_counter,
from_event="01_invocation_tstmp",
to_event="02_completion_tstmp"
)
perf_tracker.submit_measurements()
def fail_task(task_id, sqs_handler_id, task_priority):
    """Mark a task as failed and drop its message from the queue.

    Args:
        task_id(str): identifier of the task to mark as failed
        sqs_handler_id(str): SQS receipt handle attached to the task
        task_priority(int): priority level the task was queued with
    Returns:
        Nothing
    Raises:
        ClientError: if the queue or the DynamoDB table cannot be updated
    """
    try:
        delete_message_from_queue(sqs_handler_id, task_priority)
        state_table.update_task_status_to_failed(task_id)
    except ClientError as error:
        errlog.log("Cannot fail task {} : {}".format(task_id, error))
        raise error
def set_task_inconsistent(task_id):
    """Set the task_status of the task to "inconsistent".

    Args:
        task_id(str): the id of the task to update
    Returns:
        Nothing
    Raises:
        ClientError: if DynamoDB table cannot be updated
    """
    try:
        state_table.update_task_status_to_inconsistent(task_id)
    except ClientError as e:
        # Fixed typo in the log message ("inconsystent" -> "inconsistent").
        errlog.log("Cannot set task to inconsistent {} : {}".format(task_id, e))
        raise e
def delete_message_from_queue(sqs_handler_id, task_priority):
    """Remove a message from the tasks SQS queue.

    Args:
        sqs_handler_id(str): SQS receipt handle of the message to delete
        task_priority(int): priority of the associated task
    Returns:
        Nothing
    Raises:
        ClientError: if the SQS queue cannot be updated
    """
    try:
        queue.delete_message(sqs_handler_id, task_priority)
    except ClientError as error:
        errlog.log("Cannot delete message {} : {}".format(sqs_handler_id, error))
        raise error
def retreive_retries_and_sqs_handler_and_priority(task_id):
    """Look up the retry count, the SQS receipt handle and the priority
    of an expired task.

    Args:
        task_id(str): the id of the expired task
    Returns:
        rtype: 3 values (retries, sqs_handler_id, task_priority)
    Raises:
        ClientError: if the DynamoDB query failed
    """
    try:
        task_row = state_table.get_task_by_id(task_id)
        # NOTE(review): assumes the lookup yields exactly one row -- confirm upstream.
        return tuple(task_row.get(field)
                     for field in ('retries', 'sqs_handler_id', 'task_priority'))
    except ClientError as error:
        errlog.log("Cannot retreive retries and handler for task {} : {}".format(task_id, error))
        raise error
def reset_sqs_vto(handler_id, task_priority):
    """Make a message immediately visible again by zeroing its visibility timeout.

    Args:
        handler_id: SQS receipt handle of the message
        task_priority: priority level of the associated task
    Returns:
        Nothing
    Raises:
        ClientError: if the visibility timeout cannot be changed
    """
    try:
        # A visibility timeout of 0 releases the message right away.
        queue.change_visibility(handler_id, 0, task_priority)
    except ClientError as error:
        errlog.log("Cannot reset VTO for message {} : {}".format(handler_id, error))
        raise error
def send_to_dlq(task):
    """Forward an unprocessable task to the dead-letter queue.

    Args:
        task: task payload to forward (stringified before sending)
    Returns:
        Nothing
    """
    print(f"Sending task [{task}] to DLQ")
    dlq.send_message(message_bodies=[str(task)])
|
[
"api.queue_manager.queue_manager",
"api.state_table_manager.state_table_manager",
"time.time",
"utils.performance_tracker.performance_tracker_initializer",
"utils.performance_tracker.EventsCounter"
] |
[((587, 765), 'utils.performance_tracker.performance_tracker_initializer', 'performance_tracker_initializer', (["os.environ['METRICS_ARE_ENABLED']", "os.environ['METRICS_TTL_CHECKER_LAMBDA_CONNECTION_STRING']", "os.environ['METRICS_GRAFANA_PRIVATE_IP']"], {}), "(os.environ['METRICS_ARE_ENABLED'], os.\n environ['METRICS_TTL_CHECKER_LAMBDA_CONNECTION_STRING'], os.environ[\n 'METRICS_GRAFANA_PRIVATE_IP'])\n", (618, 765), False, 'from utils.performance_tracker import EventsCounter, performance_tracker_initializer\n'), ((945, 1133), 'api.state_table_manager.state_table_manager', 'state_table_manager', (["os.environ['TASKS_STATUS_TABLE_SERVICE']", "os.environ['TASKS_STATUS_TABLE_CONFIG']", "os.environ['TASKS_STATUS_TABLE_NAME']", "os.environ['DYNAMODB_ENDPOINT_URL']"], {}), "(os.environ['TASKS_STATUS_TABLE_SERVICE'], os.environ[\n 'TASKS_STATUS_TABLE_CONFIG'], os.environ['TASKS_STATUS_TABLE_NAME'], os\n .environ['DYNAMODB_ENDPOINT_URL'])\n", (964, 1133), False, 'from api.state_table_manager import state_table_manager\n'), ((1468, 1701), 'api.queue_manager.queue_manager', 'queue_manager', ([], {'grid_queue_service': "os.environ['GRID_QUEUE_SERVICE']", 'grid_queue_config': "os.environ['GRID_QUEUE_CONFIG']", 'endpoint_url': "os.environ['SQS_ENDPOINT_URL']", 'queue_name': "os.environ['TASKS_QUEUE_NAME']", 'region': 'region'}), "(grid_queue_service=os.environ['GRID_QUEUE_SERVICE'],\n grid_queue_config=os.environ['GRID_QUEUE_CONFIG'], endpoint_url=os.\n environ['SQS_ENDPOINT_URL'], queue_name=os.environ['TASKS_QUEUE_NAME'],\n region=region)\n", (1481, 1701), False, 'from api.queue_manager import queue_manager\n'), ((1717, 1923), 'api.queue_manager.queue_manager', 'queue_manager', ([], {'grid_queue_service': '"""SQS"""', 'grid_queue_config': "os.environ['GRID_QUEUE_CONFIG']", 'endpoint_url': "os.environ['SQS_ENDPOINT_URL']", 'queue_name': "os.environ['TASKS_QUEUE_DLQ_NAME']", 'region': 'region'}), "(grid_queue_service='SQS', grid_queue_config=os.environ[\n 'GRID_QUEUE_CONFIG'], 
endpoint_url=os.environ['SQS_ENDPOINT_URL'],\n queue_name=os.environ['TASKS_QUEUE_DLQ_NAME'], region=region)\n", (1730, 1923), False, 'from api.queue_manager import queue_manager\n'), ((2400, 2585), 'utils.performance_tracker.EventsCounter', 'EventsCounter', (["['counter_expired_tasks', 'counter_failed_to_acquire',\n 'counter_failed_tasks', 'counter_released_tasks',\n 'counter_inconsistent_state', 'counter_tasks_queue_size']"], {}), "(['counter_expired_tasks', 'counter_failed_to_acquire',\n 'counter_failed_tasks', 'counter_released_tasks',\n 'counter_inconsistent_state', 'counter_tasks_queue_size'])\n", (2413, 2585), False, 'from utils.performance_tracker import EventsCounter, performance_tracker_initializer\n'), ((5790, 5801), 'time.time', 'time.time', ([], {}), '()\n', (5799, 5801), False, 'import time\n'), ((2357, 2368), 'time.time', 'time.time', ([], {}), '()\n', (2366, 2368), False, 'import time\n')]
|
import time
from django.utils.deprecation import MiddlewareMixin
class StatsMiddleware(MiddlewareMixin):
    """Log the wall-clock duration of each request/response cycle."""

    def process_request(self, request):
        # Fixed parameter name "selfs" -> "self" (worked only because the
        # instance is always the first positional argument).
        # Stash the start time on the request so process_response can read it.
        request.start_time = time.time()

    def process_response(self, request, response):
        total = time.time() - request.start_time
        print(f"cycle took {total}")
        return response
|
[
"time.time"
] |
[((177, 188), 'time.time', 'time.time', ([], {}), '()\n', (186, 188), False, 'import time\n'), ((257, 268), 'time.time', 'time.time', ([], {}), '()\n', (266, 268), False, 'import time\n')]
|
# modify from clovaai
import random
import re
import lmdb
import six
from PIL import Image
from .base import BaseDataset
from .registry import DATASETS
@DATASETS.register_module
class LmdbDataset(BaseDataset):
    """Text-recognition dataset backed by an LMDB store.

    The store is expected to hold a ``num-samples`` entry plus
    ``label-%09d`` / ``image-%09d`` key pairs whose indices start at 1.
    """

    def __init__(self, *args, **kwargs):
        # Python 3 zero-argument super() (the module already uses f-strings).
        super().__init__(*args, **kwargs)

    def get_name_list(self):
        """Open the LMDB environment and build the list of usable sample indices."""
        self.env = lmdb.open(self.root, max_readers=32, readonly=True, lock=False, readahead=False, meminit=False)
        with self.env.begin(write=False) as txn:
            nSamples = int(txn.get('num-samples'.encode()))
            if self.data_filter_off:
                # Keep every sample; lmdb indices are 1-based.
                self.filtered_index_list = [index + 1 for index in range(nSamples)]
                self.samples = nSamples
            else:
                self.filtered_index_list = []
                for index in range(nSamples):
                    index += 1  # lmdb starts with 1
                    label_key = 'label-%09d'.encode() % index
                    label = txn.get(label_key).decode('utf-8')
                    # self.filter returns a truthy value for labels to discard.
                    if self.filter(label):
                        continue
                    self.filtered_index_list.append(index)
                self.samples = len(self.filtered_index_list)

    def __getitem__(self, index):
        """Return an (image, label) pair; corrupted samples are replaced by a random one."""
        assert index <= len(self), 'index range error'
        index = self.filtered_index_list[index]
        with self.env.begin(write=False) as txn:
            label_key = 'label-%09d'.encode() % index
            label = txn.get(label_key).decode('utf-8')
            img_key = 'image-%09d'.encode() % index
            imgbuf = txn.get(img_key)
            buf = six.BytesIO()
            buf.write(imgbuf)
            buf.seek(0)
            try:
                img = Image.open(buf).convert('RGB')  # for color image
            except IOError:
                print(f'Corrupted image for {index}')
                # make dummy image and dummy label for corrupted image.
                img, label = self.__getitem__(random.choice(range(len(self))))
                return img, label
        if self.transforms:
            try:
                img, label = self.transforms(img, label)
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt/SystemExit
                # still propagate; a transform failure falls back to a random
                # replacement sample.
                return self.__getitem__(random.choice(range(len(self))))
        if not self.unknown:
            out_of_char = f'[^{self.character}]'
            label = re.sub(out_of_char, '', label)
        return img, label
|
[
"six.BytesIO",
"re.sub",
"lmdb.open",
"PIL.Image.open"
] |
[((365, 465), 'lmdb.open', 'lmdb.open', (['self.root'], {'max_readers': '(32)', 'readonly': '(True)', 'lock': '(False)', 'readahead': '(False)', 'meminit': '(False)'}), '(self.root, max_readers=32, readonly=True, lock=False, readahead=\n False, meminit=False)\n', (374, 465), False, 'import lmdb\n'), ((1654, 1667), 'six.BytesIO', 'six.BytesIO', ([], {}), '()\n', (1665, 1667), False, 'import six\n'), ((2406, 2436), 're.sub', 're.sub', (['out_of_char', '""""""', 'label'], {}), "(out_of_char, '', label)\n", (2412, 2436), False, 'import re\n'), ((1761, 1776), 'PIL.Image.open', 'Image.open', (['buf'], {}), '(buf)\n', (1771, 1776), False, 'from PIL import Image\n')]
|
from functools import partial
from PyQt5 import QtCore
from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer
from PyQt5.QtGui import QCursor
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget
class Thrower:
    """Scatter three glyph labels outward from a point with a fade-out.

    NOTE(review): the original docstring said this works only for a fixed
    font size and was "excluded for the time being" -- confirm before reuse.
    """
    def __init__(self, x, y, window) -> None:
        # Origin point of the effect (x is nudged 10 px left) and host widget.
        self.x = x - 10
        self.y = y
        self.window = window
    def fade(self, widget):
        # Build a 500 ms opacity 1 -> 0 animation for `widget`.
        # The effect/animation are stored on self so Qt does not garbage-collect
        # them before they run (each call overwrites the previous reference,
        # but the caller also keeps them alive via an animation group).
        self.effect = QGraphicsOpacityEffect()
        widget.setGraphicsEffect(self.effect)
        self.animation = QtCore.QPropertyAnimation(self.effect, b"opacity")
        self.animation.setDuration(500)
        self.animation.setStartValue(1)
        self.animation.setEndValue(0)
        return self.animation
    def throw(self):
        # Create three labels and a pair of parallel animation groups:
        # one for the movement, one (self.blurs) for the fade-outs.
        animations = QParallelAnimationGroup()
        self.labels = []
        self.blurs = QParallelAnimationGroup()
        # Target points: up-left, up-right, and straight down from the origin.
        for left, right in zip(
            [self.x - 40, self.x + 40, self.x ],
            [self.y - 65, self.y - 65, self.y + 35],
        ):
            label = QLabel(self.window)
            label.setText(" ")
            label.setStyleSheet("background-color: transparent; font-family: SauceCodePro Nerd Font; color: #BF616A")
            label.setFixedHeight(200)
            animation = QPropertyAnimation(label, b"pos")
            animation.setStartValue(QPoint(self.x, self.y))
            animation.setEndValue(QPoint(left, right))
            animation.setDuration(500)
            animations.addAnimation(animation)
            blur_animation = self.fade(label)
            self.blurs.addAnimation(blur_animation)
            label.show()
            self.labels.append(label)
        # A hidden button is clicked programmatically so that start() runs
        # through Qt's signal machinery rather than being called directly.
        q = QPushButton(self.window)
        q.clicked.connect(partial(self.start, animations))
        q.click()
    def start(self, animations: QParallelAnimationGroup):
        # Run the movement group; hide every label once it finishes,
        # and run the fade group in parallel.
        animations.start()
        def callback():
            for i in self.labels:
                i.hide()
        animations.finished.connect(callback)
        self.blurs.start()
self.blurs.start()
class QCustomButton:
    """Factory for a flat QPushButton with a pointing-hand cursor and an
    optional enlarged font."""
    def __init__(self, text, window , setStyle = False , addtext = None) -> None:
        # NOTE(review): `addtext` is accepted but never used -- confirm before removing.
        self.text = text
        self.window = window
        self.setStyle = setStyle
    def create(self):
        """Build and return the configured QPushButton."""
        self.button = QPushButton(self.window)
        if self.setStyle:
            self.button.setStyleSheet('font-size: 80px')
        self.button.setFlat(True)
        self.button.setText(self.text)
        self.button.setCursor(QCursor(Qt.PointingHandCursor))
        return self.button
class PopUpMessage:
    """Transient toast label that slides up from the bottom of the main window."""
    def new_msg(self , window , msg , duration):
        """Show *msg* in a popup label animated from below the window edge.

        Args:
            window: widget the popup is attached to (climbed up to the QMainWindow).
            msg: text to display.
            duration: slide-in animation duration in milliseconds.
        Returns:
            The QLabel used as the popup.
        """
        self.window = window
        if(type(self.window) != QMainWindow):
            # Climb out of the nested containers up to the main window.
            # NOTE(review): exact type comparison deliberately excludes
            # QMainWindow subclasses -- confirm that is intended.
            self.window = self.window.parent().parent().parent().parent()
        try:
            self.popup_window.hide()
        except Exception:
            # Narrowed from a bare except: on the first call there is no
            # previous popup to hide (AttributeError); ignore and rebuild.
            pass
        self.popup_window = QLabel(self.window)
        self.popup_window.setFixedWidth(len(msg) * 20)
        self.popup_window.setFixedHeight(60)
        self.popup_window.setAlignment(Qt.AlignCenter | Qt.AlignCenter)
        self.popup_window.setText(msg)
        self.popup_window.setStyleSheet(
            """
            QLabel{
                background-color: #4C566A;
                font-size: 20px;
                font-family: "Comfortaa"
            }
            """
        )
        self.animation = QPropertyAnimation(self.popup_window, b"pos")
        self.animation.setStartValue(QPoint(20, self.window.height() + 100))
        self.animation.setEndValue(QPoint(20, self.window.height() - 100))
        self.animation.setDuration(duration)
        self.popup_window.show()
        # The popup is dismissed after a fixed 2 s regardless of `duration`.
        timer = QTimer(self.window)
        timer.timeout.connect(self.remove)
        timer.start(2000)
        self.start()
        return self.popup_window
    def start(self):
        """Run the slide-in animation."""
        self.animation.start()
    def updateText(self , text):
        """Replace the text of the currently shown popup."""
        self.popup_window.setText(text)
    def remove(self):
        """Slide the popup off-screen (200 ms), then hide it."""
        self.an = QPropertyAnimation(self.popup_window, b"pos")
        self.an.setStartValue(self.popup_window.pos())
        self.an.setEndValue(QPoint(20, 1000))
        self.an.setDuration(200)
        self.an.start()
        self.an.finished.connect(self.popup_window.hide)
class QContinueButton:
    """Flat button whose arrow glyph slides sideways on hover."""
    def __init__(self , window) -> None:
        # Parent widget the button will be created on.
        self.window = window
    def start(self , text="Continue"):
        # Build and return the button; `text` is the visible caption.
        self.button = QPushButton(self.window)
        self.button.setFlat(True)
        self.button.setCursor(QCursor(Qt.PointingHandCursor))
        self.layout = QHBoxLayout()
        first_text = QLabel(text)
        second_text = QLabel(text=" ")
        # Hover animation: move the arrow label to the right.
        animation = QPropertyAnimation(second_text , b"pos")
        animation.setDuration(200)
        animation.setStartValue(QPoint(self.button.pos().x() + 110 , self.button.y() + 10))
        # NOTE(review): the end point mixes pos().y() into the x coordinate and
        # pos().x() into the y coordinate -- looks like a swapped x/y pair;
        # confirm whether `pos().x() + 130, pos().y() + 10` was intended.
        animation.setEndValue(QPoint(self.button.pos().y() + 130 , self.button.pos().x() + 10))
        # Leave animation: the exact reverse of the hover animation
        # (same NOTE about the swapped coordinates applies).
        leave_ani = QPropertyAnimation(second_text , b"pos")
        leave_ani.setDuration(200)
        leave_ani.setEndValue(QPoint(self.button.pos().x() + 110 , self.button.y() + 10))
        leave_ani.setStartValue(QPoint(self.button.pos().y() + 130 , self.button.pos().x() + 10))
        self.layout.addWidget(first_text)
        self.layout.addWidget(second_text)
        self.button.setLayout(self.layout)
        # Replace the button's enter/leave event handlers so hovering
        # starts the corresponding animation.
        onhover = lambda x : animation.start()
        leave = lambda x: leave_ani.start()
        self.button.enterEvent = onhover
        self.button.leaveEvent = leave
        return self.button
class Animation:
    """Factory helpers that build ready-to-start QPropertyAnimations."""
    def movingAnimation(self , widget , endValue , duration):
        """Animate *widget* from its current position to *endValue* over *duration* ms."""
        animation = QPropertyAnimation(widget , b"pos")
        animation.setStartValue(widget.pos())
        animation.setEndValue(endValue)
        animation.setDuration(duration)
        return animation
    def fadingAnimation(self , widget: QWidget , duration, reverse=False , startValue = 0, endValue = 0):
        """Fade *widget* out (or in, when reverse=True) over *duration* ms.

        A fresh QGraphicsOpacityEffect is installed on the widget on every
        call, replacing any effect already set.
        """
        # Removed dead commented-out code that probed widget.graphicsEffect().
        opacity = QGraphicsOpacityEffect()
        widget.setGraphicsEffect(opacity)
        animation = QPropertyAnimation(opacity , b"opacity")
        if(not reverse):
            animation.setStartValue(1)
            animation.setEndValue(endValue)
        else:
            animation.setStartValue(startValue)
            animation.setEndValue(1)
        animation.setDuration(duration)
        return animation
class QLayoutMaker:
    """Builds a horizontal layout of styled icon buttons wired to callbacks."""
    def __init__(self , icons: list[list[str]] , functions: list) -> None:
        # icons: rows of [glyph, color, font_size, font_family];
        # functions: click handlers matched to icons by position.
        self.icons = icons
        self.functions = functions
    def make(self) -> QHBoxLayout:
        """Return a QHBoxLayout containing one button per icon row."""
        layout = QHBoxLayout()
        i = 0
        try:
            for icon, icon_color, icon_font_size, icon_family in self.icons:
                item = QCustomButton(icon, None).create()
                item.setStyleSheet(
                    "color: {}; font-size: {}px; font-family: {}".format(
                        icon_color, icon_font_size, icon_family
                    )
                )
                item.clicked.connect(self.functions[i])
                i += 1
                layout.addWidget(item)
        except Exception:
            # Narrowed from a bare except. Deliberately best-effort: a
            # malformed icon row or missing handler stops adding buttons
            # instead of crashing the UI.
            pass
        return layout
class QSliderMenu(QLabel):
    """Off-screen side panel (400 px wide) hosting scrollable menu sections."""
    def __init__(self , parent) -> None:
        super().__init__(parent)
        self.head = parent
        self.setProperty("class", "need")
        # Parked far off-screen to the right; callers slide it into view.
        self.setGeometry(QRect(2000, 0, 400, 1000))
        self.show()
        outer_layout = QVBoxLayout(self)
        self.scrollArea = QScrollArea(self)
        outer_layout.addWidget(self.scrollArea)
        self.buttons = QWidget(self)
        self.buttons.setGeometry(QRect(100, 0, 400, 50))
        self.scrollArea.setWidget(self.buttons)
        self.second_layout = QVBoxLayout(self.buttons)
        self.buttons.setLayout(self.second_layout)
        self.setStyleSheet("""QLabel[class="need"] { border: 3px solid #3B4252 }""")
    def addMenu(self , name , widget , addAsLayout = False):
        """Append a titled section; *widget* is a widget, or a layout when addAsLayout is set."""
        section = QVBoxLayout()
        if name != "":
            title = QLabel()
            title.setText(name)
            title.setFixedHeight(80)
            title.setStyleSheet("color: white; font-size: 20px; font-family: Comfortaa")
            section.addWidget(title)
        if addAsLayout:
            section.addLayout(widget)
        else:
            section.addWidget(widget)
            widget.setGeometry(self.geometry())
        self.second_layout.addLayout(section)
        # Grow the scrollable container to make room for the new section.
        self.buttons.setFixedHeight(self.buttons.height() + 135)
|
[
"PyQt5.QtCore.QTimer",
"PyQt5.QtWidgets.QLabel",
"functools.partial",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtCore.QRect",
"PyQt5.QtCore.QParallelAnimationGroup",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QScrollArea",
"PyQt5.QtGui.QCursor",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtCore.QPoint",
"PyQt5.QtCore.QPropertyAnimation",
"PyQt5.QtWidgets.QGraphicsOpacityEffect"
] |
[((636, 660), 'PyQt5.QtWidgets.QGraphicsOpacityEffect', 'QGraphicsOpacityEffect', ([], {}), '()\n', (658, 660), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((734, 784), 'PyQt5.QtCore.QPropertyAnimation', 'QtCore.QPropertyAnimation', (['self.effect', "b'opacity'"], {}), "(self.effect, b'opacity')\n", (759, 784), False, 'from PyQt5 import QtCore\n'), ((981, 1006), 'PyQt5.QtCore.QParallelAnimationGroup', 'QParallelAnimationGroup', ([], {}), '()\n', (1004, 1006), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((1055, 1080), 'PyQt5.QtCore.QParallelAnimationGroup', 'QParallelAnimationGroup', ([], {}), '()\n', (1078, 1080), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((1933, 1957), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['self.window'], {}), '(self.window)\n', (1944, 1957), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((2616, 2640), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['self.window'], {}), '(self.window)\n', (2627, 2640), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((3305, 3324), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['self.window'], {}), '(self.window)\n', (3311, 3324), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((3897, 3942), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', (['self.popup_window', "b'pos'"], {}), "(self.popup_window, b'pos')\n", (3915, 3942), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((4215, 4234), 'PyQt5.QtCore.QTimer', 'QTimer', (['self.window'], 
{}), '(self.window)\n', (4221, 4234), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((4595, 4640), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', (['self.popup_window', "b'pos'"], {}), "(self.popup_window, b'pos')\n", (4613, 4640), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((5049, 5073), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['self.window'], {}), '(self.window)\n', (5060, 5073), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((5219, 5232), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5230, 5232), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((5263, 5275), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['text'], {}), '(text)\n', (5269, 5275), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((5307, 5330), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {'text': '""" \uf061 """'}), "(text=' \\uf061 ')\n", (5313, 5330), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((5355, 5394), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', (['second_text', "b'pos'"], {}), "(second_text, b'pos')\n", (5373, 5394), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((5667, 5706), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', (['second_text', "b'pos'"], {}), "(second_text, b'pos')\n", (5685, 5706), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((6456, 6490), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', 
(['widget', "b'pos'"], {}), "(widget, b'pos')\n", (6474, 6490), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((6902, 6926), 'PyQt5.QtWidgets.QGraphicsOpacityEffect', 'QGraphicsOpacityEffect', ([], {}), '()\n', (6924, 6926), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((7016, 7055), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', (['opacity', "b'opacity'"], {}), "(opacity, b'opacity')\n", (7034, 7055), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((7597, 7610), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (7608, 7610), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((8497, 8514), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (8508, 8514), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((8542, 8559), 'PyQt5.QtWidgets.QScrollArea', 'QScrollArea', (['self'], {}), '(self)\n', (8553, 8559), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((8627, 8640), 'PyQt5.QtWidgets.QWidget', 'QWidget', (['self'], {}), '(self)\n', (8634, 8640), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((8778, 8803), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self.buttons'], {}), '(self.buttons)\n', (8789, 8803), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((9043, 9056), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), 
'()\n', (9054, 9056), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((1255, 1274), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['self.window'], {}), '(self.window)\n', (1261, 1274), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((1516, 1549), 'PyQt5.QtCore.QPropertyAnimation', 'QPropertyAnimation', (['label', "b'pos'"], {}), "(label, b'pos')\n", (1534, 1549), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((1985, 2016), 'functools.partial', 'partial', (['self.start', 'animations'], {}), '(self.start, animations)\n', (1992, 2016), False, 'from functools import partial\n'), ((2848, 2878), 'PyQt5.QtGui.QCursor', 'QCursor', (['Qt.PointingHandCursor'], {}), '(Qt.PointingHandCursor)\n', (2855, 2878), False, 'from PyQt5.QtGui import QCursor\n'), ((4726, 4742), 'PyQt5.QtCore.QPoint', 'QPoint', (['(20)', '(1000)'], {}), '(20, 1000)\n', (4732, 4742), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((5156, 5186), 'PyQt5.QtGui.QCursor', 'QCursor', (['Qt.PointingHandCursor'], {}), '(Qt.PointingHandCursor)\n', (5163, 5186), False, 'from PyQt5.QtGui import QCursor\n'), ((8431, 8456), 'PyQt5.QtCore.QRect', 'QRect', (['(2000)', '(0)', '(400)', '(1000)'], {}), '(2000, 0, 400, 1000)\n', (8436, 8456), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((8675, 8697), 'PyQt5.QtCore.QRect', 'QRect', (['(100)', '(0)', '(400)', '(50)'], {}), '(100, 0, 400, 50)\n', (8680, 8697), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((9114, 9122), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (9120, 9122), False, 'from PyQt5.QtWidgets import QGraphicsOpacityEffect, QHBoxLayout, 
QLabel, QMainWindow, QPushButton, QScrollArea, QVBoxLayout, QWidget\n'), ((1587, 1609), 'PyQt5.QtCore.QPoint', 'QPoint', (['self.x', 'self.y'], {}), '(self.x, self.y)\n', (1593, 1609), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n'), ((1646, 1665), 'PyQt5.QtCore.QPoint', 'QPoint', (['left', 'right'], {}), '(left, right)\n', (1652, 1665), False, 'from PyQt5.QtCore import QParallelAnimationGroup, QPoint, QPropertyAnimation, QRect, QTimer\n')]
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
class UserRegisterForm(UserCreationForm):
    """Sign-up form: UserCreationForm's username/password fields plus email."""
    # Explicit EmailField makes the email required and validated.
    email = forms.EmailField()
    class Meta:
        model = User
        fields = ['username', 'email', 'first_name']
class UserUpdateForm(forms.ModelForm):
    """Profile-edit form for the built-in User model."""
    # Explicit EmailField makes the email required and validated.
    email = forms.EmailField()
    class Meta:
        model = User
        fields = ['username', 'email', 'first_name']
class ProfileUpdateForm(forms.ModelForm):
    """Form for updating the user's profile image only."""
    class Meta:
        model = Profile
        fields = ['image']
        labels = {'image': 'Image'}
        # Plain FileInput hides the "currently/clear" widget of ClearableFileInput.
        widgets = {'image': forms.FileInput()}
|
[
"django.forms.FileInput",
"django.forms.EmailField"
] |
[((208, 226), 'django.forms.EmailField', 'forms.EmailField', ([], {}), '()\n', (224, 226), False, 'from django import forms\n'), ((371, 389), 'django.forms.EmailField', 'forms.EmailField', ([], {}), '()\n', (387, 389), False, 'from django import forms\n'), ((657, 674), 'django.forms.FileInput', 'forms.FileInput', ([], {}), '()\n', (672, 674), False, 'from django import forms\n')]
|
from django.db import models
from django.core import validators
# Create your models here.
class Players(models.Model):
    """An NPB player: name, age, main position, handedness and current team."""
    # NOTE(review): choice values double as tuple indices in __str__ below --
    # entry 0 is the ('', '...') placeholder and values 1..N match indices 1..N.
    # Reordering or renumbering any choices tuple breaks __str__.
    POSITION_CHOICES = (
        ('', '選択'),
        (1, '投'),
        (2, '捕'),
        (3, '一'),
        (4, '二'),
        (5, '三'),
        (6, '遊'),
        (7, '外'),
    )
    DOMINANT_HAND_CHOICES = (
        ('', '選択'),
        (1, '右投右打'),
        (2, '右投左打'),
        (3, '右投両打'),
        (4, '左投左打'),
    )
    NPB_TEAM_CHOICES = (
        ('', '選択'),
        (1, '西武'),
        (2, 'ソフトバンク'),
        (3, '楽天'),
        (4, 'ロッテ'),
        (5, '日本ハム'),
        (6, 'オリックス'),
        (7, '巨人'),
        (8, 'DeNA'),
        (9, '阪神'),
        (10, '広島'),
        (11, '中日'),
        (12, 'ヤクルト'),
    )
    name = models.CharField(
        verbose_name="選手名",
        max_length=10,
    )
    # Age is constrained to 26-60 inclusive.
    age = models.PositiveSmallIntegerField(
        verbose_name="年齢",
        validators=[validators.MinValueValidator(26),validators.MaxValueValidator(60)],
    )
    position = models.IntegerField(
        verbose_name="メインポジション",
        choices=POSITION_CHOICES,
        default=0,
    )
    dominant_hand = models.IntegerField(
        verbose_name="利き手",
        choices=DOMINANT_HAND_CHOICES,
        default=0,
    )
    department = models.IntegerField(
        verbose_name="現所属球団",
        choices=NPB_TEAM_CHOICES,
        default=0,
    )
    created_at = models.DateTimeField(
        verbose_name="登録日",
        auto_now_add=True,
    )
    def __str__(self):
        # Indexes the choices tuples directly (value == index); see NOTE above.
        return f'{self.name} : {self.POSITION_CHOICES[self.position][1]} : {self.NPB_TEAM_CHOICES[self.department][1]}'
    class Meta:
        verbose_name = "選手情報"
        verbose_name_plural = "選手情報"
        ordering = ['department', 'position']
class RequestedConditions(models.Model):
    """A requested player-filter condition; 0 in any field means "no constraint"."""
    # NOTE(review): choice values double as tuple indices in __str__ below.
    POSITION_CHOICES = (
        (0, 'なし'),
        (1, '投手'),
        (2, '捕手'),
        (3, '一塁手'),
        (4, '二塁手'),
        (5, '三塁手'),
        (6, '遊撃手'),
        (7, '外野手'),
    )
    DOMINANT_HAND_CHOICES = (
        (0, 'なし'),
        (1, '右投右打'),
        (2, '右投左打'),
        (3, '右投両打'),
        (4, '左投左打'),
    )
    age = models.PositiveSmallIntegerField(
        verbose_name="年齢",
        default=0,
    )
    position = models.IntegerField(
        verbose_name="メインポジション",
        choices=POSITION_CHOICES,
        default=0,
    )
    dominant_hand = models.IntegerField(
        verbose_name="利き手",
        choices=DOMINANT_HAND_CHOICES,
        default=0,
    )
    created_at = models.DateTimeField(
        verbose_name="登録日",
        auto_now_add=True,
    )
    def __str__(self):
        return f'{self.age} : {self.POSITION_CHOICES[self.position][1]} : {self.DOMINANT_HAND_CHOICES[self.dominant_hand][1]}'
    class Meta:
        verbose_name = "要望"
        verbose_name_plural = "要望"
def set_players_condition():
    """Build a queryset filter dict from the most recent RequestedConditions row.

    Fields left at 0 mean "no constraint" and are omitted; a non-zero age
    becomes an ``age__lt`` lookup.
    """
    latest = RequestedConditions.objects.latest('pk')
    filters = {}
    for attr, lookup in (('age', 'age__lt'),
                         ('position', 'position'),
                         ('dominant_hand', 'dominant_hand')):
        value = getattr(latest, attr)
        if value > 0:
            filters[lookup] = value
    return filters
class FaExpects(models.Model):
    """A free-agency prediction: which team a player is expected to join, with a priority."""
    # NOTE(review): choice values double as tuple indices in __str__ below.
    PRIORITY_CHOICES = (
        ('', '選択'),
        (1, '第一希望'),
        (2, '第二希望'),
        (3, '第三希望'),
        (4, '第四希望以降'),
    )
    NPB_TEAM_CHOICES = (
        ('', '選択'),
        (1, '西武'),
        (2, 'ソフトバンク'),
        (3, '楽天'),
        (4, 'ロッテ'),
        (5, '日本ハム'),
        (6, 'オリックス'),
        (7, '巨人'),
        (8, 'DeNA'),
        (9, '阪神'),
        (10, '広島'),
        (11, '中日'),
        (12, 'ヤクルト'),
    )
    team = models.IntegerField(
        verbose_name="球団",
        choices=NPB_TEAM_CHOICES,
        default=0,
    )
    # Selectable players are restricted by the latest RequestedConditions row
    # via the set_players_condition callable.
    player_id = models.ForeignKey(
        Players,
        on_delete=models.CASCADE,
        verbose_name="選手",
        limit_choices_to=set_players_condition,
    )
    priority = models.IntegerField(
        verbose_name="優先度",
        choices=PRIORITY_CHOICES,
        default=0,
    )
    created_at = models.DateTimeField(
        verbose_name="登録日",
        auto_now_add=True,
    )
    def __str__(self):
        return f'{self.player_id} : {self.PRIORITY_CHOICES[self.priority][1]}'
    class Meta:
        verbose_name = "FA予想"
        verbose_name_plural = "FA予想"
        ordering = ['priority']
|
[
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.core.validators.MinValueValidator",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField",
"django.core.validators.MaxValueValidator"
] |
[((790, 841), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""選手名"""', 'max_length': '(10)'}), "(verbose_name='選手名', max_length=10)\n", (806, 841), False, 'from django.db import models\n'), ((1057, 1142), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""メインポジション"""', 'choices': 'POSITION_CHOICES', 'default': '(0)'}), "(verbose_name='メインポジション', choices=POSITION_CHOICES,\n default=0)\n", (1076, 1142), False, 'from django.db import models\n'), ((1197, 1282), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""利き手"""', 'choices': 'DOMINANT_HAND_CHOICES', 'default': '(0)'}), "(verbose_name='利き手', choices=DOMINANT_HAND_CHOICES,\n default=0)\n", (1216, 1282), False, 'from django.db import models\n'), ((1334, 1412), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""現所属球団"""', 'choices': 'NPB_TEAM_CHOICES', 'default': '(0)'}), "(verbose_name='現所属球団', choices=NPB_TEAM_CHOICES, default=0)\n", (1353, 1412), False, 'from django.db import models\n'), ((1468, 1527), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""登録日"""', 'auto_now_add': '(True)'}), "(verbose_name='登録日', auto_now_add=True)\n", (1488, 1527), False, 'from django.db import models\n'), ((2241, 2303), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'verbose_name': '"""年齢"""', 'default': '(0)'}), "(verbose_name='年齢', default=0)\n", (2273, 2303), False, 'from django.db import models\n'), ((2348, 2433), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""メインポジション"""', 'choices': 'POSITION_CHOICES', 'default': '(0)'}), "(verbose_name='メインポジション', choices=POSITION_CHOICES,\n default=0)\n", (2367, 2433), False, 'from django.db import models\n'), ((2488, 2573), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""利き手"""', 'choices': 'DOMINANT_HAND_CHOICES', 'default': 
'(0)'}), "(verbose_name='利き手', choices=DOMINANT_HAND_CHOICES,\n default=0)\n", (2507, 2573), False, 'from django.db import models\n'), ((2625, 2684), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""登録日"""', 'auto_now_add': '(True)'}), "(verbose_name='登録日', auto_now_add=True)\n", (2645, 2684), False, 'from django.db import models\n'), ((3886, 3961), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""球団"""', 'choices': 'NPB_TEAM_CHOICES', 'default': '(0)'}), "(verbose_name='球団', choices=NPB_TEAM_CHOICES, default=0)\n", (3905, 3961), False, 'from django.db import models\n'), ((4016, 4131), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Players'], {'on_delete': 'models.CASCADE', 'verbose_name': '"""選手"""', 'limit_choices_to': 'set_players_condition'}), "(Players, on_delete=models.CASCADE, verbose_name='選手',\n limit_choices_to=set_players_condition)\n", (4033, 4131), False, 'from django.db import models\n'), ((4190, 4266), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'verbose_name': '"""優先度"""', 'choices': 'PRIORITY_CHOICES', 'default': '(0)'}), "(verbose_name='優先度', choices=PRIORITY_CHOICES, default=0)\n", (4209, 4266), False, 'from django.db import models\n'), ((4322, 4381), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""登録日"""', 'auto_now_add': '(True)'}), "(verbose_name='登録日', auto_now_add=True)\n", (4342, 4381), False, 'from django.db import models\n'), ((964, 996), 'django.core.validators.MinValueValidator', 'validators.MinValueValidator', (['(26)'], {}), '(26)\n', (992, 996), False, 'from django.core import validators\n'), ((997, 1029), 'django.core.validators.MaxValueValidator', 'validators.MaxValueValidator', (['(60)'], {}), '(60)\n', (1025, 1029), False, 'from django.core import validators\n')]
|
from unittest.mock import Mock
from uuid import uuid4
import pytest
from returns.result import Failure, Result, Success
from kamui.core.entity.source import SourceType
from kamui.core.entity.stream import Stream
from kamui.core.use_case.failure import FailureDetails, BusinessFailureDetails
from kamui.core.use_case.stream.get_streams import FindStreams, GetStreamsUseCase
@pytest.fixture(scope="function")
def find_streams() -> Mock:
    """Provide a fresh FindStreams mock for each test."""
    finder_mock = Mock(spec=FindStreams)
    return finder_mock
@pytest.fixture(scope="function")
def get_streams_use_case(find_streams: Mock) -> GetStreamsUseCase:
    """Build the use case under test wired to the mocked FindStreams."""
    use_case = GetStreamsUseCase(find_streams)
    return use_case
def test_should_return_streams_correctly(
    get_streams_use_case: GetStreamsUseCase, find_streams: Mock
) -> None:
    """The use case should surface exactly the streams FindStreams yields."""
    expected_streams = [
        Stream(
            stream_id=uuid4(),
            name="STREAM_ONE",
            source_type=SourceType.TOPIC,
            source_name="some_topic",
        ),
        Stream(
            stream_id=uuid4(),
            name="STREAM_TWO",
            source_type=SourceType.STREAM,
            source_name="STREAM_ONE",
        ),
    ]
    find_streams.return_value = Success(expected_streams)

    result = get_streams_use_case()

    find_streams.assert_called_once()
    assert isinstance(result, Result.success_type)
    assert isinstance(result.unwrap(), list)
    assert result.unwrap() == expected_streams
def test_should_return_failure_when_find_streams_fails(
    get_streams_use_case: GetStreamsUseCase, find_streams: Mock
) -> None:
    """A failing FindStreams must surface as a BusinessFailureDetails."""
    upstream_failure = FailureDetails(reason="TEST_FIND_STREAMS_FAIL")
    find_streams.return_value = Failure(upstream_failure)

    result = get_streams_use_case()

    find_streams.assert_called_once()
    assert isinstance(result, Result.failure_type)
    assert isinstance(result.failure(), BusinessFailureDetails)
    assert result.failure().reason == "NON_BUSINESS_RULE_CAUSE"
    assert result.failure().failure_due == upstream_failure
|
[
"uuid.uuid4",
"returns.result.Success",
"kamui.core.use_case.stream.get_streams.GetStreamsUseCase",
"unittest.mock.Mock",
"pytest.fixture",
"returns.result.Failure",
"kamui.core.use_case.failure.FailureDetails"
] |
[((378, 410), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (392, 410), False, 'import pytest\n'), ((476, 508), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (490, 508), False, 'import pytest\n'), ((450, 472), 'unittest.mock.Mock', 'Mock', ([], {'spec': 'FindStreams'}), '(spec=FindStreams)\n', (454, 472), False, 'from unittest.mock import Mock\n'), ((587, 618), 'kamui.core.use_case.stream.get_streams.GetStreamsUseCase', 'GetStreamsUseCase', (['find_streams'], {}), '(find_streams)\n', (604, 618), False, 'from kamui.core.use_case.stream.get_streams import FindStreams, GetStreamsUseCase\n'), ((1136, 1157), 'returns.result.Success', 'Success', (['streams_list'], {}), '(streams_list)\n', (1143, 1157), False, 'from returns.result import Failure, Result, Success\n'), ((1520, 1567), 'kamui.core.use_case.failure.FailureDetails', 'FailureDetails', ([], {'reason': '"""TEST_FIND_STREAMS_FAIL"""'}), "(reason='TEST_FIND_STREAMS_FAIL')\n", (1534, 1567), False, 'from kamui.core.use_case.failure import FailureDetails, BusinessFailureDetails\n'), ((1600, 1616), 'returns.result.Failure', 'Failure', (['failure'], {}), '(failure)\n', (1607, 1616), False, 'from returns.result import Failure, Result, Success\n'), ((797, 804), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (802, 804), False, 'from uuid import uuid4\n'), ((966, 973), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (971, 973), False, 'from uuid import uuid4\n')]
|
from apex import amp
from argparse import ArgumentParser
from collections import OrderedDict
from datetime import datetime
import scipy.sparse as sp_sparse
import tables
from itertools import chain
from model import loss_function
from model import VAE
import numpy as np
import os
import pandas as pd
from sklearn.metrics import accuracy_score
# from train_multitask_ccle import read_tsv
import torch
# Apex AMP optimization level; "O1" enables conservative mixed precision.
opt_level = 'O1'
# Use the first GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
def read_tsv(nparpath, genes, outdir, gmtmat, normalize_vals=True):
    """Load a genes-by-cells expression table from a gzip tsv/gct file.

    Parameters
    ----------
    nparpath : str
        Path to the gzip-compressed table. Paths containing "gct" are
        parsed as GCT (two metadata rows skipped, symbols in the
        "Description" column); anything else is a plain tsv with genes
        on rows and cells on columns.
    genes : np.ndarray
        Reference gene symbols; the matrix is restricted to the
        intersection with the file's genes.
    outdir : str
        Directory where the filtered matrix is cached as
        cellByGeneMatrix.npz.
    gmtmat : np.ndarray
        Gene-by-TF matrix whose rows are subset to the shared genes.
    normalize_vals : bool
        When True, scale each cell (column) to a total of 1000.

    Returns
    -------
    (cells-by-genes matrix, barcodes, filtered gmtmat, shared genes)
    """
    h5outpath = os.path.join(
        outdir, "cellByGeneMatrix.npz")
    if "gct" in nparpath:
        # GCT layout: two header rows, then Name index + Description column.
        rnadf = pd.read_csv(
            nparpath, sep="\t", index_col=0,
            compression="gzip", skiprows=2)
        rnadf.drop_duplicates(subset=["Description"], inplace=True)
        rnadf = rnadf[rnadf["Description"].isin(genes)]
        npar = np.array(rnadf.iloc[:, 1:])
        ar_genes = np.array(rnadf["Description"])
        barcodes = np.array(rnadf.columns[1:])
    else:
        # Plain tsv: index column holds gene symbols, columns are cells.
        rnadf = pd.read_csv(
            nparpath, sep="\t", index_col=0,
            compression="gzip")
        npar = np.array(rnadf)
        ar_genes = rnadf.index
        barcodes = np.array(rnadf.columns)
    if normalize_vals:
        # Library-size normalization: counts per 1000 per cell.
        arsum = np.apply_along_axis(np.sum, 0, npar)
        npar = (npar * 1000) / arsum
    # Restrict both the expression matrix and gmtmat to shared genes.
    _, idx_g1, idx_g2 = np.intersect1d(genes, ar_genes, return_indices=True)
    npar = npar[idx_g2, :]
    gmtmat = gmtmat[idx_g1, :]
    out_genes = genes[idx_g1]
    npar = np.transpose(npar)
    # BUGFIX: cache the gene labels that actually match the matrix columns.
    # Previously the full, unfiltered gene list was stored, so the cached
    # "genes" array did not correspond to the cached matrix.
    np.savez_compressed(h5outpath, arr=npar, barcodes=barcodes,
                        genes=out_genes)
    return npar, barcodes, gmtmat, out_genes
def make_plot_umap(mudf, metadf, outdir, numlvs=10):
    """Embed the latent means with UMAP and save colored scatter plots.

    For each (n_neighbors, min_dist) setting, writes the 2-D embedding
    as a tsv.gz plus pdf/png scatter plots colored by CellType.
    """
    metadf.index = metadf["Barcode"]
    import umap
    import seaborn as sns
    latent = np.array(mudf.iloc[:, :numlvs])
    for n_neighbors in [10, 100]:
        for min_dist in [0.45]:
            adname = "UMAP_dist-{}_nNeigh-{}".format(
                min_dist, n_neighbors)
            print(adname)
            projector = umap.UMAP(
                n_neighbors=n_neighbors,
                min_dist=min_dist)
            coords = projector.fit_transform(latent)
            plotdf = pd.DataFrame(coords, columns=["UMAP1", "UMAP2"])
            plotdf["CellType"] = list(metadf.loc[mudf.index, "CellType"])
            plotdf.index = mudf.index
            plotdf.to_csv(
                os.path.join(outdir, adname + ".tsv.gz"),
                sep="\t", compression="gzip")
            figure = sns.relplot(
                x="UMAP1", y="UMAP2", hue="CellType", data=plotdf,
                height=6, aspect=1.5)
            figure.savefig(os.path.join(outdir, adname + ".pdf"))
            figure.savefig(os.path.join(outdir, adname + ".png"))
def make_args():
    """Assemble the hard-coded argument list for a PBMC + SW480 VAE run."""
    # Per-dataset metadata (cell-type labels) and expression matrices.
    metapaths = [
        "/scratch/hdd001/home/mkarimza/" +
        "ciberAtac/10x/raw/scRNA-seq_10XPBMC" +
        "_metadataWithCellType.tsv",
        "/scratch/ssd001/home/mkarimza/" +
        "data/ciberatac/models/vae202012/" +
        "SW480Files/metadata_for_vae_visualization.tsv"]
    nparpaths = [
        "/scratch/hdd001/home/mkarimza/" +
        "ciberAtac/10x/raw/pbmc_unsorted_10k" +
        "_filtered_feature_bc_matrix.h5",
        "/scratch/hdd001/home/mkarimza/" +
        "johnny/A06/10X/outs/" +
        "filtered_feature_bc_matrix.h5"]
    genepath = "/scratch/ssd001/home/mkarimza/" +\
        "data/ciberatac/models/vae202101/" +\
        "scviVersusCustomized/customizedScvi" +\
        "FullTrainScaled1000/genes.txt"
    gmtpath = "../c3.tft.v7.2.symbols.gmt"
    # This genepath deliberately overrides the one assigned above.
    genepath = "/scratch/ssd001/home/mkarimza/" +\
        "data/ciberatac/models/vae202012/" +\
        "commonGenes/Genes_passing_40p.txt"
    outdir = "/scratch/ssd001/home/mkarimza/" +\
        "data/ciberatac/models/vae202101/" +\
        "customScviAppliedOnPbmcAndSw480"
    numlvs = 10
    os.makedirs(outdir, exist_ok=True)
    existingmodelpath = "/scratch/ssd001/home/mkarimza/" +\
        "data/ciberatac/models/vae202101/" +\
        "scviVersusCustomized/customized" +\
        "ScviFullTrainScaled1000/VAE_10LVS.pt"
    # Training options.
    use_connections = True
    loss_scalers = [1, 1, 1]
    predict_celltypes = True
    num_celltypes = 11
    return [gmtpath, nparpaths, outdir,
            numlvs, genepath, metapaths,
            existingmodelpath,
            use_connections,
            loss_scalers,
            predict_celltypes,
            num_celltypes]
def get_matrix_from_h5(filename):
    """Read a 10x HDF5 feature-barcode matrix.

    Returns (feature_ref dict, barcodes, features-by-cells CSC matrix).
    """
    with tables.open_file(filename, 'r') as h5file:
        grp = h5file.get_node(h5file.root, 'matrix')
        barcodes = h5file.get_node(grp, 'barcodes').read()
        matrix = sp_sparse.csc_matrix(
            (getattr(grp, 'data').read(),
             getattr(grp, 'indices').read(),
             getattr(grp, 'indptr').read()),
            shape=getattr(grp, 'shape').read())
        feature_group = h5file.get_node(grp, 'features')
        feature_ref = {
            'id': getattr(feature_group, 'id').read(),
            'name': getattr(feature_group, 'name').read(),
            'feature_type': getattr(feature_group, 'feature_type').read(),
        }
        # Pull in any extra per-feature annotation arrays the file advertises.
        for key in getattr(feature_group, '_all_tag_keys').read():
            feature_ref[key] = getattr(feature_group, key.decode()).read()
    return feature_ref, barcodes, matrix
def read_npz(nparpath, genes, outdir, gmtmat):
    """Load a cached npz expression archive aligned to the reference genes.

    The archive is expected to hold "arr" (expression), "rows" (gene
    labels), and "cols" (barcodes). The matrix is oriented to
    cells-by-genes, restricted to genes shared with `genes`, and cached
    in outdir as cellByGeneMatrix.npz.
    """
    cache_path = os.path.join(
        outdir, "cellByGeneMatrix.npz")
    archive = np.load(nparpath, allow_pickle=True)
    mat = archive["arr"]
    if mat.shape[0] > mat.shape[1]:
        # Stored as genes-by-cells; flip to cells-by-genes.
        mat = np.transpose(mat)
    gene_labels = archive["rows"]
    barcodes = archive["cols"]
    _, idx_ref, idx_local = np.intersect1d(
        genes, gene_labels, return_indices=True)
    mat = mat[:, idx_local]
    gmtmat = gmtmat[idx_ref, :]
    out_genes = genes[idx_ref]
    np.savez_compressed(cache_path, arr=mat, barcodes=barcodes)
    return mat, barcodes, gmtmat, out_genes
def read_h5(h5path, genes, outdir, gmtmat):
    """Load a 10x filtered-feature HDF5 matrix aligned to `genes`.

    Returns (cells-by-genes matrix aligned to `genes`, barcodes, gmtmat,
    genes). Genes absent from the file remain zero-filled in the result.
    """
    h5outpath = os.path.join(
        outdir, "cellByGeneMatrix.npz")
    # Must be in form of filtered feature matrix
    feature_ref, barcodes, matrix = get_matrix_from_h5(h5path)
    # Limit the array to gene-expression features (drops e.g. ATAC peaks).
    idx_gexp = np.where(
        np.array(feature_ref["feature_type"] == b'Gene Expression'))[0]
    npar = matrix.toarray()
    npar = np.transpose(npar[idx_gexp, :])
    # Align file genes onto the reference gene order; unmatched stay zero.
    # NOTE(review): idx_g2 indexes the full feature-name list while npar
    # keeps only the Gene Expression subset — correct only when gene
    # expression features come first in the file; verify for multiome h5.
    expar = np.zeros((len(barcodes), len(genes)), dtype=float)
    gene_names = np.array(
        feature_ref["name"], dtype="|U64")
    _, idx_g1, idx_g2 = np.intersect1d(genes, gene_names, return_indices=True)
    expar[:, idx_g1] = npar[:, idx_g2]
    # BUGFIX: cache the aligned matrix so the stored "genes" labels match
    # its columns (previously the unaligned matrix was stored alongside
    # the reference gene list).
    np.savez_compressed(h5outpath, arr=expar, barcodes=barcodes, genes=genes)
    return expar, barcodes, gmtmat, genes
def get_genes_from_txt(genepath):
    """Read one gene symbol per line into a fixed-width unicode array."""
    return np.loadtxt(genepath, dtype="|U64")
def make_gmtmat(gmtpath, outdir, genepath):
    """Build (and cache) a boolean gene-by-TF membership matrix from a GMT.

    The result is cached as gmt_conv_matrix.npz in outdir and reused on
    subsequent calls. When `genepath` names an existing file, genes are
    limited to that list. Returns (matrix, sorted TF names, gene names).
    """
    cache_path = os.path.join(
        outdir, "gmt_conv_matrix.npz")
    if os.path.exists(cache_path):
        cached = np.load(cache_path)
        return cached["arr"], cached["tfs"], cached["genes"]
    # Parse the GMT file: column 0 is the TF, columns 2+ are its genes.
    tf_to_genes = {}
    with open(gmtpath, "r") as gmt_handle:
        for raw_line in gmt_handle:
            fields = raw_line.rstrip().split("\t")
            tf_to_genes[fields[0]] = fields[2:]
    all_tfs = np.sort(np.array(list(tf_to_genes.keys())))
    all_genes = np.unique(list(chain.from_iterable(tf_to_genes.values())))
    if genepath != "NA" and os.path.exists(genepath):
        select_genes = get_genes_from_txt(genepath)
        print("Limiting to {} genes found in {}".format(
            len(select_genes), genepath))
        all_genes = np.intersect1d(all_genes, select_genes)
    print("Found {} TFs and {} genes in {}".format(
        len(all_tfs), len(all_genes),
        gmtpath))
    npar = np.zeros((len(all_genes), len(all_tfs)), dtype=bool)
    for tf in all_tfs:
        idx_tf = np.where(all_tfs == tf)[0]
        for gene in tf_to_genes[tf]:
            idx_gene = np.where(all_genes == gene)[0]
            npar[idx_gene, idx_tf] = True
        if idx_tf % 100 == 0:
            print("{}/{} TFs added".format(idx_tf[0], len(all_tfs)))
    np.savez_compressed(
        cache_path, arr=npar, tfs=all_tfs, genes=all_genes)
    return npar, all_tfs, all_genes
return npar, all_tfs, all_genes
def get_n_params(model):
    """Count the total number of scalar parameters in a torch model."""
    total = 0
    for param in model.parameters():
        count = 1
        for dim in param.size():
            count *= dim
        total += count
    return total
def get_paths(outdir, numlvs):
    """Resolve the log directory, model path, and checkpoint path.

    Falls back to <outdir>/logs/checkpoint when the SLURM checkpoint
    directory is unavailable.
    """
    job_id = os.environ.get("SLURM_JOB_ID", "NA")
    logdir = os.path.join(outdir, "logs")
    os.makedirs(logdir, exist_ok=True)
    modelpath = os.path.join(
        outdir, "VAE_{}LVS.pt".format(numlvs))
    chkdir = os.path.join(
        "/checkpoint/mkarimza", job_id)
    if not os.path.exists(chkdir):
        chkdir = os.path.join(logdir, "checkpoint")
        os.makedirs(chkdir, exist_ok=True)
    chkpath = os.path.join(
        chkdir, "VAE_{}LVS.pt".format(numlvs))
    return logdir, modelpath, chkpath
def train_model(vae, optimizer, MINIBATCH, MAXEPOCH, expar, logdir,
                modelpath, chkpath, one_hot_ct_encoding,
                loss_scalers, predict_celltypes,
                celltypes=[], batch_idxs=None):
    """Train the VAE for MAXEPOCH epochs over the cells-by-genes matrix.

    Parameters
    ----------
    vae : VAE
        Model producing the scVI-style output dict (qz_m, qz_v, px_rate,
        px_r, px_dropout, ql_m, ql_v, ctpred).
    optimizer : torch.optim.Optimizer
        Optimizer (AMP-wrapped on GPU).
    MINIBATCH, MAXEPOCH : int
        Minibatch size and epoch count.
    expar : np.ndarray
        Cells-by-genes expression matrix.
    logdir, modelpath, chkpath : str
        Where per-epoch logs and periodic checkpoints are written.
    one_hot_ct_encoding : torch.Tensor
        One-hot cell-type labels aligned with expar rows.
    loss_scalers : sequence of 3 floats
        Divisors for reconstruction, KLD, and cross-entropy losses.
    predict_celltypes : bool
        When True, also optimize the cell-type prediction head and log
        per-cell-type accuracy.
    celltypes : list
        Cell-type names (read-only here) used for the accuracy log header.
    batch_idxs : np.ndarray or None
        Optional per-cell batch indices for batch correction.

    Returns
    -------
    The trained model.
    """
    criterion_class = torch.nn.CrossEntropyLoss()
    # Timestamp for unique log file names (":" replaced for filesystems).
    time_str = str(datetime.now())
    time_str = time_str.replace(" ", "_")
    time_str = time_str.replace(":", "0")
    # BUGFIX: use .get() so running outside SLURM does not raise KeyError
    # (get_paths() already tolerates a missing SLURM_JOB_ID the same way).
    logpath = os.path.join(
        logdir,
        "training.log.{}.{}".format(
            os.environ.get("SLURM_JOB_ID", "NA"), time_str))
    accpath = logpath + "_accuracy.txt"
    loglink = open(logpath, "w")
    header = ["Epoch", "Reconstruction.Loss", "KLD",
              "CE.Loss", "Accuracy", "MiniBatch.ID",
              "Time.Stamp"]
    loglink.write("\t".join(header) + "\n")
    loglink.close()
    if predict_celltypes:
        # Separate log with one accuracy column per cell type.
        acclink = open(accpath, "w")
        header_acc = ["Epoch"]
        for celltype in celltypes:
            header_acc.append(celltype + ".acc")
        acclink.write("\t".join(header_acc) + "\n")
        acclink.close()
    TOTBATCHIDX = int(expar.shape[0] / MINIBATCH)
    # Fixed random permutation of cells, shared across epochs.
    sampled_idxs = np.random.choice(
        np.arange(expar.shape[0]), expar.shape[0], replace=False)
    for epoch in range(MAXEPOCH):
        running_loss_reconst = 0
        running_kld = 0
        running_ce = 0
        running_loss = 0
        accval = 0
        celltype_resps = np.zeros(
            (expar.shape[0]))
        celltype_preds = np.zeros(
            (expar.shape[0]))
        for idxbatch in range(TOTBATCHIDX):
            idxbatch_st = idxbatch * MINIBATCH
            idxbatch_end = (idxbatch + 1) * MINIBATCH
            if idxbatch_end > expar.shape[0]:
                idxbatch_end = expar.shape[0]
            cur_sidxs = sampled_idxs[idxbatch_st:idxbatch_end]
            train1 = torch.from_numpy(
                expar[cur_sidxs, :]).to(device).float()
            if batch_idxs is not None:
                batch_idxs_tensor = torch.from_numpy(
                    batch_idxs[cur_sidxs]).long().to(device).reshape(
                        -1, 1)
            # Per-minibatch library-size statistics for the scVI-style loss.
            local_l_mean = np.mean(
                np.apply_along_axis(
                    np.sum, 1, expar[cur_sidxs, :]))
            local_l_var = np.var(
                np.apply_along_axis(
                    np.sum, 1, expar[cur_sidxs, :]))
            if batch_idxs is None:
                outdict = vae(train1)
            else:
                outdict = vae(train1, batch_idxs_tensor)
            ct_pred = outdict["ctpred"]
            loss_1, loss_2 = loss_function(
                outdict['qz_m'], outdict['qz_v'], train1,
                outdict['px_rate'], outdict['px_r'],
                outdict['px_dropout'], outdict['ql_m'],
                outdict['ql_v'], True,
                local_l_mean, local_l_var)
            loss_1 = torch.mean(loss_1)
            loss_2 = torch.mean(loss_2)
            optimizer.zero_grad()
            if predict_celltypes:
                # Argmax of one-hot labels and predictions for accuracy.
                one_hot_resp = torch.max(
                    one_hot_ct_encoding[cur_sidxs],
                    1)[1].to(device).long()
                one_hot_pred = torch.max(
                    ct_pred, 1)[1]
                celltype_resps[cur_sidxs] = \
                    one_hot_resp.detach().cpu().numpy()
                celltype_preds[cur_sidxs] = \
                    one_hot_pred.detach().cpu().numpy()
                adacc = accuracy_score(
                    one_hot_resp.detach().cpu().numpy(),
                    one_hot_pred.detach().cpu().numpy())
                accval += adacc
                loss_3 = criterion_class(
                    ct_pred, one_hot_resp)
            else:
                loss_3 = 0
            if idxbatch == 0:
                print(loss_1, loss_2, loss_3)
            if idxbatch == -1 and epoch % 25 == 0:
                # NOTE(review): this branch is unreachable (idxbatch >= 0);
                # kept as-is to preserve behavior.
                loss_scalers = np.array(
                    [loss_1.detach().cpu().numpy(),
                     loss_2.detach().cpu().numpy(),
                     loss_3.detach().cpu().numpy()])
                if np.min(loss_scalers) < 0:
                    if loss_2 < 0:
                        loss_2 = loss_2 * -1
                    else:
                        raise ValueError("One of the losses are negative")
                print(loss_1)
                print(loss_2)
                print(loss_3)
                loss_scalers = loss_scalers / np.min(loss_scalers)
            loss = (loss_1 / torch.tensor(loss_scalers[0])) + (
                loss_2 / torch.tensor(loss_scalers[1])) + (
                    loss_3 / torch.tensor(loss_scalers[2]))
            if idxbatch == 0:
                print(loss)
            if torch.isnan(loss):
                print("Losses: {} {} {}".format(loss_1, loss_2, loss_3))
                raise ValueError("NA occured in loss")
            if torch.cuda.is_available():
                # AMP loss scaling on GPU.
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
            else:
                loss.backward()
            optimizer.step()
            running_loss_reconst += (loss_1 / loss_scalers[0])
            running_kld += (loss_2 / loss_scalers[1])
            running_ce += (loss_3 / loss_scalers[2])
            running_loss += loss
            del train1, outdict
            if torch.cuda.is_available():
                torch.cuda.empty_cache()
        cur_loss = running_loss / TOTBATCHIDX
        cur_loss_reconst = running_loss_reconst / TOTBATCHIDX
        cur_kld = running_kld / TOTBATCHIDX
        cur_ce = running_ce / TOTBATCHIDX
        accval = accval / TOTBATCHIDX
        # Per-cell-type accuracy over the whole epoch.
        adlist_cts = [str(epoch)]
        for k in range(len(celltypes)):
            pred_cell = celltype_preds == k
            resp_cell = celltype_resps == k
            cur_acc = accuracy_score(
                resp_cell, pred_cell)
            adlist_cts.append(str(round(cur_acc, 3)))
        if predict_celltypes:
            with open(accpath, "a+") as acclink:
                acclink.write("\t".join(adlist_cts) + "\n")
        print("Epoch {}, Loss {} at {}".format(
            epoch, cur_loss.item(), datetime.now()))
        with open(logpath, "a+") as loglink:
            adlist = [str(epoch), str(cur_loss_reconst.item()),
                      str(cur_kld.item()), str(cur_ce.item()),
                      str(round(accval, 3)),
                      str(idxbatch), str(datetime.now())]
            loglink.write("\t".join(adlist) + "\n")
        if epoch % 10 == 0:
            # Periodic checkpoint (model + optimizer, plus amp state on GPU).
            checkpoint = {
                'model': vae.state_dict(),
                'optimizer': optimizer.state_dict(),
            }
            if torch.cuda.is_available():
                checkpoint["amp"] = amp.state_dict()
            for eachpath in [modelpath, chkpath]:
                torch.save(checkpoint, eachpath)
    return vae
def make_labels(metapath, expar, barcodes):
    """Attach cell-type labels from a metadata tsv to the expression rows.

    Keeps only barcodes present in both `barcodes` and the metadata, and
    drops cell types containing "Not" or "nan". Returns the filtered
    expression matrix, the matching metadata frame, the kept barcodes,
    and a one-hot cell-type tensor.
    """
    if "S" in str(barcodes.dtype):
        # Convert byte-string barcodes to fixed-width unicode.
        barcodes = np.array(barcodes, dtype="|U64")
    metadf = pd.read_csv(metapath, sep="\t", index_col=0)
    if "CellType" not in metadf.columns:
        if "Site_Primary" in metadf.columns:
            metadf["CellType"] = metadf["Site_Primary"]
    metadf["Barcode"] = metadf.index
    # Drop unassigned / ambiguous cell-type labels.
    labels = np.unique(metadf["CellType"])
    valid_labels = np.array(
        [lab for lab in labels if "Not" not in lab and "nan" not in lab])
    metadf = metadf[metadf["CellType"].isin(valid_labels)]
    metadf = metadf[metadf["Barcode"].isin(barcodes)]
    _, keep_rows, meta_rows = np.intersect1d(
        barcodes, np.array(metadf["Barcode"]), return_indices=True)
    outar = expar[keep_rows, :]
    outdf = metadf.iloc[meta_rows, :]
    out_barcodes = np.array(barcodes, dtype="|U64")[keep_rows]
    dummies = pd.get_dummies(outdf["CellType"])
    one_hot_tensor = torch.from_numpy(np.array(dummies))
    return outar, outdf, out_barcodes, one_hot_tensor
def load_npar(nparpath, genes, outdir, gmtmat,
              metapath):
    """Dispatch expression loading by file extension and attach labels.

    .npz -> read_npz, .gct/.tsv -> read_tsv (with CCLE-style labels),
    .h5 -> read_h5. Returns (expression matrix, metadata frame,
    barcodes, shared genes, filtered gmtmat).
    """
    if ".npz" in nparpath:
        expar, barcodes, gmtmat, genes = read_npz(
            nparpath, genes, outdir, gmtmat)
        labeled = make_labels(metapath, expar, barcodes)
    elif ".gct" in nparpath or ".tsv" in nparpath:
        expar, barcodes, gmtmat, genes = read_tsv(
            nparpath, genes, outdir, gmtmat, False)
        from train_multitask_ccle import make_labels as ccle_make_labels
        labeled = ccle_make_labels(
            metapath, expar, barcodes)
    elif ".h5" in nparpath:
        expar, barcodes, gmtmat, genes = read_h5(
            nparpath, genes, outdir, gmtmat)
        labeled = make_labels(metapath, expar, barcodes)
    expar, metadf, barcodes, _ = labeled
    return expar, metadf, barcodes, genes, gmtmat
def filter_by_var(expar, genes, gmtmat, num_genes):
    """Keep the num_genes most variable columns of expar (and matching
    rows of gmtmat / entries of genes)."""
    variances = np.apply_along_axis(np.var, 0, expar)
    top_idx = np.argsort(variances)[::-1][:num_genes]
    return expar[:, top_idx], genes[top_idx], gmtmat[top_idx, :]
def intersect_lists(genes_list):
    """Intersect all gene arrays in genes_list into one shared array.

    Requires at least two arrays, matching the original contract.
    """
    shared = np.intersect1d(genes_list[0], genes_list[1])
    for extra in genes_list[2:]:
        shared = np.intersect1d(shared, extra)
    return shared
def load_inputs(nparpaths, gmtmat, outdir,
                genes, metapaths, filter_var=False,
                num_genes=2000):
    """Load and merge several expression datasets onto a shared gene set.

    Each dataset is loaded via load_npar, labeled, and stacked row-wise
    into one cells-by-genes matrix over the intersection of all gene
    lists. Barcodes are prefixed with "File.<i>." to keep them unique
    across datasets; "Batch.Index" records the dataset of origin.
    Returns a dict with keys: expar, metadf, barcodes, genes, gmtmat,
    cellTypes, batch_idx, one_hot.
    """
    # Keep the unfiltered gmtmat / gene list; `gmtmat` and `genes` are
    # progressively narrowed inside the loop by load_npar.
    GMTMAT = gmtmat
    gmtmat_genes = genes
    metadf_list = []
    expar_list = []
    barcodes_list = []
    genes_list = []
    celltypes_list = []
    num_barcodes = 0
    for i in range(len(nparpaths)):
        print("Loading {}".format(nparpaths[i]))
        expar, metadf, barcodes, genes, gmtmat = load_npar(
            nparpaths[i], genes, outdir, gmtmat, metapaths[i])
        expar_list.append(expar)
        barcodes_list.append(barcodes)
        celltypes_list.append(
            np.array(metadf["CellType"], dtype="|U64"))
        # Per-dataset metadata with dataset-prefixed barcodes.
        addf = pd.DataFrame(
            dict(OriginalBarcode=barcodes, CellType=celltypes_list[-1]))
        addf["Dataset"] = "File.{}.".format(i + 1)
        addf["Barcode"] = addf["Dataset"] + addf["OriginalBarcode"]
        addf["Batch.Index"] = i
        metadf_list.append(addf)
        genes_list.append(addf) if False else genes_list.append(genes)
        num_barcodes += len(barcodes)
    metadf = pd.concat(metadf_list)
    metadf.index = metadf["Barcode"]
    if len(genes_list) > 1:
        genes = intersect_lists(genes_list)
    else:
        genes = genes_list[0]
    # Filter gmtmat rows down to the genes shared by every dataset.
    _, idx_1, idx_2 = np.intersect1d(gmtmat_genes, genes, return_indices=True)
    gmtmat = GMTMAT[idx_1, :]
    # Stack all datasets row-wise, mapping each onto the shared gene order.
    npar = np.zeros((num_barcodes, len(genes)), dtype=int)
    i_st = 0
    i_end = 0
    for k in range(len(expar_list)):
        cur_genes = genes_list[k]
        expar = expar_list[k]
        shared_genes, idx_1, idx_2 = np.intersect1d(
            genes, cur_genes, return_indices=True)
        i_end = i_st + expar.shape[0]
        npar[i_st:i_end, idx_1] = expar[:, idx_2]
        i_st = i_end
    if filter_var:
        print("Filtering by variance")
        npar, genes, gmtmat = filter_by_var(
            npar, genes, gmtmat, num_genes)
    one_hot_ct_encoding = pd.get_dummies(metadf["CellType"])
    one_hot_tensor = torch.from_numpy(np.array(one_hot_ct_encoding))
    out_dict = dict(
        expar=npar,
        metadf=metadf,
        barcodes=np.array(metadf["Barcode"]),
        genes=genes,
        gmtmat=gmtmat,
        cellTypes=np.array(celltypes_list),
        batch_idx=np.array(metadf["Batch.Index"]),
        one_hot=one_hot_tensor)
    return out_dict
def main(gmtpath, nparpaths, outdir, numlvs, metapaths,
         dont_train=False, genepath="NA", existingmodelpath="NA",
         use_connections=True, loss_scalers=[1, 1, 1],
         predict_celltypes=True, num_celltypes=59, filter_var=False,
         num_genes=2000, include_batches=False):
    """End-to-end pipeline: build the gene/TF map, load datasets, train
    (unless dont_train), then export latent matrices, TF activations,
    and UMAP plots into `outdir`.
    """
    # Batch correction: one batch per input file when requested.
    BATCHEFFECT_NUM = 0
    if include_batches:
        BATCHEFFECT_NUM = len(nparpaths)
    MINIBATCH = 32
    MAXEPOCH = 20
    gmtmat, tfs, genes = make_gmtmat(gmtpath, outdir, genepath)
    # expar, barcodes = read_h5(h5path, genes, outdir)
    dict_inputs = load_inputs(
        nparpaths, gmtmat, outdir, genes, metapaths, filter_var,
        num_genes)
    expar = dict_inputs["expar"]
    metadf = dict_inputs["metadf"]
    gmtmat = dict_inputs["gmtmat"]
    one_hot_ct_encoding = dict_inputs["one_hot"]
    barcodes = dict_inputs["barcodes"]
    batch_idxs = dict_inputs["batch_idx"]
    if not include_batches:
        batch_idxs = None
    # celltypes = dict_inputs["cellTypes"]
    celltypes = []
    if predict_celltypes:
        celltypes = list(pd.unique(metadf["CellType"]))
        celltypes.sort()
    # Persist the merged metadata next to the model outputs.
    metadf.to_csv(
        os.path.join(outdir, "metadata.tsv.gz"),
        sep="\t", compression="gzip")
    # Save genes
    print("Shape of expar is : {}".format(expar.shape))
    save_genes(genes, outdir)
    print("Max in expar is {}".format(np.max(expar)))
    # TF-by-gene connection mask for the first encoder layer; all-ones
    # when connections are disabled.
    if use_connections:
        gmttensor = torch.from_numpy(
            np.transpose(gmtmat)).to(device).long()
    else:
        gmttensor = torch.ones(
            gmtmat.shape[1], gmtmat.shape[0]).to(device).long()
    print("Shape of expar is : {}".format(expar.shape))
    logdir, modelpath, chkpath = get_paths(outdir, numlvs)
    if existingmodelpath == "NA":
        existingmodelpath = modelpath
    vae = VAE(expar.shape[1],  # num genes
              gmttensor,
              num_celltypes,
              BATCHEFFECT_NUM,  # batch
              0,  # labels
              gmtmat.shape[1],  # hiddensize
              numlvs)
    n_params = get_n_params(vae)
    print(vae)
    print("VAE has {} parameters".format(n_params))
    vae.to(device)
    # optimizer = adabound.AdaBound(
    #     vae.parameters(), lr=0.001, final_lr=0.1)
    optimizer = torch.optim.Adam(
        vae.parameters(), lr=0.002)
    if torch.cuda.is_available():
        # Wrap model/optimizer for mixed-precision training on GPU.
        vae, optimizer = amp.initialize(
            vae, optimizer, opt_level=opt_level)
    # Resume from an existing checkpoint if one loads successfully.
    vae, optimizer = load_existing_model(
        existingmodelpath, chkpath, vae, optimizer)
    if not dont_train:
        np.random.seed(42)
        # 20 training rounds; latent means and UMAPs exported after each.
        for i in range(20):
            # idx_rand = np.random.choice(
            #     np.arange(expar.shape[0]), SAMPLE_IDXS)
            vae = train_model(
                vae, optimizer, MINIBATCH, MAXEPOCH,
                expar, logdir,
                modelpath, chkpath, one_hot_ct_encoding,
                loss_scalers, predict_celltypes,
                celltypes, batch_idxs)
            reconst, mumat, sd2mat, tf_act = apply_model(
                vae, expar, numlvs, MINIBATCH, batch_idxs)
            mudf = pd.DataFrame(mumat)
            mudf.columns = ["LV.mu.{}".format(each)
                            for each in range(numlvs)]
            mudf["Index"] = np.array(
                barcodes, dtype="|U64")
            mudf.index = mudf["Index"]
            mudf.to_csv(
                os.path.join(outdir, "VAE_mu-matrix.tsv.gz"),
                compression="gzip", sep="\t")
            make_plot_umap(mudf, metadf, outdir, numlvs)
    # Final pass: export reconstruction stats and TF-layer activations.
    reconst, mumat, sd2mat, tf_act = apply_model(
        vae, expar, numlvs, MINIBATCH, batch_idxs)
    tf_act_df = pd.DataFrame(tf_act)
    tf_act_df.index = np.array(
        barcodes, dtype="|U64")
    tf_act_df.columns = tfs
    tf_act_df["Labels"] = metadf.loc[tf_act_df.index]["CellType"]
    tf_act_df.to_csv(
        os.path.join(outdir, "VAE-TF-adjusted-weights_CellxTF.tsv.gz"),
        sep="\t", compression="gzip")
    # zmat = np_reparameterize(mumat, sd2mat)
    # Sample z from the posterior and export it alongside the means.
    zmat = torch_reparameterize(mumat, sd2mat)
    zdf = pd.DataFrame(zmat)
    zdf.columns = ["LV.Z.{}".format(each)
                   for each in range(numlvs)]
    zdf["Index"] = np.array(
        barcodes, dtype="|U64")
    zdf.index = np.array(
        barcodes, dtype="|U64")
    zdf.to_csv(
        os.path.join(outdir, "VAE_Z-matrix.tsv.gz"),
        compression="gzip", sep="\t")
    outdir_full = os.path.join(
        outdir, "fullDatasetZPlot")
    os.makedirs(outdir_full, exist_ok=True)
    make_plot_umap(zdf, metadf, outdir_full, numlvs)
    mudf = pd.DataFrame(mumat)
    mudf.columns = ["LV.mu.{}".format(each)
                    for each in range(numlvs)]
    mudf["Index"] = np.array(
        barcodes, dtype="|U64")
    mudf.index = mudf["Index"]
    mudf.to_csv(
        os.path.join(outdir, "VAE_mu-matrix.tsv.gz"),
        compression="gzip", sep="\t")
    outdir_full = os.path.join(
        outdir, "fullDatasetPlot")
    os.makedirs(outdir_full, exist_ok=True)
    make_plot_umap(mudf, metadf, outdir_full, numlvs)
    sd2df = pd.DataFrame(sd2mat)
    sd2df.columns = [
        "LV.logVAR.{}".format(each)
        for each in range(numlvs)]
    sd2df["Index"] = mudf["Index"]
    sd2df.index = mudf["Index"]
    sd2df.to_csv(
        os.path.join(outdir, "VAE_variance-matrix.tsv.gz"),
        compression="gzip", sep="\t")
def np_reparameterize(mu, logvar):
    """Sample z = mu + eps * sigma (reparameterization trick), numpy in/out."""
    mu_t = torch.from_numpy(mu)
    std_t = torch.exp(0.5 * torch.from_numpy(logvar))
    eps_t = torch.randn_like(std_t)
    return (mu_t + eps_t * std_t).numpy()
def load_existing_model(modelpath, chkpath, vae, optimizer):
    """Restore model/optimizer (and amp on GPU) from the first loadable
    checkpoint among modelpath and chkpath; return them unchanged if
    neither loads."""
    for candidate in (modelpath, chkpath):
        if not os.path.exists(candidate):
            continue
        try:
            checkpoint = torch.load(candidate)
            # Strip DataParallel's "module." prefix from parameter names.
            renamed = OrderedDict()
            for key, value in checkpoint['model'].items():
                renamed[key.replace('module.', '')] = value
            vae.load_state_dict(renamed)
            optimizer.load_state_dict(checkpoint['optimizer'])
            if torch.cuda.is_available():
                amp.load_state_dict(checkpoint['amp'])
            print("Loaded from {}".format(candidate))
            return vae, optimizer
        except Exception:
            # Best-effort restore: fall through to the next candidate.
            pass
    print("Didn't load from any")
    return vae, optimizer
def save_genes(genes, outdir):
    """Write one gene per line to <outdir>/genes.txt."""
    outpath = os.path.join(outdir, "genes.txt")
    with open(outpath, "w") as outlink:
        for gene in genes:
            outlink.write(gene + "\n")
def torch_reparameterize(mumat, varmat):
    """Draw z ~ Normal(mu, sqrt(var)) via rsample; numpy in, numpy out."""
    from torch.distributions import Normal
    mu_t = torch.from_numpy(mumat)
    std_t = torch.from_numpy(varmat).sqrt()
    sample = Normal(mu_t, std_t).rsample()
    return sample.detach().numpy()
def get_hidden_layer(vae, train1, batch_tensor=None, n_batch=0):
    """Return the first encoder layer's masked activations for train1.

    When batch information is supplied, a one-hot batch block is
    concatenated to the input first. The layer's weights are multiplied
    by its binary connections mask before the affine transform.
    """
    if n_batch > 0 and batch_tensor is not None:
        batch_np = batch_tensor.reshape(-1).cpu().numpy()
        onehot = torch.zeros((train1.shape[0], n_batch))
        for b in range(n_batch):
            onehot[np.where(batch_np == b)[0], b] = 1
        train1 = torch.cat((train1, onehot.to(train1.device)), dim=-1)
    layer = vae.z_encoder.encoder.fc_layers[0][0]
    masked_w = torch.mul(layer.weights, layer.connections)
    activations = torch.add(
        torch.mm(train1, masked_w.detach().t()), layer.bias)
    return activations.cpu().detach().numpy()
def apply_model(vae, expar, numlvs, MINIBATCH, batch_idxs=None):
    """Run the trained VAE over expar in minibatches.

    Returns (reconstruction, latent means, latent variances, TF-layer
    activations), each row-aligned with expar.
    """
    n_batch = 0
    batch_tensor = None
    if batch_idxs is not None:
        n_batch = len(np.unique(batch_idxs))
    tf_dim = vae.z_encoder.encoder.fc_layers[0][0].connections.shape[0]
    num_cells = expar.shape[0]
    reconst = np.zeros(expar.shape)
    mumat = np.zeros((num_cells, numlvs))
    sd2mat = np.zeros((num_cells, numlvs))
    tf_activation = np.zeros((num_cells, tf_dim))
    num_batches = int(num_cells / MINIBATCH) + 1
    for batch_i in range(num_batches):
        start = batch_i * MINIBATCH
        if start >= num_cells:
            break
        end = min([(batch_i + 1) * MINIBATCH, num_cells])
        minibatch = torch.from_numpy(
            expar[start:end, :]).to(device).float()
        if batch_idxs is None:
            outdict = vae(minibatch)
        else:
            batch_tensor = torch.from_numpy(
                batch_idxs[start:end]).to(device).long().reshape(-1, 1)
            outdict = vae(minibatch, batch_tensor)
        reconst[start:end, :] = outdict["px_scale"].cpu().detach().numpy()
        mumat[start:end, :] = outdict["qz_m"].cpu().detach().numpy()
        sd2mat[start:end, :] = outdict["qz_v"].cpu().detach().numpy()
        tf_activation[start:end, :] = get_hidden_layer(
            vae, minibatch, batch_tensor, n_batch)
        if batch_i % 100 == 0:
            print("Applied on {}/{}".format(batch_i, num_batches))
    return reconst, mumat, sd2mat, tf_activation
if __name__ == "__main__":
    # Command-line entry point: parse options and launch main().
    parser = ArgumentParser(
        description="Train VAE using "
        "mapping of genes to TFs")
    parser.add_argument(
        "gmtpath",
        help="Path to GMT file mapping "
        "genes to TFs")
    parser.add_argument(
        "outdir",
        help="Path to output directory for "
        "saving the model and log files")
    parser.add_argument(
        "--nparpaths",
        nargs="*",
        help="Space-separated paths to scRNA-seq "
        "file npz containing arr, rows, and cols")
    parser.add_argument(
        "--numlvs",
        type=int,
        default=10,
        help="Number of latent variables")
    parser.add_argument(
        "--dont-train",
        action="store_true",
        help="Specify if you want to apply an existing "
        "model which is stored in outdir")
    parser.add_argument(
        "--genepath",
        default="NA",
        help="Path to .txt file containing "
        "one gene per line to limit the list "
        "of genes we use here")
    parser.add_argument(
        "--modelpath",
        default="NA",
        help="Specify if you don't want the "
        "model existing in <outdir>/VAE_<--numlvs>LVS.pt")
    parser.add_argument(
        "--metapaths",
        nargs="*",
        required=True,
        help="Space-separated path to metadata tsv with "
        "a column named as barcode and a "
        "column named as cell type")
    parser.add_argument(
        "--use-connections",
        action="store_true",
        help="If set, will enforce weights that don't "
        "correspong to TF-gene mappings to be zero")
    parser.add_argument(
        "--loss-scalers",
        nargs="*",
        default=[1, 1, 1],
        type=float,
        help="Specify values to divide "
        "MSE, KLD, and CE losses by: example: "
        "--loss-scalers 100 1 1")
    parser.add_argument(
        "--predict-celltypes",
        action="store_true",
        help="Specify --predict-celltypes to "
        "optimize the cell type prediction task as well")
    parser.add_argument(
        "--num-celltypes",
        default=59,
        type=int,
        help="Number of cell types to predict (must match "
        "the column CellType in metadata file)")
    parser.add_argument(
        "--filter-var",
        action="store_true",
        help="If specified, will filter by top 2000 most "
        "variant genes")
    parser.add_argument(
        "--num-genes",
        default=2000,
        type=int,
        help="Number of genes to filter by highest variance")
    parser.add_argument(
        "--include-batches",
        action="store_true",
        help="Specify if more than one h5 file is being passed "
        "and you want to allow scVI to correct the batches")
    args = parser.parse_args()
    print(args)
    modelpath = args.modelpath
    if modelpath == "NA":
        # Default model location inside the output directory.
        modelpath = os.path.join(
            args.outdir, "VAE_{}LVS.pt".format(args.numlvs))
    main(args.gmtpath, args.nparpaths,
         args.outdir, args.numlvs, args.metapaths,
         args.dont_train, args.genepath, modelpath,
         args.use_connections, args.loss_scalers,
         args.predict_celltypes, args.num_celltypes,
         args.filter_var, args.num_genes,
         args.include_batches)
|
[
"apex.amp.state_dict",
"numpy.load",
"numpy.random.seed",
"argparse.ArgumentParser",
"pandas.read_csv",
"sklearn.metrics.accuracy_score",
"numpy.argsort",
"numpy.savez_compressed",
"numpy.arange",
"seaborn.relplot",
"train_multitask_ccle.make_labels",
"os.path.join",
"torch.isnan",
"numpy.unique",
"pandas.DataFrame",
"torch.ones",
"torch.load",
"numpy.transpose",
"os.path.exists",
"numpy.apply_along_axis",
"torch.exp",
"numpy.max",
"apex.amp.scale_loss",
"numpy.loadtxt",
"model.loss_function",
"torch.zeros",
"numpy.intersect1d",
"datetime.datetime.now",
"pandas.concat",
"torch.mean",
"torch.randn_like",
"pandas.get_dummies",
"torch.mul",
"umap.UMAP",
"numpy.sort",
"numpy.min",
"torch.cuda.is_available",
"torch.max",
"tables.open_file",
"model.VAE",
"torch.from_numpy",
"apex.amp.load_state_dict",
"apex.amp.initialize",
"os.makedirs",
"torch.add",
"torch.nn.CrossEntropyLoss",
"numpy.zeros",
"pandas.unique",
"torch.save",
"scipy.sparse.csc_matrix",
"numpy.where",
"numpy.array",
"torch.cuda.empty_cache",
"collections.OrderedDict",
"itertools.chain.from_iterable",
"torch.tensor"
] |
[((578, 622), 'os.path.join', 'os.path.join', (['outdir', '"""cellByGeneMatrix.npz"""'], {}), "(outdir, 'cellByGeneMatrix.npz')\n", (590, 622), False, 'import os\n'), ((1460, 1512), 'numpy.intersect1d', 'np.intersect1d', (['genes', 'ar_genes'], {'return_indices': '(True)'}), '(genes, ar_genes, return_indices=True)\n', (1474, 1512), True, 'import numpy as np\n'), ((1612, 1630), 'numpy.transpose', 'np.transpose', (['npar'], {}), '(npar)\n', (1624, 1630), True, 'import numpy as np\n'), ((1635, 1710), 'numpy.savez_compressed', 'np.savez_compressed', (['h5outpath'], {'arr': 'npar', 'barcodes': 'barcodes', 'genes': 'ar_genes'}), '(h5outpath, arr=npar, barcodes=barcodes, genes=ar_genes)\n', (1654, 1710), True, 'import numpy as np\n'), ((1926, 1957), 'numpy.array', 'np.array', (['mudf.iloc[:, :numlvs]'], {}), '(mudf.iloc[:, :numlvs])\n', (1934, 1957), True, 'import numpy as np\n'), ((4087, 4121), 'os.makedirs', 'os.makedirs', (['outdir'], {'exist_ok': '(True)'}), '(outdir, exist_ok=True)\n', (4098, 4121), False, 'import os\n'), ((5868, 5912), 'os.path.join', 'os.path.join', (['outdir', '"""cellByGeneMatrix.npz"""'], {}), "(outdir, 'cellByGeneMatrix.npz')\n", (5880, 5912), False, 'import os\n'), ((5934, 5970), 'numpy.load', 'np.load', (['nparpath'], {'allow_pickle': '(True)'}), '(nparpath, allow_pickle=True)\n', (5941, 5970), True, 'import numpy as np\n'), ((6149, 6201), 'numpy.intersect1d', 'np.intersect1d', (['genes', 'ar_genes'], {'return_indices': '(True)'}), '(genes, ar_genes, return_indices=True)\n', (6163, 6201), True, 'import numpy as np\n'), ((6387, 6446), 'numpy.savez_compressed', 'np.savez_compressed', (['h5outpath'], {'arr': 'npar', 'barcodes': 'barcodes'}), '(h5outpath, arr=npar, barcodes=barcodes)\n', (6406, 6446), True, 'import numpy as np\n'), ((6554, 6598), 'os.path.join', 'os.path.join', (['outdir', '"""cellByGeneMatrix.npz"""'], {}), "(outdir, 'cellByGeneMatrix.npz')\n", (6566, 6598), False, 'import os\n'), ((6897, 6928), 'numpy.transpose', 
'np.transpose', (['npar[idx_gexp, :]'], {}), '(npar[idx_gexp, :])\n', (6909, 6928), True, 'import numpy as np\n'), ((7293, 7336), 'numpy.array', 'np.array', (["feature_ref['name']"], {'dtype': '"""|U64"""'}), "(feature_ref['name'], dtype='|U64')\n", (7301, 7336), True, 'import numpy as np\n'), ((7370, 7424), 'numpy.intersect1d', 'np.intersect1d', (['genes', 'gene_names'], {'return_indices': '(True)'}), '(genes, gene_names, return_indices=True)\n', (7384, 7424), True, 'import numpy as np\n'), ((7468, 7540), 'numpy.savez_compressed', 'np.savez_compressed', (['h5outpath'], {'arr': 'npar', 'barcodes': 'barcodes', 'genes': 'genes'}), '(h5outpath, arr=npar, barcodes=barcodes, genes=genes)\n', (7487, 7540), True, 'import numpy as np\n'), ((7666, 7700), 'numpy.loadtxt', 'np.loadtxt', (['genepath'], {'dtype': '"""|U64"""'}), "(genepath, dtype='|U64')\n", (7676, 7700), True, 'import numpy as np\n'), ((7788, 7831), 'os.path.join', 'os.path.join', (['outdir', '"""gmt_conv_matrix.npz"""'], {}), "(outdir, 'gmt_conv_matrix.npz')\n", (7800, 7831), False, 'import os\n'), ((7848, 7874), 'os.path.exists', 'os.path.exists', (['gmtoutpath'], {}), '(gmtoutpath)\n', (7862, 7874), False, 'import os\n'), ((8291, 8307), 'numpy.sort', 'np.sort', (['all_tfs'], {}), '(all_tfs)\n', (8298, 8307), True, 'import numpy as np\n'), ((8416, 8436), 'numpy.unique', 'np.unique', (['all_genes'], {}), '(all_genes)\n', (8425, 8436), True, 'import numpy as np\n'), ((9236, 9307), 'numpy.savez_compressed', 'np.savez_compressed', (['gmtoutpath'], {'arr': 'npar', 'tfs': 'all_tfs', 'genes': 'all_genes'}), '(gmtoutpath, arr=npar, tfs=all_tfs, genes=all_genes)\n', (9255, 9307), True, 'import numpy as np\n'), ((9676, 9704), 'os.path.join', 'os.path.join', (['outdir', '"""logs"""'], {}), "(outdir, 'logs')\n", (9688, 9704), False, 'import os\n'), ((9709, 9743), 'os.makedirs', 'os.makedirs', (['logdir'], {'exist_ok': '(True)'}), '(logdir, exist_ok=True)\n', (9720, 9743), False, 'import os\n'), ((9834, 9878), 
'os.path.join', 'os.path.join', (['"""/checkpoint/mkarimza"""', 'job_id'], {}), "('/checkpoint/mkarimza', job_id)\n", (9846, 9878), False, 'import os\n'), ((10398, 10425), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (10423, 10425), False, 'import torch\n'), ((17420, 17464), 'pandas.read_csv', 'pd.read_csv', (['metapath'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(metapath, sep='\\t', index_col=0)\n", (17431, 17464), True, 'import pandas as pd\n'), ((17666, 17695), 'numpy.unique', 'np.unique', (["metadf['CellType']"], {}), "(metadf['CellType'])\n", (17675, 17695), True, 'import numpy as np\n'), ((17710, 17767), 'numpy.array', 'np.array', (["[each for each in classes if 'Not' not in each]"], {}), "([each for each in classes if 'Not' not in each])\n", (17718, 17767), True, 'import numpy as np\n'), ((17791, 17848), 'numpy.array', 'np.array', (["[each for each in classes if 'nan' not in each]"], {}), "([each for each in classes if 'nan' not in each])\n", (17799, 17848), True, 'import numpy as np\n'), ((18238, 18271), 'pandas.get_dummies', 'pd.get_dummies', (["outdf['CellType']"], {}), "(outdf['CellType'])\n", (18252, 18271), True, 'import pandas as pd\n'), ((19283, 19320), 'numpy.apply_along_axis', 'np.apply_along_axis', (['np.var', '(0)', 'expar'], {}), '(np.var, 0, expar)\n', (19302, 19320), True, 'import numpy as np\n'), ((19596, 19640), 'numpy.intersect1d', 'np.intersect1d', (['genes_list[0]', 'genes_list[1]'], {}), '(genes_list[0], genes_list[1])\n', (19610, 19640), True, 'import numpy as np\n'), ((20792, 20814), 'pandas.concat', 'pd.concat', (['metadf_list'], {}), '(metadf_list)\n', (20801, 20814), True, 'import pandas as pd\n'), ((21006, 21062), 'numpy.intersect1d', 'np.intersect1d', (['gmtmat_genes', 'genes'], {'return_indices': '(True)'}), '(gmtmat_genes, genes, return_indices=True)\n', (21020, 21062), True, 'import numpy as np\n'), ((21698, 21732), 'pandas.get_dummies', 'pd.get_dummies', (["metadf['CellType']"], {}), 
"(metadf['CellType'])\n", (21712, 21732), True, 'import pandas as pd\n'), ((23914, 24009), 'model.VAE', 'VAE', (['expar.shape[1]', 'gmttensor', 'num_celltypes', 'BATCHEFFECT_NUM', '(0)', 'gmtmat.shape[1]', 'numlvs'], {}), '(expar.shape[1], gmttensor, num_celltypes, BATCHEFFECT_NUM, 0, gmtmat.\n shape[1], numlvs)\n', (23917, 24009), False, 'from model import VAE\n'), ((24420, 24445), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (24443, 24445), False, 'import torch\n'), ((25799, 25819), 'pandas.DataFrame', 'pd.DataFrame', (['tf_act'], {}), '(tf_act)\n', (25811, 25819), True, 'import pandas as pd\n'), ((25842, 25874), 'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (25850, 25874), True, 'import numpy as np\n'), ((26213, 26231), 'pandas.DataFrame', 'pd.DataFrame', (['zmat'], {}), '(zmat)\n', (26225, 26231), True, 'import pandas as pd\n'), ((26339, 26371), 'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (26347, 26371), True, 'import numpy as np\n'), ((26397, 26429), 'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (26405, 26429), True, 'import numpy as np\n'), ((26564, 26604), 'os.path.join', 'os.path.join', (['outdir', '"""fullDatasetZPlot"""'], {}), "(outdir, 'fullDatasetZPlot')\n", (26576, 26604), False, 'import os\n'), ((26618, 26657), 'os.makedirs', 'os.makedirs', (['outdir_full'], {'exist_ok': '(True)'}), '(outdir_full, exist_ok=True)\n', (26629, 26657), False, 'import os\n'), ((26722, 26741), 'pandas.DataFrame', 'pd.DataFrame', (['mumat'], {}), '(mumat)\n', (26734, 26741), True, 'import pandas as pd\n'), ((26853, 26885), 'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (26861, 26885), True, 'import numpy as np\n'), ((27053, 27092), 'os.path.join', 'os.path.join', (['outdir', '"""fullDatasetPlot"""'], {}), "(outdir, 'fullDatasetPlot')\n", 
(27065, 27092), False, 'import os\n'), ((27106, 27145), 'os.makedirs', 'os.makedirs', (['outdir_full'], {'exist_ok': '(True)'}), '(outdir_full, exist_ok=True)\n', (27117, 27145), False, 'import os\n'), ((27212, 27232), 'pandas.DataFrame', 'pd.DataFrame', (['sd2mat'], {}), '(sd2mat)\n', (27224, 27232), True, 'import pandas as pd\n'), ((27562, 27582), 'torch.from_numpy', 'torch.from_numpy', (['mu'], {}), '(mu)\n', (27578, 27582), False, 'import torch\n'), ((27603, 27627), 'torch.from_numpy', 'torch.from_numpy', (['logvar'], {}), '(logvar)\n', (27619, 27627), False, 'import torch\n'), ((27645, 27675), 'torch.exp', 'torch.exp', (['(0.5 * logvar_tensor)'], {}), '(0.5 * logvar_tensor)\n', (27654, 27675), False, 'import torch\n'), ((27693, 27721), 'torch.randn_like', 'torch.randn_like', (['std_tensor'], {}), '(std_tensor)\n', (27709, 27721), False, 'import torch\n'), ((28744, 28777), 'os.path.join', 'os.path.join', (['outdir', '"""genes.txt"""'], {}), "(outdir, 'genes.txt')\n", (28756, 28777), False, 'import os\n'), ((28984, 29007), 'torch.from_numpy', 'torch.from_numpy', (['mumat'], {}), '(mumat)\n', (29000, 29007), False, 'import torch\n'), ((29018, 29042), 'torch.from_numpy', 'torch.from_numpy', (['varmat'], {}), '(varmat)\n', (29034, 29042), False, 'import torch\n'), ((29727, 29761), 'torch.mul', 'torch.mul', (['weight_mat', 'connections'], {}), '(weight_mat, connections)\n', (29736, 29761), False, 'import torch\n'), ((29911, 29942), 'torch.add', 'torch.add', (['ew_times_x', 'add_bias'], {}), '(ew_times_x, add_bias)\n', (29920, 29942), False, 'import torch\n'), ((30279, 30300), 'numpy.zeros', 'np.zeros', (['expar.shape'], {}), '(expar.shape)\n', (30287, 30300), True, 'import numpy as np\n'), ((30313, 30347), 'numpy.zeros', 'np.zeros', (['(expar.shape[0], numlvs)'], {}), '((expar.shape[0], numlvs))\n', (30321, 30347), True, 'import numpy as np\n'), ((30361, 30395), 'numpy.zeros', 'np.zeros', (['(expar.shape[0], numlvs)'], {}), '((expar.shape[0], numlvs))\n', (30369, 
30395), True, 'import numpy as np\n'), ((30416, 30452), 'numpy.zeros', 'np.zeros', (['(expar.shape[0], conn_dim)'], {}), '((expar.shape[0], conn_dim))\n', (30424, 30452), True, 'import numpy as np\n'), ((31734, 31803), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Train VAE using mapping of genes to TFs"""'}), "(description='Train VAE using mapping of genes to TFs')\n", (31748, 31803), False, 'from argparse import ArgumentParser\n'), ((454, 479), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (477, 479), False, 'import torch\n'), ((674, 750), 'pandas.read_csv', 'pd.read_csv', (['nparpath'], {'sep': '"""\t"""', 'index_col': '(0)', 'compression': '"""gzip"""', 'skiprows': '(2)'}), "(nparpath, sep='\\t', index_col=0, compression='gzip', skiprows=2)\n", (685, 750), True, 'import pandas as pd\n'), ((915, 942), 'numpy.array', 'np.array', (['rnadf.iloc[:, 1:]'], {}), '(rnadf.iloc[:, 1:])\n', (923, 942), True, 'import numpy as np\n'), ((962, 992), 'numpy.array', 'np.array', (["rnadf['Description']"], {}), "(rnadf['Description'])\n", (970, 992), True, 'import numpy as np\n'), ((1012, 1039), 'numpy.array', 'np.array', (['rnadf.columns[1:]'], {}), '(rnadf.columns[1:])\n', (1020, 1039), True, 'import numpy as np\n'), ((1066, 1130), 'pandas.read_csv', 'pd.read_csv', (['nparpath'], {'sep': '"""\t"""', 'index_col': '(0)', 'compression': '"""gzip"""'}), "(nparpath, sep='\\t', index_col=0, compression='gzip')\n", (1077, 1130), True, 'import pandas as pd\n'), ((1171, 1186), 'numpy.array', 'np.array', (['rnadf'], {}), '(rnadf)\n', (1179, 1186), True, 'import numpy as np\n'), ((1237, 1260), 'numpy.array', 'np.array', (['rnadf.columns'], {}), '(rnadf.columns)\n', (1245, 1260), True, 'import numpy as np\n'), ((1362, 1398), 'numpy.apply_along_axis', 'np.apply_along_axis', (['np.sum', '(0)', 'npar'], {}), '(np.sum, 0, npar)\n', (1381, 1398), True, 'import numpy as np\n'), ((4746, 4777), 'tables.open_file', 'tables.open_file', (['filename', 
'"""r"""'], {}), "(filename, 'r')\n", (4762, 4777), False, 'import tables\n'), ((5118, 5176), 'scipy.sparse.csc_matrix', 'sp_sparse.csc_matrix', (['(data, indices, indptr)'], {'shape': 'shape'}), '((data, indices, indptr), shape=shape)\n', (5138, 5176), True, 'import scipy.sparse as sp_sparse\n'), ((6048, 6066), 'numpy.transpose', 'np.transpose', (['npar'], {}), '(npar)\n', (6060, 6066), True, 'import numpy as np\n'), ((7892, 7911), 'numpy.load', 'np.load', (['gmtoutpath'], {}), '(gmtoutpath)\n', (7899, 7911), True, 'import numpy as np\n'), ((8368, 8398), 'itertools.chain.from_iterable', 'chain.from_iterable', (['all_genes'], {}), '(all_genes)\n', (8387, 8398), False, 'from itertools import chain\n'), ((8465, 8489), 'os.path.exists', 'os.path.exists', (['genepath'], {}), '(genepath)\n', (8479, 8489), False, 'import os\n'), ((8662, 8701), 'numpy.intersect1d', 'np.intersect1d', (['all_genes', 'select_genes'], {}), '(all_genes, select_genes)\n', (8676, 8701), True, 'import numpy as np\n'), ((9907, 9929), 'os.path.exists', 'os.path.exists', (['chkdir'], {}), '(chkdir)\n', (9921, 9929), False, 'import os\n'), ((9948, 9982), 'os.path.join', 'os.path.join', (['logdir', '"""checkpoint"""'], {}), "(logdir, 'checkpoint')\n", (9960, 9982), False, 'import os\n'), ((10004, 10038), 'os.makedirs', 'os.makedirs', (['chkdir'], {'exist_ok': '(True)'}), '(chkdir, exist_ok=True)\n', (10015, 10038), False, 'import os\n'), ((10445, 10459), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10457, 10459), False, 'from datetime import datetime\n'), ((11412, 11437), 'numpy.arange', 'np.arange', (['expar.shape[0]'], {}), '(expar.shape[0])\n', (11421, 11437), True, 'import numpy as np\n'), ((11653, 11677), 'numpy.zeros', 'np.zeros', (['expar.shape[0]'], {}), '(expar.shape[0])\n', (11661, 11677), True, 'import numpy as np\n'), ((11718, 11742), 'numpy.zeros', 'np.zeros', (['expar.shape[0]'], {}), '(expar.shape[0])\n', (11726, 11742), True, 'import numpy as np\n'), ((17374, 17406), 
'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (17382, 17406), True, 'import numpy as np\n'), ((18033, 18060), 'numpy.array', 'np.array', (["metadf['Barcode']"], {}), "(metadf['Barcode'])\n", (18041, 18060), True, 'import numpy as np\n'), ((18172, 18204), 'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (18180, 18204), True, 'import numpy as np\n'), ((18310, 18339), 'numpy.array', 'np.array', (['one_hot_ct_encoding'], {}), '(one_hot_ct_encoding)\n', (18318, 18339), True, 'import numpy as np\n'), ((19338, 19360), 'numpy.argsort', 'np.argsort', (['vars_genes'], {}), '(vars_genes)\n', (19348, 19360), True, 'import numpy as np\n'), ((19697, 19733), 'numpy.intersect1d', 'np.intersect1d', (['genes', 'genes_list[i]'], {}), '(genes, genes_list[i])\n', (19711, 19733), True, 'import numpy as np\n'), ((21349, 21402), 'numpy.intersect1d', 'np.intersect1d', (['genes', 'cur_genes'], {'return_indices': '(True)'}), '(genes, cur_genes, return_indices=True)\n', (21363, 21402), True, 'import numpy as np\n'), ((21771, 21800), 'numpy.array', 'np.array', (['one_hot_ct_encoding'], {}), '(one_hot_ct_encoding)\n', (21779, 21800), True, 'import numpy as np\n'), ((23261, 23300), 'os.path.join', 'os.path.join', (['outdir', '"""metadata.tsv.gz"""'], {}), "(outdir, 'metadata.tsv.gz')\n", (23273, 23300), False, 'import os\n'), ((24472, 24523), 'apex.amp.initialize', 'amp.initialize', (['vae', 'optimizer'], {'opt_level': 'opt_level'}), '(vae, optimizer, opt_level=opt_level)\n', (24486, 24523), False, 'from apex import amp\n'), ((24662, 24680), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (24676, 24680), True, 'import numpy as np\n'), ((26008, 26070), 'os.path.join', 'os.path.join', (['outdir', '"""VAE-TF-adjusted-weights_CellxTF.tsv.gz"""'], {}), "(outdir, 'VAE-TF-adjusted-weights_CellxTF.tsv.gz')\n", (26020, 26070), False, 'import os\n'), ((26463, 26506), 'os.path.join', 
'os.path.join', (['outdir', '"""VAE_Z-matrix.tsv.gz"""'], {}), "(outdir, 'VAE_Z-matrix.tsv.gz')\n", (26475, 26506), False, 'import os\n'), ((26951, 26995), 'os.path.join', 'os.path.join', (['outdir', '"""VAE_mu-matrix.tsv.gz"""'], {}), "(outdir, 'VAE_mu-matrix.tsv.gz')\n", (26963, 26995), False, 'import os\n'), ((27419, 27469), 'os.path.join', 'os.path.join', (['outdir', '"""VAE_variance-matrix.tsv.gz"""'], {}), "(outdir, 'VAE_variance-matrix.tsv.gz')\n", (27431, 27469), False, 'import os\n'), ((27931, 27955), 'os.path.exists', 'os.path.exists', (['eachpath'], {}), '(eachpath)\n', (27945, 27955), False, 'import os\n'), ((29344, 29383), 'torch.zeros', 'torch.zeros', (['(train1.shape[0], n_batch)'], {}), '((train1.shape[0], n_batch))\n', (29355, 29383), False, 'import torch\n'), ((2165, 2218), 'umap.UMAP', 'umap.UMAP', ([], {'n_neighbors': 'n_neighbors', 'min_dist': 'min_dist'}), '(n_neighbors=n_neighbors, min_dist=min_dist)\n', (2174, 2218), False, 'import umap\n'), ((2331, 2354), 'pandas.DataFrame', 'pd.DataFrame', (['embedding'], {}), '(embedding)\n', (2343, 2354), True, 'import pandas as pd\n'), ((2689, 2783), 'seaborn.relplot', 'sns.relplot', ([], {'x': '"""UMAP1"""', 'y': '"""UMAP2"""', 'hue': '"""CellType"""', 'data': 'umap_output', 'height': '(6)', 'aspect': '(1.5)'}), "(x='UMAP1', y='UMAP2', hue='CellType', data=umap_output, height=\n 6, aspect=1.5)\n", (2700, 2783), True, 'import seaborn as sns\n'), ((6794, 6853), 'numpy.array', 'np.array', (["(feature_ref['feature_type'] == b'Gene Expression')"], {}), "(feature_ref['feature_type'] == b'Gene Expression')\n", (6802, 6853), True, 'import numpy as np\n'), ((8914, 8937), 'numpy.where', 'np.where', (['(all_tfs == tf)'], {}), '(all_tfs == tf)\n', (8922, 8937), True, 'import numpy as np\n'), ((12814, 13005), 'model.loss_function', 'loss_function', (["outdict['qz_m']", "outdict['qz_v']", 'train1', "outdict['px_rate']", "outdict['px_r']", "outdict['px_dropout']", "outdict['ql_m']", "outdict['ql_v']", '(True)', 
'local_l_mean', 'local_l_var'], {}), "(outdict['qz_m'], outdict['qz_v'], train1, outdict['px_rate'],\n outdict['px_r'], outdict['px_dropout'], outdict['ql_m'], outdict['ql_v'\n ], True, local_l_mean, local_l_var)\n", (12827, 13005), False, 'from model import loss_function\n'), ((13099, 13117), 'torch.mean', 'torch.mean', (['loss_1'], {}), '(loss_1)\n', (13109, 13117), False, 'import torch\n'), ((13139, 13157), 'torch.mean', 'torch.mean', (['loss_2'], {}), '(loss_2)\n', (13149, 13157), False, 'import torch\n'), ((14934, 14951), 'torch.isnan', 'torch.isnan', (['loss'], {}), '(loss)\n', (14945, 14951), False, 'import torch\n'), ((15122, 15147), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (15145, 15147), False, 'import torch\n'), ((15621, 15646), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (15644, 15646), False, 'import torch\n'), ((16105, 16141), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['resp_cell', 'pred_cell'], {}), '(resp_cell, pred_cell)\n', (16119, 16141), False, 'from sklearn.metrics import accuracy_score\n'), ((17080, 17105), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (17103, 17105), False, 'import torch\n'), ((18889, 18923), 'train_multitask_ccle.make_labels', 'tmp_fnc', (['metapath', 'expar', 'barcodes'], {}), '(metapath, expar, barcodes)\n', (18896, 18923), True, 'from train_multitask_ccle import make_labels as tmp_fnc\n'), ((20378, 20420), 'numpy.array', 'np.array', (["metadf['CellType']"], {'dtype': '"""|U64"""'}), "(metadf['CellType'], dtype='|U64')\n", (20386, 20420), True, 'import numpy as np\n'), ((21883, 21910), 'numpy.array', 'np.array', (["metadf['Barcode']"], {}), "(metadf['Barcode'])\n", (21891, 21910), True, 'import numpy as np\n'), ((21974, 21998), 'numpy.array', 'np.array', (['celltypes_list'], {}), '(celltypes_list)\n', (21982, 21998), True, 'import numpy as np\n'), ((22018, 22049), 'numpy.array', 'np.array', (["metadf['Batch.Index']"], {}), 
"(metadf['Batch.Index'])\n", (22026, 22049), True, 'import numpy as np\n'), ((23160, 23189), 'pandas.unique', 'pd.unique', (["metadf['CellType']"], {}), "(metadf['CellType'])\n", (23169, 23189), True, 'import pandas as pd\n'), ((23481, 23494), 'numpy.max', 'np.max', (['expar'], {}), '(expar)\n', (23487, 23494), True, 'import numpy as np\n'), ((25248, 25267), 'pandas.DataFrame', 'pd.DataFrame', (['mumat'], {}), '(mumat)\n', (25260, 25267), True, 'import pandas as pd\n'), ((25403, 25435), 'numpy.array', 'np.array', (['barcodes'], {'dtype': '"""|U64"""'}), "(barcodes, dtype='|U64')\n", (25411, 25435), True, 'import numpy as np\n'), ((30168, 30189), 'numpy.unique', 'np.unique', (['batch_idxs'], {}), '(batch_idxs)\n', (30177, 30189), True, 'import numpy as np\n'), ((2578, 2618), 'os.path.join', 'os.path.join', (['outdir', "(adname + '.tsv.gz')"], {}), "(outdir, adname + '.tsv.gz')\n", (2590, 2618), False, 'import os\n'), ((2858, 2895), 'os.path.join', 'os.path.join', (['outdir', "(adname + '.pdf')"], {}), "(outdir, adname + '.pdf')\n", (2870, 2895), False, 'import os\n'), ((2943, 2980), 'os.path.join', 'os.path.join', (['outdir', "(adname + '.png')"], {}), "(outdir, adname + '.png')\n", (2955, 2980), False, 'import os\n'), ((9060, 9087), 'numpy.where', 'np.where', (['(all_genes == gene)'], {}), '(all_genes == gene)\n', (9068, 9087), True, 'import numpy as np\n'), ((12399, 12450), 'numpy.apply_along_axis', 'np.apply_along_axis', (['np.sum', '(1)', 'expar[cur_sidxs, :]'], {}), '(np.sum, 1, expar[cur_sidxs, :])\n', (12418, 12450), True, 'import numpy as np\n'), ((12523, 12574), 'numpy.apply_along_axis', 'np.apply_along_axis', (['np.sum', '(1)', 'expar[cur_sidxs, :]'], {}), '(np.sum, 1, expar[cur_sidxs, :])\n', (12542, 12574), True, 'import numpy as np\n'), ((15664, 15688), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (15686, 15688), False, 'import torch\n'), ((16437, 16451), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16449, 16451), 
False, 'from datetime import datetime\n'), ((17143, 17159), 'apex.amp.state_dict', 'amp.state_dict', ([], {}), '()\n', (17157, 17159), False, 'from apex import amp\n'), ((17226, 17258), 'torch.save', 'torch.save', (['checkpoint', 'eachpath'], {}), '(checkpoint, eachpath)\n', (17236, 17258), False, 'import torch\n'), ((25533, 25577), 'os.path.join', 'os.path.join', (['outdir', '"""VAE_mu-matrix.tsv.gz"""'], {}), "(outdir, 'VAE_mu-matrix.tsv.gz')\n", (25545, 25577), False, 'import os\n'), ((28003, 28023), 'torch.load', 'torch.load', (['eachpath'], {}), '(eachpath)\n', (28013, 28023), False, 'import torch\n'), ((28106, 28119), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (28117, 28119), False, 'from collections import OrderedDict\n'), ((28397, 28422), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (28420, 28422), False, 'import torch\n'), ((29437, 29465), 'numpy.where', 'np.where', (['(batch_ar_temp == j)'], {}), '(batch_ar_temp == j)\n', (29445, 29465), True, 'import numpy as np\n'), ((13395, 13416), 'torch.max', 'torch.max', (['ct_pred', '(1)'], {}), '(ct_pred, 1)\n', (13404, 13416), False, 'import torch\n'), ((14305, 14325), 'numpy.min', 'np.min', (['loss_scalers'], {}), '(loss_scalers)\n', (14311, 14325), True, 'import numpy as np\n'), ((14660, 14680), 'numpy.min', 'np.min', (['loss_scalers'], {}), '(loss_scalers)\n', (14666, 14680), True, 'import numpy as np\n'), ((14830, 14859), 'torch.tensor', 'torch.tensor', (['loss_scalers[2]'], {}), '(loss_scalers[2])\n', (14842, 14859), False, 'import torch\n'), ((15170, 15201), 'apex.amp.scale_loss', 'amp.scale_loss', (['loss', 'optimizer'], {}), '(loss, optimizer)\n', (15184, 15201), False, 'from apex import amp\n'), ((16713, 16727), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16725, 16727), False, 'from datetime import datetime\n'), ((28444, 28482), 'apex.amp.load_state_dict', 'amp.load_state_dict', (["checkpoint['amp']"], {}), "(checkpoint['amp'])\n", (28463, 
28482), False, 'from apex import amp\n'), ((14710, 14739), 'torch.tensor', 'torch.tensor', (['loss_scalers[0]'], {}), '(loss_scalers[0])\n', (14722, 14739), False, 'import torch\n'), ((14770, 14799), 'torch.tensor', 'torch.tensor', (['loss_scalers[1]'], {}), '(loss_scalers[1])\n', (14782, 14799), False, 'import torch\n'), ((23641, 23685), 'torch.ones', 'torch.ones', (['gmtmat.shape[1]', 'gmtmat.shape[0]'], {}), '(gmtmat.shape[1], gmtmat.shape[0])\n', (23651, 23685), False, 'import torch\n'), ((30753, 30805), 'torch.from_numpy', 'torch.from_numpy', (['expar[idxbatch_st:idxbatch_end, :]'], {}), '(expar[idxbatch_st:idxbatch_end, :])\n', (30769, 30805), False, 'import torch\n'), ((12079, 12116), 'torch.from_numpy', 'torch.from_numpy', (['expar[cur_sidxs, :]'], {}), '(expar[cur_sidxs, :])\n', (12095, 12116), False, 'import torch\n'), ((23571, 23591), 'numpy.transpose', 'np.transpose', (['gmtmat'], {}), '(gmtmat)\n', (23583, 23591), True, 'import numpy as np\n'), ((13257, 13301), 'torch.max', 'torch.max', (['one_hot_ct_encoding[cur_sidxs]', '(1)'], {}), '(one_hot_ct_encoding[cur_sidxs], 1)\n', (13266, 13301), False, 'import torch\n'), ((30944, 30998), 'torch.from_numpy', 'torch.from_numpy', (['batch_idxs[idxbatch_st:idxbatch_end]'], {}), '(batch_idxs[idxbatch_st:idxbatch_end])\n', (30960, 30998), False, 'import torch\n'), ((12228, 12267), 'torch.from_numpy', 'torch.from_numpy', (['batch_idxs[cur_sidxs]'], {}), '(batch_idxs[cur_sidxs])\n', (12244, 12267), False, 'import torch\n')]
|
# Simulate a 100-step dice-driven random walk (seeded for reproducibility)
# and plot the resulting path.
import numpy as np
np.random.seed(123)

# The walk starts at position 0.
random_walk = [0]
for _ in range(100):
    position = random_walk[-1]
    # One die roll decides the move for this step.
    roll = np.random.randint(1, 7)
    if roll <= 2:
        # Step down, but the walk can never go below 0.
        position = max(0, position - 1)
    elif roll <= 5:
        position += 1
    else:
        # A six grants an extra throw; jump ahead by its value.
        position += np.random.randint(1, 7)
    random_walk.append(position)

# Plot the walk.
import matplotlib.pyplot as plt
plt.plot(random_walk)
plt.show()
|
[
"numpy.random.randint",
"numpy.random.seed",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.show"
] |
[((52, 71), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (66, 71), True, 'import numpy as np\n'), ((458, 479), 'matplotlib.pyplot.plot', 'plt.plot', (['random_walk'], {}), '(random_walk)\n', (466, 479), True, 'import matplotlib.pyplot as plt\n'), ((497, 507), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (505, 507), True, 'import matplotlib.pyplot as plt\n'), ((168, 191), 'numpy.random.randint', 'np.random.randint', (['(1)', '(7)'], {}), '(1, 7)\n', (185, 191), True, 'import numpy as np\n'), ((318, 341), 'numpy.random.randint', 'np.random.randint', (['(1)', '(7)'], {}), '(1, 7)\n', (335, 341), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
from netCDF4 import Dataset # pylint: disable=no-name-in-module
import numpy as np
#########################################################
# Class for ROMS grd and clm files
# (For use in various post-processing scripts)
#########################################################
class getGrid(object):
    """
    Lightweight reader for a ROMS grid file (and optional climatology files).

    The constructor loads the land/sea mask and the horizontal dimensions;
    all other fields (topography, coordinates, cell areas, angles, ...) are
    read on demand by the dedicated accessor methods.
    """

    def __init__(self, grdfile):
        # Open the grid file and keep the handle for later reads.
        self.grdfile = grdfile
        self.ncgrd = Dataset(grdfile, mode='r')
        # Land/sea mask on rho points, plus its fill value.
        self.mask_rho = self.ncgrd.variables['mask_rho'][:]
        self.FillValue = self.ncgrd.variables['mask_rho']._FillValue
        # Horizontal grid dimensions (rows, columns).
        self.SY = self.mask_rho.shape[0]
        self.SX = self.mask_rho.shape[1]

    def getAttrs(self, clmfile):
        """Read s-coordinate parameters (theta_s, theta_b, hc) from a clm file."""
        self.ncclm = Dataset(clmfile, mode='r')
        try:
            # Some files store the parameters as global attributes ...
            self.theta_s = self.ncclm.theta_s
            self.theta_b = self.ncclm.theta_b
            self.hc = self.ncclm.hc
        except AttributeError:
            # ... others store them as scalar variables.
            self.theta_s = self.ncclm.variables['theta_s'][0]
            self.theta_b = self.ncclm.variables['theta_b'][0]
            self.hc = self.ncclm.variables['hc'][0]
        # Number of vertical sigma (rho) levels.
        self.NZ = self.ncclm.dimensions['s_rho'].size

    def setClmFiles(self, clmfile, clm2file):
        """Attach clm and clm2 file handles (an existing clm handle is reused)."""
        if not hasattr(self, 'ncclm'):
            self.ncclm = Dataset(clmfile, mode='r')
        self.ncclm2 = Dataset(clm2file, mode='r')

    def getTopo(self):
        """Read bathymetry and its global hmin/hmax attributes."""
        self.h = self.ncgrd.variables['h'][:]
        self.hmin = self.ncgrd.hmin
        self.hmax = self.ncgrd.hmax

    def getLatLon(self):
        """Read geographic coordinates of the rho points."""
        self.lon_rho = self.ncgrd.variables['lon_rho'][:]
        self.lat_rho = self.ncgrd.variables['lat_rho'][:]

    def getArea(self):
        """Compute cell areas from the inverse grid spacings pm and pn."""
        self.area = 1 / (self.ncgrd.variables['pm'][:] * self.ncgrd.variables['pn'][:])

    def getAngle(self):
        """Read the local rotation angle of the grid."""
        self.angle = self.ncgrd.variables['angle'][:]
#########################################################
# Vertical sigma level depths and spacing
#########################################################
def compute_zlev(fpin,fpin_grd,NZ,type,zeta=None,stype=3):
    """
    Compute depths of ROMS sigma levels (negative downward).

    Parameters
    ----------
    fpin : open NetCDF file providing theta_b, theta_s and Tcline/hc
        (either as global attributes or as scalar variables).
    fpin_grd : open NetCDF file providing the bathymetry variable 'h'.
    NZ : int
        Number of vertical (rho) levels.
    type : str
        'r' for rho points, 'w' for w points (layer interfaces).
    zeta : array or None
        Optional free-surface elevation; when given, depths are adjusted
        from the zero-SSH reference.
    stype : int
        Sigma-level formulation:
        1: similar to Song, Haidvogel 1994
        2: Shchepetkin 2006
        3: Shchepetkin 2010 (or so)

    Returns
    -------
    z0 : ndarray, shape (nr_zlev, SY, SX)
        Depths of the requested points.
    """
    import numpy as np
    import sys
    h = fpin_grd.variables['h'][:,:]
    # theta_b/theta_s may be stored as attributes or as scalar variables.
    try:
        theta_b = fpin.theta_b
        theta_s = fpin.theta_s
    except AttributeError:
        theta_b = fpin.variables['theta_b'][0]
        theta_s = fpin.variables['theta_s'][0]
    if stype == 1:
        # Bug fix: min(min(h)) raises ValueError on a 2-D numpy array
        # (truth value of a row is ambiguous); use the array minimum.
        hmin = h.min()
        try:
            Tcline = fpin.Tcline
            hc = min(hmin,Tcline)
        except AttributeError:
            hc = fpin.hc
            hc = min(hmin,hc)
    elif stype == 2 or stype == 3:
        try:
            hc = fpin.hc
        except AttributeError:
            # hc may be a variable:
            hc = fpin.variables['hc'][0]
    else:
        msg = '{}: Unknown type of sigma levels'.format(stype)
        sys.exit(msg)
    ds = 1./NZ  # float, to prevent integer division in sc
    if type == 'w':
        lev = np.arange(NZ+1)
        sc = (lev - NZ) * ds
        nr_zlev = NZ+1  # number of vertical levels
    else:
        lev = np.arange(1,NZ+1)
        sc = -1 + (lev-0.5)*ds
        nr_zlev = NZ  # number of vertical levels
    # Stretching curve Cs(sc) for the chosen formulation. The Song/Haidvogel
    # curves are only needed for stype <= 2, so compute them in that branch
    # (also avoids a division by sinh(0) when stype == 3 with theta_s == 0).
    if stype <= 2:
        Ptheta = np.sinh(theta_s*sc)/np.sinh(theta_s)
        Rtheta = np.tanh(theta_s*(sc+.5))/(2*np.tanh(.5*theta_s))-.5
        Cs = (1-theta_b)*Ptheta+theta_b*Rtheta
    elif stype == 3:
        if theta_s > 0:
            csrf=(1.-np.cosh(theta_s*sc))/(np.cosh(theta_s)-1.)
        else:
            csrf=-sc**2
        if theta_b > 0:
            Cs=(np.exp(theta_b*csrf)-1.)/(1.-np.exp(-theta_b))
        else:
            Cs=csrf
    # Bug fix: np.float was removed in NumPy 1.24; use an explicit float64.
    z0 = np.zeros((nr_zlev,h.shape[0],h.shape[1]),np.float64)
    if stype == 1:
        cff = (sc-Cs)*hc
        cff1 = Cs
        hinv = 1.0 / h
        for k in range(nr_zlev):
            z0[k,:,:] = cff[k]+cff1[k]*h
            if not (zeta is None):
                # Stretch the levels with the free surface.
                z0[k,:,:] = z0[k,:,:]+zeta*(1.+z0[k,:,:]*hinv)
    elif stype == 2 or stype == 3:
        hinv = 1.0/(h+hc)
        cff = hc*sc
        cff1 = Cs
        for k in range(nr_zlev):
            tmp1 = cff[k]+cff1[k]*h
            tmp2 = np.multiply(tmp1,hinv)
            if zeta is None:
                z0[k,:,:] = np.multiply(h,tmp2)
            else:
                z0[k,:,:] = zeta + np.multiply((zeta+h),tmp2)
    return z0
def compute_dz(fpin,fpin_grd,NZ,zeta=None,stype=3):
    """
    Compute the vertical thickness of each sigma layer.

    Parameters
    ----------
    fpin : open NetCDF file providing theta_b, theta_s and Tcline/hc.
    fpin_grd : open NetCDF file providing the bathymetry variable 'h'.
    NZ : int
        Number of vertical (rho) levels.
    zeta : array or None
        Optional free-surface elevation passed through to compute_zlev.
    stype : int
        Sigma-level formulation (see compute_zlev).

    Returns
    -------
    dz_sigma : ndarray, shape (NZ, SY, SX)
        Thickness of each sigma layer.
    """
    # Depths of the w levels (layer interfaces), made positive downward.
    # Bug fix: forward the caller's stype instead of hard-coding 3, so the
    # requested sigma-coordinate formulation is actually honoured.
    depth_w = -compute_zlev(fpin,fpin_grd,NZ,type='w',zeta=zeta,stype=stype)
    # Layer thickness = difference between consecutive interface depths.
    dz_sigma = depth_w[:-1]-depth_w[1:]
    return dz_sigma
#########################################################
# Additions from Max Simon
# Author: <NAME>
# Year: 2020
#########################################################
def get_cell_heights(z_values, depth):
	"""Compute the vertical thickness of every grid cell from point positions.

	Structure if depth is False (rho points, index 0 at the bottom):
		------------- // surface
		x             // rho point, last index
		...
		x             // rho point, idx 0
		------------- // ground
	Structure if depth is True (depth points, index 0 at the surface):
		------------- // surface
		x             // depth point, idx 0
		...
		x             // depth point, last index
		------------- // ground

	The walk always proceeds from the surface downwards. At each point the
	distance to the face of the previous cell gives half the cell height, so
	the cell height is twice that distance; the running face position is then
	advanced past the current point. A point that sits exactly at 0 on the
	surface is treated as a special (half-size) surface cell.

	Note: if depth is False the bookkeeping is carried per horizontal grid
	point (arrays), otherwise a single scalar suffices.
	"""
	thicknesses = np.zeros_like(z_values)
	# Position of the last processed cell face, measured from the surface.
	if depth:
		prev_face = 0.0
	else:
		prev_face = np.zeros((z_values.shape[1], z_values.shape[2]))
	pending_zero_case = False
	n_levels = z_values.shape[0]
	for step in range(n_levels):
		# Surface-to-bottom order: ascending indices for depth arrays,
		# descending indices for rho arrays.
		level = step if depth else (n_levels - step - 1)
		# Surface point sitting exactly at 0: defer handling to the next level.
		if level == 0 and (z_values[level] == 0).any():
			assert (z_values[level] == 0).all()
			print('Zero Edge Case detected')
			pending_zero_case = True
			continue
		# Half the current cell height: distance from point to previous face.
		half_height = np.abs(z_values[level]) - prev_face
		if level == 1 and pending_zero_case:
			half_height = 0.5 * half_height
			surface_idx = 0 if depth else -1
			thicknesses[surface_idx] = half_height
			pending_zero_case = False
			print('Zero Edge Case solved')
		assert np.array(half_height >= 0).all(), (step, level, z_values[level], prev_face, half_height)
		thicknesses[level] = 2 * half_height
		# Advance the face past the current point.
		prev_face = np.abs(z_values[level]) + half_height
	return thicknesses
def create_zlevel_file(grid_path, sample_data_path, out_path):
	"""
		Create a netCDF file containing the zlevels
	"""
	# Peek at the sample file to decide which layout it has: a file that was
	# already interpolated to fixed z slices carries a 'depth' dimension.
	sample_data = Dataset(sample_data_path)
	is_zslice_file = 'depth' in sample_data.dimensions
	if is_zslice_file:
		print('Sample Data is z sliced')
		z_levels = np.array(sample_data['depth'])
		# depth=True: 1-D depth axis, surface first (see get_cell_heights).
		z_thickness = get_cell_heights(z_levels, True)
		# Sanity check: half the bottom cell plus the cells above must reach
		# the deepest level exactly.
		assert np.sum(z_thickness[:-1]) + 0.5*z_thickness[-1] == abs(z_levels[-1]), (np.sum(z_thickness[:-1]), z_thickness[-1], z_levels[-1])
		with Dataset(out_path, mode='w') as new_dataset:
			# copy global attributes all at once via dictionary
			new_dataset.createDimension('depth', len(z_levels))
			# save zlevels
			new_dataset.createVariable('z_level', np.float32, dimensions=('depth',))
			new_dataset['z_level'][:] = np.abs(z_levels)
			new_dataset.createVariable('thickness_z', np.float32, dimensions=('depth'))
			new_dataset['thickness_z'][:] = np.abs(z_thickness)
	else:
		sample_data.close() # just make sure that we dont interfer with other routines
		print('Sample Data is raw ROMS output')
		# calculate the zlevels
		grid = Dataset(grid_path)
		sample_data = Dataset(sample_data_path)
		n_s_rho = sample_data.dimensions['s_rho'].size
		n_eta_rho = sample_data.dimensions['eta_rho'].size
		n_xi_rho = sample_data.dimensions['xi_rho'].size
		# Depths at rho points (cell centres) and w points (cell faces).
		z_levels_rho = compute_zlev(sample_data, grid, n_s_rho, 'r')
		z_levels_w = compute_zlev(sample_data, grid, n_s_rho, 'w')
		z_thickness_rho = get_cell_heights(z_levels_rho, False)
		# The summed thicknesses should reproduce the bathymetry h (per column).
		control = np.sum(z_thickness_rho, axis=0) - np.array(grid['h'])
		assert np.max(np.abs(control)) < 5, 'Height calculation differs more than 5m'
		with Dataset(out_path, mode='w') as new_dataset:
			# copy global attributes all at once via dictionary
			new_dataset.createDimension('s_rho', n_s_rho)
			new_dataset.createDimension('eta_rho', n_eta_rho)
			new_dataset.createDimension('xi_rho', n_xi_rho)
			new_dataset.createDimension('s_w', n_s_rho + 1)
			# save zlevels
			new_dataset.createVariable('z_level', np.float32, dimensions=('s_rho', 'eta_rho', 'xi_rho'))
			new_dataset['z_level'][:] = np.abs(z_levels_rho)
			new_dataset.createVariable('z_level_w', np.float32, dimensions=('s_w', 'eta_rho', 'xi_rho'))
			new_dataset['z_level_w'][:] = np.abs(z_levels_w)
			new_dataset.createVariable('thickness_z', np.float32, dimensions=('s_rho', 'eta_rho', 'xi_rho'))
			new_dataset['thickness_z'][:] = np.abs(z_thickness_rho)
if __name__ == "__main__":
	import argparse
	# Build the command-line interface for z-level file generation.
	parser = argparse.ArgumentParser()
	# add arguments
	parser.add_argument('--input', type=str, required=True, help="Sample Input Path")
	parser.add_argument('--grid', type=str, required=True, help="Grid path")
	# BUGFIX: --output used to be optional with no default, so omitting it
	# crashed later when Dataset(None, mode='w') was attempted; fail fast here.
	parser.add_argument('--output', type=str, required=True, help="Output path")
	args = parser.parse_args()
	# execute
	create_zlevel_file(args.grid, args.input, args.output)
|
[
"netCDF4.Dataset",
"numpy.zeros_like",
"numpy.abs",
"argparse.ArgumentParser",
"numpy.tanh",
"numpy.sum",
"numpy.multiply",
"numpy.zeros",
"numpy.arange",
"numpy.array",
"numpy.exp",
"numpy.cosh",
"numpy.sinh",
"sys.exit"
] |
[((4031, 4084), 'numpy.zeros', 'np.zeros', (['(nr_zlev, h.shape[0], h.shape[1])', 'np.float'], {}), '((nr_zlev, h.shape[0], h.shape[1]), np.float)\n', (4039, 4084), True, 'import numpy as np\n'), ((6626, 6649), 'numpy.zeros_like', 'np.zeros_like', (['z_values'], {}), '(z_values)\n', (6639, 6649), True, 'import numpy as np\n'), ((7689, 7714), 'netCDF4.Dataset', 'Dataset', (['sample_data_path'], {}), '(sample_data_path)\n', (7696, 7714), False, 'from netCDF4 import Dataset\n'), ((10067, 10092), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10090, 10092), False, 'import argparse\n'), ((534, 560), 'netCDF4.Dataset', 'Dataset', (['grdfile'], {'mode': '"""r"""'}), "(grdfile, mode='r')\n", (541, 560), False, 'from netCDF4 import Dataset\n'), ((857, 883), 'netCDF4.Dataset', 'Dataset', (['clmfile'], {'mode': '"""r"""'}), "(clmfile, mode='r')\n", (864, 883), False, 'from netCDF4 import Dataset\n'), ((1488, 1515), 'netCDF4.Dataset', 'Dataset', (['clm2file'], {'mode': '"""r"""'}), "(clm2file, mode='r')\n", (1495, 1515), False, 'from netCDF4 import Dataset\n'), ((3469, 3486), 'numpy.arange', 'np.arange', (['(NZ + 1)'], {}), '(NZ + 1)\n', (3478, 3486), True, 'import numpy as np\n'), ((3568, 3588), 'numpy.arange', 'np.arange', (['(1)', '(NZ + 1)'], {}), '(1, NZ + 1)\n', (3577, 3588), True, 'import numpy as np\n'), ((3664, 3685), 'numpy.sinh', 'np.sinh', (['(theta_s * sc)'], {}), '(theta_s * sc)\n', (3671, 3685), True, 'import numpy as np\n'), ((3684, 3700), 'numpy.sinh', 'np.sinh', (['theta_s'], {}), '(theta_s)\n', (3691, 3700), True, 'import numpy as np\n'), ((6683, 6731), 'numpy.zeros', 'np.zeros', (['(z_values.shape[1], z_values.shape[2])'], {}), '((z_values.shape[1], z_values.shape[2]))\n', (6691, 6731), True, 'import numpy as np\n'), ((7836, 7866), 'numpy.array', 'np.array', (["sample_data['depth']"], {}), "(sample_data['depth'])\n", (7844, 7866), True, 'import numpy as np\n'), ((8659, 8677), 'netCDF4.Dataset', 'Dataset', (['grid_path'], {}), 
'(grid_path)\n', (8666, 8677), False, 'from netCDF4 import Dataset\n'), ((8694, 8719), 'netCDF4.Dataset', 'Dataset', (['sample_data_path'], {}), '(sample_data_path)\n', (8701, 8719), False, 'from netCDF4 import Dataset\n'), ((1427, 1453), 'netCDF4.Dataset', 'Dataset', (['clmfile'], {'mode': '"""r"""'}), "(clmfile, mode='r')\n", (1434, 1453), False, 'from netCDF4 import Dataset\n'), ((3374, 3387), 'sys.exit', 'sys.exit', (['msg'], {}), '(msg)\n', (3382, 3387), False, 'import sys\n'), ((3711, 3740), 'numpy.tanh', 'np.tanh', (['(theta_s * (sc + 0.5))'], {}), '(theta_s * (sc + 0.5))\n', (3718, 3740), True, 'import numpy as np\n'), ((7112, 7134), 'numpy.abs', 'np.abs', (['z_values[srho]'], {}), '(z_values[srho])\n', (7118, 7134), True, 'import numpy as np\n'), ((7508, 7530), 'numpy.abs', 'np.abs', (['z_values[srho]'], {}), '(z_values[srho])\n', (7514, 7530), True, 'import numpy as np\n'), ((7996, 8020), 'numpy.sum', 'np.sum', (['z_thickness[:-1]'], {}), '(z_thickness[:-1])\n', (8002, 8020), True, 'import numpy as np\n'), ((8062, 8089), 'netCDF4.Dataset', 'Dataset', (['out_path'], {'mode': '"""w"""'}), "(out_path, mode='w')\n", (8069, 8089), False, 'from netCDF4 import Dataset\n'), ((8341, 8357), 'numpy.abs', 'np.abs', (['z_levels'], {}), '(z_levels)\n', (8347, 8357), True, 'import numpy as np\n'), ((8472, 8491), 'numpy.abs', 'np.abs', (['z_thickness'], {}), '(z_thickness)\n', (8478, 8491), True, 'import numpy as np\n'), ((9069, 9100), 'numpy.sum', 'np.sum', (['z_thickness_rho'], {'axis': '(0)'}), '(z_thickness_rho, axis=0)\n', (9075, 9100), True, 'import numpy as np\n'), ((9103, 9122), 'numpy.array', 'np.array', (["grid['h']"], {}), "(grid['h'])\n", (9111, 9122), True, 'import numpy as np\n'), ((9211, 9238), 'netCDF4.Dataset', 'Dataset', (['out_path'], {'mode': '"""w"""'}), "(out_path, mode='w')\n", (9218, 9238), False, 'from netCDF4 import Dataset\n'), ((9663, 9683), 'numpy.abs', 'np.abs', (['z_levels_rho'], {}), '(z_levels_rho)\n', (9669, 9683), True, 'import numpy as 
np\n'), ((9813, 9831), 'numpy.abs', 'np.abs', (['z_levels_w'], {}), '(z_levels_w)\n', (9819, 9831), True, 'import numpy as np\n'), ((9967, 9990), 'numpy.abs', 'np.abs', (['z_thickness_rho'], {}), '(z_thickness_rho)\n', (9973, 9990), True, 'import numpy as np\n'), ((3739, 3761), 'numpy.tanh', 'np.tanh', (['(0.5 * theta_s)'], {}), '(0.5 * theta_s)\n', (3746, 3761), True, 'import numpy as np\n'), ((4424, 4447), 'numpy.multiply', 'np.multiply', (['tmp1', 'hinv'], {}), '(tmp1, hinv)\n', (4435, 4447), True, 'import numpy as np\n'), ((7365, 7384), 'numpy.array', 'np.array', (['(half >= 0)'], {}), '(half >= 0)\n', (7373, 7384), True, 'import numpy as np\n'), ((7926, 7950), 'numpy.sum', 'np.sum', (['z_thickness[:-1]'], {}), '(z_thickness[:-1])\n', (7932, 7950), True, 'import numpy as np\n'), ((9139, 9154), 'numpy.abs', 'np.abs', (['control'], {}), '(control)\n', (9145, 9154), True, 'import numpy as np\n'), ((4483, 4503), 'numpy.multiply', 'np.multiply', (['h', 'tmp2'], {}), '(h, tmp2)\n', (4494, 4503), True, 'import numpy as np\n'), ((3868, 3889), 'numpy.cosh', 'np.cosh', (['(theta_s * sc)'], {}), '(theta_s * sc)\n', (3875, 3889), True, 'import numpy as np\n'), ((3890, 3906), 'numpy.cosh', 'np.cosh', (['theta_s'], {}), '(theta_s)\n', (3897, 3906), True, 'import numpy as np\n'), ((3959, 3981), 'numpy.exp', 'np.exp', (['(theta_b * csrf)'], {}), '(theta_b * csrf)\n', (3965, 3981), True, 'import numpy as np\n'), ((3988, 4004), 'numpy.exp', 'np.exp', (['(-theta_b)'], {}), '(-theta_b)\n', (3994, 4004), True, 'import numpy as np\n'), ((4535, 4562), 'numpy.multiply', 'np.multiply', (['(zeta + h)', 'tmp2'], {}), '(zeta + h, tmp2)\n', (4546, 4562), True, 'import numpy as np\n')]
|
import subprocess
import shutil
import tempfile
import logging
from time import sleep
# Module-level logger named after this module (standard logging pattern).
logger = logging.getLogger(__name__)
class Npm:
    """Thin wrapper around npm/node subprocesses.

    Tracks at most one long-running ``node`` process on ``self.process``
    (started with ``start(..., noRead=True)``) and offers one-shot helpers
    for ``npm install`` and short-lived ``node`` invocations.
    """

    def __init__(self):
        # Handle of the currently tracked node process, if any.
        self.process = None

    def install(self, path):
        """Run ``npm install`` in *path*.

        Returns True on success; raises if npm exits with a non-zero code.
        """
        logger.info("Installing npm packages...")
        process = subprocess.Popen(
            ["npm", "install"],
            cwd=path,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        try:
            return_code = process.wait()
        except subprocess.TimeoutExpired:
            # NOTE(review): wait() without a timeout never raises this;
            # branch kept for safety in case a timeout is added later.
            return True
        # BUGFIX: was `return_code is not 0` — identity comparison with an
        # int literal is implementation-dependent and a SyntaxWarning.
        if return_code != 0:
            raise Exception("Return code was non-zero")
        logger.info("Done.")
        return True

    def stop_process(self, kill_after=None):
        """Stop the tracked process and collect its output.

        Returns (timedOut, returncode, (stdout, stderr)) or None if no
        process is being tracked.
        """
        if self.process is None:
            return None
        outS, errS, timedOut = self.__read_process_stream(self.process, kill_after=kill_after)
        rc = self.process.returncode
        self.process = None
        return (timedOut, rc, (outS, errS))

    def start(self, path, cmd, kill_after=None, noRead=False):
        """Launch ``node`` with arguments *cmd* in directory *path*.

        With noRead=True the process handle is kept on ``self.process`` and
        nothing is returned; otherwise the streams are read (optionally
        terminating after *kill_after* seconds) and
        (timedOut, returncode, (stdout, stderr)) is returned.
        """
        if self.process and noRead:
            # `warn` is a deprecated alias of `warning`.
            logger.warning("Opening a process while a current one is running.")
        process = subprocess.Popen(
            ["node"] + cmd,
            cwd=path,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        if noRead:
            self.process = process
        else:
            outS, errS, timedOut = self.__read_process_stream(process, kill_after)
            return (timedOut, process.returncode, (outS, errS))

    def __read_process_stream(self, proc, kill_after=None):
        """Drain *proc*'s stdout/stderr, terminating it after *kill_after* seconds.

        Returns (stdout_text, stderr_text, timedOut).
        """
        timedOut = False
        outS = ""
        errS = ""
        try:
            outs, errs = proc.communicate(timeout=kill_after)
            outS = outs.decode()
            errS = errs.decode()
        except subprocess.TimeoutExpired as e:
            proc.terminate()
            proc.wait()
            # communicate() may have captured partial output before timing out.
            if e.stdout is not None:
                outS = e.stdout.decode()
            if e.stderr is not None:
                errS = e.stderr.decode()
            timedOut = True
            logger.debug("%s was terminated", str(" ".join(proc.args)))
        return (outS, errS, timedOut)
|
[
"subprocess.Popen",
"logging.getLogger"
] |
[((96, 123), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (113, 123), False, 'import logging\n'), ((300, 398), 'subprocess.Popen', 'subprocess.Popen', (["['npm', 'install']"], {'cwd': 'path', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['npm', 'install'], cwd=path, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", (316, 398), False, 'import subprocess\n'), ((1210, 1305), 'subprocess.Popen', 'subprocess.Popen', (["(['node'] + cmd)"], {'cwd': 'path', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['node'] + cmd, cwd=path, stdout=subprocess.PIPE, stderr=\n subprocess.PIPE)\n", (1226, 1305), False, 'import subprocess\n')]
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import math
import os
import json
import copy
import logging
from argparse import Namespace
import torch
import torch.nn as nn
import torch.nn.functional as F
from pytorch_transformers import RobertaModel, RobertaConfig
# from pytorch_transformers import BertModel, BertConfig
from fairseq import options, utils
from fairseq.modules import (
AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm,
LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding,
)
from . import (
FairseqIncrementalDecoder, FairseqEncoder, FairseqLanguageModel,
FairseqModel, register_model, register_model_architecture,
)
@register_model('roberta_transformer')
class AbsSumRobertaTransformerModel(FairseqModel):
    """
    Abstractive-summarization seq2seq model: a RoBERTa-based encoder paired
    with either a fairseq Transformer decoder or a BERT-style decoder.
    Based on the Transformer from `"Attention Is All You Need" (Vaswani, et al, 2017)
    <https://arxiv.org/abs/1706.03762>`_.
    Args:
        encoder (TransformerEncoder): the encoder
        decoder (TransformerDecoder): the decoder
    The Transformer model provides the following named architectures and
    command-line arguments:
    .. argparse::
        :ref: fairseq.models.transformer_parser
        :prog:
    """
    def __init__(self, encoder, decoder):
        super().__init__(encoder, decoder)
    @staticmethod
    def add_args(parser):
        """Add model-specific arguments to the parser."""
        # fmt: off
        parser.add_argument('--dropout', type=float, metavar='D',
                            help='dropout probability')
        parser.add_argument('--attention-dropout', type=float, metavar='D',
                            help='dropout probability for attention weights')
        parser.add_argument('--relu-dropout', type=float, metavar='D',
                            help='dropout probability after ReLU in FFN')
        parser.add_argument('--encoder-embed-path', type=str, metavar='STR',
                            help='path to pre-trained encoder embedding')
        parser.add_argument('--encoder-embed-dim', type=int, metavar='N',
                            help='encoder embedding dimension')
        parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N',
                            help='encoder embedding dimension for FFN')
        parser.add_argument('--encoder-layers', type=int, metavar='N',
                            help='num encoder layers')
        parser.add_argument('--encoder-attention-heads', type=int, metavar='N',
                            help='num encoder attention heads')
        parser.add_argument('--encoder-normalize-before', action='store_true',
                            help='apply layernorm before each encoder block')
        parser.add_argument('--encoder-learned-pos', action='store_true',
                            help='use learned positional embeddings in the encoder')
        parser.add_argument('--decoder-embed-path', type=str, metavar='STR',
                            help='path to pre-trained decoder embedding')
        parser.add_argument('--decoder-embed-dim', type=int, metavar='N',
                            help='decoder embedding dimension')
        parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N',
                            help='decoder embedding dimension for FFN')
        parser.add_argument('--decoder-layers', type=int, metavar='N',
                            help='num decoder layers')
        parser.add_argument('--decoder-dropout', type=float, metavar='D',
                            help='decoder dropout probability')
        parser.add_argument('--decoder-attention-heads', type=int, metavar='N',
                            help='num decoder attention heads')
        parser.add_argument('--decoder-learned-pos', action='store_true',
                            help='use learned positional embeddings in the decoder')
        parser.add_argument('--decoder-normalize-before', action='store_true',
                            help='apply layernorm before each decoder block')
        parser.add_argument('--share-decoder-input-output-embed', action='store_true',
                            help='share decoder input and output embeddings')
        parser.add_argument('--share-all-embeddings', action='store_true',
                            help='share encoder, decoder and output embeddings'
                                 ' (requires shared dictionary and embed dim)')
        parser.add_argument('--layer-norm-eps', type=float, metavar='D',
                            help='eps for layer norm')
        parser.add_argument('--no-token-positional-embeddings', default=False, action='store_true',
                            help='if set, disables positional embeddings (outside self attention)')
        # parser.add_argument('--adaptive-softmax-cutoff', metavar='EXPR',
        #                     help='comma separated list of adaptive softmax cutoff points. '
        #                          'Must be used with adaptive_loss criterion'),
        # parser.add_argument('--adaptive-softmax-dropout', type=float, metavar='D',
        #                     help='sets adaptive softmax dropout for the tail projections')
        parser.add_argument('--roberta-model', default='roberta-base',
                            help="RoBerta pre-trained model selected in the list: roberta-base, "
                                 "roberta-large.")
        parser.add_argument('--roberta-decoder', default=False, action='store_true',
                            help='if set, the decoder is built as BERT architecture, instead of Fairseq transformer')
        parser.add_argument('--roberta-decoder-initialization', default=False, action='store_true',
                            help='if set, the decoder is built as BERT architecture, instead of Fairseq transformer')
        parser.add_argument('--roberta-config-path', default=None, metavar='PRETRAINED_PATH',
                            help='roberta config json file path')
        # fmt: on
    @classmethod
    def build_model(cls, args, task):
        """Build a new model instance from *args* and the task's dictionaries."""
        # make sure all arguments are present in older models
        base_architecture(args)
        if not hasattr(args, 'max_source_positions'):
            args.max_source_positions = 1024
        if not hasattr(args, 'max_target_positions'):
            args.max_target_positions = 1024
        src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
        def build_embedding(dictionary, embed_dim, path=None):
            # Fresh embedding table for `dictionary`; optionally initialized
            # from pre-trained vectors at `path`.
            num_embeddings = len(dictionary)
            padding_idx = dictionary.pad()
            emb = Embedding(num_embeddings, embed_dim, padding_idx)
            # if provided, load from preloaded dictionaries
            if path:
                embed_dict = utils.parse_embedding(path)
                utils.load_embedding(embed_dict, dictionary, emb)
            return emb
        if args.share_all_embeddings:
            # Shared embeddings require identical dictionaries and dimensions.
            if src_dict != tgt_dict:
                raise ValueError('--share-all-embeddings requires a joined dictionary')
            if args.encoder_embed_dim != args.decoder_embed_dim:
                raise ValueError(
                    '--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim')
            if args.decoder_embed_path and (
                    args.decoder_embed_path != args.encoder_embed_path):
                raise ValueError('--share-all-embeddings not compatible with --decoder-embed-path')
            encoder_embed_tokens = build_embedding(
                src_dict, args.encoder_embed_dim, args.encoder_embed_path
            )
            decoder_embed_tokens = encoder_embed_tokens
            args.share_decoder_input_output_embed = True
        else:
            encoder_embed_tokens = build_embedding(
                src_dict, args.encoder_embed_dim, args.encoder_embed_path
            )
            decoder_embed_tokens = build_embedding(
                tgt_dict, args.decoder_embed_dim, args.decoder_embed_path
            )
        encoder = TransformerEncoder(args, src_dict, encoder_embed_tokens, left_pad=args.left_pad_source)
        # Decoder flavour: BERT-style architecture or standard fairseq decoder.
        if hasattr(args, 'roberta_decoder') and args.roberta_decoder:
            print("Apply Bert Architecture as Decoder")
            # json_file_path = 'roberta-vocab/{0}-config.json'.format(args.roberta_model)
            json_file_path = args.roberta_config_path
            config = from_json_file(json_file_path)
            decoder_config = Namespace(**config)
            print(decoder_config)
            decoder = BertDecoder(args, decoder_config, tgt_dict, decoder_embed_tokens, left_pad=args.left_pad_target)
        else:
            decoder = TransformerDecoder(args, tgt_dict, decoder_embed_tokens, left_pad=args.left_pad_target)
        return AbsSumRobertaTransformerModel(encoder, decoder)
    def forward(self, src_tokens, segment_ids, prev_output_tokens):
        """Encode source tokens (with segment ids) then decode with teacher forcing."""
        encoder_out = self.encoder(src_tokens, segment_ids)
        decoder_out = self.decoder(prev_output_tokens, encoder_out=encoder_out)
        return decoder_out
    def initilize_roberta_decoder(self):
        """Copy RoBERTa encoder weights (embeddings and each layer) into the decoder.

        Only usable with the BERT-style decoder, whose modules mirror the
        encoder's parameter names.
        """
        print("Initializing the decoder with Roberta encoder parameters.")
        assert self.decoder is not None
        assert self.encoder is not None
        # Embedding
        # print(self.decoder.embeddings)
        # print(self.encoder.roberta.embeddings)
        self.decoder.embeddings = self.copy_params(self.encoder.roberta.embeddings, self.decoder.embeddings)
        # print(self.encoder.roberta.encoder.layer[0])
        # print(self.decoder.layers[0])
        # Layer list
        for i in range(len(self.encoder.roberta.encoder.layer)):
            self.decoder.layers[i] = self.copy_params(self.encoder.roberta.encoder.layer[i], self.decoder.layers[i])
    def copy_params(self, module1, module2):
        """Copy every parameter of *module1* whose name also exists in *module2*.

        Parameters present only in one module are left untouched. Returns
        *module2* with the copied weights loaded.
        """
        params1 = module1.state_dict()
        params2 = module2.state_dict()
        dict_param2 = dict(params2)
        for name1 in params1:
            # print(name1)
            # print(params1[name1].data)
            if name1 in dict_param2.keys():
                # print('before', dict_param2[name1])
                dict_param2[name1].data.copy_(params1[name1].data)
                # print('after', dict_param2[name1])
                # print('-------------------')
        module2.load_state_dict(dict_param2)
        return module2
def from_json_file(json_file):
    """Read a JSON configuration file and return its contents as a dict.

    Args:
        json_file (str): path to a UTF-8 encoded JSON file whose top-level
            value is an object of configuration parameters.

    Returns:
        dict: the parsed key/value pairs (a fresh dict the caller may mutate).
    """
    with open(json_file, "r", encoding='utf-8') as reader:
        text = reader.read()
    # json.loads already yields a dict for a JSON object; the previous
    # key-by-key copy loop was redundant — a shallow copy suffices.
    return dict(json.loads(text))
class TransformerEncoder(FairseqEncoder):
    """
    RoBERTa-backed encoder: source tokens are encoded by a pre-trained
    RobertaModel instead of a stack of TransformerEncoderLayers (the layer
    stack construction below is commented out).
    Args:
        args (argparse.Namespace): parsed command-line arguments
        dictionary (~fairseq.data.Dictionary): encoding dictionary
        embed_tokens (torch.nn.Embedding): input embedding
        left_pad (bool, optional): whether the input is left-padded
            (default: True).
    """
    def __init__(self, args, dictionary, embed_tokens, left_pad=False):
        super().__init__(dictionary)
        self.dropout = args.dropout
        self.n_gpu = torch.cuda.device_count()
        print('Distributed rank: ', args.distributed_rank)
        print('Number of used GPU: ', self.n_gpu)
        # if args.distributed_world_size > 1:
        #     if args.distributed_rank not in [-1, 0]: # [1, 0]
        #         torch.distributed.barrier()  # Make sure only the first process in distributed training will download model & vocab
        # Load pre-trained model (weights)
        config = RobertaConfig.from_pretrained(args.roberta_model)
        self.roberta = RobertaModel.from_pretrained(args.roberta_model, config=config)
        # if args.distributed_world_size > 1:
        #     if args.distributed_rank == 0: # 1
        #         torch.distributed.barrier()  # Make sure only the first process in distributed training will download model & vocab
        embed_dim = embed_tokens.embedding_dim
        self.padding_idx = embed_tokens.padding_idx
        self.max_source_positions = args.max_source_positions
        self.embed_tokens = embed_tokens
        # NOTE(review): embed_scale is computed here but not applied in the
        # active forward() path (only in the commented-out variant below).
        self.embed_scale = math.sqrt(embed_dim)
        self.embed_positions = PositionalEmbedding(
            args.max_source_positions, embed_dim, self.padding_idx,
            left_pad=left_pad,
            learned=args.encoder_learned_pos,
        ) if not args.no_token_positional_embeddings else None
        # self.layers = nn.ModuleList([])
        # self.layers.extend([
        #     TransformerEncoderLayer(args)
        #     for i in range(args.encoder_layers)
        # ])
        self.register_buffer('version', torch.Tensor([2]))
        self.normalize = args.encoder_normalize_before
        if self.normalize:
            self.layer_norm = LayerNorm(embed_dim)
    # def forward(self, src_tokens, src_lengths):
    def forward(self, src_tokens, segment_ids):
        """
        Args:
            src_tokens (LongTensor): tokens in the source language of shape
                `(batch, src_len)`
            segment_ids (LongTensor): token type ids of shape
                `(batch, src_len)` passed through to RoBERTa
        Returns:
            dict:
                - **encoder_out** (Tensor): the last encoder layer's output of
                  shape `(src_len, batch, embed_dim)`
                - **encoder_padding_mask** (ByteTensor): the positions of
                  padding elements of shape `(batch, src_len)`
        """
        # print(src_tokens)
        # sum = src_tokens[:, 0].sum().item()
        # print(sum)
        bsz, seqlen = src_tokens.size()
        src_tokens = src_tokens.view(bsz, seqlen)
        segment_ids = segment_ids.view(bsz, seqlen)  # all fill 0
        # compute padding mask (True at non-pad positions, RoBERTa convention)
        attention_mask = src_tokens.ne(self.padding_idx)
        # print(attention_mask)
        # enc_hids, _ = self.bert(src_tokens, segment_ids, attention_mask, output_all_encoded_layers=False)
        # print(src_tokens)
        # RoBERTa produces the contextual token representations directly.
        enc_hids, _ = self.roberta(src_tokens, token_type_ids=segment_ids, attention_mask=attention_mask)
        # print('enc_hids', enc_hids.size())
        # doc_pos = self.sent_embed_positions(doc_pos_tok)
        # sent_repr = x[0].view(bsz, n_sent, -1)
        sent_repr = enc_hids
        # print( 'sent_repr', sent_repr.size() )
        # Optionally add fairseq positional embeddings on top of RoBERTa's own.
        if self.embed_positions is not None:
            sent_repr += self.embed_positions(src_tokens)
        # B x T x C -> T x B x C (fairseq decoder expects time-major)
        sent_repr = sent_repr.transpose(0, 1)
        # compute padding mask (True at pad positions, fairseq convention)
        encoder_padding_mask = src_tokens.eq(self.padding_idx)
        if not encoder_padding_mask.any():
            encoder_padding_mask = None
        # encoder layers
        # for layer in self.layers:
        #     sent_repr = layer(sent_repr, encoder_padding_mask)
        if self.normalize:
            sent_repr = self.layer_norm(sent_repr)
        '''
        # embed tokens and positions
        x = self.embed_scale * self.embed_tokens(src_tokens)
        if self.embed_positions is not None:
            x += self.embed_positions(src_tokens)
        x = F.dropout(x, p=self.dropout, training=self.training)
        # B x T x C -> T x B x C
        x = x.transpose(0, 1)
        # compute padding mask
        encoder_padding_mask = src_tokens.eq(self.padding_idx)
        if not encoder_padding_mask.any():
            encoder_padding_mask = None
        # encoder layers
        for layer in self.layers:
            x = layer(x, encoder_padding_mask)
        if self.normalize:
            x = self.layer_norm(x)
        return {
            'encoder_out': x,  # T x B x C
            'encoder_padding_mask': encoder_padding_mask,  # B x T
        }
        '''
        return {
            'encoder_out': sent_repr,  # T x B x C
            'encoder_padding_mask': encoder_padding_mask,  # B x T
        }
    def reorder_encoder_out(self, encoder_out, new_order):
        """
        Reorder encoder output according to *new_order*.
        Args:
            encoder_out: output from the ``forward()`` method
            new_order (LongTensor): desired order
        Returns:
            *encoder_out* rearranged according to *new_order*
        """
        # encoder_out is time-major (dim 1 is batch); the mask is batch-major.
        if encoder_out['encoder_out'] is not None:
            encoder_out['encoder_out'] = \
                encoder_out['encoder_out'].index_select(1, new_order)
        if encoder_out['encoder_padding_mask'] is not None:
            encoder_out['encoder_padding_mask'] = \
                encoder_out['encoder_padding_mask'].index_select(0, new_order)
        return encoder_out
    def max_positions(self):
        """Maximum input length supported by the encoder."""
        if self.embed_positions is None:
            return self.max_source_positions
        return min(self.max_source_positions, self.embed_positions.max_positions())
    def upgrade_state_dict_named(self, state_dict, name):
        """Upgrade a (possibly old) state dict for new versions of fairseq."""
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            # Sinusoidal embeddings are recomputed, not stored as weights.
            weights_key = '{}.embed_positions.weights'.format(name)
            if weights_key in state_dict:
                del state_dict[weights_key]
            state_dict['{}.embed_positions._float_tensor'.format(name)] = torch.FloatTensor(1)
        version_key = '{}.version'.format(name)
        if utils.item(state_dict.get(version_key, torch.Tensor([1]))[0]) < 2:
            # earlier checkpoints did not normalize after the stack of layers
            self.layer_norm = None
            self.normalize = False
            state_dict[version_key] = torch.Tensor([1])
        return state_dict
class TransformerDecoder(FairseqIncrementalDecoder):
"""
Transformer decoder consisting of *args.decoder_layers* layers. Each layer
is a :class:`TransformerDecoderLayer`.
Args:
args (argparse.Namespace): parsed command-line arguments
dictionary (~fairseq.data.Dictionary): decoding dictionary
embed_tokens (torch.nn.Embedding): output embedding
no_encoder_attn (bool, optional): whether to attend to encoder outputs
(default: False).
left_pad (bool, optional): whether the input is left-padded
(default: False).
final_norm (bool, optional): apply layer norm to the output of the
final decoder layer (default: True).
"""
def __init__(self, args, dictionary, embed_tokens, no_encoder_attn=False, left_pad=False, final_norm=True):
super().__init__(dictionary)
self.dropout = args.decoder_dropout
self.share_input_output_embed = args.share_decoder_input_output_embed
input_embed_dim = embed_tokens.embedding_dim
embed_dim = args.decoder_embed_dim
output_embed_dim = args.decoder_output_dim
padding_idx = embed_tokens.padding_idx
self.max_target_positions = args.max_target_positions
self.embed_tokens = embed_tokens
self.embed_scale = math.sqrt(embed_dim) # todo: try with input_embed_dim
self.project_in_dim = Linear(input_embed_dim, embed_dim, bias=False) if embed_dim != input_embed_dim else None
self.embed_positions = PositionalEmbedding(
args.max_target_positions, embed_dim, padding_idx,
left_pad=left_pad,
learned=args.decoder_learned_pos,
) if not args.no_token_positional_embeddings else None
self.layers = nn.ModuleList([])
self.layers.extend([
TransformerDecoderLayer(args, no_encoder_attn)
for _ in range(args.decoder_layers)
])
self.adaptive_softmax = None
self.project_out_dim = Linear(embed_dim, output_embed_dim, bias=False) \
if embed_dim != output_embed_dim and not args.tie_adaptive_weights else None
if args.adaptive_softmax_cutoff is not None:
self.adaptive_softmax = AdaptiveSoftmax(
len(dictionary),
output_embed_dim,
options.eval_str_list(args.adaptive_softmax_cutoff, type=int),
dropout=args.adaptive_softmax_dropout,
adaptive_inputs=embed_tokens if args.tie_adaptive_weights else None,
factor=args.adaptive_softmax_factor,
tie_proj=args.tie_adaptive_proj,
)
elif not self.share_input_output_embed:
self.embed_out = nn.Parameter(torch.Tensor(len(dictionary), output_embed_dim))
nn.init.normal_(self.embed_out, mean=0, std=output_embed_dim ** -0.5)
self.register_buffer('version', torch.Tensor([2]))
self.normalize = args.decoder_normalize_before and final_norm
if self.normalize:
self.layer_norm = LayerNorm(embed_dim)
def forward(self, prev_output_tokens, encoder_out=None, incremental_state=None):
"""
Args:
prev_output_tokens (LongTensor): previous decoder outputs of shape
`(batch, tgt_len)`, for input feeding/teacher forcing
encoder_out (Tensor, optional): output from the encoder, used for
encoder-side attention
incremental_state (dict): dictionary used for storing state during
:ref:`Incremental decoding`
Returns:
tuple:
- the last decoder layer's output of shape `(batch, tgt_len,
vocab)`
- the last decoder layer's attention weights of shape `(batch,
tgt_len, src_len)`
"""
# print(encoder_out)
# print(incremental_state)
# exit(1)
# embed positions
# incremental_state = None
positions = self.embed_positions(
prev_output_tokens,
incremental_state=incremental_state,
) if self.embed_positions is not None else None
if incremental_state is not None:
prev_output_tokens = prev_output_tokens[:, -1:]
if positions is not None:
positions = positions[:, -1:]
# embed tokens and positions
x = self.embed_scale * self.embed_tokens(prev_output_tokens)
# self.project_in_dim = None
if self.project_in_dim is not None:
x = self.project_in_dim(x)
if positions is not None:
x += positions
x = F.dropout(x, p=self.dropout, training=self.training)
# B x T x C -> T x B x C
x = x.transpose(0, 1)
attn = None
inner_states = [x]
# decoder layers
for layer in self.layers:
x, attn = layer(
x,
encoder_out['encoder_out'] if encoder_out is not None else None,
encoder_out['encoder_padding_mask'] if encoder_out is not None else None,
incremental_state,
self_attn_mask=self.buffered_future_mask(x) if incremental_state is None else None,
)
inner_states.append(x)
if self.normalize:
x = self.layer_norm(x)
# T x B x C -> B x T x C
x = x.transpose(0, 1)
# self.project_out_dim = None
if self.project_out_dim is not None:
x = self.project_out_dim(x)
# self.adaptive_softmax = None
# print(self.share_input_output_embed)
if self.adaptive_softmax is None:
# project back to size of vocabulary
if self.share_input_output_embed:
x = F.linear(x, self.embed_tokens.weight)
else:
x = F.linear(x, self.embed_out)
return x, {'attn': attn, 'inner_states': inner_states}
def max_positions(self):
"""Maximum output length supported by the decoder."""
if self.embed_positions is None:
return self.max_target_positions
return min(self.max_target_positions, self.embed_positions.max_positions())
    def buffered_future_mask(self, tensor):
        """Return a cached causal mask of shape ``(dim, dim)`` where
        ``dim = tensor.size(0)``: ``-inf`` strictly above the diagonal,
        ``0`` elsewhere, so future positions are blocked in self-attention.
        """
        dim = tensor.size(0)
        # Rebuild the cache if it is missing or lives on a different device
        # than the activations (e.g. after a .cuda() move).
        if not hasattr(self, '_future_mask') or self._future_mask is None or self._future_mask.device != tensor.device:
            self._future_mask = torch.triu(utils.fill_with_neg_inf(tensor.new(dim, dim)), 1)
        if self._future_mask.size(0) < dim:
            # Grow the cached mask in place; resize_ leaves new storage
            # uninitialized, but fill_with_neg_inf + triu overwrite every cell.
            self._future_mask = torch.triu(utils.fill_with_neg_inf(self._future_mask.resize_(dim, dim)), 1)
        return self._future_mask[:dim, :dim]
    def upgrade_state_dict_named(self, state_dict, name):
        """Upgrade a (possibly old) state dict for new versions of fairseq."""
        # Sinusoidal embeddings are computed, not learned: drop any stored
        # weights and keep only a dtype/device placeholder tensor.
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            weights_key = '{}.embed_positions.weights'.format(name)
            if weights_key in state_dict:
                del state_dict[weights_key]
            state_dict['{}.embed_positions._float_tensor'.format(name)] = torch.FloatTensor(1)
        for i in range(len(self.layers)):
            # update layer norms
            # Old checkpoints stored the three per-layer norms in an indexed
            # `layer_norms` ModuleList; rename to the current attribute names.
            layer_norm_map = {
                '0': 'self_attn_layer_norm',
                '1': 'encoder_attn_layer_norm',
                '2': 'final_layer_norm'
            }
            for old, new in layer_norm_map.items():
                for m in ('weight', 'bias'):
                    k = '{}.layers.{}.layer_norms.{}.{}'.format(name, i, old, m)
                    if k in state_dict:
                        state_dict['{}.layers.{}.{}.{}'.format(name, i, new, m)] = state_dict[k]
                        del state_dict[k]
        # Version < 2 checkpoints predate the final LayerNorm; disable it so
        # loading them reproduces the old behavior.
        if utils.item(state_dict.get('{}.version'.format(name), torch.Tensor([1]))[0]) < 2:
            # earlier checkpoints did not normalize after the stack of layers
            self.layer_norm = None
            self.normalize = False
            state_dict['{}.version'.format(name)] = torch.Tensor([1])
        return state_dict
class TransformerEncoderLayer(nn.Module):
    """Encoder layer block.
    In the original paper each operation (multi-head attention or FFN) is
    postprocessed with: `dropout -> add residual -> layernorm`. In the
    tensor2tensor code they suggest that learning is more robust when
    preprocessing each layer with layernorm and postprocessing with:
    `dropout -> add residual`. We default to the approach in the paper, but the
    tensor2tensor approach can be enabled by setting
    *args.encoder_normalize_before* to ``True``.
    Args:
        args (argparse.Namespace): parsed command-line arguments
    """
    def __init__(self, args):
        super().__init__()
        self.embed_dim = args.encoder_embed_dim
        self.self_attn = MultiheadAttention(
            self.embed_dim, args.encoder_attention_heads,
            dropout=args.attention_dropout,
        )
        self.dropout = args.dropout
        self.relu_dropout = args.relu_dropout
        self.normalize_before = args.encoder_normalize_before
        # Position-wise feed-forward network (expand then contract).
        self.fc1 = Linear(self.embed_dim, args.encoder_ffn_embed_dim)
        self.fc2 = Linear(args.encoder_ffn_embed_dim, self.embed_dim)
        # Index 0: norm for the attention sub-block; index 1: norm for the FFN.
        self.layer_norms = nn.ModuleList([LayerNorm(self.embed_dim) for i in range(2)])
    def forward(self, x, encoder_padding_mask):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor): binary ByteTensor of shape
                `(batch, src_len)` where padding elements are indicated by ``1``.
        Returns:
            encoded output of shape `(batch, src_len, embed_dim)`
        """
        # --- self-attention sub-block: (pre-)norm, attend, dropout, residual ---
        residual = x
        x = self.maybe_layer_norm(0, x, before=True)
        x, _ = self.self_attn(query=x, key=x, value=x, key_padding_mask=encoder_padding_mask)
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(0, x, after=True)
        # --- feed-forward sub-block: relu(fc1) -> dropout -> fc2 -> dropout ---
        residual = x
        x = self.maybe_layer_norm(1, x, before=True)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, p=self.relu_dropout, training=self.training)
        x = self.fc2(x)
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(1, x, after=True)
        return x
    def maybe_layer_norm(self, i, x, before=False, after=False):
        # Apply layer-norm ``i`` on exactly one side of the sub-block:
        # before it when normalize_before (tensor2tensor style), after it
        # otherwise (original paper style).
        assert before ^ after
        if after ^ self.normalize_before:
            return self.layer_norms[i](x)
        else:
            return x
class TransformerDecoderLayer(nn.Module):
    """Decoder layer block.
    In the original paper each operation (multi-head attention, encoder
    attention or FFN) is postprocessed with: `dropout -> add residual ->
    layernorm`. In the tensor2tensor code they suggest that learning is more
    robust when preprocessing each layer with layernorm and postprocessing with:
    `dropout -> add residual`. We default to the approach in the paper, but the
    tensor2tensor approach can be enabled by setting
    *args.decoder_normalize_before* to ``True``.
    Args:
        args (argparse.Namespace): parsed command-line arguments
        no_encoder_attn (bool, optional): whether to attend to encoder outputs
            (default: False).
    """
    def __init__(self, args, no_encoder_attn=False):
        super().__init__()
        self.embed_dim = args.decoder_embed_dim
        # Masked self-attention over previously generated tokens.
        self.self_attn = MultiheadAttention(
            self.embed_dim, args.decoder_attention_heads,
            dropout=args.attention_dropout,
        )
        self.dropout = args.decoder_dropout
        self.relu_dropout = args.relu_dropout
        self.normalize_before = args.decoder_normalize_before
        self.self_attn_layer_norm = LayerNorm(self.embed_dim)
        if no_encoder_attn:
            # Decoder-only configuration (e.g. language modeling).
            self.encoder_attn = None
            self.encoder_attn_layer_norm = None
        else:
            self.encoder_attn = MultiheadAttention(
                self.embed_dim, args.decoder_attention_heads,
                dropout=args.attention_dropout,
            )
            self.encoder_attn_layer_norm = LayerNorm(self.embed_dim)
        # Position-wise feed-forward network.
        self.fc1 = Linear(self.embed_dim, args.decoder_ffn_embed_dim)
        self.fc2 = Linear(args.decoder_ffn_embed_dim, self.embed_dim)
        self.final_layer_norm = LayerNorm(self.embed_dim)
        self.need_attn = True
        self.onnx_trace = False
    def prepare_for_onnx_export_(self):
        self.onnx_trace = True
    def forward(self, x, encoder_out, encoder_padding_mask, incremental_state,
                prev_self_attn_state=None, prev_attn_state=None, self_attn_mask=None,
                self_attn_padding_mask=None):
        """
        Args:
            x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)`
            encoder_padding_mask (ByteTensor): binary ByteTensor of shape
                `(batch, src_len)` where padding elements are indicated by ``1``.
        Returns:
            encoded output of shape `(batch, src_len, embed_dim)`
        """
        # print("incremental_state", incremental_state) None
        # print("prev_attn_state", prev_attn_state) None
        # print("self_attn_mask", self_attn_mask.shape) # tensor
        # print(self_attn_mask)
        # print("self_attn_padding_mask", self_attn_padding_mask) None
        # print("encoder_padding_mask", encoder_padding_mask) None
        # --- masked self-attention sub-block ---
        residual = x
        x = self.maybe_layer_norm(self.self_attn_layer_norm, x, before=True)
        # print("prev_self_attn_state", prev_self_attn_state) None
        # Explicit previous key/value state (ONNX export path) is injected
        # into the attention module's incremental buffer.
        if prev_self_attn_state is not None:
            if incremental_state is None:
                incremental_state = {}
            prev_key, prev_value = prev_self_attn_state
            saved_state = {"prev_key": prev_key, "prev_value": prev_value}
            self.self_attn._set_input_buffer(incremental_state, saved_state)
        x, _ = self.self_attn(
            query=x,
            key=x,
            value=x,
            key_padding_mask=self_attn_padding_mask,
            incremental_state=incremental_state,
            need_weights=False,
            attn_mask=self_attn_mask,
        )
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(self.self_attn_layer_norm, x, after=True)
        attn = None
        # --- encoder-decoder attention sub-block (skipped if no_encoder_attn) ---
        if self.encoder_attn is not None:
            residual = x
            x = self.maybe_layer_norm(self.encoder_attn_layer_norm, x, before=True)
            if prev_attn_state is not None:
                if incremental_state is None:
                    incremental_state = {}
                prev_key, prev_value = prev_attn_state
                saved_state = {"prev_key": prev_key, "prev_value": prev_value}
                self.encoder_attn._set_input_buffer(incremental_state, saved_state)
            # print("encoder_padding_mask", encoder_padding_mask) # None
            # print(not self.training and self.need_attn) # True
            # static_kv: encoder keys/values do not change across decoding steps.
            x, attn = self.encoder_attn(
                query=x,
                key=encoder_out,
                value=encoder_out,
                key_padding_mask=encoder_padding_mask,
                incremental_state=incremental_state,
                static_kv=True,
                need_weights=(not self.training and self.need_attn),
            )
            x = F.dropout(x, p=self.dropout, training=self.training)
            x = residual + x
            x = self.maybe_layer_norm(self.encoder_attn_layer_norm, x, after=True)
        # --- feed-forward sub-block ---
        residual = x
        x = self.maybe_layer_norm(self.final_layer_norm, x, before=True)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, p=self.relu_dropout, training=self.training)
        x = self.fc2(x)
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(self.final_layer_norm, x, after=True)
        if self.onnx_trace:
            # Also expose the self-attention key/value buffer for ONNX export.
            saved_state = self.self_attn._get_input_buffer(incremental_state)
            self_attn_state = saved_state["prev_key"], saved_state["prev_value"]
            return x, attn, self_attn_state
        return x, attn
    def maybe_layer_norm(self, layer_norm, x, before=False, after=False):
        # Apply the given norm on exactly one side of the sub-block, chosen
        # by *normalize_before* (pre-norm vs post-norm).
        assert before ^ after
        if after ^ self.normalize_before:
            return layer_norm(x)
        else:
            return x
    def make_generation_fast_(self, need_attn=False, **kwargs):
        self.need_attn = need_attn
def Embedding(num_embeddings, embedding_dim, padding_idx):
    """Build an embedding table initialised N(0, d^-0.5) with a zeroed padding row."""
    table = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx)
    nn.init.normal_(table.weight, mean=0, std=embedding_dim ** -0.5)
    # The padding row must stay zero so pad tokens contribute nothing.
    nn.init.constant_(table.weight[padding_idx], 0)
    return table
def Linear(in_features, out_features, bias=True):
    """nn.Linear with Xavier-uniform weights and a zero-initialised bias."""
    layer = nn.Linear(in_features, out_features, bias)
    nn.init.xavier_uniform_(layer.weight)
    if bias:
        nn.init.constant_(layer.bias, 0.)
    return layer
def PositionalEmbedding(num_embeddings, embedding_dim, padding_idx, left_pad, learned=False):
    """Build a learned or sinusoidal positional embedding.

    Capacity is ``num_embeddings + padding_idx + 1`` because position numbering
    starts after the padding index.
    """
    capacity = num_embeddings + padding_idx + 1
    if not learned:
        # Sinusoidal embeddings are computed on the fly; nothing to initialise.
        return SinusoidalPositionalEmbedding(embedding_dim, padding_idx, left_pad, capacity)
    m = LearnedPositionalEmbedding(capacity, embedding_dim, padding_idx, left_pad)
    nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5)
    nn.init.constant_(m.weight[padding_idx], 0)
    return m
@register_model_architecture('roberta_transformer', 'abs_sum_roberta_transformer_base')
def base_architecture(args):
    """Fill in any hyperparameters not set on the command line with the
    base-model defaults (mutates *args* in place)."""
    def default(name, value):
        # Only fills missing attributes; explicit user-provided values win.
        setattr(args, name, getattr(args, name, value))

    default('encoder_embed_path', None)
    default('encoder_embed_dim', 512)
    default('encoder_ffn_embed_dim', 2048)
    default('encoder_layers', 6)
    default('encoder_attention_heads', 8)
    default('encoder_normalize_before', False)
    default('encoder_learned_pos', False)
    default('decoder_embed_path', None)
    # Decoder dimensions default to the (now resolved) encoder dimensions.
    default('decoder_embed_dim', args.encoder_embed_dim)
    default('decoder_ffn_embed_dim', args.encoder_ffn_embed_dim)
    default('decoder_layers', 6)
    default('decoder_attention_heads', 8)
    default('decoder_normalize_before', False)
    default('decoder_learned_pos', False)
    default('attention_dropout', 0.)
    default('relu_dropout', 0.)
    default('dropout', 0.1)
    default('decoder_dropout', args.dropout)
    default('adaptive_softmax_cutoff', None)
    default('adaptive_softmax_dropout', 0)
    default('share_decoder_input_output_embed', False)
    default('share_all_embeddings', False)
    default('no_token_positional_embeddings', False)
    default('adaptive_input', False)
    default('decoder_output_dim', args.decoder_embed_dim)
    default('decoder_input_dim', args.decoder_embed_dim)
@register_model_architecture('roberta_transformer', 'abs_sum_roberta_transformer')
def abs_sum_roberta_transformer(args):
    """Architecture 'abs_sum_roberta_transformer': 768-dim embeddings, base defaults.

    Renamed from ``transformer_abs_sum_roberta``: the original file reused that
    one function name for four different architectures, so the earlier
    definitions were shadowed at module level. Registration is by the
    architecture string, so the rename is behavior-preserving.
    """
    args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 768)
    args.encoder_normalize_before = getattr(args, 'encoder_normalize_before', False)
    args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 768)
    base_architecture(args)
@register_model_architecture('roberta_transformer', 'abs_sum_roberta_transformer_medium')
def abs_sum_roberta_transformer_medium(args):
    """Architecture 'abs_sum_roberta_transformer_medium': 768-dim, 3072 FFN,
    12 heads, 12 decoder layers.

    Renamed from ``transformer_abs_sum_roberta`` to resolve the four-way
    function-name collision; registration by architecture string is unchanged.
    """
    args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 768)
    args.encoder_ffn_embed_dim = getattr(args, 'encoder_ffn_embed_dim', 3072)
    args.encoder_attention_heads = getattr(args, 'encoder_attention_heads', 12)
    args.encoder_normalize_before = getattr(args, 'encoder_normalize_before', False)
    args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 768)
    args.decoder_ffn_embed_dim = getattr(args, 'decoder_ffn_embed_dim', 3072)
    args.decoder_attention_heads = getattr(args, 'decoder_attention_heads', 12)
    args.decoder_layers = getattr(args, 'decoder_layers', 12)
    base_architecture(args)
@register_model_architecture('roberta_transformer', 'abs_sum_roberta_transformer_large')
def abs_sum_roberta_transformer_large(args):
    """Architecture 'abs_sum_roberta_transformer_large': 1024-dim embeddings,
    remaining hyperparameters from the base defaults.

    Renamed from ``transformer_abs_sum_roberta`` to resolve the four-way
    function-name collision; registration by architecture string is unchanged.
    """
    args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 1024)
    args.encoder_normalize_before = getattr(args, 'encoder_normalize_before', False)
    args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 1024)
    base_architecture(args)
@register_model_architecture('roberta_transformer', 'abs_sum_roberta_large_transformer_large')
def transformer_abs_sum_roberta(args):
    """Architecture 'abs_sum_roberta_large_transformer_large': 1024-dim embeddings.

    Keeps the original function name for backward compatibility: this was the
    last of four same-named definitions, so it is the only one that was ever
    reachable as a module attribute.
    """
    args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 1024)
    args.encoder_normalize_before = getattr(args, 'encoder_normalize_before', False)
    args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 1024)
    base_architecture(args)
'''
@register_model_architecture('transformer', 'transformer_iwslt_de_en')
def transformer_iwslt_de_en(args):
args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 512)
args.encoder_ffn_embed_dim = getattr(args, 'encoder_ffn_embed_dim', 1024)
args.encoder_attention_heads = getattr(args, 'encoder_attention_heads', 4)
args.encoder_layers = getattr(args, 'encoder_layers', 6)
args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 512)
args.decoder_ffn_embed_dim = getattr(args, 'decoder_ffn_embed_dim', 1024)
args.decoder_attention_heads = getattr(args, 'decoder_attention_heads', 4)
args.decoder_layers = getattr(args, 'decoder_layers', 6)
base_architecture(args)
@register_model_architecture('transformer', 'transformer_wmt_en_de')
def transformer_wmt_en_de(args):
base_architecture(args)
# parameters used in the "Attention Is All You Need" paper (Vaswani, et al, 2017)
@register_model_architecture('transformer', 'transformer_vaswani_wmt_en_de_big')
def transformer_vaswani_wmt_en_de_big(args):
args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 1024)
args.encoder_ffn_embed_dim = getattr(args, 'encoder_ffn_embed_dim', 4096)
args.encoder_attention_heads = getattr(args, 'encoder_attention_heads', 16)
args.encoder_normalize_before = getattr(args, 'encoder_normalize_before', False)
args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 1024)
args.decoder_ffn_embed_dim = getattr(args, 'decoder_ffn_embed_dim', 4096)
args.decoder_attention_heads = getattr(args, 'decoder_attention_heads', 16)
args.dropout = getattr(args, 'dropout', 0.3)
base_architecture(args)
@register_model_architecture('transformer', 'transformer_vaswani_wmt_en_fr_big')
def transformer_vaswani_wmt_en_fr_big(args):
args.dropout = getattr(args, 'dropout', 0.1)
transformer_vaswani_wmt_en_de_big(args)
@register_model_architecture('transformer', 'transformer_wmt_en_de_big')
def transformer_wmt_en_de_big(args):
args.attention_dropout = getattr(args, 'attention_dropout', 0.1)
transformer_vaswani_wmt_en_de_big(args)
# default parameters used in tensor2tensor implementation
@register_model_architecture('transformer', 'transformer_wmt_en_de_big_t2t')
def transformer_wmt_en_de_big_t2t(args):
args.encoder_normalize_before = getattr(args, 'encoder_normalize_before', True)
args.decoder_normalize_before = getattr(args, 'decoder_normalize_before', True)
args.attention_dropout = getattr(args, 'attention_dropout', 0.1)
args.relu_dropout = getattr(args, 'relu_dropout', 0.1)
transformer_vaswani_wmt_en_de_big(args)
'''
###################################################################################################
### Bert as Decoder
def gelu(x):
    """Gaussian Error Linear Unit: ``x * Phi(x)`` via the exact erf form.

    For information: OpenAI GPT's gelu is slightly different (and gives
    slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    cdf = 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
    return x * cdf
def swish(x):
    """Swish / SiLU activation: ``x * sigmoid(x)``."""
    return torch.sigmoid(x) * x
# Map config.hidden_act activation names to their callables.
ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish}
# class BertConfig(PretrainedConfig):
# r"""
# :class:`~pytorch_transformers.BertConfig` is the configuration class to store the configuration of a
# `BertModel`.
#
#
# Arguments:
# vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `BertModel`.
# hidden_size: Size of the encoder layers and the pooler layer.
# num_hidden_layers: Number of hidden layers in the Transformer encoder.
# num_attention_heads: Number of attention heads for each attention layer in
# the Transformer encoder.
# intermediate_size: The size of the "intermediate" (i.e., feed-forward)
# layer in the Transformer encoder.
# hidden_act: The non-linear activation function (function or string) in the
# encoder and pooler. If string, "gelu", "relu" and "swish" are supported.
# hidden_dropout_prob: The dropout probabilitiy for all fully connected
# layers in the embeddings, encoder, and pooler.
# attention_probs_dropout_prob: The dropout ratio for the attention
# probabilities.
# max_position_embeddings: The maximum sequence length that this model might
# ever be used with. Typically set this to something large just in case
# (e.g., 512 or 1024 or 2048).
# type_vocab_size: The vocabulary size of the `token_type_ids` passed into
# `BertModel`.
# initializer_range: The sttdev of the truncated_normal_initializer for
# initializing all weight matrices.
# layer_norm_eps: The epsilon used by LayerNorm.
# """
# pretrained_config_archive_map = ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP
#
# def __init__(self,
# vocab_size_or_config_json_file=30522,
# hidden_size=768,
# num_hidden_layers=12,
# num_attention_heads=12,
# intermediate_size=3072,
# hidden_act="gelu",
# hidden_dropout_prob=0.1,
# attention_probs_dropout_prob=0.1,
# max_position_embeddings=512,
# type_vocab_size=2,
# initializer_range=0.02,
# layer_norm_eps=1e-12,
# **kwargs):
# super(BertConfig, self).__init__(**kwargs)
# # if isinstance(vocab_size_or_config_json_file, str) or (sys.version_info[0] == 2
# # and isinstance(vocab_size_or_config_json_file, unicode)):
# # with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader:
# # json_config = json.loads(reader.read())
# # for key, value in json_config.items():
# # self.__dict__[key] = value
# # elif isinstance(vocab_size_or_config_json_file, int):
# self.vocab_size = vocab_size_or_config_json_file
# self.hidden_size = hidden_size
# self.num_hidden_layers = num_hidden_layers
# self.num_attention_heads = num_attention_heads
# self.hidden_act = hidden_act
# self.intermediate_size = intermediate_size
# self.hidden_dropout_prob = hidden_dropout_prob
# self.attention_probs_dropout_prob = attention_probs_dropout_prob
# self.max_position_embeddings = max_position_embeddings
# self.type_vocab_size = type_vocab_size
# self.initializer_range = initializer_range
# self.layer_norm_eps = layer_norm_eps
# # else:
# # raise ValueError("First argument must be either a vocabulary size (int)"
# # "or the path to a pretrained model config file (str)")
class BertLayerNorm(nn.Module):
    """Layer normalization in the TF style: epsilon inside the square root."""

    def __init__(self, hidden_size, eps=1e-12):
        super(BertLayerNorm, self).__init__()
        # Learnable affine parameters, initialised to the identity transform.
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.bias = nn.Parameter(torch.zeros(hidden_size))
        self.variance_epsilon = eps

    def forward(self, x):
        mean = x.mean(-1, keepdim=True)
        var = (x - mean).pow(2).mean(-1, keepdim=True)
        normed = (x - mean) / torch.sqrt(var + self.variance_epsilon)
        return self.weight * normed + self.bias
# try:
# from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm
# except (ImportError, AttributeError) as e:
# class BertLayerNorm(nn.Module):
# def __init__(self, hidden_size, eps=1e-12):
# """Construct a layernorm module in the TF style (epsilon inside the square root).
# """
# super(BertLayerNorm, self).__init__()
# self.weight = nn.Parameter(torch.ones(hidden_size))
# self.bias = nn.Parameter(torch.zeros(hidden_size))
# self.variance_epsilon = eps
#
# def forward(self, x):
# u = x.mean(-1, keepdim=True)
# s = (x - u).pow(2).mean(-1, keepdim=True)
# x = (x - u) / torch.sqrt(s + self.variance_epsilon)
# return self.weight * x + self.bias
class BertEmbeddings(nn.Module):
    """Sum word, position and token-type embeddings, then LayerNorm + dropout."""

    def __init__(self, config):
        super(BertEmbeddings, self).__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
        # self.LayerNorm keeps the TensorFlow capitalisation so pretrained
        # checkpoints can be loaded by variable name.
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, input_ids, token_type_ids=None, position_ids=None):
        if position_ids is None:
            # Default to positions 0..seq_len-1, broadcast over the batch.
            seq_length = input_ids.size(1)
            position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)
            position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
        if token_type_ids is None:
            # A single segment (type 0) when no segment ids are given.
            token_type_ids = torch.zeros_like(input_ids)
        summed = (self.word_embeddings(input_ids)
                  + self.position_embeddings(position_ids)
                  + self.token_type_embeddings(token_type_ids))
        return self.dropout(self.LayerNorm(summed))
class BertSelfAttention(nn.Module):
    """Multi-head scaled-dot-product attention over (query, key, value) inputs
    laid out as ``(seq_len, batch, hidden)``.

    NOTE(review): the usual 1/sqrt(head_dim) scaling of attention scores is
    commented out below, and ``self.dropout`` is constructed but unused
    (``F.dropout`` is applied instead) — confirm both are intentional.
    """
    def __init__(self, config):
        super(BertSelfAttention, self).__init__()
        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                "The hidden size (%d) is not a multiple of the number of attention "
                "heads (%d)" % (config.hidden_size, config.num_attention_heads))
        self.output_attentions = True # config.output_attentions
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        # Separate projections for queries, keys and values.
        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)
        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.attention_probs_dropout_prob = config.attention_probs_dropout_prob
    def transpose_for_scores(self, x):
        # Split the last dim into (heads, head_size) and move heads forward.
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)
    def forward(self, query_hidden_states, key_hidden_states, value_hidden_states, attention_mask=None, head_mask=None):
        """Return ``(context, attention_probs)`` (probs included because
        ``output_attentions`` is hard-coded True)."""
        # print('query', query_hidden_states.shape)
        # print('key', key_hidden_states.shape)
        # print('value', value_hidden_states.shape)
        mixed_query_layer = self.query(query_hidden_states)
        mixed_key_layer = self.key(key_hidden_states)
        mixed_value_layer = self.value(value_hidden_states)
        # print('mixed_query_layer', mixed_query_layer.shape)
        # print('mixed_key_layer', mixed_key_layer.shape)
        # print('mixed_value_layer', mixed_value_layer.shape)
        tgt_len, bsz, embed_dim = query_hidden_states.size()
        # query_layer = self.transpose_for_scores(mixed_query_layer)
        # key_layer = self.transpose_for_scores(mixed_key_layer)
        # value_layer = self.transpose_for_scores(mixed_value_layer)
        # Fold heads into the batch dimension (fairseq layout) so that
        # bmm can be used: (bsz * heads, len, head_size).
        query_layer = mixed_query_layer.contiguous().view(tgt_len, bsz * self.num_attention_heads, self.attention_head_size).transpose(0, 1)
        key_layer = mixed_key_layer.contiguous().view(-1, bsz * self.num_attention_heads, self.attention_head_size).transpose(0, 1)
        value_layer = mixed_value_layer.contiguous().view(-1, bsz * self.num_attention_heads, self.attention_head_size).transpose(0, 1)
        # print('query_layer', query_layer.shape)
        # print('key_layer', key_layer.shape)
        # print('value_layer', value_layer.shape)
        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(1, 2))
        # attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
        # attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        # Apply the attention mask is (precomputed for all layers in BertModel forward() function)
        if attention_mask is not None:
            # Additive mask, broadcast over the (bsz * heads) leading dim.
            attention_mask = attention_mask.unsqueeze(0)
            # print('attention_scores', attention_scores.shape)
            # print('attention_mask', attention_mask.shape)
            attention_scores = attention_scores + attention_mask
        # attention_scores = attention_scores
        # Normalize the attention scores to probabilities.
        # attention_probs = nn.Softmax(dim=-1)(attention_scores)
        attention_probs = utils.softmax(
            attention_scores, dim=-1
        ).type_as(attention_scores)
        attention_probs = F.dropout(attention_probs, p=self.attention_probs_dropout_prob, training=self.training)
        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        # attention_probs = self.dropout(attention_probs)
        # print('attention_probs', attention_probs.shape)
        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask
        context_layer = torch.bmm(attention_probs, value_layer)
        # context_layer = torch.matmul(attention_probs, value_layer)
        # print('attention_probs', attention_probs.shape)
        # print('value_layer', value_layer.shape)
        # print('context_layer', context_layer.shape)
        # context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        # new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        # context_layer = context_layer.view(*new_context_layer_shape)
        # print('context layer', context_layer.shape)
        # Restore the (seq_len, batch, hidden) layout.
        context_layer = context_layer.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)
        # print('context layer', context_layer.shape)
        outputs = (context_layer, attention_probs) if self.output_attentions else (context_layer,)
        # exit(1)
        return outputs
class BertSelfOutput(nn.Module):
    """Output projection after self-attention: dense -> dropout -> residual LayerNorm."""

    def __init__(self, config):
        super(BertSelfOutput, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dropout(self.dense(hidden_states))
        # Residual connection followed by layer normalisation.
        return self.LayerNorm(projected + input_tensor)
class BertAttention(nn.Module):
    """Self-attention plus its output projection, with support for pruning
    individual attention heads."""
    def __init__(self, config):
        super(BertAttention, self).__init__()
        self.self = BertSelfAttention(config)
        self.output = BertSelfOutput(config)
    def prune_heads(self, heads):
        """Remove the given head indices from the Q/K/V/output projections
        in place and update the head bookkeeping."""
        if len(heads) == 0:
            return
        # Build a flat boolean mask over all (head, head_dim) columns,
        # zeroing the heads to be removed.
        mask = torch.ones(self.self.num_attention_heads, self.self.attention_head_size)
        for head in heads:
            mask[head] = 0
        mask = mask.view(-1).contiguous().eq(1)
        index = torch.arange(len(mask))[mask].long()
        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        # The output projection loses input columns (dim=1), not rows.
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
        # Update hyper params
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
    def forward(self, query_tensor, key_tensor, value_tensor, attention_mask=None, head_mask=None):
        """Return ``(attention_output, attention_probs...)``; the residual in
        the output block is taken around *query_tensor*."""
        self_outputs = self.self(query_hidden_states=query_tensor,
                                 key_hidden_states=key_tensor,
                                 value_hidden_states=value_tensor,
                                 attention_mask=attention_mask,
                                 head_mask=head_mask)
        attention_output = self.output(self_outputs[0], query_tensor)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs
def prune_linear_layer(layer, index, dim=0):
    """ Prune a linear layer (a model parameters) to keep only entries in index.
        Return the pruned layer as a new layer with requires_grad=True.
        Used to remove heads.
    """
    index = index.to(layer.weight.device)
    kept_weight = layer.weight.index_select(dim, index).clone().detach()
    kept_bias = None
    if layer.bias is not None:
        # Pruning input features (dim == 1) leaves the bias untouched;
        # pruning output features (dim == 0) selects the matching entries.
        kept_bias = layer.bias.clone().detach() if dim == 1 else layer.bias[index].clone().detach()
    out_shape = list(layer.weight.size())
    out_shape[dim] = len(index)
    pruned = nn.Linear(out_shape[1], out_shape[0], bias=layer.bias is not None).to(layer.weight.device)
    # Copy under requires_grad=False, then re-enable gradients.
    pruned.weight.requires_grad = False
    pruned.weight.copy_(kept_weight.contiguous())
    pruned.weight.requires_grad = True
    if layer.bias is not None:
        pruned.bias.requires_grad = False
        pruned.bias.copy_(kept_bias.contiguous())
        pruned.bias.requires_grad = True
    return pruned
class BertIntermediate(nn.Module):
    """Feed-forward expansion (hidden -> intermediate) followed by activation."""

    def __init__(self, config):
        super(BertIntermediate, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        # Resolve the activation by its config name ("gelu", "relu" or "swish").
        self.intermediate_act_fn = ACT2FN[config.hidden_act]

    def forward(self, hidden_states):
        return self.intermediate_act_fn(self.dense(hidden_states))
class BertOutput(nn.Module):
    """Feed-forward contraction (intermediate -> hidden): dense -> dropout ->
    residual LayerNorm."""

    def __init__(self, config):
        super(BertOutput, self).__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        contracted = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(contracted + input_tensor)
class BertDecoderLayer(nn.Module):
    """One decoder layer: BERT-style masked self-attention, then fairseq
    MultiheadAttention over the encoder output, then the BERT FFN block."""
    def __init__(self, config, args):
        super(BertDecoderLayer, self).__init__()
        self.attention = BertAttention(config)
        # self.self_intermediate = BertIntermediate(config)
        # Cross-attention reuses fairseq's MultiheadAttention (supports
        # static_kv/incremental decoding), not the BERT attention module.
        self.encoder_attention = MultiheadAttention(config.hidden_size, config.num_attention_heads,
                                                    dropout=args.attention_dropout,)
        self.intermediate = BertIntermediate(config)
        self.output = BertOutput(config)
        self.need_attn = True
    def forward(self, x, encoder_hidden_states, encoder_padding_mask, self_attn_mask=None, head_mask=None):
        """Return ``(layer_output,) + cross-attention extras``.

        NOTE(review): the cross-attention here always passes
        ``incremental_state=None``, so no decoding state is cached — confirm
        that is intended for inference.
        """
        # We create a 3D attention mask from a 2D tensor mask.
        # Sizes are [batch_size, 1, 1, to_seq_length]
        # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
        # this attention mask is more simple than the triangular masking of causal attention
        # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
        self_attention_outputs = self.attention(query_tensor=x, key_tensor=x, value_tensor=x,
                                                attention_mask=self_attn_mask, head_mask=head_mask)
        self_attention_output = self_attention_outputs[0]
        # self_intermediate_output = self.self_intermediate(self_attention_output)
        attention_outputs = self.encoder_attention(query=self_attention_output, key=encoder_hidden_states,
                                                   value=encoder_hidden_states, key_padding_mask=encoder_padding_mask,
                                                   incremental_state=None,
                                                   static_kv=True,
                                                   need_weights=(not self.training and self.need_attn),)
        attention_output = attention_outputs[0]
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        outputs = (layer_output,) + attention_outputs[1:]  # add attentions if we output them
        return outputs
class BertDecoder(FairseqIncrementalDecoder):
    """
    Bert decoder consisting of *args.decoder_layers* layers. Each layer
    is a :class:`TransformerDecoderLayer`.

    Args:
        args (argparse.Namespace): parsed command-line arguments
        dictionary (~fairseq.data.Dictionary): decoding dictionary
        embed_tokens (torch.nn.Embedding): output embedding
        no_encoder_attn (bool, optional): whether to attend to encoder outputs
            (default: False).
        left_pad (bool, optional): whether the input is left-padded
            (default: False).
        final_norm (bool, optional): apply layer norm to the output of the
            final decoder layer (default: True).
    """
    def __init__(self, args, config, dictionary, embed_tokens, no_encoder_attn=False, left_pad=False, final_norm=True):
        super().__init__(dictionary)
        # BERT-style embeddings replace the usual fairseq embed_tokens /
        # positional-embedding pair (the commented-out code below).
        self.embeddings = BertEmbeddings(config)
        # self.share_input_output_embed = args.share_decoder_input_output_embed
        # All three widths are tied to the BERT hidden size here.
        input_embed_dim = config.hidden_size  # embed_tokens.embedding_dim
        embed_dim = config.hidden_size  # args.decoder_embed_dim
        output_embed_dim = config.hidden_size  # args.decoder_output_dim
        # padding_idx = embed_tokens.padding_idx
        self.max_target_positions = args.max_target_positions
        # self.embed_tokens = embed_tokens
        self.embed_scale = math.sqrt(embed_dim)  # todo: try with input_embed_dim
        # self.project_in_dim = BertLinear(input_embed_dim, embed_dim, bias=False) if embed_dim != input_embed_dim else None
        # self.embed_positions = BertPositionalEmbedding(
        #     args.max_target_positions, embed_dim, padding_idx,
        #     left_pad=left_pad,
        #     learned=args.decoder_learned_pos,
        # ) if not args.no_token_positional_embeddings else None
        # Positions are handled inside BertEmbeddings, so no separate
        # positional embedding module is used.
        self.embed_positions = None
        self.layers = nn.ModuleList([])
        self.layers.extend([
            BertDecoderLayer(config, args)
            for _ in range(config.num_hidden_layers)
        ])
        self.adaptive_softmax = None
        # self.project_out_dim = BertLinear(embed_dim, output_embed_dim, bias=False) \
        #     if embed_dim != output_embed_dim and not args.tie_adaptive_weights else None
        # if args.adaptive_softmax_cutoff is not None:
        #     self.adaptive_softmax = AdaptiveSoftmax(
        #         len(dictionary),
        #         output_embed_dim,
        #         options.eval_str_list(args.adaptive_softmax_cutoff, type=int),
        #         dropout=args.adaptive_softmax_dropout,
        #         adaptive_inputs=embed_tokens if args.tie_adaptive_weights else None,
        #         factor=args.adaptive_softmax_factor,
        #         tie_proj=args.tie_adaptive_proj,
        #     )
        # Output projection back to the vocabulary, shared by all positions.
        self.embed_out = nn.Parameter(torch.Tensor(len(dictionary), output_embed_dim))
        nn.init.normal_(self.embed_out, mean=0, std=output_embed_dim ** -0.5)
        self.register_buffer('version', torch.Tensor([2]))
        self.normalize = args.decoder_normalize_before and final_norm
        if self.normalize:
            self.layer_norm = LayerNorm(embed_dim)

    def forward(self, prev_output_tokens, encoder_out=None, incremental_state=None):
        """
        Args:
            prev_output_tokens (LongTensor): previous decoder outputs of shape
                `(batch, tgt_len)`, for input feeding/teacher forcing
            encoder_out (Tensor, optional): output from the encoder, used for
                encoder-side attention
            incremental_state (dict): dictionary used for storing state during
                :ref:`Incremental decoding`
        Returns:
            tuple:
                - the last decoder layer's output of shape `(batch, tgt_len,
                  vocab)`
                - the last decoder layer's attention weights of shape `(batch,
                  tgt_len, src_len)`
        """
        # print(encoder_out)
        # print(incremental_state)
        # exit(1)
        # embed positions
        # incremental_state = None
        # positions = self.embed_positions(
        #     prev_output_tokens,
        #     incremental_state=incremental_state,
        # ) if self.embed_positions is not None else None
        #
        # if incremental_state is not None:
        #     prev_output_tokens = prev_output_tokens[:, -1:]
        #     if positions is not None:
        #         positions = positions[:, -1:]
        # embed tokens and positions
        # BertEmbeddings supplies token + positional information itself.
        x = self.embeddings(prev_output_tokens)
        # if positions is not None:
        #     x += positions
        # x = F.dropout(x, p=self.dropout, training=self.training)
        # B x T x C -> T x B x C (the decoder layers expect time-major input)
        x = x.transpose(0, 1)
        attn = None
        # print('new batch')
        # print(prev_output_tokens.shape)
        # print('x', x.shape)
        # Keep every intermediate layer output for inspection/debugging.
        inner_states = [x]
        # decoder layers
        for layer in self.layers:
            # print('=========')
            # The causal future mask is only applied when decoding a whole
            # sequence at once; incremental (step-wise) decoding needs none.
            x, attn = layer(
                x,
                encoder_out['encoder_out'] if encoder_out is not None else None,
                encoder_out['encoder_padding_mask'] if encoder_out is not None else None,
                self_attn_mask=self.buffered_future_mask(x) if incremental_state is None else None,
            )
            inner_states.append(x)
        if self.normalize:
            x = self.layer_norm(x)
        # T x B x C -> B x T x C
        x = x.transpose(0, 1)
        # self.project_out_dim = None
        # if self.project_out_dim is not None:
        #     x = self.project_out_dim(x)
        # self.adaptive_softmax = None
        # print(self.share_input_output_embed)
        if self.adaptive_softmax is None:
            # project back to size of vocabulary
            # if self.share_input_output_embed:
            #     x = F.linear(x, self.embed_tokens.weight)
            # else:
            x = F.linear(x, self.embed_out)
        return x, {'attn': attn, 'inner_states': inner_states}

    def max_positions(self):
        """Maximum output length supported by the decoder."""
        # embed_positions is always None here (see __init__), so this
        # effectively returns args.max_target_positions.
        if self.embed_positions is None:
            return self.max_target_positions
        return min(self.max_target_positions, self.embed_positions.max_positions())

    def buffered_future_mask(self, tensor):
        # Return (and cache) an upper-triangular -inf mask of size dim x dim
        # that blocks attention to future positions; `tensor` is time-major,
        # so size(0) is the target length.
        dim = tensor.size(0)
        if not hasattr(self, '_future_mask') or self._future_mask is None or self._future_mask.device != tensor.device:
            self._future_mask = torch.triu(utils.fill_with_neg_inf(tensor.new(dim, dim)), 1)
        if self._future_mask.size(0) < dim:
            # Grow the cached mask in place when a longer sequence shows up.
            self._future_mask = torch.triu(utils.fill_with_neg_inf(self._future_mask.resize_(dim, dim)), 1)
        return self._future_mask[:dim, :dim]

    def upgrade_state_dict_named(self, state_dict, name):
        """Upgrade a (possibly old) state dict for new versions of fairseq."""
        # Sinusoidal positional embeddings are recomputed, not loaded; swap
        # any stored weights for the dummy float-tensor buffer.
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            weights_key = '{}.embed_positions.weights'.format(name)
            if weights_key in state_dict:
                del state_dict[weights_key]
            state_dict['{}.embed_positions._float_tensor'.format(name)] = torch.FloatTensor(1)
        for i in range(len(self.layers)):
            # update layer norms
            # Rename the legacy indexed layer_norms.{0,1,2} keys to their
            # named equivalents.
            layer_norm_map = {
                '0': 'self_attn_layer_norm',
                '1': 'encoder_attn_layer_norm',
                '2': 'final_layer_norm'
            }
            for old, new in layer_norm_map.items():
                for m in ('weight', 'bias'):
                    k = '{}.layers.{}.layer_norms.{}.{}'.format(name, i, old, m)
                    if k in state_dict:
                        state_dict['{}.layers.{}.{}.{}'.format(name, i, new, m)] = state_dict[k]
                        del state_dict[k]
        if utils.item(state_dict.get('{}.version'.format(name), torch.Tensor([1]))[0]) < 2:
            # earlier checkpoints did not normalize after the stack of layers
            self.layer_norm = None
            self.normalize = False
            state_dict['{}.version'.format(name)] = torch.Tensor([1])
        return state_dict
def BertEmbedding(num_embeddings, embedding_dim, padding_idx):
    """Build an embedding table initialized N(0, embedding_dim ** -0.5),
    with the padding row zeroed out."""
    table = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx)
    nn.init.normal_(table.weight, mean=0, std=embedding_dim ** -0.5)
    nn.init.constant_(table.weight[padding_idx], 0)
    return table
def BertLinear(in_features, out_features, bias=True):
    """Build a linear layer with Xavier-uniform weights and, when present,
    a zero-initialized bias."""
    layer = nn.Linear(in_features, out_features, bias)
    nn.init.xavier_uniform_(layer.weight)
    if layer.bias is not None:
        nn.init.constant_(layer.bias, 0.)
    return layer
def BertPositionalEmbedding(num_embeddings, embedding_dim, padding_idx, left_pad, learned=False):
    """Build a positional embedding: learned (normal-init with a zeroed
    padding row) or sinusoidal. The table is oversized by padding_idx + 1
    so real positions start after the padding offset."""
    table_size = num_embeddings + padding_idx + 1
    if not learned:
        return SinusoidalPositionalEmbedding(embedding_dim, padding_idx, left_pad, table_size)
    pos = LearnedPositionalEmbedding(table_size, embedding_dim, padding_idx, left_pad)
    nn.init.normal_(pos.weight, mean=0, std=embedding_dim ** -0.5)
    nn.init.constant_(pos.weight[padding_idx], 0)
    return pos
|
[
"torch.nn.Dropout",
"argparse.Namespace",
"fairseq.utils.parse_embedding",
"torch.bmm",
"torch.sqrt",
"torch.nn.Embedding",
"fairseq.utils.softmax",
"torch.nn.functional.dropout",
"torch.cuda.device_count",
"torch.nn.init.constant_",
"torch.arange",
"fairseq.modules.LearnedPositionalEmbedding",
"fairseq.modules.SinusoidalPositionalEmbedding",
"torch.ones",
"json.loads",
"torch.FloatTensor",
"fairseq.modules.LayerNorm",
"torch.Tensor",
"torch.nn.Linear",
"torch.zeros",
"pytorch_transformers.RobertaConfig.from_pretrained",
"math.sqrt",
"torch.nn.ModuleList",
"torch.zeros_like",
"torch.nn.init.xavier_uniform_",
"pytorch_transformers.RobertaModel.from_pretrained",
"fairseq.modules.MultiheadAttention",
"torch.nn.functional.linear",
"fairseq.utils.load_embedding",
"torch.sigmoid",
"torch.nn.init.normal_",
"fairseq.options.eval_str_list"
] |
[((35116, 35184), 'torch.nn.Embedding', 'nn.Embedding', (['num_embeddings', 'embedding_dim'], {'padding_idx': 'padding_idx'}), '(num_embeddings, embedding_dim, padding_idx=padding_idx)\n', (35128, 35184), True, 'import torch.nn as nn\n'), ((35189, 35249), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight'], {'mean': '(0)', 'std': '(embedding_dim ** -0.5)'}), '(m.weight, mean=0, std=embedding_dim ** -0.5)\n', (35204, 35249), True, 'import torch.nn as nn\n'), ((35254, 35297), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight[padding_idx]', '(0)'], {}), '(m.weight[padding_idx], 0)\n', (35271, 35297), True, 'import torch.nn as nn\n'), ((35371, 35413), 'torch.nn.Linear', 'nn.Linear', (['in_features', 'out_features', 'bias'], {}), '(in_features, out_features, bias)\n', (35380, 35413), True, 'import torch.nn as nn\n'), ((35418, 35451), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['m.weight'], {}), '(m.weight)\n', (35441, 35451), True, 'import torch.nn as nn\n'), ((71677, 71745), 'torch.nn.Embedding', 'nn.Embedding', (['num_embeddings', 'embedding_dim'], {'padding_idx': 'padding_idx'}), '(num_embeddings, embedding_dim, padding_idx=padding_idx)\n', (71689, 71745), True, 'import torch.nn as nn\n'), ((71750, 71810), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight'], {'mean': '(0)', 'std': '(embedding_dim ** -0.5)'}), '(m.weight, mean=0, std=embedding_dim ** -0.5)\n', (71765, 71810), True, 'import torch.nn as nn\n'), ((71815, 71858), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight[padding_idx]', '(0)'], {}), '(m.weight[padding_idx], 0)\n', (71832, 71858), True, 'import torch.nn as nn\n'), ((71936, 71978), 'torch.nn.Linear', 'nn.Linear', (['in_features', 'out_features', 'bias'], {}), '(in_features, out_features, bias)\n', (71945, 71978), True, 'import torch.nn as nn\n'), ((71983, 72016), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['m.weight'], {}), '(m.weight)\n', (72006, 72016), True, 'import torch.nn as 
nn\n'), ((10872, 10888), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (10882, 10888), False, 'import json\n'), ((11658, 11683), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (11681, 11683), False, 'import torch\n'), ((12100, 12149), 'pytorch_transformers.RobertaConfig.from_pretrained', 'RobertaConfig.from_pretrained', (['args.roberta_model'], {}), '(args.roberta_model)\n', (12129, 12149), False, 'from pytorch_transformers import RobertaModel, RobertaConfig\n'), ((12173, 12236), 'pytorch_transformers.RobertaModel.from_pretrained', 'RobertaModel.from_pretrained', (['args.roberta_model'], {'config': 'config'}), '(args.roberta_model, config=config)\n', (12201, 12236), False, 'from pytorch_transformers import RobertaModel, RobertaConfig\n'), ((12699, 12719), 'math.sqrt', 'math.sqrt', (['embed_dim'], {}), '(embed_dim)\n', (12708, 12719), False, 'import math\n'), ((19721, 19741), 'math.sqrt', 'math.sqrt', (['embed_dim'], {}), '(embed_dim)\n', (19730, 19741), False, 'import math\n'), ((20174, 20191), 'torch.nn.ModuleList', 'nn.ModuleList', (['[]'], {}), '([])\n', (20187, 20191), True, 'import torch.nn as nn\n'), ((23066, 23118), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, training=self.training)\n', (23075, 23118), True, 'import torch.nn.functional as F\n'), ((27269, 27370), 'fairseq.modules.MultiheadAttention', 'MultiheadAttention', (['self.embed_dim', 'args.encoder_attention_heads'], {'dropout': 'args.attention_dropout'}), '(self.embed_dim, args.encoder_attention_heads, dropout=\n args.attention_dropout)\n', (27287, 27370), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((28362, 28414), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, 
training=self.training)\n', (28371, 28414), True, 'import torch.nn.functional as F\n'), ((28611, 28668), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.relu_dropout', 'training': 'self.training'}), '(x, p=self.relu_dropout, training=self.training)\n', (28620, 28668), True, 'import torch.nn.functional as F\n'), ((28705, 28757), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, training=self.training)\n', (28714, 28757), True, 'import torch.nn.functional as F\n'), ((29972, 30073), 'fairseq.modules.MultiheadAttention', 'MultiheadAttention', (['self.embed_dim', 'args.decoder_attention_heads'], {'dropout': 'args.attention_dropout'}), '(self.embed_dim, args.decoder_attention_heads, dropout=\n args.attention_dropout)\n', (29990, 30073), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((30293, 30318), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embed_dim'], {}), '(self.embed_dim)\n', (30302, 30318), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((30866, 30891), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embed_dim'], {}), '(self.embed_dim)\n', (30875, 30891), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((32740, 32792), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, training=self.training)\n', (32749, 32792), True, 'import torch.nn.functional as F\n'), ((34234, 34291), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.relu_dropout', 'training': 'self.training'}), '(x, 
p=self.relu_dropout, training=self.training)\n', (34243, 34291), True, 'import torch.nn.functional as F\n'), ((34328, 34380), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, training=self.training)\n', (34337, 34380), True, 'import torch.nn.functional as F\n'), ((35473, 35503), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0.0)'], {}), '(m.bias, 0.0)\n', (35490, 35503), True, 'import torch.nn as nn\n'), ((35640, 35742), 'fairseq.modules.LearnedPositionalEmbedding', 'LearnedPositionalEmbedding', (['(num_embeddings + padding_idx + 1)', 'embedding_dim', 'padding_idx', 'left_pad'], {}), '(num_embeddings + padding_idx + 1, embedding_dim,\n padding_idx, left_pad)\n', (35666, 35742), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((35747, 35807), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight'], {'mean': '(0)', 'std': '(embedding_dim ** -0.5)'}), '(m.weight, mean=0, std=embedding_dim ** -0.5)\n', (35762, 35807), True, 'import torch.nn as nn\n'), ((35816, 35859), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight[padding_idx]', '(0)'], {}), '(m.weight[padding_idx], 0)\n', (35833, 35859), True, 'import torch.nn as nn\n'), ((35882, 35988), 'fairseq.modules.SinusoidalPositionalEmbedding', 'SinusoidalPositionalEmbedding', (['embedding_dim', 'padding_idx', 'left_pad', '(num_embeddings + padding_idx + 1)'], {}), '(embedding_dim, padding_idx, left_pad, \n num_embeddings + padding_idx + 1)\n', (35911, 35988), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((44775, 44791), 'torch.sigmoid', 'torch.sigmoid', (['x'], {}), '(x)\n', (44788, 44791), False, 'import torch\n'), ((50293, 50359), 
'torch.nn.Embedding', 'nn.Embedding', (['config.vocab_size', 'config.hidden_size'], {'padding_idx': '(0)'}), '(config.vocab_size, config.hidden_size, padding_idx=0)\n', (50305, 50359), True, 'import torch.nn as nn\n'), ((50395, 50459), 'torch.nn.Embedding', 'nn.Embedding', (['config.max_position_embeddings', 'config.hidden_size'], {}), '(config.max_position_embeddings, config.hidden_size)\n', (50407, 50459), True, 'import torch.nn as nn\n'), ((50497, 50553), 'torch.nn.Embedding', 'nn.Embedding', (['config.type_vocab_size', 'config.hidden_size'], {}), '(config.type_vocab_size, config.hidden_size)\n', (50509, 50553), True, 'import torch.nn as nn\n'), ((50814, 50852), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (50824, 50852), True, 'import torch.nn as nn\n'), ((52370, 52419), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {}), '(config.hidden_size, self.all_head_size)\n', (52379, 52419), True, 'import torch.nn as nn\n'), ((52439, 52488), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {}), '(config.hidden_size, self.all_head_size)\n', (52448, 52488), True, 'import torch.nn as nn\n'), ((52510, 52559), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {}), '(config.hidden_size, self.all_head_size)\n', (52519, 52559), True, 'import torch.nn as nn\n'), ((52584, 52631), 'torch.nn.Dropout', 'nn.Dropout', (['config.attention_probs_dropout_prob'], {}), '(config.attention_probs_dropout_prob)\n', (52594, 52631), True, 'import torch.nn as nn\n'), ((55404, 55496), 'torch.nn.functional.dropout', 'F.dropout', (['attention_probs'], {'p': 'self.attention_probs_dropout_prob', 'training': 'self.training'}), '(attention_probs, p=self.attention_probs_dropout_prob, training=\n self.training)\n', (55413, 55496), True, 'import torch.nn.functional as F\n'), ((55920, 55959), 'torch.bmm', 'torch.bmm', (['attention_probs', 'value_layer'], 
{}), '(attention_probs, value_layer)\n', (55929, 55959), False, 'import torch\n'), ((56902, 56951), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.hidden_size'], {}), '(config.hidden_size, config.hidden_size)\n', (56911, 56951), True, 'import torch.nn as nn\n'), ((57061, 57099), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (57071, 57099), True, 'import torch.nn as nn\n'), ((57652, 57724), 'torch.ones', 'torch.ones', (['self.self.num_attention_heads', 'self.self.attention_head_size'], {}), '(self.self.num_attention_heads, self.self.attention_head_size)\n', (57662, 57724), False, 'import torch\n'), ((60137, 60192), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.intermediate_size'], {}), '(config.hidden_size, config.intermediate_size)\n', (60146, 60192), True, 'import torch.nn as nn\n'), ((60827, 60882), 'torch.nn.Linear', 'nn.Linear', (['config.intermediate_size', 'config.hidden_size'], {}), '(config.intermediate_size, config.hidden_size)\n', (60836, 60882), True, 'import torch.nn as nn\n'), ((60992, 61030), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (61002, 61030), True, 'import torch.nn as nn\n'), ((61550, 61653), 'fairseq.modules.MultiheadAttention', 'MultiheadAttention', (['config.hidden_size', 'config.num_attention_heads'], {'dropout': 'args.attention_dropout'}), '(config.hidden_size, config.num_attention_heads, dropout=\n args.attention_dropout)\n', (61568, 61653), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((64800, 64820), 'math.sqrt', 'math.sqrt', (['embed_dim'], {}), '(embed_dim)\n', (64809, 64820), False, 'import math\n'), ((65308, 65325), 'torch.nn.ModuleList', 'nn.ModuleList', (['[]'], {}), '([])\n', (65321, 65325), True, 'import torch.nn as nn\n'), ((66304, 
66373), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.embed_out'], {'mean': '(0)', 'std': '(output_embed_dim ** -0.5)'}), '(self.embed_out, mean=0, std=output_embed_dim ** -0.5)\n', (66319, 66373), True, 'import torch.nn as nn\n'), ((72038, 72068), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0.0)'], {}), '(m.bias, 0.0)\n', (72055, 72068), True, 'import torch.nn as nn\n'), ((72209, 72311), 'fairseq.modules.LearnedPositionalEmbedding', 'LearnedPositionalEmbedding', (['(num_embeddings + padding_idx + 1)', 'embedding_dim', 'padding_idx', 'left_pad'], {}), '(num_embeddings + padding_idx + 1, embedding_dim,\n padding_idx, left_pad)\n', (72235, 72311), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((72316, 72376), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight'], {'mean': '(0)', 'std': '(embedding_dim ** -0.5)'}), '(m.weight, mean=0, std=embedding_dim ** -0.5)\n', (72331, 72376), True, 'import torch.nn as nn\n'), ((72385, 72428), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight[padding_idx]', '(0)'], {}), '(m.weight[padding_idx], 0)\n', (72402, 72428), True, 'import torch.nn as nn\n'), ((72451, 72557), 'fairseq.modules.SinusoidalPositionalEmbedding', 'SinusoidalPositionalEmbedding', (['embedding_dim', 'padding_idx', 'left_pad', '(num_embeddings + padding_idx + 1)'], {}), '(embedding_dim, padding_idx, left_pad, \n num_embeddings + padding_idx + 1)\n', (72480, 72557), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((8763, 8782), 'argparse.Namespace', 'Namespace', ([], {}), '(**config)\n', (8772, 8782), False, 'from argparse import Namespace\n'), ((13202, 13219), 'torch.Tensor', 'torch.Tensor', (['[2]'], {}), '([2])\n', (13214, 13219), False, 'import 
torch\n'), ((13333, 13353), 'fairseq.modules.LayerNorm', 'LayerNorm', (['embed_dim'], {}), '(embed_dim)\n', (13342, 13353), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((18018, 18038), 'torch.FloatTensor', 'torch.FloatTensor', (['(1)'], {}), '(1)\n', (18035, 18038), False, 'import torch\n'), ((18351, 18368), 'torch.Tensor', 'torch.Tensor', (['[1]'], {}), '([1])\n', (18363, 18368), False, 'import torch\n'), ((21318, 21335), 'torch.Tensor', 'torch.Tensor', (['[2]'], {}), '([2])\n', (21330, 21335), False, 'import torch\n'), ((21464, 21484), 'fairseq.modules.LayerNorm', 'LayerNorm', (['embed_dim'], {}), '(embed_dim)\n', (21473, 21484), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((25546, 25566), 'torch.FloatTensor', 'torch.FloatTensor', (['(1)'], {}), '(1)\n', (25563, 25566), False, 'import torch\n'), ((26470, 26487), 'torch.Tensor', 'torch.Tensor', (['[1]'], {}), '([1])\n', (26482, 26487), False, 'import torch\n'), ((30479, 30580), 'fairseq.modules.MultiheadAttention', 'MultiheadAttention', (['self.embed_dim', 'args.decoder_attention_heads'], {'dropout': 'args.attention_dropout'}), '(self.embed_dim, args.decoder_attention_heads, dropout=\n args.attention_dropout)\n', (30497, 30580), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((30666, 30691), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embed_dim'], {}), '(self.embed_dim)\n', (30675, 30691), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((33930, 33982), 
'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'self.dropout', 'training': 'self.training'}), '(x, p=self.dropout, training=self.training)\n', (33939, 33982), True, 'import torch.nn.functional as F\n'), ((48906, 48929), 'torch.ones', 'torch.ones', (['hidden_size'], {}), '(hidden_size)\n', (48916, 48929), False, 'import torch\n'), ((48964, 48988), 'torch.zeros', 'torch.zeros', (['hidden_size'], {}), '(hidden_size)\n', (48975, 48988), False, 'import torch\n'), ((49162, 49199), 'torch.sqrt', 'torch.sqrt', (['(s + self.variance_epsilon)'], {}), '(s + self.variance_epsilon)\n', (49172, 49199), False, 'import torch\n'), ((51027, 51094), 'torch.arange', 'torch.arange', (['seq_length'], {'dtype': 'torch.long', 'device': 'input_ids.device'}), '(seq_length, dtype=torch.long, device=input_ids.device)\n', (51039, 51094), False, 'import torch\n'), ((51233, 51260), 'torch.zeros_like', 'torch.zeros_like', (['input_ids'], {}), '(input_ids)\n', (51249, 51260), False, 'import torch\n'), ((59595, 59659), 'torch.nn.Linear', 'nn.Linear', (['new_size[1]', 'new_size[0]'], {'bias': '(layer.bias is not None)'}), '(new_size[1], new_size[0], bias=layer.bias is not None)\n', (59604, 59659), True, 'import torch.nn as nn\n'), ((66414, 66431), 'torch.Tensor', 'torch.Tensor', (['[2]'], {}), '([2])\n', (66426, 66431), False, 'import torch\n'), ((66560, 66580), 'fairseq.modules.LayerNorm', 'LayerNorm', (['embed_dim'], {}), '(embed_dim)\n', (66569, 66580), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((69356, 69383), 'torch.nn.functional.linear', 'F.linear', (['x', 'self.embed_out'], {}), '(x, self.embed_out)\n', (69364, 69383), True, 'import torch.nn.functional as F\n'), ((70636, 70656), 'torch.FloatTensor', 'torch.FloatTensor', (['(1)'], {}), '(1)\n', (70653, 70656), False, 'import torch\n'), ((71560, 71577), 'torch.Tensor', 'torch.Tensor', 
(['[1]'], {}), '([1])\n', (71572, 71577), False, 'import torch\n'), ((7055, 7082), 'fairseq.utils.parse_embedding', 'utils.parse_embedding', (['path'], {}), '(path)\n', (7076, 7082), False, 'from fairseq import options, utils\n'), ((7099, 7148), 'fairseq.utils.load_embedding', 'utils.load_embedding', (['embed_dict', 'dictionary', 'emb'], {}), '(embed_dict, dictionary, emb)\n', (7119, 7148), False, 'from fairseq import options, utils\n'), ((20738, 20799), 'fairseq.options.eval_str_list', 'options.eval_str_list', (['args.adaptive_softmax_cutoff'], {'type': 'int'}), '(args.adaptive_softmax_cutoff, type=int)\n', (20759, 20799), False, 'from fairseq import options, utils\n'), ((21208, 21277), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.embed_out'], {'mean': '(0)', 'std': '(output_embed_dim ** -0.5)'}), '(self.embed_out, mean=0, std=output_embed_dim ** -0.5)\n', (21223, 21277), True, 'import torch.nn as nn\n'), ((24190, 24227), 'torch.nn.functional.linear', 'F.linear', (['x', 'self.embed_tokens.weight'], {}), '(x, self.embed_tokens.weight)\n', (24198, 24227), True, 'import torch.nn.functional as F\n'), ((24266, 24293), 'torch.nn.functional.linear', 'F.linear', (['x', 'self.embed_out'], {}), '(x, self.embed_out)\n', (24274, 24293), True, 'import torch.nn.functional as F\n'), ((27727, 27752), 'fairseq.modules.LayerNorm', 'LayerNorm', (['self.embed_dim'], {}), '(self.embed_dim)\n', (27736, 27752), False, 'from fairseq.modules import AdaptiveInput, AdaptiveSoftmax, CharacterTokenEmbedder, LayerNorm, LearnedPositionalEmbedding, MultiheadAttention, SinusoidalPositionalEmbedding\n'), ((55290, 55329), 'fairseq.utils.softmax', 'utils.softmax', (['attention_scores'], {'dim': '(-1)'}), '(attention_scores, dim=-1)\n', (55303, 55329), False, 'from fairseq import options, utils\n'), ((44727, 44741), 'math.sqrt', 'math.sqrt', (['(2.0)'], {}), '(2.0)\n', (44736, 44741), False, 'import math\n'), ((18137, 18154), 'torch.Tensor', 'torch.Tensor', (['[1]'], {}), '([1])\n', (18149, 
18154), False, 'import torch\n'), ((26242, 26259), 'torch.Tensor', 'torch.Tensor', (['[1]'], {}), '([1])\n', (26254, 26259), False, 'import torch\n'), ((71332, 71349), 'torch.Tensor', 'torch.Tensor', (['[1]'], {}), '([1])\n', (71344, 71349), False, 'import torch\n')]
|
"""
Create MDX View on }ClientGroups cube and query data through it.
IMPORTANT: MDX Views can not be seen through Architect/Perspectives.
"""
import configparser
import uuid
from TM1py.Objects import MDXView
from TM1py.Services import TM1Service
config = configparser.ConfigParser()
# NOTE: keeping credentials in a plain config file is acceptable only for
# testing; prefer CAM with SSO, or keyring / the OS credential manager. See:
# Samples/credentials_best_practice.py
config.read(r'..\config.ini')

with TM1Service(**config['tm1srv01']) as tm1:
    # Unique suffix so repeated runs do not collide on the view name.
    suffix = str(uuid.uuid4())
    # MDX over all clients (rows) x all groups (columns) of }ClientGroups.
    mdx = (
        "SELECT "
        "NON EMPTY {TM1SUBSETALL( [}Clients] )} on ROWS, "
        "NON EMPTY {TM1SUBSETALL( [}Groups] )} ON COLUMNS "
        "FROM [}ClientGroups]"
    )
    view = MDXView(cube_name='}ClientGroups', view_name='TM1py_' + suffix, MDX=mdx)
    # Register the view on the server, then pull its cell set back.
    tm1.cubes.views.create(view=view)
    content = tm1.cubes.cells.execute_view(cube_name=view.cube, view_name=view.name)
    print(content)
|
[
"uuid.uuid4",
"configparser.ConfigParser",
"TM1py.Objects.MDXView",
"TM1py.Services.TM1Service"
] |
[((258, 285), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (283, 285), False, 'import configparser\n'), ((564, 596), 'TM1py.Services.TM1Service', 'TM1Service', ([], {}), "(**config['tm1srv01'])\n", (574, 596), False, 'from TM1py.Services import TM1Service\n'), ((881, 960), 'TM1py.Objects.MDXView', 'MDXView', ([], {'cube_name': '"""}ClientGroups"""', 'view_name': "('TM1py_' + random_string)", 'MDX': 'mdx'}), "(cube_name='}ClientGroups', view_name='TM1py_' + random_string, MDX=mdx)\n", (888, 960), False, 'from TM1py.Objects import MDXView\n'), ((647, 659), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (657, 659), False, 'import uuid\n')]
|
import numpy as np
import torch
import torch.nn as nn
from torch import optim
from torch.utils.data import DataLoader, ConcatDataset
from argparse import ArgumentParser
from models.psp.pspnet import PSPNet
from models.sobel_op import SobelComputer
from dataset import OnlineTransformDataset
from util.logger import BoardLogger
from util.model_saver import ModelSaver
from util.hyper_para import HyperParameters
from util.log_integrator import Integrator
from util.metrics_compute import compute_loss_and_metrics, iou_hooks_to_be_used
from util.image_saver import vis_prediction
import time
import os
import datetime
torch.backends.cudnn.benchmark = True
# Parse command line arguments
para = HyperParameters()
para.parse()
parser = ArgumentParser()
parser.add_argument('data_path', help='Image path')
args = parser.parse_args()
# Logging
if para['id'].lower() != 'null':
long_id = '%s_%s' % (para['id'],datetime.datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
else:
long_id = None
logger = BoardLogger(long_id)
logger.log_string('hyperpara', str(para))
print('CUDA Device count: ', torch.cuda.device_count())
# Construct model
model = PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet50')
model = nn.DataParallel(
model.cuda(), device_ids=[0,1,2,3]
)
if para['load'] is not None:
model.load_state_dict(torch.load(para['load']))
optimizer = optim.Adam(model.parameters(), lr=para['lr'], weight_decay=para['weight_decay'])
data_dir = args.data_path
dataset = OnlineTransformDataset(data_dir, method=1, perturb=True)
print('dataset size: ', len(dataset))
#train_dataset = ConcatDataset([fss_dataset, duts_tr_dataset, duts_te_dataset, ecssd_dataset, msra_dataset])
#train_dataset = ConcatDataset([ duts_tr_dataset])
# For randomness: https://github.com/pytorch/pytorch/issues/5059
def worker_init_fn(worker_id):
np.random.seed(np.random.get_state()[1][0] + worker_id)
# Dataloaders, multi-process data loading
train_loader = DataLoader(dataset, para['batch_size'], shuffle=True, num_workers=8,
worker_init_fn=worker_init_fn, drop_last=True, pin_memory=True)
sobel_compute = SobelComputer()
# Learning rate decay scheduling
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, para['steps'], para['gamma'])
saver = ModelSaver(long_id)
report_interval = 50
save_im_interval = 800
total_epoch = int(para['iterations']/len(train_loader) + 0.5)
print('Actual training epoch: ', total_epoch)
train_integrator = Integrator(logger)
train_integrator.add_hook(iou_hooks_to_be_used)
total_iter = 0
last_time = 0
for e in range(total_epoch):
np.random.seed() # reset seed
epoch_start_time = time.time()
# Train loop
model = model.train()
for im, seg, gt in train_loader:
im, seg, gt = im.cuda(), seg.cuda(), gt.cuda()
total_iter += 1
if total_iter % 5000 == 0:
saver.save_model(model, total_iter)
images = model(im, seg)
images['im'] = im
images['seg'] = seg
images['gt'] = gt
sobel_compute.compute_edges(images)
loss_and_metrics = compute_loss_and_metrics(images, para)
train_integrator.add_dict(loss_and_metrics)
optimizer.zero_grad()
(loss_and_metrics['total_loss']).backward()
optimizer.step()
if total_iter % report_interval == 0:
logger.log_scalar('train/lr', scheduler.get_lr()[0], total_iter)
train_integrator.finalize('train', total_iter)
train_integrator.reset_except_hooks()
# Need to put step AFTER get_lr() for correct logging, see issue #22107 in PyTorch
scheduler.step()
if total_iter % save_im_interval == 0:
predict_vis = vis_prediction(images)
logger.log_cv2('train/predict', predict_vis, total_iter)
# Final save!
saver.save_model(model, total_iter)
|
[
"util.logger.BoardLogger",
"numpy.random.seed",
"models.psp.pspnet.PSPNet",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"numpy.random.get_state",
"util.image_saver.vis_prediction",
"torch.load",
"dataset.OnlineTransformDataset",
"torch.cuda.device_count",
"util.model_saver.ModelSaver",
"time.time",
"models.sobel_op.SobelComputer",
"util.hyper_para.HyperParameters",
"util.metrics_compute.compute_loss_and_metrics",
"datetime.datetime.now",
"util.log_integrator.Integrator",
"torch.optim.lr_scheduler.MultiStepLR"
] |
[((696, 713), 'util.hyper_para.HyperParameters', 'HyperParameters', ([], {}), '()\n', (711, 713), False, 'from util.hyper_para import HyperParameters\n'), ((737, 753), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (751, 753), False, 'from argparse import ArgumentParser\n'), ((1002, 1022), 'util.logger.BoardLogger', 'BoardLogger', (['long_id'], {}), '(long_id)\n', (1013, 1022), False, 'from util.logger import BoardLogger\n'), ((1149, 1240), 'models.psp.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(2048)', 'deep_features_size': '(1024)', 'backend': '"""resnet50"""'}), "(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend=\n 'resnet50')\n", (1155, 1240), False, 'from models.psp.pspnet import PSPNet\n'), ((1524, 1580), 'dataset.OnlineTransformDataset', 'OnlineTransformDataset', (['data_dir'], {'method': '(1)', 'perturb': '(True)'}), '(data_dir, method=1, perturb=True)\n', (1546, 1580), False, 'from dataset import OnlineTransformDataset\n'), ((1998, 2134), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset', "para['batch_size']"], {'shuffle': '(True)', 'num_workers': '(8)', 'worker_init_fn': 'worker_init_fn', 'drop_last': '(True)', 'pin_memory': '(True)'}), "(dataset, para['batch_size'], shuffle=True, num_workers=8,\n worker_init_fn=worker_init_fn, drop_last=True, pin_memory=True)\n", (2008, 2134), False, 'from torch.utils.data import DataLoader, ConcatDataset\n'), ((2176, 2191), 'models.sobel_op.SobelComputer', 'SobelComputer', ([], {}), '()\n', (2189, 2191), False, 'from models.sobel_op import SobelComputer\n'), ((2238, 2309), 'torch.optim.lr_scheduler.MultiStepLR', 'optim.lr_scheduler.MultiStepLR', (['optimizer', "para['steps']", "para['gamma']"], {}), "(optimizer, para['steps'], para['gamma'])\n", (2268, 2309), False, 'from torch import optim\n'), ((2319, 2338), 'util.model_saver.ModelSaver', 'ModelSaver', (['long_id'], {}), '(long_id)\n', (2329, 2338), False, 'from util.model_saver import 
ModelSaver\n'), ((2512, 2530), 'util.log_integrator.Integrator', 'Integrator', (['logger'], {}), '(logger)\n', (2522, 2530), False, 'from util.log_integrator import Integrator\n'), ((1095, 1120), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1118, 1120), False, 'import torch\n'), ((2641, 2657), 'numpy.random.seed', 'np.random.seed', ([], {}), '()\n', (2655, 2657), True, 'import numpy as np\n'), ((2694, 2705), 'time.time', 'time.time', ([], {}), '()\n', (2703, 2705), False, 'import time\n'), ((1366, 1390), 'torch.load', 'torch.load', (["para['load']"], {}), "(para['load'])\n", (1376, 1390), False, 'import torch\n'), ((3137, 3175), 'util.metrics_compute.compute_loss_and_metrics', 'compute_loss_and_metrics', (['images', 'para'], {}), '(images, para)\n', (3161, 3175), False, 'from util.metrics_compute import compute_loss_and_metrics, iou_hooks_to_be_used\n'), ((3760, 3782), 'util.image_saver.vis_prediction', 'vis_prediction', (['images'], {}), '(images)\n', (3774, 3782), False, 'from util.image_saver import vis_prediction\n'), ((913, 936), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (934, 936), False, 'import datetime\n'), ((1899, 1920), 'numpy.random.get_state', 'np.random.get_state', ([], {}), '()\n', (1918, 1920), True, 'import numpy as np\n')]
|
from __future__ import absolute_import
import httpretty
import pygerduty
import pygerduty.v2
###################
# Version 1 Tests #
###################
@httpretty.activate
def test_get_user_v1():
body = open('tests/fixtures/user_v1.json').read()
httpretty.register_uri(
httpretty.GET, "https://contosso.pagerduty.com/api/v1/users/PIJ90N7",
body=body, status=200)
p = pygerduty.PagerDuty("contosso", "password")
user = p.users.show("PIJ90N7")
assert user.id == "PIJ90N7"
assert user.name == "<NAME>"
assert user.role == "admin"
@httpretty.activate
def test_list_user_contact_methods_v1():
user_body = open('tests/fixtures/user_v1.json').read()
contact_body = open('tests/fixtures/contacts_v1.json').read()
httpretty.register_uri(
httpretty.GET, "https://contosso.pagerduty.com/api/v1/users/PIJ90N7",
body=user_body, status=200),
httpretty.register_uri(
httpretty.GET, "https://contosso.pagerduty.com/api/v1/users/PIJ90N7/contact_methods",
body=contact_body, status=200)
p = pygerduty.PagerDuty("contosso", "password")
user = p.users.show("PIJ90N7")
contact_methods = [c for c in user.contact_methods.list()]
assert len(contact_methods) == 3
assert len([c for c in contact_methods if c.type == "email"]) == 1
assert len([c for c in contact_methods if c.type == "phone"]) == 1
assert len([c for c in contact_methods if c.type == "SMS"]) == 1
###################
# Version 2 Tests #
###################
@httpretty.activate
def test_get_user_v2():
body = open('tests/fixtures/user_v2.json').read()
httpretty.register_uri(
httpretty.GET, "https://api.pagerduty.com/users/PXPGF42",
body=body, status=200)
p = pygerduty.v2.PagerDuty("password")
user = p.users.show("PXPGF42")
assert user.id == "PXPGF42"
assert user.name == "<NAME>"
assert user.role == "admin"
assert user.self_ == 'https://api.pagerduty.com/users/PXPGF42'
@httpretty.activate
def test_list_user_contact_methods_v2():
user_body = open('tests/fixtures/user_v2.json').read()
contact_body = open('tests/fixtures/contacts_v2.json').read()
httpretty.register_uri(
httpretty.GET, "https://api.pagerduty.com/users/PXPGF42",
body=user_body, status=200)
httpretty.register_uri(
httpretty.GET, "https://api.pagerduty.com/users/PXPGF42/contact_methods",
body=contact_body, status=200)
p = pygerduty.v2.PagerDuty("password")
user = p.users.show("PXPGF42")
contact_methods = [c for c in user.contact_methods.list()]
assert len(contact_methods) == 3
assert len([c for c in contact_methods if c.type == "email"]) == 1
assert len([c for c in contact_methods if c.type == "phone"]) == 1
assert len([c for c in contact_methods if c.type == "SMS"]) == 1
assert user.self_ == 'https://api.pagerduty.com/users/PXPGF42'
@httpretty.activate
def test_user_notification_rules_v2():
user_body = open('tests/fixtures/user_v2.json').read()
notification_body = open('tests/fixtures/notification_v2.json').read()
httpretty.register_uri(
httpretty.GET, "https://api.pagerduty.com/users/PXPGF42",
body=user_body, status=200)
httpretty.register_uri(
httpretty.GET, "https://api.pagerduty.com/users/PXPGF42/notification_rules",
body=notification_body, status=200)
p = pygerduty.v2.PagerDuty("password")
user = p.users.show("PXPGF42")
notification_rules = [n for n in user.notification_rules.list()]
assert len(notification_rules) == 1
assert len([n for n in notification_rules if n.type == "assignment_notification_rule"]) == 1
assert user.self_ == "https://api.pagerduty.com/users/PXPGF42"
def test_clean_response():
mock_response = {
"user" : {
"id": "PHDGK84",
"type": "user",
"self": "https://api.pagerduty.com/users/PHDGK84",
"name": "Snoopy",
"contact_methods": [
{
"address": "<EMAIL>",
"id": "PZMO0JF",
"self": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
"label": "Default"
},
{
"address": "8928393498",
"id": "PZMN843",
"self": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMN843",
"label": "Default"
}
],
"notification_rules": [
{
"id": "P8WETWW",
"contact_method": {
"id": "PZMO0JF",
"self": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
}
}
]
}
}
clean_response = pygerduty.common.clean_response(mock_response)
assert clean_response == {
"user" : {
"id": "PHDGK84",
"type": "user",
"self_": "https://api.pagerduty.com/users/PHDGK84",
"name": "Snoopy",
"contact_methods": [
{
"address": "<EMAIL>",
"id": "PZMO0JF",
"self_": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
"label": "Default"
},
{
"address": "8928393498",
"id": "PZMN843",
"self_": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMN843",
"label": "Default"
}
],
"notification_rules": [
{
"id": "P8WETWW",
"contact_method": {
"id": "PZMO0JF",
"self_": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
}
}
]
}
}
|
[
"pygerduty.PagerDuty",
"pygerduty.v2.PagerDuty",
"httpretty.register_uri",
"pygerduty.common.clean_response"
] |
[((258, 381), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://contosso.pagerduty.com/api/v1/users/PIJ90N7"""'], {'body': 'body', 'status': '(200)'}), "(httpretty.GET,\n 'https://contosso.pagerduty.com/api/v1/users/PIJ90N7', body=body,\n status=200)\n", (280, 381), False, 'import httpretty\n'), ((400, 443), 'pygerduty.PagerDuty', 'pygerduty.PagerDuty', (['"""contosso"""', '"""password"""'], {}), "('contosso', 'password')\n", (419, 443), False, 'import pygerduty\n'), ((911, 1058), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://contosso.pagerduty.com/api/v1/users/PIJ90N7/contact_methods"""'], {'body': 'contact_body', 'status': '(200)'}), "(httpretty.GET,\n 'https://contosso.pagerduty.com/api/v1/users/PIJ90N7/contact_methods',\n body=contact_body, status=200)\n", (933, 1058), False, 'import httpretty\n'), ((1077, 1120), 'pygerduty.PagerDuty', 'pygerduty.PagerDuty', (['"""contosso"""', '"""password"""'], {}), "('contosso', 'password')\n", (1096, 1120), False, 'import pygerduty\n'), ((1632, 1739), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://api.pagerduty.com/users/PXPGF42"""'], {'body': 'body', 'status': '(200)'}), "(httpretty.GET,\n 'https://api.pagerduty.com/users/PXPGF42', body=body, status=200)\n", (1654, 1739), False, 'import httpretty\n'), ((1762, 1796), 'pygerduty.v2.PagerDuty', 'pygerduty.v2.PagerDuty', (['"""password"""'], {}), "('password')\n", (1784, 1796), False, 'import pygerduty\n'), ((2188, 2300), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://api.pagerduty.com/users/PXPGF42"""'], {'body': 'user_body', 'status': '(200)'}), "(httpretty.GET,\n 'https://api.pagerduty.com/users/PXPGF42', body=user_body, status=200)\n", (2210, 2300), False, 'import httpretty\n'), ((2318, 2454), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://api.pagerduty.com/users/PXPGF42/contact_methods"""'], 
{'body': 'contact_body', 'status': '(200)'}), "(httpretty.GET,\n 'https://api.pagerduty.com/users/PXPGF42/contact_methods', body=\n contact_body, status=200)\n", (2340, 2454), False, 'import httpretty\n'), ((2472, 2506), 'pygerduty.v2.PagerDuty', 'pygerduty.v2.PagerDuty', (['"""password"""'], {}), "('password')\n", (2494, 2506), False, 'import pygerduty\n'), ((3121, 3233), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://api.pagerduty.com/users/PXPGF42"""'], {'body': 'user_body', 'status': '(200)'}), "(httpretty.GET,\n 'https://api.pagerduty.com/users/PXPGF42', body=user_body, status=200)\n", (3143, 3233), False, 'import httpretty\n'), ((3251, 3395), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://api.pagerduty.com/users/PXPGF42/notification_rules"""'], {'body': 'notification_body', 'status': '(200)'}), "(httpretty.GET,\n 'https://api.pagerduty.com/users/PXPGF42/notification_rules', body=\n notification_body, status=200)\n", (3273, 3395), False, 'import httpretty\n'), ((3413, 3447), 'pygerduty.v2.PagerDuty', 'pygerduty.v2.PagerDuty', (['"""password"""'], {}), "('password')\n", (3435, 3447), False, 'import pygerduty\n'), ((4818, 4864), 'pygerduty.common.clean_response', 'pygerduty.common.clean_response', (['mock_response'], {}), '(mock_response)\n', (4849, 4864), False, 'import pygerduty\n'), ((768, 896), 'httpretty.register_uri', 'httpretty.register_uri', (['httpretty.GET', '"""https://contosso.pagerduty.com/api/v1/users/PIJ90N7"""'], {'body': 'user_body', 'status': '(200)'}), "(httpretty.GET,\n 'https://contosso.pagerduty.com/api/v1/users/PIJ90N7', body=user_body,\n status=200)\n", (790, 896), False, 'import httpretty\n')]
|
""" Test multithreading to ensure consistent behavior with
serial implementation."""
import unittest
import warnings
from os import remove
from os.path import exists, join
import numpy as np
from molSim.chemical_datastructures import MoleculeSet
from time import time
from tabulate import tabulate
class TestMultithreading(unittest.TestCase):
"""Unit tests to ensure consistency when running molSim as a single process
or when using multiprocessing.
"""
@classmethod
def setUpClass(self):
"""Create a SMILES database to use for comparisons and
find the similarity matrices and execution times.
"""
if not exists(".speedup-test"):
print("Speedup and Efficiency tests DISABLED.")
self.NO_SPEEDUP_TEST = True
else:
self.NO_SPEEDUP_TEST = False
self.N_REPLICATES = 3
warnings.warn(
"Speedup and Efficiency tests ENABLED, expect long runtime.",
ResourceWarning,
)
print(" ~ ~ Testing Multithreading ~ ~ ")
# basic consistency tests
self.text_fpath = "temp_multithread_smiles_seq.txt"
print(f"Creating text file {self.text_fpath}")
with open(self.text_fpath, "w") as file:
for smiles in ["C", "CC", "CCC", "O", "CCCC", "CO", "CCOCC"]:
file.write(smiles + "\n")
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self.correct_similarity_matrix = test_molecule_set.get_similarity_matrix()
if self.NO_SPEEDUP_TEST:
return
with open(join("tests", "data", "combinatorial_1.txt"), "r") as file:
data = file.readlines()
_100_molecules = data[1:102]
_500_molecules = data[1:502]
_1000_molecules = data[1:1002]
_5000_molecules = data[1:5002]
_10000_molecules = data[1:10002]
_15000_molecules = data[1:15002]
# data used for speedup and efficiency tests
self._100_molecules_fpath = "temp_multithread_speedup_100.txt"
print(f"Creating text file {self._100_molecules_fpath}")
with open(self._100_molecules_fpath, "w") as file:
for smiles in _100_molecules:
file.write(smiles)
print("Running 100 molecules with 1 process.")
self._100_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._100_molecules_serial_time += (time() - start) / self.N_REPLICATES
self._500_molecules_fpath = "temp_multithread_speedup_500.txt"
print(f"Creating text file {self._500_molecules_fpath}")
with open(self._500_molecules_fpath, "w") as file:
for smiles in _500_molecules:
file.write(smiles)
print("Running 500 molecules with 1 process.")
self._500_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._500_molecules_serial_time += (time() - start) / self.N_REPLICATES
self._1000_molecules_fpath = "temp_multithread_speedup_1000.txt"
print(f"Creating text file {self._1000_molecules_fpath}")
with open(self._1000_molecules_fpath, "w") as file:
for smiles in _1000_molecules:
file.write(smiles)
print("Running 1000 molecules with 1 process.")
self._1000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._1000_molecules_serial_time += (time() - start) / self.N_REPLICATES
self._5000_molecules_fpath = "temp_multithread_speedup_5000.txt"
print(f"Creating text file {self._5000_molecules_fpath}")
with open(self._5000_molecules_fpath, "w") as file:
for smiles in _5000_molecules:
file.write(smiles)
print("Running 5000 molecules with 1 process.")
self._5000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._5000_molecules_serial_time += (time() - start) / self.N_REPLICATES
self._10000_molecules_fpath = "temp_multithread_speedup_10000.txt"
print(f"Creating text file {self._10000_molecules_fpath}")
with open(self._10000_molecules_fpath, "w") as file:
for smiles in _10000_molecules:
file.write(smiles)
print("Running 10000 molecules with 1 process.")
self._10000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._10000_molecules_serial_time += (time() - start) / self.N_REPLICATES
self._15000_molecules_fpath = "temp_multithread_speedup_15000.txt"
print(f"Creating text file {self._15000_molecules_fpath}")
with open(self._15000_molecules_fpath, "w") as file:
for smiles in _15000_molecules:
file.write(smiles)
print("Running 15000 molecules with 1 process.")
self._15000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._15000_molecules_serial_time += (time() - start) / self.N_REPLICATES
# data used for speedup and efficiency test 2
print("Running 100 molecules with 1 process.")
self._100_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._100_molecules_serial_time_2 += (time() - start) / self.N_REPLICATES
print("Running 500 molecules with 1 process.")
self._500_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._500_molecules_serial_time_2 += (time() - start) / self.N_REPLICATES
print("Running 1000 molecules with 1 process.")
self._1000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._1000_molecules_serial_time_2 += (time() - start) / self.N_REPLICATES
print("Running 5000 molecules with 1 process.")
self._5000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._5000_molecules_serial_time_2 += (time() - start) / self.N_REPLICATES
print("Running 10000 molecules with 1 process.")
self._10000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._10000_molecules_serial_time_2 += (time() - start) / self.N_REPLICATES
print("Running 15000 molecules with 1 process.")
self._15000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._15000_molecules_serial_time_2 += (time() - start) / self.N_REPLICATES
def test_multithreading_consistency_2_threads(self):
"""
Ensure that the similarity matrix produced with 2 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using two threads.",
)
def test_multithreading_consistency_3_threads(self):
"""
Ensure that the similarity matrix produced with 3 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=3,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using three threads.",
)
def test_multithreading_consistency_4_threads(self):
"""
Ensure that the similarity matrix produced with 4 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using four threads.",
)
def test_multithreading_consistency_5_threads(self):
"""
Ensure that the similarity matrix produced with 5 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=5,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using five threads.",
)
def test_multithreading_consistency_6_threads(self):
"""
Ensure that the similarity matrix produced with 6 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=6,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using six threads.",
)
def test_multithreading_consistency_7_threads(self):
"""
Ensure that the similarity matrix produced with 7 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=7,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using seven threads (equal to the number of molecules).",
)
def test_multithreading_consistency_10_threads(self):
"""
Ensure that the similarity matrix produced with 10 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=10,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using ten threads (more than the number of molecules).",
)
def test_speedup_efficiency_tanimoto(self):
"""
Evaluate the speedup and efficieny of the multiprocessing approach.
"""
if self.NO_SPEEDUP_TEST:
return
print("~" * 10, "\n", "Speedup and Efficiency Test\n", "~" * 10)
# 100 molecules
print("Running 100 molecules with 2 processes.")
_100_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_100_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_100_molecules_2_process_speedup = (
self._100_molecules_serial_time / _100_molecules_2_process_time
)
_100_molecules_2_process_efficiency = _100_molecules_2_process_speedup / 2
print("Running 100 molecules with 5 processes.")
_100_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_100_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_100_molecules_5_process_speedup = (
self._100_molecules_serial_time / _100_molecules_5_process_time
)
_100_molecules_5_process_efficiency = _100_molecules_5_process_speedup / 5
print("Running 100 molecules with 10 processes.")
_100_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_100_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_100_molecules_10_process_speedup = (
self._100_molecules_serial_time / _100_molecules_10_process_time
)
_100_molecules_10_process_efficiency = _100_molecules_10_process_speedup / 10
# 500 molecules
print("Running 500 molecules with 2 processes.")
_500_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_500_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_500_molecules_2_process_speedup = (
self._500_molecules_serial_time / _500_molecules_2_process_time
)
_500_molecules_2_process_efficiency = _500_molecules_2_process_speedup / 2
print("Running 500 molecules with 5 processes.")
_500_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_500_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_500_molecules_5_process_speedup = (
self._500_molecules_serial_time / _500_molecules_5_process_time
)
_500_molecules_5_process_efficiency = _500_molecules_5_process_speedup / 5
print("Running 500 molecules with 10 processes.")
_500_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_500_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_500_molecules_10_process_speedup = (
self._500_molecules_serial_time / _500_molecules_10_process_time
)
_500_molecules_10_process_efficiency = _500_molecules_10_process_speedup / 10
# 1000 molecules
print("Running 1000 molecules with 2 processes.")
_1000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_1000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_1000_molecules_2_process_speedup = (
self._1000_molecules_serial_time / _1000_molecules_2_process_time
)
_1000_molecules_2_process_efficiency = _1000_molecules_2_process_speedup / 2
print("Running 1000 molecules with 5 processes.")
_1000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_1000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_1000_molecules_5_process_speedup = (
self._1000_molecules_serial_time / _1000_molecules_5_process_time
)
_1000_molecules_5_process_efficiency = _1000_molecules_5_process_speedup / 5
print("Running 1000 molecules with 10 processes.")
_1000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_1000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_1000_molecules_10_process_speedup = (
self._1000_molecules_serial_time / _1000_molecules_10_process_time
)
_1000_molecules_10_process_efficiency = _1000_molecules_10_process_speedup / 10
print("Running 5000 molecules with 2 processes.")
# 5000 molecules
_5000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_5000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_5000_molecules_2_process_speedup = (
self._5000_molecules_serial_time / _5000_molecules_2_process_time
)
_5000_molecules_2_process_efficiency = _5000_molecules_2_process_speedup / 2
print("Running 5000 molecules with 5 processes.")
_5000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_5000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_5000_molecules_5_process_speedup = (
self._5000_molecules_serial_time / _5000_molecules_5_process_time
)
_5000_molecules_5_process_efficiency = _5000_molecules_5_process_speedup / 5
print("Running 5000 molecules with 10 processes.")
_5000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_5000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_5000_molecules_10_process_speedup = (
self._5000_molecules_serial_time / _5000_molecules_10_process_time
)
_5000_molecules_10_process_efficiency = _5000_molecules_10_process_speedup / 10
# 10000 molecules
print("Running 10000 molecules with 2 processes.")
_10000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_10000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_10000_molecules_2_process_speedup = (
self._10000_molecules_serial_time / _10000_molecules_2_process_time
)
_10000_molecules_2_process_efficiency = _10000_molecules_2_process_speedup / 2
print("Running 10000 molecules with 5 processes.")
_10000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_10000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_10000_molecules_5_process_speedup = (
self._10000_molecules_serial_time / _10000_molecules_5_process_time
)
_10000_molecules_5_process_efficiency = _10000_molecules_5_process_speedup / 5
print("Running 10000 molecules with 10 processes.")
_10000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_10000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_10000_molecules_10_process_speedup = (
self._10000_molecules_serial_time / _10000_molecules_10_process_time
)
_10000_molecules_10_process_efficiency = (
_10000_molecules_10_process_speedup / 10
)
# 15000 molecules
print("Running 15000 molecules with 2 processes.")
_15000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_15000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_15000_molecules_2_process_speedup = (
self._15000_molecules_serial_time / _15000_molecules_2_process_time
)
_15000_molecules_2_process_efficiency = _15000_molecules_2_process_speedup / 2
print("Running 15000 molecules with 5 processes.")
_15000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_15000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_15000_molecules_5_process_speedup = (
self._15000_molecules_serial_time / _15000_molecules_5_process_time
)
_15000_molecules_5_process_efficiency = _15000_molecules_5_process_speedup / 5
print("Running 15000 molecules with 10 processes.")
_15000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_15000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_15000_molecules_10_process_speedup = (
self._15000_molecules_serial_time / _15000_molecules_10_process_time
)
_15000_molecules_10_process_efficiency = (
_15000_molecules_10_process_speedup / 10
)
print("Speedup:")
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
_100_molecules_2_process_speedup,
_100_molecules_5_process_speedup,
_100_molecules_10_process_speedup,
],
[
500,
_500_molecules_2_process_speedup,
_500_molecules_5_process_speedup,
_500_molecules_10_process_speedup,
],
[
1000,
_1000_molecules_2_process_speedup,
_1000_molecules_5_process_speedup,
_1000_molecules_10_process_speedup,
],
[
5000,
_5000_molecules_2_process_speedup,
_5000_molecules_5_process_speedup,
_5000_molecules_10_process_speedup,
],
[
10000,
_10000_molecules_2_process_speedup,
_10000_molecules_5_process_speedup,
_10000_molecules_10_process_speedup,
],
[
15000,
_15000_molecules_2_process_speedup,
_15000_molecules_5_process_speedup,
_15000_molecules_10_process_speedup,
],
],
headers=["# mol", "", "# processes", ""],
)
)
print("Efficiency:")
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
_100_molecules_2_process_efficiency,
_100_molecules_5_process_efficiency,
_100_molecules_10_process_efficiency,
],
[
500,
_500_molecules_2_process_efficiency,
_500_molecules_5_process_efficiency,
_500_molecules_10_process_efficiency,
],
[
1000,
_1000_molecules_2_process_efficiency,
_1000_molecules_5_process_efficiency,
_1000_molecules_10_process_efficiency,
],
[
5000,
_5000_molecules_2_process_efficiency,
_5000_molecules_5_process_efficiency,
_5000_molecules_10_process_efficiency,
],
[
10000,
_10000_molecules_2_process_efficiency,
_10000_molecules_5_process_efficiency,
_10000_molecules_10_process_efficiency,
],
[
15000,
_15000_molecules_2_process_efficiency,
_15000_molecules_5_process_efficiency,
_15000_molecules_10_process_efficiency,
],
],
headers=["# mol", "", "# processes", ""],
)
)
print("Execution Time in seconds (serial/parallel):")
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
"{:.2f}/{:.2f}".format(
float(self._100_molecules_serial_time),
float(_100_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._100_molecules_serial_time),
float(_100_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._100_molecules_serial_time),
float(_100_molecules_10_process_time),
),
],
[
500,
"{:.2f}/{:.2f}".format(
float(self._500_molecules_serial_time),
float(_500_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._500_molecules_serial_time),
float(_500_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._500_molecules_serial_time),
float(_500_molecules_10_process_time),
),
],
[
1000,
"{:.2f}/{:.2f}".format(
float(self._1000_molecules_serial_time),
float(_1000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._1000_molecules_serial_time),
float(_1000_molecules_5_process_time),
),
"{:2f}/{:.2f}".format(
float(self._1000_molecules_serial_time),
float(_1000_molecules_10_process_time),
),
],
[
5000,
"{:.2f}/{:.2f}".format(
float(self._5000_molecules_serial_time),
float(_5000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._5000_molecules_serial_time),
float(_5000_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._5000_molecules_serial_time),
float(_5000_molecules_10_process_time),
),
],
[
10000,
"{:.2f}/{:.2f}".format(
float(self._10000_molecules_serial_time),
float(_10000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._10000_molecules_serial_time),
float(_10000_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._10000_molecules_serial_time),
float(_10000_molecules_10_process_time),
),
],
[
15000,
"{:.2f}/{:.2f}".format(
float(self._15000_molecules_serial_time),
float(_15000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._15000_molecules_serial_time),
float(_15000_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._15000_molecules_serial_time),
float(_15000_molecules_10_process_time),
),
],
],
headers=["# mol", "", "# processes", ""],
)
)
def test_speedup_efficiency_cosine(self):
"""
Evaluate the speedup and efficieny of the multiprocessing approach
with a more complex metric.
"""
if self.NO_SPEEDUP_TEST:
return
print("~" * 10, "\n", "Speedup and Efficiency Test 2\n", "~" * 10)
# 100 molecules
print("Running 100 molecules with 2 processes.")
_100_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=2,
fingerprint_type="topological_fingerprint",
)
_100_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_100_molecules_2_process_speedup = (
self._100_molecules_serial_time_2 / _100_molecules_2_process_time
)
_100_molecules_2_process_efficiency = _100_molecules_2_process_speedup / 2
print("Running 100 molecules with 5 processes.")
_100_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=4,
fingerprint_type="topological_fingerprint",
)
_100_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_100_molecules_5_process_speedup = (
self._100_molecules_serial_time_2 / _100_molecules_5_process_time
)
_100_molecules_5_process_efficiency = _100_molecules_5_process_speedup / 5
print("Running 100 molecules with 10 processes.")
_100_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=8,
fingerprint_type="topological_fingerprint",
)
_100_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_100_molecules_10_process_speedup = (
self._100_molecules_serial_time_2 / _100_molecules_10_process_time
)
_100_molecules_10_process_efficiency = _100_molecules_10_process_speedup / 10
# 500 molecules
print("Running 500 molecules with 2 processes.")
_500_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=2,
fingerprint_type="topological_fingerprint",
)
_500_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_500_molecules_2_process_speedup = (
self._500_molecules_serial_time_2 / _500_molecules_2_process_time
)
_500_molecules_2_process_efficiency = _500_molecules_2_process_speedup / 2
print("Running 500 molecules with 5 processes.")
_500_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=4,
fingerprint_type="topological_fingerprint",
)
_500_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_500_molecules_5_process_speedup = (
self._500_molecules_serial_time_2 / _500_molecules_5_process_time
)
_500_molecules_5_process_efficiency = _500_molecules_5_process_speedup / 5
print("Running 500 molecules with 10 processes.")
_500_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=8,
fingerprint_type="topological_fingerprint",
)
_500_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_500_molecules_10_process_speedup = (
self._500_molecules_serial_time_2 / _500_molecules_10_process_time
)
_500_molecules_10_process_efficiency = _500_molecules_10_process_speedup / 10
# 1000 molecules
print("Running 1000 molecules with 2 processes.")
_1000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=2,
fingerprint_type="topological_fingerprint",
)
_1000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_1000_molecules_2_process_speedup = (
self._1000_molecules_serial_time_2 / _1000_molecules_2_process_time
)
_1000_molecules_2_process_efficiency = _1000_molecules_2_process_speedup / 2
print("Running 1000 molecules with 5 processes.")
_1000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=4,
fingerprint_type="topological_fingerprint",
)
_1000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_1000_molecules_5_process_speedup = (
self._1000_molecules_serial_time_2 / _1000_molecules_5_process_time
)
_1000_molecules_5_process_efficiency = _1000_molecules_5_process_speedup / 5
print("Running 1000 molecules with 10 processes.")
_1000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=8,
fingerprint_type="topological_fingerprint",
)
_1000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_1000_molecules_10_process_speedup = (
self._1000_molecules_serial_time_2 / _1000_molecules_10_process_time
)
_1000_molecules_10_process_efficiency = _1000_molecules_10_process_speedup / 10
# 5000 molecules
print("Running 5000 molecules with 2 processes.")
_5000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=2,
fingerprint_type="topological_fingerprint",
)
_5000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_5000_molecules_2_process_speedup = (
self._5000_molecules_serial_time_2 / _5000_molecules_2_process_time
)
_5000_molecules_2_process_efficiency = _5000_molecules_2_process_speedup / 2
print("Running 5000 molecules with 5 processes.")
_5000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=4,
fingerprint_type="topological_fingerprint",
)
_5000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_5000_molecules_5_process_speedup = (
self._5000_molecules_serial_time_2 / _5000_molecules_5_process_time
)
_5000_molecules_5_process_efficiency = _5000_molecules_5_process_speedup / 5
print("Running 5000 molecules with 10 processes.")
_5000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=8,
fingerprint_type="topological_fingerprint",
)
_5000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_5000_molecules_10_process_speedup = (
self._5000_molecules_serial_time_2 / _5000_molecules_10_process_time
)
_5000_molecules_10_process_efficiency = _5000_molecules_10_process_speedup / 10
# 10000 molecules
print("Running 10000 molecules with 2 processes.")
_10000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=2,
fingerprint_type="topological_fingerprint",
)
_10000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_10000_molecules_2_process_speedup = (
self._10000_molecules_serial_time_2 / _10000_molecules_2_process_time
)
_10000_molecules_2_process_efficiency = _10000_molecules_2_process_speedup / 2
print("Running 10000 molecules with 5 processes.")
_10000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=4,
fingerprint_type="topological_fingerprint",
)
_10000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_10000_molecules_5_process_speedup = (
self._10000_molecules_serial_time_2 / _10000_molecules_5_process_time
)
_10000_molecules_5_process_efficiency = _10000_molecules_5_process_speedup / 5
print("Running 10000 molecules with 10 processes.")
_10000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=8,
fingerprint_type="topological_fingerprint",
)
_10000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_10000_molecules_10_process_speedup = (
self._10000_molecules_serial_time_2 / _10000_molecules_10_process_time
)
_10000_molecules_10_process_efficiency = (
_10000_molecules_10_process_speedup / 10
)
# 15000 molecules
print("Running 15000 molecules with 2 processes.")
_15000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=2,
fingerprint_type="topological_fingerprint",
)
_15000_molecules_2_process_time += (time() - start) / self.N_REPLICATES
_15000_molecules_2_process_speedup = (
self._15000_molecules_serial_time_2 / _15000_molecules_2_process_time
)
_15000_molecules_2_process_efficiency = _15000_molecules_2_process_speedup / 2
print("Running 15000 molecules with 5 processes.")
_15000_molecules_5_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=4,
fingerprint_type="topological_fingerprint",
)
_15000_molecules_5_process_time += (time() - start) / self.N_REPLICATES
_15000_molecules_5_process_speedup = (
self._15000_molecules_serial_time_2 / _15000_molecules_5_process_time
)
_15000_molecules_5_process_efficiency = _15000_molecules_5_process_speedup / 5
print("Running 15000 molecules with 10 processes.")
_15000_molecules_10_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=8,
fingerprint_type="topological_fingerprint",
)
_15000_molecules_10_process_time += (time() - start) / self.N_REPLICATES
_15000_molecules_10_process_speedup = (
self._15000_molecules_serial_time_2 / _15000_molecules_10_process_time
)
_15000_molecules_10_process_efficiency = (
_15000_molecules_10_process_speedup / 10
)
print("Speedup:")
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
_100_molecules_2_process_speedup,
_100_molecules_5_process_speedup,
_100_molecules_10_process_speedup,
],
[
500,
_500_molecules_2_process_speedup,
_500_molecules_5_process_speedup,
_500_molecules_10_process_speedup,
],
[
1000,
_1000_molecules_2_process_speedup,
_1000_molecules_5_process_speedup,
_1000_molecules_10_process_speedup,
],
[
5000,
_5000_molecules_2_process_speedup,
_5000_molecules_5_process_speedup,
_5000_molecules_10_process_speedup,
],
[
10000,
_10000_molecules_2_process_speedup,
_10000_molecules_5_process_speedup,
_10000_molecules_10_process_speedup,
],
[
15000,
_15000_molecules_2_process_speedup,
_15000_molecules_5_process_speedup,
_15000_molecules_10_process_speedup,
],
],
headers=["# mol", "", "# processes", ""],
)
)
print("Efficiency:")
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
_100_molecules_2_process_efficiency,
_100_molecules_5_process_efficiency,
_100_molecules_10_process_efficiency,
],
[
500,
_500_molecules_2_process_efficiency,
_500_molecules_5_process_efficiency,
_500_molecules_10_process_efficiency,
],
[
1000,
_1000_molecules_2_process_efficiency,
_1000_molecules_5_process_efficiency,
_1000_molecules_10_process_efficiency,
],
[
5000,
_5000_molecules_2_process_efficiency,
_5000_molecules_5_process_efficiency,
_5000_molecules_10_process_efficiency,
],
[
10000,
_10000_molecules_2_process_efficiency,
_10000_molecules_5_process_efficiency,
_10000_molecules_10_process_efficiency,
],
[
15000,
_15000_molecules_2_process_efficiency,
_15000_molecules_5_process_efficiency,
_15000_molecules_10_process_efficiency,
],
],
headers=["# mol", "", "# processes", ""],
)
)
print("Execution Time in seconds (serial/parallel):")
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
"{:.2f}/{:.2f}".format(
float(self._100_molecules_serial_time_2),
float(_100_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._100_molecules_serial_time_2),
float(_100_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._100_molecules_serial_time_2),
float(_100_molecules_10_process_time),
),
],
[
500,
"{:.2f}/{:.2f}".format(
float(self._500_molecules_serial_time_2),
float(_500_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._500_molecules_serial_time_2),
float(_500_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._500_molecules_serial_time_2),
float(_500_molecules_10_process_time),
),
],
[
1000,
"{:.2f}/{:.2f}".format(
float(self._1000_molecules_serial_time_2),
float(_1000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._1000_molecules_serial_time_2),
float(_1000_molecules_5_process_time),
),
"{:2f}/{:.2f}".format(
float(self._1000_molecules_serial_time_2),
float(_1000_molecules_10_process_time),
),
],
[
5000,
"{:.2f}/{:.2f}".format(
float(self._5000_molecules_serial_time_2),
float(_5000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._5000_molecules_serial_time_2),
float(_5000_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._5000_molecules_serial_time_2),
float(_5000_molecules_10_process_time),
),
],
[
10000,
"{:.2f}/{:.2f}".format(
float(self._10000_molecules_serial_time_2),
float(_10000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._10000_molecules_serial_time_2),
float(_10000_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._10000_molecules_serial_time_2),
float(_10000_molecules_10_process_time),
),
],
[
15000,
"{:.2f}/{:.2f}".format(
float(self._15000_molecules_serial_time_2),
float(_15000_molecules_2_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._15000_molecules_serial_time_2),
float(_15000_molecules_5_process_time),
),
"{:.2f}/{:.2f}".format(
float(self._15000_molecules_serial_time_2),
float(_15000_molecules_10_process_time),
),
],
],
headers=["# mol", "", "# processes", ""],
)
)
@classmethod
def tearDownClass(self):
"""Delete temporary files used in testing."""
print("Deleting smiles database files.")
remove(self.text_fpath)
if not self.NO_SPEEDUP_TEST:
remove(self._100_molecules_fpath)
remove(self._500_molecules_fpath)
remove(self._1000_molecules_fpath)
remove(self._5000_molecules_fpath)
remove(self._10000_molecules_fpath)
remove(self._15000_molecules_fpath)
print(" ~ ~ Multithreading Test Complete ~ ~ ")
# Run the full multithreading test suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
|
[
"unittest.main",
"molSim.chemical_datastructures.MoleculeSet",
"os.remove",
"os.path.exists",
"time.time",
"tabulate.tabulate",
"warnings.warn",
"os.path.join"
] |
[((64099, 64114), 'unittest.main', 'unittest.main', ([], {}), '()\n', (64112, 64114), False, 'import unittest\n'), ((1419, 1613), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=1, fingerprint_type='morgan_fingerprint')\n", (1430, 1613), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((11450, 11644), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (11461, 11644), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((12255, 12449), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(3)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=3, fingerprint_type='morgan_fingerprint')\n", (12266, 12449), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((13062, 13256), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': 
'"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (13073, 13256), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((13868, 14062), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(5)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=5, fingerprint_type='morgan_fingerprint')\n", (13879, 14062), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((14674, 14868), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(6)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=6, fingerprint_type='morgan_fingerprint')\n", (14685, 14868), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((15479, 15673), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(7)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 
'tanimoto', n_threads=7, fingerprint_type='morgan_fingerprint')\n", (15490, 15673), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((16323, 16518), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self.text_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(True)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(10)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self.text_fpath,\n molecule_database_src_type='text', is_verbose=True, similarity_measure=\n 'tanimoto', n_threads=10, fingerprint_type='morgan_fingerprint')\n", (16334, 16518), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((63667, 63690), 'os.remove', 'remove', (['self.text_fpath'], {}), '(self.text_fpath)\n', (63673, 63690), False, 'from os import remove\n'), ((660, 683), 'os.path.exists', 'exists', (['""".speedup-test"""'], {}), "('.speedup-test')\n", (666, 683), False, 'from os.path import exists, join\n'), ((886, 982), 'warnings.warn', 'warnings.warn', (['"""Speedup and Efficiency tests ENABLED, expect long runtime."""', 'ResourceWarning'], {}), "('Speedup and Efficiency tests ENABLED, expect long runtime.',\n ResourceWarning)\n", (899, 982), False, 'import warnings\n'), ((2684, 2690), 'time.time', 'time', ([], {}), '()\n', (2688, 2690), False, 'from time import time\n'), ((2723, 2928), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=1, fingerprint_type='morgan_fingerprint')\n", (2734, 2928), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((3550, 
3556), 'time.time', 'time', ([], {}), '()\n', (3554, 3556), False, 'from time import time\n'), ((3589, 3794), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=1, fingerprint_type='morgan_fingerprint')\n", (3600, 3794), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((4423, 4429), 'time.time', 'time', ([], {}), '()\n', (4427, 4429), False, 'from time import time\n'), ((4462, 4668), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=1, fingerprint_type='morgan_fingerprint')\n", (4473, 4668), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((5298, 5304), 'time.time', 'time', ([], {}), '()\n', (5302, 5304), False, 'from time import time\n'), ((5337, 5543), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=1, 
fingerprint_type='morgan_fingerprint')\n", (5348, 5543), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((6180, 6186), 'time.time', 'time', ([], {}), '()\n', (6184, 6186), False, 'from time import time\n'), ((6219, 6426), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=1, fingerprint_type='morgan_fingerprint')\n", (6230, 6426), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((7064, 7070), 'time.time', 'time', ([], {}), '()\n', (7068, 7070), False, 'from time import time\n'), ((7103, 7310), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(1)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=1, fingerprint_type='morgan_fingerprint')\n", (7114, 7310), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((7718, 7724), 'time.time', 'time', ([], {}), '()\n', (7722, 7724), False, 'from time import time\n'), ((7757, 7965), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(1)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n 
molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=1, fingerprint_type='topological_fingerprint')\n", (7768, 7965), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((8319, 8325), 'time.time', 'time', ([], {}), '()\n', (8323, 8325), False, 'from time import time\n'), ((8358, 8566), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(1)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=1, fingerprint_type='topological_fingerprint')\n", (8369, 8566), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((8922, 8928), 'time.time', 'time', ([], {}), '()\n', (8926, 8928), False, 'from time import time\n'), ((8961, 9170), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(1)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=1, fingerprint_type='topological_fingerprint')\n", (8972, 9170), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((9527, 9533), 'time.time', 'time', ([], {}), '()\n', (9531, 9533), False, 'from time import time\n'), ((9566, 9775), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': 
'(1)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=1, fingerprint_type='topological_fingerprint')\n", (9577, 9775), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((10134, 10140), 'time.time', 'time', ([], {}), '()\n', (10138, 10140), False, 'from time import time\n'), ((10173, 10383), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(1)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=1, fingerprint_type='topological_fingerprint')\n", (10184, 10383), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((10743, 10749), 'time.time', 'time', ([], {}), '()\n', (10747, 10749), False, 'from time import time\n'), ((10782, 10992), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(1)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=1, fingerprint_type='topological_fingerprint')\n", (10793, 10992), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((17358, 17364), 'time.time', 'time', ([], {}), '()\n', (17362, 17364), False, 'from time import time\n'), ((17397, 17602), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 
'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (17408, 17602), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((18164, 18170), 'time.time', 'time', ([], {}), '()\n', (18168, 18170), False, 'from time import time\n'), ((18203, 18408), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (18214, 18408), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((18972, 18978), 'time.time', 'time', ([], {}), '()\n', (18976, 18978), False, 'from time import time\n'), ((19011, 19216), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(8)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=8, fingerprint_type='morgan_fingerprint')\n", (19022, 19216), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((19808, 19814), 'time.time', 'time', ([], {}), '()\n', (19812, 19814), False, 'from time import 
time\n'), ((19847, 20052), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (19858, 20052), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((20614, 20620), 'time.time', 'time', ([], {}), '()\n', (20618, 20620), False, 'from time import time\n'), ((20653, 20858), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (20664, 20858), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((21422, 21428), 'time.time', 'time', ([], {}), '()\n', (21426, 21428), False, 'from time import time\n'), ((21461, 21666), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(8)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=8, fingerprint_type='morgan_fingerprint')\n", (21472, 21666), False, 'from molSim.chemical_datastructures import 
MoleculeSet\n'), ((22261, 22267), 'time.time', 'time', ([], {}), '()\n', (22265, 22267), False, 'from time import time\n'), ((22300, 22506), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (22311, 22506), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((23076, 23082), 'time.time', 'time', ([], {}), '()\n', (23080, 23082), False, 'from time import time\n'), ((23115, 23321), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (23126, 23321), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((23893, 23899), 'time.time', 'time', ([], {}), '()\n', (23897, 23899), False, 'from time import time\n'), ((23932, 24138), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(8)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', 
n_threads=8, fingerprint_type='morgan_fingerprint')\n", (23943, 24138), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((24739, 24745), 'time.time', 'time', ([], {}), '()\n', (24743, 24745), False, 'from time import time\n'), ((24778, 24984), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (24789, 24984), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((25554, 25560), 'time.time', 'time', ([], {}), '()\n', (25558, 25560), False, 'from time import time\n'), ((25593, 25799), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (25604, 25799), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((26371, 26377), 'time.time', 'time', ([], {}), '()\n', (26375, 26377), False, 'from time import time\n'), ((26410, 26616), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(8)', 'fingerprint_type': '"""morgan_fingerprint"""'}), 
"(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=8, fingerprint_type='morgan_fingerprint')\n", (26421, 26616), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((27220, 27226), 'time.time', 'time', ([], {}), '()\n', (27224, 27226), False, 'from time import time\n'), ((27259, 27466), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (27270, 27466), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((28044, 28050), 'time.time', 'time', ([], {}), '()\n', (28048, 28050), False, 'from time import time\n'), ((28083, 28290), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (28094, 28290), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((28870, 28876), 'time.time', 'time', ([], {}), '()\n', (28874, 28876), False, 'from time import time\n'), ((28909, 29116), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 
'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(8)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=8, fingerprint_type='morgan_fingerprint')\n", (28920, 29116), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((29750, 29756), 'time.time', 'time', ([], {}), '()\n', (29754, 29756), False, 'from time import time\n'), ((29789, 29996), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(2)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=2, fingerprint_type='morgan_fingerprint')\n", (29800, 29996), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((30574, 30580), 'time.time', 'time', ([], {}), '()\n', (30578, 30580), False, 'from time import time\n'), ((30613, 30820), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(4)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=4, fingerprint_type='morgan_fingerprint')\n", (30624, 30820), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((31400, 31406), 'time.time', 'time', ([], {}), '()\n', (31404, 31406), False, 'from time import time\n'), ((31439, 31646), 
'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""tanimoto"""', 'n_threads': '(8)', 'fingerprint_type': '"""morgan_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='tanimoto', n_threads=8, fingerprint_type='morgan_fingerprint')\n", (31450, 31646), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((32140, 32941), 'tabulate.tabulate', 'tabulate', (["[['~', 2, 4, 8], [100, _100_molecules_2_process_speedup,\n _100_molecules_5_process_speedup, _100_molecules_10_process_speedup], [\n 500, _500_molecules_2_process_speedup, _500_molecules_5_process_speedup,\n _500_molecules_10_process_speedup], [1000,\n _1000_molecules_2_process_speedup, _1000_molecules_5_process_speedup,\n _1000_molecules_10_process_speedup], [5000,\n _5000_molecules_2_process_speedup, _5000_molecules_5_process_speedup,\n _5000_molecules_10_process_speedup], [10000,\n _10000_molecules_2_process_speedup, _10000_molecules_5_process_speedup,\n _10000_molecules_10_process_speedup], [15000,\n _15000_molecules_2_process_speedup, _15000_molecules_5_process_speedup,\n _15000_molecules_10_process_speedup]]"], {'headers': "['# mol', '', '# processes', '']"}), "([['~', 2, 4, 8], [100, _100_molecules_2_process_speedup,\n _100_molecules_5_process_speedup, _100_molecules_10_process_speedup], [\n 500, _500_molecules_2_process_speedup, _500_molecules_5_process_speedup,\n _500_molecules_10_process_speedup], [1000,\n _1000_molecules_2_process_speedup, _1000_molecules_5_process_speedup,\n _1000_molecules_10_process_speedup], [5000,\n _5000_molecules_2_process_speedup, _5000_molecules_5_process_speedup,\n _5000_molecules_10_process_speedup], [10000,\n _10000_molecules_2_process_speedup, _10000_molecules_5_process_speedup,\n 
_10000_molecules_10_process_speedup], [15000,\n _15000_molecules_2_process_speedup, _15000_molecules_5_process_speedup,\n _15000_molecules_10_process_speedup]], headers=['# mol', '',\n '# processes', ''])\n", (32148, 32941), False, 'from tabulate import tabulate\n'), ((33879, 34757), 'tabulate.tabulate', 'tabulate', (["[['~', 2, 4, 8], [100, _100_molecules_2_process_efficiency,\n _100_molecules_5_process_efficiency,\n _100_molecules_10_process_efficiency], [500,\n _500_molecules_2_process_efficiency,\n _500_molecules_5_process_efficiency,\n _500_molecules_10_process_efficiency], [1000,\n _1000_molecules_2_process_efficiency,\n _1000_molecules_5_process_efficiency,\n _1000_molecules_10_process_efficiency], [5000,\n _5000_molecules_2_process_efficiency,\n _5000_molecules_5_process_efficiency,\n _5000_molecules_10_process_efficiency], [10000,\n _10000_molecules_2_process_efficiency,\n _10000_molecules_5_process_efficiency,\n _10000_molecules_10_process_efficiency], [15000,\n _15000_molecules_2_process_efficiency,\n _15000_molecules_5_process_efficiency,\n _15000_molecules_10_process_efficiency]]"], {'headers': "['# mol', '', '# processes', '']"}), "([['~', 2, 4, 8], [100, _100_molecules_2_process_efficiency,\n _100_molecules_5_process_efficiency,\n _100_molecules_10_process_efficiency], [500,\n _500_molecules_2_process_efficiency,\n _500_molecules_5_process_efficiency,\n _500_molecules_10_process_efficiency], [1000,\n _1000_molecules_2_process_efficiency,\n _1000_molecules_5_process_efficiency,\n _1000_molecules_10_process_efficiency], [5000,\n _5000_molecules_2_process_efficiency,\n _5000_molecules_5_process_efficiency,\n _5000_molecules_10_process_efficiency], [10000,\n _10000_molecules_2_process_efficiency,\n _10000_molecules_5_process_efficiency,\n _10000_molecules_10_process_efficiency], [15000,\n _15000_molecules_2_process_efficiency,\n _15000_molecules_5_process_efficiency,\n _15000_molecules_10_process_efficiency]], headers=['# mol', '',\n '# processes', 
''])\n", (33887, 34757), False, 'from tabulate import tabulate\n'), ((40618, 40624), 'time.time', 'time', ([], {}), '()\n', (40622, 40624), False, 'from time import time\n'), ((40657, 40865), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(2)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=2, fingerprint_type='topological_fingerprint')\n", (40668, 40865), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((41429, 41435), 'time.time', 'time', ([], {}), '()\n', (41433, 41435), False, 'from time import time\n'), ((41468, 41676), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(4)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=4, fingerprint_type='topological_fingerprint')\n", (41479, 41676), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((42242, 42248), 'time.time', 'time', ([], {}), '()\n', (42246, 42248), False, 'from time import time\n'), ((42281, 42489), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._100_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(8)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._100_molecules_fpath,\n 
molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=8, fingerprint_type='topological_fingerprint')\n", (42292, 42489), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((43083, 43089), 'time.time', 'time', ([], {}), '()\n', (43087, 43089), False, 'from time import time\n'), ((43122, 43330), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(2)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=2, fingerprint_type='topological_fingerprint')\n", (43133, 43330), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((43894, 43900), 'time.time', 'time', ([], {}), '()\n', (43898, 43900), False, 'from time import time\n'), ((43933, 44141), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(4)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=4, fingerprint_type='topological_fingerprint')\n", (43944, 44141), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((44707, 44713), 'time.time', 'time', ([], {}), '()\n', (44711, 44713), False, 'from time import time\n'), ((44746, 44954), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._500_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': 
'"""cosine"""', 'n_threads': '(8)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._500_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=8, fingerprint_type='topological_fingerprint')\n", (44757, 44954), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((45551, 45557), 'time.time', 'time', ([], {}), '()\n', (45555, 45557), False, 'from time import time\n'), ((45590, 45799), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(2)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=2, fingerprint_type='topological_fingerprint')\n", (45601, 45799), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((46371, 46377), 'time.time', 'time', ([], {}), '()\n', (46375, 46377), False, 'from time import time\n'), ((46410, 46619), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(4)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=4, fingerprint_type='topological_fingerprint')\n", (46421, 46619), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((47193, 47199), 'time.time', 'time', ([], {}), '()\n', (47197, 47199), False, 'from time import time\n'), ((47232, 47441), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], 
{'molecule_database_src': 'self._1000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(8)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._1000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=8, fingerprint_type='topological_fingerprint')\n", (47243, 47441), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((48044, 48050), 'time.time', 'time', ([], {}), '()\n', (48048, 48050), False, 'from time import time\n'), ((48083, 48292), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(2)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=2, fingerprint_type='topological_fingerprint')\n", (48094, 48292), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((48864, 48870), 'time.time', 'time', ([], {}), '()\n', (48868, 48870), False, 'from time import time\n'), ((48903, 49112), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(4)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=4, fingerprint_type='topological_fingerprint')\n", (48914, 49112), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((49686, 49692), 'time.time', 'time', ([], {}), 
'()\n', (49690, 49692), False, 'from time import time\n'), ((49725, 49934), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._5000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(8)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._5000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=8, fingerprint_type='topological_fingerprint')\n", (49736, 49934), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((50540, 50546), 'time.time', 'time', ([], {}), '()\n', (50544, 50546), False, 'from time import time\n'), ((50579, 50789), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(2)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=2, fingerprint_type='topological_fingerprint')\n", (50590, 50789), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((51369, 51375), 'time.time', 'time', ([], {}), '()\n', (51373, 51375), False, 'from time import time\n'), ((51408, 51618), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(4)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=4, 
fingerprint_type='topological_fingerprint')\n", (51419, 51618), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((52200, 52206), 'time.time', 'time', ([], {}), '()\n', (52204, 52206), False, 'from time import time\n'), ((52239, 52449), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._10000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(8)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._10000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=8, fingerprint_type='topological_fingerprint')\n", (52250, 52449), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((53085, 53091), 'time.time', 'time', ([], {}), '()\n', (53089, 53091), False, 'from time import time\n'), ((53124, 53334), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(2)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=2, fingerprint_type='topological_fingerprint')\n", (53135, 53334), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((53914, 53920), 'time.time', 'time', ([], {}), '()\n', (53918, 53920), False, 'from time import time\n'), ((53953, 54163), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(4)', 'fingerprint_type': '"""topological_fingerprint"""'}), 
"(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=4, fingerprint_type='topological_fingerprint')\n", (53964, 54163), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((54745, 54751), 'time.time', 'time', ([], {}), '()\n', (54749, 54751), False, 'from time import time\n'), ((54784, 54994), 'molSim.chemical_datastructures.MoleculeSet', 'MoleculeSet', ([], {'molecule_database_src': 'self._15000_molecules_fpath', 'molecule_database_src_type': '"""text"""', 'is_verbose': '(False)', 'similarity_measure': '"""cosine"""', 'n_threads': '(8)', 'fingerprint_type': '"""topological_fingerprint"""'}), "(molecule_database_src=self._15000_molecules_fpath,\n molecule_database_src_type='text', is_verbose=False, similarity_measure\n ='cosine', n_threads=8, fingerprint_type='topological_fingerprint')\n", (54795, 54994), False, 'from molSim.chemical_datastructures import MoleculeSet\n'), ((55490, 56291), 'tabulate.tabulate', 'tabulate', (["[['~', 2, 4, 8], [100, _100_molecules_2_process_speedup,\n _100_molecules_5_process_speedup, _100_molecules_10_process_speedup], [\n 500, _500_molecules_2_process_speedup, _500_molecules_5_process_speedup,\n _500_molecules_10_process_speedup], [1000,\n _1000_molecules_2_process_speedup, _1000_molecules_5_process_speedup,\n _1000_molecules_10_process_speedup], [5000,\n _5000_molecules_2_process_speedup, _5000_molecules_5_process_speedup,\n _5000_molecules_10_process_speedup], [10000,\n _10000_molecules_2_process_speedup, _10000_molecules_5_process_speedup,\n _10000_molecules_10_process_speedup], [15000,\n _15000_molecules_2_process_speedup, _15000_molecules_5_process_speedup,\n _15000_molecules_10_process_speedup]]"], {'headers': "['# mol', '', '# processes', '']"}), "([['~', 2, 4, 8], [100, _100_molecules_2_process_speedup,\n _100_molecules_5_process_speedup, _100_molecules_10_process_speedup], [\n 500, 
_500_molecules_2_process_speedup, _500_molecules_5_process_speedup,\n _500_molecules_10_process_speedup], [1000,\n _1000_molecules_2_process_speedup, _1000_molecules_5_process_speedup,\n _1000_molecules_10_process_speedup], [5000,\n _5000_molecules_2_process_speedup, _5000_molecules_5_process_speedup,\n _5000_molecules_10_process_speedup], [10000,\n _10000_molecules_2_process_speedup, _10000_molecules_5_process_speedup,\n _10000_molecules_10_process_speedup], [15000,\n _15000_molecules_2_process_speedup, _15000_molecules_5_process_speedup,\n _15000_molecules_10_process_speedup]], headers=['# mol', '',\n '# processes', ''])\n", (55498, 56291), False, 'from tabulate import tabulate\n'), ((57229, 58107), 'tabulate.tabulate', 'tabulate', (["[['~', 2, 4, 8], [100, _100_molecules_2_process_efficiency,\n _100_molecules_5_process_efficiency,\n _100_molecules_10_process_efficiency], [500,\n _500_molecules_2_process_efficiency,\n _500_molecules_5_process_efficiency,\n _500_molecules_10_process_efficiency], [1000,\n _1000_molecules_2_process_efficiency,\n _1000_molecules_5_process_efficiency,\n _1000_molecules_10_process_efficiency], [5000,\n _5000_molecules_2_process_efficiency,\n _5000_molecules_5_process_efficiency,\n _5000_molecules_10_process_efficiency], [10000,\n _10000_molecules_2_process_efficiency,\n _10000_molecules_5_process_efficiency,\n _10000_molecules_10_process_efficiency], [15000,\n _15000_molecules_2_process_efficiency,\n _15000_molecules_5_process_efficiency,\n _15000_molecules_10_process_efficiency]]"], {'headers': "['# mol', '', '# processes', '']"}), "([['~', 2, 4, 8], [100, _100_molecules_2_process_efficiency,\n _100_molecules_5_process_efficiency,\n _100_molecules_10_process_efficiency], [500,\n _500_molecules_2_process_efficiency,\n _500_molecules_5_process_efficiency,\n _500_molecules_10_process_efficiency], [1000,\n _1000_molecules_2_process_efficiency,\n _1000_molecules_5_process_efficiency,\n _1000_molecules_10_process_efficiency], [5000,\n 
_5000_molecules_2_process_efficiency,\n _5000_molecules_5_process_efficiency,\n _5000_molecules_10_process_efficiency], [10000,\n _10000_molecules_2_process_efficiency,\n _10000_molecules_5_process_efficiency,\n _10000_molecules_10_process_efficiency], [15000,\n _15000_molecules_2_process_efficiency,\n _15000_molecules_5_process_efficiency,\n _15000_molecules_10_process_efficiency]], headers=['# mol', '',\n '# processes', ''])\n", (57237, 58107), False, 'from tabulate import tabulate\n'), ((63740, 63773), 'os.remove', 'remove', (['self._100_molecules_fpath'], {}), '(self._100_molecules_fpath)\n', (63746, 63773), False, 'from os import remove\n'), ((63786, 63819), 'os.remove', 'remove', (['self._500_molecules_fpath'], {}), '(self._500_molecules_fpath)\n', (63792, 63819), False, 'from os import remove\n'), ((63832, 63866), 'os.remove', 'remove', (['self._1000_molecules_fpath'], {}), '(self._1000_molecules_fpath)\n', (63838, 63866), False, 'from os import remove\n'), ((63879, 63913), 'os.remove', 'remove', (['self._5000_molecules_fpath'], {}), '(self._5000_molecules_fpath)\n', (63885, 63913), False, 'from os import remove\n'), ((63926, 63961), 'os.remove', 'remove', (['self._10000_molecules_fpath'], {}), '(self._10000_molecules_fpath)\n', (63932, 63961), False, 'from os import remove\n'), ((63974, 64009), 'os.remove', 'remove', (['self._15000_molecules_fpath'], {}), '(self._15000_molecules_fpath)\n', (63980, 64009), False, 'from os import remove\n'), ((1842, 1886), 'os.path.join', 'join', (['"""tests"""', '"""data"""', '"""combinatorial_1.txt"""'], {}), "('tests', 'data', 'combinatorial_1.txt')\n", (1846, 1886), False, 'from os.path import exists, join\n'), ((3079, 3085), 'time.time', 'time', ([], {}), '()\n', (3083, 3085), False, 'from time import time\n'), ((3945, 3951), 'time.time', 'time', ([], {}), '()\n', (3949, 3951), False, 'from time import time\n'), ((4820, 4826), 'time.time', 'time', ([], {}), '()\n', (4824, 4826), False, 'from time import time\n'), ((5695, 
5701), 'time.time', 'time', ([], {}), '()\n', (5699, 5701), False, 'from time import time\n'), ((6579, 6585), 'time.time', 'time', ([], {}), '()\n', (6583, 6585), False, 'from time import time\n'), ((7463, 7469), 'time.time', 'time', ([], {}), '()\n', (7467, 7469), False, 'from time import time\n'), ((8118, 8124), 'time.time', 'time', ([], {}), '()\n', (8122, 8124), False, 'from time import time\n'), ((8719, 8725), 'time.time', 'time', ([], {}), '()\n', (8723, 8725), False, 'from time import time\n'), ((9324, 9330), 'time.time', 'time', ([], {}), '()\n', (9328, 9330), False, 'from time import time\n'), ((9929, 9935), 'time.time', 'time', ([], {}), '()\n', (9933, 9935), False, 'from time import time\n'), ((10538, 10544), 'time.time', 'time', ([], {}), '()\n', (10542, 10544), False, 'from time import time\n'), ((11147, 11153), 'time.time', 'time', ([], {}), '()\n', (11151, 11153), False, 'from time import time\n'), ((17751, 17757), 'time.time', 'time', ([], {}), '()\n', (17755, 17757), False, 'from time import time\n'), ((18557, 18563), 'time.time', 'time', ([], {}), '()\n', (18561, 18563), False, 'from time import time\n'), ((19366, 19372), 'time.time', 'time', ([], {}), '()\n', (19370, 19372), False, 'from time import time\n'), ((20201, 20207), 'time.time', 'time', ([], {}), '()\n', (20205, 20207), False, 'from time import time\n'), ((21007, 21013), 'time.time', 'time', ([], {}), '()\n', (21011, 21013), False, 'from time import time\n'), ((21816, 21822), 'time.time', 'time', ([], {}), '()\n', (21820, 21822), False, 'from time import time\n'), ((22656, 22662), 'time.time', 'time', ([], {}), '()\n', (22660, 22662), False, 'from time import time\n'), ((23471, 23477), 'time.time', 'time', ([], {}), '()\n', (23475, 23477), False, 'from time import time\n'), ((24289, 24295), 'time.time', 'time', ([], {}), '()\n', (24293, 24295), False, 'from time import time\n'), ((25134, 25140), 'time.time', 'time', ([], {}), '()\n', (25138, 25140), False, 'from time import time\n'), 
((25949, 25955), 'time.time', 'time', ([], {}), '()\n', (25953, 25955), False, 'from time import time\n'), ((26767, 26773), 'time.time', 'time', ([], {}), '()\n', (26771, 26773), False, 'from time import time\n'), ((27617, 27623), 'time.time', 'time', ([], {}), '()\n', (27621, 27623), False, 'from time import time\n'), ((28441, 28447), 'time.time', 'time', ([], {}), '()\n', (28445, 28447), False, 'from time import time\n'), ((29268, 29274), 'time.time', 'time', ([], {}), '()\n', (29272, 29274), False, 'from time import time\n'), ((30147, 30153), 'time.time', 'time', ([], {}), '()\n', (30151, 30153), False, 'from time import time\n'), ((30971, 30977), 'time.time', 'time', ([], {}), '()\n', (30975, 30977), False, 'from time import time\n'), ((31798, 31804), 'time.time', 'time', ([], {}), '()\n', (31802, 31804), False, 'from time import time\n'), ((41014, 41020), 'time.time', 'time', ([], {}), '()\n', (41018, 41020), False, 'from time import time\n'), ((41825, 41831), 'time.time', 'time', ([], {}), '()\n', (41829, 41831), False, 'from time import time\n'), ((42639, 42645), 'time.time', 'time', ([], {}), '()\n', (42643, 42645), False, 'from time import time\n'), ((43479, 43485), 'time.time', 'time', ([], {}), '()\n', (43483, 43485), False, 'from time import time\n'), ((44290, 44296), 'time.time', 'time', ([], {}), '()\n', (44294, 44296), False, 'from time import time\n'), ((45104, 45110), 'time.time', 'time', ([], {}), '()\n', (45108, 45110), False, 'from time import time\n'), ((45949, 45955), 'time.time', 'time', ([], {}), '()\n', (45953, 45955), False, 'from time import time\n'), ((46769, 46775), 'time.time', 'time', ([], {}), '()\n', (46773, 46775), False, 'from time import time\n'), ((47592, 47598), 'time.time', 'time', ([], {}), '()\n', (47596, 47598), False, 'from time import time\n'), ((48442, 48448), 'time.time', 'time', ([], {}), '()\n', (48446, 48448), False, 'from time import time\n'), ((49262, 49268), 'time.time', 'time', ([], {}), '()\n', (49266, 49268), 
False, 'from time import time\n'), ((50085, 50091), 'time.time', 'time', ([], {}), '()\n', (50089, 50091), False, 'from time import time\n'), ((50940, 50946), 'time.time', 'time', ([], {}), '()\n', (50944, 50946), False, 'from time import time\n'), ((51769, 51775), 'time.time', 'time', ([], {}), '()\n', (51773, 51775), False, 'from time import time\n'), ((52601, 52607), 'time.time', 'time', ([], {}), '()\n', (52605, 52607), False, 'from time import time\n'), ((53485, 53491), 'time.time', 'time', ([], {}), '()\n', (53489, 53491), False, 'from time import time\n'), ((54314, 54320), 'time.time', 'time', ([], {}), '()\n', (54318, 54320), False, 'from time import time\n'), ((55146, 55152), 'time.time', 'time', ([], {}), '()\n', (55150, 55152), False, 'from time import time\n')]
|
from django.shortcuts import render, redirect
from bs4 import BeautifulSoup
from django.views.generic import DetailView, FormView, CreateView
from news.models import Article, Comment
from django.db import IntegrityError
from django.db.models import Q
from .forms import AddComment
import requests
from urllib.request import urlopen, Request
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from operator import attrgetter
# Silence urllib3 warnings (e.g. from unverified HTTPS requests) emitted by
# the scraping calls in refresh() below.
requests.packages.urllib3.disable_warnings()
def refresh(request):
    """Scrape each configured news site, save any new articles, and redirect home.

    Each site's section builds Article rows from its own HTML structure; a
    duplicate URL raises IntegrityError (UNIQUE constraint), which is treated
    as "already stored" and skipped.  NOTE(review): parsing depends on each
    site's current markup and will silently break if their HTML changes.
    """
    # --- Foreign Policy: latest-articles listing page ---
    foreign_policy_req = requests.get("https://foreignpolicy.com/category/latest/")
    foreign_policy_soup = BeautifulSoup(foreign_policy_req.content, "html.parser")
    foreign_policy = foreign_policy_soup.find_all('div', {'class': 'excerpt-content--list content-block'})
    # Iterate reversed so the newest headline is saved last (most recent row).
    for headline in foreign_policy[::-1]:
        new_article = Article()
        new_article.title = headline.find_all('h3', {'class':'hed'})[0].text
        new_article.url= headline.find_all('a', {'class':'hed-heading -excerpt'})[0]['href']
        new_article.image_url = headline.find_all('img')[0]['data-src']
        auth = headline.find_all('a', {'class':'author'})
        if len(auth) != 0:
            new_article.author = auth[0].text
        else:
            new_article.author = "FP"  # fallback byline when no author tag
        new_article.site = "Foreign Policy"
        new_article.site_url = "https://foreignpolicy.com"
        try:
            new_article.save() #checks for errors
        except IntegrityError as e:
            if 'UNIQUE constraint' in str(e.args): #a repeat article
                pass
    # --- Foreign Affairs: magazine front page ---
    foreign_affairs_req = requests.get("https://www.foreignaffairs.com")
    foreign_affairs_soup = BeautifulSoup(foreign_affairs_req.content, "html.parser")
    foreign_affairs = foreign_affairs_soup.find_all('div', {'class' : 'magazine-list-item--image-link row'})
    for headline in foreign_affairs[::-1]:
        new_article = Article()
        new_article.title = headline.find_all('h3', {'class':'article-card-title font-weight-bold ls-0 mb-0 f-sans'})[0].text
        new_article.image_url = headline.find_all('img',{'class':'b-lazy b-lazy-ratio magazine-list-item--image d-none d-md-block'})[0]['data-src']
        # Very long data-src values are placeholders, not real images; use logo.
        if len(new_article.image_url) > 199:
            new_article.image_url = 'https://subscribe.foreignaffairs.com/FAF/pub_templates/faf/images/logo.png'
        new_article.url = headline.find_all('a', {'class':'d-block flex-grow-1'})[0]['href']
        new_article.author = headline.find_all('h4', {'class':'magazine-author font-italic ls-0 mb-0 f-serif'})[0].text
        new_article.site = "Foreign Affairs"
        new_article.site_url = "https://www.foreignaffairs.com"
        try:
            new_article.save()
        except IntegrityError as e:
            if 'UNIQUE constraint' in str(e.args):
                pass
    # --- China Power podcasts ---
    #they give a 403 error for other methods
    china_power_req = Request("https://chinapower.csis.org/podcasts/", headers = {'User-Agent' : 'Mozilla/5.0'})
    china_power_page = urlopen(china_power_req).read()
    china_power_soup = BeautifulSoup(china_power_page, "html.parser")
    china_power = china_power_soup.find_all('article')
    for headline in china_power[::-1]:
        #finding author
        disc = headline.find_all('h2', {'class':'entry-title'})[0].text #description has the author's name
        list_disc = disc.split() #find it in the text
        # Collect every word after the first "with" as the guest's name.
        record = False
        list_auth = []
        for name in list_disc:
            if record:
                list_auth.append(name) #add the name
            if name == "with": #start at 'episode,'
                record = True;
        new_article = Article()
        new_article.title = headline.find_all('h2', {'class':'entry-title'})[0].text
        # The podcast has a single fixed cover image.
        new_article.image_url = "https://megaphone.imgix.net/podcasts/722b9c2a-e6e1-11ea-a520-3349f6671499/image/uploads_2F1598366366917-v9rdxhpawhc-bee946f884ea9a141d33af2322074d0d_2F_ART_ChinaPower.jpg?ixlib=rails-2.1.2&w=400&h=400"
        new_article.url = headline.find_all('a')[0]['href']
        if len(list_auth) != 0:
            new_article.author = " ".join(list_auth) + " & <NAME>"
        else:
            new_article.author = "<NAME>"
        new_article.site = "China Power Podcasts"
        new_article.site_url = "https://chinapower.csis.org/podcasts/"
        try:
            new_article.save()
        except IntegrityError as e:
            if 'UNIQUE constraint' in str(e.args):
                pass
    #for war on the rocks, each div class for the articles is different
    warontherocks_req = Request("https://warontherocks.com/", headers = {'User-Agent' : 'Mozilla/5.0'})
    warontherocks_page = urlopen(warontherocks_req).read()
    warontherocks_soup = BeautifulSoup(warontherocks_page, "html.parser")
    warontherocks = warontherocks_soup.find_all('div', {'class' : 'all-posts'})
    #very nice and straight forward html from warontherocks
    header_ = warontherocks[0].find_all('h3')
    link_ = warontherocks[0].find_all('a')
    img_ = warontherocks[0].find_all('img')
    writer_ = warontherocks[0].find_all('h4')
    # Posts are parallel lists; links appear twice per post, hence link_[2*i-1].
    # Iterates posts 12..2 in reverse so newest is saved last.
    for i in range(12,1,-1):
        new_article = Article()
        new_article.title = header_[i-1].text
        new_article.image_url = img_[i-1]['src']
        new_article.url = link_[2*i-1]['href']
        new_article.author = writer_[i-1].text
        new_article.site = "War on the Rocks"
        new_article.site_url = "https://warontherocks.com"
        try:
            new_article.save()
        except IntegrityError as e:
            if 'UNIQUE constraint' in str(e.args):
                pass
    """AP_FP_req = Request("https://apnews.com/hub/foreign-policy", headers = {'User-Agent' : 'Mozilla/5.0'})
    AP_FP_page = urlopen(AP_FP_req).read()
    AP_IL_req = Request("https://apnews.com/hub/international-relations", headers = {'User-Agent' : 'Mozilla/5.0'})
    AP_IL_page = urlopen(AP_IL_req).read()
    AP_FP_soup = BeautifulSoup(AP_FP_page, "html.parser")
    AP_IL_soup = BeautifulSoup(AP_IL_page, "html.parser")
    AP = AP_FP_soup.find_all('div', {'data-key': 'feed-card-wire-story-with-image'}) + AP_IL_soup.find_all('div', {'data-key': 'feed-card-wire-story-with-image'})
    for headline in AP[::-1]:
        new_article = Article()
        new_article.title = headline.find_all('h1')[0].text
        new_article.url= "https://apnews.com" + headline.find_all('a')[0]['href']
        #img machine broke
        img = headline.find_all('img', {'class': 'image-0-2-132'})
        if len(img) == 0:
            new_article.image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/0/0c/Associated_Press_logo_2012.svg/220px-Associated_Press_logo_2012.svg.png"
        else:
            new_article.image_url = img[0]['src']
        list_auth = (headline.find_all('span')[0].text).split(" ")
        if "GMT" in list_auth:
            new_article.author = "AP"
        else:
            new_article.author = headline.find_all('span')[0].text
        new_article.site = "Associated Press"
        new_article.site_url = "https://apnews.com"
        try:
            new_article.save() #checks for errors
        except IntegrityError as e:
            if 'UNIQUE constraint' in str(e.args): #a repeat article
                pass"""
    #lowy institute
    LI_req = Request("https://www.lowyinstitute.org/the-interpreter/archive", headers = {'User-Agent' : 'Mozilla/5.0'})
    LI_page = urlopen(LI_req).read()
    LI_soup = BeautifulSoup(LI_page, "html.parser")
    LI = LI_soup.find_all('article')
    for headline in LI[::-1]:
        img = headline.find_all('div',{'class':'article-thumb'})[0]
        if len(img) == 0:
            img = headline.find_all('div',{'class':'article-thumb-wrap'})[0]
        # The image URL is embedded in the inline style attribute between two
        # single quotes; extract it character by character.
        word = [] #getting the link into a list of chars
        record = False
        for letter in list(img['style']):
            if record:
                word.append(letter)
            if letter == "'":
                if record:
                    word.pop() #removing the ' at the end
                    break
                record = True
        new_article = Article()
        new_article.title = headline.find_all('h2', {'class':'article-title txt-f4 txt-s6 mv-0 pv-xs'})[0].text
        new_article.url= "https://www.lowyinstitute.org" + headline.find_all('a', {'class':'txt-dn'})[0]['href']
        new_article.image_url = "".join(word)
        new_article.author = headline.find_all('a', {'class':'txt-dn'})[1].text
        new_article.site = "Lowy Institute"
        new_article.site_url = "https://www.lowyinstitute.org/the-interpreter/archive"
        try:
            new_article.save()
        except IntegrityError as e:
            if 'UNIQUE constraint' in str(e.args):
                pass
    return redirect("../")
def getQuerySet(query = None): #for searching
    """Return a de-duplicated list of Articles whose titles contain any search term.

    Args:
        query: Space-separated search terms.  ``None`` is treated as ``""``;
            an empty string splits to ``[""]`` and ``icontains=""`` matches
            every article — the behaviour home() relies on for the unfiltered
            feed, so it is deliberately preserved.

    Returns:
        list: matching Article objects in arbitrary order (the caller sorts).
    """
    # Bug fix: the default query=None previously crashed with AttributeError
    # on query.split().  Normalise to "" to keep the match-everything path.
    if query is None:
        query = ""
    # A set de-duplicates articles matched by more than one search term.
    matched = set()
    for q in query.split(" "):
        posts = Article.objects.filter(Q(title__icontains = q)).distinct()
        matched.update(posts)
    return list(matched)
def home(request, *args, **kwargs):
    """Render the paginated article feed, optionally filtered by the ?q= term."""
    context = {}
    search_term = ""
    if request.GET:
        search_term = request.GET.get('q', '')
        context['query'] = str(search_term)  # echoed back into the search box
    # Newest articles first.
    matches = sorted(
        getQuerySet(search_term), key=attrgetter('time_added'), reverse=True
    )
    requested_page = request.GET.get('page', 1)
    paginator = Paginator(matches, 10)  # 10 articles per page
    try:
        page = paginator.page(requested_page)
    except EmptyPage:
        # Past the end: clamp to the last page.
        page = paginator.page(paginator.num_pages)
    except PageNotAnInteger:
        # Garbage page number: fall back to the first page.
        page = paginator.page(1)
    context['articles'] = page
    return render(request, "home.html", context)
# Views for displaying a single article together with its comments.
class HomeDetailView(DetailView):
    """Read-only detail page for a single Article (looked up by pk)."""
    model = Article
    template_name = 'detail_article.html'
class CommentView(CreateView):
    """Form view that creates a Comment attached to a specific Article."""
    model = Comment
    template_name = 'add_comment.html'
    form_class = AddComment
    def form_valid(self,form):
        """Fill in the fields the form does not expose before saving."""
        #automatically have the post id
        form.instance.post_id = self.kwargs['pk']
        #automatically add username
        form.instance.user = self.request.user
        return super().form_valid(form)
    def get_success_url(self):#goes back to page
        """Redirect back to the article the comment was posted on."""
        return reverse('ArticleDetail', kwargs={'pk': self.kwargs['pk']})
def contact(request):
    """Render the static contact page."""
    template_name = "contact.html"
    return render(request, template_name)
def about(request):
    """Render the static about page."""
    template_name = "about.html"
    return render(request, template_name)
|
[
"requests.packages.urllib3.disable_warnings",
"urllib.request.Request",
"news.models.Article",
"django.shortcuts.render",
"django.shortcuts.redirect",
"urllib.request.urlopen",
"django.db.models.Q",
"django.urls.reverse",
"operator.attrgetter",
"django.core.paginator.Paginator",
"requests.get",
"bs4.BeautifulSoup"
] |
[((537, 581), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (579, 581), False, 'import requests\n'), ((626, 684), 'requests.get', 'requests.get', (['"""https://foreignpolicy.com/category/latest/"""'], {}), "('https://foreignpolicy.com/category/latest/')\n", (638, 684), False, 'import requests\n'), ((708, 764), 'bs4.BeautifulSoup', 'BeautifulSoup', (['foreign_policy_req.content', '"""html.parser"""'], {}), "(foreign_policy_req.content, 'html.parser')\n", (721, 764), False, 'from bs4 import BeautifulSoup\n'), ((1574, 1620), 'requests.get', 'requests.get', (['"""https://www.foreignaffairs.com"""'], {}), "('https://www.foreignaffairs.com')\n", (1586, 1620), False, 'import requests\n'), ((1645, 1702), 'bs4.BeautifulSoup', 'BeautifulSoup', (['foreign_affairs_req.content', '"""html.parser"""'], {}), "(foreign_affairs_req.content, 'html.parser')\n", (1658, 1702), False, 'from bs4 import BeautifulSoup\n'), ((2758, 2849), 'urllib.request.Request', 'Request', (['"""https://chinapower.csis.org/podcasts/"""'], {'headers': "{'User-Agent': 'Mozilla/5.0'}"}), "('https://chinapower.csis.org/podcasts/', headers={'User-Agent':\n 'Mozilla/5.0'})\n", (2765, 2849), False, 'from urllib.request import urlopen, Request\n'), ((2921, 2967), 'bs4.BeautifulSoup', 'BeautifulSoup', (['china_power_page', '"""html.parser"""'], {}), "(china_power_page, 'html.parser')\n", (2934, 2967), False, 'from bs4 import BeautifulSoup\n'), ((4232, 4308), 'urllib.request.Request', 'Request', (['"""https://warontherocks.com/"""'], {'headers': "{'User-Agent': 'Mozilla/5.0'}"}), "('https://warontherocks.com/', headers={'User-Agent': 'Mozilla/5.0'})\n", (4239, 4308), False, 'from urllib.request import urlopen, Request\n'), ((4390, 4438), 'bs4.BeautifulSoup', 'BeautifulSoup', (['warontherocks_page', '"""html.parser"""'], {}), "(warontherocks_page, 'html.parser')\n", (4403, 4438), False, 'from bs4 import BeautifulSoup\n'), ((6689, 6797), 
'urllib.request.Request', 'Request', (['"""https://www.lowyinstitute.org/the-interpreter/archive"""'], {'headers': "{'User-Agent': 'Mozilla/5.0'}"}), "('https://www.lowyinstitute.org/the-interpreter/archive', headers={\n 'User-Agent': 'Mozilla/5.0'})\n", (6696, 6797), False, 'from urllib.request import urlopen, Request\n'), ((6841, 6878), 'bs4.BeautifulSoup', 'BeautifulSoup', (['LI_page', '"""html.parser"""'], {}), "(LI_page, 'html.parser')\n", (6854, 6878), False, 'from bs4 import BeautifulSoup\n'), ((7944, 7959), 'django.shortcuts.redirect', 'redirect', (['"""../"""'], {}), "('../')\n", (7952, 7959), False, 'from django.shortcuts import render, redirect\n'), ((8561, 8584), 'django.core.paginator.Paginator', 'Paginator', (['articles', '(10)'], {}), '(articles, 10)\n', (8570, 8584), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((8892, 8929), 'django.shortcuts.render', 'render', (['request', '"""home.html"""', 'context'], {}), "(request, 'home.html', context)\n", (8898, 8929), False, 'from django.shortcuts import render, redirect\n'), ((9527, 9558), 'django.shortcuts.render', 'render', (['request', '"""contact.html"""'], {}), "(request, 'contact.html')\n", (9533, 9558), False, 'from django.shortcuts import render, redirect\n'), ((9587, 9616), 'django.shortcuts.render', 'render', (['request', '"""about.html"""'], {}), "(request, 'about.html')\n", (9593, 9616), False, 'from django.shortcuts import render, redirect\n'), ((924, 933), 'news.models.Article', 'Article', ([], {}), '()\n', (931, 933), False, 'from news.models import Article, Comment\n'), ((1865, 1874), 'news.models.Article', 'Article', ([], {}), '()\n', (1872, 1874), False, 'from news.models import Article, Comment\n'), ((3417, 3426), 'news.models.Article', 'Article', ([], {}), '()\n', (3424, 3426), False, 'from news.models import Article, Comment\n'), ((4784, 4793), 'news.models.Article', 'Article', ([], {}), '()\n', (4791, 4793), False, 'from news.models import 
Article, Comment\n'), ((7359, 7368), 'news.models.Article', 'Article', ([], {}), '()\n', (7366, 7368), False, 'from news.models import Article, Comment\n'), ((9437, 9495), 'django.urls.reverse', 'reverse', (['"""ArticleDetail"""'], {'kwargs': "{'pk': self.kwargs['pk']}"}), "('ArticleDetail', kwargs={'pk': self.kwargs['pk']})\n", (9444, 9495), False, 'from django.urls import reverse\n'), ((2869, 2893), 'urllib.request.urlopen', 'urlopen', (['china_power_req'], {}), '(china_power_req)\n', (2876, 2893), False, 'from urllib.request import urlopen, Request\n'), ((4334, 4360), 'urllib.request.urlopen', 'urlopen', (['warontherocks_req'], {}), '(warontherocks_req)\n', (4341, 4360), False, 'from urllib.request import urlopen, Request\n'), ((6807, 6822), 'urllib.request.urlopen', 'urlopen', (['LI_req'], {}), '(LI_req)\n', (6814, 6822), False, 'from urllib.request import urlopen, Request\n'), ((8442, 8466), 'operator.attrgetter', 'attrgetter', (['"""time_added"""'], {}), "('time_added')\n", (8452, 8466), False, 'from operator import attrgetter\n'), ((8102, 8123), 'django.db.models.Q', 'Q', ([], {'title__icontains': 'q'}), '(title__icontains=q)\n', (8103, 8123), False, 'from django.db.models import Q\n')]
|
import abc
from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union
import gym
import numpy as np
import pymunk as pm
from gym import spaces
import xmagical.entities as en
import xmagical.render as r
from xmagical.phys_vars import PhysicsVariablesBase, PhysVar
from xmagical.style import ARENA_ZOOM_OUT, COLORS_RGB, lighten_rgb
class PhysicsVariables(PhysicsVariablesBase):
    """Default values & randomisation ranges for key physical parameters of the environment."""

    # Each PhysVar pairs a default value with a (low, high) tuple — presumably
    # the range sampled by PhysicsVariables.sample() when dynamics
    # randomisation is enabled (see BaseEnv.reset); confirm in phys_vars.py.
    robot_pos_joint_max_force = PhysVar(5, (3.2, 5.5))
    robot_rot_joint_max_force = PhysVar(1, (0.7, 1.5))
    robot_finger_max_force = PhysVar(4, (2.5, 4.5))
    shape_trans_joint_max_force = PhysVar(1.5, (1.0, 1.8))
    shape_rot_joint_max_force = PhysVar(0.1, (0.07, 0.15))
class BaseEnv(gym.Env, abc.ABC):
# Constants for all envs.
ROBOT_RAD = 0.2
ROBOT_MASS = 1.0
SHAPE_RAD = ROBOT_RAD * 0.6
SIZE = 1.1
ARENA_BOUNDS_LRBT = [-SIZE, SIZE, -SIZE, SIZE]
ARENA_SIZE_MAX = max(ARENA_BOUNDS_LRBT)
# Minimum and maximum size of goal regions used during randomisation.
RAND_GOAL_MIN_SIZE = 0.5
RAND_GOAL_MAX_SIZE = 0.8
RAND_GOAL_SIZE_RANGE = RAND_GOAL_MAX_SIZE - RAND_GOAL_MIN_SIZE
# The following are used to standardise what "jitter" means across different
# tasks.
JITTER_PCT = 0.05
JITTER_POS_BOUND = ARENA_SIZE_MAX * JITTER_PCT / 2.0
JITTER_ROT_BOUND = JITTER_PCT * np.pi
JITTER_TARGET_BOUND = JITTER_PCT * RAND_GOAL_SIZE_RANGE / 2
def __init__(
self,
*, # Subclasses can have additional args.
robot_cls: Type[en.embodiments.NonHolonomicEmbodiment],
res_hw: Tuple[int, int] = (256, 256),
fps: float = 20.0,
phys_steps: int = 10,
phys_iter: int = 10,
max_episode_steps: Optional[int] = None,
view_mode: str = "allo",
rand_dynamics: bool = False,
) -> None:
assert view_mode in [
"allo",
"ego",
], "view_mode must be one of ['allo', 'ego']."
self.robot_cls = robot_cls
self.action_dim = robot_cls.DOF
self.phys_iter = phys_iter
self.phys_steps = phys_steps
self.fps = fps
self.res_hw = res_hw
self.max_episode_steps = max_episode_steps
self.rand_dynamics = rand_dynamics
# State/rendering (see reset()).
self._entities = None
self._space = None
self._robot = None
self._episode_steps = None
self._phys_vars = None
self._renderer_func = (
self._use_allo_cam if view_mode == "allo" else self._use_ego_cam
)
# This is for rendering and displaying.
self.renderer = None
self.viewer = None
# Set observation and action spaces.
self.observation_space = spaces.Box(
low=0, high=255, shape=(*self.res_hw, 3), dtype=np.uint8
)
self.action_space = spaces.Box(
np.array([-1] * self.action_dim, dtype=np.float32),
np.array([+1] * self.action_dim, dtype=np.float32),
dtype=np.float32,
)
self.seed()
def seed(self, seed: Optional[int] = None) -> List[int]:
"""Initialise the PRNG and return seed necessary to reproduce results.
The action space should probably be seeded in a downstream RL
application.
"""
if seed is None:
seed = np.random.randint(0, (1 << 31) - 1)
self.rng = np.random.RandomState(seed=seed)
return [seed]
def _make_robot(
self,
init_pos: Union[np.ndarray, Tuple[float, float]],
init_angle: float,
) -> en.embodiments.NonHolonomicEmbodiment:
return self.robot_cls(
radius=self.ROBOT_RAD,
mass=self.ROBOT_MASS,
init_pos=init_pos,
init_angle=init_angle,
)
    def _make_shape(self, **kwargs) -> en.Shape:
        """Create a pushable Shape entity with the standard size and mass.

        Extra keyword arguments (e.g. colour, position) are forwarded to
        en.Shape.
        """
        return en.Shape(shape_size=self.SHAPE_RAD, mass=0.01, **kwargs)
    @abc.abstractmethod
    def on_reset(self) -> None:
        """Set up entities necessary for this environment, and reset any other
        data needed for the env. Must create a robot in addition to any
        necessary entities.

        Called by reset() after the pymunk space and arena boundaries exist;
        implementations must register everything via add_entities().
        """
        pass
def add_entities(self, entities: Sequence[en.Entity]) -> None:
"""Adds a list of entities to the current entities list and sets it up.
Only intended to be used from within on_reset(). Needs to be called for
every created entity or else they will not be added to the space!
"""
for entity in entities:
if isinstance(entity, self.robot_cls):
self._robot = entity
self._entities.append(entity)
entity.setup(self.renderer, self._space, self._phys_vars)
def _use_ego_cam(self) -> None:
"""Egocentric agent view."""
self.renderer.set_cam_follow(
source_xy_world=(
self._robot.body.position.x,
self._robot.body.position.y,
),
target_xy_01=(0.5, 0.15),
viewport_hw_world=(
self._arena_h * ARENA_ZOOM_OUT,
self._arena_w * ARENA_ZOOM_OUT,
),
rotation=self._robot.body.angle,
)
def _use_allo_cam(self) -> None:
"""Allocentric 'god-mode' view."""
self.renderer.set_bounds(
left=self._arena.left * ARENA_ZOOM_OUT,
right=self._arena.right * ARENA_ZOOM_OUT,
bottom=self._arena.bottom * ARENA_ZOOM_OUT,
top=self._arena.top * ARENA_ZOOM_OUT,
)
def reset(self):
self._episode_steps = 0
# Delete old entities/space.
self._entities = []
self._space = None
self._robot = None
self._phys_vars = None
if self.renderer is None:
res_h, res_w = self.res_hw
background_color = lighten_rgb(COLORS_RGB["grey"], times=4)
self.renderer = r.Viewer(res_w, res_h, background_color)
else:
# These will get added back later.
self.renderer.reset_geoms()
self._space = pm.Space()
self._space.collision_slop = 0.01
self._space.iterations = self.phys_iter
if self.rand_dynamics:
# Randomise the physics properties of objects and the robot a
# little bit.
self._phys_vars = PhysicsVariables.sample(self.rng)
else:
self._phys_vars = PhysicsVariables.defaults()
# Set up robot and arena.
arena_l, arena_r, arena_b, arena_t = self.ARENA_BOUNDS_LRBT
self._arena = en.ArenaBoundaries(
left=arena_l, right=arena_r, bottom=arena_b, top=arena_t
)
self._arena_w = arena_r - arena_l
self._arena_h = arena_t - arena_b
self.add_entities([self._arena])
reset_rv = self.on_reset()
assert reset_rv is None, (
f"on_reset method of {type(self)} returned {reset_rv}, but "
f"should return None"
)
assert isinstance(self._robot, self.robot_cls)
assert len(self._entities) >= 1
assert np.allclose(self._arena.left + self._arena.right, 0)
assert np.allclose(self._arena.bottom + self._arena.top, 0)
self._renderer_func()
return self.render(mode="rgb_array")
def _phys_steps_on_frame(self):
spf = 1 / self.fps
dt = spf / self.phys_steps
for i in range(self.phys_steps):
for ent in self._entities:
ent.update(dt)
self._space.step(dt)
@abc.abstractmethod
def score_on_end_of_traj(self) -> float:
"""Compute the score for this trajectory.
Only called at the last step of the trajectory.
Returns:
score: number in [0, 1] indicating the worst possible
performance (0), the best possible performance (1) or something
in between. Should apply to the WHOLE trajectory.
"""
pass # pytype: disable=bad-return-type
@abc.abstractclassmethod
def get_reward(self) -> float:
"""Compute the reward for the current timestep.
This is called at the end of every timestep.
"""
pass # pytype: disable=bad-return-type
def step(self, action) -> Tuple[np.ndarray, float, bool, Dict[str, Any]]:
self._robot.set_action(action)
self._phys_steps_on_frame()
self._episode_steps += 1
obs = self.render(mode="rgb_array")
reward = self.get_reward()
done = False
eval_score = 0.0
info = {}
if self.max_episode_steps is not None:
if self._episode_steps >= self.max_episode_steps:
info["TimeLimit.truncated"] = not done
done = True
if done:
eval_score = self.score_on_end_of_traj()
assert (
0 <= eval_score <= 1
), f"eval score {eval_score} out of range for env {self}"
info.update(eval_score=eval_score)
return obs, reward, done, info
def render(self, mode="human") -> Optional[np.ndarray]:
for ent in self._entities:
ent.pre_draw()
self._renderer_func()
obs = self.renderer.render()
if mode == "human":
from gym.envs.classic_control import rendering
if self.viewer is None:
self.viewer = rendering.SimpleImageViewer()
self.viewer.imshow(obs)
else:
return obs
def close(self) -> None:
if self.renderer:
self.renderer.close()
self.renderer = None
if self.viewer:
self.viewer.close()
self.viewer = None
|
[
"pymunk.Space",
"xmagical.entities.ArenaBoundaries",
"numpy.allclose",
"xmagical.phys_vars.PhysVar",
"xmagical.style.lighten_rgb",
"numpy.random.RandomState",
"numpy.random.randint",
"numpy.array",
"gym.spaces.Box",
"xmagical.render.Viewer",
"gym.envs.classic_control.rendering.SimpleImageViewer",
"xmagical.entities.Shape"
] |
[((525, 547), 'xmagical.phys_vars.PhysVar', 'PhysVar', (['(5)', '(3.2, 5.5)'], {}), '(5, (3.2, 5.5))\n', (532, 547), False, 'from xmagical.phys_vars import PhysicsVariablesBase, PhysVar\n'), ((580, 602), 'xmagical.phys_vars.PhysVar', 'PhysVar', (['(1)', '(0.7, 1.5)'], {}), '(1, (0.7, 1.5))\n', (587, 602), False, 'from xmagical.phys_vars import PhysicsVariablesBase, PhysVar\n'), ((632, 654), 'xmagical.phys_vars.PhysVar', 'PhysVar', (['(4)', '(2.5, 4.5)'], {}), '(4, (2.5, 4.5))\n', (639, 654), False, 'from xmagical.phys_vars import PhysicsVariablesBase, PhysVar\n'), ((689, 713), 'xmagical.phys_vars.PhysVar', 'PhysVar', (['(1.5)', '(1.0, 1.8)'], {}), '(1.5, (1.0, 1.8))\n', (696, 713), False, 'from xmagical.phys_vars import PhysicsVariablesBase, PhysVar\n'), ((746, 772), 'xmagical.phys_vars.PhysVar', 'PhysVar', (['(0.1)', '(0.07, 0.15)'], {}), '(0.1, (0.07, 0.15))\n', (753, 772), False, 'from xmagical.phys_vars import PhysicsVariablesBase, PhysVar\n'), ((2826, 2894), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(*self.res_hw, 3)', 'dtype': 'np.uint8'}), '(low=0, high=255, shape=(*self.res_hw, 3), dtype=np.uint8)\n', (2836, 2894), False, 'from gym import spaces\n'), ((3490, 3522), 'numpy.random.RandomState', 'np.random.RandomState', ([], {'seed': 'seed'}), '(seed=seed)\n', (3511, 3522), True, 'import numpy as np\n'), ((3955, 4011), 'xmagical.entities.Shape', 'en.Shape', ([], {'shape_size': 'self.SHAPE_RAD', 'mass': '(0.01)'}), '(shape_size=self.SHAPE_RAD, mass=0.01, **kwargs)\n', (3963, 4011), True, 'import xmagical.entities as en\n'), ((6184, 6194), 'pymunk.Space', 'pm.Space', ([], {}), '()\n', (6192, 6194), True, 'import pymunk as pm\n'), ((6678, 6754), 'xmagical.entities.ArenaBoundaries', 'en.ArenaBoundaries', ([], {'left': 'arena_l', 'right': 'arena_r', 'bottom': 'arena_b', 'top': 'arena_t'}), '(left=arena_l, right=arena_r, bottom=arena_b, top=arena_t)\n', (6696, 6754), True, 'import xmagical.entities as en\n'), ((7201, 7253), 
'numpy.allclose', 'np.allclose', (['(self._arena.left + self._arena.right)', '(0)'], {}), '(self._arena.left + self._arena.right, 0)\n', (7212, 7253), True, 'import numpy as np\n'), ((7269, 7321), 'numpy.allclose', 'np.allclose', (['(self._arena.bottom + self._arena.top)', '(0)'], {}), '(self._arena.bottom + self._arena.top, 0)\n', (7280, 7321), True, 'import numpy as np\n'), ((2969, 3019), 'numpy.array', 'np.array', (['([-1] * self.action_dim)'], {'dtype': 'np.float32'}), '([-1] * self.action_dim, dtype=np.float32)\n', (2977, 3019), True, 'import numpy as np\n'), ((3033, 3083), 'numpy.array', 'np.array', (['([+1] * self.action_dim)'], {'dtype': 'np.float32'}), '([+1] * self.action_dim, dtype=np.float32)\n', (3041, 3083), True, 'import numpy as np\n'), ((3435, 3470), 'numpy.random.randint', 'np.random.randint', (['(0)', '((1 << 31) - 1)'], {}), '(0, (1 << 31) - 1)\n', (3452, 3470), True, 'import numpy as np\n'), ((5950, 5990), 'xmagical.style.lighten_rgb', 'lighten_rgb', (["COLORS_RGB['grey']"], {'times': '(4)'}), "(COLORS_RGB['grey'], times=4)\n", (5961, 5990), False, 'from xmagical.style import ARENA_ZOOM_OUT, COLORS_RGB, lighten_rgb\n'), ((6019, 6059), 'xmagical.render.Viewer', 'r.Viewer', (['res_w', 'res_h', 'background_color'], {}), '(res_w, res_h, background_color)\n', (6027, 6059), True, 'import xmagical.render as r\n'), ((9491, 9520), 'gym.envs.classic_control.rendering.SimpleImageViewer', 'rendering.SimpleImageViewer', ([], {}), '()\n', (9518, 9520), False, 'from gym.envs.classic_control import rendering\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2019 Tampere University
# This software was developed as a part of the CityIoT project: https://www.cityiot.fi/english
# This source code is licensed under the 3-clause BSD license. See license.txt in the repository root directory.
# Author(s): <NAME> <<EMAIL>>
'''
Helper module for reading configuration files in the JSON format
'''
import json
import utils
def loadConfig( fileName ):
'''
Reads the named file from configuration directory and converts it to JSON.
The conversion result is returned.
'''
confFile = utils.getAppDir() / 'conf' / fileName
with open( confFile, 'r' ) as file:
return json.load( file )
|
[
"json.load",
"utils.getAppDir"
] |
[((668, 683), 'json.load', 'json.load', (['file'], {}), '(file)\n', (677, 683), False, 'import json\n'), ((575, 592), 'utils.getAppDir', 'utils.getAppDir', ([], {}), '()\n', (590, 592), False, 'import utils\n')]
|
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
def fit_poly_reg(X, y, degree=1, memory_path=None) -> Pipeline:
polyreg = make_pipeline(PolynomialFeatures(degree), LinearRegression(), memory=memory_path)
polyreg.fit(X, y)
return polyreg
|
[
"sklearn.linear_model.LinearRegression",
"sklearn.preprocessing.PolynomialFeatures"
] |
[((278, 304), 'sklearn.preprocessing.PolynomialFeatures', 'PolynomialFeatures', (['degree'], {}), '(degree)\n', (296, 304), False, 'from sklearn.preprocessing import PolynomialFeatures\n'), ((306, 324), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (322, 324), False, 'from sklearn.linear_model import LinearRegression\n')]
|
#!/usr/bin/env python3
import bdsim
sim = bdsim.BDSim(animation=True) # create simulator
print(sim)
bd = sim.blockdiagram() # create an empty block diagram
# define the blocks
demand = bd.STEP(T=1, pos=(0,0), name='demand')
sum = bd.SUM('+-', pos=(1,0))
gain = bd.GAIN(10, pos=(1.5,0))
plant = bd.LTI_SISO(0.5, [2, 1], name='plant', pos=(3,0))
scope = bd.SCOPE(styles=['k', 'r--'], pos=(4,0))
# connect the blocks
bd.connect(demand, sum[0], scope[1])
bd.connect(plant, sum[1])
bd.connect(sum, gain)
bd.connect(gain, plant)
bd.connect(plant, scope[0])
bd.compile() # check the diagram
bd.report() # list all blocks and wires
sim.set_options(animation=True, graphics=True)
out = sim.run(bd, 5, watch=[plant,demand]) # simulate for 5s
sim.savefig(scope, 'scope0')
sim.done(block=False)
print(out)
|
[
"bdsim.BDSim"
] |
[((44, 71), 'bdsim.BDSim', 'bdsim.BDSim', ([], {'animation': '(True)'}), '(animation=True)\n', (55, 71), False, 'import bdsim\n')]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2016年8月9日13:16:54
import datetime
import json
import os
import youkube.compoents.model as model
import youkube.compoents.youtube_compoent as youtube
import youkube.util as util
import time
import youkube.constants as constants
import youkube.compoents.youku_compoent as youkucom
logger = util.get_logger('Youkube')
"""
配置文件示例
user youtube要订阅的用户
video_dir 视频文件保存路径
thumbnail_dir 视频缩略图/封面图保存路径
sqlite3_file sqlite3数据库文件路
youku_client_id 优酷client id
youku_access_token 优酷access_token
{
"users": [
{"user":"greatscottlab", "channel_name": "GreateScoot", "youku_prefix": "GreateScoot - ", "desc": "模拟电路数字电路", "category": "科技"},
{"user":"DarduinMyMenlon", "channel_name": "Dota2 WTF", "youku_prefix" : "", "desc" : "Dota2 Wtf", "category": "游戏"}
{"user":"Larva2011ani", "channel_name": "Larva ", "youku_prefix" : "Larva - ", "desc" : "红虫黄虫", "category": "搞笑"}
],
"video_dir": "/root/video",
"thumbnail_dir": "/root/thumbnail",
"sqlite3_file": "/root/sqlite3.db",
"youku_client_id": "97c24e4be2c1383a",
"youku_access_token": "<KEY>"
}
"""
class Youkube(object):
def __init__(self, config_file_path):
with open(config_file_path) as file:
self.config = json.loads(file.read())
if not self.config:
raise YoukubeException("配置文件读取失败!")
self.repo = YoukubeRepo(self.config['sqlite3_file'])
self.youtube = youtube.YoutubeCompoentImpl()
self.youku = youkucom.Youku(self.config['youku_client_id'], self.config['youku_access_token'])
def run(self):
while True:
logger.info("[Youkube] - 检查并准备删除已上传成功的视频文件...")
self.del_uploaded_video_file()
logger.info("[Youkube] - 检查未完成上传的视频...")
self.retry_upload_task()
logger.info("[Youkube] - 抓取最新视频...")
self.fetch_new_videos()
logger.info(u"[Youkube] - 所有视频处理完成,等待1分钟重新获取新视频!")
time.sleep(60)
def fetch_new_videos(self):
for i in self.config['items']:
if i['type'] == 'user':
links = self.youtube.fetch_user_page_video_links(i['user'])
else:
links = self.youtube.fetch_channel_page_video_links(i['channel'])
self.fetch_new_video(self.rm_dup_link(links), i)
def rm_dup_link(self, links):
uniquelist = []
for i in links:
if (i not in uniquelist ) and (not self.repo.find_by_url(i)):
uniquelist.append(i)
return uniquelist
def fetch_new_video(self, uniquelist, use_info):
"""
{
"user":"greatscottlab",
"channel_name": "GreateScoot",
"youku_prefix": "GreateScoot - ",
"desc": "模拟电路数字电路",
"category": "科技"
}
"""
for link in uniquelist:
# 视频基本信息的字典数据,信息由youtube-dl 提供
info_dict = self.youtube.fetch_video_base_info(link)
# 将视频保存到数据库
try:
video_entity = self.__save_new_video_info_to_db__(info_dict, use_info)
except Exception as e:
logger.error(u"保存失败! reason :" + e.__str__())
continue
logger.debug(u"发现新视频 %s 时长 %s " % (video_entity.title, video_entity.duration))
logger.info(u"视频 %s 下载任务创建成功,正在下载!" % video_entity.title)
self.repo.chg_status(video_entity, constants.VIDEO_STATUS_DOWNLOADING)
self.youtube.download(link, self.config['video_dir'], video_entity.ext, info_dict['url'])
logger.info(u"视频 %s 下载成功,准备上传!" % video_entity.title)
video_entity.filesize = os.path.getsize(
"%s%s.%s" % (self.config['video_dir'], util.md5encode(video_entity.url), video_entity.ext))
self.repo.save(video_entity)
self.repo.chg_status(video_entity, constants.VIDEO_STATUS_DOWNLOADED)
self.retry_upload_task()
self.del_uploaded_video_file()
def retry_upload_task(self):
need_upload_video = self.repo.find_need_upload_video()
for n in need_upload_video:
n.filesize = os.path.getsize(
"%s%s.%s" % (self.config['video_dir'], util.md5encode(n.url), n.ext))
self.repo.save(n)
logger.info(u"[Youkube] - 视频 %s 开始上传!" % n.title)
self.repo.chg_status(n, constants.VIDEO_STATUS_UPLOADING)
try:
self.youku.upload(
"%s%s.%s" % (self.config['video_dir'], util.md5encode(n.url), n.ext),
n.youku_prefix + n.title, "", n.desc, n.category)
except Exception as e:
logger.warn(u"[Youkube] - 视频上传失败! : " + e.__str__())
continue
logger.info(u"[Youkube] - 视频 %s 上传完成!" % n.title)
self.repo.chg_status(n, constants.VIDEO_STATUS_UPLOADED)
self.del_uploaded_video_file()
def del_uploaded_video_file(self):
uploaded_videps = self.repo.find_uploaded_video()
for v in uploaded_videps:
file_paht = self.config['video_dir'] + '/' + v.url_hash + '.' + v.ext
is_exist = os.path.exists(file_paht)
if is_exist:
logger.info(u"[Youkube] - 视频 %s 已上传成功 ! 视频文件 %s 准备删除!" % (v.title, file_paht))
os.remove(file_paht)
logger.info(u"[Youkube] - 视频 %s 视频文件 %s 删除成功!" % (v.title, file_paht))
def __save_new_video_info_to_db__(self, info_dict, user_info):
"""
{
"user":"greatscottlab",
"channel_name": "GreateScoot",
"youku_prefix": "GreateScoot - ",
"desc": "模拟电路数字电路"},
"""
date_time_format = '%Y%m%d'
video = model.Video()
video.url = info_dict['webpage_url']
video.url_hash = util.md5encode(video.url)
video.uploader = info_dict['uploader']
video.title = info_dict['title']
video.like_count = info_dict['like_count']
video.dislike_count = info_dict['dislike_count']
video.duration = info_dict['duration']
video.format_note = info_dict['format_note']
video.height = info_dict['height']
video.width = info_dict['width']
video.resolution = info_dict['resolution']
video.view_count = info_dict['view_count']
video.video_id = info_dict['id']
video.format = info_dict['format']
video.filesize = 0 # info_dict['filesize']
video.ext = info_dict['ext']
video.thumbnail = info_dict['thumbnail']
try:
video.upload_date = datetime.datetime.strptime(info_dict['upload_date'], date_time_format)
except Exception:
video.upload_date = datetime.datetime.now()
video.create_time = datetime.datetime.now()
video.update_time = datetime.datetime.now()
try:
video.user = user_info['user']
except Exception:
video.user = user_info['channel']
video.channel_name = user_info['channel_name']
video.youku_prefix = user_info['youku_prefix']
video.desc = user_info['desc']
video.category = user_info['category']
self.repo.save(video)
return video
class YoukubeRepo(object):
"""数据库访问类
包括了视频信息,任务信息等等
Attributes:
sqlite3_file (str): 数据库文件位置
"""
def __init__(self, sqlite3_file):
if not sqlite3_file:
raise YoukubeRepoException("参数 sqlite3_file 不能为空!")
model.deferred_db.init(sqlite3_file)
try:
model.deferred_db.connect()
except Exception as e:
raise YoukubeRepoException("数据库连接失败: " + e.message)
if not model.Video.table_exists():
model.Video.create_table()
def save(self, video):
"""将新发布的视频信息保存到数据库
Args:
video (model.Video): 视频实体
"""
video.save()
def update(self, video):
"""将新发布的视频信息保存到数据库
Args:
video (model.Video): 视频实体
"""
video.update()
def find_by_url_hash(self, url_hash):
try:
model.Video.get(model.Video.url_hash == url_hash)
except:
return None
def find_by_url(self, url):
try:
return model.Video.get(model.Video.url == url)
except:
return None
def chg_status(self, video_entity, status):
video_entity.status = status
video_entity.update_time = datetime.datetime.now()
video_entity.save()
def find_need_upload_video(self):
return model.Video.select().where(model.Video.status >= 3 and model.Video.status <= 5)
def find_uploaded_video(self):
return model.Video.select().where(model.Video.status == 6)
class YoukubeRepoException(Exception):
def __init__(self, msg):
self.message = msg
def __str__(self):
return self.message
class YoukubeException(Exception):
def __init__(self, msg):
self.message = msg
def __str__(self):
return self.message
|
[
"os.remove",
"youkube.compoents.model.Video.select",
"youkube.compoents.model.deferred_db.init",
"os.path.exists",
"youkube.compoents.model.Video.create_table",
"youkube.compoents.youku_compoent.Youku",
"time.sleep",
"youkube.compoents.model.Video.get",
"youkube.util.get_logger",
"datetime.datetime.strptime",
"youkube.util.md5encode",
"youkube.compoents.model.Video",
"youkube.compoents.youtube_compoent.YoutubeCompoentImpl",
"youkube.compoents.model.Video.table_exists",
"datetime.datetime.now",
"youkube.compoents.model.deferred_db.connect"
] |
[((351, 377), 'youkube.util.get_logger', 'util.get_logger', (['"""Youkube"""'], {}), "('Youkube')\n", (366, 377), True, 'import youkube.util as util\n'), ((1477, 1506), 'youkube.compoents.youtube_compoent.YoutubeCompoentImpl', 'youtube.YoutubeCompoentImpl', ([], {}), '()\n', (1504, 1506), True, 'import youkube.compoents.youtube_compoent as youtube\n'), ((1528, 1614), 'youkube.compoents.youku_compoent.Youku', 'youkucom.Youku', (["self.config['youku_client_id']", "self.config['youku_access_token']"], {}), "(self.config['youku_client_id'], self.config[\n 'youku_access_token'])\n", (1542, 1614), True, 'import youkube.compoents.youku_compoent as youkucom\n'), ((5798, 5811), 'youkube.compoents.model.Video', 'model.Video', ([], {}), '()\n', (5809, 5811), True, 'import youkube.compoents.model as model\n'), ((5883, 5908), 'youkube.util.md5encode', 'util.md5encode', (['video.url'], {}), '(video.url)\n', (5897, 5908), True, 'import youkube.util as util\n'), ((6840, 6863), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6861, 6863), False, 'import datetime\n'), ((6892, 6915), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6913, 6915), False, 'import datetime\n'), ((7560, 7596), 'youkube.compoents.model.deferred_db.init', 'model.deferred_db.init', (['sqlite3_file'], {}), '(sqlite3_file)\n', (7582, 7596), True, 'import youkube.compoents.model as model\n'), ((8539, 8562), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8560, 8562), False, 'import datetime\n'), ((2007, 2021), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (2017, 2021), False, 'import time\n'), ((5218, 5243), 'os.path.exists', 'os.path.exists', (['file_paht'], {}), '(file_paht)\n', (5232, 5243), False, 'import os\n'), ((6657, 6727), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["info_dict['upload_date']", 'date_time_format'], {}), "(info_dict['upload_date'], date_time_format)\n", (6683, 6727), False, 'import datetime\n'), ((7623, 
7650), 'youkube.compoents.model.deferred_db.connect', 'model.deferred_db.connect', ([], {}), '()\n', (7648, 7650), True, 'import youkube.compoents.model as model\n'), ((7762, 7788), 'youkube.compoents.model.Video.table_exists', 'model.Video.table_exists', ([], {}), '()\n', (7786, 7788), True, 'import youkube.compoents.model as model\n'), ((7802, 7828), 'youkube.compoents.model.Video.create_table', 'model.Video.create_table', ([], {}), '()\n', (7826, 7828), True, 'import youkube.compoents.model as model\n'), ((8183, 8232), 'youkube.compoents.model.Video.get', 'model.Video.get', (['(model.Video.url_hash == url_hash)'], {}), '(model.Video.url_hash == url_hash)\n', (8198, 8232), True, 'import youkube.compoents.model as model\n'), ((8338, 8377), 'youkube.compoents.model.Video.get', 'model.Video.get', (['(model.Video.url == url)'], {}), '(model.Video.url == url)\n', (8353, 8377), True, 'import youkube.compoents.model as model\n'), ((5381, 5401), 'os.remove', 'os.remove', (['file_paht'], {}), '(file_paht)\n', (5390, 5401), False, 'import os\n'), ((6786, 6809), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6807, 6809), False, 'import datetime\n'), ((8645, 8665), 'youkube.compoents.model.Video.select', 'model.Video.select', ([], {}), '()\n', (8663, 8665), True, 'import youkube.compoents.model as model\n'), ((8776, 8796), 'youkube.compoents.model.Video.select', 'model.Video.select', ([], {}), '()\n', (8794, 8796), True, 'import youkube.compoents.model as model\n'), ((3780, 3812), 'youkube.util.md5encode', 'util.md5encode', (['video_entity.url'], {}), '(video_entity.url)\n', (3794, 3812), True, 'import youkube.util as util\n'), ((4269, 4290), 'youkube.util.md5encode', 'util.md5encode', (['n.url'], {}), '(n.url)\n', (4283, 4290), True, 'import youkube.util as util\n'), ((4575, 4596), 'youkube.util.md5encode', 'util.md5encode', (['n.url'], {}), '(n.url)\n', (4589, 4596), True, 'import youkube.util as util\n')]
|
import json
class ROIUpdateRegions:
roi_region_list = list()
def __init__(self):
self.roi_region_list.clear()
def add_roi_region(self, id, ltx, lty, rbx, rby):
testNestedDict = {
"id": id,
"region": {
"lt": {
"x": ltx,
"y": lty
},
"rb": {
"x": rbx,
"y": rby
}
}
}
self.roi_region_list.append(testNestedDict)
def print_roi_regions(self):
print(json.dumps(self.roi_region_list))
###############################################################################
# sample codes
###############################################################################
if __name__ == '__main__':
rur = ROIUpdateRegions()
rur.add_roi_region('abc', 100, 200, 500, 600)
rur.add_roi_region('uuu', 300, 500, 600, 900)
rur.print_roi_regions()
|
[
"json.dumps"
] |
[((585, 617), 'json.dumps', 'json.dumps', (['self.roi_region_list'], {}), '(self.roi_region_list)\n', (595, 617), False, 'import json\n')]
|
#!/usr/bin/env python
from __future__ import unicode_literals
from __future__ import print_function
import falcon
import spacy
import json
import sys
from spacy.pipeline import EntityRecognizer
import spacy.util
from spacy.tagger import Tagger
from .parse import Entities, TrainEntities
from falcon_cors import CORS
try:
unicode
except NameError:
unicode = str
_models = {}
def get_model(model_name):
if model_name not in _models:
model = spacy.load(model_name)
if model.tagger is None:
model.tagger = Tagger(model.vocab, features=Tagger.feature_templates)
if model.entity is None:
model.entity = EntityRecognizer(model.vocab, entity_types=['PERSON', 'NORP', 'FACILITY', 'ORG', 'GPE',
'LOC', 'PRODUCT', 'EVENT', 'WORK_OF_ART',
'LANGUAGE', 'DATE', 'TIME', 'PERCENT',
'MONEY', 'QUANTITY', 'ORDINAL', 'CARDINAL'])
model.pipeline = [model.tagger, model.entity, model.parser]
_models[model_name] = model
return _models[model_name]
def update_vocabulary(model, texts):
for text in texts:
doc = model.make_doc(text)
for word in doc:
_ = model.vocab[word.orth]
class EntResource(object):
"""Parse text and return displaCy ent's expected output."""
def on_post(self, req, resp):
req_body = req.stream.read()
json_data = json.loads(req_body.decode('utf8'))
paragraphs = json_data.get('paragraphs')
model_name = json_data.get('model', 'en')
try:
model = get_model(model_name)
entities = []
for p in paragraphs:
e = Entities(model, p.get('text'))
entities.append(e.to_json())
resp.body = json.dumps(entities, sort_keys=True, indent=2)
resp.content_type = 'application/json'
resp.status = falcon.HTTP_200
except Exception:
resp.status = falcon.HTTP_500
class TrainEntResource(object):
"""Parse text and use it to train the entity recognizer."""
def on_post(self, req, resp):
req_body = req.stream.read()
json_data = json.loads(req_body.decode('utf8'))
paragraphs = json_data.get('paragraphs')
model_name = json_data.get('model', 'en')
try:
model = get_model(model_name)
texts = [paragraph.get('text') for paragraph in paragraphs]
update_vocabulary(model, texts)
entities = []
for p in paragraphs:
e = TrainEntities(model, p.get('text'), p.get('tags'))
entities.append(e.to_json())
resp.body = json.dumps(entities, sort_keys=True, indent=2)
resp.content_type = 'application/json'
resp.status = falcon.HTTP_200
except Exception:
print("Unexpected error:", sys.exc_info()[0])
resp.status = falcon.HTTP_500
cors = CORS(allow_all_origins=True)
APP = falcon.API(middleware=[cors.middleware])
APP.add_route('/ent', EntResource())
APP.add_route('/train', TrainEntResource())
|
[
"spacy.tagger.Tagger",
"spacy.pipeline.EntityRecognizer",
"json.dumps",
"falcon_cors.CORS",
"spacy.load",
"falcon.API",
"sys.exc_info"
] |
[((3117, 3145), 'falcon_cors.CORS', 'CORS', ([], {'allow_all_origins': '(True)'}), '(allow_all_origins=True)\n', (3121, 3145), False, 'from falcon_cors import CORS\n'), ((3152, 3192), 'falcon.API', 'falcon.API', ([], {'middleware': '[cors.middleware]'}), '(middleware=[cors.middleware])\n', (3162, 3192), False, 'import falcon\n'), ((467, 489), 'spacy.load', 'spacy.load', (['model_name'], {}), '(model_name)\n', (477, 489), False, 'import spacy\n'), ((550, 604), 'spacy.tagger.Tagger', 'Tagger', (['model.vocab'], {'features': 'Tagger.feature_templates'}), '(model.vocab, features=Tagger.feature_templates)\n', (556, 604), False, 'from spacy.tagger import Tagger\n'), ((665, 886), 'spacy.pipeline.EntityRecognizer', 'EntityRecognizer', (['model.vocab'], {'entity_types': "['PERSON', 'NORP', 'FACILITY', 'ORG', 'GPE', 'LOC', 'PRODUCT', 'EVENT',\n 'WORK_OF_ART', 'LANGUAGE', 'DATE', 'TIME', 'PERCENT', 'MONEY',\n 'QUANTITY', 'ORDINAL', 'CARDINAL']"}), "(model.vocab, entity_types=['PERSON', 'NORP', 'FACILITY',\n 'ORG', 'GPE', 'LOC', 'PRODUCT', 'EVENT', 'WORK_OF_ART', 'LANGUAGE',\n 'DATE', 'TIME', 'PERCENT', 'MONEY', 'QUANTITY', 'ORDINAL', 'CARDINAL'])\n", (681, 886), False, 'from spacy.pipeline import EntityRecognizer\n'), ((1941, 1987), 'json.dumps', 'json.dumps', (['entities'], {'sort_keys': '(True)', 'indent': '(2)'}), '(entities, sort_keys=True, indent=2)\n', (1951, 1987), False, 'import json\n'), ((2843, 2889), 'json.dumps', 'json.dumps', (['entities'], {'sort_keys': '(True)', 'indent': '(2)'}), '(entities, sort_keys=True, indent=2)\n', (2853, 2889), False, 'import json\n'), ((3048, 3062), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3060, 3062), False, 'import sys\n')]
|
from mmcv.utils import Registry
OPTIMIZERS = Registry('optimizers')
|
[
"mmcv.utils.Registry"
] |
[((46, 68), 'mmcv.utils.Registry', 'Registry', (['"""optimizers"""'], {}), "('optimizers')\n", (54, 68), False, 'from mmcv.utils import Registry\n')]
|
from lumada.client.api.gateway_client_base import GatewayClientBase
from lumada.utils.validator import Validator
from lumada.client.lumada_client import LumadaClient
from lumada.client.asset_registration_client import AssetRegistrationClient
from lumada.client.asset_client import AssetClient
class GatewayClient(GatewayClientBase):
def __init__(self, gateway_client_config):
self._lumada_client = LumadaClient(Validator.validate_config_provided(gateway_client_config, 'AssetClientConfig'))
self._gateway_id = gateway_client_config.get_credentials().get_entity_id()
self._gateway_value = gateway_client_config.get_credentials().get_entity_value()
self._registration_client = AssetRegistrationClient(gateway_id=self._gateway_id,
gateway_value=self._gateway_value,
asset_registration_endpoint=gateway_client_config.get_registration_endpoint())
def register_asset_behind_gateway(self, asset_name, gateway_id, tags):
"""
Registers an asset behind a gateway
:param asset_name: Name of asset to register
:param gateway_id: ID of the gateway to register the client
:param tags: tags/params to be encoded on the url
:return: asset client
"""
Validator.validate_param(asset_name, 'AssetName')
Validator.validate_param(gateway_id, 'GatewayId')
asset_id = self._registration_client.register_asset(asset_name=asset_name, gateway_id=gateway_id, properties=tags)
asset_client = AssetClient.from_gateway(asset_id=asset_id, gateway_id=self._gateway_id, client=self._lumada_client)
return asset_client
def create_asset_client(self, asset_id):
"""
Create new asset client that communicates with lumada via the gateway
:param asset_id: ID of the asset to create
:return: Asset Client
"""
Validator.validate_param(asset_id, 'AssetId')
self._registration_client.verify_asset(asset_id=asset_id)
asset_client = AssetClient.from_gateway(asset_id=asset_id, gateway_id=self._gateway_id, client=self._lumada_client)
return asset_client
def close(self):
"""
Disconnects from given communication channel
"""
self._lumada_client.disconnect()
|
[
"lumada.client.asset_client.AssetClient.from_gateway",
"lumada.utils.validator.Validator.validate_config_provided",
"lumada.utils.validator.Validator.validate_param"
] |
[((1362, 1411), 'lumada.utils.validator.Validator.validate_param', 'Validator.validate_param', (['asset_name', '"""AssetName"""'], {}), "(asset_name, 'AssetName')\n", (1386, 1411), False, 'from lumada.utils.validator import Validator\n'), ((1420, 1469), 'lumada.utils.validator.Validator.validate_param', 'Validator.validate_param', (['gateway_id', '"""GatewayId"""'], {}), "(gateway_id, 'GatewayId')\n", (1444, 1469), False, 'from lumada.utils.validator import Validator\n'), ((1618, 1722), 'lumada.client.asset_client.AssetClient.from_gateway', 'AssetClient.from_gateway', ([], {'asset_id': 'asset_id', 'gateway_id': 'self._gateway_id', 'client': 'self._lumada_client'}), '(asset_id=asset_id, gateway_id=self._gateway_id,\n client=self._lumada_client)\n', (1642, 1722), False, 'from lumada.client.asset_client import AssetClient\n'), ((1985, 2030), 'lumada.utils.validator.Validator.validate_param', 'Validator.validate_param', (['asset_id', '"""AssetId"""'], {}), "(asset_id, 'AssetId')\n", (2009, 2030), False, 'from lumada.utils.validator import Validator\n'), ((2121, 2225), 'lumada.client.asset_client.AssetClient.from_gateway', 'AssetClient.from_gateway', ([], {'asset_id': 'asset_id', 'gateway_id': 'self._gateway_id', 'client': 'self._lumada_client'}), '(asset_id=asset_id, gateway_id=self._gateway_id,\n client=self._lumada_client)\n', (2145, 2225), False, 'from lumada.client.asset_client import AssetClient\n'), ((426, 504), 'lumada.utils.validator.Validator.validate_config_provided', 'Validator.validate_config_provided', (['gateway_client_config', '"""AssetClientConfig"""'], {}), "(gateway_client_config, 'AssetClientConfig')\n", (460, 504), False, 'from lumada.utils.validator import Validator\n')]
|
import unittest
import numpy as np
import cddm.core as core
from cddm.conf import FDTYPE, CDTYPE
from cddm.video import fromarrays
#test arrays
a = [1.,2,3,4]
b = [5,6,7,8]
t1 = [1,3,7,8]
t2 = [2,4,6,8]
#results fo calculations
cross_a_b = np.array([ 70., 100., 62., 28.],FDTYPE)
cross_a_b_t1_t2 = np.array([32., 72., 28., 38., 24., 38., 20., 8.],FDTYPE)
auto_a = np.array([30., 20., 11., 4.], FDTYPE)
auto_a_t1 = np.array([30., 12., 2., 0., 6., 8., 3., 4.],FDTYPE)
auto_sum_a = np.array([10. , 7.5, 5. , 2.5], FDTYPE)
auto_sum_a_t1 = np.array([10. , 3.5, 1.5, 0. , 2.5, 3. , 2. , 2.5],FDTYPE)
cross_sum_a = np.array([10., 15., 10., 5.], FDTYPE)
cross_sum_a_t1_t2 = np.array([ 4., 11., 4., 6., 4., 6., 4., 1.],FDTYPE)
cross_count_10 = np.array([10, 18, 16, 14, 12, 10, 8, 6, 4, 2],FDTYPE)
cross_count_t1_t2 = np.array([1, 5, 1, 3, 1, 3, 1, 1],FDTYPE)
auto_count_10 = np.array([10, 9, 8, 7, 6, 5, 4, 3, 2, 1],FDTYPE)
auto_count_t1 = np.array([4, 1, 1, 0, 1, 1, 1, 1],FDTYPE)
np.random.seed(0)
a2 = [a,a]
b2 = [b,b]
test_data1 = np.random.randn(32,19,8) + np.random.randn(32,19,8)*1j
test_data2 = np.random.randn(32,19,8) + np.random.randn(32,19,8)*1j
test_data1 = np.array(test_data1, CDTYPE)
test_data2 = np.array(test_data2, CDTYPE)
test_mask = np.ones((19,8),bool)
test_mask[0] = False
test_mask[:,0::3] = False
class TestCorrelateDifference(unittest.TestCase):
    """Tests for auto/cross correlation in :mod:`cddm.core`.

    Each method follows the same pattern: compute a correlation, compare
    against the hand-computed expected array, then call again with
    ``aout = out`` — the result is accumulated in place, so the expected
    value doubles.  The statement order therefore matters: each assertion
    depends on how many times ``out`` has been accumulated into.
    ``_fft`` variants must match the direct implementations.
    """
    def setUp(self):
        pass
    def test_auto_correlate_fft(self):
        """FFT autocorrelation: regular times, irregular times, in-place accumulation."""
        out = core.auto_correlate_fft(a)
        self.assertTrue(np.allclose(out,auto_a))
        out = core.auto_correlate_fft(a,t1)
        self.assertTrue(np.allclose(out,auto_a_t1, atol = 1e-6))
        # aout = out accumulates into the existing buffer -> expected doubles
        out = core.auto_correlate_fft(a,t1, aout = out)
        self.assertTrue(np.allclose(out,auto_a_t1*2,atol = 1e-6))
    def test_auto_correlate_fft2(self):
        """Same as above but on 2D input (two identical rows), along axis -1."""
        out = core.auto_correlate_fft(a2,axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a))
        out = core.auto_correlate_fft(a2,t1,axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a_t1, atol = 1e-6))
        out = core.auto_correlate_fft(a2,t1, axis = -1, aout = out)
        self.assertTrue(np.allclose(out[0],auto_a_t1*2, atol = 1e-6))
    def test_auto_correlate_fft_n(self):
        """FFT autocorrelation truncated to the first n delays."""
        out = core.auto_correlate_fft(a, n = 3)
        self.assertTrue(np.allclose(out,auto_a[0:3]))
        out = core.auto_correlate_fft(a,t1,n = 3)
        self.assertTrue(np.allclose(out,auto_a_t1[0:3]))
        out = core.auto_correlate_fft(a,t1,n = 3, aout = out)
        self.assertTrue(np.allclose(out,auto_a_t1[0:3]*2))
    def test_auto_correlate_fft_n2(self):
        """Truncated FFT autocorrelation on 2D input."""
        out = core.auto_correlate_fft(a2, axis = -1, n = 3)
        self.assertTrue(np.allclose(out[0],auto_a[0:3]))
        out = core.auto_correlate_fft(a2,t1,n = 3, axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a_t1[0:3]))
        out = core.auto_correlate_fft(a2,t1,n = 3, axis = -1, aout = out)
        self.assertTrue(np.allclose(out[0],auto_a_t1[0:3]*2))
    def test_auto_correlate(self):
        """Direct (non-FFT) autocorrelation."""
        out = core.auto_correlate(a)
        self.assertTrue(np.allclose(out,auto_a))
        out = core.auto_correlate(a,t1)
        self.assertTrue(np.allclose(out,auto_a_t1))
        out = core.auto_correlate(a,t1, aout = out)
        self.assertTrue(np.allclose(out,auto_a_t1*2))
    def test_auto_correlate2(self):
        """Direct autocorrelation on 2D input along axis -1."""
        out = core.auto_correlate(a2, axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a))
        out = core.auto_correlate(a2,t1, axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a_t1))
        out = core.auto_correlate(a2,t1, axis = -1, aout = out)
        self.assertTrue(np.allclose(out[0],auto_a_t1*2))
    def test_auto_correlate_n(self):
        """Direct autocorrelation truncated to n delays."""
        out = core.auto_correlate(a, n = 3)
        self.assertTrue(np.allclose(out,auto_a[0:3]))
        out = core.auto_correlate(a,t1,n = 3)
        self.assertTrue(np.allclose(out,auto_a_t1[0:3]))
        out = core.auto_correlate(a,t1,n = 3, aout = out)
        self.assertTrue(np.allclose(out,auto_a_t1[0:3]*2))
    def test_auto_correlate_n2(self):
        """Truncated direct autocorrelation on 2D input."""
        out = core.auto_correlate(a2, n = 3,axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a[0:3]))
        out = core.auto_correlate(a2,t1,n = 3, axis = -1)
        self.assertTrue(np.allclose(out[0],auto_a_t1[0:3]))
        # NOTE(review): axis = 1 here vs axis = -1 above; equivalent for 2D input
        out = core.auto_correlate(a2,t1,n = 3, aout = out, axis = 1)
        self.assertTrue(np.allclose(out[0],auto_a_t1[0:3]*2))
    def test_cross_correlate_fft(self):
        """FFT cross-correlation of two signals."""
        out = core.cross_correlate_fft(a,b)
        self.assertTrue(np.allclose(out,cross_a_b))
        out = core.cross_correlate_fft(a,b,t1,t2)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2))
        out = core.cross_correlate_fft(a,b,t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2*2))
    def test_cross_correlate_fft2(self):
        """FFT cross-correlation on 2D input."""
        out = core.cross_correlate_fft(a2,b2,axis = 1)
        self.assertTrue(np.allclose(out[0],cross_a_b))
        out = core.cross_correlate_fft(a2,b2,t1,t2,axis = 1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2))
        out = core.cross_correlate_fft(a2,b2,t1,t2, aout = out,axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2*2))
    def test_cross_correlate_fft_n(self):
        """FFT cross-correlation truncated to n delays."""
        out = core.cross_correlate_fft(a,b, n = 3)
        self.assertTrue(np.allclose(out,cross_a_b[:3]))
        out = core.cross_correlate_fft(a,b,t1,t2, n = 3)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2[:3]))
        out = core.cross_correlate_fft(a,b,t1,t2, n = 3, aout = out)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2[:3]*2))
    def test_cross_correlate_fft_n2(self):
        """Truncated FFT cross-correlation on 2D input."""
        out = core.cross_correlate_fft(a2,b2, n = 3 ,axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b[:3]))
        out = core.cross_correlate_fft(a2,b2,t1,t2, n = 3, axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2[:3]))
        out = core.cross_correlate_fft(a2,b2,t1,t2, n = 3, aout = out, axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2[:3]*2))
    def test_cross_correlate(self):
        """Direct cross-correlation."""
        out = core.cross_correlate(a,b)
        self.assertTrue(np.allclose(out,cross_a_b))
        out = core.cross_correlate(a,b,t1,t2)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2))
        out = core.cross_correlate(a,b,t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2*2))
    def test_cross_correlate2(self):
        """Direct cross-correlation on 2D input."""
        out = core.cross_correlate(a2,b2,axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b))
        out = core.cross_correlate(a2,b2,t1,t2,axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2))
        out = core.cross_correlate(a2,b2,t1,t2, aout = out,axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2*2))
    def test_cross_correlate_n(self):
        """Direct cross-correlation truncated to n delays."""
        out = core.cross_correlate(a,b, n = 3)
        self.assertTrue(np.allclose(out,cross_a_b[:3]))
        out = core.cross_correlate(a,b,t1,t2, n = 3)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2[:3]))
        out = core.cross_correlate(a,b,t1,t2, n = 3, aout = out)
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2[:3]*2))
    def test_cross_correlate_n2(self):
        """Truncated direct cross-correlation on 2D input."""
        out = core.cross_correlate(a2,b2, n = 3,axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b[:3]))
        out = core.cross_correlate(a2,b2,t1,t2, n = 3, axis = -1)
        self.assertTrue(np.allclose(out[0],cross_a_b_t1_t2[:3]))
        out = core.cross_correlate(a2,b2,t1,t2, n = 3, aout = out, axis = -1)
        # NOTE(review): uses `out` (not `out[0]`) unlike the sibling asserts;
        # both rows of `out` are identical so broadcasting still passes — confirm intent
        self.assertTrue(np.allclose(out,cross_a_b_t1_t2[:3]*2))
class TestSum(unittest.TestCase):
    """Tests for the delayed-sum helpers (auto_sum / cross_sum) in :mod:`cddm.core`.

    As in the correlation tests, repeated calls with ``aout = out`` accumulate
    in place, so expected values scale by the number of accumulations; the
    statement order within each method is therefore significant.
    """
    def test_auto_sum(self):
        """auto_sum: regular times, irregular times, in-place accumulation."""
        out = core.auto_sum(a)
        self.assertTrue(np.allclose(out,auto_sum_a))
        out = core.auto_sum(a,t1)
        self.assertTrue(np.allclose(out,auto_sum_a_t1))
        out = core.auto_sum(a,t1, aout = out)
        self.assertTrue(np.allclose(out,auto_sum_a_t1*2))
    def test_auto_sum_n(self):
        """auto_sum truncated to n delays; n is inferred from aout when omitted."""
        out = core.auto_sum(a, n = 3)
        self.assertTrue(np.allclose(out,auto_sum_a[0:3]))
        out = core.auto_sum(a,t1, n = 3)
        self.assertTrue(np.allclose(out,auto_sum_a_t1[0:3]))
        out = core.auto_sum(a,t1, aout = out)
        self.assertTrue(np.allclose(out,auto_sum_a_t1[0:3]*2))
        out = core.auto_sum(a,t1, n = 3, aout = out)
        self.assertTrue(np.allclose(out,auto_sum_a_t1[0:3]*3))
    def test_auto_sum_fft(self):
        """FFT variant of auto_sum must match the expected array."""
        out = core.auto_sum_fft(a,t1)
        self.assertTrue(np.allclose(out,auto_sum_a_t1))
        out = core.auto_sum_fft(a,t1, aout = out)
        self.assertTrue(np.allclose(out,auto_sum_a_t1*2))
    def test_auto_sum_fft_n(self):
        """Truncated FFT auto_sum with repeated in-place accumulation (x2, x3)."""
        out = core.auto_sum_fft(a,t1, n = 3)
        self.assertTrue(np.allclose(out,auto_sum_a_t1[0:3]))
        out = core.auto_sum_fft(a,t1, n =3, aout = out)
        self.assertTrue(np.allclose(out,auto_sum_a_t1[0:3]*2))
        out = core.auto_sum_fft(a,t1, aout = out)
        self.assertTrue(np.allclose(out,auto_sum_a_t1[0:3]*3))
    def test_cross_sum(self):
        """cross_sum: regular times, irregular times, in-place accumulation."""
        out = core.cross_sum(a)
        self.assertTrue(np.allclose(out,cross_sum_a))
        out = core.cross_sum(a,t1,t2)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2))
        out = core.cross_sum(a,t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2*2))
    def test_cross_sum_n(self):
        """cross_sum truncated to n delays."""
        out = core.cross_sum(a, n=3)
        self.assertTrue(np.allclose(out,cross_sum_a[0:3]))
        out = core.cross_sum(a,t1,t2, n = 3)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2[0:3]))
        out = core.cross_sum(a,t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2[0:3]*2))
    def test_cross_sum_fft(self):
        """FFT variant of cross_sum."""
        out = core.cross_sum_fft(a,t1,t2)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2))
        out = core.cross_sum_fft(a,t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2*2))
    def test_cross_sum_fft_n(self):
        """Truncated FFT cross_sum with repeated in-place accumulation (x2, x3)."""
        out = core.cross_sum_fft(a,t1,t2, n = 3)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2[0:3]))
        out = core.cross_sum_fft(a,t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2[0:3]*2))
        out = core.cross_sum_fft(a,t1,t2, n =3, aout = out)
        self.assertTrue(np.allclose(out,cross_sum_a_t1_t2[0:3]*3))
    def test_cross_sum_equivalence_ND(self):
        """Direct and FFT cross_sum must agree on 3D data for every axis."""
        for axis in (0,1,2):
            # regular time vectors sized to the chosen axis, offset by 3
            t1 = np.arange(test_data1.shape[axis])
            t2 = np.arange(test_data1.shape[axis]) + 3
            out1 = core.cross_sum(test_data1,t1,t2, axis = axis)
            out2 = core.cross_sum_fft(test_data1,t1,t2, axis = axis)
            self.assertTrue(np.allclose(out1,out2))
class TestCount(unittest.TestCase):
    """Tests for the delay-count helpers (cross_count / auto_count).

    An integer argument means "this many regularly spaced frames"; a time
    vector (or pair) means irregular acquisition times.  ``aout = out``
    accumulates counts in place, doubling the expected values.
    """
    def test_cross_count(self):
        """cross_count for regular (int) and irregular time vectors."""
        out = core.cross_count(10)
        self.assertTrue(np.allclose(out,cross_count_10))
        out = core.cross_count(t1,t2)
        self.assertTrue(np.allclose(out,cross_count_t1_t2))
        out = core.cross_count(t1,t2, aout = out)
        self.assertTrue(np.allclose(out,cross_count_t1_t2*2))
    def test_cross_count_n(self):
        """cross_count truncated to n delays; aout call keeps the n of the buffer."""
        out = core.cross_count(10, n = 5)
        self.assertTrue(np.allclose(out,cross_count_10[0:5]))
        out = core.cross_count(t1,t2,n=5)
        self.assertTrue(np.allclose(out,cross_count_t1_t2[0:5]))
        out = core.cross_count(t1,t2, aout = out)
        self.assertTrue(np.allclose(out,2*cross_count_t1_t2[0:5]))
    def test_auto_count(self):
        """auto_count for regular (int) and irregular time vectors."""
        out = core.auto_count(10)
        self.assertTrue(np.allclose(out,auto_count_10))
        out = core.auto_count(t1)
        self.assertTrue(np.allclose(out,auto_count_t1))
        out = core.auto_count(t1, aout = out)
        self.assertTrue(np.allclose(out,auto_count_t1*2))
    def test_auto_count_n(self):
        """auto_count truncated to n delays."""
        out = core.auto_count(10, n = 5)
        self.assertTrue(np.allclose(out,auto_count_10[0:5]))
        out = core.auto_count(t1, n = 5)
        self.assertTrue(np.allclose(out,auto_count_t1[:5]))
        out = core.auto_count(t1, aout = out)
        self.assertTrue(np.allclose(out,2*auto_count_t1[:5]))
class TestIcorr(unittest.TestCase):
    """Equivalence tests: iterative (out-of-memory) iccorr/iacorr must produce
    the same normalized result as the in-memory ccorr/acorr implementations,
    for every computation method ("corr", "diff", "fft") that is exercised.
    """
    def test_cross_equivalence(self):
        """iccorr over a video iterator == ccorr over the full arrays."""
        for method in ("corr","diff","fft"):
            bg,var = core.stats(test_data1, test_data2, axis = 0)
            data = core.ccorr(test_data1, test_data2,n = 8, norm = 1, method = method)
            out1 = core.normalize(data, bg, var)
            # wrap the in-memory arrays as a frame iterator for the iterative path
            vid = fromarrays((test_data1, test_data2))
            data,bg,var = core.iccorr(vid, count = len(test_data1),chunk_size = 16,n = 8, norm = 1, method = method)
            out2 = core.normalize(data, bg, var)
            self.assertTrue(np.allclose(out1, out2))
    def test_auto_equivalence_2(self):
        """iacorr with norm = 2 == ccorr of the data with itself; "corr" method only."""
        for method in ("corr",):
            bg,var = core.stats(test_data1, axis = 0)
            data1 = core.ccorr(test_data1,test_data1, n = 8, norm = 2, method = method)
            out1 = core.normalize(data1, bg, var, norm = 2)
            data2,bg,var = core.iacorr(test_data1, n = 8, norm = 2, method = method)
            out2 = core.normalize(data2, bg, var, norm = 2)
            self.assertTrue(np.allclose(out1, out2))
    def test_auto_equivalence_1(self):
        """iacorr with norm = 1 == acorr, for all three methods."""
        for method in ("corr","fft","diff"):
            bg,var = core.stats(test_data1, axis = 0)
            data1 = core.acorr(test_data1, n = 8, norm = 1, method = method)
            out1 = core.normalize(data1, bg, var, norm = 1)
            data2,bg,var = core.iacorr(test_data1, n = 8, norm = 1, method = method)
            out2 = core.normalize(data2, bg, var, norm = 1)
            self.assertTrue(np.allclose(out1, out2))
class TestCorr(unittest.TestCase):
    """Method-equivalence tests for ccorr/acorr + normalize.

    Each test sweeps ``scale``, normalization ``mode`` ("corr"/"diff") and the
    computation axis, computing the same correlation with the "fft", "corr"
    and (where applicable) "diff" methods; after normalization all methods
    must yield identical results.  ``_mask`` variants additionally apply
    ``test_mask`` during normalization (axis 0 only, where the mask shape fits).
    """
    def setUp(self):
        pass
    def test_corr_regular_3(self):
        """ccorr with norm = 3: fft vs corr vs diff methods must agree."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, test_data2, axis = axis)
                    data = core.ccorr(test_data1, test_data2, norm = 3, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 3, mode = mode, scale = scale)
                    data = core.ccorr(test_data1, test_data2, norm = 3, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 3, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
                    data = core.ccorr(test_data1, test_data2, norm = 3, method = "diff", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 3, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_ccorr_regular_3_mask(self):
        """Same as test_corr_regular_3 but with a mask applied at normalization."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                axis = 0
                bg,var = core.stats(test_data1, test_data2, axis = axis)
                data = core.ccorr(test_data1, test_data2, norm = 3, method = "fft", axis = axis)
                self.out = core.normalize(data, bg, var, norm = 3, mode = mode, scale = scale, mask = test_mask)
                data = core.ccorr(test_data1, test_data2, norm = 3, method = "corr", axis = axis)
                out_other = core.normalize(data, bg, var, norm = 3, mode = mode, scale = scale, mask = test_mask)
                self.assertTrue(np.allclose(self.out, out_other))
                data = core.ccorr(test_data1, test_data2, norm = 3, method = "diff", axis = axis)
                out_other = core.normalize(data, bg, var, norm = 3, mode = mode, scale = scale, mask = test_mask)
                self.assertTrue(np.allclose(self.out, out_other))
    def test_acorr_regular_3(self):
        """acorr vs ccorr(data, data).

        NOTE(review): mixes norm = 3 (compute) with norm = 1 (normalize) and
        the final acorr call uses norm = 1 — verify this mix is intentional.
        """
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, axis = axis)
                    data = core.ccorr(test_data1, test_data1, norm = 3, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    data = core.acorr(test_data1,norm = 3, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
                    data = core.acorr(test_data1,norm = 1, method = "diff", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_ccorr_regular_1(self):
        """ccorr with norm = 1: all three methods must agree after normalization."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, test_data2, axis = axis)
                    data = core.ccorr(test_data1, test_data2, norm = 1, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    data = core.ccorr(test_data1, test_data2, norm = 1, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
                    data = core.ccorr(test_data1, test_data2, norm = 1, method = "diff", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_acorr_regular_1(self):
        """acorr with norm = 1: all three methods must agree after normalization."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, axis = axis)
                    data = core.acorr(test_data1, norm = 1, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    data = core.acorr(test_data1,norm = 1, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
                    data = core.acorr(test_data1,norm = 1, method = "diff", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_corr_regular_1_mask(self):
        """ccorr with norm = 1 and a mask during normalization (axis 0 only)."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                axis = 0
                bg,var = core.stats(test_data1, test_data2, axis = axis)
                data = core.ccorr(test_data1, test_data2, norm = 1, method = "fft", axis = axis)
                self.out = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale,mask = test_mask)
                data = core.ccorr(test_data1, test_data2, norm = 1, method = "corr", axis = axis)
                out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale,mask = test_mask)
                self.assertTrue(np.allclose(self.out, out_other))
                data = core.ccorr(test_data1, test_data2, norm = 1, method = "diff", axis = axis)
                out_other = core.normalize(data, bg, var, norm = 1, mode = mode, scale = scale,mask = test_mask)
                self.assertTrue(np.allclose(self.out, out_other))
    def test_ccorr_regular_0(self):
        """ccorr with norm = 0: fft vs corr methods must agree (no diff method here)."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, test_data2, axis = axis)
                    data = core.ccorr(test_data1, test_data2, norm = 0, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 0, mode = mode, scale = scale)
                    data = core.ccorr(test_data1, test_data2, norm = 0, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 0, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_acorr_regular_0(self):
        """acorr with norm = 0: fft vs corr methods must agree."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, axis = axis)
                    data = core.acorr(test_data1, norm = 0, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 0, mode = mode, scale = scale)
                    data = core.acorr(test_data1,norm = 0, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 0, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_corr_regular_0_mask(self):
        """ccorr with norm = 0 and a mask during normalization (axis 0 only)."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                axis = 0
                bg,var = core.stats(test_data1, test_data2, axis = axis)
                data = core.ccorr(test_data1, test_data2, norm = 0, method = "fft", axis = axis)
                self.out = core.normalize(data, bg, var, norm = 0, mode = mode, scale = scale, mask = test_mask)
                data = core.ccorr(test_data1, test_data2, norm = 0, method = "corr", axis = axis)
                out_other = core.normalize(data, bg, var, norm = 0, mode = mode, scale = scale, mask = test_mask)
                self.assertTrue(np.allclose(self.out, out_other))
    def test_corr_regular_2(self):
        """ccorr with norm = 2: fft vs corr methods must agree."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                for axis in (0,1,2):
                    bg,var = core.stats(test_data1, test_data2, axis = axis)
                    data = core.ccorr(test_data1, test_data2, norm = 2, method = "fft", axis = axis)
                    self.out = core.normalize(data, bg, var, norm = 2, mode = mode, scale = scale)
                    data = core.ccorr(test_data1, test_data2, norm = 2, method = "corr", axis = axis)
                    out_other = core.normalize(data, bg, var, norm = 2, mode = mode, scale = scale)
                    self.assertTrue(np.allclose(self.out, out_other))
    def test_corr_regular_2_mask(self):
        """ccorr with norm = 2 and a mask during normalization (default axis)."""
        for scale in (True, False):
            for mode in ("corr", "diff"):
                bg,var = core.stats(test_data1, test_data2)
                data = core.ccorr(test_data1, test_data2, norm = 2, method = "fft")
                self.out = core.normalize(data, bg, var, norm = 2, mode = mode, scale = scale, mask = test_mask)
                data = core.ccorr(test_data1, test_data2, norm = 2, method = "corr")
                out_other = core.normalize(data, bg, var, norm = 2, mode = mode, scale = scale,mask = test_mask)
                self.assertTrue(np.allclose(self.out, out_other))
class TestRest(unittest.TestCase):
    """Tests for the remaining small helpers in :mod:`cddm.core`."""
    def test_abs2(self):
        """abs2 must equal the squared modulus computed via np.abs."""
        expected = np.abs(test_data1) ** 2
        computed = core.abs2(test_data1)
        self.assertTrue(np.allclose(computed, expected))
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
|
[
"numpy.random.seed",
"numpy.abs",
"numpy.allclose",
"numpy.ones",
"cddm.core.cross_count",
"cddm.core.ccorr",
"numpy.arange",
"cddm.core.acorr",
"cddm.video.fromarrays",
"cddm.core.cross_correlate_fft",
"unittest.main",
"cddm.core.normalize",
"cddm.core.iacorr",
"cddm.core.abs2",
"numpy.random.randn",
"cddm.core.auto_count",
"cddm.core.cross_sum",
"cddm.core.auto_correlate_fft",
"cddm.core.stats",
"cddm.core.auto_correlate",
"cddm.core.auto_sum_fft",
"cddm.core.cross_sum_fft",
"cddm.core.cross_correlate",
"numpy.array",
"cddm.core.auto_sum"
] |
[((241, 284), 'numpy.array', 'np.array', (['[70.0, 100.0, 62.0, 28.0]', 'FDTYPE'], {}), '([70.0, 100.0, 62.0, 28.0], FDTYPE)\n', (249, 284), True, 'import numpy as np\n'), ((301, 366), 'numpy.array', 'np.array', (['[32.0, 72.0, 28.0, 38.0, 24.0, 38.0, 20.0, 8.0]', 'FDTYPE'], {}), '([32.0, 72.0, 28.0, 38.0, 24.0, 38.0, 20.0, 8.0], FDTYPE)\n', (309, 366), True, 'import numpy as np\n'), ((368, 409), 'numpy.array', 'np.array', (['[30.0, 20.0, 11.0, 4.0]', 'FDTYPE'], {}), '([30.0, 20.0, 11.0, 4.0], FDTYPE)\n', (376, 409), True, 'import numpy as np\n'), ((419, 479), 'numpy.array', 'np.array', (['[30.0, 12.0, 2.0, 0.0, 6.0, 8.0, 3.0, 4.0]', 'FDTYPE'], {}), '([30.0, 12.0, 2.0, 0.0, 6.0, 8.0, 3.0, 4.0], FDTYPE)\n', (427, 479), True, 'import numpy as np\n'), ((485, 524), 'numpy.array', 'np.array', (['[10.0, 7.5, 5.0, 2.5]', 'FDTYPE'], {}), '([10.0, 7.5, 5.0, 2.5], FDTYPE)\n', (493, 524), True, 'import numpy as np\n'), ((544, 603), 'numpy.array', 'np.array', (['[10.0, 3.5, 1.5, 0.0, 2.5, 3.0, 2.0, 2.5]', 'FDTYPE'], {}), '([10.0, 3.5, 1.5, 0.0, 2.5, 3.0, 2.0, 2.5], FDTYPE)\n', (552, 603), True, 'import numpy as np\n'), ((625, 666), 'numpy.array', 'np.array', (['[10.0, 15.0, 10.0, 5.0]', 'FDTYPE'], {}), '([10.0, 15.0, 10.0, 5.0], FDTYPE)\n', (633, 666), True, 'import numpy as np\n'), ((684, 743), 'numpy.array', 'np.array', (['[4.0, 11.0, 4.0, 6.0, 4.0, 6.0, 4.0, 1.0]', 'FDTYPE'], {}), '([4.0, 11.0, 4.0, 6.0, 4.0, 6.0, 4.0, 1.0], FDTYPE)\n', (692, 743), True, 'import numpy as np\n'), ((760, 814), 'numpy.array', 'np.array', (['[10, 18, 16, 14, 12, 10, 8, 6, 4, 2]', 'FDTYPE'], {}), '([10, 18, 16, 14, 12, 10, 8, 6, 4, 2], FDTYPE)\n', (768, 814), True, 'import numpy as np\n'), ((838, 880), 'numpy.array', 'np.array', (['[1, 5, 1, 3, 1, 3, 1, 1]', 'FDTYPE'], {}), '([1, 5, 1, 3, 1, 3, 1, 1], FDTYPE)\n', (846, 880), True, 'import numpy as np\n'), ((896, 945), 'numpy.array', 'np.array', (['[10, 9, 8, 7, 6, 5, 4, 3, 2, 1]', 'FDTYPE'], {}), '([10, 9, 8, 7, 6, 5, 4, 3, 2, 1], FDTYPE)\n', 
(904, 945), True, 'import numpy as np\n'), ((970, 1012), 'numpy.array', 'np.array', (['[4, 1, 1, 0, 1, 1, 1, 1]', 'FDTYPE'], {}), '([4, 1, 1, 0, 1, 1, 1, 1], FDTYPE)\n', (978, 1012), True, 'import numpy as np\n'), ((1013, 1030), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (1027, 1030), True, 'import numpy as np\n'), ((1204, 1232), 'numpy.array', 'np.array', (['test_data1', 'CDTYPE'], {}), '(test_data1, CDTYPE)\n', (1212, 1232), True, 'import numpy as np\n'), ((1246, 1274), 'numpy.array', 'np.array', (['test_data2', 'CDTYPE'], {}), '(test_data2, CDTYPE)\n', (1254, 1274), True, 'import numpy as np\n'), ((1288, 1310), 'numpy.ones', 'np.ones', (['(19, 8)', 'bool'], {}), '((19, 8), bool)\n', (1295, 1310), True, 'import numpy as np\n'), ((1068, 1094), 'numpy.random.randn', 'np.random.randn', (['(32)', '(19)', '(8)'], {}), '(32, 19, 8)\n', (1083, 1094), True, 'import numpy as np\n'), ((1136, 1162), 'numpy.random.randn', 'np.random.randn', (['(32)', '(19)', '(8)'], {}), '(32, 19, 8)\n', (1151, 1162), True, 'import numpy as np\n'), ((23963, 23978), 'unittest.main', 'unittest.main', ([], {}), '()\n', (23976, 23978), False, 'import unittest\n'), ((1095, 1121), 'numpy.random.randn', 'np.random.randn', (['(32)', '(19)', '(8)'], {}), '(32, 19, 8)\n', (1110, 1121), True, 'import numpy as np\n'), ((1163, 1189), 'numpy.random.randn', 'np.random.randn', (['(32)', '(19)', '(8)'], {}), '(32, 19, 8)\n', (1178, 1189), True, 'import numpy as np\n'), ((1511, 1537), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a'], {}), '(a)\n', (1534, 1537), True, 'import cddm.core as core\n'), ((1601, 1631), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a', 't1'], {}), '(a, t1)\n', (1624, 1631), True, 'import cddm.core as core\n'), ((1711, 1751), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a', 't1'], {'aout': 'out'}), '(a, t1, aout=out)\n', (1734, 1751), True, 'import cddm.core as core\n'), ((1887, 1923), 
'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a2'], {'axis': '(-1)'}), '(a2, axis=-1)\n', (1910, 1923), True, 'import cddm.core as core\n'), ((1991, 2031), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a2', 't1'], {'axis': '(-1)'}), '(a2, t1, axis=-1)\n', (2014, 2031), True, 'import cddm.core as core\n'), ((2115, 2165), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a2', 't1'], {'axis': '(-1)', 'aout': 'out'}), '(a2, t1, axis=-1, aout=out)\n', (2138, 2165), True, 'import cddm.core as core\n'), ((2312, 2343), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a'], {'n': '(3)'}), '(a, n=3)\n', (2335, 2343), True, 'import cddm.core as core\n'), ((2415, 2450), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a', 't1'], {'n': '(3)'}), '(a, t1, n=3)\n', (2438, 2450), True, 'import cddm.core as core\n'), ((2523, 2568), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a', 't1'], {'n': '(3)', 'aout': 'out'}), '(a, t1, n=3, aout=out)\n', (2546, 2568), True, 'import cddm.core as core\n'), ((2688, 2729), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a2'], {'axis': '(-1)', 'n': '(3)'}), '(a2, axis=-1, n=3)\n', (2711, 2729), True, 'import cddm.core as core\n'), ((2806, 2851), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a2', 't1'], {'n': '(3)', 'axis': '(-1)'}), '(a2, t1, n=3, axis=-1)\n', (2829, 2851), True, 'import cddm.core as core\n'), ((2929, 2984), 'cddm.core.auto_correlate_fft', 'core.auto_correlate_fft', (['a2', 't1'], {'n': '(3)', 'axis': '(-1)', 'aout': 'out'}), '(a2, t1, n=3, axis=-1, aout=out)\n', (2952, 2984), True, 'import cddm.core as core\n'), ((3102, 3124), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a'], {}), '(a)\n', (3121, 3124), True, 'import cddm.core as core\n'), ((3189, 3215), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a', 't1'], {}), '(a, t1)\n', (3208, 3215), True, 'import cddm.core as core\n'), ((3282, 3318), 
'cddm.core.auto_correlate', 'core.auto_correlate', (['a', 't1'], {'aout': 'out'}), '(a, t1, aout=out)\n', (3301, 3318), True, 'import cddm.core as core\n'), ((3434, 3466), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a2'], {'axis': '(-1)'}), '(a2, axis=-1)\n', (3453, 3466), True, 'import cddm.core as core\n'), ((3536, 3572), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a2', 't1'], {'axis': '(-1)'}), '(a2, t1, axis=-1)\n', (3555, 3572), True, 'import cddm.core as core\n'), ((3644, 3690), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a2', 't1'], {'axis': '(-1)', 'aout': 'out'}), '(a2, t1, axis=-1, aout=out)\n', (3663, 3690), True, 'import cddm.core as core\n'), ((3812, 3839), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a'], {'n': '(3)'}), '(a, n=3)\n', (3831, 3839), True, 'import cddm.core as core\n'), ((3911, 3942), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a', 't1'], {'n': '(3)'}), '(a, t1, n=3)\n', (3930, 3942), True, 'import cddm.core as core\n'), ((4015, 4056), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a', 't1'], {'n': '(3)', 'aout': 'out'}), '(a, t1, n=3, aout=out)\n', (4034, 4056), True, 'import cddm.core as core\n'), ((4180, 4217), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a2'], {'n': '(3)', 'axis': '(-1)'}), '(a2, n=3, axis=-1)\n', (4199, 4217), True, 'import cddm.core as core\n'), ((4293, 4334), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a2', 't1'], {'n': '(3)', 'axis': '(-1)'}), '(a2, t1, n=3, axis=-1)\n', (4312, 4334), True, 'import cddm.core as core\n'), ((4412, 4462), 'cddm.core.auto_correlate', 'core.auto_correlate', (['a2', 't1'], {'n': '(3)', 'aout': 'out', 'axis': '(1)'}), '(a2, t1, n=3, aout=out, axis=1)\n', (4431, 4462), True, 'import cddm.core as core\n'), ((4593, 4623), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a', 'b'], {}), '(a, b)\n', (4617, 4623), True, 'import cddm.core as core\n'), ((4690, 4728), 'cddm.core.cross_correlate_fft', 
'core.cross_correlate_fft', (['a', 'b', 't1', 't2'], {}), '(a, b, t1, t2)\n', (4714, 4728), True, 'import cddm.core as core\n'), ((4799, 4847), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a', 'b', 't1', 't2'], {'aout': 'out'}), '(a, b, t1, t2, aout=out)\n', (4823, 4847), True, 'import cddm.core as core\n'), ((4973, 5013), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a2', 'b2'], {'axis': '(1)'}), '(a2, b2, axis=1)\n', (4997, 5013), True, 'import cddm.core as core\n'), ((5084, 5132), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a2', 'b2', 't1', 't2'], {'axis': '(1)'}), '(a2, b2, t1, t2, axis=1)\n', (5108, 5132), True, 'import cddm.core as core\n'), ((5207, 5266), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a2', 'b2', 't1', 't2'], {'aout': 'out', 'axis': '(-1)'}), '(a2, b2, t1, t2, aout=out, axis=-1)\n', (5231, 5266), True, 'import cddm.core as core\n'), ((5396, 5431), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a', 'b'], {'n': '(3)'}), '(a, b, n=3)\n', (5420, 5431), True, 'import cddm.core as core\n'), ((5504, 5547), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a', 'b', 't1', 't2'], {'n': '(3)'}), '(a, b, t1, t2, n=3)\n', (5528, 5547), True, 'import cddm.core as core\n'), ((5624, 5677), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a', 'b', 't1', 't2'], {'n': '(3)', 'aout': 'out'}), '(a, b, t1, t2, n=3, aout=out)\n', (5648, 5677), True, 'import cddm.core as core\n'), ((5802, 5848), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a2', 'b2'], {'n': '(3)', 'axis': '(-1)'}), '(a2, b2, n=3, axis=-1)\n', (5826, 5848), True, 'import cddm.core as core\n'), ((5926, 5980), 'cddm.core.cross_correlate_fft', 'core.cross_correlate_fft', (['a2', 'b2', 't1', 't2'], {'n': '(3)', 'axis': '(-1)'}), '(a2, b2, t1, t2, n=3, axis=-1)\n', (5950, 5980), True, 'import cddm.core as core\n'), ((6062, 6126), 'cddm.core.cross_correlate_fft', 
'core.cross_correlate_fft', (['a2', 'b2', 't1', 't2'], {'n': '(3)', 'aout': 'out', 'axis': '(-1)'}), '(a2, b2, t1, t2, n=3, aout=out, axis=-1)\n', (6086, 6126), True, 'import cddm.core as core\n'), ((6249, 6275), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a', 'b'], {}), '(a, b)\n', (6269, 6275), True, 'import cddm.core as core\n'), ((6342, 6376), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a', 'b', 't1', 't2'], {}), '(a, b, t1, t2)\n', (6362, 6376), True, 'import cddm.core as core\n'), ((6447, 6491), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a', 'b', 't1', 't2'], {'aout': 'out'}), '(a, b, t1, t2, aout=out)\n', (6467, 6491), True, 'import cddm.core as core\n'), ((6604, 6641), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a2', 'b2'], {'axis': '(-1)'}), '(a2, b2, axis=-1)\n', (6624, 6641), True, 'import cddm.core as core\n'), ((6712, 6757), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a2', 'b2', 't1', 't2'], {'axis': '(-1)'}), '(a2, b2, t1, t2, axis=-1)\n', (6732, 6757), True, 'import cddm.core as core\n'), ((6832, 6887), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a2', 'b2', 't1', 't2'], {'aout': 'out', 'axis': '(-1)'}), '(a2, b2, t1, t2, aout=out, axis=-1)\n', (6852, 6887), True, 'import cddm.core as core\n'), ((7013, 7044), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a', 'b'], {'n': '(3)'}), '(a, b, n=3)\n', (7033, 7044), True, 'import cddm.core as core\n'), ((7117, 7156), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a', 'b', 't1', 't2'], {'n': '(3)'}), '(a, b, t1, t2, n=3)\n', (7137, 7156), True, 'import cddm.core as core\n'), ((7233, 7282), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a', 'b', 't1', 't2'], {'n': '(3)', 'aout': 'out'}), '(a, b, t1, t2, n=3, aout=out)\n', (7253, 7282), True, 'import cddm.core as core\n'), ((7411, 7453), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a2', 'b2'], {'n': '(3)', 'axis': '(-1)'}), '(a2, b2, n=3, axis=-1)\n', 
(7431, 7453), True, 'import cddm.core as core\n'), ((7530, 7580), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a2', 'b2', 't1', 't2'], {'n': '(3)', 'axis': '(-1)'}), '(a2, b2, t1, t2, n=3, axis=-1)\n', (7550, 7580), True, 'import cddm.core as core\n'), ((7662, 7722), 'cddm.core.cross_correlate', 'core.cross_correlate', (['a2', 'b2', 't1', 't2'], {'n': '(3)', 'aout': 'out', 'axis': '(-1)'}), '(a2, b2, t1, t2, n=3, aout=out, axis=-1)\n', (7682, 7722), True, 'import cddm.core as core\n'), ((7869, 7885), 'cddm.core.auto_sum', 'core.auto_sum', (['a'], {}), '(a)\n', (7882, 7885), True, 'import cddm.core as core\n'), ((7953, 7973), 'cddm.core.auto_sum', 'core.auto_sum', (['a', 't1'], {}), '(a, t1)\n', (7966, 7973), True, 'import cddm.core as core\n'), ((8044, 8074), 'cddm.core.auto_sum', 'core.auto_sum', (['a', 't1'], {'aout': 'out'}), '(a, t1, aout=out)\n', (8057, 8074), True, 'import cddm.core as core\n'), ((8181, 8202), 'cddm.core.auto_sum', 'core.auto_sum', (['a'], {'n': '(3)'}), '(a, n=3)\n', (8194, 8202), True, 'import cddm.core as core\n'), ((8277, 8302), 'cddm.core.auto_sum', 'core.auto_sum', (['a', 't1'], {'n': '(3)'}), '(a, t1, n=3)\n', (8290, 8302), True, 'import cddm.core as core\n'), ((8380, 8410), 'cddm.core.auto_sum', 'core.auto_sum', (['a', 't1'], {'aout': 'out'}), '(a, t1, aout=out)\n', (8393, 8410), True, 'import cddm.core as core\n'), ((8491, 8526), 'cddm.core.auto_sum', 'core.auto_sum', (['a', 't1'], {'n': '(3)', 'aout': 'out'}), '(a, t1, n=3, aout=out)\n', (8504, 8526), True, 'import cddm.core as core\n'), ((8650, 8674), 'cddm.core.auto_sum_fft', 'core.auto_sum_fft', (['a', 't1'], {}), '(a, t1)\n', (8667, 8674), True, 'import cddm.core as core\n'), ((8745, 8779), 'cddm.core.auto_sum_fft', 'core.auto_sum_fft', (['a', 't1'], {'aout': 'out'}), '(a, t1, aout=out)\n', (8762, 8779), True, 'import cddm.core as core\n'), ((8898, 8927), 'cddm.core.auto_sum_fft', 'core.auto_sum_fft', (['a', 't1'], {'n': '(3)'}), '(a, t1, n=3)\n', (8915, 8927), True, 
'import cddm.core as core\n'), ((9005, 9044), 'cddm.core.auto_sum_fft', 'core.auto_sum_fft', (['a', 't1'], {'n': '(3)', 'aout': 'out'}), '(a, t1, n=3, aout=out)\n', (9022, 9044), True, 'import cddm.core as core\n'), ((9125, 9159), 'cddm.core.auto_sum_fft', 'core.auto_sum_fft', (['a', 't1'], {'aout': 'out'}), '(a, t1, aout=out)\n', (9142, 9159), True, 'import cddm.core as core\n'), ((9269, 9286), 'cddm.core.cross_sum', 'core.cross_sum', (['a'], {}), '(a)\n', (9283, 9286), True, 'import cddm.core as core\n'), ((9355, 9380), 'cddm.core.cross_sum', 'core.cross_sum', (['a', 't1', 't2'], {}), '(a, t1, t2)\n', (9369, 9380), True, 'import cddm.core as core\n'), ((9454, 9489), 'cddm.core.cross_sum', 'core.cross_sum', (['a', 't1', 't2'], {'aout': 'out'}), '(a, t1, t2, aout=out)\n', (9468, 9489), True, 'import cddm.core as core\n'), ((9600, 9622), 'cddm.core.cross_sum', 'core.cross_sum', (['a'], {'n': '(3)'}), '(a, n=3)\n', (9614, 9622), True, 'import cddm.core as core\n'), ((9696, 9726), 'cddm.core.cross_sum', 'core.cross_sum', (['a', 't1', 't2'], {'n': '(3)'}), '(a, t1, t2, n=3)\n', (9710, 9726), True, 'import cddm.core as core\n'), ((9807, 9842), 'cddm.core.cross_sum', 'core.cross_sum', (['a', 't1', 't2'], {'aout': 'out'}), '(a, t1, t2, aout=out)\n', (9821, 9842), True, 'import cddm.core as core\n'), ((9960, 9989), 'cddm.core.cross_sum_fft', 'core.cross_sum_fft', (['a', 't1', 't2'], {}), '(a, t1, t2)\n', (9978, 9989), True, 'import cddm.core as core\n'), ((10063, 10102), 'cddm.core.cross_sum_fft', 'core.cross_sum_fft', (['a', 't1', 't2'], {'aout': 'out'}), '(a, t1, t2, aout=out)\n', (10081, 10102), True, 'import cddm.core as core\n'), ((10217, 10251), 'cddm.core.cross_sum_fft', 'core.cross_sum_fft', (['a', 't1', 't2'], {'n': '(3)'}), '(a, t1, t2, n=3)\n', (10235, 10251), True, 'import cddm.core as core\n'), ((10332, 10371), 'cddm.core.cross_sum_fft', 'core.cross_sum_fft', (['a', 't1', 't2'], {'aout': 'out'}), '(a, t1, t2, aout=out)\n', (10350, 10371), True, 'import 
cddm.core as core\n'), ((10454, 10498), 'cddm.core.cross_sum_fft', 'core.cross_sum_fft', (['a', 't1', 't2'], {'n': '(3)', 'aout': 'out'}), '(a, t1, t2, n=3, aout=out)\n', (10472, 10498), True, 'import cddm.core as core\n'), ((11029, 11049), 'cddm.core.cross_count', 'core.cross_count', (['(10)'], {}), '(10)\n', (11045, 11049), True, 'import cddm.core as core\n'), ((11121, 11145), 'cddm.core.cross_count', 'core.cross_count', (['t1', 't2'], {}), '(t1, t2)\n', (11137, 11145), True, 'import cddm.core as core\n'), ((11220, 11254), 'cddm.core.cross_count', 'core.cross_count', (['t1', 't2'], {'aout': 'out'}), '(t1, t2, aout=out)\n', (11236, 11254), True, 'import cddm.core as core\n'), ((11369, 11394), 'cddm.core.cross_count', 'core.cross_count', (['(10)'], {'n': '(5)'}), '(10, n=5)\n', (11385, 11394), True, 'import cddm.core as core\n'), ((11473, 11502), 'cddm.core.cross_count', 'core.cross_count', (['t1', 't2'], {'n': '(5)'}), '(t1, t2, n=5)\n', (11489, 11502), True, 'import cddm.core as core\n'), ((11581, 11615), 'cddm.core.cross_count', 'core.cross_count', (['t1', 't2'], {'aout': 'out'}), '(t1, t2, aout=out)\n', (11597, 11615), True, 'import cddm.core as core\n'), ((11732, 11751), 'cddm.core.auto_count', 'core.auto_count', (['(10)'], {}), '(10)\n', (11747, 11751), True, 'import cddm.core as core\n'), ((11822, 11841), 'cddm.core.auto_count', 'core.auto_count', (['t1'], {}), '(t1)\n', (11837, 11841), True, 'import cddm.core as core\n'), ((11913, 11942), 'cddm.core.auto_count', 'core.auto_count', (['t1'], {'aout': 'out'}), '(t1, aout=out)\n', (11928, 11942), True, 'import cddm.core as core\n'), ((12053, 12077), 'cddm.core.auto_count', 'core.auto_count', (['(10)'], {'n': '(5)'}), '(10, n=5)\n', (12068, 12077), True, 'import cddm.core as core\n'), ((12155, 12179), 'cddm.core.auto_count', 'core.auto_count', (['t1'], {'n': '(5)'}), '(t1, n=5)\n', (12170, 12179), True, 'import cddm.core as core\n'), ((12257, 12286), 'cddm.core.auto_count', 'core.auto_count', (['t1'], {'aout': 
'out'}), '(t1, aout=out)\n', (12272, 12286), True, 'import cddm.core as core\n'), ((1562, 1586), 'numpy.allclose', 'np.allclose', (['out', 'auto_a'], {}), '(out, auto_a)\n', (1573, 1586), True, 'import numpy as np\n'), ((1655, 1694), 'numpy.allclose', 'np.allclose', (['out', 'auto_a_t1'], {'atol': '(1e-06)'}), '(out, auto_a_t1, atol=1e-06)\n', (1666, 1694), True, 'import numpy as np\n'), ((1777, 1820), 'numpy.allclose', 'np.allclose', (['out', '(auto_a_t1 * 2)'], {'atol': '(1e-06)'}), '(out, auto_a_t1 * 2, atol=1e-06)\n', (1788, 1820), True, 'import numpy as np\n'), ((1949, 1976), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a'], {}), '(out[0], auto_a)\n', (1960, 1976), True, 'import numpy as np\n'), ((2056, 2098), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a_t1'], {'atol': '(1e-06)'}), '(out[0], auto_a_t1, atol=1e-06)\n', (2067, 2098), True, 'import numpy as np\n'), ((2193, 2239), 'numpy.allclose', 'np.allclose', (['out[0]', '(auto_a_t1 * 2)'], {'atol': '(1e-06)'}), '(out[0], auto_a_t1 * 2, atol=1e-06)\n', (2204, 2239), True, 'import numpy as np\n'), ((2370, 2399), 'numpy.allclose', 'np.allclose', (['out', 'auto_a[0:3]'], {}), '(out, auto_a[0:3])\n', (2381, 2399), True, 'import numpy as np\n'), ((2475, 2507), 'numpy.allclose', 'np.allclose', (['out', 'auto_a_t1[0:3]'], {}), '(out, auto_a_t1[0:3])\n', (2486, 2507), True, 'import numpy as np\n'), ((2595, 2631), 'numpy.allclose', 'np.allclose', (['out', '(auto_a_t1[0:3] * 2)'], {}), '(out, auto_a_t1[0:3] * 2)\n', (2606, 2631), True, 'import numpy as np\n'), ((2758, 2790), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a[0:3]'], {}), '(out[0], auto_a[0:3])\n', (2769, 2790), True, 'import numpy as np\n'), ((2878, 2913), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a_t1[0:3]'], {}), '(out[0], auto_a_t1[0:3])\n', (2889, 2913), True, 'import numpy as np\n'), ((3013, 3052), 'numpy.allclose', 'np.allclose', (['out[0]', '(auto_a_t1[0:3] * 2)'], {}), '(out[0], auto_a_t1[0:3] * 2)\n', (3024, 3052), 
True, 'import numpy as np\n'), ((3149, 3173), 'numpy.allclose', 'np.allclose', (['out', 'auto_a'], {}), '(out, auto_a)\n', (3160, 3173), True, 'import numpy as np\n'), ((3239, 3266), 'numpy.allclose', 'np.allclose', (['out', 'auto_a_t1'], {}), '(out, auto_a_t1)\n', (3250, 3266), True, 'import numpy as np\n'), ((3344, 3375), 'numpy.allclose', 'np.allclose', (['out', '(auto_a_t1 * 2)'], {}), '(out, auto_a_t1 * 2)\n', (3355, 3375), True, 'import numpy as np\n'), ((3493, 3520), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a'], {}), '(out[0], auto_a)\n', (3504, 3520), True, 'import numpy as np\n'), ((3598, 3628), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a_t1'], {}), '(out[0], auto_a_t1)\n', (3609, 3628), True, 'import numpy as np\n'), ((3718, 3752), 'numpy.allclose', 'np.allclose', (['out[0]', '(auto_a_t1 * 2)'], {}), '(out[0], auto_a_t1 * 2)\n', (3729, 3752), True, 'import numpy as np\n'), ((3866, 3895), 'numpy.allclose', 'np.allclose', (['out', 'auto_a[0:3]'], {}), '(out, auto_a[0:3])\n', (3877, 3895), True, 'import numpy as np\n'), ((3967, 3999), 'numpy.allclose', 'np.allclose', (['out', 'auto_a_t1[0:3]'], {}), '(out, auto_a_t1[0:3])\n', (3978, 3999), True, 'import numpy as np\n'), ((4083, 4119), 'numpy.allclose', 'np.allclose', (['out', '(auto_a_t1[0:3] * 2)'], {}), '(out, auto_a_t1[0:3] * 2)\n', (4094, 4119), True, 'import numpy as np\n'), ((4245, 4277), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a[0:3]'], {}), '(out[0], auto_a[0:3])\n', (4256, 4277), True, 'import numpy as np\n'), ((4361, 4396), 'numpy.allclose', 'np.allclose', (['out[0]', 'auto_a_t1[0:3]'], {}), '(out[0], auto_a_t1[0:3])\n', (4372, 4396), True, 'import numpy as np\n'), ((4491, 4530), 'numpy.allclose', 'np.allclose', (['out[0]', '(auto_a_t1[0:3] * 2)'], {}), '(out[0], auto_a_t1[0:3] * 2)\n', (4502, 4530), True, 'import numpy as np\n'), ((4647, 4674), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b'], {}), '(out, cross_a_b)\n', (4658, 4674), True, 'import numpy as 
np\n'), ((4750, 4783), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b_t1_t2'], {}), '(out, cross_a_b_t1_t2)\n', (4761, 4783), True, 'import numpy as np\n'), ((4871, 4908), 'numpy.allclose', 'np.allclose', (['out', '(cross_a_b_t1_t2 * 2)'], {}), '(out, cross_a_b_t1_t2 * 2)\n', (4882, 4908), True, 'import numpy as np\n'), ((5038, 5068), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b'], {}), '(out[0], cross_a_b)\n', (5049, 5068), True, 'import numpy as np\n'), ((5155, 5191), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b_t1_t2'], {}), '(out[0], cross_a_b_t1_t2)\n', (5166, 5191), True, 'import numpy as np\n'), ((5291, 5331), 'numpy.allclose', 'np.allclose', (['out[0]', '(cross_a_b_t1_t2 * 2)'], {}), '(out[0], cross_a_b_t1_t2 * 2)\n', (5302, 5331), True, 'import numpy as np\n'), ((5457, 5488), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b[:3]'], {}), '(out, cross_a_b[:3])\n', (5468, 5488), True, 'import numpy as np\n'), ((5571, 5608), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b_t1_t2[:3]'], {}), '(out, cross_a_b_t1_t2[:3])\n', (5582, 5608), True, 'import numpy as np\n'), ((5703, 5744), 'numpy.allclose', 'np.allclose', (['out', '(cross_a_b_t1_t2[:3] * 2)'], {}), '(out, cross_a_b_t1_t2[:3] * 2)\n', (5714, 5744), True, 'import numpy as np\n'), ((5876, 5910), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b[:3]'], {}), '(out[0], cross_a_b[:3])\n', (5887, 5910), True, 'import numpy as np\n'), ((6006, 6046), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b_t1_t2[:3]'], {}), '(out[0], cross_a_b_t1_t2[:3])\n', (6017, 6046), True, 'import numpy as np\n'), ((6154, 6198), 'numpy.allclose', 'np.allclose', (['out[0]', '(cross_a_b_t1_t2[:3] * 2)'], {}), '(out[0], cross_a_b_t1_t2[:3] * 2)\n', (6165, 6198), True, 'import numpy as np\n'), ((6299, 6326), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b'], {}), '(out, cross_a_b)\n', (6310, 6326), True, 'import numpy as np\n'), ((6398, 6431), 'numpy.allclose', 'np.allclose', 
(['out', 'cross_a_b_t1_t2'], {}), '(out, cross_a_b_t1_t2)\n', (6409, 6431), True, 'import numpy as np\n'), ((6515, 6552), 'numpy.allclose', 'np.allclose', (['out', '(cross_a_b_t1_t2 * 2)'], {}), '(out, cross_a_b_t1_t2 * 2)\n', (6526, 6552), True, 'import numpy as np\n'), ((6666, 6696), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b'], {}), '(out[0], cross_a_b)\n', (6677, 6696), True, 'import numpy as np\n'), ((6780, 6816), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b_t1_t2'], {}), '(out[0], cross_a_b_t1_t2)\n', (6791, 6816), True, 'import numpy as np\n'), ((6912, 6952), 'numpy.allclose', 'np.allclose', (['out[0]', '(cross_a_b_t1_t2 * 2)'], {}), '(out[0], cross_a_b_t1_t2 * 2)\n', (6923, 6952), True, 'import numpy as np\n'), ((7070, 7101), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b[:3]'], {}), '(out, cross_a_b[:3])\n', (7081, 7101), True, 'import numpy as np\n'), ((7180, 7217), 'numpy.allclose', 'np.allclose', (['out', 'cross_a_b_t1_t2[:3]'], {}), '(out, cross_a_b_t1_t2[:3])\n', (7191, 7217), True, 'import numpy as np\n'), ((7308, 7349), 'numpy.allclose', 'np.allclose', (['out', '(cross_a_b_t1_t2[:3] * 2)'], {}), '(out, cross_a_b_t1_t2[:3] * 2)\n', (7319, 7349), True, 'import numpy as np\n'), ((7480, 7514), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b[:3]'], {}), '(out[0], cross_a_b[:3])\n', (7491, 7514), True, 'import numpy as np\n'), ((7606, 7646), 'numpy.allclose', 'np.allclose', (['out[0]', 'cross_a_b_t1_t2[:3]'], {}), '(out[0], cross_a_b_t1_t2[:3])\n', (7617, 7646), True, 'import numpy as np\n'), ((7750, 7791), 'numpy.allclose', 'np.allclose', (['out', '(cross_a_b_t1_t2[:3] * 2)'], {}), '(out, cross_a_b_t1_t2[:3] * 2)\n', (7761, 7791), True, 'import numpy as np\n'), ((7910, 7938), 'numpy.allclose', 'np.allclose', (['out', 'auto_sum_a'], {}), '(out, auto_sum_a)\n', (7921, 7938), True, 'import numpy as np\n'), ((7997, 8028), 'numpy.allclose', 'np.allclose', (['out', 'auto_sum_a_t1'], {}), '(out, auto_sum_a_t1)\n', (8008, 
8028), True, 'import numpy as np\n'), ((8100, 8135), 'numpy.allclose', 'np.allclose', (['out', '(auto_sum_a_t1 * 2)'], {}), '(out, auto_sum_a_t1 * 2)\n', (8111, 8135), True, 'import numpy as np\n'), ((8229, 8262), 'numpy.allclose', 'np.allclose', (['out', 'auto_sum_a[0:3]'], {}), '(out, auto_sum_a[0:3])\n', (8240, 8262), True, 'import numpy as np\n'), ((8328, 8364), 'numpy.allclose', 'np.allclose', (['out', 'auto_sum_a_t1[0:3]'], {}), '(out, auto_sum_a_t1[0:3])\n', (8339, 8364), True, 'import numpy as np\n'), ((8437, 8477), 'numpy.allclose', 'np.allclose', (['out', '(auto_sum_a_t1[0:3] * 2)'], {}), '(out, auto_sum_a_t1[0:3] * 2)\n', (8448, 8477), True, 'import numpy as np\n'), ((8554, 8594), 'numpy.allclose', 'np.allclose', (['out', '(auto_sum_a_t1[0:3] * 3)'], {}), '(out, auto_sum_a_t1[0:3] * 3)\n', (8565, 8594), True, 'import numpy as np\n'), ((8698, 8729), 'numpy.allclose', 'np.allclose', (['out', 'auto_sum_a_t1'], {}), '(out, auto_sum_a_t1)\n', (8709, 8729), True, 'import numpy as np\n'), ((8805, 8840), 'numpy.allclose', 'np.allclose', (['out', '(auto_sum_a_t1 * 2)'], {}), '(out, auto_sum_a_t1 * 2)\n', (8816, 8840), True, 'import numpy as np\n'), ((8953, 8989), 'numpy.allclose', 'np.allclose', (['out', 'auto_sum_a_t1[0:3]'], {}), '(out, auto_sum_a_t1[0:3])\n', (8964, 8989), True, 'import numpy as np\n'), ((9071, 9111), 'numpy.allclose', 'np.allclose', (['out', '(auto_sum_a_t1[0:3] * 2)'], {}), '(out, auto_sum_a_t1[0:3] * 2)\n', (9082, 9111), True, 'import numpy as np\n'), ((9185, 9225), 'numpy.allclose', 'np.allclose', (['out', '(auto_sum_a_t1[0:3] * 3)'], {}), '(out, auto_sum_a_t1[0:3] * 3)\n', (9196, 9225), True, 'import numpy as np\n'), ((9311, 9340), 'numpy.allclose', 'np.allclose', (['out', 'cross_sum_a'], {}), '(out, cross_sum_a)\n', (9322, 9340), True, 'import numpy as np\n'), ((9403, 9438), 'numpy.allclose', 'np.allclose', (['out', 'cross_sum_a_t1_t2'], {}), '(out, cross_sum_a_t1_t2)\n', (9414, 9438), True, 'import numpy as np\n'), ((9514, 9553), 
'numpy.allclose', 'np.allclose', (['out', '(cross_sum_a_t1_t2 * 2)'], {}), '(out, cross_sum_a_t1_t2 * 2)\n', (9525, 9553), True, 'import numpy as np\n'), ((9647, 9681), 'numpy.allclose', 'np.allclose', (['out', 'cross_sum_a[0:3]'], {}), '(out, cross_sum_a[0:3])\n', (9658, 9681), True, 'import numpy as np\n'), ((9751, 9791), 'numpy.allclose', 'np.allclose', (['out', 'cross_sum_a_t1_t2[0:3]'], {}), '(out, cross_sum_a_t1_t2[0:3])\n', (9762, 9791), True, 'import numpy as np\n'), ((9867, 9911), 'numpy.allclose', 'np.allclose', (['out', '(cross_sum_a_t1_t2[0:3] * 2)'], {}), '(out, cross_sum_a_t1_t2[0:3] * 2)\n', (9878, 9911), True, 'import numpy as np\n'), ((10012, 10047), 'numpy.allclose', 'np.allclose', (['out', 'cross_sum_a_t1_t2'], {}), '(out, cross_sum_a_t1_t2)\n', (10023, 10047), True, 'import numpy as np\n'), ((10127, 10166), 'numpy.allclose', 'np.allclose', (['out', '(cross_sum_a_t1_t2 * 2)'], {}), '(out, cross_sum_a_t1_t2 * 2)\n', (10138, 10166), True, 'import numpy as np\n'), ((10276, 10316), 'numpy.allclose', 'np.allclose', (['out', 'cross_sum_a_t1_t2[0:3]'], {}), '(out, cross_sum_a_t1_t2[0:3])\n', (10287, 10316), True, 'import numpy as np\n'), ((10396, 10440), 'numpy.allclose', 'np.allclose', (['out', '(cross_sum_a_t1_t2[0:3] * 2)'], {}), '(out, cross_sum_a_t1_t2[0:3] * 2)\n', (10407, 10440), True, 'import numpy as np\n'), ((10524, 10568), 'numpy.allclose', 'np.allclose', (['out', '(cross_sum_a_t1_t2[0:3] * 3)'], {}), '(out, cross_sum_a_t1_t2[0:3] * 3)\n', (10535, 10568), True, 'import numpy as np\n'), ((10669, 10702), 'numpy.arange', 'np.arange', (['test_data1.shape[axis]'], {}), '(test_data1.shape[axis])\n', (10678, 10702), True, 'import numpy as np\n'), ((10778, 10823), 'cddm.core.cross_sum', 'core.cross_sum', (['test_data1', 't1', 't2'], {'axis': 'axis'}), '(test_data1, t1, t2, axis=axis)\n', (10792, 10823), True, 'import cddm.core as core\n'), ((10843, 10892), 'cddm.core.cross_sum_fft', 'core.cross_sum_fft', (['test_data1', 't1', 't2'], {'axis': 
'axis'}), '(test_data1, t1, t2, axis=axis)\n', (10861, 10892), True, 'import cddm.core as core\n'), ((11074, 11106), 'numpy.allclose', 'np.allclose', (['out', 'cross_count_10'], {}), '(out, cross_count_10)\n', (11085, 11106), True, 'import numpy as np\n'), ((11169, 11204), 'numpy.allclose', 'np.allclose', (['out', 'cross_count_t1_t2'], {}), '(out, cross_count_t1_t2)\n', (11180, 11204), True, 'import numpy as np\n'), ((11280, 11319), 'numpy.allclose', 'np.allclose', (['out', '(cross_count_t1_t2 * 2)'], {}), '(out, cross_count_t1_t2 * 2)\n', (11291, 11319), True, 'import numpy as np\n'), ((11421, 11458), 'numpy.allclose', 'np.allclose', (['out', 'cross_count_10[0:5]'], {}), '(out, cross_count_10[0:5])\n', (11432, 11458), True, 'import numpy as np\n'), ((11525, 11565), 'numpy.allclose', 'np.allclose', (['out', 'cross_count_t1_t2[0:5]'], {}), '(out, cross_count_t1_t2[0:5])\n', (11536, 11565), True, 'import numpy as np\n'), ((11641, 11685), 'numpy.allclose', 'np.allclose', (['out', '(2 * cross_count_t1_t2[0:5])'], {}), '(out, 2 * cross_count_t1_t2[0:5])\n', (11652, 11685), True, 'import numpy as np\n'), ((11776, 11807), 'numpy.allclose', 'np.allclose', (['out', 'auto_count_10'], {}), '(out, auto_count_10)\n', (11787, 11807), True, 'import numpy as np\n'), ((11866, 11897), 'numpy.allclose', 'np.allclose', (['out', 'auto_count_t1'], {}), '(out, auto_count_t1)\n', (11877, 11897), True, 'import numpy as np\n'), ((11969, 12004), 'numpy.allclose', 'np.allclose', (['out', '(auto_count_t1 * 2)'], {}), '(out, auto_count_t1 * 2)\n', (11980, 12004), True, 'import numpy as np\n'), ((12104, 12140), 'numpy.allclose', 'np.allclose', (['out', 'auto_count_10[0:5]'], {}), '(out, auto_count_10[0:5])\n', (12115, 12140), True, 'import numpy as np\n'), ((12206, 12241), 'numpy.allclose', 'np.allclose', (['out', 'auto_count_t1[:5]'], {}), '(out, auto_count_t1[:5])\n', (12217, 12241), True, 'import numpy as np\n'), ((12313, 12352), 'numpy.allclose', 'np.allclose', (['out', '(2 * 
auto_count_t1[:5])'], {}), '(out, 2 * auto_count_t1[:5])\n', (12324, 12352), True, 'import numpy as np\n'), ((12496, 12538), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': '(0)'}), '(test_data1, test_data2, axis=0)\n', (12506, 12538), True, 'import cddm.core as core\n'), ((12560, 12622), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'n': '(8)', 'norm': '(1)', 'method': 'method'}), '(test_data1, test_data2, n=8, norm=1, method=method)\n', (12570, 12622), True, 'import cddm.core as core\n'), ((12647, 12676), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {}), '(data, bg, var)\n', (12661, 12676), True, 'import cddm.core as core\n'), ((12695, 12731), 'cddm.video.fromarrays', 'fromarrays', (['(test_data1, test_data2)'], {}), '((test_data1, test_data2))\n', (12705, 12731), False, 'from cddm.video import fromarrays\n'), ((12868, 12897), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {}), '(data, bg, var)\n', (12882, 12897), True, 'import cddm.core as core\n'), ((13061, 13091), 'cddm.core.stats', 'core.stats', (['test_data1'], {'axis': '(0)'}), '(test_data1, axis=0)\n', (13071, 13091), True, 'import cddm.core as core\n'), ((13114, 13176), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data1'], {'n': '(8)', 'norm': '(2)', 'method': 'method'}), '(test_data1, test_data1, n=8, norm=2, method=method)\n', (13124, 13176), True, 'import cddm.core as core\n'), ((13201, 13239), 'cddm.core.normalize', 'core.normalize', (['data1', 'bg', 'var'], {'norm': '(2)'}), '(data1, bg, var, norm=2)\n', (13215, 13239), True, 'import cddm.core as core\n'), ((13269, 13320), 'cddm.core.iacorr', 'core.iacorr', (['test_data1'], {'n': '(8)', 'norm': '(2)', 'method': 'method'}), '(test_data1, n=8, norm=2, method=method)\n', (13280, 13320), True, 'import cddm.core as core\n'), ((13346, 13384), 'cddm.core.normalize', 'core.normalize', (['data2', 'bg', 'var'], {'norm': '(2)'}), '(data2, bg, var, norm=2)\n', (13360, 
13384), True, 'import cddm.core as core\n'), ((13552, 13582), 'cddm.core.stats', 'core.stats', (['test_data1'], {'axis': '(0)'}), '(test_data1, axis=0)\n', (13562, 13582), True, 'import cddm.core as core\n'), ((13605, 13655), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'n': '(8)', 'norm': '(1)', 'method': 'method'}), '(test_data1, n=8, norm=1, method=method)\n', (13615, 13655), True, 'import cddm.core as core\n'), ((13681, 13719), 'cddm.core.normalize', 'core.normalize', (['data1', 'bg', 'var'], {'norm': '(1)'}), '(data1, bg, var, norm=1)\n', (13695, 13719), True, 'import cddm.core as core\n'), ((13749, 13800), 'cddm.core.iacorr', 'core.iacorr', (['test_data1'], {'n': '(8)', 'norm': '(1)', 'method': 'method'}), '(test_data1, n=8, norm=1, method=method)\n', (13760, 13800), True, 'import cddm.core as core\n'), ((13826, 13864), 'cddm.core.normalize', 'core.normalize', (['data2', 'bg', 'var'], {'norm': '(1)'}), '(data2, bg, var, norm=1)\n', (13840, 13864), True, 'import cddm.core as core\n'), ((10721, 10754), 'numpy.arange', 'np.arange', (['test_data1.shape[axis]'], {}), '(test_data1.shape[axis])\n', (10730, 10754), True, 'import numpy as np\n'), ((10921, 10944), 'numpy.allclose', 'np.allclose', (['out1', 'out2'], {}), '(out1, out2)\n', (10932, 10944), True, 'import numpy as np\n'), ((12928, 12951), 'numpy.allclose', 'np.allclose', (['out1', 'out2'], {}), '(out1, out2)\n', (12939, 12951), True, 'import numpy as np\n'), ((13417, 13440), 'numpy.allclose', 'np.allclose', (['out1', 'out2'], {}), '(out1, out2)\n', (13428, 13440), True, 'import numpy as np\n'), ((13897, 13920), 'numpy.allclose', 'np.allclose', (['out1', 'out2'], {}), '(out1, out2)\n', (13908, 13920), True, 'import numpy as np\n'), ((15210, 15255), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 'axis'}), '(test_data1, test_data2, axis=axis)\n', (15220, 15255), True, 'import cddm.core as core\n'), ((15281, 15348), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], 
{'norm': '(3)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=3, method='fft', axis=axis)\n", (15291, 15348), True, 'import cddm.core as core\n'), ((15382, 15459), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(3)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=3, mode=mode, scale=scale, mask=test_mask)\n', (15396, 15459), True, 'import cddm.core as core\n'), ((15496, 15564), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(3)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=3, method='corr', axis=axis)\n", (15506, 15564), True, 'import cddm.core as core\n'), ((15599, 15676), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(3)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=3, mode=mode, scale=scale, mask=test_mask)\n', (15613, 15676), True, 'import cddm.core as core\n'), ((15796, 15864), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(3)', 'method': '"""diff"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=3, method='diff', axis=axis)\n", (15806, 15864), True, 'import cddm.core as core\n'), ((15899, 15976), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(3)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=3, mode=mode, scale=scale, mask=test_mask)\n', (15913, 15976), True, 'import cddm.core as core\n'), ((19290, 19335), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 'axis'}), '(test_data1, test_data2, axis=axis)\n', (19300, 19335), True, 'import cddm.core as core\n'), ((19361, 19428), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(1)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=1, method='fft', axis=axis)\n", (19371, 19428), True, 'import cddm.core as core\n'), ((19462, 19539), 
'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=1, mode=mode, scale=scale, mask=test_mask)\n', (19476, 19539), True, 'import cddm.core as core\n'), ((19575, 19643), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(1)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=1, method='corr', axis=axis)\n", (19585, 19643), True, 'import cddm.core as core\n'), ((19678, 19755), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=1, mode=mode, scale=scale, mask=test_mask)\n', (19692, 19755), True, 'import cddm.core as core\n'), ((19874, 19942), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(1)', 'method': '"""diff"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=1, method='diff', axis=axis)\n", (19884, 19942), True, 'import cddm.core as core\n'), ((19977, 20054), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=1, mode=mode, scale=scale, mask=test_mask)\n', (19991, 20054), True, 'import cddm.core as core\n'), ((21762, 21807), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 'axis'}), '(test_data1, test_data2, axis=axis)\n', (21772, 21807), True, 'import cddm.core as core\n'), ((21833, 21900), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(0)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=0, method='fft', axis=axis)\n", (21843, 21900), True, 'import cddm.core as core\n'), ((21934, 22011), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(0)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=0, mode=mode, scale=scale, mask=test_mask)\n', 
(21948, 22011), True, 'import cddm.core as core\n'), ((22048, 22116), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(0)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=0, method='corr', axis=axis)\n", (22058, 22116), True, 'import cddm.core as core\n'), ((22151, 22228), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(0)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=0, mode=mode, scale=scale, mask=test_mask)\n', (22165, 22228), True, 'import cddm.core as core\n'), ((23234, 23268), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {}), '(test_data1, test_data2)\n', (23244, 23268), True, 'import cddm.core as core\n'), ((23292, 23348), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(2)', 'method': '"""fft"""'}), "(test_data1, test_data2, norm=2, method='fft')\n", (23302, 23348), True, 'import cddm.core as core\n'), ((23380, 23457), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(2)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=2, mode=mode, scale=scale, mask=test_mask)\n', (23394, 23457), True, 'import cddm.core as core\n'), ((23494, 23551), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(2)', 'method': '"""corr"""'}), "(test_data1, test_data2, norm=2, method='corr')\n", (23504, 23551), True, 'import cddm.core as core\n'), ((23584, 23661), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(2)', 'mode': 'mode', 'scale': 'scale', 'mask': 'test_mask'}), '(data, bg, var, norm=2, mode=mode, scale=scale, mask=test_mask)\n', (23598, 23661), True, 'import cddm.core as core\n'), ((23866, 23887), 'cddm.core.abs2', 'core.abs2', (['test_data1'], {}), '(test_data1)\n', (23875, 23887), True, 'import cddm.core as core\n'), ((14188, 14233), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 
'axis'}), '(test_data1, test_data2, axis=axis)\n', (14198, 14233), True, 'import cddm.core as core\n'), ((14263, 14330), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(3)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=3, method='fft', axis=axis)\n", (14273, 14330), True, 'import cddm.core as core\n'), ((14368, 14429), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(3)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=3, mode=mode, scale=scale)\n', (14382, 14429), True, 'import cddm.core as core\n'), ((14472, 14540), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(3)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=3, method='corr', axis=axis)\n", (14482, 14540), True, 'import cddm.core as core\n'), ((14579, 14640), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(3)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=3, mode=mode, scale=scale)\n', (14593, 14640), True, 'import cddm.core as core\n'), ((14774, 14842), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(3)', 'method': '"""diff"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=3, method='diff', axis=axis)\n", (14784, 14842), True, 'import cddm.core as core\n'), ((14881, 14942), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(3)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=3, mode=mode, scale=scale)\n', (14895, 14942), True, 'import cddm.core as core\n'), ((15734, 15766), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (15745, 15766), True, 'import numpy as np\n'), ((16034, 16066), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (16045, 16066), True, 'import numpy as np\n'), ((16265, 16298), 'cddm.core.stats', 'core.stats', (['test_data1'], {'axis': 'axis'}), 
'(test_data1, axis=axis)\n', (16275, 16298), True, 'import cddm.core as core\n'), ((16328, 16395), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data1'], {'norm': '(3)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data1, norm=3, method='fft', axis=axis)\n", (16338, 16395), True, 'import cddm.core as core\n'), ((16433, 16494), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (16447, 16494), True, 'import cddm.core as core\n'), ((16537, 16593), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(3)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, norm=3, method='corr', axis=axis)\n", (16547, 16593), True, 'import cddm.core as core\n'), ((16631, 16692), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (16645, 16692), True, 'import cddm.core as core\n'), ((16826, 16882), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(1)', 'method': '"""diff"""', 'axis': 'axis'}), "(test_data1, norm=1, method='diff', axis=axis)\n", (16836, 16882), True, 'import cddm.core as core\n'), ((16920, 16981), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (16934, 16981), True, 'import cddm.core as core\n'), ((17264, 17309), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 'axis'}), '(test_data1, test_data2, axis=axis)\n', (17274, 17309), True, 'import cddm.core as core\n'), ((17339, 17406), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(1)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=1, method='fft', axis=axis)\n", (17349, 17406), True, 'import cddm.core as core\n'), ((17444, 17505), 
'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (17458, 17505), True, 'import cddm.core as core\n'), ((17548, 17616), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(1)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=1, method='corr', axis=axis)\n", (17558, 17616), True, 'import cddm.core as core\n'), ((17655, 17716), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (17669, 17716), True, 'import cddm.core as core\n'), ((17850, 17918), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(1)', 'method': '"""diff"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=1, method='diff', axis=axis)\n", (17860, 17918), True, 'import cddm.core as core\n'), ((17957, 18018), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (17971, 18018), True, 'import cddm.core as core\n'), ((18297, 18330), 'cddm.core.stats', 'core.stats', (['test_data1'], {'axis': 'axis'}), '(test_data1, axis=axis)\n', (18307, 18330), True, 'import cddm.core as core\n'), ((18360, 18415), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(1)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, norm=1, method='fft', axis=axis)\n", (18370, 18415), True, 'import cddm.core as core\n'), ((18454, 18515), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (18468, 18515), True, 'import cddm.core as core\n'), ((18558, 18614), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(1)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, norm=1, 
method='corr', axis=axis)\n", (18568, 18614), True, 'import cddm.core as core\n'), ((18652, 18713), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (18666, 18713), True, 'import cddm.core as core\n'), ((18847, 18903), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(1)', 'method': '"""diff"""', 'axis': 'axis'}), "(test_data1, norm=1, method='diff', axis=axis)\n", (18857, 18903), True, 'import cddm.core as core\n'), ((18941, 19002), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(1)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=1, mode=mode, scale=scale)\n', (18955, 19002), True, 'import cddm.core as core\n'), ((19812, 19844), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (19823, 19844), True, 'import numpy as np\n'), ((20111, 20143), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (20122, 20143), True, 'import numpy as np\n'), ((20326, 20371), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 'axis'}), '(test_data1, test_data2, axis=axis)\n', (20336, 20371), True, 'import cddm.core as core\n'), ((20401, 20468), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(0)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=0, method='fft', axis=axis)\n", (20411, 20468), True, 'import cddm.core as core\n'), ((20506, 20567), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(0)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=0, mode=mode, scale=scale)\n', (20520, 20567), True, 'import cddm.core as core\n'), ((20610, 20678), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(0)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=0, method='corr', axis=axis)\n", 
(20620, 20678), True, 'import cddm.core as core\n'), ((20717, 20778), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(0)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=0, mode=mode, scale=scale)\n', (20731, 20778), True, 'import cddm.core as core\n'), ((21057, 21090), 'cddm.core.stats', 'core.stats', (['test_data1'], {'axis': 'axis'}), '(test_data1, axis=axis)\n', (21067, 21090), True, 'import cddm.core as core\n'), ((21120, 21175), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(0)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, norm=0, method='fft', axis=axis)\n", (21130, 21175), True, 'import cddm.core as core\n'), ((21214, 21275), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(0)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=0, mode=mode, scale=scale)\n', (21228, 21275), True, 'import cddm.core as core\n'), ((21318, 21374), 'cddm.core.acorr', 'core.acorr', (['test_data1'], {'norm': '(0)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, norm=0, method='corr', axis=axis)\n", (21328, 21374), True, 'import cddm.core as core\n'), ((21412, 21473), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(0)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=0, mode=mode, scale=scale)\n', (21426, 21473), True, 'import cddm.core as core\n'), ((22286, 22318), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (22297, 22318), True, 'import numpy as np\n'), ((22520, 22565), 'cddm.core.stats', 'core.stats', (['test_data1', 'test_data2'], {'axis': 'axis'}), '(test_data1, test_data2, axis=axis)\n', (22530, 22565), True, 'import cddm.core as core\n'), ((22595, 22662), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(2)', 'method': '"""fft"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=2, method='fft', axis=axis)\n", (22605, 22662), True, 'import cddm.core as 
core\n'), ((22700, 22761), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(2)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=2, mode=mode, scale=scale)\n', (22714, 22761), True, 'import cddm.core as core\n'), ((22804, 22872), 'cddm.core.ccorr', 'core.ccorr', (['test_data1', 'test_data2'], {'norm': '(2)', 'method': '"""corr"""', 'axis': 'axis'}), "(test_data1, test_data2, norm=2, method='corr', axis=axis)\n", (22814, 22872), True, 'import cddm.core as core\n'), ((22911, 22972), 'cddm.core.normalize', 'core.normalize', (['data', 'bg', 'var'], {'norm': '(2)', 'mode': 'mode', 'scale': 'scale'}), '(data, bg, var, norm=2, mode=mode, scale=scale)\n', (22925, 22972), True, 'import cddm.core as core\n'), ((23718, 23750), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (23729, 23750), True, 'import numpy as np\n'), ((23889, 23907), 'numpy.abs', 'np.abs', (['test_data1'], {}), '(test_data1)\n', (23895, 23907), True, 'import numpy as np\n'), ((14704, 14736), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (14715, 14736), True, 'import numpy as np\n'), ((15006, 15038), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (15017, 15038), True, 'import numpy as np\n'), ((16756, 16788), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (16767, 16788), True, 'import numpy as np\n'), ((17045, 17077), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (17056, 17077), True, 'import numpy as np\n'), ((17780, 17812), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (17791, 17812), True, 'import numpy as np\n'), ((18082, 18114), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (18093, 18114), True, 'import numpy as np\n'), ((18777, 18809), 
'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (18788, 18809), True, 'import numpy as np\n'), ((19066, 19098), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (19077, 19098), True, 'import numpy as np\n'), ((20842, 20874), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (20853, 20874), True, 'import numpy as np\n'), ((21537, 21569), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (21548, 21569), True, 'import numpy as np\n'), ((23036, 23068), 'numpy.allclose', 'np.allclose', (['self.out', 'out_other'], {}), '(self.out, out_other)\n', (23047, 23068), True, 'import numpy as np\n')]
|
import argparse
import numpy as np
import pytorch_lightning as pl
from torch.utils.data.dataloader import DataLoader
import utils.data.functions
class SpatioTemporalCSVDataModule(pl.LightningDataModule):
    """Lightning data module for spatio-temporal CSV data.

    Every matrix (features, adjacency, distance, direction) is loaded eagerly
    at construction time; ``setup`` only builds the train/validation torch
    datasets from the feature matrix.
    """

    def __init__(
        self,
        feat_path: str,
        adj_path: str,
        batch_size: int = 32,
        seq_len: int = 12,
        pre_len: int = 3,
        split_ratio: float = 0.8,
        normalize: bool = True,
        **kwargs
    ):
        super().__init__()
        self._feat_path = feat_path
        self._adj_path = adj_path
        self.batch_size = batch_size
        self.seq_len = seq_len
        self.pre_len = pre_len
        self.split_ratio = split_ratio
        self.normalize = normalize
        self._feat = utils.data.functions.load_features(self._feat_path)
        self._feat_max_val = np.max(self._feat)
        self._adj = utils.data.functions.load_adjacency_matrix(self._adj_path)
        # Auxiliary spatial matrices; the paths are fixed relative to the CWD.
        self._dis = utils.data.functions.load_distance_matrix(r'data/sz_distance.csv')
        self.direct = utils.data.functions.load_distance_matrix(r'data/sz_direct.csv')

    @staticmethod
    def add_data_specific_arguments(parent_parser):
        """Return a child parser extended with the data-related CLI flags."""
        parser = argparse.ArgumentParser(parents=[parent_parser], add_help=False)
        for flag, flag_type, default in (
            ("--batch_size", int, 32),
            ("--seq_len", int, 32),
            ("--pre_len", int, 1),
            ("--split_ratio", float, 0.8),
            ("--normalize", bool, True),
        ):
            parser.add_argument(flag, type=flag_type, default=default)
        return parser

    def setup(self, stage: str = None):
        """Create ``train_dataset`` and ``val_dataset`` from the features."""
        self.train_dataset, self.val_dataset = utils.data.functions.generate_torch_datasets(
            self._feat,
            self.seq_len,
            self.pre_len,
            split_ratio=self.split_ratio,
            normalize=self.normalize,
        )

    def train_dataloader(self):
        """Mini-batched loader over the training windows."""
        return DataLoader(self.train_dataset, batch_size=self.batch_size)

    def val_dataloader(self):
        """Loader that yields the whole validation split as one batch."""
        return DataLoader(self.val_dataset, batch_size=len(self.val_dataset))

    @property
    def feat_max_val(self):
        # Maximum raw feature value (see np.max in __init__).
        return self._feat_max_val

    @property
    def adj(self):
        return self._adj

    @property
    def dis(self):
        return self._dis
|
[
"torch.utils.data.dataloader.DataLoader",
"numpy.max",
"argparse.ArgumentParser"
] |
[((863, 881), 'numpy.max', 'np.max', (['self._feat'], {}), '(self._feat)\n', (869, 881), True, 'import numpy as np\n'), ((1223, 1287), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'parents': '[parent_parser]', 'add_help': '(False)'}), '(parents=[parent_parser], add_help=False)\n', (1246, 1287), False, 'import argparse\n'), ((2024, 2082), 'torch.utils.data.dataloader.DataLoader', 'DataLoader', (['self.train_dataset'], {'batch_size': 'self.batch_size'}), '(self.train_dataset, batch_size=self.batch_size)\n', (2034, 2082), False, 'from torch.utils.data.dataloader import DataLoader\n')]
|
# Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Share group type access interface."""
from manilaclient import api_versions
from manilaclient import base
from manilaclient.common.apiclient import base as common_base
# REST endpoints for share group types; the %s slot takes the type id.
RESOURCES_PATH = '/share-group-types'
RESOURCE_PATH = '/share-group-types/%s/access'
RESOURCE_PATH_ACTION = '/share-group-types/%s/action'
# Resource name handed to the list helper when parsing responses.
RESOURCE_NAME = 'share_group_type_access'
class ShareGroupTypeAccess(common_base.Resource):
    """A single project's access entry for a share group type."""

    def __repr__(self):
        return "<Share Group Type Access: %s>" % (self.id,)
class ShareGroupTypeAccessManager(base.ManagerWithFind):
    """Manage :class:`ShareGroupTypeAccess` resources."""

    resource_class = ShareGroupTypeAccess

    @api_versions.wraps("2.31")
    @api_versions.experimental_api
    def list(self, share_group_type, search_opts=None):
        """List the projects with access to ``share_group_type``.

        Public group types have no access list, so ``None`` is returned
        for them.
        """
        if share_group_type.is_public:
            return None
        type_id = common_base.getid(share_group_type)
        return self._list(RESOURCE_PATH % type_id, RESOURCE_NAME)

    @api_versions.wraps("2.31")
    @api_versions.experimental_api
    def add_project_access(self, share_group_type, project):
        """Grant ``project`` access to the given share group type."""
        self._action('addProjectAccess', share_group_type,
                     {'project': project})

    @api_versions.wraps("2.31")
    @api_versions.experimental_api
    def remove_project_access(self, share_group_type, project):
        """Revoke ``project`` access from the given share group type."""
        self._action('removeProjectAccess', share_group_type,
                     {'project': project})

    def _action(self, action, share_group_type, info, **kwargs):
        """POST a share group type ``action`` with payload ``info``."""
        body = {action: info}
        self.run_hooks('modify_body_for_action', body, **kwargs)
        type_id = common_base.getid(share_group_type)
        return self.api.client.post(RESOURCE_PATH_ACTION % type_id, body=body)
|
[
"manilaclient.api_versions.wraps",
"manilaclient.common.apiclient.base.getid"
] |
[((1250, 1276), 'manilaclient.api_versions.wraps', 'api_versions.wraps', (['"""2.31"""'], {}), "('2.31')\n", (1268, 1276), False, 'from manilaclient import api_versions\n'), ((1599, 1625), 'manilaclient.api_versions.wraps', 'api_versions.wraps', (['"""2.31"""'], {}), "('2.31')\n", (1617, 1625), False, 'from manilaclient import api_versions\n'), ((1900, 1926), 'manilaclient.api_versions.wraps', 'api_versions.wraps', (['"""2.31"""'], {}), "('2.31')\n", (1918, 1926), False, 'from manilaclient import api_versions\n'), ((1461, 1496), 'manilaclient.common.apiclient.base.getid', 'common_base.getid', (['share_group_type'], {}), '(share_group_type)\n', (1478, 1496), True, 'from manilaclient.common.apiclient import base as common_base\n'), ((2446, 2481), 'manilaclient.common.apiclient.base.getid', 'common_base.getid', (['share_group_type'], {}), '(share_group_type)\n', (2463, 2481), True, 'from manilaclient.common.apiclient import base as common_base\n')]
|
from csv import reader, writer
import sys
def get_id(s):
    """Return the record id: the first whitespace-separated token after '>'.

    Adapted from the Biopython SeqIO fasta parser.
    """
    header = s[1:]
    return header.split(None, 1)[0]
# Stream tab-separated rows from stdin to stdout, rewriting two fields in place.
r = reader(sys.stdin, delimiter="\t")
w = writer(sys.stdout, delimiter="\t")
for row in r:
    row[0] = get_id(row[0]) #only keep the Accession number (trim everything after first space)
    row[2] = int(row[2]) + 1 #shift the 3rd field to 1-based coordinates (bed file 3rd field is 1-based)
    w.writerow(row)
|
[
"csv.reader",
"csv.writer"
] |
[((145, 178), 'csv.reader', 'reader', (['sys.stdin'], {'delimiter': '"""\t"""'}), "(sys.stdin, delimiter='\\t')\n", (151, 178), False, 'from csv import reader, writer\n'), ((183, 217), 'csv.writer', 'writer', (['sys.stdout'], {'delimiter': '"""\t"""'}), "(sys.stdout, delimiter='\\t')\n", (189, 217), False, 'from csv import reader, writer\n')]
|
import os
import cv2
import numpy as np
import sys
# Root of the SSD fork of Caffe; its pycaffe bindings are added to sys.path.
caffe_root = os.path.expanduser('~') + "/CNN/ssd"
sys.path.insert(0, caffe_root+'/python')
import caffe
from tqdm import tqdm
# PASCAL VOC class names; index 0 is the background class.
CLASSES = ('background', 'aeroplane', 'bicycle', 'bird', 'boat','bottle', 'bus', 'car', 'cat',
           'chair','cow', 'diningtable', 'dog', 'horse','motorbike', 'person',
           'pottedplant','sheep', 'sofa', 'train', 'tvmonitor')
# One color triple per class index, passed straight to the cv2 drawing calls.
# color index please refer to https://zhuanlan.zhihu.com/p/102303256
colors = [[0,0,0], [128,0,0],[0,128,0],[128,128,0],[0,0,128],[128,0,128],
        [0,0,128],[128,128,128], [64,0,0],[192,0,0],[64,128,0],
        [192,128,0], [64,0,128], [192,0,128], [64,128,128], [192,128,128],
        [0,64,0], [128,64,0], [0,192,0], [128,192,0],[0,64,128]]
# Directory for the annotated preview images (directory name kept as-is).
outputdir="output/preproess"
def showpreprocess(blobs,i,show=False):
    """Render the i-th preprocessed sample with its boxes and save it as JPEG.

    ``blobs`` is a forward-pass output dict; box coordinates in the label
    blob are stored scaled by the image width/height, so they are rescaled
    to pixels before drawing.
    """
    image = np.array(blobs['data'].data)[0].transpose(1, 2, 0).copy()
    annotations = np.array(blobs['label'].data)[0][0]
    height, width, _ = image.shape
    for ann in annotations:
        # Rescale the box corners to pixel coordinates.
        left, top = int(ann[3] * width), int(ann[4] * height)
        right, bottom = int(ann[5] * width), int(ann[6] * height)
        cls_idx = int(ann[1])
        cv2.rectangle(image, (left, top), (right, bottom), colors[cls_idx])
        cv2.putText(image, CLASSES[cls_idx], (left, top), 1, 1, colors[cls_idx])
    if show:
        cv2.imshow("img", image)
        cv2.waitKey()
    cv2.imwrite(outputdir + "/" + str(i) + ".jpg", image)
def main(model="voc/MobileNetSSD_preprocess.prototxt",show=False):
    """Run 20 training-phase forward passes and dump each preprocessed batch."""
    net = caffe.Net(model, caffe.TRAIN)
    for batch_index in tqdm(range(20)):
        batch = net.forward()
        showpreprocess(batch, batch_index)
if __name__=="__main__":
if not os.path.exists(outputdir):
os.makedirs(outputdir)
main()
|
[
"cv2.putText",
"os.makedirs",
"cv2.waitKey",
"cv2.imshow",
"sys.path.insert",
"os.path.exists",
"numpy.array",
"cv2.rectangle",
"caffe.Net",
"os.path.expanduser"
] |
[((101, 143), 'sys.path.insert', 'sys.path.insert', (['(0)', "(caffe_root + '/python')"], {}), "(0, caffe_root + '/python')\n", (116, 143), False, 'import sys\n'), ((64, 87), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (82, 87), False, 'import os\n'), ((827, 855), 'numpy.array', 'np.array', (["blobs['data'].data"], {}), "(blobs['data'].data)\n", (835, 855), True, 'import numpy as np\n'), ((868, 897), 'numpy.array', 'np.array', (["blobs['label'].data"], {}), "(blobs['label'].data)\n", (876, 897), True, 'import numpy as np\n'), ((1472, 1501), 'caffe.Net', 'caffe.Net', (['model', 'caffe.TRAIN'], {}), '(model, caffe.TRAIN)\n', (1481, 1501), False, 'import caffe\n'), ((1174, 1223), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x2, y2)', 'colors[cls]'], {}), '(img, (x, y), (x2, y2), colors[cls])\n', (1187, 1223), False, 'import cv2\n'), ((1227, 1284), 'cv2.putText', 'cv2.putText', (['img', 'CLASSES[cls]', '(x, y)', '(1)', '(1)', 'colors[cls]'], {}), '(img, CLASSES[cls], (x, y), 1, 1, colors[cls])\n', (1238, 1284), False, 'import cv2\n'), ((1301, 1323), 'cv2.imshow', 'cv2.imshow', (['"""img"""', 'img'], {}), "('img', img)\n", (1311, 1323), False, 'import cv2\n'), ((1331, 1344), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (1342, 1344), False, 'import cv2\n'), ((1631, 1656), 'os.path.exists', 'os.path.exists', (['outputdir'], {}), '(outputdir)\n', (1645, 1656), False, 'import os\n'), ((1666, 1688), 'os.makedirs', 'os.makedirs', (['outputdir'], {}), '(outputdir)\n', (1677, 1688), False, 'import os\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class FindApproveServiceListRequest(RpcRequest):
    """Query request for CSB's ``FindApproveServiceList`` operation
    (API version 2017-11-18), issued over HTTPS.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'CSB', '2017-11-18', 'FindApproveServiceList','CSB')
        self.set_protocol_type('https')

    def _query(self, key):
        # Single place to read back a previously set query parameter.
        return self.get_query_params().get(key)

    def get_projectName(self):
        return self._query('projectName')

    def set_projectName(self, projectName):
        self.add_query_param('projectName', projectName)

    def get_approveLevel(self):
        return self._query('approveLevel')

    def set_approveLevel(self, approveLevel):
        self.add_query_param('approveLevel', approveLevel)

    def get_showDelService(self):
        return self._query('showDelService')

    def set_showDelService(self, showDelService):
        self.add_query_param('showDelService', showDelService)

    def get_csbId(self):
        return self._query('csbId')

    def set_csbId(self, csbId):
        self.add_query_param('csbId', csbId)

    def get_alias(self):
        return self._query('alias')

    def set_alias(self, alias):
        self.add_query_param('alias', alias)

    def get_serviceName(self):
        return self._query('serviceName')

    def set_serviceName(self, serviceName):
        self.add_query_param('serviceName', serviceName)
|
[
"aliyunsdkcore.request.RpcRequest.__init__"
] |
[((910, 989), 'aliyunsdkcore.request.RpcRequest.__init__', 'RpcRequest.__init__', (['self', '"""CSB"""', '"""2017-11-18"""', '"""FindApproveServiceList"""', '"""CSB"""'], {}), "(self, 'CSB', '2017-11-18', 'FindApproveServiceList', 'CSB')\n", (929, 989), False, 'from aliyunsdkcore.request import RpcRequest\n')]
|
import urllib3
import os
import sys
# Make the project root importable regardless of the working directory.
base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(base_dir)
from request import get_employment_data
from datetime import date
from utils import get_label_data, getLogger
# Module-wide logger; the name DEBUG is historical — it logs at several levels.
DEBUG = getLogger()
def main(label, commencement_date, end_date):
    """Fetch employment data, filter it by label and date range, log the total.

    Returns an empty dict when the filtering step raises; otherwise returns
    None — the filtered values and their sum are only logged.
    """
    result = get_employment_data()
    try:
        value_within_date = get_label_data(result, label, commencement_date, end_date)
    except Exception as error:
        DEBUG.error("======Error======")
        if 'msg' in result:
            DEBUG.info(result['msg'])
        DEBUG.error("{}".format(error))
        return {}
    DEBUG.info(value_within_date)
    DEBUG.info(sum(value_within_date.values()))
if __name__ == '__main__':
    # Hard-coded label and query window; exit cleanly on Ctrl-C.
    try:
        main(label="c:36",
             commencement_date='2020-03-01',
             end_date='2021-05-01')
    except KeyboardInterrupt:
        exit()
|
[
"sys.path.append",
"os.path.abspath",
"utils.get_label_data",
"request.get_employment_data",
"utils.getLogger"
] |
[((107, 132), 'sys.path.append', 'sys.path.append', (['base_dir'], {}), '(base_dir)\n', (122, 132), False, 'import sys\n'), ((251, 262), 'utils.getLogger', 'getLogger', ([], {}), '()\n', (260, 262), False, 'from utils import get_label_data, getLogger\n'), ((320, 341), 'request.get_employment_data', 'get_employment_data', ([], {}), '()\n', (339, 341), False, 'from request import get_employment_data\n'), ((79, 104), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (94, 104), False, 'import os\n'), ((373, 431), 'utils.get_label_data', 'get_label_data', (['result', 'label', 'commencement_date', 'end_date'], {}), '(result, label, commencement_date, end_date)\n', (387, 431), False, 'from utils import get_label_data, getLogger\n')]
|
import pytest
from _pytest.monkeypatch import MonkeyPatch
from update_status_groups import update_status_groups
class Struct:
    """Lightweight attribute bag: promotes keyword arguments to attributes."""

    def __init__(self, **entries):
        for key, value in entries.items():
            setattr(self, key, value)
class Test():
    """Unit tests for ``update_status_groups`` with every external call
    (token fetch, group API, SQL query) replaced by a monkeypatched stub.
    """
    # Shared patcher instance; setattr targets are module-level names
    # inside the update_status_groups module.
    monkeypatch = MonkeyPatch()
    # Groups the mocked "get groups" API reports as already existing;
    # reassigned inside the test to drive the create vs. update paths.
    existing_groups = []
    # Mock API request for token.
    def mock_get_token(self, args):
        return 'mock token'
    # Mock API request for groups.
    def mock_get_groups(self, args):
        return {'user_groups': self.existing_groups}
    # Mock query results for members by state.
    def mock_get_psql_results(self, args):
        return [{'state': 'CO', 'member_ids': 'mock,member,ids'}]
    # Mock API request to update group; echoes back the group id it was given.
    def mock_update_group(self, args):
        return {'name': 'updated %s' % args.GROUP_ID, 'member_ids': args.MEMBER_IDS}
    # Mock API request to create group; echoes back the group name it was given.
    def mock_create_group(self, args):
        return {'name': 'created %s' % args.GROUP_NAME, 'member_ids': args.MEMBER_IDS}
    def test_update_status_groups(self):
        """Exercise the create path (no existing groups) and the update path
        (a matching group already exists) in one test.
        """
        Test.monkeypatch.setattr("update_status_groups.get_token", self.mock_get_token)
        Test.monkeypatch.setattr("update_status_groups.get_groups", self.mock_get_groups)
        Test.monkeypatch.setattr("update_status_groups.get_psql_results", self.mock_get_psql_results)
        Test.monkeypatch.setattr("update_status_groups.update_group", self.mock_update_group)
        Test.monkeypatch.setattr("update_status_groups.create_group", self.mock_create_group)
        # All args are mocked, but still required.
        args = {
            'DB_HOST': 'mock',
            'DB_PORT': 'mock',
            'DB_USER': 'mock',
            'DB_PASS': '<PASSWORD>',
            'DB_NAME': 'mock',
            'REACH_API_USER': 'mock',
            'REACH_API_PASS': 'mock',
            'STATUS_NAME': 'mock',
            'DB_QUERY': 'mock'
        }
        args = Struct(**args)
        # Test create.
        self.existing_groups = []
        result = update_status_groups(args)
        assert result == {
            'created': [
                {'name': 'created CO: mock', 'member_ids': 'mock,member,ids'}
            ],
            'updated': []
        }
        # Test update.
        self.existing_groups = [{'name': 'CO: mock', 'id': 'existing-group-id'}]
        result = update_status_groups(args)
        assert result == {
            'created': [],
            'updated': [
                {'name': 'updated existing-group-id', 'member_ids': 'mock,member,ids'}
            ]
        }
|
[
"_pytest.monkeypatch.MonkeyPatch",
"update_status_groups.update_status_groups"
] |
[((235, 248), '_pytest.monkeypatch.MonkeyPatch', 'MonkeyPatch', ([], {}), '()\n', (246, 248), False, 'from _pytest.monkeypatch import MonkeyPatch\n'), ((1984, 2010), 'update_status_groups.update_status_groups', 'update_status_groups', (['args'], {}), '(args)\n', (2004, 2010), False, 'from update_status_groups import update_status_groups\n'), ((2313, 2339), 'update_status_groups.update_status_groups', 'update_status_groups', (['args'], {}), '(args)\n', (2333, 2339), False, 'from update_status_groups import update_status_groups\n')]
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, <NAME>PORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch BERT model."""
from __future__ import absolute_import, division, print_function, unicode_literals
import copy
import json
import logging
import math
import os
import sys
from io import open
import torch
from torch import nn
from torch.nn import CrossEntropyLoss
from .file_utils import cached_path, WEIGHTS_NAME, CONFIG_NAME
# Module-level logger, following the standard library convention.
logger = logging.getLogger(__name__)
# Alternate on-disk name accepted for the model configuration file.
BERT_CONFIG_NAME = 'bert_config.json'
def prune_linear_layer(layer, index, dim=0):
    """Build a copy of ``layer`` keeping only the entries in ``index``.

    Args:
        layer: the ``nn.Linear`` to prune.
        index: 1-D LongTensor of the rows/columns to keep along ``dim``.
        dim: 0 prunes output features, 1 prunes input features.

    Returns:
        A new ``nn.Linear`` on the same device with ``requires_grad=True``.
        Used to remove attention heads.
    """
    index = index.to(layer.weight.device)
    kept_weight = layer.weight.index_select(dim, index).clone().detach()
    kept_bias = None
    if layer.bias is not None:
        # Bias only shrinks when output features (dim 0) are pruned.
        kept_bias = (layer.bias if dim == 1 else layer.bias[index]).clone().detach()
    new_size = list(layer.weight.size())
    new_size[dim] = len(index)
    pruned = nn.Linear(new_size[1], new_size[0], bias=layer.bias is not None).to(layer.weight.device)
    # Copy under requires_grad=False, then re-enable gradients.
    pruned.weight.requires_grad = False
    pruned.weight.copy_(kept_weight.contiguous())
    pruned.weight.requires_grad = True
    if kept_bias is not None:
        pruned.bias.requires_grad = False
        pruned.bias.copy_(kept_bias.contiguous())
        pruned.bias.requires_grad = True
    return pruned
def gelu(x):
    """Gaussian Error Linear Unit activation (exact erf formulation).

    For information: OpenAI GPT's gelu is a tanh approximation that gives
    slightly different results:
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    scaled = x / math.sqrt(2.0)
    return x * 0.5 * (1.0 + torch.erf(scaled))
def swish(x):
    """Swish / SiLU activation: ``x * sigmoid(x)``."""
    return torch.sigmoid(x) * x
ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish}
class BertConfig(object):
    """Configuration class to store the configuration of a `BertModel`."""

    def __init__(self,
                 vocab_size_or_config_json_file,
                 embedding_size=128,
                 hidden_size=768,
                 num_hidden_layers=12,
                 num_attention_heads=12,
                 intermediate_size=3072,
                 hidden_act="gelu",
                 hidden_dropout_prob=0.1,
                 attention_probs_dropout_prob=0.1,
                 max_position_embeddings=512,
                 initializer_range=0.02,
                 layer_norm_eps=1e-12):
        """Either load every field from a JSON file (str first argument) or
        take the vocabulary size plus keyword defaults (int first argument).
        """
        is_path = isinstance(vocab_size_or_config_json_file, str) or (
            sys.version_info[0] == 2
            and isinstance(vocab_size_or_config_json_file, unicode))
        if is_path:
            with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader:
                for key, value in json.loads(reader.read()).items():
                    self.__dict__[key] = value
        elif isinstance(vocab_size_or_config_json_file, int):
            self.vocab_size = vocab_size_or_config_json_file
            self.embedding_size = embedding_size
            self.hidden_size = hidden_size
            self.num_hidden_layers = num_hidden_layers
            self.num_attention_heads = num_attention_heads
            self.hidden_act = hidden_act
            self.intermediate_size = intermediate_size
            self.hidden_dropout_prob = hidden_dropout_prob
            self.attention_probs_dropout_prob = attention_probs_dropout_prob
            self.max_position_embeddings = max_position_embeddings
            self.initializer_range = initializer_range
            self.layer_norm_eps = layer_norm_eps
        else:
            raise ValueError("First argument must be either a vocabulary size (int)"
                             "or the path to a pretrained model config file (str)")

    @classmethod
    def from_dict(cls, json_object):
        """Constructs a `BertConfig` from a Python dictionary of parameters."""
        config = BertConfig(vocab_size_or_config_json_file=-1)
        config.__dict__.update(json_object)
        return config

    @classmethod
    def from_json_file(cls, json_file):
        """Constructs a `BertConfig` from a json file of parameters."""
        with open(json_file, "r", encoding='utf-8') as reader:
            contents = reader.read()
        return cls.from_dict(json.loads(contents))

    def __repr__(self):
        return str(self.to_json_string())

    def to_dict(self):
        """Serializes this instance to a Python dictionary (deep copy)."""
        return copy.deepcopy(self.__dict__)

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"

    def to_json_file(self, json_file_path):
        """Save this instance to a json file."""
        with open(json_file_path, "w", encoding='utf-8') as writer:
            writer.write(self.to_json_string())
# Prefer NVIDIA apex's fused CUDA LayerNorm when installed; otherwise fall
# back to the pure-PyTorch TF-style implementation defined below.
try:
    from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm
except ImportError:
    logger.info("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex .")
    class BertLayerNorm(nn.Module):
        def __init__(self, hidden_size, eps=1e-12):
            """Construct a layernorm module in the TF style (epsilon inside the square root).
            """
            super(BertLayerNorm, self).__init__()
            # Learnable affine parameters (gamma/beta in the paper).
            self.weight = nn.Parameter(torch.ones(hidden_size))
            self.bias = nn.Parameter(torch.zeros(hidden_size))
            self.variance_epsilon = eps

        def forward(self, x):
            # Normalise over the last dimension: subtract the mean, divide by
            # the standard deviation with epsilon inside the square root.
            u = x.mean(-1, keepdim=True)
            s = (x - u).pow(2).mean(-1, keepdim=True)
            x = (x - u) / torch.sqrt(s + self.variance_epsilon)
            return self.weight * x + self.bias
class BertEmbeddings(nn.Module):
    """Sum of word and learned position embeddings, followed by LayerNorm
    and dropout. (This variant has no token-type embeddings.)
    """

    def __init__(self, config):
        super(BertEmbeddings, self).__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.embedding_size, padding_idx=0)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.embedding_size)
        self.LayerNorm = BertLayerNorm(config.embedding_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, input_ids):
        seq_len = input_ids.size(1)
        # One position id per token, broadcast across the batch dimension.
        position_ids = torch.arange(seq_len, dtype=torch.long, device=input_ids.device)
        position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
        combined = self.word_embeddings(input_ids) + self.position_embeddings(position_ids)
        return self.dropout(self.LayerNorm(combined))
class BertSelfAttention(nn.Module):
    """Multi-head scaled dot-product self-attention.

    Projects hidden states to query/key/value, computes attention
    probabilities, and returns the attended context (optionally also the
    attention probabilities).
    """
    def __init__(self, config, output_attentions=False, keep_multihead_output=False):
        super(BertSelfAttention, self).__init__()
        # Hidden size must split evenly across heads.
        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                "The hidden size (%d) is not a multiple of the number of attention "
                "heads (%d)" % (config.hidden_size, config.num_attention_heads))
        self.output_attentions = output_attentions
        # When True, forward() stashes the per-head context and retains its
        # gradient (used for head-importance analysis).
        self.keep_multihead_output = keep_multihead_output
        self.multihead_output = None
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)
        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
    def transpose_for_scores(self, x):
        """Reshape (batch, seq, all_head) -> (batch, heads, seq, head_size)."""
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)
    def forward(self, hidden_states, attention_mask, head_mask=None):
        """Compute attention.

        Args:
            hidden_states: (batch, seq, hidden_size).
            attention_mask: additive mask (0 for keep, large negative for
                masked positions), broadcastable to the score tensor.
            head_mask: optional multiplicative mask over attention probs.
        Returns:
            context tensor, or (attention_probs, context) when
            output_attentions is set.
        """
        mixed_query_layer = self.query(hidden_states)
        mixed_key_layer = self.key(hidden_states)
        mixed_value_layer = self.value(hidden_states)
        query_layer = self.transpose_for_scores(mixed_query_layer)
        key_layer = self.transpose_for_scores(mixed_key_layer)
        value_layer = self.transpose_for_scores(mixed_value_layer)
        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
        # Scale by sqrt(d_k) to keep score variance independent of head size.
        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        # Apply the attention mask is (precomputed for all layers in BertModel forward() function)
        attention_scores = attention_scores + attention_mask
        # Normalize the attention scores to probabilities.
        attention_probs = nn.Softmax(dim=-1)(attention_scores)
        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)
        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask
        context_layer = torch.matmul(attention_probs, value_layer)
        if self.keep_multihead_output:
            self.multihead_output = context_layer
            self.multihead_output.retain_grad()
        # Merge heads back: (batch, heads, seq, head) -> (batch, seq, all_head).
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(*new_context_layer_shape)
        if self.output_attentions:
            return attention_probs, context_layer
        return context_layer
class BertSelfOutput(nn.Module):
    """Output projection after self-attention: dense -> dropout -> residual
    add -> LayerNorm. Attribute names match pretrained checkpoint keys."""

    def __init__(self, config):
        super(BertSelfOutput, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        """Project the attention output and add the residual connection."""
        projected = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(projected + input_tensor)
class BertAttention(nn.Module):
    """Self-attention plus its output projection, with support for pruning
    individual attention heads."""
    def __init__(self, config, output_attentions=False, keep_multihead_output=False):
        super(BertAttention, self).__init__()
        self.output_attentions = output_attentions
        self.self = BertSelfAttention(config, output_attentions=output_attentions,
                                              keep_multihead_output=keep_multihead_output)
        self.output = BertSelfOutput(config)
    def prune_heads(self, heads):
        """Remove the given attention heads from the Q/K/V/output projections.

        Args:
            heads: iterable of head indices to prune.
        """
        if len(heads) == 0:
            return
        # Build a boolean mask over the flattened (head, head_size) dimension
        # selecting the columns of the heads we keep.
        mask = torch.ones(self.self.num_attention_heads, self.self.attention_head_size)
        for head in heads:
            mask[head] = 0
        mask = mask.view(-1).contiguous().eq(1)
        index = torch.arange(len(mask))[mask].long()
        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        # Output projection is pruned along its input dimension (dim=1).
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
        # Update hyper params
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
    def forward(self, input_tensor, attention_mask, head_mask=None):
        """Run self-attention then the residual output block."""
        self_output = self.self(input_tensor, attention_mask, head_mask)
        if self.output_attentions:
            attentions, self_output = self_output
        attention_output = self.output(self_output, input_tensor)
        if self.output_attentions:
            return attentions, attention_output
        return attention_output
class BertIntermediate(nn.Module):
    """Feed-forward expansion: hidden_size -> intermediate_size with a
    configurable activation (string name resolved via ACT2FN, or a callable)."""

    def __init__(self, config):
        super(BertIntermediate, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        # hidden_act may be a name ("gelu", ...) or an actual callable;
        # the unicode branch keeps Python 2 string names working.
        act_is_name = isinstance(config.hidden_act, str) or (
            sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode))
        self.intermediate_act_fn = ACT2FN[config.hidden_act] if act_is_name else config.hidden_act

    def forward(self, hidden_states):
        """Apply the expansion projection followed by the activation."""
        return self.intermediate_act_fn(self.dense(hidden_states))
class BertOutput(nn.Module):
    """Feed-forward contraction: intermediate_size -> hidden_size, followed
    by dropout and a residual LayerNorm."""

    def __init__(self, config):
        super(BertOutput, self).__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        """Project back down and add the residual connection."""
        contracted = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(contracted + input_tensor)
class BertLayer(nn.Module):
    """One Transformer encoder block: self-attention followed by the
    position-wise feed-forward network, each with its own residual path."""

    def __init__(self, config, output_attentions=False, keep_multihead_output=False):
        super(BertLayer, self).__init__()
        self.output_attentions = output_attentions
        self.attention = BertAttention(config, output_attentions=output_attentions,
                                       keep_multihead_output=keep_multihead_output)
        self.intermediate = BertIntermediate(config)
        self.output = BertOutput(config)

    def forward(self, hidden_states, attention_mask, head_mask=None):
        """Run the block; returns (attentions, output) when configured."""
        attn_result = self.attention(hidden_states, attention_mask, head_mask)
        attentions = None
        if self.output_attentions:
            attentions, attn_result = attn_result
        layer_output = self.output(self.intermediate(attn_result), attn_result)
        if self.output_attentions:
            return attentions, layer_output
        return layer_output
'''
class BertEncoder(nn.Module):
def __init__(self, config, output_attentions=False, keep_multihead_output=False):
super(BertEncoder, self).__init__()
self.config = config
self.output_attentions = output_attentions
self.layer = BertLayer(config, output_attentions=output_attentions,
keep_multihead_output=keep_multihead_output)
#self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.num_hidden_layers)])
if config.embedding_size != config.hidden_size:
self.embedding_to_hidden = nn.Linear(config.embedding_size, config.hidden_size)
def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True, head_mask=None):
if self.config.embedding_size != self.config.hidden_size: # embedding to hidden
hidden_states = self.embedding_to_hidden(hidden_states)
all_encoder_layers = []
all_attentions = []
for i in range(self.config.num_hidden_layers) :
hidden_states = self.layer(hidden_states, attention_mask, head_mask[i])
if self.output_attentions:
attentions, hidden_states = hidden_states
all_attentions.append(attentions)
if output_all_encoded_layers:
all_encoder_layers.append(hidden_states)
if not output_all_encoded_layers:
all_encoder_layers.append(hidden_states)
if self.output_attentions:
return all_attentions, all_encoder_layers
return all_encoder_layers
'''
class BertEncoder(nn.Module):
    """Stack of BertLayer blocks, with an optional embedding->hidden
    projection when the embedding size differs from the hidden size."""
    def __init__(self, config, output_attentions=False, keep_multihead_output=False):
        super(BertEncoder, self).__init__()
        self.config = config
        self.output_attentions = output_attentions
        # One template layer, deep-copied num_hidden_layers times (each copy
        # gets independent parameters).
        layer = BertLayer(config, output_attentions=output_attentions,
                                     keep_multihead_output=keep_multihead_output)
        self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.num_hidden_layers)])
        if config.embedding_size != config.hidden_size:
            self.embedding_to_hidden = nn.Linear(config.embedding_size, config.hidden_size)
    def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True, head_mask=None):
        """Run all layers.

        Args:
            hidden_states: embedded input, (batch, seq, embedding_size).
            attention_mask: precomputed additive attention mask.
            output_all_encoded_layers: if True, collect every layer's output.
            head_mask: per-layer head masks, indexable by layer number.
        Returns:
            list of encoded layers (optionally preceded by attention maps).
        """
        if self.config.embedding_size != self.config.hidden_size: # embedding to hidden
            hidden_states = self.embedding_to_hidden(hidden_states)
        all_encoder_layers = []
        all_attentions = []
        for i, layer_module in enumerate(self.layer):
            hidden_states = layer_module(hidden_states, attention_mask, head_mask[i])
            if self.output_attentions:
                attentions, hidden_states = hidden_states
                all_attentions.append(attentions)
            if output_all_encoded_layers:
                all_encoder_layers.append(hidden_states)
        if not output_all_encoded_layers:
            all_encoder_layers.append(hidden_states)
        if self.output_attentions:
            return all_attentions, all_encoder_layers
        return all_encoder_layers
class BertPooler(nn.Module):
    """Pools a sequence representation by passing the first token's hidden
    state through a dense layer with tanh activation."""

    def __init__(self, config):
        super(BertPooler, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        """Return tanh(W * h_[CLS] + b) for each sequence in the batch."""
        # "Pooling" here is simply selecting the first-token hidden state.
        cls_state = hidden_states[:, 0]
        return self.activation(self.dense(cls_state))
class BertPredictionHeadTransform(nn.Module):
    """Transform before the LM decoder: dense projection down to the
    embedding size, activation, then LayerNorm."""

    def __init__(self, config):
        super(BertPredictionHeadTransform, self).__init__()
        self.dense = nn.Linear(config.hidden_size, config.embedding_size)
        # hidden_act may be a string name (resolved via ACT2FN) or a callable;
        # the unicode check keeps Python 2 string names working.
        act_is_name = isinstance(config.hidden_act, str) or (
            sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode))
        self.transform_act_fn = ACT2FN[config.hidden_act] if act_is_name else config.hidden_act
        self.LayerNorm = BertLayerNorm(config.embedding_size, eps=config.layer_norm_eps)

    def forward(self, hidden_states):
        """Apply dense -> activation -> LayerNorm."""
        return self.LayerNorm(self.transform_act_fn(self.dense(hidden_states)))
class BertLMPredictionHead(nn.Module):
    """Masked-LM head: transform hidden states, then decode to vocabulary
    logits using the (tied) input embedding matrix plus a trainable bias."""

    def __init__(self, config, bert_model_embedding_weights):
        super(BertLMPredictionHead, self).__init__()
        self.transform = BertPredictionHeadTransform(config)
        vocab_size = bert_model_embedding_weights.size(0)
        embed_dim = bert_model_embedding_weights.size(1)
        # The decoder shares its weight with the input embeddings, so only a
        # per-token output bias is newly trained here.
        self.decoder = nn.Linear(embed_dim, vocab_size, bias=False)
        self.decoder.weight = bert_model_embedding_weights
        self.bias = nn.Parameter(torch.zeros(vocab_size))

    def forward(self, hidden_states):
        """Return vocabulary logits for every position."""
        return self.decoder(self.transform(hidden_states)) + self.bias
class BertPreTrainingHeads(nn.Module):
    """Container for the pre-training heads (here only the masked-LM head)."""

    def __init__(self, config, bert_model_embedding_weights):
        super(BertPreTrainingHeads, self).__init__()
        self.predictions = BertLMPredictionHead(config, bert_model_embedding_weights)

    def forward(self, sequence_output):
        """Return masked-LM prediction scores for the encoder output."""
        return self.predictions(sequence_output)
class BertPreTrainedModel(nn.Module):
    """ An abstract class to handle weights initialization and
        a simple interface for downloading and loading pretrained models.
    """
    def __init__(self, config, *inputs, **kwargs):
        super(BertPreTrainedModel, self).__init__()
        if not isinstance(config, BertConfig):
            raise ValueError(
                "Parameter config in `{}(config)` should be an instance of class `BertConfig`. "
                "To create a model from a Google pretrained model use "
                "`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format(
                    self.__class__.__name__, self.__class__.__name__
                ))
        self.config = config
    def init_bert_weights(self, module):
        """ Initialize the weights.

        Intended to be used with Module.apply(); handles Linear/Embedding
        (normal init) and LayerNorm (ones/zeros) submodules.
        """
        if isinstance(module, (nn.Linear, nn.Embedding)):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
        elif isinstance(module, BertLayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        if isinstance(module, nn.Linear) and module.bias is not None:
            module.bias.data.zero_()
    @classmethod
    def from_pretrained(cls, model_path, *inputs, **kwargs):
        """Instantiate the model from a config dir and a provided state_dict.

        Args:
            model_path: directory containing the BERT config JSON.
            **kwargs: must include `state_dict` (a PyTorch state dict);
                remaining kwargs are forwarded to the model constructor.
        """
        # NOTE(review): if the 'state_dict' kwarg is omitted this method
        # crashes below on `state_dict.keys()` -- there is no fallback that
        # loads weights from model_path.
        state_dict = kwargs.get('state_dict', None)
        kwargs.pop('state_dict', None)
        config = BertConfig.from_json_file(os.path.join(model_path, BERT_CONFIG_NAME))
        logger.info("Model config {}".format(config))
        # Instantiate model.
        model = cls(config, *inputs, **kwargs)
        # Load from a PyTorch state_dict
        # Rename legacy TF-style parameter names (gamma/beta) to PyTorch names.
        old_keys = []
        new_keys = []
        for key in state_dict.keys():
            new_key = None
            if 'gamma' in key:
                new_key = key.replace('gamma', 'weight')
            if 'beta' in key:
                new_key = key.replace('beta', 'bias')
            if new_key:
                old_keys.append(key)
                new_keys.append(new_key)
        for old_key, new_key in zip(old_keys, new_keys):
            state_dict[new_key] = state_dict.pop(old_key)
        missing_keys = []
        unexpected_keys = []
        error_msgs = []
        # copy state_dict so _load_from_state_dict can modify it
        metadata = getattr(state_dict, '_metadata', None)
        state_dict = state_dict.copy()
        if metadata is not None:
            state_dict._metadata = metadata
        def load(module, prefix=''):
            # Recursively load each submodule with its fully-qualified prefix.
            local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
            module._load_from_state_dict(
                state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs)
            for name, child in module._modules.items():
                if child is not None:
                    load(child, prefix + name + '.')
        start_prefix = ''
        # If we load a bare encoder from a full-model checkpoint, strip the
        # leading 'bert.' prefix from the checkpoint keys.
        if not hasattr(model, 'bert') and any(s.startswith('bert.') for s in state_dict.keys()):
            start_prefix = 'bert.'
        load(model, prefix=start_prefix)
        if len(missing_keys) > 0:
            logger.info("Weights of {} not initialized from pretrained model: {}".format(
                model.__class__.__name__, missing_keys))
        if len(unexpected_keys) > 0:
            logger.info("Weights from pretrained model not used in {}: {}".format(
                model.__class__.__name__, unexpected_keys))
        if len(error_msgs) > 0:
            raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
                model.__class__.__name__, "\n\t".join(error_msgs)))
        return model
class BertModel(BertPreTrainedModel):
    """Bare BERT encoder: embeddings + Transformer layer stack.

    Supports head pruning, optional attention-map output, and per-layer
    head masks.
    """
    def __init__(self, config, output_attentions=False, keep_multihead_output=False):
        super(BertModel, self).__init__(config)
        self.output_attentions = output_attentions
        self.embeddings = BertEmbeddings(config)
        self.encoder = BertEncoder(config, output_attentions=output_attentions,
                                   keep_multihead_output=keep_multihead_output)
        self.apply(self.init_bert_weights)

    def prune_heads(self, heads_to_prune):
        """ Prunes heads of the model.
            heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    def get_multihead_outputs(self):
        """ Gather all multi-head outputs.
            Return: list (layers) of multihead module outputs with gradients
        """
        return [layer.attention.self.multihead_output for layer in self.encoder.layer]

    def forward(self, input_ids, attention_mask=None, output_all_encoded_layers=True, head_mask=None):
        """Encode input ids.

        Args:
            input_ids: (batch, seq) token ids.
            attention_mask: optional (batch, seq) mask, 1 = attend, 0 = mask.
            output_all_encoded_layers: return every layer's output vs last only.
            head_mask: optional head mask of shape [num_heads] or
                [num_hidden_layers, num_heads].
        Returns:
            encoded layer(s), optionally preceded by attention maps.
        """
        if attention_mask is None:
            attention_mask = torch.ones_like(input_ids)
        # We create a 3D attention mask from a 2D tensor mask.
        # Sizes are [batch_size, 1, 1, to_seq_length]
        # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
        # this attention mask is more simple than the triangular masking of causal attention
        # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
        extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
        # masked positions, this operation will create a tensor which is 0.0 for
        # positions we want to attend and -10000.0 for masked positions.
        # Since we are adding it to the raw scores before the softmax, this is
        # effectively the same as removing these entirely.
        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        if head_mask is not None:
            if head_mask.dim() == 1:
                head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
                # BUG FIX: the original called expand_as() with integer sizes;
                # expand_as() takes a tensor, so this raised at runtime.
                # expand() is the correct call for explicit sizes.
                head_mask = head_mask.expand(self.config.num_hidden_layers, -1, -1, -1, -1)
            elif head_mask.dim() == 2:
                head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1)  # We can specify head_mask for each layer
            # BUG FIX: dtype conversion was only applied in the 2-dim branch;
            # apply it for both shapes (fp16 compatibility).
            head_mask = head_mask.to(dtype=next(self.parameters()).dtype)
        else:
            head_mask = [None] * self.config.num_hidden_layers
        embedding_output = self.embeddings(input_ids)
        encoded_layers = self.encoder(embedding_output,
                                      extended_attention_mask,
                                      output_all_encoded_layers=output_all_encoded_layers,
                                      head_mask=head_mask)
        if self.output_attentions:
            all_attentions, encoded_layers = encoded_layers
        sequence_output = encoded_layers[-1]
        if not output_all_encoded_layers:
            encoded_layers = encoded_layers[-1]
        if self.output_attentions:
            return all_attentions, encoded_layers
        return encoded_layers
class BertForPreTraining(BertPreTrainedModel):
    """BERT with the masked-LM pre-training head on top.

    The decoder weight of the LM head is tied to the input word embeddings.
    """

    def __init__(self, config, output_attentions=False, keep_multihead_output=False):
        super(BertForPreTraining, self).__init__(config)
        self.output_attentions = output_attentions
        self.bert = BertModel(config, output_attentions=output_attentions,
                              keep_multihead_output=keep_multihead_output)
        self.cls = BertPreTrainingHeads(config, self.bert.embeddings.word_embeddings.weight)
        self.apply(self.init_bert_weights)

    def forward(self, input_ids, attention_mask=None, masked_lm_labels=None, head_mask=None):
        """Return the masked-LM loss when labels are given, otherwise the
        prediction scores (preceded by attentions when configured)."""
        outputs = self.bert(input_ids, attention_mask,
                            output_all_encoded_layers=False, head_mask=head_mask)
        all_attentions = None
        if self.output_attentions:
            all_attentions, sequence_output = outputs
        else:
            sequence_output = outputs
        prediction_scores = self.cls(sequence_output)
        if masked_lm_labels is not None:
            # ignore_index=-1: label -1 marks unmasked positions to skip.
            loss_fct = CrossEntropyLoss(ignore_index=-1)
            return loss_fct(prediction_scores.view(-1, self.config.vocab_size),
                            masked_lm_labels.view(-1))
        if self.output_attentions:
            return all_attentions, prediction_scores
        return prediction_scores
|
[
"torch.nn.Dropout",
"torch.sqrt",
"torch.nn.Embedding",
"torch.nn.Softmax",
"torch.arange",
"os.path.join",
"torch.ones",
"json.loads",
"io.open",
"torch.zeros",
"torch.nn.Linear",
"torch.matmul",
"copy.deepcopy",
"math.sqrt",
"torch.nn.Tanh",
"apex.normalization.fused_layer_norm.FusedLayerNorm",
"torch.ones_like",
"torch.nn.CrossEntropyLoss",
"torch.sigmoid",
"logging.getLogger"
] |
[((1055, 1082), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1072, 1082), False, 'import logging\n'), ((2532, 2548), 'torch.sigmoid', 'torch.sigmoid', (['x'], {}), '(x)\n', (2545, 2548), False, 'import torch\n'), ((5329, 5357), 'copy.deepcopy', 'copy.deepcopy', (['self.__dict__'], {}), '(self.__dict__)\n', (5342, 5357), False, 'import copy\n'), ((6857, 6926), 'torch.nn.Embedding', 'nn.Embedding', (['config.vocab_size', 'config.embedding_size'], {'padding_idx': '(0)'}), '(config.vocab_size, config.embedding_size, padding_idx=0)\n', (6869, 6926), False, 'from torch import nn\n'), ((6962, 7029), 'torch.nn.Embedding', 'nn.Embedding', (['config.max_position_embeddings', 'config.embedding_size'], {}), '(config.max_position_embeddings, config.embedding_size)\n', (6974, 7029), False, 'from torch import nn\n'), ((7056, 7119), 'apex.normalization.fused_layer_norm.FusedLayerNorm', 'BertLayerNorm', (['config.embedding_size'], {'eps': 'config.layer_norm_eps'}), '(config.embedding_size, eps=config.layer_norm_eps)\n', (7069, 7119), True, 'from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm\n'), ((7143, 7181), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (7153, 7181), False, 'from torch import nn\n'), ((7279, 7346), 'torch.arange', 'torch.arange', (['seq_length'], {'dtype': 'torch.long', 'device': 'input_ids.device'}), '(seq_length, dtype=torch.long, device=input_ids.device)\n', (7291, 7346), False, 'import torch\n'), ((8563, 8612), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {}), '(config.hidden_size, self.all_head_size)\n', (8572, 8612), False, 'from torch import nn\n'), ((8632, 8681), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {}), '(config.hidden_size, self.all_head_size)\n', (8641, 8681), False, 'from torch import nn\n'), ((8703, 8752), 'torch.nn.Linear', 'nn.Linear', 
(['config.hidden_size', 'self.all_head_size'], {}), '(config.hidden_size, self.all_head_size)\n', (8712, 8752), False, 'from torch import nn\n'), ((8777, 8824), 'torch.nn.Dropout', 'nn.Dropout', (['config.attention_probs_dropout_prob'], {}), '(config.attention_probs_dropout_prob)\n', (8787, 8824), False, 'from torch import nn\n'), ((10361, 10403), 'torch.matmul', 'torch.matmul', (['attention_probs', 'value_layer'], {}), '(attention_probs, value_layer)\n', (10373, 10403), False, 'import torch\n'), ((11015, 11064), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.hidden_size'], {}), '(config.hidden_size, config.hidden_size)\n', (11024, 11064), False, 'from torch import nn\n'), ((11090, 11150), 'apex.normalization.fused_layer_norm.FusedLayerNorm', 'BertLayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, eps=config.layer_norm_eps)\n', (11103, 11150), True, 'from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm\n'), ((11174, 11212), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (11184, 11212), False, 'from torch import nn\n'), ((11998, 12070), 'torch.ones', 'torch.ones', (['self.self.num_attention_heads', 'self.self.attention_head_size'], {}), '(self.self.num_attention_heads, self.self.attention_head_size)\n', (12008, 12070), False, 'import torch\n'), ((13296, 13351), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.intermediate_size'], {}), '(config.hidden_size, config.intermediate_size)\n', (13305, 13351), False, 'from torch import nn\n'), ((13917, 13972), 'torch.nn.Linear', 'nn.Linear', (['config.intermediate_size', 'config.hidden_size'], {}), '(config.intermediate_size, config.hidden_size)\n', (13926, 13972), False, 'from torch import nn\n'), ((13998, 14058), 'apex.normalization.fused_layer_norm.FusedLayerNorm', 'BertLayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, 
eps=config.layer_norm_eps)\n', (14011, 14058), True, 'from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm\n'), ((14082, 14120), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (14092, 14120), False, 'from torch import nn\n'), ((18622, 18671), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.hidden_size'], {}), '(config.hidden_size, config.hidden_size)\n', (18631, 18671), False, 'from torch import nn\n'), ((18698, 18707), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (18705, 18707), False, 'from torch import nn\n'), ((19203, 19255), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.embedding_size'], {}), '(config.hidden_size, config.embedding_size)\n', (19212, 19255), False, 'from torch import nn\n'), ((19531, 19594), 'apex.normalization.fused_layer_norm.FusedLayerNorm', 'BertLayerNorm', (['config.embedding_size'], {'eps': 'config.layer_norm_eps'}), '(config.embedding_size, eps=config.layer_norm_eps)\n', (19544, 19594), True, 'from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm\n'), ((1713, 1777), 'torch.nn.Linear', 'nn.Linear', (['new_size[1]', 'new_size[0]'], {'bias': '(layer.bias is not None)'}), '(new_size[1], new_size[0], bias=layer.bias is not None)\n', (1722, 1777), False, 'from torch import nn\n'), ((5028, 5066), 'io.open', 'open', (['json_file', '"""r"""'], {'encoding': '"""utf-8"""'}), "(json_file, 'r', encoding='utf-8')\n", (5032, 5066), False, 'from io import open\n'), ((5140, 5156), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (5150, 5156), False, 'import json\n'), ((5651, 5694), 'io.open', 'open', (['json_file_path', '"""w"""'], {'encoding': '"""utf-8"""'}), "(json_file_path, 'w', encoding='utf-8')\n", (5655, 5694), False, 'from io import open\n'), ((9672, 9707), 'math.sqrt', 'math.sqrt', (['self.attention_head_size'], {}), '(self.attention_head_size)\n', (9681, 9707), False, 'import math\n'), ((9954, 
9972), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (9964, 9972), False, 'from torch import nn\n'), ((17517, 17569), 'torch.nn.Linear', 'nn.Linear', (['config.embedding_size', 'config.hidden_size'], {}), '(config.embedding_size, config.hidden_size)\n', (17526, 17569), False, 'from torch import nn\n'), ((22636, 22678), 'os.path.join', 'os.path.join', (['model_path', 'BERT_CONFIG_NAME'], {}), '(model_path, BERT_CONFIG_NAME)\n', (22648, 22678), False, 'import os\n'), ((26068, 26094), 'torch.ones_like', 'torch.ones_like', (['input_ids'], {}), '(input_ids)\n', (26083, 26094), False, 'import torch\n'), ((29910, 29943), 'torch.nn.CrossEntropyLoss', 'CrossEntropyLoss', ([], {'ignore_index': '(-1)'}), '(ignore_index=-1)\n', (29926, 29943), False, 'from torch.nn import CrossEntropyLoss\n'), ((3441, 3500), 'io.open', 'open', (['vocab_size_or_config_json_file', '"""r"""'], {'encoding': '"""utf-8"""'}), "(vocab_size_or_config_json_file, 'r', encoding='utf-8')\n", (3445, 3500), False, 'from io import open\n'), ((17357, 17377), 'copy.deepcopy', 'copy.deepcopy', (['layer'], {}), '(layer)\n', (17370, 17377), False, 'import copy\n'), ((2484, 2498), 'math.sqrt', 'math.sqrt', (['(2.0)'], {}), '(2.0)\n', (2493, 2498), False, 'import math\n'), ((6261, 6284), 'torch.ones', 'torch.ones', (['hidden_size'], {}), '(hidden_size)\n', (6271, 6284), False, 'import torch\n'), ((6323, 6347), 'torch.zeros', 'torch.zeros', (['hidden_size'], {}), '(hidden_size)\n', (6334, 6347), False, 'import torch\n'), ((6541, 6578), 'torch.sqrt', 'torch.sqrt', (['(s + self.variance_epsilon)'], {}), '(s + self.variance_epsilon)\n', (6551, 6578), False, 'import torch\n')]
|
# Generated by Django 3.2.3 on 2021-05-21 12:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter NavigationBar.name to a CharField with max_length=255."""

    dependencies = [('cmsmenus', '0006_auto_20210507_1618')]

    operations = [
        migrations.AlterField(
            model_name='navigationbar',
            name='name',
            field=models.CharField(max_length=255),
        ),
    ]
|
[
"django.db.models.CharField"
] |
[((342, 374), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (358, 374), False, 'from django.db import migrations, models\n')]
|
from .market import Market
from . import position
from ..db.models import TradingOrder
import logging
logger = logging.getLogger(__name__)
class MarketSimulator(Market):
    """Wrapper for market that allows simulating simple buys and sells
    against a virtual base/quote balance.

    Limit orders are persisted as TradingOrder rows (marked "simulated");
    market orders only adjust balances and log the fill.
    """

    def __init__(self, exchange, base_currency, quote_currency, quote_currency_balance, strategy):
        super().__init__(exchange, base_currency, quote_currency, strategy)
        self.starting_balance = quote_currency_balance
        self.quote_balance = quote_currency_balance
        self.base_balance = 0
        self.simulating = False

    def __del__(self):
        # BUG FIX: self.session only exists after add_session() is called;
        # the original unconditionally accessed it, raising AttributeError
        # during interpreter teardown when no session was attached.
        session = getattr(self, 'session', None)
        if session is not None:
            session.close()

    def add_session(self, session):
        # `session` is a session factory (e.g. SQLAlchemy sessionmaker).
        self.session = session()

    def _record_order(self, side, quantity, price):
        """Persist one simulated order row (shared by limit_buy/limit_sell)."""
        order = TradingOrder(
            exchange=self.exchange.id,
            strategy_id=self.strategy.strategy_id,
            run_key=self.strategy.run_key,
            pair=self.analysis_pair,
            position=side,
            amount=quantity,
            price=price,
            simulated="simulated"
        )
        self.session.add(order)
        self.session.commit()

    def _log_fill(self, side, quantity, price):
        """Log an executed simulated fill and the resulting balances."""
        # Lazy %-style args avoid string building when the level is disabled.
        logger.info("Executed %s simulation of %s %s for %s %s",
                    side, quantity, self.base_currency, price, self.quote_currency)
        logger.info("%s balance: %s", self.quote_currency, self.quote_balance)
        logger.info("%s balance: %s", self.base_currency, self.base_balance)

    def limit_buy(self, quantity, price):
        """Simulate a limit buy of `quantity` base currency at `price`."""
        if self.quote_balance >= quantity * price:
            self.quote_balance -= quantity * price
            self.base_balance += quantity
            self._record_order('buy', quantity, price)
            self._log_fill('buy', quantity, price)
        else:
            logger.info("Insufficient balance for simulation buy")

    def limit_sell(self, quantity, price):
        """Simulate a limit sell of `quantity` base currency at `price`."""
        if self.base_balance >= quantity:
            self.base_balance -= quantity
            self.quote_balance += quantity * price
            self._record_order('sell', quantity, price)
            self._log_fill('sell', quantity, price)
        else:
            logger.info("Insufficient balance for simulation sell")

    def market_buy(self, quantity):
        """Simulate a market buy at the current ask price."""
        # BUG FIX: the original called get_ask_price() several times per fill;
        # in live mode each call fetches the ticker and could return different
        # prices for the balance check, the fill, and the log line.
        price = self.get_ask_price()
        if self.quote_balance >= quantity * price:
            self.quote_balance -= quantity * price
            self.base_balance += quantity
            self._log_fill('buy', quantity, price)
        else:
            logger.info("Insufficient balance for simulation buy")

    def market_sell(self, quantity):
        """Simulate a market sell at the current bid price."""
        price = self.get_bid_price()
        if self.base_balance >= quantity:
            self.base_balance -= quantity
            self.quote_balance += quantity * price
            self._log_fill('sell', quantity, price)
        else:
            logger.info("Insufficient balance for simulation sell")

    def get_ask_price(self):
        """Get ask price for simulation."""
        if not self.simulating:
            # Operating on live data: use the exchange's actual ask.
            return self.exchange.fetchTicker(self.analysis_pair)['ask']
        # Operating on historical data: use the candle close
        # (index 4 of the 5m OHLCV candle).
        return self.latest_candle['5m'][4]

    def get_bid_price(self):
        """Get bid price for simulation."""
        if not self.simulating:
            # Operating on live data: use the exchange's actual bid.
            return self.exchange.fetchTicker(self.analysis_pair)['bid']
        # Operating on historical data: use the candle close.
        return self.latest_candle['5m'][4]

    def get_wallet_balance(self):
        """Return the remaining quote-currency balance."""
        return self.quote_balance
def open_long_position_simulation(market, amount, price, fixed_stoploss, trailing_stoploss_percent, profit_target_percent):
    """Create, open, and return a simulated long position on *market*."""
    sim_position = LongPositionSimulator(market, amount, price, fixed_stoploss,
                                          trailing_stoploss_percent, profit_target_percent)
    sim_position.open()
    return sim_position
def open_short_position_simulation(market, amount, price):
    """Create, open, and return a simulated short position on *market*."""
    logger.info("Opening simulated short position")
    sim_position = ShortPositionSimulator(market, amount, price)
    sim_position.open()
    return sim_position
# TODO: %m interval also hardcoded here, search the project for 5m
class LongPositionSimulator(position.LongPosition):
    """Simulated long position. Overrides the functionality of creating an actual order to use the MarketSimulators balance and calculations"""
    def __init__(self, market, amount, price, fixed_stoploss, trailing_stoploss_percent, profit_target_percent):
        super().__init__(market, amount, price, fixed_stoploss, trailing_stoploss_percent, profit_target_percent)
    # TODO: 5m interval is hard coded here
    def liquidate_position(self):
        """Will use this method to actually create the order that liquidates the position"""
        logger.info("Closing simulated long position")
        # Close the long by opening an offsetting simulated short for the same
        # amount at candle index 3 (presumably the low of the 5m OHLCV candle;
        # get_ask/bid_price use index 4 -- TODO confirm the intended field).
        open_short_position_simulation(self.market, self.amount, self.market.latest_candle['5m'][3])
        self.is_open = False
    def open(self):
        # Opening routes through the simulator's limit buy (adjusts balances,
        # records a simulated order).
        self.market.limit_buy(self.amount, self.price)
        self.is_open = True
    def update(self, sell=False):
        """Use this method to trigger position to check if profit target has been met, and re-set trailiing stop loss"""
        # Liquidate when the current candle value breaches the trailing or
        # fixed stoploss, reaches the profit target, or a sell is forced.
        if self.market.latest_candle['5m'][3] < self.trailing_stoploss or \
                self.market.latest_candle['5m'][3] < self.fixed_stoploss or \
                self.market.latest_candle['5m'][3] >= self.profit_target or \
                sell is True:  # check price against last calculated trailing stoploss
            self.liquidate_position()
        # re-calculate trailing stoploss
        self.trailing_stoploss = self.calculate_trailing_stoploss()
class ShortPositionSimulator(position.ShortPosition):
    """Simulated short position. Overrides the functionality of creating an actual order to use the MarketSimulators balance and calculations"""
    def __init__(self, market, amount, price):
        # No extra state: only order placement differs from the real ShortPosition.
        super().__init__(market, amount, price)
    def open(self):
        # Simulated fill: the market simulator adjusts balances instead of an exchange.
        # NOTE(review): unlike LongPositionSimulator.open(), is_open is not set here —
        # possibly truncated source; confirm against the full file.
        self.market.limit_sell(self.amount, self.price)
|
[
"logging.getLogger"
] |
[((112, 139), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (129, 139), False, 'import logging\n')]
|
import lyricsgenius
# wrapper for lyrics which remembers last search
# if there are several providers implemented this class should be inherited
class geniuslyrics:
    """
    Thin caching wrapper around the Genius lyrics API.

    Remembers the last artist/title searched so repeated lookups do not
    trigger redundant API calls.  If several lyrics providers are ever
    implemented, this class should become the base they inherit from.
    """
    def __init__(self, _token="<KEY>", _timeout=15, _retries=3, _verbose=False):
        # The underlying API session; all network access goes through it.
        self.__session = lyricsgenius.Genius(_token, timeout=_timeout, retries=_retries, verbose=_verbose)
        self.__artistname = None      # last artist name searched
        self.__artistinstance = None  # Genius artist object for __artistname
        self.__titlename = None       # last title searched
        self.__titleinstance = None   # Genius song object for __titlename
        self.__lyrics = None          # lyrics text of __titleinstance
    def get_session(self):
        return self.__session
    def search_artist(self, artist):
        """Look up *artist*, skipping the API call when it is already cached."""
        # BUGFIX: was `artist is not self.__artistname` — identity comparison on
        # strings is interning-dependent and unreliable; compare by value.
        if artist != self.__artistname:
            self.__artistname = artist
            self.__artistinstance = self.__session.search_artist(artist, max_songs=1)
    def get_artist(self):
        return self.__artistname
    def get_artistinstance(self):
        return self.__artistinstance
    def search_title(self, title):
        """Look up *title* for the cached artist and refresh the cached lyrics.

        Requires a prior successful search_artist(); without an artist the
        cached song and lyrics are cleared.
        """
        if self.__artistinstance is None:
            # No artist context to search in — drop any stale song/lyrics.
            self.__titleinstance = None
            self.__set_lyrics()
        elif title != self.__titlename:
            # BUGFIX: was `title is not self.__titlename` (string identity), and a
            # repeated search of the same title wrongly fell into the reset branch,
            # discarding the cached lyrics.  Equal titles now keep the cache.
            self.__titlename = title
            self.__titleinstance = self.__artistinstance.song(title)
            self.__set_lyrics()
    def get_title(self):
        return self.__titlename
    def __set_lyrics(self):
        # Mirror the current title instance into the cached lyrics text.
        if self.__titleinstance is not None:
            self.__lyrics = self.__titleinstance.lyrics
        else:
            self.__lyrics = None
    def get_lyrics(self):
        return self.__lyrics
    def search_lyrics(self, title, artist):
        """Convenience: search artist then title; return the lyrics (or None)."""
        self.search_artist(artist)
        self.search_title(title)
        return self.get_lyrics()
|
[
"lyricsgenius.Genius"
] |
[((321, 407), 'lyricsgenius.Genius', 'lyricsgenius.Genius', (['_token'], {'timeout': '_timeout', 'retries': '_retries', 'verbose': '_verbose'}), '(_token, timeout=_timeout, retries=_retries, verbose=\n _verbose)\n', (340, 407), False, 'import lyricsgenius\n')]
|
import os.path
import pickle
import random
from data.base_dataset import BaseDataset, get_params, get_transform
from data.image_folder import make_numbering_dataset
import numpy as np
from PIL import Image
class AlignedDataset(BaseDataset):
    """A dataset class for paired image dataset.
    It assumes that the directory '/path/to/data/train' contains image pairs in the form of {A,B}.
    During test time, you need to prepare a directory '/path/to/data/test'.
    """
    def __init__(self, opt):
        """Initialize this dataset class.
        Parameters:
            opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions
        """
        BaseDataset.__init__(self, opt)
        self.dir_AB = os.path.join(opt.dataroot, opt.phase)  # get the image directory
        # Sort by the numeric index (element 0 of each (index, path) pair) so the
        # image order stays aligned with the caption pickle, then keep the paths.
        self.AB_paths = [
            e[1] for e in sorted(make_numbering_dataset(self.dir_AB, opt.max_dataset_size), key=lambda idx: idx[0])]
        assert(self.opt.load_size >= self.opt.crop_size)   # crop_size should be smaller than the size of loaded image
        # Swap input/output channel counts when translating B->A.
        self.input_nc = self.opt.output_nc if self.opt.direction == 'BtoA' else self.opt.input_nc
        self.output_nc = self.opt.input_nc if self.opt.direction == 'BtoA' else self.opt.output_nc
        # Caption pickle layout (inferred from usage here):
        # x[0]=train captions, x[1]=test captions, x[2]=ix->word, x[3]=word->ix.
        with open(opt.captions, 'rb') as f:
            x = pickle.load(f)
            train_captions, test_captions = x[0], x[1]
            self.captions = train_captions if opt.phase == "train" else test_captions
            self.ixtoword, self.wordtoix = x[2], x[3]
            del x, train_captions, test_captions
        self.n_words = len(self.ixtoword)
        print('Load from: ', opt.captions)
        self.captions_per_image = opt.captions_per_image
        self.text_words_num = opt.text_words_num
    def get_caption(self, sent_ix):
        """Return (caption, length) for caption index *sent_ix*.

        The caption is a fixed-size int64 array of word indices, zero padded
        (0 == '<end>') to self.text_words_num; longer captions are randomly
        subsampled with word order preserved.
        """
        # a list of indices for a sentence
        sent_caption = np.asarray(self.captions[sent_ix]).astype('int64')
        if (sent_caption == 0).sum() > 0:
            print('ERROR: do not need END (0) token', sent_caption)
        num_words = len(sent_caption)
        # pad with 0s (i.e., '<end>')
        x = np.zeros(self.text_words_num, dtype='int64')
        x_len = num_words
        if num_words <= self.text_words_num:
            x[:num_words] = sent_caption
        else:
            # Too long: keep a random subset of positions, sorted to preserve order.
            ix = list(np.arange(num_words))  # 1, 2, 3,..., maxNum
            np.random.shuffle(ix)
            ix = ix[:self.text_words_num]
            ix = np.sort(ix)
            x = sent_caption[ix]
            x_len = self.text_words_num
        return x, x_len
    def __getitem__(self, index):
        """Return a data point and its metadata information.
        Parameters:
            index - - a random integer for data indexing
        Returns a dictionary that contains A, B, A_paths and B_paths
            A (tensor) - - an image in the input domain
            B (tensor) - - its corresponding image in the target domain
            A_paths (str) - - image paths
            B_paths (str) - - image paths (same as A_paths)
            caption (ndarray) - - padded word-index caption for this image
            caption_len (int) - - number of valid words in caption
        """
        # read a image given a random integer index
        AB_path = self.AB_paths[index]
        AB = Image.open(AB_path).convert('RGB')
        # split AB image into A and B
        w, h = AB.size
        if w > h:
            # Wide image: A is the left half, B the right half.
            w2 = int(w / 2)
            A = AB.crop((0, 0, w2, h))
            B = AB.crop((w2, 0, w, h))
        else:
            # Not wider than tall: use the whole image as both A and B.
            A = AB
            B = AB
        # apply the same transform to both A and B
        transform_params = get_params(self.opt, A.size)
        A_transform = get_transform(self.opt, transform_params, grayscale=(self.input_nc == 1))
        B_transform = get_transform(self.opt, transform_params, grayscale=(self.output_nc == 1))
        A = A_transform(A)
        B = B_transform(B)
        # Pick one of the captions_per_image captions of this image at random.
        caption_idx = self.captions_per_image * index + random.randint(0, self.captions_per_image - 1)
        caption, caption_len = self.get_caption(caption_idx)
        return {'A': A, 'B': B, 'A_paths': AB_path, 'B_paths': AB_path,
                "caption": caption, "caption_len": caption_len}
    def __len__(self):
        """Return the total number of images in the dataset."""
        return len(self.AB_paths)
|
[
"random.randint",
"data.base_dataset.get_params",
"data.base_dataset.BaseDataset.__init__",
"numpy.asarray",
"numpy.zeros",
"data.image_folder.make_numbering_dataset",
"PIL.Image.open",
"numpy.sort",
"pickle.load",
"numpy.arange",
"data.base_dataset.get_transform",
"numpy.random.shuffle"
] |
[((694, 725), 'data.base_dataset.BaseDataset.__init__', 'BaseDataset.__init__', (['self', 'opt'], {}), '(self, opt)\n', (714, 725), False, 'from data.base_dataset import BaseDataset, get_params, get_transform\n'), ((2143, 2187), 'numpy.zeros', 'np.zeros', (['self.text_words_num'], {'dtype': '"""int64"""'}), "(self.text_words_num, dtype='int64')\n", (2151, 2187), True, 'import numpy as np\n'), ((3524, 3552), 'data.base_dataset.get_params', 'get_params', (['self.opt', 'A.size'], {}), '(self.opt, A.size)\n', (3534, 3552), False, 'from data.base_dataset import BaseDataset, get_params, get_transform\n'), ((3575, 3646), 'data.base_dataset.get_transform', 'get_transform', (['self.opt', 'transform_params'], {'grayscale': '(self.input_nc == 1)'}), '(self.opt, transform_params, grayscale=self.input_nc == 1)\n', (3588, 3646), False, 'from data.base_dataset import BaseDataset, get_params, get_transform\n'), ((3671, 3743), 'data.base_dataset.get_transform', 'get_transform', (['self.opt', 'transform_params'], {'grayscale': '(self.output_nc == 1)'}), '(self.opt, transform_params, grayscale=self.output_nc == 1)\n', (3684, 3743), False, 'from data.base_dataset import BaseDataset, get_params, get_transform\n'), ((1333, 1347), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1344, 1347), False, 'import pickle\n'), ((2393, 2414), 'numpy.random.shuffle', 'np.random.shuffle', (['ix'], {}), '(ix)\n', (2410, 2414), True, 'import numpy as np\n'), ((2474, 2485), 'numpy.sort', 'np.sort', (['ix'], {}), '(ix)\n', (2481, 2485), True, 'import numpy as np\n'), ((3858, 3904), 'random.randint', 'random.randint', (['(0)', '(self.captions_per_image - 1)'], {}), '(0, self.captions_per_image - 1)\n', (3872, 3904), False, 'import random\n'), ((1894, 1928), 'numpy.asarray', 'np.asarray', (['self.captions[sent_ix]'], {}), '(self.captions[sent_ix])\n', (1904, 1928), True, 'import numpy as np\n'), ((2336, 2356), 'numpy.arange', 'np.arange', (['num_words'], {}), '(num_words)\n', (2345, 2356), True, 
'import numpy as np\n'), ((3173, 3192), 'PIL.Image.open', 'Image.open', (['AB_path'], {}), '(AB_path)\n', (3183, 3192), False, 'from PIL import Image\n'), ((872, 929), 'data.image_folder.make_numbering_dataset', 'make_numbering_dataset', (['self.dir_AB', 'opt.max_dataset_size'], {}), '(self.dir_AB, opt.max_dataset_size)\n', (894, 929), False, 'from data.image_folder import make_numbering_dataset\n')]
|
import networkx
import fda
# Da Vinci robotic system
regulatory_graph = networkx.DiGraph()
regulatory_graph.add_node(fda.empty)
seeds = [fda.FDAApproval("K173585"),
fda.FDAApproval("K081113")]
for seed in seeds:
fda.populate_predicates(regulatory_graph, seed)
for seed in seeds:
subgraph = fda.networkx_to_graphviz(
fda.get_subgraph(regulatory_graph, seed))
subgraph.body = list(filter(lambda edge: "000000" not in edge,
subgraph.body))
subgraph.render(seed.id)
|
[
"fda.get_subgraph",
"networkx.DiGraph",
"fda.populate_predicates",
"fda.FDAApproval"
] |
[((74, 92), 'networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (90, 92), False, 'import networkx\n'), ((140, 166), 'fda.FDAApproval', 'fda.FDAApproval', (['"""K173585"""'], {}), "('K173585')\n", (155, 166), False, 'import fda\n'), ((177, 203), 'fda.FDAApproval', 'fda.FDAApproval', (['"""K081113"""'], {}), "('K081113')\n", (192, 203), False, 'import fda\n'), ((228, 275), 'fda.populate_predicates', 'fda.populate_predicates', (['regulatory_graph', 'seed'], {}), '(regulatory_graph, seed)\n', (251, 275), False, 'import fda\n'), ((345, 385), 'fda.get_subgraph', 'fda.get_subgraph', (['regulatory_graph', 'seed'], {}), '(regulatory_graph, seed)\n', (361, 385), False, 'import fda\n')]
|
from math import pi
from compas_fea.cad import rhino
from compas_fea.structure import CircularSection
from compas_fea.structure import ElasticIsotropic
from compas_fea.structure import ElementProperties as Properties
from compas_fea.structure import GeneralDisplacement
from compas_fea.structure import GeneralStep
from compas_fea.structure import PinnedDisplacement
from compas_fea.structure import PointLoad
from compas_fea.structure import Structure
# Author(s): <NAME> (github.com/andrewliew)
# Example script: build a beam model from Rhino line geometry, apply supports,
# a 30-degree end rotation and point loads, analyse with OpenSees, plot results.
# Structure
mdl = Structure(name='beam_simple', path='C:/Temp/')
# Elements (beam elements from the 'elset_lines' Rhino layer; local x-axis set explicitly)
network = rhino.network_from_lines(layer='elset_lines')
mdl.add_nodes_elements_from_network(network=network, element_type='BeamElement',
                                    elset='elset_lines', axes={'ex': [0, -1, 0]})
# Sets (node sets taken from the correspondingly named Rhino layers)
rhino.add_sets_from_layers(mdl, layers=['nset_left', 'nset_right', 'nset_weights'])
# Materials
mdl.add(ElasticIsotropic(name='mat_elastic', E=20*10**9, v=0.3, p=1500))
# Sections: tapered circular, radius (1 + Li/Lt)*0.020 grows along the ordered network
_, ekeys, L, Lt = rhino.ordered_network(mdl, network=network, layer='nset_left')
for i, Li in zip(ekeys, L):
    ri = (1 + Li / Lt) * 0.020
    sname = 'sec_{0}'.format(i)
    mdl.add(CircularSection(name=sname, r=ri))
    mdl.add(Properties(name='ep_{0}'.format(i), material='mat_elastic', section=sname, elements=[i]))
# Displacements (boundary conditions plus an imposed end rotation)
mdl.add([
    PinnedDisplacement(name='disp_left', nodes='nset_left'),
    GeneralDisplacement(name='disp_right', nodes='nset_right', y=0, z=0, xx=0),
    GeneralDisplacement(name='disp_rotate', nodes='nset_left', yy=30*pi/180),
])
# Loads
mdl.add(PointLoad(name='load_weights', nodes='nset_weights', z=-100))
# Steps: apply supports first, then the load + rotation
mdl.add([
    GeneralStep(name='step_bc', displacements=['disp_left', 'disp_right']),
    GeneralStep(name='step_load', loads='load_weights', displacements='disp_rotate'),
])
mdl.steps_order = ['step_bc', 'step_load']
# Summary
mdl.summary()
# Run (fields: displacements, rotations, section forces and moments)
mdl.analyse_and_extract(software='opensees', fields=['u', 'ur', 'sf', 'sm'])
rhino.plot_data(mdl, step='step_load', field='um', radius=0.01, cbar_size=0.3)
rhino.plot_data(mdl, step='step_load', field='sf1', radius=0.01, cbar_size=0.3)
rhino.plot_data(mdl, step='step_load', field='sf2', radius=0.01, cbar_size=0.3)
rhino.plot_data(mdl, step='step_load', field='sm1', radius=0.01, cbar_size=0.3)
|
[
"compas_fea.structure.Structure",
"compas_fea.cad.rhino.add_sets_from_layers",
"compas_fea.structure.PinnedDisplacement",
"compas_fea.structure.ElasticIsotropic",
"compas_fea.cad.rhino.ordered_network",
"compas_fea.structure.GeneralStep",
"compas_fea.cad.rhino.network_from_lines",
"compas_fea.structure.PointLoad",
"compas_fea.structure.GeneralDisplacement",
"compas_fea.structure.CircularSection",
"compas_fea.cad.rhino.plot_data"
] |
[((521, 567), 'compas_fea.structure.Structure', 'Structure', ([], {'name': '"""beam_simple"""', 'path': '"""C:/Temp/"""'}), "(name='beam_simple', path='C:/Temp/')\n", (530, 567), False, 'from compas_fea.structure import Structure\n'), ((591, 636), 'compas_fea.cad.rhino.network_from_lines', 'rhino.network_from_lines', ([], {'layer': '"""elset_lines"""'}), "(layer='elset_lines')\n", (615, 636), False, 'from compas_fea.cad import rhino\n'), ((809, 896), 'compas_fea.cad.rhino.add_sets_from_layers', 'rhino.add_sets_from_layers', (['mdl'], {'layers': "['nset_left', 'nset_right', 'nset_weights']"}), "(mdl, layers=['nset_left', 'nset_right',\n 'nset_weights'])\n", (835, 896), False, 'from compas_fea.cad import rhino\n'), ((1011, 1073), 'compas_fea.cad.rhino.ordered_network', 'rhino.ordered_network', (['mdl'], {'network': 'network', 'layer': '"""nset_left"""'}), "(mdl, network=network, layer='nset_left')\n", (1032, 1073), False, 'from compas_fea.cad import rhino\n'), ((1985, 2063), 'compas_fea.cad.rhino.plot_data', 'rhino.plot_data', (['mdl'], {'step': '"""step_load"""', 'field': '"""um"""', 'radius': '(0.01)', 'cbar_size': '(0.3)'}), "(mdl, step='step_load', field='um', radius=0.01, cbar_size=0.3)\n", (2000, 2063), False, 'from compas_fea.cad import rhino\n'), ((2064, 2143), 'compas_fea.cad.rhino.plot_data', 'rhino.plot_data', (['mdl'], {'step': '"""step_load"""', 'field': '"""sf1"""', 'radius': '(0.01)', 'cbar_size': '(0.3)'}), "(mdl, step='step_load', field='sf1', radius=0.01, cbar_size=0.3)\n", (2079, 2143), False, 'from compas_fea.cad import rhino\n'), ((2144, 2223), 'compas_fea.cad.rhino.plot_data', 'rhino.plot_data', (['mdl'], {'step': '"""step_load"""', 'field': '"""sf2"""', 'radius': '(0.01)', 'cbar_size': '(0.3)'}), "(mdl, step='step_load', field='sf2', radius=0.01, cbar_size=0.3)\n", (2159, 2223), False, 'from compas_fea.cad import rhino\n'), ((2224, 2303), 'compas_fea.cad.rhino.plot_data', 'rhino.plot_data', (['mdl'], {'step': '"""step_load"""', 'field': 
'"""sm1"""', 'radius': '(0.01)', 'cbar_size': '(0.3)'}), "(mdl, step='step_load', field='sm1', radius=0.01, cbar_size=0.3)\n", (2239, 2303), False, 'from compas_fea.cad import rhino\n'), ((915, 982), 'compas_fea.structure.ElasticIsotropic', 'ElasticIsotropic', ([], {'name': '"""mat_elastic"""', 'E': '(20 * 10 ** 9)', 'v': '(0.3)', 'p': '(1500)'}), "(name='mat_elastic', E=20 * 10 ** 9, v=0.3, p=1500)\n", (931, 982), False, 'from compas_fea.structure import ElasticIsotropic\n'), ((1583, 1643), 'compas_fea.structure.PointLoad', 'PointLoad', ([], {'name': '"""load_weights"""', 'nodes': '"""nset_weights"""', 'z': '(-100)'}), "(name='load_weights', nodes='nset_weights', z=-100)\n", (1592, 1643), False, 'from compas_fea.structure import PointLoad\n'), ((1178, 1211), 'compas_fea.structure.CircularSection', 'CircularSection', ([], {'name': 'sname', 'r': 'ri'}), '(name=sname, r=ri)\n', (1193, 1211), False, 'from compas_fea.structure import CircularSection\n'), ((1347, 1402), 'compas_fea.structure.PinnedDisplacement', 'PinnedDisplacement', ([], {'name': '"""disp_left"""', 'nodes': '"""nset_left"""'}), "(name='disp_left', nodes='nset_left')\n", (1365, 1402), False, 'from compas_fea.structure import PinnedDisplacement\n'), ((1408, 1482), 'compas_fea.structure.GeneralDisplacement', 'GeneralDisplacement', ([], {'name': '"""disp_right"""', 'nodes': '"""nset_right"""', 'y': '(0)', 'z': '(0)', 'xx': '(0)'}), "(name='disp_right', nodes='nset_right', y=0, z=0, xx=0)\n", (1427, 1482), False, 'from compas_fea.structure import GeneralDisplacement\n'), ((1488, 1564), 'compas_fea.structure.GeneralDisplacement', 'GeneralDisplacement', ([], {'name': '"""disp_rotate"""', 'nodes': '"""nset_left"""', 'yy': '(30 * pi / 180)'}), "(name='disp_rotate', nodes='nset_left', yy=30 * pi / 180)\n", (1507, 1564), False, 'from compas_fea.structure import GeneralDisplacement\n'), ((1669, 1739), 'compas_fea.structure.GeneralStep', 'GeneralStep', ([], {'name': '"""step_bc"""', 'displacements': "['disp_left', 
'disp_right']"}), "(name='step_bc', displacements=['disp_left', 'disp_right'])\n", (1680, 1739), False, 'from compas_fea.structure import GeneralStep\n'), ((1745, 1830), 'compas_fea.structure.GeneralStep', 'GeneralStep', ([], {'name': '"""step_load"""', 'loads': '"""load_weights"""', 'displacements': '"""disp_rotate"""'}), "(name='step_load', loads='load_weights', displacements='disp_rotate'\n )\n", (1756, 1830), False, 'from compas_fea.structure import GeneralStep\n')]
|
import copy
from collections import OrderedDict
import zinc.route53
from zinc.utils import memoized_property
from .record import Record, RECORD_PREFIX
class Policy:
    """Reconcile a DB-defined routing policy with its Route 53 records.

    Wraps the DB policy row plus the zone it applies to, computes the set
    of records the policy implies (weighted or latency-based routing), and
    pushes creations/updates/deletions to the zone.
    """

    def __init__(self, zone, policy):
        assert isinstance(zone, zinc.route53.Zone)
        self.zone = zone
        self.db_policy = policy

    @property
    def name(self):
        return self.db_policy.name

    @property
    def id(self):
        return self.db_policy.id

    @property
    def routing(self):
        # 'latency' or 'weighted'; validated in _build_tree.
        return self.db_policy.routing

    @memoized_property
    def aws_records(self):
        """What we have in AWS (record id -> record), restricted to this policy's members."""
        return dict([
            (r_id, record) for (r_id, record) in self.zone.records().items()
            if record.is_member_of(self)
        ])

    @memoized_property
    def desired_records(self):
        """The records we should have (the desired state of the world)."""
        return OrderedDict([(record.id, record) for record in self._build_tree()])

    def _build_weighted_tree(self, policy_members, region_suffixed=True):
        """Build one weighted A/AAAA record per policy member.

        With region_suffixed=True each record name carries the member's
        region (these become the leaves of a latency tree); with False all
        members share one name (plain weighted routing).
        """
        # Build simple tree
        records = []
        for policy_member in policy_members:
            record_type = 'A'
            if ':' in policy_member.ip.ip:
                record_type = 'AAAA'
            health_check_kwa = {}
            if policy_member.ip.healthcheck_id:
                health_check_kwa['health_check_id'] = str(policy_member.ip.healthcheck_id)
            record = Record(
                ttl=self.db_policy.ttl,
                type=record_type,
                values=[policy_member.ip.ip],
                set_identifier='{}-{}'.format(str(policy_member.id), policy_member.region),
                weight=policy_member.weight,
                zone=self.zone,
                **health_check_kwa,
            )
            # TODO: maybe we should have a specialized subclass for PolicyRecords
            # and this logic should be moved there
            if region_suffixed:
                record.name = '{}_{}_{}'.format(RECORD_PREFIX, self.name, policy_member.region)
            else:
                record.name = '{}_{}'.format(RECORD_PREFIX, self.name)
            records.append(record)
        return records

    def _build_lbr_tree(self, policy_members, regions):
        """Build a latency-based routed tree: weighted leaves per region plus
        one latency alias record (A, and AAAA when IPv6 is present) per region."""
        records = self._build_weighted_tree(policy_members)
        for region in regions:
            record = Record(
                name='{}_{}'.format(RECORD_PREFIX, self.name),
                type='A',
                alias_target={
                    'HostedZoneId': self.zone.id,
                    'DNSName': '{}_{}_{}.{}'.format(
                        RECORD_PREFIX, self.name, region, self.zone.root),
                    'EvaluateTargetHealth': True  # len(regions) > 1
                },
                region=region,
                set_identifier=region,
                zone=self.zone,
            )
            if self._has_ipv4_records_in_region(policy_members, region):
                records.append(record)
            # create a similar AAAA record if there exists IPv6 ips in this region.
            if self._has_ipv6_records_in_region(policy_members, region):
                record = copy.copy(record)
                record.type = 'AAAA'
                records.append(record)
        return records

    def _build_tree(self):
        """Compute the full desired record list for this policy.

        Raises Exception when the policy has no enabled members and
        AssertionError for an unknown routing mode.
        """
        policy_members = self.db_policy.members.exclude(enabled=False).exclude(ip__enabled=False)
        # ensure we always build region subtrees in alphabetical order; makes tests simpler
        regions = sorted(set([pm.region for pm in policy_members]))
        if len(regions) == 0:
            raise Exception(
                "Policy can't be applied for zone '{}'; "
                "There is no member in the '{}' policy.".format(
                    self.zone, self
                )
            )
        if self.routing == 'latency':
            # Here is the case where are multiple regions
            records = self._build_lbr_tree(policy_members, regions=regions)
        # elif len(regions) == 1:
        elif self.routing == 'weighted':
            # Case with a single region
            records = self._build_weighted_tree(
                policy_members, region_suffixed=False)
        else:
            raise AssertionError('invalid routing {} for policy {}'.format(
                self.routing, self.db_policy))
        return records

    def reconcile(self):
        """Delete records no longer desired, then upsert missing/changed ones."""
        aws_record_ids = self.aws_records.keys()
        desired_record_ids = self.desired_records.keys()

        to_delete = []
        for obsolete_rec_id in aws_record_ids - desired_record_ids:
            record = self.aws_records[obsolete_rec_id]
            record.deleted = True
            to_delete.append(record)
        self.zone.process_records(to_delete)

        to_upsert = []
        for rec_id, desired_record in self.desired_records.items():
            existing_record = self.aws_records.get(rec_id)
            if existing_record is None:
                to_upsert.append(desired_record)
            else:
                # if desired is a subset of existing
                if not desired_record.to_aws().items() <= existing_record.to_aws().items():
                    to_upsert.append(desired_record)
        self.zone.process_records(to_upsert)

    def remove(self):
        """Mark every AWS record of this policy deleted and push the change."""
        records = list(self.aws_records.values())
        for record in records:
            record.deleted = True
        self.zone.process_records(records)

    def _has_ipv6_records_in_region(self, policy_members, region):
        """True if any member in *region* (or any region when falsy) has an IPv6 ip."""
        # any() short-circuits on the first hit; the previous loop scanned all members.
        return any(':' in pm.ip.ip
                   for pm in policy_members
                   if not region or pm.region == region)

    def _has_ipv4_records_in_region(self, policy_members, region):
        """True if any member in *region* (or any region when falsy) has an IPv4 ip."""
        return any('.' in pm.ip.ip
                   for pm in policy_members
                   if not region or pm.region == region)
|
[
"copy.copy"
] |
[((3220, 3237), 'copy.copy', 'copy.copy', (['record'], {}), '(record)\n', (3229, 3237), False, 'import copy\n')]
|
from ecolor import slow_color, slow_print, ecolor
ecolor("This is red text", "red")
ecolor("This is bold blue text", "bold_blue")
slow_print("This is slow_print", 0.025)
slow_color("This is slow_print but colorful", "blue", 0.025)
slow_color("This is slow_print but colorful and bold", "bold_blue", 0.025)
|
[
"ecolor.ecolor",
"ecolor.slow_print",
"ecolor.slow_color"
] |
[((50, 83), 'ecolor.ecolor', 'ecolor', (['"""This is red text"""', '"""red"""'], {}), "('This is red text', 'red')\n", (56, 83), False, 'from ecolor import slow_color, slow_print, ecolor\n'), ((84, 129), 'ecolor.ecolor', 'ecolor', (['"""This is bold blue text"""', '"""bold_blue"""'], {}), "('This is bold blue text', 'bold_blue')\n", (90, 129), False, 'from ecolor import slow_color, slow_print, ecolor\n'), ((130, 169), 'ecolor.slow_print', 'slow_print', (['"""This is slow_print"""', '(0.025)'], {}), "('This is slow_print', 0.025)\n", (140, 169), False, 'from ecolor import slow_color, slow_print, ecolor\n'), ((170, 230), 'ecolor.slow_color', 'slow_color', (['"""This is slow_print but colorful"""', '"""blue"""', '(0.025)'], {}), "('This is slow_print but colorful', 'blue', 0.025)\n", (180, 230), False, 'from ecolor import slow_color, slow_print, ecolor\n'), ((231, 305), 'ecolor.slow_color', 'slow_color', (['"""This is slow_print but colorful and bold"""', '"""bold_blue"""', '(0.025)'], {}), "('This is slow_print but colorful and bold', 'bold_blue', 0.025)\n", (241, 305), False, 'from ecolor import slow_color, slow_print, ecolor\n')]
|
# -*- coding: utf-8 -*-
import os
import sys
import six
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError, ClientError, ParamError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.rum.v20210622 import rum_client as rum_client_v20210622
from tencentcloud.rum.v20210622 import models as models_v20210622
from jmespath import search
import time
def doDescribeTawAreas(args, parsed_globals):
    """CLI action for rum DescribeTawAreas: resolve credentials from the
    global options, send the request, optionally poll until the configured
    waiter condition matches, then print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTawAreasRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: re-issue the request until the JMESPath expression matches
    # the expected value, or until the waiter's timeout expires.
    while True:
        rsp = client.DescribeTawAreas(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateReleaseFile(args, parsed_globals):
    """CLI action for rum CreateReleaseFile: resolve credentials from the
    global options, send the request, optionally poll until the configured
    waiter condition matches, then print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateReleaseFileRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: re-issue the request until the JMESPath expression matches
    # the expected value, or until the waiter's timeout expires.
    while True:
        rsp = client.CreateReleaseFile(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataLogUrlInfo(args, parsed_globals):
    """CLI action for rum DescribeDataLogUrlInfo: resolve credentials from the
    global options, send the request, optionally poll until the configured
    waiter condition matches, then print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDataLogUrlInfoRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: re-issue the request until the JMESPath expression matches
    # the expected value, or until the waiter's timeout expires.
    while True:
        rsp = client.DescribeDataLogUrlInfo(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTawInstances(args, parsed_globals):
    """CLI action for rum DescribeTawInstances: resolve credentials from the
    global options, send the request, optionally poll until the configured
    waiter condition matches, then print the JSON response."""
    g_param = parse_global_arg(parsed_globals)
    # Credential precedence: CVM role > assumed STS role > static secret keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTawInstancesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    # Waiter loop: re-issue the request until the JMESPath expression matches
    # the expected value, or until the waiter's timeout expires.
    while True:
        rsp = client.DescribeTawInstances(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError as e:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
            (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
            search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        else:
            print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
            time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataPerformancePage(args, parsed_globals):
    """Invoke the rum DescribeDataPerformancePage API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDataPerformancePageRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeDataPerformancePage(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDataLogUrlStatistics(args, parsed_globals):
    """Invoke the rum DescribeDataLogUrlStatistics API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDataLogUrlStatisticsRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeDataLogUrlStatistics(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDataFetchProject(args, parsed_globals):
    """Invoke the rum DescribeDataFetchProject API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDataFetchProjectRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeDataFetchProject(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDeleteInstance(args, parsed_globals):
    """Invoke the rum DeleteInstance API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DeleteInstanceRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DeleteInstance(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDataStaticUrl(args, parsed_globals):
    """Invoke the rum DescribeDataStaticUrl API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDataStaticUrlRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeDataStaticUrl(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doResumeInstance(args, parsed_globals):
    """Invoke the rum ResumeInstance API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ResumeInstanceRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.ResumeInstance(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDataPerformanceProject(args, parsed_globals):
    """Invoke the rum DescribeDataPerformanceProject API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDataPerformanceProjectRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeDataPerformanceProject(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeError(args, parsed_globals):
    """Invoke the rum DescribeError API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeErrorRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeError(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeLogList(args, parsed_globals):
    """Invoke the rum DescribeLogList API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeLogListRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeLogList(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeOfflineLogs(args, parsed_globals):
    """Invoke the rum DescribeOfflineLogs API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeOfflineLogsRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeOfflineLogs(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateTawInstance(args, parsed_globals):
    """Invoke the rum CreateTawInstance API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CreateTawInstanceRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.CreateTawInstance(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribePvList(args, parsed_globals):
    """Invoke the rum DescribePvList API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribePvListRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribePvList(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeLogExports(args, parsed_globals):
    """Invoke the rum DescribeLogExports API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeLogExportsRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeLogExports(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDataWebVitalsPage(args, parsed_globals):
    """Invoke the rum DescribeDataWebVitalsPage API and print the formatted result.

    Polls the endpoint until the configured waiter condition (if any) is met.
    """
    params = parse_global_arg(parsed_globals)
    # Resolve credentials: CVM role > assumed STS role > static secret keys.
    if params[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif params[OptionsDefine.RoleArn.replace('-', '_')] and params[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.RoleArn.replace('-', '_')],
            params[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            params[OptionsDefine.SecretId],
            params[OptionsDefine.SecretKey],
            params[OptionsDefine.Token]
        )
    timeout = params[OptionsDefine.Timeout]
    net_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
        proxy=params[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    client_profile = ClientProfile(httpProfile=net_profile, signMethod="HmacSHA256")
    client_module = CLIENT_MAP[params[OptionsDefine.Version]]
    client = client_module.RumClient(cred, params[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDataWebVitalsPageRequest()
    request.from_json_string(json.dumps(args))
    started_at = time.time()
    while True:
        raw = client.DescribeDataWebVitalsPage(request).to_json_string()
        try:
            parsed = json.loads(raw)
        except TypeError:
            parsed = json.loads(raw.decode('utf-8'))  # python3.3
        if not params[OptionsDefine.Waiter]:
            break
        waiter = params['OptionsDefine.WaiterInfo']
        if search(waiter['expr'], parsed) == waiter['to']:
            break
        if time.time() - started_at >= waiter['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (waiter['expr'], waiter['to'], search(waiter['expr'], parsed)))
        print('Inquiry result is %s.' % search(waiter['expr'], parsed))
        time.sleep(waiter['interval'])
    FormatOutput.output("action", parsed, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def _do_rum_action(action, args, parsed_globals):
    """Shared implementation for the ``do<Action>`` Rum CLI handlers below.

    Resolves credentials from the parsed global options (priority: CVM role,
    then STS assume-role when both role-arn and role-session-name are set,
    then static secret id/key/token), builds a ``RumClient`` for the selected
    API version, sends the ``<action>Request`` populated from *args*, and —
    when a waiter is configured — polls until the waiter expression matches
    the expected value or the configured timeout elapses.  The final JSON
    response is printed through ``FormatOutput``.

    :param action: API action name, e.g. ``"DeleteStarProject"``; used to
        look up both the request model class (``<action>Request``) and the
        client method of the same name.
    :param args: dict of request parameters parsed from the command line.
    :param parsed_globals: parsed global CLI options.
    :raises ClientError: if the waiter condition is not met before the
        waiter timeout.
    """
    g_param = parse_global_arg(parsed_globals)
    # Credential resolution: CVM role > STS assume-role > static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
        )
    http_profile = HttpProfile(
        # Default request timeout is 60 seconds unless overridden.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    # Request class and client method share the action name by convention.
    model = getattr(models, action + "Request")()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        rsp = getattr(client, action)(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # No waiter configured, or the waiter condition is already met.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        cur_time = time.time()
        if cur_time - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'], g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])


def doDeleteStarProject(args, parsed_globals):
    """CLI handler for the Rum DeleteStarProject action."""
    _do_rum_action("DeleteStarProject", args, parsed_globals)


def doDescribeDataFetchUrlInfo(args, parsed_globals):
    """CLI handler for the Rum DescribeDataFetchUrlInfo action."""
    _do_rum_action("DescribeDataFetchUrlInfo", args, parsed_globals)


def doDescribeDataPvUrlStatistics(args, parsed_globals):
    """CLI handler for the Rum DescribeDataPvUrlStatistics action."""
    _do_rum_action("DescribeDataPvUrlStatistics", args, parsed_globals)


def doDescribeData(args, parsed_globals):
    """CLI handler for the Rum DescribeData action."""
    _do_rum_action("DescribeData", args, parsed_globals)


def doDescribeReleaseFileSign(args, parsed_globals):
    """CLI handler for the Rum DescribeReleaseFileSign action."""
    _do_rum_action("DescribeReleaseFileSign", args, parsed_globals)


def doCreateLogExport(args, parsed_globals):
    """CLI handler for the Rum CreateLogExport action."""
    _do_rum_action("CreateLogExport", args, parsed_globals)


def doDescribeDataSetUrlStatistics(args, parsed_globals):
    """CLI handler for the Rum DescribeDataSetUrlStatistics action."""
    _do_rum_action("DescribeDataSetUrlStatistics", args, parsed_globals)


def doDescribeDataStaticResource(args, parsed_globals):
    """CLI handler for the Rum DescribeDataStaticResource action."""
    _do_rum_action("DescribeDataStaticResource", args, parsed_globals)


def doDescribeProjectLimits(args, parsed_globals):
    """CLI handler for the Rum DescribeProjectLimits action."""
    _do_rum_action("DescribeProjectLimits", args, parsed_globals)


def doDescribeDataCustomUrl(args, parsed_globals):
    """CLI handler for the Rum DescribeDataCustomUrl action."""
    _do_rum_action("DescribeDataCustomUrl", args, parsed_globals)


def doCreateWhitelist(args, parsed_globals):
    """CLI handler for the Rum CreateWhitelist action."""
    _do_rum_action("CreateWhitelist", args, parsed_globals)


def doDescribeProjects(args, parsed_globals):
    """CLI handler for the Rum DescribeProjects action."""
    _do_rum_action("DescribeProjects", args, parsed_globals)


def doCreateStarProject(args, parsed_globals):
    """CLI handler for the Rum CreateStarProject action."""
    _do_rum_action("CreateStarProject", args, parsed_globals)


def doDeleteWhitelist(args, parsed_globals):
    """CLI handler for the Rum DeleteWhitelist action."""
    _do_rum_action("DeleteWhitelist", args, parsed_globals)
def doModifyProjectLimit(args, parsed_globals):
    """Execute the ModifyProjectLimit API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProjectLimitRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.ModifyProjectLimit(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStopInstance(args, parsed_globals):
    """Execute the StopInstance API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StopInstanceRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.StopInstance(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProject(args, parsed_globals):
    """Execute the ModifyProject API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProjectRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.ModifyProject(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteReleaseFile(args, parsed_globals):
    """Execute the DeleteReleaseFile API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteReleaseFileRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DeleteReleaseFile(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLogExport(args, parsed_globals):
    """Execute the DeleteLogExport API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteLogExportRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DeleteLogExport(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWhitelists(args, parsed_globals):
    """Execute the DescribeWhitelists API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeWhitelistsRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeWhitelists(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataEventUrl(args, parsed_globals):
    """Execute the DescribeDataEventUrl API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDataEventUrlRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeDataEventUrl(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteOfflineLogRecord(args, parsed_globals):
    """Execute the DeleteOfflineLogRecord API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteOfflineLogRecordRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DeleteOfflineLogRecord(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeOfflineLogConfigs(args, parsed_globals):
    """Execute the DescribeOfflineLogConfigs API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeOfflineLogConfigsRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeOfflineLogConfigs(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeScores(args, parsed_globals):
    """Execute the DescribeScores API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeScoresRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeScores(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProject(args, parsed_globals):
    """Execute the CreateProject API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateProjectRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.CreateProject(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataReportCount(args, parsed_globals):
    """Execute the DescribeDataReportCount API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDataReportCountRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeDataReportCount(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataPvUrlInfo(args, parsed_globals):
    """Execute the DescribeDataPvUrlInfo API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDataPvUrlInfoRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeDataPvUrlInfo(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataStaticProject(args, parsed_globals):
    """Execute the DescribeDataStaticProject API call for this CLI action.

    Builds credentials and a client from the parsed global options, sends the
    request, optionally polls until the configured waiter condition holds, and
    prints the JSON response.
    """
    g_param = parse_global_arg(parsed_globals)

    # Credential precedence: CVM role, then STS assume-role, then static keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')])
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId],
            g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token])

    # HTTP profile honours the --timeout / --endpoint / --https-proxy options.
    timeout_opt = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout_opt is None else int(timeout_opt),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')])
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDataStaticProjectRequest()
    model.from_json_string(json.dumps(args))

    start_time = time.time()
    while True:
        rsp = client.DescribeDataStaticProject(model)
        result = rsp.to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # python3.3
        # Done when no waiter was requested or the waited expression matched.
        if not g_param[OptionsDefine.Waiter] or search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj) == g_param['OptionsDefine.WaiterInfo']['to']:
            break
        if time.time() - start_time >= g_param['OptionsDefine.WaiterInfo']['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (g_param['OptionsDefine.WaiterInfo']['expr'],
                               g_param['OptionsDefine.WaiterInfo']['to'],
                               search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)))
        print('Inquiry result is %s.' % search(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj))
        time.sleep(g_param['OptionsDefine.WaiterInfo']['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProject(args, parsed_globals):
    """Invoke the rum DeleteProject API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].DeleteProjectRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.DeleteProject(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeOfflineLogRecords(args, parsed_globals):
    """Invoke the rum DescribeOfflineLogRecords API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].DescribeOfflineLogRecordsRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.DescribeOfflineLogRecords(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUvList(args, parsed_globals):
    """Invoke the rum DescribeUvList API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].DescribeUvListRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.DescribeUvList(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteOfflineLogConfig(args, parsed_globals):
    """Invoke the rum DeleteOfflineLogConfig API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].DeleteOfflineLogConfigRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.DeleteOfflineLogConfig(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeReleaseFiles(args, parsed_globals):
    """Invoke the rum DescribeReleaseFiles API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].DescribeReleaseFilesRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.DescribeReleaseFiles(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyInstance(args, parsed_globals):
    """Invoke the rum ModifyInstance API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].ModifyInstanceRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.ModifyInstance(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataFetchUrl(args, parsed_globals):
    """Invoke the rum DescribeDataFetchUrl API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].DescribeDataFetchUrlRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.DescribeDataFetchUrl(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateOfflineLogConfig(args, parsed_globals):
    """Invoke the rum CreateOfflineLogConfig API and print the (optionally awaited) result."""
    g_param = parse_global_arg(parsed_globals)
    # Pick the credential source: CVM-bound role, assumed STS role, or plain keys.
    if g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
        cred = credential.CVMRoleCredential()
    elif g_param[OptionsDefine.RoleArn.replace('-', '_')] and g_param[OptionsDefine.RoleSessionName.replace('-', '_')]:
        cred = credential.STSAssumeRoleCredential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.RoleArn.replace('-', '_')],
            g_param[OptionsDefine.RoleSessionName.replace('-', '_')]
        )
    else:
        cred = credential.Credential(
            g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey],
            g_param[OptionsDefine.Token]
        )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy.replace('-', '_')]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].RumClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    model = MODELS_MAP[version].CreateOfflineLogConfigRequest()
    model.from_json_string(json.dumps(args))
    start_time = time.time()
    while True:
        result = client.CreateOfflineLogConfig(model).to_json_string()
        try:
            json_obj = json.loads(result)
        except TypeError:
            json_obj = json.loads(result.decode('utf-8'))  # bytes payload on python3.3
        # Without --waiter a single call is enough; otherwise poll until the
        # jmespath expression evaluates to the expected value or we time out.
        if not g_param[OptionsDefine.Waiter]:
            break
        info = g_param['OptionsDefine.WaiterInfo']
        if search(info['expr'], json_obj) == info['to']:
            break
        if time.time() - start_time >= info['timeout']:
            raise ClientError('Request timeout, wait `%s` to `%s` timeout, last request is %s' %
                              (info['expr'], info['to'], search(info['expr'], json_obj)))
        print('Inquiry result is %s.' % search(info['expr'], json_obj))
        time.sleep(info['interval'])
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# SDK client module for each supported API version of the rum service.
CLIENT_MAP = {
    "v20210622": rum_client_v20210622,

}

# Request/response model module for each supported API version.
MODELS_MAP = {
    "v20210622": models_v20210622,

}
# Maps each CLI action name to the handler function that executes it.
ACTION_MAP = {
    "DescribeTawAreas": doDescribeTawAreas,
    "CreateReleaseFile": doCreateReleaseFile,
    "DescribeDataLogUrlInfo": doDescribeDataLogUrlInfo,
    "DescribeTawInstances": doDescribeTawInstances,
    "DescribeDataPerformancePage": doDescribeDataPerformancePage,
    "DescribeDataLogUrlStatistics": doDescribeDataLogUrlStatistics,
    "DescribeDataFetchProject": doDescribeDataFetchProject,
    "DeleteInstance": doDeleteInstance,
    "DescribeDataStaticUrl": doDescribeDataStaticUrl,
    "ResumeInstance": doResumeInstance,
    "DescribeDataPerformanceProject": doDescribeDataPerformanceProject,
    "DescribeError": doDescribeError,
    "DescribeLogList": doDescribeLogList,
    "DescribeOfflineLogs": doDescribeOfflineLogs,
    "CreateTawInstance": doCreateTawInstance,
    "DescribePvList": doDescribePvList,
    "DescribeLogExports": doDescribeLogExports,
    "DescribeDataWebVitalsPage": doDescribeDataWebVitalsPage,
    "DeleteStarProject": doDeleteStarProject,
    "DescribeDataFetchUrlInfo": doDescribeDataFetchUrlInfo,
    "DescribeDataPvUrlStatistics": doDescribeDataPvUrlStatistics,
    "DescribeData": doDescribeData,
    "DescribeReleaseFileSign": doDescribeReleaseFileSign,
    "CreateLogExport": doCreateLogExport,
    "DescribeDataSetUrlStatistics": doDescribeDataSetUrlStatistics,
    "DescribeDataStaticResource": doDescribeDataStaticResource,
    "DescribeProjectLimits": doDescribeProjectLimits,
    "DescribeDataCustomUrl": doDescribeDataCustomUrl,
    "CreateWhitelist": doCreateWhitelist,
    "DescribeProjects": doDescribeProjects,
    "CreateStarProject": doCreateStarProject,
    "DeleteWhitelist": doDeleteWhitelist,
    "ModifyProjectLimit": doModifyProjectLimit,
    "StopInstance": doStopInstance,
    "ModifyProject": doModifyProject,
    "DeleteReleaseFile": doDeleteReleaseFile,
    "DeleteLogExport": doDeleteLogExport,
    "DescribeWhitelists": doDescribeWhitelists,
    "DescribeDataEventUrl": doDescribeDataEventUrl,
    "DeleteOfflineLogRecord": doDeleteOfflineLogRecord,
    "DescribeOfflineLogConfigs": doDescribeOfflineLogConfigs,
    "DescribeScores": doDescribeScores,
    "CreateProject": doCreateProject,
    "DescribeDataReportCount": doDescribeDataReportCount,
    "DescribeDataPvUrlInfo": doDescribeDataPvUrlInfo,
    "DescribeDataStaticProject": doDescribeDataStaticProject,
    "DeleteProject": doDeleteProject,
    "DescribeOfflineLogRecords": doDescribeOfflineLogRecords,
    "DescribeUvList": doDescribeUvList,
    "DeleteOfflineLogConfig": doDeleteOfflineLogConfig,
    "DescribeReleaseFiles": doDescribeReleaseFiles,
    "ModifyInstance": doModifyInstance,
    "DescribeDataFetchUrl": doDescribeDataFetchUrl,
    "CreateOfflineLogConfig": doCreateOfflineLogConfig,

}
# API versions this module can dispatch to (see CLIENT_MAP / MODELS_MAP keys).
AVAILABLE_VERSION_LIST = [
    "v20210622",

]
def action_caller():
    """Return the mapping of action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve global CLI options into a fully populated parameter dict.

    Fills credential, region, output, version and endpoint values from, in
    priority order: the command line, environment variables (only when no
    explicit ``--profile`` was given), and the profile files under
    ``~/.tccli``.  Also normalizes the optional ``--waiter`` specification
    into ``g_param['OptionsDefine.WaiterInfo']``.

    :param parsed_globals: dict of globally parsed command-line options.
    :return: the completed option dict (the same object, mutated in place).
    :raises ConfigurationError: when a required option cannot be resolved or
        a profile file is not valid JSON.
    """
    g_param = parsed_globals

    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"

    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")

    conf = {}
    cred = {}

    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)

    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))

    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None

    if not is_exist_profile:
        # No explicit profile was requested, so environment variables may
        # override what was read from the profile files.
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)

        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)

        if os.environ.get(OptionsDefine.ENV_ROLE_ARN) and os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME):
            cred[OptionsDefine.RoleArn] = os.environ.get(OptionsDefine.ENV_ROLE_ARN)
            cred[OptionsDefine.RoleSessionName] = os.environ.get(OptionsDefine.ENV_ROLE_SESSION_NAME)

    # Backfill every still-unset option from the credential/config files.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                elif not g_param[OptionsDefine.UseCVMRole.replace('-', '_')]:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param.replace('_', '-') in [OptionsDefine.RoleArn, OptionsDefine.RoleSessionName]:
                if param.replace('_', '-') in cred:
                    g_param[param] = cred[param.replace('_', '-')]

    try:
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["rum"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')

        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["rum"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))

    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))

    if g_param[OptionsDefine.Waiter]:
        param = eval(g_param[OptionsDefine.Waiter])
        if 'expr' not in param:
            raise Exception('`expr` in `--waiter` must be defined')
        if 'to' not in param:
            raise Exception('`to` in `--waiter` must be defined')
        if 'timeout' not in param:
            if 'waiter' in conf and 'timeout' in conf['waiter']:
                param['timeout'] = conf['waiter']['timeout']
            else:
                param['timeout'] = 180
        if 'interval' not in param:
            if 'waiter' in conf and 'interval' in conf['waiter']:
                param['interval'] = conf['waiter']['interval']
            else:
                # BUG FIX: this branch previously assigned param['timeout'] = 5,
                # clobbering the timeout and leaving 'interval' unset, which made
                # the min() below raise KeyError. Default the interval instead.
                param['interval'] = 5
        param['interval'] = min(param['interval'], param['timeout'])
        g_param['OptionsDefine.WaiterInfo'] = param

    # Under python2, json.load returns unicode values for fields read from the
    # configuration file, so convert them back to utf-8 byte strings here.
    if six.PY2:
        for key, value in g_param.items():
            if isinstance(value, six.text_type):
                g_param[key] = value.encode('utf-8')
    return g_param
|
[
"tencentcloud.common.credential.CVMRoleCredential",
"json.loads",
"tencentcloud.common.profile.client_profile.ClientProfile",
"tccli.exceptions.ConfigurationError",
"tccli.utils.Utils.load_json_msg",
"tccli.format_output.output",
"tccli.options_define.UseCVMRole.replace",
"time.time",
"json.dumps",
"time.sleep",
"os.environ.get",
"tccli.options_define.RoleArn.replace",
"tccli.options_define.HttpsProxy.replace",
"jmespath.search",
"tccli.options_define.RoleSessionName.replace",
"tccli.utils.Utils.file_existed",
"tencentcloud.common.credential.Credential",
"os.path.expanduser"
] |
[((1693, 1757), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (1706, 1757), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((2097, 2108), 'time.time', 'time.time', ([], {}), '()\n', (2106, 2108), False, 'import time\n'), ((3121, 3226), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (3140, 3226), True, 'import tccli.format_output as FormatOutput\n'), ((4271, 4335), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (4284, 4335), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((4676, 4687), 'time.time', 'time.time', ([], {}), '()\n', (4685, 4687), False, 'import time\n'), ((5701, 5806), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (5720, 5806), True, 'import tccli.format_output as FormatOutput\n'), ((6856, 6920), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (6869, 6920), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((7266, 7277), 'time.time', 'time.time', ([], {}), '()\n', (7275, 7277), False, 'import time\n'), ((8296, 8401), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 
'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (8315, 8401), True, 'import tccli.format_output as FormatOutput\n'), ((9449, 9513), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (9462, 9513), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((9857, 9868), 'time.time', 'time.time', ([], {}), '()\n', (9866, 9868), False, 'import time\n'), ((10885, 10990), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (10904, 10990), True, 'import tccli.format_output as FormatOutput\n'), ((12045, 12109), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (12058, 12109), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((12460, 12471), 'time.time', 'time.time', ([], {}), '()\n', (12469, 12471), False, 'import time\n'), ((13495, 13600), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (13514, 13600), True, 'import tccli.format_output as FormatOutput\n'), ((14656, 14720), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (14669, 14720), False, 
'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((15072, 15083), 'time.time', 'time.time', ([], {}), '()\n', (15081, 15083), False, 'import time\n'), ((16108, 16213), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (16127, 16213), True, 'import tccli.format_output as FormatOutput\n'), ((17265, 17329), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (17278, 17329), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((17677, 17688), 'time.time', 'time.time', ([], {}), '()\n', (17686, 17688), False, 'import time\n'), ((18709, 18814), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (18728, 18814), True, 'import tccli.format_output as FormatOutput\n'), ((19856, 19920), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (19869, 19920), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((20258, 20269), 'time.time', 'time.time', ([], {}), '()\n', (20267, 20269), False, 'import time\n'), ((21280, 21385), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (21299, 21385), True, 'import 
tccli.format_output as FormatOutput\n'), ((22434, 22498), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (22447, 22498), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((22843, 22854), 'time.time', 'time.time', ([], {}), '()\n', (22852, 22854), False, 'import time\n'), ((23872, 23977), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (23891, 23977), True, 'import tccli.format_output as FormatOutput\n'), ((25019, 25083), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (25032, 25083), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((25421, 25432), 'time.time', 'time.time', ([], {}), '()\n', (25430, 25432), False, 'import time\n'), ((26443, 26548), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (26462, 26548), True, 'import tccli.format_output as FormatOutput\n'), ((27606, 27670), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (27619, 27670), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((28024, 28035), 'time.time', 'time.time', ([], {}), '()\n', (28033, 28035), False, 'import time\n'), ((29062, 29167), 
'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (29081, 29167), True, 'import tccli.format_output as FormatOutput\n'), ((30208, 30272), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (30221, 30272), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((30609, 30620), 'time.time', 'time.time', ([], {}), '()\n', (30618, 30620), False, 'import time\n'), ((31630, 31735), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (31649, 31735), True, 'import tccli.format_output as FormatOutput\n'), ((32778, 32842), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (32791, 32842), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((33181, 33192), 'time.time', 'time.time', ([], {}), '()\n', (33190, 33192), False, 'import time\n'), ((34204, 34309), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (34223, 34309), True, 'import tccli.format_output as FormatOutput\n'), ((35356, 35420), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), 
"(httpProfile=http_profile, signMethod='HmacSHA256')\n", (35369, 35420), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((35763, 35774), 'time.time', 'time.time', ([], {}), '()\n', (35772, 35774), False, 'import time\n'), ((36790, 36895), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (36809, 36895), True, 'import tccli.format_output as FormatOutput\n'), ((37940, 38004), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (37953, 38004), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((38345, 38356), 'time.time', 'time.time', ([], {}), '()\n', (38354, 38356), False, 'import time\n'), ((39370, 39475), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (39389, 39475), True, 'import tccli.format_output as FormatOutput\n'), ((40517, 40581), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (40530, 40581), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((40919, 40930), 'time.time', 'time.time', ([], {}), '()\n', (40928, 40930), False, 'import time\n'), ((41941, 42046), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n 
g_param[OptionsDefine.Filter])\n", (41960, 42046), True, 'import tccli.format_output as FormatOutput\n'), ((43092, 43156), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (43105, 43156), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((43498, 43509), 'time.time', 'time.time', ([], {}), '()\n', (43507, 43509), False, 'import time\n'), ((44524, 44629), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (44543, 44629), True, 'import tccli.format_output as FormatOutput\n'), ((45682, 45746), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (45695, 45746), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((46095, 46106), 'time.time', 'time.time', ([], {}), '()\n', (46104, 46106), False, 'import time\n'), ((47128, 47233), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (47147, 47233), True, 'import tccli.format_output as FormatOutput\n'), ((48278, 48342), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (48291, 48342), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((48683, 48694), 'time.time', 'time.time', ([], {}), 
'()\n', (48692, 48694), False, 'import time\n'), ((49708, 49813), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (49727, 49813), True, 'import tccli.format_output as FormatOutput\n'), ((50865, 50929), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (50878, 50929), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((51277, 51288), 'time.time', 'time.time', ([], {}), '()\n', (51286, 51288), False, 'import time\n'), ((52309, 52414), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (52328, 52414), True, 'import tccli.format_output as FormatOutput\n'), ((53469, 53533), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (53482, 53533), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((53884, 53895), 'time.time', 'time.time', ([], {}), '()\n', (53893, 53895), False, 'import time\n'), ((54919, 55024), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (54938, 55024), True, 'import tccli.format_output as FormatOutput\n'), ((56064, 56128), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], 
{'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (56077, 56128), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((56464, 56475), 'time.time', 'time.time', ([], {}), '()\n', (56473, 56475), False, 'import time\n'), ((57484, 57589), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (57503, 57589), True, 'import tccli.format_output as FormatOutput\n'), ((58640, 58704), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (58653, 58704), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((59051, 59062), 'time.time', 'time.time', ([], {}), '()\n', (59060, 59062), False, 'import time\n'), ((60082, 60187), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (60101, 60187), True, 'import tccli.format_output as FormatOutput\n'), ((61230, 61294), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (61243, 61294), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((61633, 61644), 'time.time', 'time.time', ([], {}), '()\n', (61642, 61644), False, 'import time\n'), ((62656, 62761), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 
'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (62675, 62761), True, 'import tccli.format_output as FormatOutput\n'), ((63817, 63881), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (63830, 63881), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((64233, 64244), 'time.time', 'time.time', ([], {}), '()\n', (64242, 64244), False, 'import time\n'), ((65269, 65374), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (65288, 65374), True, 'import tccli.format_output as FormatOutput\n'), ((66428, 66492), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (66441, 66492), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((66842, 66853), 'time.time', 'time.time', ([], {}), '()\n', (66851, 66853), False, 'import time\n'), ((67876, 67981), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (67895, 67981), True, 'import tccli.format_output as FormatOutput\n'), ((69030, 69094), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (69043, 69094), False, 'from 
tencentcloud.common.profile.client_profile import ClientProfile\n'), ((69439, 69450), 'time.time', 'time.time', ([], {}), '()\n', (69448, 69450), False, 'import time\n'), ((70468, 70573), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (70487, 70573), True, 'import tccli.format_output as FormatOutput\n'), ((71622, 71686), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (71635, 71686), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((72031, 72042), 'time.time', 'time.time', ([], {}), '()\n', (72040, 72042), False, 'import time\n'), ((73060, 73165), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (73079, 73165), True, 'import tccli.format_output as FormatOutput\n'), ((74208, 74272), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (74221, 74272), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((74611, 74622), 'time.time', 'time.time', ([], {}), '()\n', (74620, 74622), False, 'import time\n'), ((75634, 75739), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (75653, 75739), True, 'import tccli.format_output as 
FormatOutput\n'), ((76783, 76847), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (76796, 76847), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((77187, 77198), 'time.time', 'time.time', ([], {}), '()\n', (77196, 77198), False, 'import time\n'), ((78211, 78316), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (78230, 78316), True, 'import tccli.format_output as FormatOutput\n'), ((79361, 79425), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (79374, 79425), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((79766, 79777), 'time.time', 'time.time', ([], {}), '()\n', (79775, 79777), False, 'import time\n'), ((80791, 80896), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (80810, 80896), True, 'import tccli.format_output as FormatOutput\n'), ((81939, 82003), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (81952, 82003), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((82342, 82353), 'time.time', 'time.time', ([], {}), '()\n', (82351, 82353), False, 'import time\n'), ((83365, 83470), 
'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (83384, 83470), True, 'import tccli.format_output as FormatOutput\n'), ((84516, 84580), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (84529, 84580), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((84922, 84933), 'time.time', 'time.time', ([], {}), '()\n', (84931, 84933), False, 'import time\n'), ((85948, 86053), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (85967, 86053), True, 'import tccli.format_output as FormatOutput\n'), ((87093, 87157), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (87106, 87157), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((87493, 87504), 'time.time', 'time.time', ([], {}), '()\n', (87502, 87504), False, 'import time\n'), ((88513, 88618), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (88532, 88618), True, 'import tccli.format_output as FormatOutput\n'), ((89659, 89723), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), 
"(httpProfile=http_profile, signMethod='HmacSHA256')\n", (89672, 89723), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((90060, 90071), 'time.time', 'time.time', ([], {}), '()\n', (90069, 90071), False, 'import time\n'), ((91081, 91186), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (91100, 91186), True, 'import tccli.format_output as FormatOutput\n'), ((92231, 92295), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (92244, 92295), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((92636, 92647), 'time.time', 'time.time', ([], {}), '()\n', (92645, 92647), False, 'import time\n'), ((93661, 93766), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (93680, 93766), True, 'import tccli.format_output as FormatOutput\n'), ((94809, 94873), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (94822, 94873), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((95212, 95223), 'time.time', 'time.time', ([], {}), '()\n', (95221, 95223), False, 'import time\n'), ((96235, 96340), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n 
g_param[OptionsDefine.Filter])\n", (96254, 96340), True, 'import tccli.format_output as FormatOutput\n'), ((97386, 97450), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (97399, 97450), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((97792, 97803), 'time.time', 'time.time', ([], {}), '()\n', (97801, 97803), False, 'import time\n'), ((98818, 98923), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (98837, 98923), True, 'import tccli.format_output as FormatOutput\n'), ((99971, 100035), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (99984, 100035), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((100379, 100390), 'time.time', 'time.time', ([], {}), '()\n', (100388, 100390), False, 'import time\n'), ((101407, 101512), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (101426, 101512), True, 'import tccli.format_output as FormatOutput\n'), ((102562, 102626), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (102575, 102626), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((102972, 102983), 'time.time', 
'time.time', ([], {}), '()\n', (102981, 102983), False, 'import time\n'), ((104002, 104107), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (104021, 104107), True, 'import tccli.format_output as FormatOutput\n'), ((105160, 105224), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (105173, 105224), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((105573, 105584), 'time.time', 'time.time', ([], {}), '()\n', (105582, 105584), False, 'import time\n'), ((106606, 106711), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (106625, 106711), True, 'import tccli.format_output as FormatOutput\n'), ((107753, 107817), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (107766, 107817), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((108155, 108166), 'time.time', 'time.time', ([], {}), '()\n', (108164, 108166), False, 'import time\n'), ((109177, 109282), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (109196, 109282), True, 'import tccli.format_output as FormatOutput\n'), ((110323, 110387), 
'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (110336, 110387), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((110724, 110735), 'time.time', 'time.time', ([], {}), '()\n', (110733, 110735), False, 'import time\n'), ((111745, 111850), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (111764, 111850), True, 'import tccli.format_output as FormatOutput\n'), ((112901, 112965), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (112914, 112965), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((113312, 113323), 'time.time', 'time.time', ([], {}), '()\n', (113321, 113323), False, 'import time\n'), ((114343, 114448), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (114362, 114448), True, 'import tccli.format_output as FormatOutput\n'), ((115497, 115561), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (115510, 115561), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((115906, 115917), 'time.time', 'time.time', ([], {}), '()\n', (115915, 115917), False, 'import time\n'), ((116935, 117040), 'tccli.format_output.output', 
'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (116954, 117040), True, 'import tccli.format_output as FormatOutput\n'), ((118093, 118157), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (118106, 118157), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((118506, 118517), 'time.time', 'time.time', ([], {}), '()\n', (118515, 118517), False, 'import time\n'), ((119539, 119644), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (119558, 119644), True, 'import tccli.format_output as FormatOutput\n'), ((120685, 120749), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (120698, 120749), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((121086, 121097), 'time.time', 'time.time', ([], {}), '()\n', (121095, 121097), False, 'import time\n'), ((122107, 122212), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (122126, 122212), True, 'import tccli.format_output as FormatOutput\n'), ((123265, 123329), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), 
"(httpProfile=http_profile, signMethod='HmacSHA256')\n", (123278, 123329), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((123678, 123689), 'time.time', 'time.time', ([], {}), '()\n', (123687, 123689), False, 'import time\n'), ((124711, 124816), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (124730, 124816), True, 'import tccli.format_output as FormatOutput\n'), ((125858, 125922), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (125871, 125922), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((126260, 126271), 'time.time', 'time.time', ([], {}), '()\n', (126269, 126271), False, 'import time\n'), ((127282, 127387), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (127301, 127387), True, 'import tccli.format_output as FormatOutput\n'), ((128437, 128501), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (128450, 128501), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((128847, 128858), 'time.time', 'time.time', ([], {}), '()\n', (128856, 128858), False, 'import time\n'), ((129877, 129982), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, 
g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (129896, 129982), True, 'import tccli.format_output as FormatOutput\n'), ((131030, 131094), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (131043, 131094), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((131438, 131449), 'time.time', 'time.time', ([], {}), '()\n', (131447, 131449), False, 'import time\n'), ((132466, 132571), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (132485, 132571), True, 'import tccli.format_output as FormatOutput\n'), ((133613, 133677), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (133626, 133677), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((134015, 134026), 'time.time', 'time.time', ([], {}), '()\n', (134024, 134026), False, 'import time\n'), ((135037, 135142), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (135056, 135142), True, 'import tccli.format_output as FormatOutput\n'), ((136190, 136254), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (136203, 136254), False, 'from tencentcloud.common.profile.client_profile import 
ClientProfile\n'), ((136598, 136609), 'time.time', 'time.time', ([], {}), '()\n', (136607, 136609), False, 'import time\n'), ((137626, 137731), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (137645, 137731), True, 'import tccli.format_output as FormatOutput\n'), ((138781, 138845), 'tencentcloud.common.profile.client_profile.ClientProfile', 'ClientProfile', ([], {'httpProfile': 'http_profile', 'signMethod': '"""HmacSHA256"""'}), "(httpProfile=http_profile, signMethod='HmacSHA256')\n", (138794, 138845), False, 'from tencentcloud.common.profile.client_profile import ClientProfile\n'), ((139191, 139202), 'time.time', 'time.time', ([], {}), '()\n', (139200, 139202), False, 'import time\n'), ((140221, 140326), 'tccli.format_output.output', 'FormatOutput.output', (['"""action"""', 'json_obj', 'g_param[OptionsDefine.Output]', 'g_param[OptionsDefine.Filter]'], {}), "('action', json_obj, g_param[OptionsDefine.Output],\n g_param[OptionsDefine.Filter])\n", (140240, 140326), True, 'import tccli.format_output as FormatOutput\n'), ((143579, 143648), 'tccli.utils.Utils.file_existed', 'Utils.file_existed', (['configure_path', "(g_param['profile'] + '.configure')"], {}), "(configure_path, g_param['profile'] + '.configure')\n", (143597, 143648), False, 'from tccli.utils import Utils\n'), ((143680, 143750), 'tccli.utils.Utils.file_existed', 'Utils.file_existed', (['configure_path', "(g_param['profile'] + '.credential')"], {}), "(configure_path, g_param['profile'] + '.credential')\n", (143698, 143750), False, 'from tccli.utils import Utils\n'), ((757, 799), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (789, 799), True, 'import tccli.options_define as OptionsDefine\n'), ((817, 847), 
'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (845, 847), False, 'from tencentcloud.common import credential\n'), ((2062, 2078), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (2072, 2078), False, 'import json\n'), ((2558, 2569), 'time.time', 'time.time', ([], {}), '()\n', (2567, 2569), False, 'import time\n'), ((3057, 3116), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (3067, 3116), False, 'import time\n'), ((3335, 3377), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (3367, 3377), True, 'import tccli.options_define as OptionsDefine\n'), ((3395, 3425), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (3423, 3425), False, 'from tencentcloud.common import credential\n'), ((4641, 4657), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (4651, 4657), False, 'import json\n'), ((5138, 5149), 'time.time', 'time.time', ([], {}), '()\n', (5147, 5149), False, 'import time\n'), ((5637, 5696), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (5647, 5696), False, 'import time\n'), ((5920, 5962), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (5952, 5962), True, 'import tccli.options_define as OptionsDefine\n'), ((5980, 6010), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (6008, 6010), False, 'from tencentcloud.common import credential\n'), ((7231, 7247), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (7241, 7247), False, 'import json\n'), ((7733, 7744), 'time.time', 'time.time', ([], {}), '()\n', (7742, 7744), False, 'import time\n'), ((8232, 8291), 
'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (8242, 8291), False, 'import time\n'), ((8513, 8555), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (8545, 8555), True, 'import tccli.options_define as OptionsDefine\n'), ((8573, 8603), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (8601, 8603), False, 'from tencentcloud.common import credential\n'), ((9822, 9838), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (9832, 9838), False, 'import json\n'), ((10322, 10333), 'time.time', 'time.time', ([], {}), '()\n', (10331, 10333), False, 'import time\n'), ((10821, 10880), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (10831, 10880), False, 'import time\n'), ((11109, 11151), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (11141, 11151), True, 'import tccli.options_define as OptionsDefine\n'), ((11169, 11199), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (11197, 11199), False, 'from tencentcloud.common import credential\n'), ((12425, 12441), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (12435, 12441), False, 'import json\n'), ((12932, 12943), 'time.time', 'time.time', ([], {}), '()\n', (12941, 12943), False, 'import time\n'), ((13431, 13490), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (13441, 13490), False, 'import time\n'), ((13720, 13762), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (13752, 13762), True, 'import 
tccli.options_define as OptionsDefine\n'), ((13780, 13810), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (13808, 13810), False, 'from tencentcloud.common import credential\n'), ((15037, 15053), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (15047, 15053), False, 'import json\n'), ((15545, 15556), 'time.time', 'time.time', ([], {}), '()\n', (15554, 15556), False, 'import time\n'), ((16044, 16103), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (16054, 16103), False, 'import time\n'), ((16329, 16371), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (16361, 16371), True, 'import tccli.options_define as OptionsDefine\n'), ((16389, 16419), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (16417, 16419), False, 'from tencentcloud.common import credential\n'), ((17642, 17658), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (17652, 17658), False, 'import json\n'), ((18146, 18157), 'time.time', 'time.time', ([], {}), '()\n', (18155, 18157), False, 'import time\n'), ((18645, 18704), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (18655, 18704), False, 'import time\n'), ((18920, 18962), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (18952, 18962), True, 'import tccli.options_define as OptionsDefine\n'), ((18980, 19010), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (19008, 19010), False, 'from tencentcloud.common import credential\n'), ((20223, 20239), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (20233, 20239), False, 'import json\n'), 
((20717, 20728), 'time.time', 'time.time', ([], {}), '()\n', (20726, 20728), False, 'import time\n'), ((21216, 21275), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (21226, 21275), False, 'import time\n'), ((21498, 21540), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (21530, 21540), True, 'import tccli.options_define as OptionsDefine\n'), ((21558, 21588), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (21586, 21588), False, 'from tencentcloud.common import credential\n'), ((22808, 22824), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (22818, 22824), False, 'import json\n'), ((23309, 23320), 'time.time', 'time.time', ([], {}), '()\n', (23318, 23320), False, 'import time\n'), ((23808, 23867), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (23818, 23867), False, 'import time\n'), ((24083, 24125), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (24115, 24125), True, 'import tccli.options_define as OptionsDefine\n'), ((24143, 24173), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (24171, 24173), False, 'from tencentcloud.common import credential\n'), ((25386, 25402), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (25396, 25402), False, 'import json\n'), ((25880, 25891), 'time.time', 'time.time', ([], {}), '()\n', (25889, 25891), False, 'import time\n'), ((26379, 26438), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (26389, 26438), False, 'import time\n'), ((26670, 26712), 
'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (26702, 26712), True, 'import tccli.options_define as OptionsDefine\n'), ((26730, 26760), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (26758, 26760), False, 'from tencentcloud.common import credential\n'), ((27989, 28005), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (27999, 28005), False, 'import json\n'), ((28499, 28510), 'time.time', 'time.time', ([], {}), '()\n', (28508, 28510), False, 'import time\n'), ((28998, 29057), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (29008, 29057), False, 'import time\n'), ((29272, 29314), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (29304, 29314), True, 'import tccli.options_define as OptionsDefine\n'), ((29332, 29362), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (29360, 29362), False, 'from tencentcloud.common import credential\n'), ((30574, 30590), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (30584, 30590), False, 'import json\n'), ((31067, 31078), 'time.time', 'time.time', ([], {}), '()\n', (31076, 31078), False, 'import time\n'), ((31566, 31625), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (31576, 31625), False, 'import time\n'), ((31842, 31884), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (31874, 31884), True, 'import tccli.options_define as OptionsDefine\n'), ((31902, 31932), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (31930, 31932), False, 'from 
tencentcloud.common import credential\n'), ((33146, 33162), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (33156, 33162), False, 'import json\n'), ((33641, 33652), 'time.time', 'time.time', ([], {}), '()\n', (33650, 33652), False, 'import time\n'), ((34140, 34199), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (34150, 34199), False, 'import time\n'), ((34420, 34462), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (34452, 34462), True, 'import tccli.options_define as OptionsDefine\n'), ((34480, 34510), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (34508, 34510), False, 'from tencentcloud.common import credential\n'), ((35728, 35744), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (35738, 35744), False, 'import json\n'), ((36227, 36238), 'time.time', 'time.time', ([], {}), '()\n', (36236, 36238), False, 'import time\n'), ((36726, 36785), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (36736, 36785), False, 'import time\n'), ((37004, 37046), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (37036, 37046), True, 'import tccli.options_define as OptionsDefine\n'), ((37064, 37094), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (37092, 37094), False, 'from tencentcloud.common import credential\n'), ((38310, 38326), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (38320, 38326), False, 'import json\n'), ((38807, 38818), 'time.time', 'time.time', ([], {}), '()\n', (38816, 38818), False, 'import time\n'), ((39306, 39365), 'time.sleep', 'time.sleep', 
(["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (39316, 39365), False, 'import time\n'), ((39581, 39623), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (39613, 39623), True, 'import tccli.options_define as OptionsDefine\n'), ((39641, 39671), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (39669, 39671), False, 'from tencentcloud.common import credential\n'), ((40884, 40900), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (40894, 40900), False, 'import json\n'), ((41378, 41389), 'time.time', 'time.time', ([], {}), '()\n', (41387, 41389), False, 'import time\n'), ((41877, 41936), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (41887, 41936), False, 'import time\n'), ((42156, 42198), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (42188, 42198), True, 'import tccli.options_define as OptionsDefine\n'), ((42216, 42246), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (42244, 42246), False, 'from tencentcloud.common import credential\n'), ((43463, 43479), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (43473, 43479), False, 'import json\n'), ((43961, 43972), 'time.time', 'time.time', ([], {}), '()\n', (43970, 43972), False, 'import time\n'), ((44460, 44519), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (44470, 44519), False, 'import time\n'), ((44746, 44788), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (44778, 44788), True, 'import tccli.options_define as 
OptionsDefine\n'), ((44806, 44836), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (44834, 44836), False, 'from tencentcloud.common import credential\n'), ((46060, 46076), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (46070, 46076), False, 'import json\n'), ((46565, 46576), 'time.time', 'time.time', ([], {}), '()\n', (46574, 46576), False, 'import time\n'), ((47064, 47123), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (47074, 47123), False, 'import time\n'), ((47342, 47384), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (47374, 47384), True, 'import tccli.options_define as OptionsDefine\n'), ((47402, 47432), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (47430, 47432), False, 'from tencentcloud.common import credential\n'), ((48648, 48664), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (48658, 48664), False, 'import json\n'), ((49145, 49156), 'time.time', 'time.time', ([], {}), '()\n', (49154, 49156), False, 'import time\n'), ((49644, 49703), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (49654, 49703), False, 'import time\n'), ((49929, 49971), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (49961, 49971), True, 'import tccli.options_define as OptionsDefine\n'), ((49989, 50019), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (50017, 50019), False, 'from tencentcloud.common import credential\n'), ((51242, 51258), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (51252, 51258), False, 'import json\n'), ((51746, 51757), 'time.time', 
'time.time', ([], {}), '()\n', (51755, 51757), False, 'import time\n'), ((52245, 52304), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (52255, 52304), False, 'import time\n'), ((52533, 52575), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (52565, 52575), True, 'import tccli.options_define as OptionsDefine\n'), ((52593, 52623), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (52621, 52623), False, 'from tencentcloud.common import credential\n'), ((53849, 53865), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (53859, 53865), False, 'import json\n'), ((54356, 54367), 'time.time', 'time.time', ([], {}), '()\n', (54365, 54367), False, 'import time\n'), ((54855, 54914), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (54865, 54914), False, 'import time\n'), ((55128, 55170), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (55160, 55170), True, 'import tccli.options_define as OptionsDefine\n'), ((55188, 55218), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (55216, 55218), False, 'from tencentcloud.common import credential\n'), ((56429, 56445), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (56439, 56445), False, 'import json\n'), ((56921, 56932), 'time.time', 'time.time', ([], {}), '()\n', (56930, 56932), False, 'import time\n'), ((57420, 57479), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (57430, 57479), False, 'import time\n'), ((57704, 57746), 'tccli.options_define.UseCVMRole.replace', 
'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (57736, 57746), True, 'import tccli.options_define as OptionsDefine\n'), ((57764, 57794), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (57792, 57794), False, 'from tencentcloud.common import credential\n'), ((59016, 59032), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (59026, 59032), False, 'import json\n'), ((59519, 59530), 'time.time', 'time.time', ([], {}), '()\n', (59528, 59530), False, 'import time\n'), ((60018, 60077), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (60028, 60077), False, 'import time\n'), ((60294, 60336), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (60326, 60336), True, 'import tccli.options_define as OptionsDefine\n'), ((60354, 60384), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (60382, 60384), False, 'from tencentcloud.common import credential\n'), ((61598, 61614), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (61608, 61614), False, 'import json\n'), ((62093, 62104), 'time.time', 'time.time', ([], {}), '()\n', (62102, 62104), False, 'import time\n'), ((62592, 62651), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (62602, 62651), False, 'import time\n'), ((62881, 62923), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (62913, 62923), True, 'import tccli.options_define as OptionsDefine\n'), ((62941, 62971), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (62969, 62971), False, 'from tencentcloud.common import credential\n'), ((64198, 
64214), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (64208, 64214), False, 'import json\n'), ((64706, 64717), 'time.time', 'time.time', ([], {}), '()\n', (64715, 64717), False, 'import time\n'), ((65205, 65264), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (65215, 65264), False, 'import time\n'), ((65492, 65534), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (65524, 65534), True, 'import tccli.options_define as OptionsDefine\n'), ((65552, 65582), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (65580, 65582), False, 'from tencentcloud.common import credential\n'), ((66807, 66823), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (66817, 66823), False, 'import json\n'), ((67313, 67324), 'time.time', 'time.time', ([], {}), '()\n', (67322, 67324), False, 'import time\n'), ((67812, 67871), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (67822, 67871), False, 'import time\n'), ((68094, 68136), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (68126, 68136), True, 'import tccli.options_define as OptionsDefine\n'), ((68154, 68184), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (68182, 68184), False, 'from tencentcloud.common import credential\n'), ((69404, 69420), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (69414, 69420), False, 'import json\n'), ((69905, 69916), 'time.time', 'time.time', ([], {}), '()\n', (69914, 69916), False, 'import time\n'), ((70404, 70463), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), 
"(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (70414, 70463), False, 'import time\n'), ((70686, 70728), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (70718, 70728), True, 'import tccli.options_define as OptionsDefine\n'), ((70746, 70776), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (70774, 70776), False, 'from tencentcloud.common import credential\n'), ((71996, 72012), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (72006, 72012), False, 'import json\n'), ((72497, 72508), 'time.time', 'time.time', ([], {}), '()\n', (72506, 72508), False, 'import time\n'), ((72996, 73055), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (73006, 73055), False, 'import time\n'), ((73272, 73314), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (73304, 73314), True, 'import tccli.options_define as OptionsDefine\n'), ((73332, 73362), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (73360, 73362), False, 'from tencentcloud.common import credential\n'), ((74576, 74592), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (74586, 74592), False, 'import json\n'), ((75071, 75082), 'time.time', 'time.time', ([], {}), '()\n', (75080, 75082), False, 'import time\n'), ((75570, 75629), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (75580, 75629), False, 'import time\n'), ((75847, 75889), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (75879, 75889), True, 'import tccli.options_define as OptionsDefine\n'), ((75907, 75937), 
'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (75935, 75937), False, 'from tencentcloud.common import credential\n'), ((77152, 77168), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (77162, 77168), False, 'import json\n'), ((77648, 77659), 'time.time', 'time.time', ([], {}), '()\n', (77657, 77659), False, 'import time\n'), ((78147, 78206), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (78157, 78206), False, 'import time\n'), ((78425, 78467), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (78457, 78467), True, 'import tccli.options_define as OptionsDefine\n'), ((78485, 78515), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (78513, 78515), False, 'from tencentcloud.common import credential\n'), ((79731, 79747), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (79741, 79747), False, 'import json\n'), ((80228, 80239), 'time.time', 'time.time', ([], {}), '()\n', (80237, 80239), False, 'import time\n'), ((80727, 80786), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (80737, 80786), False, 'import time\n'), ((81003, 81045), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (81035, 81045), True, 'import tccli.options_define as OptionsDefine\n'), ((81063, 81093), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (81091, 81093), False, 'from tencentcloud.common import credential\n'), ((82307, 82323), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (82317, 82323), False, 'import json\n'), ((82802, 82813), 'time.time', 'time.time', ([], {}), '()\n', 
(82811, 82813), False, 'import time\n'), ((83301, 83360), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (83311, 83360), False, 'import time\n'), ((83580, 83622), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (83612, 83622), True, 'import tccli.options_define as OptionsDefine\n'), ((83640, 83670), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (83668, 83670), False, 'from tencentcloud.common import credential\n'), ((84887, 84903), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (84897, 84903), False, 'import json\n'), ((85385, 85396), 'time.time', 'time.time', ([], {}), '()\n', (85394, 85396), False, 'import time\n'), ((85884, 85943), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (85894, 85943), False, 'import time\n'), ((86157, 86199), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (86189, 86199), True, 'import tccli.options_define as OptionsDefine\n'), ((86217, 86247), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (86245, 86247), False, 'from tencentcloud.common import credential\n'), ((87458, 87474), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (87468, 87474), False, 'import json\n'), ((87950, 87961), 'time.time', 'time.time', ([], {}), '()\n', (87959, 87961), False, 'import time\n'), ((88449, 88508), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (88459, 88508), False, 'import time\n'), ((88723, 88765), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', 
'"""_"""'], {}), "('-', '_')\n", (88755, 88765), True, 'import tccli.options_define as OptionsDefine\n'), ((88783, 88813), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (88811, 88813), False, 'from tencentcloud.common import credential\n'), ((90025, 90041), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (90035, 90041), False, 'import json\n'), ((90518, 90529), 'time.time', 'time.time', ([], {}), '()\n', (90527, 90529), False, 'import time\n'), ((91017, 91076), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (91027, 91076), False, 'import time\n'), ((91295, 91337), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (91327, 91337), True, 'import tccli.options_define as OptionsDefine\n'), ((91355, 91385), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (91383, 91385), False, 'from tencentcloud.common import credential\n'), ((92601, 92617), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (92611, 92617), False, 'import json\n'), ((93098, 93109), 'time.time', 'time.time', ([], {}), '()\n', (93107, 93109), False, 'import time\n'), ((93597, 93656), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (93607, 93656), False, 'import time\n'), ((93873, 93915), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (93905, 93915), True, 'import tccli.options_define as OptionsDefine\n'), ((93933, 93963), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (93961, 93963), False, 'from tencentcloud.common import credential\n'), ((95177, 95193), 'json.dumps', 'json.dumps', (['args'], 
{}), '(args)\n', (95187, 95193), False, 'import json\n'), ((95672, 95683), 'time.time', 'time.time', ([], {}), '()\n', (95681, 95683), False, 'import time\n'), ((96171, 96230), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (96181, 96230), False, 'import time\n'), ((96450, 96492), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (96482, 96492), True, 'import tccli.options_define as OptionsDefine\n'), ((96510, 96540), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (96538, 96540), False, 'from tencentcloud.common import credential\n'), ((97757, 97773), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (97767, 97773), False, 'import json\n'), ((98255, 98266), 'time.time', 'time.time', ([], {}), '()\n', (98264, 98266), False, 'import time\n'), ((98754, 98813), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (98764, 98813), False, 'import time\n'), ((99035, 99077), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (99067, 99077), True, 'import tccli.options_define as OptionsDefine\n'), ((99095, 99125), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (99123, 99125), False, 'from tencentcloud.common import credential\n'), ((100344, 100360), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (100354, 100360), False, 'import json\n'), ((100844, 100855), 'time.time', 'time.time', ([], {}), '()\n', (100853, 100855), False, 'import time\n'), ((101343, 101402), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (101353, 101402), False, 
'import time\n'), ((101626, 101668), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (101658, 101668), True, 'import tccli.options_define as OptionsDefine\n'), ((101686, 101716), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (101714, 101716), False, 'from tencentcloud.common import credential\n'), ((102937, 102953), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (102947, 102953), False, 'import json\n'), ((103439, 103450), 'time.time', 'time.time', ([], {}), '()\n', (103448, 103450), False, 'import time\n'), ((103938, 103997), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (103948, 103997), False, 'import time\n'), ((104224, 104266), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (104256, 104266), True, 'import tccli.options_define as OptionsDefine\n'), ((104284, 104314), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (104312, 104314), False, 'from tencentcloud.common import credential\n'), ((105538, 105554), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (105548, 105554), False, 'import json\n'), ((106043, 106054), 'time.time', 'time.time', ([], {}), '()\n', (106052, 106054), False, 'import time\n'), ((106542, 106601), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (106552, 106601), False, 'import time\n'), ((106817, 106859), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (106849, 106859), True, 'import tccli.options_define as OptionsDefine\n'), ((106877, 106907), 'tencentcloud.common.credential.CVMRoleCredential', 
'credential.CVMRoleCredential', ([], {}), '()\n', (106905, 106907), False, 'from tencentcloud.common import credential\n'), ((108120, 108136), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (108130, 108136), False, 'import json\n'), ((108614, 108625), 'time.time', 'time.time', ([], {}), '()\n', (108623, 108625), False, 'import time\n'), ((109113, 109172), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (109123, 109172), False, 'import time\n'), ((109387, 109429), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (109419, 109429), True, 'import tccli.options_define as OptionsDefine\n'), ((109447, 109477), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (109475, 109477), False, 'from tencentcloud.common import credential\n'), ((110689, 110705), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (110699, 110705), False, 'import json\n'), ((111182, 111193), 'time.time', 'time.time', ([], {}), '()\n', (111191, 111193), False, 'import time\n'), ((111681, 111740), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (111691, 111740), False, 'import time\n'), ((111965, 112007), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (111997, 112007), True, 'import tccli.options_define as OptionsDefine\n'), ((112025, 112055), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (112053, 112055), False, 'from tencentcloud.common import credential\n'), ((113277, 113293), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (113287, 113293), False, 'import json\n'), ((113780, 113791), 'time.time', 'time.time', ([], {}), '()\n', (113789, 
113791), False, 'import time\n'), ((114279, 114338), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (114289, 114338), False, 'import time\n'), ((114561, 114603), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (114593, 114603), True, 'import tccli.options_define as OptionsDefine\n'), ((114621, 114651), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (114649, 114651), False, 'from tencentcloud.common import credential\n'), ((115871, 115887), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (115881, 115887), False, 'import json\n'), ((116372, 116383), 'time.time', 'time.time', ([], {}), '()\n', (116381, 116383), False, 'import time\n'), ((116871, 116930), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (116881, 116930), False, 'import time\n'), ((117157, 117199), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (117189, 117199), True, 'import tccli.options_define as OptionsDefine\n'), ((117217, 117247), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (117245, 117247), False, 'from tencentcloud.common import credential\n'), ((118471, 118487), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (118481, 118487), False, 'import json\n'), ((118976, 118987), 'time.time', 'time.time', ([], {}), '()\n', (118985, 118987), False, 'import time\n'), ((119475, 119534), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (119485, 119534), False, 'import time\n'), ((119749, 119791), 'tccli.options_define.UseCVMRole.replace', 
'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (119781, 119791), True, 'import tccli.options_define as OptionsDefine\n'), ((119809, 119839), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (119837, 119839), False, 'from tencentcloud.common import credential\n'), ((121051, 121067), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (121061, 121067), False, 'import json\n'), ((121544, 121555), 'time.time', 'time.time', ([], {}), '()\n', (121553, 121555), False, 'import time\n'), ((122043, 122102), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (122053, 122102), False, 'import time\n'), ((122329, 122371), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (122361, 122371), True, 'import tccli.options_define as OptionsDefine\n'), ((122389, 122419), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (122417, 122419), False, 'from tencentcloud.common import credential\n'), ((123643, 123659), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (123653, 123659), False, 'import json\n'), ((124148, 124159), 'time.time', 'time.time', ([], {}), '()\n', (124157, 124159), False, 'import time\n'), ((124647, 124706), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (124657, 124706), False, 'import time\n'), ((124922, 124964), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (124954, 124964), True, 'import tccli.options_define as OptionsDefine\n'), ((124982, 125012), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (125010, 125012), False, 'from 
tencentcloud.common import credential\n'), ((126225, 126241), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (126235, 126241), False, 'import json\n'), ((126719, 126730), 'time.time', 'time.time', ([], {}), '()\n', (126728, 126730), False, 'import time\n'), ((127218, 127277), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (127228, 127277), False, 'import time\n'), ((127501, 127543), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (127533, 127543), True, 'import tccli.options_define as OptionsDefine\n'), ((127561, 127591), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (127589, 127591), False, 'from tencentcloud.common import credential\n'), ((128812, 128828), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (128822, 128828), False, 'import json\n'), ((129314, 129325), 'time.time', 'time.time', ([], {}), '()\n', (129323, 129325), False, 'import time\n'), ((129813, 129872), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (129823, 129872), False, 'import time\n'), ((130094, 130136), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (130126, 130136), True, 'import tccli.options_define as OptionsDefine\n'), ((130154, 130184), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (130182, 130184), False, 'from tencentcloud.common import credential\n'), ((131403, 131419), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (131413, 131419), False, 'import json\n'), ((131903, 131914), 'time.time', 'time.time', ([], {}), '()\n', (131912, 131914), False, 'import time\n'), ((132402, 132461), 'time.sleep', 'time.sleep', 
(["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (132412, 132461), False, 'import time\n'), ((132677, 132719), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (132709, 132719), True, 'import tccli.options_define as OptionsDefine\n'), ((132737, 132767), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (132765, 132767), False, 'from tencentcloud.common import credential\n'), ((133980, 133996), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (133990, 133996), False, 'import json\n'), ((134474, 134485), 'time.time', 'time.time', ([], {}), '()\n', (134483, 134485), False, 'import time\n'), ((134973, 135032), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (134983, 135032), False, 'import time\n'), ((135254, 135296), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (135286, 135296), True, 'import tccli.options_define as OptionsDefine\n'), ((135314, 135344), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (135342, 135344), False, 'from tencentcloud.common import credential\n'), ((136563, 136579), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (136573, 136579), False, 'import json\n'), ((137063, 137074), 'time.time', 'time.time', ([], {}), '()\n', (137072, 137074), False, 'import time\n'), ((137562, 137621), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (137572, 137621), False, 'import time\n'), ((137845, 137887), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (137877, 
137887), True, 'import tccli.options_define as OptionsDefine\n'), ((137905, 137935), 'tencentcloud.common.credential.CVMRoleCredential', 'credential.CVMRoleCredential', ([], {}), '()\n', (137933, 137935), False, 'from tencentcloud.common import credential\n'), ((139156, 139172), 'json.dumps', 'json.dumps', (['args'], {}), '(args)\n', (139166, 139172), False, 'import json\n'), ((139658, 139669), 'time.time', 'time.time', ([], {}), '()\n', (139667, 139669), False, 'import time\n'), ((140157, 140216), 'time.sleep', 'time.sleep', (["g_param['OptionsDefine.WaiterInfo']['interval']"], {}), "(g_param['OptionsDefine.WaiterInfo']['interval'])\n", (140167, 140216), False, 'import time\n'), ((143513, 143536), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (143531, 143536), False, 'import os\n'), ((143818, 143848), 'tccli.utils.Utils.load_json_msg', 'Utils.load_json_msg', (['conf_path'], {}), '(conf_path)\n', (143837, 143848), False, 'from tccli.utils import Utils\n'), ((143886, 143916), 'tccli.utils.Utils.load_json_msg', 'Utils.load_json_msg', (['cred_path'], {}), '(cred_path)\n', (143905, 143916), False, 'from tccli.utils import Utils\n'), ((143996, 144130), 'tccli.exceptions.ConfigurationError', 'ConfigurationError', (["('file: %s or %s is not json format' % (g_param['profile'] + '.configure', \n g_param['profile'] + '.credential'))"], {}), "('file: %s or %s is not json format' % (g_param['profile'\n ] + '.configure', g_param['profile'] + '.credential'))\n", (144014, 144130), False, 'from tccli.exceptions import ConfigurationError, ClientError, ParamError\n'), ((144636, 144676), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_REGION'], {}), '(OptionsDefine.ENV_REGION)\n', (144650, 144676), False, 'import os\n'), ((1252, 1375), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], 
g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (1273, 1375), False, 'from tencentcloud.common import credential\n'), ((2244, 2262), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (2254, 2262), False, 'import json\n'), ((3830, 3953), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (3851, 3953), False, 'from tencentcloud.common import credential\n'), ((4824, 4842), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (4834, 4842), False, 'import json\n'), ((6415, 6538), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (6436, 6538), False, 'from tencentcloud.common import credential\n'), ((7419, 7437), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (7429, 7437), False, 'import json\n'), ((9008, 9131), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (9029, 9131), False, 'from tencentcloud.common import credential\n'), ((10008, 10026), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (10018, 10026), False, 'import json\n'), ((11604, 11727), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (11625, 11727), False, 'from tencentcloud.common import credential\n'), ((12618, 12636), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (12628, 12636), False, 'import json\n'), ((14215, 14338), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (14236, 14338), False, 'from tencentcloud.common import credential\n'), ((15231, 15249), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (15241, 15249), False, 'import json\n'), ((16824, 16947), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (16845, 16947), False, 'from tencentcloud.common import credential\n'), ((17832, 17850), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (17842, 17850), False, 'import json\n'), ((19415, 19538), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (19436, 19538), False, 'from tencentcloud.common import credential\n'), ((20403, 20421), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (20413, 20421), False, 'import json\n'), ((21993, 22116), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (22014, 22116), False, 'from tencentcloud.common import credential\n'), ((22995, 23013), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (23005, 23013), False, 'import json\n'), ((24578, 24701), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (24599, 24701), False, 'from tencentcloud.common import credential\n'), ((25566, 25584), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (25576, 25584), False, 'import json\n'), ((27165, 27288), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (27186, 27288), False, 'from tencentcloud.common import credential\n'), ((28185, 28203), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (28195, 28203), False, 'import json\n'), ((29767, 29890), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (29788, 29890), False, 'from tencentcloud.common import credential\n'), ((30753, 30771), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (30763, 30771), False, 'import json\n'), ((32337, 32460), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (32358, 32460), False, 'from tencentcloud.common import credential\n'), ((33327, 33345), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (33337, 33345), False, 'import json\n'), ((34915, 35038), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (34936, 35038), False, 'from tencentcloud.common import credential\n'), ((35913, 35931), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (35923, 35931), False, 'import json\n'), ((37499, 37622), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (37520, 37622), False, 'from tencentcloud.common import credential\n'), ((38493, 38511), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (38503, 38511), False, 'import json\n'), ((40076, 40199), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (40097, 40199), False, 'from tencentcloud.common import credential\n'), ((41064, 41082), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (41074, 41082), False, 'import json\n'), ((42651, 42774), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (42672, 42774), False, 'from tencentcloud.common import credential\n'), ((43647, 43665), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (43657, 43665), False, 'import json\n'), ((45241, 45364), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (45262, 45364), False, 'from tencentcloud.common import credential\n'), ((46251, 46269), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (46261, 46269), False, 'import json\n'), ((47837, 47960), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (47858, 47960), False, 'from tencentcloud.common import credential\n'), ((48831, 48849), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (48841, 48849), False, 'import json\n'), ((50424, 50547), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (50445, 50547), False, 'from tencentcloud.common import credential\n'), ((51432, 51450), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (51442, 51450), False, 'import json\n'), ((53028, 53151), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (53049, 53151), False, 'from tencentcloud.common import credential\n'), ((54042, 54060), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (54052, 54060), False, 'import json\n'), ((55623, 55746), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (55644, 55746), False, 'from tencentcloud.common import credential\n'), ((56607, 56625), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (56617, 56625), False, 'import json\n'), ((58199, 58322), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (58220, 58322), False, 'from tencentcloud.common import credential\n'), ((59205, 59223), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (59215, 59223), False, 'import json\n'), ((60789, 60912), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (60810, 60912), False, 'from tencentcloud.common import credential\n'), ((61779, 61797), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (61789, 61797), False, 'import json\n'), ((63376, 63499), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (63397, 63499), False, 'from tencentcloud.common import credential\n'), ((64392, 64410), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (64402, 64410), False, 'import json\n'), ((65987, 66110), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (66008, 66110), False, 'from tencentcloud.common import credential\n'), ((66999, 67017), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (67009, 67017), False, 'import json\n'), ((68589, 68712), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (68610, 68712), False, 'from tencentcloud.common import credential\n'), ((69591, 69609), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (69601, 69609), False, 'import json\n'), ((71181, 71304), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (71202, 71304), False, 'from tencentcloud.common import credential\n'), ((72183, 72201), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (72193, 72201), False, 'import json\n'), ((73767, 73890), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (73788, 73890), False, 'from tencentcloud.common import credential\n'), ((74757, 74775), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (74767, 74775), False, 'import json\n'), ((76342, 76465), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (76363, 76465), False, 'from tencentcloud.common import credential\n'), ((77334, 77352), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (77344, 77352), False, 'import json\n'), ((78920, 79043), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (78941, 79043), False, 'from tencentcloud.common import credential\n'), ((79914, 79932), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (79924, 79932), False, 'import json\n'), ((81498, 81621), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (81519, 81621), False, 'from tencentcloud.common import credential\n'), ((82488, 82506), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (82498, 82506), False, 'import json\n'), ((84075, 84198), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (84096, 84198), False, 'from tencentcloud.common import credential\n'), ((85071, 85089), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (85081, 85089), False, 'import json\n'), ((86652, 86775), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (86673, 86775), False, 'from tencentcloud.common import credential\n'), ((87636, 87654), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (87646, 87654), False, 'import json\n'), ((89218, 89341), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (89239, 89341), False, 'from tencentcloud.common import credential\n'), ((90204, 90222), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (90214, 90222), False, 'import json\n'), ((91790, 91913), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (91811, 91913), False, 'from tencentcloud.common import credential\n'), ((92784, 92802), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (92794, 92802), False, 'import json\n'), ((94368, 94491), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (94389, 94491), False, 'from tencentcloud.common import credential\n'), ((95358, 95376), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (95368, 95376), False, 'import json\n'), ((96945, 97068), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (96966, 97068), False, 'from tencentcloud.common import credential\n'), ((97941, 97959), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (97951, 97959), False, 'import json\n'), ((99530, 99653), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (99551, 99653), False, 'from tencentcloud.common import credential\n'), ((100530, 100548), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (100540, 100548), False, 'import json\n'), ((102121, 102244), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (102142, 102244), False, 'from tencentcloud.common import credential\n'), ((103125, 103143), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (103135, 103143), False, 'import json\n'), ((104719, 104842), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], 
g_param[OptionsDefine.Token])\n', (104740, 104842), False, 'from tencentcloud.common import credential\n'), ((105729, 105747), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (105739, 105747), False, 'import json\n'), ((107312, 107435), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (107333, 107435), False, 'from tencentcloud.common import credential\n'), ((108300, 108318), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (108310, 108318), False, 'import json\n'), ((109882, 110005), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (109903, 110005), False, 'from tencentcloud.common import credential\n'), ((110868, 110886), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (110878, 110886), False, 'import json\n'), ((112460, 112583), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (112481, 112583), False, 'from tencentcloud.common import credential\n'), ((113466, 113484), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (113476, 113484), False, 'import json\n'), ((115056, 115179), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n 
OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (115077, 115179), False, 'from tencentcloud.common import credential\n'), ((116058, 116076), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (116068, 116076), False, 'import json\n'), ((117652, 117775), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (117673, 117775), False, 'from tencentcloud.common import credential\n'), ((118662, 118680), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (118672, 118680), False, 'import json\n'), ((120244, 120367), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (120265, 120367), False, 'from tencentcloud.common import credential\n'), ((121230, 121248), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (121240, 121248), False, 'import json\n'), ((122824, 122947), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (122845, 122947), False, 'from tencentcloud.common import credential\n'), ((123834, 123852), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (123844, 123852), False, 'import json\n'), ((125417, 125540), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], 
g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (125438, 125540), False, 'from tencentcloud.common import credential\n'), ((126405, 126423), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (126415, 126423), False, 'import json\n'), ((127996, 128119), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (128017, 128119), False, 'from tencentcloud.common import credential\n'), ((129000, 129018), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (129010, 129018), False, 'import json\n'), ((130589, 130712), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (130610, 130712), False, 'from tencentcloud.common import credential\n'), ((131589, 131607), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (131599, 131607), False, 'import json\n'), ((133172, 133295), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (133193, 133295), False, 'from tencentcloud.common import credential\n'), ((134160, 134178), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (134170, 134178), False, 'import json\n'), ((135749, 135872), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), 
'(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (135770, 135872), False, 'from tencentcloud.common import credential\n'), ((136749, 136767), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (136759, 136767), False, 'import json\n'), ((138340, 138463), 'tencentcloud.common.credential.Credential', 'credential.Credential', (['g_param[OptionsDefine.SecretId]', 'g_param[OptionsDefine.SecretKey]', 'g_param[OptionsDefine.Token]'], {}), '(g_param[OptionsDefine.SecretId], g_param[\n OptionsDefine.SecretKey], g_param[OptionsDefine.Token])\n', (138361, 138463), False, 'from tencentcloud.common import credential\n'), ((139344, 139362), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (139354, 139362), False, 'import json\n'), ((144274, 144317), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_SECRET_ID'], {}), '(OptionsDefine.ENV_SECRET_ID)\n', (144288, 144317), False, 'import os\n'), ((144322, 144366), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_SECRET_KEY'], {}), '(OptionsDefine.ENV_SECRET_KEY)\n', (144336, 144366), False, 'import os\n'), ((144411, 144454), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_SECRET_ID'], {}), '(OptionsDefine.ENV_SECRET_ID)\n', (144425, 144454), False, 'import os\n'), ((144499, 144543), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_SECRET_KEY'], {}), '(OptionsDefine.ENV_SECRET_KEY)\n', (144513, 144543), False, 'import os\n'), ((144584, 144623), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_TOKEN'], {}), '(OptionsDefine.ENV_TOKEN)\n', (144598, 144623), False, 'import os\n'), ((144719, 144759), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_REGION'], {}), '(OptionsDefine.ENV_REGION)\n', (144733, 144759), False, 'import os\n'), ((144772, 144814), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_ROLE_ARN'], {}), '(OptionsDefine.ENV_ROLE_ARN)\n', (144786, 144814), False, 'import os\n'), ((144819, 
144870), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_ROLE_SESSION_NAME'], {}), '(OptionsDefine.ENV_ROLE_SESSION_NAME)\n', (144833, 144870), False, 'import os\n'), ((144914, 144956), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_ROLE_ARN'], {}), '(OptionsDefine.ENV_ROLE_ARN)\n', (144928, 144956), False, 'import os\n'), ((145007, 145058), 'os.environ.get', 'os.environ.get', (['OptionsDefine.ENV_ROLE_SESSION_NAME'], {}), '(OptionsDefine.ENV_ROLE_SESSION_NAME)\n', (145021, 145058), False, 'import os\n'), ((865, 904), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (894, 904), True, 'import tccli.options_define as OptionsDefine\n'), ((918, 965), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (955, 965), True, 'import tccli.options_define as OptionsDefine\n'), ((1629, 1671), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (1661, 1671), True, 'import tccli.options_define as OptionsDefine\n'), ((2413, 2474), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (2419, 2474), False, 'from jmespath import search\n'), ((3443, 3482), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (3472, 3482), True, 'import tccli.options_define as OptionsDefine\n'), ((3496, 3543), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (3533, 3543), True, 'import tccli.options_define as OptionsDefine\n'), ((4207, 4249), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (4239, 4249), True, 'import 
tccli.options_define as OptionsDefine\n'), ((4993, 5054), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (4999, 5054), False, 'from jmespath import search\n'), ((6028, 6067), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (6057, 6067), True, 'import tccli.options_define as OptionsDefine\n'), ((6081, 6128), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (6118, 6128), True, 'import tccli.options_define as OptionsDefine\n'), ((6792, 6834), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (6824, 6834), True, 'import tccli.options_define as OptionsDefine\n'), ((7588, 7649), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (7594, 7649), False, 'from jmespath import search\n'), ((8621, 8660), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (8650, 8660), True, 'import tccli.options_define as OptionsDefine\n'), ((8674, 8721), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (8711, 8721), True, 'import tccli.options_define as OptionsDefine\n'), ((9385, 9427), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (9417, 9427), True, 'import tccli.options_define as OptionsDefine\n'), ((10177, 10238), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (10183, 10238), False, 'from jmespath import 
search\n'), ((11217, 11256), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (11246, 11256), True, 'import tccli.options_define as OptionsDefine\n'), ((11270, 11317), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (11307, 11317), True, 'import tccli.options_define as OptionsDefine\n'), ((11981, 12023), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (12013, 12023), True, 'import tccli.options_define as OptionsDefine\n'), ((12787, 12848), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (12793, 12848), False, 'from jmespath import search\n'), ((13828, 13867), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (13857, 13867), True, 'import tccli.options_define as OptionsDefine\n'), ((13881, 13928), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (13918, 13928), True, 'import tccli.options_define as OptionsDefine\n'), ((14592, 14634), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (14624, 14634), True, 'import tccli.options_define as OptionsDefine\n'), ((15400, 15461), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (15406, 15461), False, 'from jmespath import search\n'), ((16437, 16476), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (16466, 16476), True, 'import tccli.options_define as OptionsDefine\n'), ((16490, 16537), 
'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (16527, 16537), True, 'import tccli.options_define as OptionsDefine\n'), ((17201, 17243), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (17233, 17243), True, 'import tccli.options_define as OptionsDefine\n'), ((18001, 18062), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (18007, 18062), False, 'from jmespath import search\n'), ((19028, 19067), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (19057, 19067), True, 'import tccli.options_define as OptionsDefine\n'), ((19081, 19128), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (19118, 19128), True, 'import tccli.options_define as OptionsDefine\n'), ((19792, 19834), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (19824, 19834), True, 'import tccli.options_define as OptionsDefine\n'), ((20572, 20633), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (20578, 20633), False, 'from jmespath import search\n'), ((21606, 21645), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (21635, 21645), True, 'import tccli.options_define as OptionsDefine\n'), ((21659, 21706), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (21696, 21706), True, 'import tccli.options_define as OptionsDefine\n'), ((22370, 22412), 
'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (22402, 22412), True, 'import tccli.options_define as OptionsDefine\n'), ((23164, 23225), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (23170, 23225), False, 'from jmespath import search\n'), ((24191, 24230), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (24220, 24230), True, 'import tccli.options_define as OptionsDefine\n'), ((24244, 24291), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (24281, 24291), True, 'import tccli.options_define as OptionsDefine\n'), ((24955, 24997), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (24987, 24997), True, 'import tccli.options_define as OptionsDefine\n'), ((25735, 25796), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (25741, 25796), False, 'from jmespath import search\n'), ((26778, 26817), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (26807, 26817), True, 'import tccli.options_define as OptionsDefine\n'), ((26831, 26878), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (26868, 26878), True, 'import tccli.options_define as OptionsDefine\n'), ((27542, 27584), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (27574, 27584), True, 'import tccli.options_define as OptionsDefine\n'), ((28354, 28415), 'jmespath.search', 
'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (28360, 28415), False, 'from jmespath import search\n'), ((29380, 29419), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (29409, 29419), True, 'import tccli.options_define as OptionsDefine\n'), ((29433, 29480), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (29470, 29480), True, 'import tccli.options_define as OptionsDefine\n'), ((30144, 30186), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (30176, 30186), True, 'import tccli.options_define as OptionsDefine\n'), ((30922, 30983), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (30928, 30983), False, 'from jmespath import search\n'), ((31950, 31989), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (31979, 31989), True, 'import tccli.options_define as OptionsDefine\n'), ((32003, 32050), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (32040, 32050), True, 'import tccli.options_define as OptionsDefine\n'), ((32714, 32756), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (32746, 32756), True, 'import tccli.options_define as OptionsDefine\n'), ((33496, 33557), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (33502, 33557), False, 'from jmespath import search\n'), ((34528, 34567), 
'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (34557, 34567), True, 'import tccli.options_define as OptionsDefine\n'), ((34581, 34628), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (34618, 34628), True, 'import tccli.options_define as OptionsDefine\n'), ((35292, 35334), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (35324, 35334), True, 'import tccli.options_define as OptionsDefine\n'), ((36082, 36143), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (36088, 36143), False, 'from jmespath import search\n'), ((37112, 37151), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (37141, 37151), True, 'import tccli.options_define as OptionsDefine\n'), ((37165, 37212), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (37202, 37212), True, 'import tccli.options_define as OptionsDefine\n'), ((37876, 37918), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (37908, 37918), True, 'import tccli.options_define as OptionsDefine\n'), ((38662, 38723), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (38668, 38723), False, 'from jmespath import search\n'), ((39689, 39728), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (39718, 39728), True, 'import tccli.options_define as OptionsDefine\n'), ((39742, 39789), 
'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (39779, 39789), True, 'import tccli.options_define as OptionsDefine\n'), ((40453, 40495), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (40485, 40495), True, 'import tccli.options_define as OptionsDefine\n'), ((41233, 41294), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (41239, 41294), False, 'from jmespath import search\n'), ((42264, 42303), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (42293, 42303), True, 'import tccli.options_define as OptionsDefine\n'), ((42317, 42364), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (42354, 42364), True, 'import tccli.options_define as OptionsDefine\n'), ((43028, 43070), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (43060, 43070), True, 'import tccli.options_define as OptionsDefine\n'), ((43816, 43877), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (43822, 43877), False, 'from jmespath import search\n'), ((44854, 44893), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (44883, 44893), True, 'import tccli.options_define as OptionsDefine\n'), ((44907, 44954), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (44944, 44954), True, 'import tccli.options_define as OptionsDefine\n'), ((45618, 45660), 
'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (45650, 45660), True, 'import tccli.options_define as OptionsDefine\n'), ((46420, 46481), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (46426, 46481), False, 'from jmespath import search\n'), ((47450, 47489), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (47479, 47489), True, 'import tccli.options_define as OptionsDefine\n'), ((47503, 47550), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (47540, 47550), True, 'import tccli.options_define as OptionsDefine\n'), ((48214, 48256), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (48246, 48256), True, 'import tccli.options_define as OptionsDefine\n'), ((49000, 49061), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (49006, 49061), False, 'from jmespath import search\n'), ((50037, 50076), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (50066, 50076), True, 'import tccli.options_define as OptionsDefine\n'), ((50090, 50137), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (50127, 50137), True, 'import tccli.options_define as OptionsDefine\n'), ((50801, 50843), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (50833, 50843), True, 'import tccli.options_define as OptionsDefine\n'), ((51601, 51662), 'jmespath.search', 
'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (51607, 51662), False, 'from jmespath import search\n'), ((52641, 52680), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (52670, 52680), True, 'import tccli.options_define as OptionsDefine\n'), ((52694, 52741), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (52731, 52741), True, 'import tccli.options_define as OptionsDefine\n'), ((53405, 53447), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (53437, 53447), True, 'import tccli.options_define as OptionsDefine\n'), ((54211, 54272), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (54217, 54272), False, 'from jmespath import search\n'), ((55236, 55275), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (55265, 55275), True, 'import tccli.options_define as OptionsDefine\n'), ((55289, 55336), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (55326, 55336), True, 'import tccli.options_define as OptionsDefine\n'), ((56000, 56042), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (56032, 56042), True, 'import tccli.options_define as OptionsDefine\n'), ((56776, 56837), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (56782, 56837), False, 'from jmespath import search\n'), ((57812, 57851), 
'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (57841, 57851), True, 'import tccli.options_define as OptionsDefine\n'), ((57865, 57912), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (57902, 57912), True, 'import tccli.options_define as OptionsDefine\n'), ((58576, 58618), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (58608, 58618), True, 'import tccli.options_define as OptionsDefine\n'), ((59374, 59435), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (59380, 59435), False, 'from jmespath import search\n'), ((60402, 60441), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (60431, 60441), True, 'import tccli.options_define as OptionsDefine\n'), ((60455, 60502), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (60492, 60502), True, 'import tccli.options_define as OptionsDefine\n'), ((61166, 61208), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (61198, 61208), True, 'import tccli.options_define as OptionsDefine\n'), ((61948, 62009), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (61954, 62009), False, 'from jmespath import search\n'), ((62989, 63028), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (63018, 63028), True, 'import tccli.options_define as OptionsDefine\n'), ((63042, 63089), 
'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (63079, 63089), True, 'import tccli.options_define as OptionsDefine\n'), ((63753, 63795), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (63785, 63795), True, 'import tccli.options_define as OptionsDefine\n'), ((64561, 64622), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (64567, 64622), False, 'from jmespath import search\n'), ((65600, 65639), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (65629, 65639), True, 'import tccli.options_define as OptionsDefine\n'), ((65653, 65700), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (65690, 65700), True, 'import tccli.options_define as OptionsDefine\n'), ((66364, 66406), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (66396, 66406), True, 'import tccli.options_define as OptionsDefine\n'), ((67168, 67229), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (67174, 67229), False, 'from jmespath import search\n'), ((68202, 68241), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (68231, 68241), True, 'import tccli.options_define as OptionsDefine\n'), ((68255, 68302), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (68292, 68302), True, 'import tccli.options_define as OptionsDefine\n'), ((68966, 69008), 
'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (68998, 69008), True, 'import tccli.options_define as OptionsDefine\n'), ((69760, 69821), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (69766, 69821), False, 'from jmespath import search\n'), ((70794, 70833), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (70823, 70833), True, 'import tccli.options_define as OptionsDefine\n'), ((70847, 70894), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (70884, 70894), True, 'import tccli.options_define as OptionsDefine\n'), ((71558, 71600), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (71590, 71600), True, 'import tccli.options_define as OptionsDefine\n'), ((72352, 72413), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (72358, 72413), False, 'from jmespath import search\n'), ((73380, 73419), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (73409, 73419), True, 'import tccli.options_define as OptionsDefine\n'), ((73433, 73480), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (73470, 73480), True, 'import tccli.options_define as OptionsDefine\n'), ((74144, 74186), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (74176, 74186), True, 'import tccli.options_define as OptionsDefine\n'), ((74926, 74987), 'jmespath.search', 
'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (74932, 74987), False, 'from jmespath import search\n'), ((75955, 75994), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (75984, 75994), True, 'import tccli.options_define as OptionsDefine\n'), ((76008, 76055), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (76045, 76055), True, 'import tccli.options_define as OptionsDefine\n'), ((76719, 76761), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (76751, 76761), True, 'import tccli.options_define as OptionsDefine\n'), ((77503, 77564), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (77509, 77564), False, 'from jmespath import search\n'), ((78533, 78572), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (78562, 78572), True, 'import tccli.options_define as OptionsDefine\n'), ((78586, 78633), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (78623, 78633), True, 'import tccli.options_define as OptionsDefine\n'), ((79297, 79339), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (79329, 79339), True, 'import tccli.options_define as OptionsDefine\n'), ((80083, 80144), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (80089, 80144), False, 'from jmespath import search\n'), ((81111, 81150), 
'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (81140, 81150), True, 'import tccli.options_define as OptionsDefine\n'), ((81164, 81211), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (81201, 81211), True, 'import tccli.options_define as OptionsDefine\n'), ((81875, 81917), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (81907, 81917), True, 'import tccli.options_define as OptionsDefine\n'), ((82657, 82718), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (82663, 82718), False, 'from jmespath import search\n'), ((83688, 83727), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (83717, 83727), True, 'import tccli.options_define as OptionsDefine\n'), ((83741, 83788), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (83778, 83788), True, 'import tccli.options_define as OptionsDefine\n'), ((84452, 84494), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (84484, 84494), True, 'import tccli.options_define as OptionsDefine\n'), ((85240, 85301), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (85246, 85301), False, 'from jmespath import search\n'), ((86265, 86304), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (86294, 86304), True, 'import tccli.options_define as OptionsDefine\n'), ((86318, 86365), 
'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (86355, 86365), True, 'import tccli.options_define as OptionsDefine\n'), ((87029, 87071), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (87061, 87071), True, 'import tccli.options_define as OptionsDefine\n'), ((87805, 87866), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (87811, 87866), False, 'from jmespath import search\n'), ((88831, 88870), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (88860, 88870), True, 'import tccli.options_define as OptionsDefine\n'), ((88884, 88931), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (88921, 88931), True, 'import tccli.options_define as OptionsDefine\n'), ((89595, 89637), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (89627, 89637), True, 'import tccli.options_define as OptionsDefine\n'), ((90373, 90434), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (90379, 90434), False, 'from jmespath import search\n'), ((91403, 91442), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (91432, 91442), True, 'import tccli.options_define as OptionsDefine\n'), ((91456, 91503), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (91493, 91503), True, 'import tccli.options_define as OptionsDefine\n'), ((92167, 92209), 
'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (92199, 92209), True, 'import tccli.options_define as OptionsDefine\n'), ((92953, 93014), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (92959, 93014), False, 'from jmespath import search\n'), ((93981, 94020), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (94010, 94020), True, 'import tccli.options_define as OptionsDefine\n'), ((94034, 94081), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (94071, 94081), True, 'import tccli.options_define as OptionsDefine\n'), ((94745, 94787), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (94777, 94787), True, 'import tccli.options_define as OptionsDefine\n'), ((95527, 95588), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (95533, 95588), False, 'from jmespath import search\n'), ((96558, 96597), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (96587, 96597), True, 'import tccli.options_define as OptionsDefine\n'), ((96611, 96658), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (96648, 96658), True, 'import tccli.options_define as OptionsDefine\n'), ((97322, 97364), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (97354, 97364), True, 'import tccli.options_define as OptionsDefine\n'), ((98110, 98171), 'jmespath.search', 
'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (98116, 98171), False, 'from jmespath import search\n'), ((99143, 99182), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (99172, 99182), True, 'import tccli.options_define as OptionsDefine\n'), ((99196, 99243), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (99233, 99243), True, 'import tccli.options_define as OptionsDefine\n'), ((99907, 99949), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (99939, 99949), True, 'import tccli.options_define as OptionsDefine\n'), ((100699, 100760), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (100705, 100760), False, 'from jmespath import search\n'), ((101734, 101773), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (101763, 101773), True, 'import tccli.options_define as OptionsDefine\n'), ((101787, 101834), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (101824, 101834), True, 'import tccli.options_define as OptionsDefine\n'), ((102498, 102540), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (102530, 102540), True, 'import tccli.options_define as OptionsDefine\n'), ((103294, 103355), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (103300, 103355), False, 'from jmespath import search\n'), ((104332, 104371), 
'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (104361, 104371), True, 'import tccli.options_define as OptionsDefine\n'), ((104385, 104432), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (104422, 104432), True, 'import tccli.options_define as OptionsDefine\n'), ((105096, 105138), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (105128, 105138), True, 'import tccli.options_define as OptionsDefine\n'), ((105898, 105959), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (105904, 105959), False, 'from jmespath import search\n'), ((106925, 106964), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (106954, 106964), True, 'import tccli.options_define as OptionsDefine\n'), ((106978, 107025), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (107015, 107025), True, 'import tccli.options_define as OptionsDefine\n'), ((107689, 107731), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (107721, 107731), True, 'import tccli.options_define as OptionsDefine\n'), ((108469, 108530), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (108475, 108530), False, 'from jmespath import search\n'), ((109495, 109534), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (109524, 109534), True, 'import tccli.options_define as OptionsDefine\n'), ((109548, 
109595), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (109585, 109595), True, 'import tccli.options_define as OptionsDefine\n'), ((110259, 110301), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (110291, 110301), True, 'import tccli.options_define as OptionsDefine\n'), ((111037, 111098), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (111043, 111098), False, 'from jmespath import search\n'), ((112073, 112112), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (112102, 112112), True, 'import tccli.options_define as OptionsDefine\n'), ((112126, 112173), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (112163, 112173), True, 'import tccli.options_define as OptionsDefine\n'), ((112837, 112879), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (112869, 112879), True, 'import tccli.options_define as OptionsDefine\n'), ((113635, 113696), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (113641, 113696), False, 'from jmespath import search\n'), ((114669, 114708), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (114698, 114708), True, 'import tccli.options_define as OptionsDefine\n'), ((114722, 114769), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (114759, 114769), True, 'import tccli.options_define as 
OptionsDefine\n'), ((115433, 115475), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (115465, 115475), True, 'import tccli.options_define as OptionsDefine\n'), ((116227, 116288), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (116233, 116288), False, 'from jmespath import search\n'), ((117265, 117304), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (117294, 117304), True, 'import tccli.options_define as OptionsDefine\n'), ((117318, 117365), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (117355, 117365), True, 'import tccli.options_define as OptionsDefine\n'), ((118029, 118071), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (118061, 118071), True, 'import tccli.options_define as OptionsDefine\n'), ((118831, 118892), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (118837, 118892), False, 'from jmespath import search\n'), ((119857, 119896), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (119886, 119896), True, 'import tccli.options_define as OptionsDefine\n'), ((119910, 119957), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (119947, 119957), True, 'import tccli.options_define as OptionsDefine\n'), ((120621, 120663), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (120653, 120663), True, 'import 
tccli.options_define as OptionsDefine\n'), ((121399, 121460), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (121405, 121460), False, 'from jmespath import search\n'), ((122437, 122476), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (122466, 122476), True, 'import tccli.options_define as OptionsDefine\n'), ((122490, 122537), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (122527, 122537), True, 'import tccli.options_define as OptionsDefine\n'), ((123201, 123243), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (123233, 123243), True, 'import tccli.options_define as OptionsDefine\n'), ((124003, 124064), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (124009, 124064), False, 'from jmespath import search\n'), ((125030, 125069), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (125059, 125069), True, 'import tccli.options_define as OptionsDefine\n'), ((125083, 125130), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (125120, 125130), True, 'import tccli.options_define as OptionsDefine\n'), ((125794, 125836), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (125826, 125836), True, 'import tccli.options_define as OptionsDefine\n'), ((126574, 126635), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], 
json_obj)\n", (126580, 126635), False, 'from jmespath import search\n'), ((127609, 127648), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (127638, 127648), True, 'import tccli.options_define as OptionsDefine\n'), ((127662, 127709), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (127699, 127709), True, 'import tccli.options_define as OptionsDefine\n'), ((128373, 128415), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (128405, 128415), True, 'import tccli.options_define as OptionsDefine\n'), ((129169, 129230), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (129175, 129230), False, 'from jmespath import search\n'), ((130202, 130241), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (130231, 130241), True, 'import tccli.options_define as OptionsDefine\n'), ((130255, 130302), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (130292, 130302), True, 'import tccli.options_define as OptionsDefine\n'), ((130966, 131008), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (130998, 131008), True, 'import tccli.options_define as OptionsDefine\n'), ((131758, 131819), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (131764, 131819), False, 'from jmespath import search\n'), ((132785, 132824), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', 
'_')\n", (132814, 132824), True, 'import tccli.options_define as OptionsDefine\n'), ((132838, 132885), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (132875, 132885), True, 'import tccli.options_define as OptionsDefine\n'), ((133549, 133591), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (133581, 133591), True, 'import tccli.options_define as OptionsDefine\n'), ((134329, 134390), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (134335, 134390), False, 'from jmespath import search\n'), ((135362, 135401), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (135391, 135401), True, 'import tccli.options_define as OptionsDefine\n'), ((135415, 135462), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (135452, 135462), True, 'import tccli.options_define as OptionsDefine\n'), ((136126, 136168), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (136158, 136168), True, 'import tccli.options_define as OptionsDefine\n'), ((136918, 136979), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (136924, 136979), False, 'from jmespath import search\n'), ((137953, 137992), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (137982, 137992), True, 'import tccli.options_define as OptionsDefine\n'), ((138006, 138053), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', 
'"""_"""'], {}), "('-', '_')\n", (138043, 138053), True, 'import tccli.options_define as OptionsDefine\n'), ((138717, 138759), 'tccli.options_define.HttpsProxy.replace', 'OptionsDefine.HttpsProxy.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (138749, 138759), True, 'import tccli.options_define as OptionsDefine\n'), ((139513, 139574), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (139519, 139574), False, 'from jmespath import search\n'), ((1106, 1145), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (1135, 1145), True, 'import tccli.options_define as OptionsDefine\n'), ((1168, 1215), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (1205, 1215), True, 'import tccli.options_define as OptionsDefine\n'), ((2986, 3047), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (2992, 3047), False, 'from jmespath import search\n'), ((3684, 3723), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (3713, 3723), True, 'import tccli.options_define as OptionsDefine\n'), ((3746, 3793), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (3783, 3793), True, 'import tccli.options_define as OptionsDefine\n'), ((5566, 5627), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (5572, 5627), False, 'from jmespath import search\n'), ((6269, 6308), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', 
'_')\n", (6298, 6308), True, 'import tccli.options_define as OptionsDefine\n'), ((6331, 6378), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (6368, 6378), True, 'import tccli.options_define as OptionsDefine\n'), ((8161, 8222), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (8167, 8222), False, 'from jmespath import search\n'), ((8862, 8901), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (8891, 8901), True, 'import tccli.options_define as OptionsDefine\n'), ((8924, 8971), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (8961, 8971), True, 'import tccli.options_define as OptionsDefine\n'), ((10750, 10811), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (10756, 10811), False, 'from jmespath import search\n'), ((11458, 11497), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (11487, 11497), True, 'import tccli.options_define as OptionsDefine\n'), ((11520, 11567), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (11557, 11567), True, 'import tccli.options_define as OptionsDefine\n'), ((13360, 13421), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (13366, 13421), False, 'from jmespath import search\n'), ((14069, 14108), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (14098, 
14108), True, 'import tccli.options_define as OptionsDefine\n'), ((14131, 14178), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (14168, 14178), True, 'import tccli.options_define as OptionsDefine\n'), ((15973, 16034), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (15979, 16034), False, 'from jmespath import search\n'), ((16678, 16717), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (16707, 16717), True, 'import tccli.options_define as OptionsDefine\n'), ((16740, 16787), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (16777, 16787), True, 'import tccli.options_define as OptionsDefine\n'), ((18574, 18635), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (18580, 18635), False, 'from jmespath import search\n'), ((19269, 19308), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (19298, 19308), True, 'import tccli.options_define as OptionsDefine\n'), ((19331, 19378), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (19368, 19378), True, 'import tccli.options_define as OptionsDefine\n'), ((21145, 21206), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (21151, 21206), False, 'from jmespath import search\n'), ((21847, 21886), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (21876, 
21886), True, 'import tccli.options_define as OptionsDefine\n'), ((21909, 21956), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (21946, 21956), True, 'import tccli.options_define as OptionsDefine\n'), ((23737, 23798), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (23743, 23798), False, 'from jmespath import search\n'), ((24432, 24471), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (24461, 24471), True, 'import tccli.options_define as OptionsDefine\n'), ((24494, 24541), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (24531, 24541), True, 'import tccli.options_define as OptionsDefine\n'), ((26308, 26369), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (26314, 26369), False, 'from jmespath import search\n'), ((27019, 27058), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (27048, 27058), True, 'import tccli.options_define as OptionsDefine\n'), ((27081, 27128), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (27118, 27128), True, 'import tccli.options_define as OptionsDefine\n'), ((28927, 28988), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (28933, 28988), False, 'from jmespath import search\n'), ((29621, 29660), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (29650, 
29660), True, 'import tccli.options_define as OptionsDefine\n'), ((29683, 29730), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (29720, 29730), True, 'import tccli.options_define as OptionsDefine\n'), ((31495, 31556), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (31501, 31556), False, 'from jmespath import search\n'), ((32191, 32230), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (32220, 32230), True, 'import tccli.options_define as OptionsDefine\n'), ((32253, 32300), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (32290, 32300), True, 'import tccli.options_define as OptionsDefine\n'), ((34069, 34130), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (34075, 34130), False, 'from jmespath import search\n'), ((34769, 34808), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (34798, 34808), True, 'import tccli.options_define as OptionsDefine\n'), ((34831, 34878), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (34868, 34878), True, 'import tccli.options_define as OptionsDefine\n'), ((36655, 36716), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (36661, 36716), False, 'from jmespath import search\n'), ((37353, 37392), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (37382, 
37392), True, 'import tccli.options_define as OptionsDefine\n'), ((37415, 37462), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (37452, 37462), True, 'import tccli.options_define as OptionsDefine\n'), ((39235, 39296), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (39241, 39296), False, 'from jmespath import search\n'), ((39930, 39969), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (39959, 39969), True, 'import tccli.options_define as OptionsDefine\n'), ((39992, 40039), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (40029, 40039), True, 'import tccli.options_define as OptionsDefine\n'), ((41806, 41867), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (41812, 41867), False, 'from jmespath import search\n'), ((42505, 42544), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (42534, 42544), True, 'import tccli.options_define as OptionsDefine\n'), ((42567, 42614), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (42604, 42614), True, 'import tccli.options_define as OptionsDefine\n'), ((44389, 44450), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (44395, 44450), False, 'from jmespath import search\n'), ((45095, 45134), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (45124, 
45134), True, 'import tccli.options_define as OptionsDefine\n'), ((45157, 45204), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (45194, 45204), True, 'import tccli.options_define as OptionsDefine\n'), ((46993, 47054), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (46999, 47054), False, 'from jmespath import search\n'), ((47691, 47730), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (47720, 47730), True, 'import tccli.options_define as OptionsDefine\n'), ((47753, 47800), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (47790, 47800), True, 'import tccli.options_define as OptionsDefine\n'), ((49573, 49634), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (49579, 49634), False, 'from jmespath import search\n'), ((50278, 50317), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (50307, 50317), True, 'import tccli.options_define as OptionsDefine\n'), ((50340, 50387), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (50377, 50387), True, 'import tccli.options_define as OptionsDefine\n'), ((52174, 52235), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (52180, 52235), False, 'from jmespath import search\n'), ((52882, 52921), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (52911, 
52921), True, 'import tccli.options_define as OptionsDefine\n'), ((52944, 52991), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (52981, 52991), True, 'import tccli.options_define as OptionsDefine\n'), ((54784, 54845), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (54790, 54845), False, 'from jmespath import search\n'), ((55477, 55516), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (55506, 55516), True, 'import tccli.options_define as OptionsDefine\n'), ((55539, 55586), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (55576, 55586), True, 'import tccli.options_define as OptionsDefine\n'), ((57349, 57410), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (57355, 57410), False, 'from jmespath import search\n'), ((58053, 58092), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (58082, 58092), True, 'import tccli.options_define as OptionsDefine\n'), ((58115, 58162), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (58152, 58162), True, 'import tccli.options_define as OptionsDefine\n'), ((59947, 60008), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (59953, 60008), False, 'from jmespath import search\n'), ((60643, 60682), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (60672, 
60682), True, 'import tccli.options_define as OptionsDefine\n'), ((60705, 60752), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (60742, 60752), True, 'import tccli.options_define as OptionsDefine\n'), ((62521, 62582), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (62527, 62582), False, 'from jmespath import search\n'), ((63230, 63269), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (63259, 63269), True, 'import tccli.options_define as OptionsDefine\n'), ((63292, 63339), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (63329, 63339), True, 'import tccli.options_define as OptionsDefine\n'), ((65134, 65195), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (65140, 65195), False, 'from jmespath import search\n'), ((65841, 65880), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (65870, 65880), True, 'import tccli.options_define as OptionsDefine\n'), ((65903, 65950), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (65940, 65950), True, 'import tccli.options_define as OptionsDefine\n'), ((67741, 67802), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (67747, 67802), False, 'from jmespath import search\n'), ((68443, 68482), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (68472, 
68482), True, 'import tccli.options_define as OptionsDefine\n'), ((68505, 68552), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (68542, 68552), True, 'import tccli.options_define as OptionsDefine\n'), ((70333, 70394), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (70339, 70394), False, 'from jmespath import search\n'), ((71035, 71074), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (71064, 71074), True, 'import tccli.options_define as OptionsDefine\n'), ((71097, 71144), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (71134, 71144), True, 'import tccli.options_define as OptionsDefine\n'), ((72925, 72986), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (72931, 72986), False, 'from jmespath import search\n'), ((73621, 73660), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (73650, 73660), True, 'import tccli.options_define as OptionsDefine\n'), ((73683, 73730), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (73720, 73730), True, 'import tccli.options_define as OptionsDefine\n'), ((75499, 75560), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (75505, 75560), False, 'from jmespath import search\n'), ((76196, 76235), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (76225, 
76235), True, 'import tccli.options_define as OptionsDefine\n'), ((76258, 76305), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (76295, 76305), True, 'import tccli.options_define as OptionsDefine\n'), ((78076, 78137), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (78082, 78137), False, 'from jmespath import search\n'), ((78774, 78813), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (78803, 78813), True, 'import tccli.options_define as OptionsDefine\n'), ((78836, 78883), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (78873, 78883), True, 'import tccli.options_define as OptionsDefine\n'), ((80656, 80717), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (80662, 80717), False, 'from jmespath import search\n'), ((81352, 81391), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (81381, 81391), True, 'import tccli.options_define as OptionsDefine\n'), ((81414, 81461), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (81451, 81461), True, 'import tccli.options_define as OptionsDefine\n'), ((83230, 83291), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (83236, 83291), False, 'from jmespath import search\n'), ((83929, 83968), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (83958, 
83968), True, 'import tccli.options_define as OptionsDefine\n'), ((83991, 84038), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (84028, 84038), True, 'import tccli.options_define as OptionsDefine\n'), ((85813, 85874), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (85819, 85874), False, 'from jmespath import search\n'), ((86506, 86545), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (86535, 86545), True, 'import tccli.options_define as OptionsDefine\n'), ((86568, 86615), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (86605, 86615), True, 'import tccli.options_define as OptionsDefine\n'), ((88378, 88439), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (88384, 88439), False, 'from jmespath import search\n'), ((89072, 89111), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (89101, 89111), True, 'import tccli.options_define as OptionsDefine\n'), ((89134, 89181), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (89171, 89181), True, 'import tccli.options_define as OptionsDefine\n'), ((90946, 91007), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (90952, 91007), False, 'from jmespath import search\n'), ((91644, 91683), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (91673, 
91683), True, 'import tccli.options_define as OptionsDefine\n'), ((91706, 91753), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (91743, 91753), True, 'import tccli.options_define as OptionsDefine\n'), ((93526, 93587), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (93532, 93587), False, 'from jmespath import search\n'), ((94222, 94261), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (94251, 94261), True, 'import tccli.options_define as OptionsDefine\n'), ((94284, 94331), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (94321, 94331), True, 'import tccli.options_define as OptionsDefine\n'), ((96100, 96161), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (96106, 96161), False, 'from jmespath import search\n'), ((96799, 96838), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (96828, 96838), True, 'import tccli.options_define as OptionsDefine\n'), ((96861, 96908), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (96898, 96908), True, 'import tccli.options_define as OptionsDefine\n'), ((98683, 98744), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (98689, 98744), False, 'from jmespath import search\n'), ((99384, 99423), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (99413, 
99423), True, 'import tccli.options_define as OptionsDefine\n'), ((99446, 99493), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (99483, 99493), True, 'import tccli.options_define as OptionsDefine\n'), ((101272, 101333), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (101278, 101333), False, 'from jmespath import search\n'), ((101975, 102014), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (102004, 102014), True, 'import tccli.options_define as OptionsDefine\n'), ((102037, 102084), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (102074, 102084), True, 'import tccli.options_define as OptionsDefine\n'), ((103867, 103928), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (103873, 103928), False, 'from jmespath import search\n'), ((104573, 104612), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (104602, 104612), True, 'import tccli.options_define as OptionsDefine\n'), ((104635, 104682), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (104672, 104682), True, 'import tccli.options_define as OptionsDefine\n'), ((106471, 106532), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (106477, 106532), False, 'from jmespath import search\n'), ((107166, 107205), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], 
{}), "('-', '_')\n", (107195, 107205), True, 'import tccli.options_define as OptionsDefine\n'), ((107228, 107275), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (107265, 107275), True, 'import tccli.options_define as OptionsDefine\n'), ((109042, 109103), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (109048, 109103), False, 'from jmespath import search\n'), ((109736, 109775), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (109765, 109775), True, 'import tccli.options_define as OptionsDefine\n'), ((109798, 109845), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (109835, 109845), True, 'import tccli.options_define as OptionsDefine\n'), ((111610, 111671), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (111616, 111671), False, 'from jmespath import search\n'), ((112314, 112353), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (112343, 112353), True, 'import tccli.options_define as OptionsDefine\n'), ((112376, 112423), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (112413, 112423), True, 'import tccli.options_define as OptionsDefine\n'), ((114208, 114269), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (114214, 114269), False, 'from jmespath import search\n'), ((114910, 114949), 'tccli.options_define.RoleArn.replace', 
'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (114939, 114949), True, 'import tccli.options_define as OptionsDefine\n'), ((114972, 115019), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (115009, 115019), True, 'import tccli.options_define as OptionsDefine\n'), ((116800, 116861), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (116806, 116861), False, 'from jmespath import search\n'), ((117506, 117545), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (117535, 117545), True, 'import tccli.options_define as OptionsDefine\n'), ((117568, 117615), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (117605, 117615), True, 'import tccli.options_define as OptionsDefine\n'), ((119404, 119465), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (119410, 119465), False, 'from jmespath import search\n'), ((120098, 120137), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (120127, 120137), True, 'import tccli.options_define as OptionsDefine\n'), ((120160, 120207), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (120197, 120207), True, 'import tccli.options_define as OptionsDefine\n'), ((121972, 122033), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (121978, 122033), False, 'from jmespath import search\n'), ((122678, 122717), 
'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (122707, 122717), True, 'import tccli.options_define as OptionsDefine\n'), ((122740, 122787), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (122777, 122787), True, 'import tccli.options_define as OptionsDefine\n'), ((124576, 124637), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (124582, 124637), False, 'from jmespath import search\n'), ((125271, 125310), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (125300, 125310), True, 'import tccli.options_define as OptionsDefine\n'), ((125333, 125380), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (125370, 125380), True, 'import tccli.options_define as OptionsDefine\n'), ((127147, 127208), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (127153, 127208), False, 'from jmespath import search\n'), ((127850, 127889), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (127879, 127889), True, 'import tccli.options_define as OptionsDefine\n'), ((127912, 127959), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (127949, 127959), True, 'import tccli.options_define as OptionsDefine\n'), ((129742, 129803), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (129748, 129803), False, 'from jmespath import 
search\n'), ((130443, 130482), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (130472, 130482), True, 'import tccli.options_define as OptionsDefine\n'), ((130505, 130552), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (130542, 130552), True, 'import tccli.options_define as OptionsDefine\n'), ((132331, 132392), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (132337, 132392), False, 'from jmespath import search\n'), ((133026, 133065), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (133055, 133065), True, 'import tccli.options_define as OptionsDefine\n'), ((133088, 133135), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (133125, 133135), True, 'import tccli.options_define as OptionsDefine\n'), ((134902, 134963), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (134908, 134963), False, 'from jmespath import search\n'), ((135603, 135642), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (135632, 135642), True, 'import tccli.options_define as OptionsDefine\n'), ((135665, 135712), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (135702, 135712), True, 'import tccli.options_define as OptionsDefine\n'), ((137491, 137552), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (137497, 
137552), False, 'from jmespath import search\n'), ((138194, 138233), 'tccli.options_define.RoleArn.replace', 'OptionsDefine.RoleArn.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (138223, 138233), True, 'import tccli.options_define as OptionsDefine\n'), ((138256, 138303), 'tccli.options_define.RoleSessionName.replace', 'OptionsDefine.RoleSessionName.replace', (['"""-"""', '"""_"""'], {}), "('-', '_')\n", (138293, 138303), True, 'import tccli.options_define as OptionsDefine\n'), ((140086, 140147), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (140092, 140147), False, 'from jmespath import search\n'), ((2864, 2925), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (2870, 2925), False, 'from jmespath import search\n'), ((5444, 5505), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (5450, 5505), False, 'from jmespath import search\n'), ((8039, 8100), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (8045, 8100), False, 'from jmespath import search\n'), ((10628, 10689), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (10634, 10689), False, 'from jmespath import search\n'), ((13238, 13299), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (13244, 13299), False, 'from jmespath import search\n'), ((15851, 15912), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), 
"(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (15857, 15912), False, 'from jmespath import search\n'), ((18452, 18513), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (18458, 18513), False, 'from jmespath import search\n'), ((21023, 21084), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (21029, 21084), False, 'from jmespath import search\n'), ((23615, 23676), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (23621, 23676), False, 'from jmespath import search\n'), ((26186, 26247), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (26192, 26247), False, 'from jmespath import search\n'), ((28805, 28866), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (28811, 28866), False, 'from jmespath import search\n'), ((31373, 31434), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (31379, 31434), False, 'from jmespath import search\n'), ((33947, 34008), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (33953, 34008), False, 'from jmespath import search\n'), ((36533, 36594), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (36539, 36594), False, 'from jmespath import search\n'), ((39113, 39174), 
'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (39119, 39174), False, 'from jmespath import search\n'), ((41684, 41745), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (41690, 41745), False, 'from jmespath import search\n'), ((44267, 44328), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (44273, 44328), False, 'from jmespath import search\n'), ((46871, 46932), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (46877, 46932), False, 'from jmespath import search\n'), ((49451, 49512), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (49457, 49512), False, 'from jmespath import search\n'), ((52052, 52113), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (52058, 52113), False, 'from jmespath import search\n'), ((54662, 54723), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (54668, 54723), False, 'from jmespath import search\n'), ((57227, 57288), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (57233, 57288), False, 'from jmespath import search\n'), ((59825, 59886), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], 
json_obj)\n", (59831, 59886), False, 'from jmespath import search\n'), ((62399, 62460), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (62405, 62460), False, 'from jmespath import search\n'), ((65012, 65073), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (65018, 65073), False, 'from jmespath import search\n'), ((67619, 67680), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (67625, 67680), False, 'from jmespath import search\n'), ((70211, 70272), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (70217, 70272), False, 'from jmespath import search\n'), ((72803, 72864), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (72809, 72864), False, 'from jmespath import search\n'), ((75377, 75438), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (75383, 75438), False, 'from jmespath import search\n'), ((77954, 78015), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (77960, 78015), False, 'from jmespath import search\n'), ((80534, 80595), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (80540, 80595), False, 'from jmespath import search\n'), ((83108, 83169), 'jmespath.search', 'search', 
(["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (83114, 83169), False, 'from jmespath import search\n'), ((85691, 85752), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (85697, 85752), False, 'from jmespath import search\n'), ((88256, 88317), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (88262, 88317), False, 'from jmespath import search\n'), ((90824, 90885), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (90830, 90885), False, 'from jmespath import search\n'), ((93404, 93465), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (93410, 93465), False, 'from jmespath import search\n'), ((95978, 96039), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (95984, 96039), False, 'from jmespath import search\n'), ((98561, 98622), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (98567, 98622), False, 'from jmespath import search\n'), ((101150, 101211), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (101156, 101211), False, 'from jmespath import search\n'), ((103745, 103806), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (103751, 103806), 
False, 'from jmespath import search\n'), ((106349, 106410), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (106355, 106410), False, 'from jmespath import search\n'), ((108920, 108981), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (108926, 108981), False, 'from jmespath import search\n'), ((111488, 111549), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (111494, 111549), False, 'from jmespath import search\n'), ((114086, 114147), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (114092, 114147), False, 'from jmespath import search\n'), ((116678, 116739), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (116684, 116739), False, 'from jmespath import search\n'), ((119282, 119343), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (119288, 119343), False, 'from jmespath import search\n'), ((121850, 121911), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (121856, 121911), False, 'from jmespath import search\n'), ((124454, 124515), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (124460, 124515), False, 'from jmespath import search\n'), ((127025, 127086), 'jmespath.search', 'search', 
(["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (127031, 127086), False, 'from jmespath import search\n'), ((129620, 129681), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (129626, 129681), False, 'from jmespath import search\n'), ((132209, 132270), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (132215, 132270), False, 'from jmespath import search\n'), ((134780, 134841), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (134786, 134841), False, 'from jmespath import search\n'), ((137369, 137430), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (137375, 137430), False, 'from jmespath import search\n'), ((139964, 140025), 'jmespath.search', 'search', (["g_param['OptionsDefine.WaiterInfo']['expr']", 'json_obj'], {}), "(g_param['OptionsDefine.WaiterInfo']['expr'], json_obj)\n", (139970, 140025), False, 'from jmespath import search\n'), ((145411, 145454), 'tccli.exceptions.ConfigurationError', 'ConfigurationError', (["('%s is invalid' % param)"], {}), "('%s is invalid' % param)\n", (145429, 145454), False, 'from tccli.exceptions import ConfigurationError, ClientError, ParamError\n'), ((145658, 145701), 'tccli.exceptions.ConfigurationError', 'ConfigurationError', (["('%s is invalid' % param)"], {}), "('%s is invalid' % param)\n", (145676, 145701), False, 'from tccli.exceptions import ConfigurationError, ClientError, ParamError\n'), ((145340, 145382), 'tccli.options_define.UseCVMRole.replace', 'OptionsDefine.UseCVMRole.replace', (['"""-"""', '"""_"""'], {}), 
"('-', '_')\n", (145372, 145382), True, 'import tccli.options_define as OptionsDefine\n')]
|
"""
phase.py
Estimate the phase of an oscillation using a waveform-based approach
"""
import numpy as np
def extrema_interpolated_phase(x, Ps, Ts, zeroxR=None, zeroxD=None):
    """
    Use peaks (phase 0) and troughs (phase pi/-pi) to estimate
    instantaneous phase. Also use rise and decay zerocrossings
    (phase -pi/2 and pi/2, respectively) if specified.
    Parameters
    ----------
    x : array-like 1d
        voltage time series
    Ps : array-like 1d
        time points of oscillatory peaks
    Ts : array-like 1d
        time points of oscillatory troughs
    zeroxR : array-like 1d
        indices at which oscillatory rising zerocrossings occur
    zeroxD : array-like 1d
        indices at which oscillatory decaying zerocrossings occur
    Returns
    -------
    pha : array-like 1d
        instantaneous phase; samples before the first and after the last
        empirical phase anchor are set to NaN
    Notes
    -----
    Sometimes, due to noise, extrema and zerocrossing estimation
    is poor, and for example, the same index may be assigned to
    both a peak and a decaying zerocrossing. Because of this,
    we first assign phase values by zerocrossings, and then
    may overwrite them with extrema phases.
    """
    # Initialize phase arrays
    # 2 phase arrays: trough pi and trough -pi
    L = len(x)
    t = np.arange(L)
    pha_tpi = np.zeros(L) * np.nan
    pha_tnpi = np.zeros(L) * np.nan
    # If specified, assign phases to zerocrossings.
    # Done before the extrema so that extrema win any index collisions
    # (see Notes above).
    if zeroxR is not None:
        pha_tpi[zeroxR] = -np.pi / 2
        pha_tnpi[zeroxR] = -np.pi / 2
    if zeroxD is not None:
        pha_tpi[zeroxD] = np.pi / 2
        pha_tnpi[zeroxD] = np.pi / 2
    # Define phases
    pha_tpi[Ps] = 0
    pha_tpi[Ts] = np.pi
    pha_tnpi[Ps] = 0
    pha_tnpi[Ts] = -np.pi
    # Interpolate to find all phases: linear interpolation between the
    # known anchors; NaN samples are excluded from the anchor set
    pha_tpi = np.interp(t, t[~np.isnan(pha_tpi)], pha_tpi[~np.isnan(pha_tpi)])
    pha_tnpi = np.interp(t, t[~np.isnan(pha_tnpi)], pha_tnpi[~np.isnan(pha_tnpi)])
    # For the phase time series in which the trough is negative pi:
    # Replace the decaying periods with these periods in the phase time
    # series in which the trough is pi
    # (a negative first difference marks a decaying segment in pha_tnpi)
    diffs = np.diff(pha_tnpi)
    diffs = np.append(diffs, 99)  # pad so diffs aligns with pha_tnpi; 99 > 0 keeps the last sample
    pha_tnpi[diffs < 0] = pha_tpi[diffs < 0]
    # Assign the periods before the first empirical phase timepoint to NaN
    diffs = np.diff(pha_tnpi)
    first_empirical_idx = next(i for i, xi in enumerate(diffs) if xi > 0)
    pha_tnpi[:first_empirical_idx] = np.nan
    # Assign the periods after the last empirical phase timepoint to NaN
    diffs = np.diff(pha_tnpi)
    last_empirical_idx = next(i for i, xi in enumerate(diffs[::-1]) if xi > 0)
    # NOTE(review): if the last positive step is at the second-to-last sample,
    # -last_empirical_idx + 1 evaluates to 0 and this slice NaNs the whole
    # array -- confirm inputs always contain interior phase transitions
    pha_tnpi[-last_empirical_idx + 1:] = np.nan
    return pha_tnpi
|
[
"numpy.zeros",
"numpy.isnan",
"numpy.append",
"numpy.diff",
"numpy.arange"
] |
[((1263, 1275), 'numpy.arange', 'np.arange', (['L'], {}), '(L)\n', (1272, 1275), True, 'import numpy as np\n'), ((2105, 2122), 'numpy.diff', 'np.diff', (['pha_tnpi'], {}), '(pha_tnpi)\n', (2112, 2122), True, 'import numpy as np\n'), ((2135, 2155), 'numpy.append', 'np.append', (['diffs', '(99)'], {}), '(diffs, 99)\n', (2144, 2155), True, 'import numpy as np\n'), ((2289, 2306), 'numpy.diff', 'np.diff', (['pha_tnpi'], {}), '(pha_tnpi)\n', (2296, 2306), True, 'import numpy as np\n'), ((2511, 2528), 'numpy.diff', 'np.diff', (['pha_tnpi'], {}), '(pha_tnpi)\n', (2518, 2528), True, 'import numpy as np\n'), ((1290, 1301), 'numpy.zeros', 'np.zeros', (['L'], {}), '(L)\n', (1298, 1301), True, 'import numpy as np\n'), ((1326, 1337), 'numpy.zeros', 'np.zeros', (['L'], {}), '(L)\n', (1334, 1337), True, 'import numpy as np\n'), ((1781, 1798), 'numpy.isnan', 'np.isnan', (['pha_tpi'], {}), '(pha_tpi)\n', (1789, 1798), True, 'import numpy as np\n'), ((1810, 1827), 'numpy.isnan', 'np.isnan', (['pha_tpi'], {}), '(pha_tpi)\n', (1818, 1827), True, 'import numpy as np\n'), ((1861, 1879), 'numpy.isnan', 'np.isnan', (['pha_tnpi'], {}), '(pha_tnpi)\n', (1869, 1879), True, 'import numpy as np\n'), ((1892, 1910), 'numpy.isnan', 'np.isnan', (['pha_tnpi'], {}), '(pha_tnpi)\n', (1900, 1910), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
import os
from collections import defaultdict
from copy import deepcopy
from warnings import warn
import numpy as np
import pandas as pd
from pathlib import Path
from simulator.core.DtnBundle import Bundle
from simulator.utils.DtnIO import load_traffic_file
from simulator.utils.DtnUtils import shift_traffic
from simulator.generators.DtnAbstractGenerator import DtnAbstractGenerator
# ============================================================================================================
# === DEFINE LATENCY CATEGORIES - THESE ARE CONSTANT
# ============================================================================================================
# Define latency
# Latency requirement [seconds] per (data type, latency class).
# Rows follow the DataFrame index defined below; NaN means the class is
# not defined for that data type.
lat = np.array([[60, np.nan, np.nan],   # voice
                [60, np.nan, np.nan],   # biomedical
                [60, np.nan, 3600],     # caution and warning
                [60, 60, np.nan],       # command and teleoperation
                [60, 900, 21600],       # file
                [60, 300, 3600],        # health and status
                [60, 300, np.nan],      # nav type 1 products
                [60, 60, np.nan],       # nav type 2 message
                [60, 900, 21600],       # pao hd video
                [60, 900, 21600],       # sci hd video
                [60, 900, 21600],       # science
                [60, 300, np.nan]])     # sd video
# Wrap as a DataFrame so a latency value can be looked up as
# lat.loc[data_type, latency_class] (see generate_bundles)
lat = pd.DataFrame(data=1.0*lat, columns=['seconds','minutes','hours'],
                   index=['voice','biomedical','caution and warning','command and teleoperation',
                          'file','health and status','nav type 1 products','nav type 2 message',
                          'pao hd video','sci hd video','science','sd video'])
# ============================================================================================================
# === FUNCTIONS TO CREATE TWO STATE MARKOV PROCESS AND BUNDLE GENERATION TIMES
# ============================================================================================================
def two_state_markov_process(Tmin, Tmax, DutyCycle, Ton):
    """Generate one realization of a two-state (ON/OFF) Markov process.

    Parameters
    ----------
    Tmin, Tmax : float
        Start and end time of the process.
    DutyCycle : float
        Fraction of time spent in the ON state (0 < DutyCycle <= 1).
    Ton : float
        Mean duration of an ON period (exponentially distributed).

    Returns
    -------
    times : ndarray
        State transition times, starting at ``Tmin`` and ending at ``Tmax``.
    states : ndarray of bool
        State that becomes active at the matching transition time
        (True = ON, False = OFF).
    """
    # Initialize variables
    Tstart = 0
    Tend = Tmax - Tmin
    # Mean OFF duration that yields the requested duty cycle
    Toff = ((1 / DutyCycle) - 1) * Ton
    K = 10
    ok = False
    while not ok:
        # Expected number of ON/OFF cycles, inflated by safety factor K
        Ns = int(np.ceil(0.5*K*(Tend-Tstart)/(Ton + Toff)))
        # Handle special case where duty cycle is 1: one single ON period
        if DutyCycle == 1:
            state, times = True, Tend
        else:
            # Random initial state, then alternating exponential durations
            state = np.random.uniform() < DutyCycle
            on_dur = np.random.exponential(scale=Ton, size=Ns)
            off_dur = np.random.exponential(scale=Toff, size=Ns)
            times = np.zeros(2*Ns)
            if state == True:
                times[0::2] = on_dur
                times[1::2] = off_dur
            else:
                times[0::2] = off_dur
                times[1::2] = on_dur
        # Transition times are the cumulative durations, prefixed with t=0
        times = np.insert(np.cumsum(times), 0, 0)
        states = np.zeros_like(times, dtype=bool)
        states[0::2] = state
        states[1::2] = not state
        # Validate the sequence: it must span all of [0, Tend];
        # otherwise retry with a larger safety factor
        if times[-1] >= Tend: ok = True
        else: K += 1
    # Trim end of generated sequence to match Tend
    times[times > Tend] = Tend
    idx = np.argmax(times == Tend)+1
    if idx != 0 and DutyCycle != 1.0 and idx != len(times):
        times = times[0:idx]
        states = states[0:idx]
    # Shift times to Tmin, Tmax
    times += Tmin
    return times, states
def generate_markov_bundles(BS, Rb, Lat, Tmin, Tmax, DutyCycle, Ton):
    """Compute bundle creation times for a flow driven by a two-state
    Markov (ON/OFF) data source.

    Parameters
    ----------
    BS : float
        Bundle size in bits.
    Rb : float
        Data rate in bits/second while the source is ON.
    Lat : float
        Maximum latency allowed for the data in a bundle.
    Tmin, Tmax : float
        Start and end time of the flow.
    DutyCycle, Ton : float
        Markov process parameters (see ``two_state_markov_process``).

    Returns
    -------
    t : list of float
        Times at which a bundle is created.
    buf : float
        Data left in the buffer at the end (0 whenever a final padded
        bundle flushes the remainder).
    """
    # Generate Markov intervals
    times, states = two_state_markov_process(Tmin, Tmax, DutyCycle, Ton)
    # Initial processing entry. If initial state is OFF, skip it
    ini = (states[0] == False)
    # Initialize variables
    t = []
    buf = 0
    state = True
    # Iterate over periods
    for i in range(ini, len(states)-1):
        # Handle OFF state only if buffer is not empty
        if state == False and buf != 0:
            # t_ref indicates the time at which the last bundle was sent. If no
            # bundles were ever sent, assume 0.
            t_ref = 0 if len(t) == 0 else t[-1]
            # If waiting for the start of the ON period will make you exceed
            # the latency requirement, send a bundle with half data half padding.
            while t_ref + Lat < times[i+1] and buf >= BS:
                t_ref = max(t_ref, times[i]) + Lat
                # BUGFIX: append the bundle creation time, not the list itself.
                # The original ``t.append(t)`` nested the list inside itself and
                # would crash on the next ``max(t_ref, ...)`` with a TypeError
                # if this branch were ever reached.
                t.append(t_ref)
                buf -= BS
        # Handle ON state
        if state == True:
            dv = buf + Rb * (times[i+1] - times[i])
            N_bnd = int(np.floor(dv / BS))
            t_bnd = times[i] + np.arange(1,N_bnd+1)*(BS / Rb)
            if len(t_bnd) > 0: t_bnd -= buf/Rb
            t_bnd = t_bnd[t_bnd <= times[i+1]]
            t.extend(t_bnd)
            buf = dv - N_bnd * BS
        # Switch state
        state = not state
    # Add one last bundle add the end of t to transmit all unaccounted data.
    # Note that this bundle might have some padding data
    if buf > 0:
        t_ref = times[-1] if len(t) == 0 else t[-1]
        if states[-1] == False:
            t.append(t_ref + Lat)
        else:
            t.append(max(t_ref, times[-1])+Lat)
        buf = 0
    # return times at which a bundle is delivered, and the amount of data left at the end
    return t, buf
def generate_bundles(traffic, id2alias, min_bundle_size=1024, max_bundle_size=8e9, lat_frac=0.5):
    """Attach bundle creation times and sizes to every flow in ``traffic``.

    For each flow the latency label is resolved to seconds via the global
    ``lat`` table, a bundle size is chosen so that filling one bundle stays
    within a fraction of the latency budget, and bundle creation times are
    drawn from the flow's Markov ON/OFF model. Flows are annotated in place
    and the same ``traffic`` dict is returned.
    """
    # Invert the node map: alias -> node id
    alias2id = {alias: nid for nid, alias in id2alias.items()}
    # Simulation epoch = earliest start time across all flows
    t0 = min(flow['StartTime'] for flow in traffic.values())
    for fid, flow in traffic.items():
        # Resolve latency class ('seconds'/'minutes'/'hours') to a number
        flow['Latency'] = lat.loc[flow['DataType'].lower(), flow['Latency'].lower()]
        # A bundle must fill within a fraction of the latency budget
        bundle_lat = flow['Latency'] * min(lat_frac, flow['DutyCycle'])
        bundle_sz = int(flow['DataRate'] * bundle_lat)
        bundle_sz = min(max(min_bundle_size, bundle_sz), max_bundle_size)
        # Flow activity window relative to the simulation epoch
        Tmin = (flow['StartTime'] - t0).total_seconds()
        Tmax = (flow['EndTime'] - t0).total_seconds()
        # Draw the bundle creation times for this flow
        t, _ = generate_markov_bundles(bundle_sz, flow['DataRate'], flow['Latency'],
                                       Tmin, Tmax, flow['DutyCycle'], flow['Duration'])
        # Annotate the flow in place
        flow['Bundles'] = t
        flow['BundleSize'] = bundle_sz
        flow['fid'] = fid
        # Translate endpoint aliases back to node ids
        flow['Orig'] = alias2id[flow['TransElementName']]
        flow['Dest'] = alias2id[flow['ReceiveElementName']]
    return traffic
# ============================================================================================================
# === SIMULATION CLASS
# ============================================================================================================
class DtnMarkovBundleGenerator(DtnAbstractGenerator):
    """Traffic generator that creates bundles according to per-flow two-state
    Markov (ON/OFF) models loaded from a traffic file.

    Flows for *all* nodes are loaded once and cached in the class-level
    ``_all_flows`` map ``{node id: {flow id: flow properties}}``; each
    generator instance then only runs the flows originating at its node.
    """
    # Class-level cache shared by every generator instance (built lazily)
    _all_flows = None
    def __init__(self, env, parent, props):
        super().__init__(env, parent, props)
        # Initialize variables
        self.traffic_file = self.config['globals'].indir / props.file
    def reset(self):
        """Clear the shared flow cache so a new simulation reloads traffic."""
        # Reset static variables
        super().reset()
        self.__class__._all_flows = None
    def initialize(self):
        """Load flows (once, globally) and start one process per local flow."""
        # Setting static variables only once
        if not self.__class__._all_flows: self.load_flows()
        # Get flows for this generator
        self.flows = self.__class__._all_flows[self.parent.nid]
        # Iterate over all flows for this generator
        for _, flow in self.flows.items(): self.env.process(self.run(flow))
    def load_flows(self):
        """Load the traffic file and precompute bundle times for every flow."""
        # Load generators file and shift it to the simulation epoch
        traffic = shift_traffic(load_traffic_file(self.traffic_file), self.epoch)
        # Generate bundles
        id2alias = {nid: dd.alias for nid, dd in self.config['network'].nodes.items()}
        flows = generate_bundles(traffic, id2alias, min_bundle_size=int(self.props.min_bundle_size),
                                 max_bundle_size=float(self.props.max_bundle_size),
                                 lat_frac=float(self.props.latency_fraction))
        # Log bundle generation
        for fid, flow in flows.items():
            if len(flow['Bundles']) == 0:
                self.disp('Flow {}: No bundles generated', fid)
            else:
                self.disp('Flow {}: {} bundles generated between t={:.3f} and t={:.3f}', fid, len(flow['Bundles']),
                          min(flow['Bundles']), max(flow['Bundles']))
        # Group flows by origin node: {node id: {flow id: flow props}}
        d = defaultdict(dict)
        for fid, flow in flows.items(): d[flow['Orig']][fid] = flow
        # Store all the flows generated
        self.__class__._all_flows = d
    def run(self, flow):
        """Generator process: wait between precomputed bundle times, then
        create and forward one bundle per time point."""
        # If no bundles, return
        if len(flow['Bundles']) == 0: return
        # Convert absolute bundle times to inter-bundle wait intervals
        bnd_dt = np.insert(np.diff(flow['Bundles']), 0, flow['Bundles'][0])
        # Iterate over bundle transmit times
        for dt in bnd_dt:
            # Wait until next time to transmit
            yield self.env.timeout(dt)
            # Create a new bundle and record it
            new_bundle = Bundle.from_flow(self.env, flow)
            # Monitor the new bundle creation
            self.monitor_new_bundle(new_bundle)
            # Log the new bundle creation
            self.disp('{} is created at node {}', new_bundle, self.parent.nid)
            # Schedule routers of bundle
            self.parent.forward(new_bundle)
    def predicted_data_vol(self):
        """ Predicted data volume in [bits] """
        # Rate x active duration, summed over this node's flows
        return sum(f['DataRate']*((f['EndTime']-f['StartTime']).total_seconds())
                   for f in self.flows.values())
|
[
"pandas.DataFrame",
"numpy.random.uniform",
"numpy.zeros_like",
"numpy.ceil",
"numpy.argmax",
"numpy.floor",
"numpy.random.exponential",
"numpy.zeros",
"collections.defaultdict",
"numpy.cumsum",
"numpy.diff",
"numpy.array",
"numpy.arange",
"simulator.core.DtnBundle.Bundle.from_flow",
"simulator.utils.DtnIO.load_traffic_file"
] |
[((710, 960), 'numpy.array', 'np.array', (['[[60, np.nan, np.nan], [60, np.nan, np.nan], [60, np.nan, 3600], [60, 60,\n np.nan], [60, 900, 21600], [60, 300, 3600], [60, 300, np.nan], [60, 60,\n np.nan], [60, 900, 21600], [60, 900, 21600], [60, 900, 21600], [60, 300,\n np.nan]]'], {}), '([[60, np.nan, np.nan], [60, np.nan, np.nan], [60, np.nan, 3600], [\n 60, 60, np.nan], [60, 900, 21600], [60, 300, 3600], [60, 300, np.nan],\n [60, 60, np.nan], [60, 900, 21600], [60, 900, 21600], [60, 900, 21600],\n [60, 300, np.nan]])\n', (718, 960), True, 'import numpy as np\n'), ((1156, 1454), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': '(1.0 * lat)', 'columns': "['seconds', 'minutes', 'hours']", 'index': "['voice', 'biomedical', 'caution and warning', 'command and teleoperation',\n 'file', 'health and status', 'nav type 1 products',\n 'nav type 2 message', 'pao hd video', 'sci hd video', 'science', 'sd video'\n ]"}), "(data=1.0 * lat, columns=['seconds', 'minutes', 'hours'], index\n =['voice', 'biomedical', 'caution and warning',\n 'command and teleoperation', 'file', 'health and status',\n 'nav type 1 products', 'nav type 2 message', 'pao hd video',\n 'sci hd video', 'science', 'sd video'])\n", (1168, 1454), True, 'import pandas as pd\n'), ((2770, 2802), 'numpy.zeros_like', 'np.zeros_like', (['times'], {'dtype': 'bool'}), '(times, dtype=bool)\n', (2783, 2802), True, 'import numpy as np\n'), ((3052, 3076), 'numpy.argmax', 'np.argmax', (['(times == Tend)'], {}), '(times == Tend)\n', (3061, 3076), True, 'import numpy as np\n'), ((8540, 8557), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (8551, 8557), False, 'from collections import defaultdict\n'), ((2067, 2116), 'numpy.ceil', 'np.ceil', (['(0.5 * K * (Tend - Tstart) / (Ton + Toff))'], {}), '(0.5 * K * (Tend - Tstart) / (Ton + Toff))\n', (2074, 2116), True, 'import numpy as np\n'), ((2318, 2359), 'numpy.random.exponential', 'np.random.exponential', ([], {'scale': 'Ton', 'size': 'Ns'}), '(scale=Ton, 
size=Ns)\n', (2339, 2359), True, 'import numpy as np\n'), ((2383, 2425), 'numpy.random.exponential', 'np.random.exponential', ([], {'scale': 'Toff', 'size': 'Ns'}), '(scale=Toff, size=Ns)\n', (2404, 2425), True, 'import numpy as np\n'), ((2447, 2463), 'numpy.zeros', 'np.zeros', (['(2 * Ns)'], {}), '(2 * Ns)\n', (2455, 2463), True, 'import numpy as np\n'), ((2729, 2745), 'numpy.cumsum', 'np.cumsum', (['times'], {}), '(times)\n', (2738, 2745), True, 'import numpy as np\n'), ((7621, 7657), 'simulator.utils.DtnIO.load_traffic_file', 'load_traffic_file', (['self.traffic_file'], {}), '(self.traffic_file)\n', (7638, 7657), False, 'from simulator.utils.DtnIO import load_traffic_file\n'), ((8883, 8907), 'numpy.diff', 'np.diff', (["flow['Bundles']"], {}), "(flow['Bundles'])\n", (8890, 8907), True, 'import numpy as np\n'), ((9164, 9196), 'simulator.core.DtnBundle.Bundle.from_flow', 'Bundle.from_flow', (['self.env', 'flow'], {}), '(self.env, flow)\n', (9180, 9196), False, 'from simulator.core.DtnBundle import Bundle\n'), ((2264, 2283), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (2281, 2283), True, 'import numpy as np\n'), ((4418, 4435), 'numpy.floor', 'np.floor', (['(dv / BS)'], {}), '(dv / BS)\n', (4426, 4435), True, 'import numpy as np\n'), ((4468, 4491), 'numpy.arange', 'np.arange', (['(1)', '(N_bnd + 1)'], {}), '(1, N_bnd + 1)\n', (4477, 4491), True, 'import numpy as np\n')]
|
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
from twisted.trial.unittest import TestCase, SkipTest
from twisted.internet.defer import Deferred
from twisted.web.server import Site
from twisted.internet import reactor
from twisted.web.client import Agent
from twisted.internet.error import TimeoutError
from twisted.web.client import HTTPConnectionPool
from twisted.web.client import ContentDecoderAgent, GzipDecoder
from twisted.internet import ssl
from twisted.cred.portal import Portal
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.cred.credentials import Anonymous, UsernamePassword
from twisted.web.guard import HTTPAuthSessionWrapper, BasicCredentialFactory
from fastjsonrpc.client import ReceiverProtocol
from fastjsonrpc.client import StringProducer
from fastjsonrpc.client import ProxyFactory
from fastjsonrpc.client import Proxy
from fastjsonrpc import jsonrpc
from dummyserver import DummyServer, AuthDummyServer
class TestReceiverProtocol(TestCase):
    """Unit tests for ReceiverProtocol: body accumulation across
    dataReceived calls and firing of the 'finished' deferred."""
    def setUp(self):
        self.rp = ReceiverProtocol(Deferred())
    def test_init(self):
        self.assertTrue(isinstance(self.rp.finished, Deferred))
    def test_dataReceivedOnce(self):
        chunk = 'some random string'
        self.rp.dataReceived(chunk)
        self.assertEquals(self.rp.body, chunk)
    def test_dataReceivedTwice(self):
        first = 'string1'
        second = 'string2'
        self.rp.dataReceived(first)
        self.rp.dataReceived(second)
        self.assertEquals(self.rp.body, first + second)
    def test_connectionLostCalled(self):
        self.rp.dataReceived('some random string')
        self.rp.connectionLost(None)
        self.assertTrue(self.rp.finished.called)
    def test_connectionLostCalledData(self):
        chunk = 'some random string'
        self.rp.dataReceived(chunk)
        def check(received):
            self.assertEquals(received, chunk)
        self.rp.finished.addCallback(check)
        self.rp.connectionLost(None)
        return self.rp.finished
class DummyConsumer(object):
    """Trivial consumer stand-in that accumulates everything written to it
    in its ``body`` attribute."""
    def __init__(self):
        self.body = ''
    def write(self, data):
        self.body = self.body + data
class TestStringProducer(TestCase):
    """Unit tests for StringProducer: constructor attributes and delivery
    of the whole body to a consumer."""
    def test_init(self):
        payload = 'some random string'
        producer = StringProducer(payload)
        self.assertEquals(producer.body, payload)
        self.assertEquals(producer.length, len(payload))
    def test_startProducing(self):
        payload = 'some random string'
        producer = StringProducer(payload)
        consumer = DummyConsumer()
        d = producer.startProducing(consumer)
        def check(_):
            self.assertEquals(consumer.body, payload)
        d.addCallback(check)
        return d
class DummyResponse(object):
    """Minimal stand-in for a twisted.web response: stores a canned body and
    pushes it through a body-collecting protocol on request."""
    def __init__(self, body):
        self.body = body
    def deliverBody(self, protocol):
        # Mimic the delivery contract: hand the whole body over in a single
        # dataReceived call, then signal completion.
        self.protocol = protocol
        protocol.dataReceived(self.body)
        protocol.connectionLost(None)
class TestProxy(TestCase):
    """
    Integration tests for Proxy, run against a real DummyServer listening
    on an ephemeral localhost TCP port (see setUp).
    @TODO: Test callRemote using fake agent, using predefined 'output' JSON,
           like in server tests. This might require a bit of refactoring in
           client itself.
    """
    def setUp(self):
        # Port 0 lets the OS pick a free port; remember it for URL building
        site = Site(DummyServer())
        self.port = reactor.listenTCP(0, site)
        self.portNumber = self.port._realPortNumber
    def tearDown(self):
        self.port.stopListening()
    def test_init(self):
        # Defaults: anonymous credentials, no connect timeout
        url = 'http://example.org/abcdef'
        version = '2.0'
        proxy = Proxy(url, version)
        self.assertEquals(proxy.url, url)
        self.assertEquals(proxy.version, version)
        self.assertTrue(isinstance(proxy.credentials, Anonymous))
        self.assertTrue(proxy.agent._connectTimeout is None)
    def test_init_agent(self):
        proxy = Proxy('', '')
        self.assertTrue(isinstance(proxy.agent, Agent))
    def test_bodyFromResponseProtocolBody(self):
        # The receiver protocol should have accumulated the whole body
        data = 'some random string'
        proxy = Proxy('', '')
        response = DummyResponse(data)
        d = proxy.bodyFromResponse(response)
        def finished(_):
            self.assertEquals(response.protocol.body, data)
        d.addCallback(finished)
        return d
    def test_bodyFromResponseDeferred(self):
        # The returned deferred should fire with the body itself
        data = 'some random string'
        proxy = Proxy('', '')
        response = DummyResponse(data)
        d = proxy.bodyFromResponse(response)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_callRemoteV1Ok(self):
        # Round-trip a positional echo call over JSON-RPC v1
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1)
        d = proxy.callRemote('echo', data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_callRemoteV2Ok(self):
        # Round-trip a positional echo call over JSON-RPC v2
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_2)
        d = proxy.callRemote('echo', data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_callRemoteV1NoMethod(self):
        # Unknown method must fail with METHOD_NOT_FOUND in v1 format
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1)
        d = proxy.callRemote('nosuchmethod')
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            self.assertEquals(result.strerror, 'Method nosuchmethod not found')
            self.assertEquals(result.errno, jsonrpc.METHOD_NOT_FOUND)
            self.assertEquals(result.version, jsonrpc.VERSION_1)
        e.addCallback(finished)
        return e
    def test_callRemoteV2InvalidParams(self):
        # Wrong argument count must map to INVALID_PARAMS in v2 format
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_2)
        d = proxy.callRemote('echo', 'abc', 'def')
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            msg = 'jsonrpc_echo() takes exactly 2 arguments (3 given)'
            self.assertEquals(result.strerror, msg)
            self.assertEquals(result.errno, jsonrpc.INVALID_PARAMS)
            self.assertEquals(result.version, unicode(jsonrpc.VERSION_2))
        e.addCallback(finished)
        return e
    def test_keywordsV1(self):
        # Keyword arguments must be accepted and echoed back (v1)
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1)
        d = proxy.callRemote('echo', data=data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_keywordsV2(self):
        # Keyword arguments must be accepted and echoed back (v2)
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_2)
        d = proxy.callRemote('echo', data=data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_keywordsUnexpected(self):
        # An unknown keyword must surface as INVALID_PARAMS
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1)
        d = proxy.callRemote('echo', wrongname=data)
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            msg = 'jsonrpc_echo() got an unexpected keyword argument ' + \
                  '\'wrongname\''
            self.assertEquals(result.strerror, msg)
            self.assertEquals(result.errno, jsonrpc.INVALID_PARAMS)
        # NOTE(review): sibling tests return e here; presumably equivalent
        # since assertFailure chains onto d -- confirm
        e.addCallback(finished)
        return d
    def test_timeout(self):
        """ Google doesn't offer any services on our crazy ports """
        addr = 'http://google.com:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1, connectTimeout=0.1)
        d = proxy.callRemote('sleep', 5)
        def finished(result):
            self.assertTrue(isinstance(result.value, TimeoutError))
        d.addErrback(finished)
        return d
    def test_anonymousLogin(self):
        # Explicit Anonymous credentials behave like the default
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1, credentials=Anonymous())
        d = proxy.callRemote('echo', data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_loginNotNeccessary(self):
        # Supplying credentials to an unauthenticated server must not break
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        credentials = UsernamePassword('user', 'password')
        proxy = Proxy(addr, credentials=credentials)
        d = proxy.callRemote('echo', data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_poolPassing(self):
        # A caller-supplied connection pool must be used as-is (same object)
        pool = HTTPConnectionPool(reactor)
        proxy = Proxy('', pool=pool)
        self.assertEqual(id(proxy.agent._pool), id(pool))
class TestProxyFactory(TestCase):
    """Tests for ProxyFactory: default settings, propagation of per-factory
    configuration into each proxy, optional shared connection pool,
    persistent-connection options and HTTP compression."""
    def test_init(self):
        # Factory defaults: v1, anonymous credentials, no connect timeout
        factory = ProxyFactory()
        proxy = factory.getProxy('')
        self.assertEqual(proxy.version, jsonrpc.VERSION_1)
        self.assertTrue(isinstance(proxy.credentials, Anonymous))
        self.assertTrue(proxy.agent._connectTimeout is None)
    def test_getProxy(self):
        # Every factory setting must be copied into each new proxy, while
        # each proxy still gets its own object (and its own pool by default)
        url1 = 'http://fakeurl1'
        url2 = 'http://fakeurl2'
        version = jsonrpc.VERSION_2
        connectTimeout = 30
        cred = UsernamePassword('username', 'password')
        contextFactory = WebClientContextFactory()
        factory = ProxyFactory(version=version, connectTimeout=connectTimeout,
                               credentials=cred, contextFactory=contextFactory)
        proxy1 = factory.getProxy(url1)
        proxy2 = factory.getProxy(url2)
        self.assertNotEqual(id(proxy1), id(proxy2))
        self.assertNotEqual(id(proxy1.agent._pool), id(proxy2.agent._pool))
        self.assertEqual(proxy1.url, url1)
        self.assertEqual(proxy2.url, url2)
        self.assertEqual(proxy1.version, version)
        self.assertEqual(proxy2.version, version)
        self.assertEqual(proxy1.credentials, cred)
        self.assertEqual(proxy2.credentials, cred)
        self.assertEqual(proxy1.agent._connectTimeout, connectTimeout)
        self.assertEqual(proxy2.agent._connectTimeout, connectTimeout)
    def test_sharedPool(self):
        # sharedPool=True: distinct proxies, one factory-owned pool
        factory = ProxyFactory(sharedPool=True)
        proxy1 = factory.getProxy('')
        proxy2 = factory.getProxy('')
        proxy3 = factory.getProxy('')
        self.assertNotEqual(id(proxy1), id(proxy2))
        self.assertNotEqual(id(proxy2), id(proxy3))
        self.assertNotEqual(id(proxy1), id(proxy3))
        self.assertEqual(id(proxy1.agent._pool), id(factory._pool))
        self.assertEqual(id(proxy2.agent._pool), id(factory._pool))
        self.assertEqual(id(proxy3.agent._pool), id(factory._pool))
    #
    # I trust twisted's well tested Agent and HTTPConnectionPool classes
    #
    def test_init_persistentConnections(self):
        # Persistence options must be forwarded to the per-proxy pool
        persistent = True
        maxConn = 5
        timeout = 3600
        retry = False
        factory = ProxyFactory(persistent=persistent,
                               maxPersistentPerHost=maxConn,
                               cachedConnectionTimeout=timeout,
                               retryAutomatically=retry)
        proxy = factory.getProxy('')
        self.assertEqual(proxy.agent._pool.persistent, persistent)
        self.assertEqual(proxy.agent._pool.maxPersistentPerHost, maxConn)
        self.assertEqual(proxy.agent._pool.cachedConnectionTimeout, timeout)
        self.assertEqual(proxy.agent._pool.retryAutomatically, retry)
    def test_init_sharedPersistentConnections(self):
        # Persistence options must also apply to the shared pool
        persistent = True
        maxConn = 5
        timeout = 3600
        retry = False
        factory = ProxyFactory(sharedPool=True,
                               persistent=persistent,
                               maxPersistentPerHost=maxConn,
                               cachedConnectionTimeout=timeout,
                               retryAutomatically=retry)
        proxy1 = factory.getProxy('')
        proxy2 = factory.getProxy('')
        self.assertEqual(id(proxy1.agent._pool), id(proxy2.agent._pool))
        self.assertEqual(proxy1.agent._pool.persistent, persistent)
        self.assertEqual(proxy1.agent._pool.maxPersistentPerHost, maxConn)
        self.assertEqual(proxy1.agent._pool.cachedConnectionTimeout, timeout)
        self.assertEqual(proxy1.agent._pool.retryAutomatically, retry)
        self.assertEqual(proxy2.agent._pool.persistent, persistent)
        self.assertEqual(proxy2.agent._pool.maxPersistentPerHost, maxConn)
        self.assertEqual(proxy2.agent._pool.cachedConnectionTimeout, timeout)
        self.assertEqual(proxy2.agent._pool.retryAutomatically, retry)
    def test_init_HTTPCompression(self):
        # compressedHTTP=True wraps the agent in a gzip-decoding agent
        factory = ProxyFactory(compressedHTTP=True)
        proxy = factory.getProxy('')
        self.assertTrue(isinstance(proxy.agent, ContentDecoderAgent))
        self.assertTrue(isinstance(proxy.agent._agent, Agent))
        self.assertTrue('gzip' in proxy.agent._decoders)
        self.assertEqual(proxy.agent._decoders['gzip'], GzipDecoder)
class WebClientContextFactory(ssl.ClientContextFactory):
    """SSL context factory adapter for twisted's HTTP Agent.

    Agent calls getContext(hostname, port), but the base
    ClientContextFactory.getContext takes no such arguments, so this shim
    simply ignores them (no per-host handling happens here).
    """
    def getContext(self, hostname, port):
        return ssl.ClientContextFactory.getContext(self)
class TestSSLProxy(TestCase):
    """
    Tests for Proxy over HTTPS, against a local SSL DummyServer. Skipped
    unless a key/certificate pair is present in ../ssl-keys/.
    @TODO: All this does is checking whether Agent connects to SSL server...
    """
    def setUp(self):
        # Skip the whole class when the test key material is missing
        if not (os.path.exists('../ssl-keys/server.key') and
                os.path.exists('../ssl-keys/server.crt')):
            raise SkipTest('For testing SSL, please put server.key and ' + \
                           'server.crt to ssl-keys/')
        SSLFactory = ssl.DefaultOpenSSLContextFactory('../ssl-keys/server.key',
                                                      '../ssl-keys/server.crt')
        site = Site(DummyServer())
        # Port 0 lets the OS pick a free port
        self.port = reactor.listenSSL(0, site, contextFactory=SSLFactory)
        self.portNumber = self.port._realPortNumber
    def tearDown(self):
        self.port.stopListening()
    def test_init(self):
        url = 'https://example.org/abcdef'
        version = '2.0'
        proxy = Proxy(url, version, contextFactory=WebClientContextFactory())
        self.assertEquals(proxy.url, url)
        self.assertEquals(proxy.version, version)
    def test_init_agent(self):
        proxy = Proxy('', '', contextFactory=WebClientContextFactory())
        self.assertTrue(isinstance(proxy.agent, Agent))
    def test_callRemote(self):
        """
        The test itself passes, but trial raises "Reactor was unclean" after
        tearDown.. Might be related to
        http://twistedmatrix.com/trac/ticket/5118
        """
        data = 'some random string'
        addr = 'https://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1,
                      contextFactory=WebClientContextFactory())
        d = proxy.callRemote('echo', data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
class TestHTTPAuth(TestCase):
    """
    Tests for HTTP basic-auth handling in Proxy, against a guarded
    AuthDummyServer with a single account (user='user', password='password',
    per the checker in setUp).
    @TODO: All this does is basically checking whether auth in Agent works...
    """
    def setUp(self):
        # One valid account: username 'user', password 'password'
        checker = InMemoryUsernamePasswordDatabaseDontUse(user='password')
        portal = Portal(AuthDummyServer(), [checker])
        credentialFactory = BasicCredentialFactory('localhost')
        resource = HTTPAuthSessionWrapper(portal, [credentialFactory])
        site = Site(resource)
        self.port = reactor.listenTCP(0, site)
        self.portNumber = self.port._realPortNumber
    def tearDown(self):
        self.port.stopListening()
    def test_loginOk(self):
        # Valid credentials: the call must succeed and echo the payload
        data = 'some random string'
        addr = 'http://localhost:%s' % self.portNumber
        credentials = UsernamePassword('user', 'password')
        proxy = Proxy(addr, credentials=credentials)
        d = proxy.callRemote('echo', data)
        def finished(result):
            self.assertEquals(result, data)
        d.addCallback(finished)
        return d
    def test_loginWrongPassword(self):
        # NOTE(review): '<PASSWORD>' looks like a sanitized placeholder; this
        # case should use the valid user with a wrong password -- restore
        # the original literals
        addr = 'http://localhost:%s' % self.portNumber
        credentials = UsernamePassword('<PASSWORD>', '<PASSWORD>')
        proxy = Proxy(addr, credentials=credentials)
        d = proxy.callRemote('echo', '')
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            self.assertEquals(result.strerror, 'Unauthorized')
            self.assertEquals(result.errno, jsonrpc.INVALID_REQUEST)
        e.addCallback(finished)
        return d
    def test_loginWrongUser(self):
        # NOTE(review): '<PASSWORD>' looks like a sanitized placeholder; this
        # case should use an unknown username -- restore the original literals
        addr = 'http://localhost:%s' % self.portNumber
        credentials = UsernamePassword('<PASSWORD>', '<PASSWORD>')
        proxy = Proxy(addr, credentials=credentials)
        d = proxy.callRemote('echo', '')
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            self.assertEquals(result.strerror, 'Unauthorized')
            self.assertEquals(result.errno, jsonrpc.INVALID_REQUEST)
        e.addCallback(finished)
        return d
    def test_noCredentials(self):
        # Missing credentials must surface as an Unauthorized JSONRPCError
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, jsonrpc.VERSION_1)
        d = proxy.callRemote('echo', '')
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            self.assertEquals(result.strerror, 'Unauthorized')
            self.assertEquals(result.errno, jsonrpc.INVALID_REQUEST)
        e.addCallback(finished)
        return d
    def test_anonymousError(self):
        # Explicit Anonymous credentials must also be rejected by the guard
        addr = 'http://localhost:%s' % self.portNumber
        proxy = Proxy(addr, credentials=Anonymous())
        d = proxy.callRemote('echo', '')
        e = self.assertFailure(d, jsonrpc.JSONRPCError)
        def finished(result):
            self.assertEquals(result.strerror, 'Unauthorized')
            self.assertEquals(result.errno, jsonrpc.INVALID_REQUEST)
        e.addCallback(finished)
        return d
|
[
"dummyserver.AuthDummyServer",
"twisted.cred.credentials.UsernamePassword",
"twisted.web.guard.BasicCredentialFactory",
"dummyserver.DummyServer",
"os.path.abspath",
"twisted.cred.credentials.Anonymous",
"os.path.exists",
"fastjsonrpc.client.ProxyFactory",
"twisted.web.client.HTTPConnectionPool",
"twisted.trial.unittest.SkipTest",
"twisted.web.server.Site",
"fastjsonrpc.client.StringProducer",
"twisted.internet.reactor.listenTCP",
"twisted.cred.checkers.InMemoryUsernamePasswordDatabaseDontUse",
"twisted.internet.reactor.listenSSL",
"fastjsonrpc.client.Proxy",
"twisted.internet.ssl.ClientContextFactory.getContext",
"twisted.internet.defer.Deferred",
"twisted.web.guard.HTTPAuthSessionWrapper",
"twisted.internet.ssl.DefaultOpenSSLContextFactory"
] |
[((40, 61), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (55, 61), False, 'import os\n'), ((2324, 2344), 'fastjsonrpc.client.StringProducer', 'StringProducer', (['data'], {}), '(data)\n', (2338, 2344), False, 'from fastjsonrpc.client import StringProducer\n'), ((2520, 2540), 'fastjsonrpc.client.StringProducer', 'StringProducer', (['data'], {}), '(data)\n', (2534, 2540), False, 'from fastjsonrpc.client import StringProducer\n'), ((3292, 3318), 'twisted.internet.reactor.listenTCP', 'reactor.listenTCP', (['(0)', 'site'], {}), '(0, site)\n', (3309, 3318), False, 'from twisted.internet import reactor\n'), ((3539, 3558), 'fastjsonrpc.client.Proxy', 'Proxy', (['url', 'version'], {}), '(url, version)\n', (3544, 3558), False, 'from fastjsonrpc.client import Proxy\n'), ((3826, 3839), 'fastjsonrpc.client.Proxy', 'Proxy', (['""""""', '""""""'], {}), "('', '')\n", (3831, 3839), False, 'from fastjsonrpc.client import Proxy\n'), ((4000, 4013), 'fastjsonrpc.client.Proxy', 'Proxy', (['""""""', '""""""'], {}), "('', '')\n", (4005, 4013), False, 'from fastjsonrpc.client import Proxy\n'), ((4333, 4346), 'fastjsonrpc.client.Proxy', 'Proxy', (['""""""', '""""""'], {}), "('', '')\n", (4338, 4346), False, 'from fastjsonrpc.client import Proxy\n'), ((4700, 4730), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_1'], {}), '(addr, jsonrpc.VERSION_1)\n', (4705, 4730), False, 'from fastjsonrpc.client import Proxy\n'), ((5043, 5073), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_2'], {}), '(addr, jsonrpc.VERSION_2)\n', (5048, 5073), False, 'from fastjsonrpc.client import Proxy\n'), ((5355, 5385), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_1'], {}), '(addr, jsonrpc.VERSION_1)\n', (5360, 5385), False, 'from fastjsonrpc.client import Proxy\n'), ((5901, 5931), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_2'], {}), '(addr, jsonrpc.VERSION_2)\n', (5906, 5931), False, 'from fastjsonrpc.client import 
Proxy\n'), ((6525, 6555), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_1'], {}), '(addr, jsonrpc.VERSION_1)\n', (6530, 6555), False, 'from fastjsonrpc.client import Proxy\n'), ((6869, 6899), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_2'], {}), '(addr, jsonrpc.VERSION_2)\n', (6874, 6899), False, 'from fastjsonrpc.client import Proxy\n'), ((7221, 7251), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_1'], {}), '(addr, jsonrpc.VERSION_1)\n', (7226, 7251), False, 'from fastjsonrpc.client import Proxy\n'), ((7841, 7891), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_1'], {'connectTimeout': '(0.1)'}), '(addr, jsonrpc.VERSION_1, connectTimeout=0.1)\n', (7846, 7891), False, 'from fastjsonrpc.client import Proxy\n'), ((8603, 8639), 'twisted.cred.credentials.UsernamePassword', 'UsernamePassword', (['"""user"""', '"""password"""'], {}), "('user', 'password')\n", (8619, 8639), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n'), ((8656, 8692), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr'], {'credentials': 'credentials'}), '(addr, credentials=credentials)\n', (8661, 8692), False, 'from fastjsonrpc.client import Proxy\n'), ((8909, 8936), 'twisted.web.client.HTTPConnectionPool', 'HTTPConnectionPool', (['reactor'], {}), '(reactor)\n', (8927, 8936), False, 'from twisted.web.client import HTTPConnectionPool\n'), ((8953, 8973), 'fastjsonrpc.client.Proxy', 'Proxy', (['""""""'], {'pool': 'pool'}), "('', pool=pool)\n", (8958, 8973), False, 'from fastjsonrpc.client import Proxy\n'), ((9113, 9127), 'fastjsonrpc.client.ProxyFactory', 'ProxyFactory', ([], {}), '()\n', (9125, 9127), False, 'from fastjsonrpc.client import ProxyFactory\n'), ((9528, 9568), 'twisted.cred.credentials.UsernamePassword', 'UsernamePassword', (['"""username"""', '"""password"""'], {}), "('username', 'password')\n", (9544, 9568), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n'), ((9639, 
9753), 'fastjsonrpc.client.ProxyFactory', 'ProxyFactory', ([], {'version': 'version', 'connectTimeout': 'connectTimeout', 'credentials': 'cred', 'contextFactory': 'contextFactory'}), '(version=version, connectTimeout=connectTimeout, credentials=\n cred, contextFactory=contextFactory)\n', (9651, 9753), False, 'from fastjsonrpc.client import ProxyFactory\n'), ((10472, 10501), 'fastjsonrpc.client.ProxyFactory', 'ProxyFactory', ([], {'sharedPool': '(True)'}), '(sharedPool=True)\n', (10484, 10501), False, 'from fastjsonrpc.client import ProxyFactory\n'), ((11224, 11352), 'fastjsonrpc.client.ProxyFactory', 'ProxyFactory', ([], {'persistent': 'persistent', 'maxPersistentPerHost': 'maxConn', 'cachedConnectionTimeout': 'timeout', 'retryAutomatically': 'retry'}), '(persistent=persistent, maxPersistentPerHost=maxConn,\n cachedConnectionTimeout=timeout, retryAutomatically=retry)\n', (11236, 11352), False, 'from fastjsonrpc.client import ProxyFactory\n'), ((11934, 12080), 'fastjsonrpc.client.ProxyFactory', 'ProxyFactory', ([], {'sharedPool': '(True)', 'persistent': 'persistent', 'maxPersistentPerHost': 'maxConn', 'cachedConnectionTimeout': 'timeout', 'retryAutomatically': 'retry'}), '(sharedPool=True, persistent=persistent, maxPersistentPerHost=\n maxConn, cachedConnectionTimeout=timeout, retryAutomatically=retry)\n', (11946, 12080), False, 'from fastjsonrpc.client import ProxyFactory\n'), ((12998, 13031), 'fastjsonrpc.client.ProxyFactory', 'ProxyFactory', ([], {'compressedHTTP': '(True)'}), '(compressedHTTP=True)\n', (13010, 13031), False, 'from fastjsonrpc.client import ProxyFactory\n'), ((13445, 13486), 'twisted.internet.ssl.ClientContextFactory.getContext', 'ssl.ClientContextFactory.getContext', (['self'], {}), '(self)\n', (13480, 13486), False, 'from twisted.internet import ssl\n'), ((13906, 13994), 'twisted.internet.ssl.DefaultOpenSSLContextFactory', 'ssl.DefaultOpenSSLContextFactory', (['"""../ssl-keys/server.key"""', '"""../ssl-keys/server.crt"""'], {}), 
"('../ssl-keys/server.key',\n '../ssl-keys/server.crt')\n", (13938, 13994), False, 'from twisted.internet import ssl\n'), ((14100, 14153), 'twisted.internet.reactor.listenSSL', 'reactor.listenSSL', (['(0)', 'site'], {'contextFactory': 'SSLFactory'}), '(0, site, contextFactory=SSLFactory)\n', (14117, 14153), False, 'from twisted.internet import reactor\n'), ((15449, 15505), 'twisted.cred.checkers.InMemoryUsernamePasswordDatabaseDontUse', 'InMemoryUsernamePasswordDatabaseDontUse', ([], {'user': '"""password"""'}), "(user='password')\n", (15488, 15505), False, 'from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse\n'), ((15588, 15623), 'twisted.web.guard.BasicCredentialFactory', 'BasicCredentialFactory', (['"""localhost"""'], {}), "('localhost')\n", (15610, 15623), False, 'from twisted.web.guard import HTTPAuthSessionWrapper, BasicCredentialFactory\n'), ((15643, 15694), 'twisted.web.guard.HTTPAuthSessionWrapper', 'HTTPAuthSessionWrapper', (['portal', '[credentialFactory]'], {}), '(portal, [credentialFactory])\n', (15665, 15694), False, 'from twisted.web.guard import HTTPAuthSessionWrapper, BasicCredentialFactory\n'), ((15710, 15724), 'twisted.web.server.Site', 'Site', (['resource'], {}), '(resource)\n', (15714, 15724), False, 'from twisted.web.server import Site\n'), ((15746, 15772), 'twisted.internet.reactor.listenTCP', 'reactor.listenTCP', (['(0)', 'site'], {}), '(0, site)\n', (15763, 15772), False, 'from twisted.internet import reactor\n'), ((16027, 16063), 'twisted.cred.credentials.UsernamePassword', 'UsernamePassword', (['"""user"""', '"""password"""'], {}), "('user', 'password')\n", (16043, 16063), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n'), ((16080, 16116), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr'], {'credentials': 'credentials'}), '(addr, credentials=credentials)\n', (16085, 16116), False, 'from fastjsonrpc.client import Proxy\n'), ((16402, 16446), 'twisted.cred.credentials.UsernamePassword', 
'UsernamePassword', (['"""<PASSWORD>"""', '"""<PASSWORD>"""'], {}), "('<PASSWORD>', '<PASSWORD>')\n", (16418, 16446), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n'), ((16463, 16499), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr'], {'credentials': 'credentials'}), '(addr, credentials=credentials)\n', (16468, 16499), False, 'from fastjsonrpc.client import Proxy\n'), ((16923, 16967), 'twisted.cred.credentials.UsernamePassword', 'UsernamePassword', (['"""<PASSWORD>"""', '"""<PASSWORD>"""'], {}), "('<PASSWORD>', '<PASSWORD>')\n", (16939, 16967), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n'), ((16984, 17020), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr'], {'credentials': 'credentials'}), '(addr, credentials=credentials)\n', (16989, 17020), False, 'from fastjsonrpc.client import Proxy\n'), ((17437, 17467), 'fastjsonrpc.client.Proxy', 'Proxy', (['addr', 'jsonrpc.VERSION_1'], {}), '(addr, jsonrpc.VERSION_1)\n', (17442, 17467), False, 'from fastjsonrpc.client import Proxy\n'), ((1081, 1091), 'twisted.internet.defer.Deferred', 'Deferred', ([], {}), '()\n', (1089, 1091), False, 'from twisted.internet.defer import Deferred\n'), ((3257, 3270), 'dummyserver.DummyServer', 'DummyServer', ([], {}), '()\n', (3268, 3270), False, 'from dummyserver import DummyServer, AuthDummyServer\n'), ((13772, 13859), 'twisted.trial.unittest.SkipTest', 'SkipTest', (["('For testing SSL, please put server.key and ' + 'server.crt to ssl-keys/')"], {}), "('For testing SSL, please put server.key and ' +\n 'server.crt to ssl-keys/')\n", (13780, 13859), False, 'from twisted.trial.unittest import TestCase, SkipTest\n'), ((14065, 14078), 'dummyserver.DummyServer', 'DummyServer', ([], {}), '()\n', (14076, 14078), False, 'from dummyserver import DummyServer, AuthDummyServer\n'), ((15530, 15547), 'dummyserver.AuthDummyServer', 'AuthDummyServer', ([], {}), '()\n', (15545, 15547), False, 'from dummyserver import DummyServer, AuthDummyServer\n'), 
((8268, 8279), 'twisted.cred.credentials.Anonymous', 'Anonymous', ([], {}), '()\n', (8277, 8279), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n'), ((13650, 13690), 'os.path.exists', 'os.path.exists', (['"""../ssl-keys/server.key"""'], {}), "('../ssl-keys/server.key')\n", (13664, 13690), False, 'import os\n'), ((13711, 13751), 'os.path.exists', 'os.path.exists', (['"""../ssl-keys/server.crt"""'], {}), "('../ssl-keys/server.crt')\n", (13725, 13751), False, 'import os\n'), ((17909, 17920), 'twisted.cred.credentials.Anonymous', 'Anonymous', ([], {}), '()\n', (17918, 17920), False, 'from twisted.cred.credentials import Anonymous, UsernamePassword\n')]
|
"""Multi-agent traffic light example (single shared policy)."""
from ray.rllib.agents.ppo.ppo_policy import PPOTFPolicy
from flow.envs.multiagent import MyMultiTrafficLightGridPOEnv
from flow.networks import TrafficLightGridNetwork
from flow.core.params import SumoParams, EnvParams, InitialConfig, NetParams
from flow.core.params import InFlows, SumoCarFollowingParams, VehicleParams
from flow.controllers import SimCarFollowingController, GridRouter
from ray.tune.registry import register_env
from flow.utils.registry import make_create_env
import numpy as np
# Experiment parameters
N_ROLLOUTS = 20  # number of rollouts per training iteration
N_CPUS = 3  # number of parallel workers
# Environment parameters
HORIZON = 400  # time horizon of a single rollout
V_ENTER = 30  # enter speed for departing vehicles
INNER_LENGTH = 300  # length of inner edges in the traffic light grid network
LONG_LENGTH = 100  # length of final edge in route
SHORT_LENGTH = 300  # length of edges that vehicles start on
# number of vehicles originating in the left, right, top, and bottom edges
N_LEFT, N_RIGHT, N_TOP, N_BOTTOM = 0, 0, 0, 0
EDGE_INFLOW = 300  # inflow rate of vehicles at every edge
N_ROWS = 2  # number of rows of bidirectional lanes
N_COLUMNS = 2  # number of columns of bidirectional lanes
# we place a sufficient number of vehicles to ensure they confirm with the
# total number specified above. We also use a "right_of_way" speed mode to
# support traffic light compliance
vehicles = VehicleParams()
num_vehicles = (N_LEFT + N_RIGHT) * N_COLUMNS + (N_BOTTOM + N_TOP) * N_ROWS
vehicles.add(
    veh_id="human",
    acceleration_controller=(SimCarFollowingController, {}),
    car_following_params=SumoCarFollowingParams(
        min_gap=2.5,
        max_speed=V_ENTER,
        decel=7.5,  # avoid collisions at emergency stops
        speed_mode="right_of_way",
    ),
    routing_controller=(GridRouter, {}),
    num_vehicles=num_vehicles)
# inflows of vehicles are placed on all outer edges (listed here)
outer_edges = []
outer_edges += ["left{}_{}".format(N_ROWS, i) for i in range(N_COLUMNS)]
outer_edges += ["right0_{}".format(i) for i in range(N_COLUMNS)]
outer_edges += ["bot{}_0".format(i) for i in range(N_ROWS)]
outer_edges += ["top{}_{}".format(i, N_COLUMNS) for i in range(N_ROWS)]
# equal inflows for each edge (as dictated by the EDGE_INFLOW constant)
inflow = InFlows()
for edge in outer_edges:
    inflow.add(
        veh_type="human",
        edge=edge,
        # vehs_per_hour=EDGE_INFLOW,
        # probability=0.10,
        # NOTE(review): the hard-coded 600 veh/h below overrides the
        # EDGE_INFLOW constant declared above — confirm which is intended.
        vehs_per_hour = 600,
        departLane="free",
        departSpeed=V_ENTER)
myNetParams = NetParams(
    inflows=inflow,
    additional_params={
        "speed_limit": V_ENTER + 5,  # inherited from grid0 benchmark
        "grid_array": {
            "short_length": SHORT_LENGTH,
            "inner_length": INNER_LENGTH,
            "long_length": LONG_LENGTH,
            "row_num": N_ROWS,
            "col_num": N_COLUMNS,
            "cars_left": N_LEFT,
            "cars_right": N_RIGHT,
            "cars_top": N_TOP,
            "cars_bot": N_BOTTOM,
        },
        "horizontal_lanes": 1,
        "vertical_lanes": 1,
    },
)
flow_params = dict(
    # name of the experiment
    exp_tag="grid_0_{}x{}_i{}_multiagent".format(N_ROWS, N_COLUMNS, EDGE_INFLOW),
    # name of the flow environment the experiment is running on
    env_name=MyMultiTrafficLightGridPOEnv,
    # name of the network class the experiment is running on
    network=TrafficLightGridNetwork,
    # simulator that is used by the experiment
    simulator='traci',
    # sumo-related parameters (see flow.core.params.SumoParams)
    sim=SumoParams(
        restart_instance=True,
        sim_step=1,
        render=False,
    ),
    # environment related parameters (see flow.core.params.EnvParams)
    env=EnvParams(
        horizon=HORIZON,
        warmup_steps=0,
        sims_per_step=1,
        additional_params={
            "target_velocity": 50,
            "switch_time": 3,
            "num_observed": 2,
            "discrete": False,
            "tl_type": "actuated",
            "num_local_edges": 4,
            "num_local_lights": 4,
        },
    ),
    # network-related parameters (see flow.core.params.NetParams and the
    # network's documentation or ADDITIONAL_NET_PARAMS component)
    net=myNetParams,
    # vehicles to be placed in the network at the start of a rollout (see
    # flow.core.params.VehicleParams)
    veh=vehicles,
    # parameters specifying the positioning of vehicles upon initialization
    # or reset (see flow.core.params.InitialConfig)
    initial=InitialConfig(
        spacing='custom',
        shuffle=True,
    ),
)
############################# Training section below #################################
def cover_actions(c_a, s_a, num):
    """Expand the agent's flat action into a per-intersection switch vector.

    The PPO agent picks a single integer action ``c_a`` in ``[0, num]``;
    the extra index ``num`` (one past the last light) acts as an explicit
    "do nothing" choice that leaves every entry of ``s_a`` untouched.

    :param c_a: chosen action index.
    :param s_a: per-intersection action vector (list or numpy array);
        mutated in place.
    :param num: number of traffic-light agents.
    :returns: ``s_a``, with position ``c_a`` set to 1 when ``0 <= c_a < num``.
    """
    # Equivalent to the original scan over range(num) for a matching
    # index, but O(1): only an in-range index flips its light.
    if 0 <= c_a < num:
        s_a[c_a] = 1
    return s_a
def data_collection(env, vels, queues):
    """Append the current mean speed and queue size to the running logs.

    A vehicle travelling slower than 1 m/s is counted as queued.

    :param env: Flow environment exposing ``env.k.vehicle``.
    :param vels: running list of per-step mean speeds (mutated in place).
    :param queues: running list of per-step queue counts (mutated in place).
    :returns: the (vels, queues) pair for convenience.
    """
    kernel_vehicles = env.k.vehicle
    speeds = kernel_vehicles.get_speed(kernel_vehicles.get_ids())
    vels.append(np.mean(speeds))
    queues.append(sum(1 for v in speeds if v < 1))
    return vels, queues
def normalize_formation(state, Agent_NUM):
    """Order the per-agent observations by agent index.

    The multi-agent environment returns observations keyed ``"center<i>"``;
    training code needs them as a plain list ordered ``center0..center<N-1>``.

    :param state: dict mapping ``"center<i>"`` to that agent's observation.
    :param Agent_NUM: number of agents; keys 0..Agent_NUM-1 must be present.
    :returns: list of observations in agent-index order.
    """
    # A comprehension replaces the original pre-allocated-list fill loop.
    return [state["center" + str(i)] for i in range(Agent_NUM)]
def record_line(log_path, line):
    """Append ``line`` plus a trailing newline to the log file.

    :param log_path: path of the log file (created on first write).
    :param line: text to append.
    :returns: True on completion.
    """
    # The original called writelines() twice; on a plain string that
    # iterates character-by-character. One write() of the joined text
    # produces the identical file content.
    with open(log_path, 'a') as fp:
        fp.write(line + "\n")
    return True
if __name__ == "__main__":
    # Build the grid network and the multi-agent traffic-light environment
    # from the module-level configuration above.
    myTrafficNet = TrafficLightGridNetwork(
        name = 'grid',
        vehicles = vehicles,
        net_params = myNetParams,
    )
    env = MyMultiTrafficLightGridPOEnv(
        env_params=flow_params['env'], sim_params=flow_params['sim'], network=myTrafficNet)
    # print(env.scenario.get_edge_list())
    # Prepare agent.
    # NOTE(review): star import brings in PPO plus the BATCH / EP_LEN
    # constants used in the update schedule below.
    from flow.core.ppo_agent import *
    ############################################################################
    ############################################################################
    Agent_NUM = N_ROWS * N_COLUMNS  # one agent per intersection
    Reward_num = 1  # 0 -> per-agent rewards, 1 -> a single shared reward
    NAME = '2x2_600_PPO_SOFT_try4'
    Epoch = 4000
    steps = 400
    rnn_train_epi = 25
    # Observation: 42 features per agent; action space has one extra
    # index that acts as the "do nothing" choice (see cover_actions).
    rnn_agent = PPO(s_dim=42*Agent_NUM,a_dim=Agent_NUM+1,name=NAME)
    ############################################################################
    ############################################################################
    global_counter = 0
    each_line_path = "collected_data/ppo/{}_plot_log.txt".format(NAME)
    test_epoch_path = "collected_data/ppo/{}_epoch_log.txt".format(NAME)
    for ep in range(Epoch):
        # --- PPO training phase ---
        for i in range(rnn_train_epi):
            print("当前训练次数:")
            print(i)
            global_counter += 1
            state = env.reset()
            state = normalize_formation(state,Agent_NUM)
            # Flatten the per-agent observations into one state vector.
            _state = [n for a in state for n in a ]
            ep_r = 0.0
            for step in range(steps):
                step_r = 0.0
                # print(_state)
                _state = np.array(_state)
                _actions = rnn_agent.choose_action(_state)
                # print(_actions)
                actions = np.zeros((Agent_NUM,), dtype=int)
                rl_actions = cover_actions(_actions, actions,Agent_NUM)
                next_state, rewards, done, _ = env.step(rl_actions)
                if Reward_num == 0:
                    # Average the per-agent rewards into a single scalar.
                    for k in range(Agent_NUM):
                        step_r += rewards[k]/Agent_NUM
                        ep_r += rewards[k]/Agent_NUM
                    rnn_agent.experience_store(_state, _actions, step_r)
                else:
                    ep_r += rewards
                    rnn_agent.experience_store(_state, _actions, rewards)
                state = next_state
                state = normalize_formation(state,Agent_NUM)
                _state = [n for a in state for n in a ]
                _state = np.array(_state)
                # Run a PPO update at every BATCH boundary and at episode end.
                if (step + 1) % BATCH == 0 or step == EP_LEN - 1:
                    rnn_agent.trajction_process(_state)
                    rnn_agent.update()
                    rnn_agent.empty_buffer()
                # Episode terminates only when every agent reports done.
                # NOTE(review): this loop shadows the outer episode index `i`.
                _done = True
                for i in range(Agent_NUM):
                    _done *= done["center"+str(i)]
                # print('dome?')
                # print(_done)
                if _done:
                    break
            print('steps rewards:')
            print(ep_r)
            rnn_agent.summarize(ep_r, global_counter, 'reward')
        # Checkpoint the agent every 10 epochs.
        if ep % 10 == 0:
            rnn_agent.save_params(NAME,ep)
        # --- test / evaluation phase (runs every epoch) ---
        if ep >= 0:
            print('测试阶段:')
            print(ep)
            record_line(each_line_path, "*** Epoch: {} ***\n".format(ep))
            queue, speed, ret = [], [], []
            # Three evaluation rollouts per epoch.
            for i in range(3):
                ep_r, ep_q, ep_v = [], [], []
                state = env.reset()
                state = normalize_formation(state,Agent_NUM)
                _state = [n for a in state for n in a ]
                for step in range(steps):
                    step_r = 0
                    data_collection(env, ep_v, ep_q)
                    _state = np.array(_state)
                    _actions = rnn_agent.choose_action(_state)
                    actions = np.zeros((Agent_NUM,), dtype=int)
                    rl_actions = cover_actions(_actions, actions,Agent_NUM)
                    next_state, rewards, done, _ = env.step(rl_actions)
                    if Reward_num == 0:
                        for k in range(Agent_NUM):
                            step_r += rewards[k]/Agent_NUM
                        ep_r.append(step_r)
                    else:
                        ep_r.append(rewards)
                        # NOTE(review): this also appends step_r (always 0
                        # here) — looks like a leftover line; confirm intent.
                        ep_r.append(step_r)
                    state = next_state
                    state = normalize_formation(state,Agent_NUM)
                    _state = [n for a in state for n in a ]
                    _done = True
                    for i in range(Agent_NUM):
                        _done *= done["center"+str(i)]
                    if _done:
                        break
                queue.append(np.array(ep_q).mean())
                speed.append(np.array(ep_v).mean())
                ret.append(np.array(ep_r).mean())
                record_line(each_line_path, "Queue: " + str(ep_q) + "\n")
                record_line(each_line_path, "Speed: " + str(ep_v) + "\n")
                record_line(each_line_path, "Return: " + str(ep_r) + "\n")
            # record...
            print("*** Epoch: {} ***\n".format(ep))
            print("| Queue: {}, std: {} |".format(np.array(queue).mean(), np.array(queue).std()))
            print("| Speed: {}, std: {} |".format(np.array(speed).mean(), np.array(speed).std()))
            print("| Return: {}, std: {} |".format(np.array(ret).mean(), np.array(ret).std()))
            print("*****************\n")
            record_line(test_epoch_path, "*** Epoch: {} ***\n".format(ep))
            record_line(test_epoch_path, "| Queue: {}, std: {} |".format(np.array(queue).mean(), np.array(queue).std()))
            record_line(test_epoch_path, "| Speed: {}, std: {} |".format(np.array(speed).mean(), np.array(speed).std()))
            record_line(test_epoch_path, "| Return: {}, std: {} |".format(np.array(ret).mean(), np.array(ret).std()))
            record_line(test_epoch_path, "*****************\n")
|
[
"flow.networks.TrafficLightGridNetwork",
"flow.core.params.EnvParams",
"flow.core.params.VehicleParams",
"flow.envs.multiagent.MyMultiTrafficLightGridPOEnv",
"flow.core.params.SumoParams",
"numpy.zeros",
"flow.core.params.SumoCarFollowingParams",
"numpy.mean",
"numpy.array",
"flow.core.params.InFlows",
"flow.core.params.InitialConfig",
"flow.core.params.NetParams"
] |
[((1495, 1510), 'flow.core.params.VehicleParams', 'VehicleParams', ([], {}), '()\n', (1508, 1510), False, 'from flow.core.params import InFlows, SumoCarFollowingParams, VehicleParams\n'), ((2385, 2394), 'flow.core.params.InFlows', 'InFlows', ([], {}), '()\n', (2392, 2394), False, 'from flow.core.params import InFlows, SumoCarFollowingParams, VehicleParams\n'), ((2645, 3008), 'flow.core.params.NetParams', 'NetParams', ([], {'inflows': 'inflow', 'additional_params': "{'speed_limit': V_ENTER + 5, 'grid_array': {'short_length': SHORT_LENGTH,\n 'inner_length': INNER_LENGTH, 'long_length': LONG_LENGTH, 'row_num':\n N_ROWS, 'col_num': N_COLUMNS, 'cars_left': N_LEFT, 'cars_right':\n N_RIGHT, 'cars_top': N_TOP, 'cars_bot': N_BOTTOM}, 'horizontal_lanes': \n 1, 'vertical_lanes': 1}"}), "(inflows=inflow, additional_params={'speed_limit': V_ENTER + 5,\n 'grid_array': {'short_length': SHORT_LENGTH, 'inner_length':\n INNER_LENGTH, 'long_length': LONG_LENGTH, 'row_num': N_ROWS, 'col_num':\n N_COLUMNS, 'cars_left': N_LEFT, 'cars_right': N_RIGHT, 'cars_top':\n N_TOP, 'cars_bot': N_BOTTOM}, 'horizontal_lanes': 1, 'vertical_lanes': 1})\n", (2654, 3008), False, 'from flow.core.params import SumoParams, EnvParams, InitialConfig, NetParams\n'), ((5719, 5798), 'flow.networks.TrafficLightGridNetwork', 'TrafficLightGridNetwork', ([], {'name': '"""grid"""', 'vehicles': 'vehicles', 'net_params': 'myNetParams'}), "(name='grid', vehicles=vehicles, net_params=myNetParams)\n", (5742, 5798), False, 'from flow.networks import TrafficLightGridNetwork\n'), ((5847, 5964), 'flow.envs.multiagent.MyMultiTrafficLightGridPOEnv', 'MyMultiTrafficLightGridPOEnv', ([], {'env_params': "flow_params['env']", 'sim_params': "flow_params['sim']", 'network': 'myTrafficNet'}), "(env_params=flow_params['env'], sim_params=\n flow_params['sim'], network=myTrafficNet)\n", (5875, 5964), False, 'from flow.envs.multiagent import MyMultiTrafficLightGridPOEnv\n'), ((1707, 1803), 'flow.core.params.SumoCarFollowingParams', 
'SumoCarFollowingParams', ([], {'min_gap': '(2.5)', 'max_speed': 'V_ENTER', 'decel': '(7.5)', 'speed_mode': '"""right_of_way"""'}), "(min_gap=2.5, max_speed=V_ENTER, decel=7.5,\n speed_mode='right_of_way')\n", (1729, 1803), False, 'from flow.core.params import InFlows, SumoCarFollowingParams, VehicleParams\n'), ((3751, 3810), 'flow.core.params.SumoParams', 'SumoParams', ([], {'restart_instance': '(True)', 'sim_step': '(1)', 'render': '(False)'}), '(restart_instance=True, sim_step=1, render=False)\n', (3761, 3810), False, 'from flow.core.params import SumoParams, EnvParams, InitialConfig, NetParams\n'), ((3922, 4160), 'flow.core.params.EnvParams', 'EnvParams', ([], {'horizon': 'HORIZON', 'warmup_steps': '(0)', 'sims_per_step': '(1)', 'additional_params': "{'target_velocity': 50, 'switch_time': 3, 'num_observed': 2, 'discrete': \n False, 'tl_type': 'actuated', 'num_local_edges': 4, 'num_local_lights': 4}"}), "(horizon=HORIZON, warmup_steps=0, sims_per_step=1,\n additional_params={'target_velocity': 50, 'switch_time': 3,\n 'num_observed': 2, 'discrete': False, 'tl_type': 'actuated',\n 'num_local_edges': 4, 'num_local_lights': 4})\n", (3931, 4160), False, 'from flow.core.params import SumoParams, EnvParams, InitialConfig, NetParams\n'), ((4717, 4762), 'flow.core.params.InitialConfig', 'InitialConfig', ([], {'spacing': '"""custom"""', 'shuffle': '(True)'}), "(spacing='custom', shuffle=True)\n", (4730, 4762), False, 'from flow.core.params import SumoParams, EnvParams, InitialConfig, NetParams\n'), ((5217, 5236), 'numpy.mean', 'np.mean', (['veh_speeds'], {}), '(veh_speeds)\n', (5224, 5236), True, 'import numpy as np\n'), ((7262, 7278), 'numpy.array', 'np.array', (['_state'], {}), '(_state)\n', (7270, 7278), True, 'import numpy as np\n'), ((7398, 7431), 'numpy.zeros', 'np.zeros', (['(Agent_NUM,)'], {'dtype': 'int'}), '((Agent_NUM,), dtype=int)\n', (7406, 7431), True, 'import numpy as np\n'), ((8160, 8176), 'numpy.array', 'np.array', (['_state'], {}), '(_state)\n', (8168, 
8176), True, 'import numpy as np\n'), ((9427, 9443), 'numpy.array', 'np.array', (['_state'], {}), '(_state)\n', (9435, 9443), True, 'import numpy as np\n'), ((9537, 9570), 'numpy.zeros', 'np.zeros', (['(Agent_NUM,)'], {'dtype': 'int'}), '((Agent_NUM,), dtype=int)\n', (9545, 9570), True, 'import numpy as np\n'), ((10440, 10454), 'numpy.array', 'np.array', (['ep_q'], {}), '(ep_q)\n', (10448, 10454), True, 'import numpy as np\n'), ((10492, 10506), 'numpy.array', 'np.array', (['ep_v'], {}), '(ep_v)\n', (10500, 10506), True, 'import numpy as np\n'), ((10542, 10556), 'numpy.array', 'np.array', (['ep_r'], {}), '(ep_r)\n', (10550, 10556), True, 'import numpy as np\n'), ((10916, 10931), 'numpy.array', 'np.array', (['queue'], {}), '(queue)\n', (10924, 10931), True, 'import numpy as np\n'), ((10940, 10955), 'numpy.array', 'np.array', (['queue'], {}), '(queue)\n', (10948, 10955), True, 'import numpy as np\n'), ((11014, 11029), 'numpy.array', 'np.array', (['speed'], {}), '(speed)\n', (11022, 11029), True, 'import numpy as np\n'), ((11038, 11053), 'numpy.array', 'np.array', (['speed'], {}), '(speed)\n', (11046, 11053), True, 'import numpy as np\n'), ((11113, 11126), 'numpy.array', 'np.array', (['ret'], {}), '(ret)\n', (11121, 11126), True, 'import numpy as np\n'), ((11135, 11148), 'numpy.array', 'np.array', (['ret'], {}), '(ret)\n', (11143, 11148), True, 'import numpy as np\n'), ((11346, 11361), 'numpy.array', 'np.array', (['queue'], {}), '(queue)\n', (11354, 11361), True, 'import numpy as np\n'), ((11370, 11385), 'numpy.array', 'np.array', (['queue'], {}), '(queue)\n', (11378, 11385), True, 'import numpy as np\n'), ((11467, 11482), 'numpy.array', 'np.array', (['speed'], {}), '(speed)\n', (11475, 11482), True, 'import numpy as np\n'), ((11491, 11506), 'numpy.array', 'np.array', (['speed'], {}), '(speed)\n', (11499, 11506), True, 'import numpy as np\n'), ((11589, 11602), 'numpy.array', 'np.array', (['ret'], {}), '(ret)\n', (11597, 11602), True, 'import numpy as np\n'), ((11611, 
11624), 'numpy.array', 'np.array', (['ret'], {}), '(ret)\n', (11619, 11624), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
from django import forms
from common.forms import BaseComponentForm, ListField
from common.constants import API_TYPE_OP
from components.component import Component
from .toolkit import tools, configs
class AddApp(Component):
    # ESB component that creates a new business ("app") in CMDB/CC.
    # The docstring below is NOT ordinary documentation: it is a template
    # rendered by the ESB framework to produce the public API docs (the
    # {{ _("...") }} expressions are i18n lookups), so it is left verbatim.
    """
    apiLabel {{ _("新建业务") }}
    apiMethod POST
    ### {{ _("功能描述") }}
    {{ _("新建业务") }}
    ### {{ _("请求参数") }}
    {{ common_args_desc }}
    #### {{ _("接口参数") }}
    | {{ _("字段") }} | {{ _("类型") }} | {{ _("必选") }} | {{ _("描述") }} |
    |-----------|------------|--------|------------|
    | app_name | string | {{ _("是") }} | {{ _("业务名") }} |
    | maintainers | string | {{ _("是") }} | {{ _("运维人员, 多个人之间用逗号分隔") }} |
    | product_pm | string | {{ _("否") }} | {{ _("产品人员,多个人之间用逗号分隔") }} |
    | developer | string | {{ _("否") }} | {{ _("开发人员,多个人之间用逗号分隔") }} |
    | tester | string | {{ _("否") }} | {{ _("测试人员,多个人之间用逗号分隔") }} |
    | operator | string | {{ _("否") }} | {{ _("操作者,多个人之间用逗号分隔") }} |
    | company_name | string | {{ _("是") }} | {{ _("公司名,cmdb配置文件中定义的constants.php中的 COMPANY_NAME") }} |
    | level | int | {{ _("是") }} | {{ _("业务拓扑级别,2或者3") }} |
    | life_cycle | string | {{ _("是") }} | {{ _("生成周期,1: 测试中, 2: 已上线, 3: 停运其中的一个值") }} |
    ### {{ _("请求参数示例") }}
    ```python
    {
        "app_code": "esb_test",
        "app_secret": "xxx",
        "bk_token": "xxx",
        "app_name": "Test",
        "maintainers": "admin",
        "product_pm": "admin",
        "company_name": "CompanyName",
        "level": 3,
        "life_cycle": "1"
    }
    ```
    ### 返回结果示例
    ```python
    {
        "result": true,
        "code": "00",
        "message": "",
        "data": {
            "appId": 25
        }
    }
    ```
    """
    # Routing configuration: target system, permission category and host.
    sys_name = configs.SYSTEM_NAME
    api_type = API_TYPE_OP
    host = configs.host

    class Form(BaseComponentForm):
        # Input validation form; clean() maps the validated fields onto
        # the parameter names expected by the CC backend.
        app_name = forms.CharField(label='business name', required=True)
        maintainers = ListField(label='OPS', required=True)
        product_pm = ListField(label='PM', required=False)
        developer = ListField(label='developer', required=False)
        tester = ListField(label='test staff', required=False)
        operator = ListField(label='operator', required=False)
        company_name = forms.CharField(label='company name', required=True)
        level = forms.IntegerField(label='business topology level', required=True)
        life_cycle = forms.CharField(label='life cycle', required=True)

        def clean(self):
            """Translate validated data into the CC API's CamelCase keys.

            List-valued staff fields are serialized as comma-separated
            strings, the format the backend expects.
            """
            data = self.cleaned_data
            return {
                'ApplicationName': data['app_name'],
                'Maintainers': ','.join(data['maintainers']),
                'ProductPm': ','.join(data['product_pm']),
                'Developer': ','.join(data['developer']),
                'Tester': ','.join(data['tester']),
                'Operator': ','.join(data['operator']),
                'CompanyName': data['company_name'],
                'Level': data['level'],
                'LifeCycle': data['life_cycle'],
            }

    def handle(self):
        """Forward the cleaned form data to the CC backend."""
        # The creator is taken from the authenticated user, never from input.
        self.form_data['Creator'] = self.current_user.username
        client = tools.CCClient(self)
        self.response.payload = client.post_request(
            self.host,
            '/api/app/addApp',
            data=self.form_data,
        )
|
[
"common.forms.ListField",
"django.forms.CharField",
"django.forms.IntegerField"
] |
[((2683, 2736), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""business name"""', 'required': '(True)'}), "(label='business name', required=True)\n", (2698, 2736), False, 'from django import forms\n'), ((2759, 2796), 'common.forms.ListField', 'ListField', ([], {'label': '"""OPS"""', 'required': '(True)'}), "(label='OPS', required=True)\n", (2768, 2796), False, 'from common.forms import BaseComponentForm, ListField\n'), ((2818, 2855), 'common.forms.ListField', 'ListField', ([], {'label': '"""PM"""', 'required': '(False)'}), "(label='PM', required=False)\n", (2827, 2855), False, 'from common.forms import BaseComponentForm, ListField\n'), ((2876, 2920), 'common.forms.ListField', 'ListField', ([], {'label': '"""developer"""', 'required': '(False)'}), "(label='developer', required=False)\n", (2885, 2920), False, 'from common.forms import BaseComponentForm, ListField\n'), ((2938, 2983), 'common.forms.ListField', 'ListField', ([], {'label': '"""test staff"""', 'required': '(False)'}), "(label='test staff', required=False)\n", (2947, 2983), False, 'from common.forms import BaseComponentForm, ListField\n'), ((3003, 3046), 'common.forms.ListField', 'ListField', ([], {'label': '"""operator"""', 'required': '(False)'}), "(label='operator', required=False)\n", (3012, 3046), False, 'from common.forms import BaseComponentForm, ListField\n'), ((3070, 3122), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""company name"""', 'required': '(True)'}), "(label='company name', required=True)\n", (3085, 3122), False, 'from django import forms\n'), ((3139, 3205), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'label': '"""business topology level"""', 'required': '(True)'}), "(label='business topology level', required=True)\n", (3157, 3205), False, 'from django import forms\n'), ((3227, 3277), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""life cycle"""', 'required': '(True)'}), "(label='life cycle', required=True)\n", (3242, 3277), 
False, 'from django import forms\n')]
|
""" Module for the KMCRateCalculatorPlugin class """
# Copyright (c) 2013 <NAME>
#
# This file is part of the KMCLib project distributed under the terms of the
# GNU General Public License version 3, see <http://www.gnu.org/licenses/>.
#
import numpy
from KMCLib.Backend import Backend
from KMCLib.Exceptions.Error import Error
class KMCRateCalculatorPlugin(Backend.RateCalculator):
    """
    Plugin interface for customizing how individual rates are computed
    in the KMC simulation.  Derived classes must implement :meth:`rate`
    and may optionally override :meth:`initialize` and :meth:`cutoff`.
    """

    def __init__(self):
        """
        Base class constructor.  Initializes the C++ backend and then
        runs the derived class's custom setup hook.
        """
        # The C++ base class must be set up before anything else.
        Backend.RateCalculator.__init__(self)
        # Give derived classes a chance to do their own setup.
        self.initialize()

    def backendRateCallback(self,
                            cpp_coords,
                            coords_len,
                            types_before,
                            types_after,
                            rate_constant,
                            process_number,
                            global_x,
                            global_y,
                            global_z):
        """
        Called from C++ to obtain the rate.  Converts the raw backend
        data into Python-friendly objects and forwards them to the
        custom :meth:`rate` implementation.
        """
        # Reshape the flat coordinate buffer into an Nx3 geometry array.
        # PERFORMME: Consider creating the numpy array in C++ if possible.
        geometry = numpy.reshape(numpy.array(cpp_coords), (coords_len, 3))
        return self.rate(geometry,
                         types_before,
                         types_after,
                         rate_constant,
                         process_number,
                         (global_x, global_y, global_z))

    def initialize(self):
        """
        Hook called as the last statement of the base class constructor,
        allowing derived classes to perform custom setup.
        """
        pass

    def rate(self,
             coords,
             types_before,
             types_after,
             rate_constant,
             process_number,
             global_coordinate):
        """
        Called from the base class to obtain the rate for a particular
        local geometry.  Every class inheriting from this plugin base
        class must provide an implementation.

        :param coords: The configuration coordinates as an Nx3 numpy
                       array in fractional units of the primitive cell.
        :param types_before: The types before the process, as a tuple of strings.
        :param types_after: The types after the process, as a tuple of strings.
        :param rate_constant: The rate constant associated with the process
                              to either update or replace.
        :param process_number: The process id number.
        :param global_coordinate: The global coordinate of the central index.

        :returns: The custom rate of the process.  The returned rate must
                  not be negative or zero.
        """
        raise Error("The rate(self,...) API function in the 'KMCRateCalculator' base class must be overloaded when using a custom rate calculator.")

    def cutoff(self):
        """
        Determines the radial cutoff of the geometry around the central
        lattice site that is cut out and passed to the custom rate
        function.  If a derived class does not override this, the
        default is the cutoff of the largest process local geometry.

        :returns: The desired cutoff in primitive cell internal coordinates.
        :rtype: float
        """
        # None selects the default backend behaviour.
        return None
|
[
"KMCLib.Backend.Backend.RateCalculator.__init__",
"numpy.array",
"KMCLib.Exceptions.Error.Error"
] |
[((691, 728), 'KMCLib.Backend.Backend.RateCalculator.__init__', 'Backend.RateCalculator.__init__', (['self'], {}), '(self)\n', (722, 728), False, 'from KMCLib.Backend import Backend\n'), ((3162, 3306), 'KMCLib.Exceptions.Error.Error', 'Error', (['"""The rate(self,...) API function in the \'KMCRateCalculator\' base class must be overloaded when using a custom rate calculator."""'], {}), '(\n "The rate(self,...) API function in the \'KMCRateCalculator\' base class must be overloaded when using a custom rate calculator."\n )\n', (3167, 3306), False, 'from KMCLib.Exceptions.Error import Error\n'), ((1609, 1632), 'numpy.array', 'numpy.array', (['cpp_coords'], {}), '(cpp_coords)\n', (1620, 1632), False, 'import numpy\n')]
|
import torch
from torch import nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
def test_img(model, dataset, args):
    """Evaluate ``model`` on ``dataset`` and return (accuracy %, total loss).

    The model is put into eval mode and both images and labels are moved
    to the GPU, so a CUDA device is required.

    :param model: a ``torch.nn.Module`` producing class logits.
    :param dataset: a dataset yielding ``(image, label)`` pairs.
    :param args: namespace providing ``batch_size``.
    :returns: tuple ``(accuracy, test_loss)`` where ``accuracy`` is the
        top-1 accuracy in percent and ``test_loss`` is the cross-entropy
        summed over all samples (not averaged).
    """
    model.eval()
    test_loss = 0
    correct = 0
    data_loader = DataLoader(dataset, batch_size=args.batch_size)
    with torch.no_grad():
        # Iterate batches directly; the enumerate index was never used.
        for images, labels in data_loader:
            images, labels = images.cuda(), labels.cuda()
            output = model(images)
            # "sum" reduction makes the total independent of batch size.
            test_loss += F.cross_entropy(output, labels, reduction="sum").item()
            predicted = output.max(1, keepdim=True)[1]
            correct += predicted.eq(labels.view_as(predicted)).sum().item()
    accuracy = 100. * correct / len(data_loader.dataset)
    return accuracy, test_loss
|
[
"torch.no_grad",
"torch.nn.functional.cross_entropy",
"torch.utils.data.DataLoader"
] |
[((213, 260), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'args.batch_size'}), '(dataset, batch_size=args.batch_size)\n', (223, 260), False, 'from torch.utils.data import DataLoader\n'), ((270, 285), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (283, 285), False, 'import torch\n'), ((490, 538), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['output', 'labels'], {'reduction': '"""sum"""'}), "(output, labels, reduction='sum')\n", (505, 538), True, 'import torch.nn.functional as F\n')]
|
import csv

import matplotlib as matplot
import matplotlib.pyplot as plt
import numpy as np

# Per-curve styling tables shared by every intensity plot.
colors = ['black', 'blue', 'red', 'green', 'cyan',
          'gray', 'gold', 'lightcoral', 'turquoise', 'red', 'blue', 'green', 'pink']
patterns = ['-', '--', '--', '--', '--', '--', '--', '--', ':', '-', '--', ':', '-', '--', ':',
            '-.', '-.', '-.', ':', '--', '-']
markers = ['.', ',', 'o', 'v', '8', 's', '+', 'x', 'X', 'D', '^', '<', '>', 'v']
sizes = [10, 5, 5, 5, 4, 4, 4, 3, 3, 3, 3, 3, 6, 5, 4, 3, 2, 2]

# The observed track is recorded in knots; the simulations output m/s.
KNOTS_TO_MS = 0.5144444

# One curve per CSV row: first the observed track, then one row per
# simulated drag-coefficient configuration.
LEGEND = ["Real Track", "C0.0001", "C0.01", "C1", "C100"]

# Input CSVs, one per hurricane.
dir1 = 'C:/Users/limgr/Desktop/Katrina_wind_intensity_8km.csv'
dir2 = 'C:/Users/limgr/Desktop/Maria_wind_intensity_8km.csv'
dir3 = 'C:/Users/limgr/Desktop/Irma_wind_intensity_8km.csv'
dir4 = 'C:/Users/limgr/Desktop/Dorian_wind_intensity_8km.csv'
dir7 = 'C:/Users/limgr/Desktop/Lorenzo_wind_intensity_8km.csv'


def _read_rows(path):
    """Read one intensity CSV.

    :param path: path to the CSV file.
    :returns: ``(times, rows)`` where ``times`` are the column headers
        (time steps) and ``rows`` holds one list of string values per
        CSV data row; the first row is the observed track.
    """
    with open(path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        times = list(reader.fieldnames)
        rows = [list(row.values()) for row in reader]
    print(f'Column names are {", ".join(times)}')
    # +1 accounts for the header line, matching the original report.
    print(f'Processed {len(rows) + 1} lines.')
    return times, rows


def _plot_intensity(path, title, outfile, sl=slice(None)):
    """Plot every track of one hurricane and save the figure.

    :param path: CSV produced by the intensity extraction step.
    :param title: figure title (e.g. ``"Katrina Intensity "``).
    :param outfile: destination PNG path.
    :param sl: slice restricting the plotted time steps; some storms
        only have trustworthy data for part of the window.
    """
    times, rows = _read_rows(path)
    print(rows[0])
    for c, row in enumerate(rows):
        # Row 0 is the observed track in knots; convert it to m/s so all
        # curves share one unit.
        scale = KNOTS_TO_MS if c == 0 else 1.0
        track = [float(v) * scale for v in row]
        plt.plot(times[sl], track[sl], color=colors[c], marker='s',
                 linestyle=patterns[c], markersize=sizes[c])
    plt.legend(LEGEND, loc="upper right", prop={'size': 7})
    plt.xlabel("Time Step [hr]", fontsize=14)
    plt.ylabel("Intensity", fontsize=14)
    plt.title(title, {'size': 20})
    plt.savefig(outfile)
    plt.show()


def _relative_errors(path):
    """Relative intensity error of every simulated track in one CSV.

    :param path: path to the CSV file.
    :returns: array of shape ``(1, n_configs, n_times)`` holding
        ``abs(simulated - real) / real`` with the observed track
        converted from knots to m/s.
    """
    _, rows = _read_rows(path)
    real = np.array(rows[:1], dtype=np.float32) * KNOTS_TO_MS
    simu = np.array(rows[1:], dtype=np.float32)
    print('There is totally ', len(rows) * len(rows[0]), ' data points')
    # real[:, None] broadcasts the single observed track against every
    # simulated configuration.
    abs_error = np.abs(simu - real[:, None])
    print('absolute intensity error')  # label fixed: this is wind intensity, not pressure
    print(abs_error)
    return abs_error / real[:, None]


# --- Per-hurricane intensity plots -----------------------------------------
_plot_intensity(dir1, "Katrina Intensity ",
                'C:/Users/limgr/Desktop/katrina_wind_intensity_A.png', slice(None, 5))
_plot_intensity(dir2, "Maria Intensity ",
                'C:/Users/limgr/Desktop/maria_wind_intensity_A.png', slice(None, 5))
_plot_intensity(dir3, "Irma Intensity ",
                'C:/Users/limgr/Desktop/irma_wind_intensity_A.png')
_plot_intensity(dir4, "Dorian Intensity ",
                'C:/Users/limgr/Desktop/dorian_wind_intensity_A.png', slice(None, -2))
_plot_intensity(dir7, "Lorenzo Intensity ",
                'C:/Users/limgr/Desktop/lorenzo_wind_intensity_A.png')

# --- Relative-error statistics ---------------------------------------------
simu_error1 = _relative_errors(dir1)
simu_error2 = _relative_errors(dir2)
simu_error3 = _relative_errors(dir3)
simu_error4 = _relative_errors(dir4)
simu_error7 = _relative_errors(dir7)

# Pool each configuration's relative errors across hurricanes.  Katrina
# contributes only its first five time steps and Dorian drops its last
# two, matching the plotted windows.  NOTE(review): Maria is pooled in
# full although only its first five steps were plotted -- confirm this
# asymmetry is intended.
means = []
stds = []
for j in range(4):
    pooled = np.concatenate((simu_error1[0][j][0:5],
                            simu_error2[0][j][:],
                            simu_error3[0][j][:],
                            simu_error4[0][j][:-2],
                            simu_error7[0][j][:]))
    means.append(np.mean(pooled))
    stds.append(np.std(pooled))

# --- Bar plot of mean relative error per configuration ---------------------
hurricanes = ['C0.0001', 'C0.01', 'C1', 'C100']
x_pos = np.arange(len(hurricanes))
CTEs = means
errors = stds

fig, ax = plt.subplots()
ax.bar(x_pos, CTEs, yerr=errors, align='center', alpha=0.5, ecolor='black', capsize=10)
ax.set_ylabel('Intensity')
ax.set_xticks(x_pos)
ax.set_xticklabels(hurricanes)
ax.set_title('Hurricanes')
ax.yaxis.grid(True)
for i, v in enumerate(CTEs):
    # Annotate each bar with its mean relative error.
    ax.text(i, v + 0.02, str(round(v, 3)), color='red', fontweight='bold')

# Save the figure and show
fig.autofmt_xdate()
plt.tight_layout()
plt.savefig('C:/Users/limgr/Desktop/wind_intensity_bar_plot.png')
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.show",
"numpy.concatenate",
"matplotlib.pyplot.plot",
"numpy.std",
"csv.DictReader",
"matplotlib.pyplot.legend",
"numpy.zeros",
"numpy.mean",
"numpy.array",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.savefig"
] |
[((1745, 1847), 'matplotlib.pyplot.legend', 'plt.legend', (["['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100']"], {'loc': '"""upper right"""', 'prop': "{'size': 7}"}), "(['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100'], loc=\n 'upper right', prop={'size': 7})\n", (1755, 1847), True, 'import matplotlib.pyplot as plt\n'), ((2779, 2820), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time Step [hr]"""'], {'fontsize': '(14)'}), "('Time Step [hr]', fontsize=14)\n", (2789, 2820), True, 'import matplotlib.pyplot as plt\n'), ((2821, 2857), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Intensity"""'], {'fontsize': '(14)'}), "('Intensity', fontsize=14)\n", (2831, 2857), True, 'import matplotlib.pyplot as plt\n'), ((2858, 2903), 'matplotlib.pyplot.title', 'plt.title', (['"""Katrina Intensity """', "{'size': 20}"], {}), "('Katrina Intensity ', {'size': 20})\n", (2867, 2903), True, 'import matplotlib.pyplot as plt\n'), ((2904, 2970), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""C:/Users/limgr/Desktop/katrina_wind_intensity_A.png"""'], {}), "('C:/Users/limgr/Desktop/katrina_wind_intensity_A.png')\n", (2915, 2970), True, 'import matplotlib.pyplot as plt\n'), ((2971, 2981), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2979, 2981), True, 'import matplotlib.pyplot as plt\n'), ((3891, 3993), 'matplotlib.pyplot.legend', 'plt.legend', (["['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100']"], {'loc': '"""upper right"""', 'prop': "{'size': 7}"}), "(['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100'], loc=\n 'upper right', prop={'size': 7})\n", (3901, 3993), True, 'import matplotlib.pyplot as plt\n'), ((4065, 4106), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time Step [hr]"""'], {'fontsize': '(14)'}), "('Time Step [hr]', fontsize=14)\n", (4075, 4106), True, 'import matplotlib.pyplot as plt\n'), ((4107, 4143), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Intensity"""'], {'fontsize': '(14)'}), "('Intensity', fontsize=14)\n", (4117, 4143), True, 'import matplotlib.pyplot 
as plt\n'), ((4144, 4187), 'matplotlib.pyplot.title', 'plt.title', (['"""Maria Intensity """', "{'size': 20}"], {}), "('Maria Intensity ', {'size': 20})\n", (4153, 4187), True, 'import matplotlib.pyplot as plt\n'), ((4188, 4252), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""C:/Users/limgr/Desktop/maria_wind_intensity_A.png"""'], {}), "('C:/Users/limgr/Desktop/maria_wind_intensity_A.png')\n", (4199, 4252), True, 'import matplotlib.pyplot as plt\n'), ((4253, 4263), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4261, 4263), True, 'import matplotlib.pyplot as plt\n'), ((5128, 5230), 'matplotlib.pyplot.legend', 'plt.legend', (["['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100']"], {'loc': '"""upper right"""', 'prop': "{'size': 7}"}), "(['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100'], loc=\n 'upper right', prop={'size': 7})\n", (5138, 5230), True, 'import matplotlib.pyplot as plt\n'), ((5302, 5343), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time Step [hr]"""'], {'fontsize': '(14)'}), "('Time Step [hr]', fontsize=14)\n", (5312, 5343), True, 'import matplotlib.pyplot as plt\n'), ((5344, 5380), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Intensity"""'], {'fontsize': '(14)'}), "('Intensity', fontsize=14)\n", (5354, 5380), True, 'import matplotlib.pyplot as plt\n'), ((5381, 5423), 'matplotlib.pyplot.title', 'plt.title', (['"""Irma Intensity """', "{'size': 20}"], {}), "('Irma Intensity ', {'size': 20})\n", (5390, 5423), True, 'import matplotlib.pyplot as plt\n'), ((5424, 5487), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""C:/Users/limgr/Desktop/irma_wind_intensity_A.png"""'], {}), "('C:/Users/limgr/Desktop/irma_wind_intensity_A.png')\n", (5435, 5487), True, 'import matplotlib.pyplot as plt\n'), ((5488, 5498), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5496, 5498), True, 'import matplotlib.pyplot as plt\n'), ((6372, 6474), 'matplotlib.pyplot.legend', 'plt.legend', (["['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100']"], {'loc': 
'"""upper right"""', 'prop': "{'size': 7}"}), "(['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100'], loc=\n 'upper right', prop={'size': 7})\n", (6382, 6474), True, 'import matplotlib.pyplot as plt\n'), ((6547, 6588), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time Step [hr]"""'], {'fontsize': '(14)'}), "('Time Step [hr]', fontsize=14)\n", (6557, 6588), True, 'import matplotlib.pyplot as plt\n'), ((6589, 6625), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Intensity"""'], {'fontsize': '(14)'}), "('Intensity', fontsize=14)\n", (6599, 6625), True, 'import matplotlib.pyplot as plt\n'), ((6626, 6670), 'matplotlib.pyplot.title', 'plt.title', (['"""Dorian Intensity """', "{'size': 20}"], {}), "('Dorian Intensity ', {'size': 20})\n", (6635, 6670), True, 'import matplotlib.pyplot as plt\n'), ((6671, 6736), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""C:/Users/limgr/Desktop/dorian_wind_intensity_A.png"""'], {}), "('C:/Users/limgr/Desktop/dorian_wind_intensity_A.png')\n", (6682, 6736), True, 'import matplotlib.pyplot as plt\n'), ((6737, 6747), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6745, 6747), True, 'import matplotlib.pyplot as plt\n'), ((7608, 7710), 'matplotlib.pyplot.legend', 'plt.legend', (["['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100']"], {'loc': '"""upper right"""', 'prop': "{'size': 7}"}), "(['Real Track', 'C0.0001', 'C0.01', 'C1', 'C100'], loc=\n 'upper right', prop={'size': 7})\n", (7618, 7710), True, 'import matplotlib.pyplot as plt\n'), ((7783, 7824), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time Step [hr]"""'], {'fontsize': '(14)'}), "('Time Step [hr]', fontsize=14)\n", (7793, 7824), True, 'import matplotlib.pyplot as plt\n'), ((7825, 7861), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Intensity"""'], {'fontsize': '(14)'}), "('Intensity', fontsize=14)\n", (7835, 7861), True, 'import matplotlib.pyplot as plt\n'), ((7862, 7907), 'matplotlib.pyplot.title', 'plt.title', (['"""Lorenzo Intensity """', "{'size': 20}"], {}), 
"('Lorenzo Intensity ', {'size': 20})\n", (7871, 7907), True, 'import matplotlib.pyplot as plt\n'), ((7908, 7974), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""C:/Users/limgr/Desktop/lorenzo_wind_intensity_A.png"""'], {}), "('C:/Users/limgr/Desktop/lorenzo_wind_intensity_A.png')\n", (7919, 7974), True, 'import matplotlib.pyplot as plt\n'), ((7975, 7985), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7983, 7985), True, 'import matplotlib.pyplot as plt\n'), ((10004, 10039), 'numpy.array', 'np.array', (['values1'], {'dtype': 'np.float32'}), '(values1, dtype=np.float32)\n', (10012, 10039), True, 'import numpy as np\n'), ((10046, 10085), 'numpy.array', 'np.array', (['real1_track'], {'dtype': 'np.float32'}), '(real1_track, dtype=np.float32)\n', (10054, 10085), True, 'import numpy as np\n'), ((10858, 10893), 'numpy.array', 'np.array', (['values2'], {'dtype': 'np.float32'}), '(values2, dtype=np.float32)\n', (10866, 10893), True, 'import numpy as np\n'), ((10900, 10939), 'numpy.array', 'np.array', (['real2_track'], {'dtype': 'np.float32'}), '(real2_track, dtype=np.float32)\n', (10908, 10939), True, 'import numpy as np\n'), ((11722, 11757), 'numpy.array', 'np.array', (['values3'], {'dtype': 'np.float32'}), '(values3, dtype=np.float32)\n', (11730, 11757), True, 'import numpy as np\n'), ((11764, 11803), 'numpy.array', 'np.array', (['real3_track'], {'dtype': 'np.float32'}), '(real3_track, dtype=np.float32)\n', (11772, 11803), True, 'import numpy as np\n'), ((12586, 12621), 'numpy.array', 'np.array', (['values4'], {'dtype': 'np.float32'}), '(values4, dtype=np.float32)\n', (12594, 12621), True, 'import numpy as np\n'), ((12628, 12667), 'numpy.array', 'np.array', (['real4_track'], {'dtype': 'np.float32'}), '(real4_track, dtype=np.float32)\n', (12636, 12667), True, 'import numpy as np\n'), ((13442, 13477), 'numpy.array', 'np.array', (['values7'], {'dtype': 'np.float32'}), '(values7, dtype=np.float32)\n', (13450, 13477), True, 'import numpy as np\n'), ((13484, 
13523), 'numpy.array', 'np.array', (['real7_track'], {'dtype': 'np.float32'}), '(real7_track, dtype=np.float32)\n', (13492, 13523), True, 'import numpy as np\n'), ((13898, 13914), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (13906, 13914), True, 'import numpy as np\n'), ((13925, 13941), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (13933, 13941), True, 'import numpy as np\n'), ((13953, 13969), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (13961, 13969), True, 'import numpy as np\n'), ((13981, 13997), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (13989, 13997), True, 'import numpy as np\n'), ((14009, 14025), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (14017, 14025), True, 'import numpy as np\n'), ((14037, 14053), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (14045, 14053), True, 'import numpy as np\n'), ((14065, 14081), 'numpy.zeros', 'np.zeros', (['(4, 4)'], {}), '((4, 4))\n', (14073, 14081), True, 'import numpy as np\n'), ((14535, 14670), 'numpy.concatenate', 'np.concatenate', (['(simu_error1[0][0][0:5], simu_error2[0][0][:], simu_error3[0][0][:],\n simu_error4[0][0][:-2], simu_error7[0][0][:])'], {}), '((simu_error1[0][0][0:5], simu_error2[0][0][:], simu_error3[0\n ][0][:], simu_error4[0][0][:-2], simu_error7[0][0][:]))\n', (14549, 14670), True, 'import numpy as np\n'), ((14739, 14758), 'numpy.mean', 'np.mean', (['par1_error'], {}), '(par1_error)\n', (14746, 14758), True, 'import numpy as np\n'), ((14774, 14792), 'numpy.std', 'np.std', (['par1_error'], {}), '(par1_error)\n', (14780, 14792), True, 'import numpy as np\n'), ((15031, 15166), 'numpy.concatenate', 'np.concatenate', (['(simu_error1[0][1][0:5], simu_error2[0][1][:], simu_error3[0][1][:],\n simu_error4[0][1][:-2], simu_error7[0][1][:])'], {}), '((simu_error1[0][1][0:5], simu_error2[0][1][:], simu_error3[0\n ][1][:], simu_error4[0][1][:-2], simu_error7[0][1][:]))\n', (15045, 15166), True, 'import numpy as np\n'), 
((15235, 15254), 'numpy.mean', 'np.mean', (['par2_error'], {}), '(par2_error)\n', (15242, 15254), True, 'import numpy as np\n'), ((15270, 15288), 'numpy.std', 'np.std', (['par2_error'], {}), '(par2_error)\n', (15276, 15288), True, 'import numpy as np\n'), ((15528, 15663), 'numpy.concatenate', 'np.concatenate', (['(simu_error1[0][2][0:5], simu_error2[0][2][:], simu_error3[0][2][:],\n simu_error4[0][2][:-2], simu_error7[0][2][:])'], {}), '((simu_error1[0][2][0:5], simu_error2[0][2][:], simu_error3[0\n ][2][:], simu_error4[0][2][:-2], simu_error7[0][2][:]))\n', (15542, 15663), True, 'import numpy as np\n'), ((15732, 15751), 'numpy.mean', 'np.mean', (['par3_error'], {}), '(par3_error)\n', (15739, 15751), True, 'import numpy as np\n'), ((15767, 15785), 'numpy.std', 'np.std', (['par3_error'], {}), '(par3_error)\n', (15773, 15785), True, 'import numpy as np\n'), ((16025, 16160), 'numpy.concatenate', 'np.concatenate', (['(simu_error1[0][3][0:5], simu_error2[0][3][:], simu_error3[0][3][:],\n simu_error4[0][3][:-2], simu_error7[0][3][:])'], {}), '((simu_error1[0][3][0:5], simu_error2[0][3][:], simu_error3[0\n ][3][:], simu_error4[0][3][:-2], simu_error7[0][3][:]))\n', (16039, 16160), True, 'import numpy as np\n'), ((16229, 16248), 'numpy.mean', 'np.mean', (['par4_error'], {}), '(par4_error)\n', (16236, 16248), True, 'import numpy as np\n'), ((16264, 16282), 'numpy.std', 'np.std', (['par4_error'], {}), '(par4_error)\n', (16270, 16282), True, 'import numpy as np\n'), ((16555, 16569), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (16567, 16569), True, 'import matplotlib.pyplot as plt\n'), ((16933, 16951), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (16949, 16951), True, 'import matplotlib.pyplot as plt\n'), ((16996, 17061), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""C:/Users/limgr/Desktop/wind_intensity_bar_plot.png"""'], {}), "('C:/Users/limgr/Desktop/wind_intensity_bar_plot.png')\n", (17007, 17061), True, 'import 
matplotlib.pyplot as plt\n'), ((17062, 17072), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (17070, 17072), True, 'import matplotlib.pyplot as plt\n'), ((984, 1008), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (998, 1008), False, 'import csv\n'), ((1614, 1721), 'matplotlib.pyplot.plot', 'plt.plot', (['Times0[:5]', 'tmp[:5]'], {'color': 'colors[c]', 'marker': '"""s"""', 'linestyle': 'patterns[c]', 'markersize': 'sizes[c]'}), "(Times0[:5], tmp[:5], color=colors[c], marker='s', linestyle=\n patterns[c], markersize=sizes[c])\n", (1622, 1721), True, 'import matplotlib.pyplot as plt\n'), ((3130, 3154), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (3144, 3154), False, 'import csv\n'), ((3760, 3867), 'matplotlib.pyplot.plot', 'plt.plot', (['Times0[:5]', 'tmp[:5]'], {'color': 'colors[c]', 'marker': '"""s"""', 'linestyle': 'patterns[c]', 'markersize': 'sizes[c]'}), "(Times0[:5], tmp[:5], color=colors[c], marker='s', linestyle=\n patterns[c], markersize=sizes[c])\n", (3768, 3867), True, 'import matplotlib.pyplot as plt\n'), ((4375, 4399), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (4389, 4399), False, 'import csv\n'), ((5005, 5103), 'matplotlib.pyplot.plot', 'plt.plot', (['Times0', 'tmp'], {'color': 'colors[c]', 'marker': '"""s"""', 'linestyle': 'patterns[c]', 'markersize': 'sizes[c]'}), "(Times0, tmp, color=colors[c], marker='s', linestyle=patterns[c],\n markersize=sizes[c])\n", (5013, 5103), True, 'import matplotlib.pyplot as plt\n'), ((5609, 5633), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (5623, 5633), False, 'import csv\n'), ((6239, 6348), 'matplotlib.pyplot.plot', 'plt.plot', (['Times0[:-2]', 'tmp[:-2]'], {'color': 'colors[c]', 'marker': '"""s"""', 'linestyle': 'patterns[c]', 'markersize': 'sizes[c]'}), "(Times0[:-2], tmp[:-2], color=colors[c], marker='s', linestyle=\n patterns[c], markersize=sizes[c])\n", (6247, 6348), True, 'import 
matplotlib.pyplot as plt\n'), ((6855, 6879), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (6869, 6879), False, 'import csv\n'), ((7485, 7583), 'matplotlib.pyplot.plot', 'plt.plot', (['Times0', 'tmp'], {'color': 'colors[c]', 'marker': '"""s"""', 'linestyle': 'patterns[c]', 'markersize': 'sizes[c]'}), "(Times0, tmp, color=colors[c], marker='s', linestyle=patterns[c],\n markersize=sizes[c])\n", (7493, 7583), True, 'import matplotlib.pyplot as plt\n'), ((9511, 9535), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (9525, 9535), False, 'import csv\n'), ((10365, 10389), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (10379, 10389), False, 'import csv\n'), ((11229, 11253), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (11243, 11253), False, 'import csv\n'), ((12093, 12117), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (12107, 12117), False, 'import csv\n'), ((12949, 12973), 'csv.DictReader', 'csv.DictReader', (['csv_file'], {}), '(csv_file)\n', (12963, 12973), False, 'import csv\n')]
|
"""
Unit tests for Schema
"""
import datetime
import json
import pytest
from marshmallow import fields
from pyspark.sql.types import *
from pyspark.sql import Row
from marshmallow_pyspark.constants import *
from marshmallow_pyspark.schema import Schema, _RowValidator
def test_create():
    """A default-constructed Schema must expose the documented defaults
    for the error-column name and the split-errors behaviour."""
    instance = Schema()
    assert instance.error_column_name == DEFAULT_ERRORS_COLUMN
    assert instance.split_errors == DEFAULT_SPLIT_ERRORS
@pytest.mark.parametrize("ma_field, spark_field", [
    (fields.String(), StringType()),
    (fields.DateTime(), TimestampType()),
    (fields.Date(), DateType()),
    (fields.Boolean(), BooleanType()),
    (fields.Integer(), IntegerType()),
    (fields.Number(), DoubleType()),
    (fields.List(fields.String()), ArrayType(StringType())),
    (fields.Nested(Schema.from_dict({"name": fields.String()})), StructType([StructField("name", StringType())]))
])
def test_spark_schema(ma_field, spark_field):
    """Each marshmallow field type must convert to the matching Spark SQL type.

    The generated spark_schema also carries the implicit errors column
    (DEFAULT_ERRORS_COLUMN) as a nullable string field appended last.
    """
    class TestSchema(Schema):
        test_column = ma_field
    # Expected schema: the declared column plus the auto-added errors column.
    spark_schema = StructType(
        [
            StructField("test_column", spark_field, nullable=True),
            StructField(DEFAULT_ERRORS_COLUMN, StringType(), nullable=True)
        ]
    )
    schema = TestSchema()
    assert schema.spark_schema == spark_schema
@pytest.mark.parametrize("schema, input_data, valid_rows, invalid_rows", [
    # Case 1: flat schema with string/int/float/bool coercion.
    (
        Schema.from_dict({
            "name": fields.String(required=True),
            "age": fields.Integer(required=True),
            "expenses": fields.Float(required=True),
            "employed": fields.Boolean(required=True)
        }),
        [
            {"name": "valid_1", "age": "40", "expenses": "43.5", "employed": "True"},
            {"name": "valid_2", "age": "32", "expenses": "30.5", "employed": "False"},
            {"name": "invalid_1", "age": "32.05", "expenses": "30.5", "employed": "False"},
            {"name": "invalid_2", "age": "32", "expenses": "thirty", "employed": "False"},
            {"name": "invalid_3", "age": "32", "expenses": "30.5", "employed": "Fa"},
        ],
        [
            {"name": "valid_1", "age": 40, "expenses": 43.5, "employed": True},
            {"name": "valid_2", "age": 32, "expenses": 30.5, "employed": False},
        ],
        [
            {"name": "invalid_1", "age": "32.05", "expenses": "30.5", "employed": "False"},
            {"name": "invalid_2", "age": "32", "expenses": "thirty", "employed": "False"},
            {"name": "invalid_3", "age": "32", "expenses": "30.5", "employed": "Fa"},
        ]
    ),
    # Case 2: date and datetime parsing (strict ISO formats).
    (
        Schema.from_dict({
            "name": fields.String(required=True),
            "date": fields.Date(required=True),
            "date_time": fields.DateTime(required=True),
        }),
        [
            {"name": "valid_1", "date": "1970-10-15", "date_time": "1970-10-15 01:00:00"},
            {"name": "invalid_1", "date": "1970-10-15 00:00:00", "date_time": "1970-10-15"},
        ],
        [
            {"name": "valid_1",
             "date": datetime.date(1970, 10, 15),
             "date_time": datetime.datetime(1970, 10, 15, 1, 0)},
        ],
        [
            {"name": "invalid_1", "date": "1970-10-15 00:00:00", "date_time": "1970-10-15"},
        ]
    ),
    # Case 3: nested schema validation.
    (
        Schema.from_dict({
            "name": fields.String(required=True),
            "book": fields.Nested(
                Schema.from_dict({
                    "author": fields.String(required=True),
                    "title": fields.String(required=True),
                    "cost": fields.Number(required=True)
                })
            )
        }),
        [
            {"name": "valid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": "32.5"}},
            {"name": "invalid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": "32a"}},
        ],
        [
            {"name": "valid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": 32.5}},
        ],
        [
            {"name": "invalid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": "32a"}},
        ]
    )
])
def test_validate_df(spark_session, schema, input_data, valid_rows, invalid_rows):
    """validate_df with the default split behaviour must route deserialized
    rows to the valid dataframe and the failing rows (as JSON strings in the
    errors column) to the errors dataframe."""
    input_df = spark_session.createDataFrame(input_data)
    # Test with split
    valid_df, errors_df = schema().validate_df(input_df)
    _valid_rows = [row.asDict(recursive=True) for row in valid_df.collect()]
    assert _valid_rows == valid_rows
    # Each error cell is a JSON document {"row": ..., "errors": ...}.
    error_rows = [json.loads(row[DEFAULT_ERRORS_COLUMN]) for row in errors_df.collect()]
    assert [row["row"] for row in error_rows] == invalid_rows
# NOTE(review): this parametrize fixture duplicates the one on
# test_validate_df; consider extracting a shared module-level constant.
@pytest.mark.parametrize("schema, input_data, valid_rows, invalid_rows", [
    # Case 1: flat schema with string/int/float/bool coercion.
    (
        Schema.from_dict({
            "name": fields.String(required=True),
            "age": fields.Integer(required=True),
            "expenses": fields.Float(required=True),
            "employed": fields.Boolean(required=True)
        }),
        [
            {"name": "valid_1", "age": "40", "expenses": "43.5", "employed": "True"},
            {"name": "valid_2", "age": "32", "expenses": "30.5", "employed": "False"},
            {"name": "invalid_1", "age": "32.05", "expenses": "30.5", "employed": "False"},
            {"name": "invalid_2", "age": "32", "expenses": "thirty", "employed": "False"},
            {"name": "invalid_3", "age": "32", "expenses": "30.5", "employed": "Fa"},
        ],
        [
            {"name": "valid_1", "age": 40, "expenses": 43.5, "employed": True},
            {"name": "valid_2", "age": 32, "expenses": 30.5, "employed": False},
        ],
        [
            {"name": "invalid_1", "age": "32.05", "expenses": "30.5", "employed": "False"},
            {"name": "invalid_2", "age": "32", "expenses": "thirty", "employed": "False"},
            {"name": "invalid_3", "age": "32", "expenses": "30.5", "employed": "Fa"},
        ]
    ),
    # Case 2: date and datetime parsing (strict ISO formats).
    (
        Schema.from_dict({
            "name": fields.String(required=True),
            "date": fields.Date(required=True),
            "date_time": fields.DateTime(required=True),
        }),
        [
            {"name": "valid_1", "date": "1970-10-15", "date_time": "1970-10-15 01:00:00"},
            {"name": "invalid_1", "date": "1970-10-15 00:00:00", "date_time": "1970-10-15"},
        ],
        [
            {"name": "valid_1",
             "date": datetime.date(1970, 10, 15),
             "date_time": datetime.datetime(1970, 10, 15, 1, 0)},
        ],
        [
            {"name": "invalid_1", "date": "1970-10-15 00:00:00", "date_time": "1970-10-15"},
        ]
    ),
    # Case 3: nested schema validation.
    (
        Schema.from_dict({
            "name": fields.String(required=True),
            "book": fields.Nested(
                Schema.from_dict({
                    "author": fields.String(required=True),
                    "title": fields.String(required=True),
                    "cost": fields.Number(required=True)
                })
            )
        }),
        [
            {"name": "valid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": "32.5"}},
            {"name": "invalid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": "32a"}},
        ],
        [
            {"name": "valid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": 32.5}},
        ],
        [
            {"name": "invalid_1", "book": {"author": "Sam", "title": "Sam's Book", "cost": "32a"}},
        ]
    )
])
def test_validate_df_no_split(spark_session, schema, input_data, valid_rows, invalid_rows):
    """With split_errors=False there is no separate errors dataframe; valid
    rows come back in the single dataframe with a null errors column."""
    input_df = spark_session.createDataFrame(input_data)
    # Test without split
    valid_df, errors_df = schema(split_errors=False).validate_df(input_df)
    assert errors_df is None
    _valid_rows = [row.asDict(recursive=True) for row in valid_df.collect()]
    # Expected rows gain a null errors column in the unsplit output.
    for row in valid_rows:
        row[DEFAULT_ERRORS_COLUMN] = None
    assert all(row in _valid_rows for row in valid_rows)
def test_add_duplicate_counts(spark_session):
    """_add_duplicate_counts must append one '__count__<cols>' occurrence
    counter per UNIQUE spec: a plain column name counts per column, a list of
    names counts per '~'-joined compound key, and several specs yield several
    counter columns. The expected row order below matches what collect()
    returns for these inputs.
    """
    # Single unique column test
    input_data = [
        {"title": "valid_1", "release_date": "2020-1-10"},
        {"title": "invalid_1", "release_date": "2020-1-11"},
        {"title": "invalid_1", "release_date": "2020-31-11"},
        {"title": "invalid_2", "release_date": "2020-1-51"},
    ]
    input_df = spark_session.createDataFrame(input_data)
    class TestSchema(Schema):
        UNIQUE = ["title"]
        title = fields.Str()
        release_date = fields.Date()
    df = TestSchema()._add_duplicate_counts(input_df)
    rows = [row.asDict(recursive=True) for row in df.collect()]
    # The second occurrence of a duplicated title gets count 2.
    assert rows == [
        {'release_date': '2020-1-11', 'title': 'invalid_1', '__count__title': 1},
        {'release_date': '2020-31-11', 'title': 'invalid_1', '__count__title': 2},
        {'release_date': '2020-1-51', 'title': 'invalid_2', '__count__title': 1},
        {'release_date': '2020-1-10', 'title': 'valid_1', '__count__title': 1}
    ]
    # Compound unique column test
    input_data = [
        {"title": "valid_1", "release_date": "2020-1-10"},
        {"title": "invalid_1", "release_date": "2020-1-11"},
        {"title": "invalid_1", "release_date": "2020-31-11"},
        {"title": "invalid_2", "release_date": "2020-1-51"},
        {"title": "invalid_2", "release_date": "2020-1-51"},
    ]
    input_df = spark_session.createDataFrame(input_data)
    class TestSchema(Schema):
        UNIQUE = [["title", "release_date"]]
        title = fields.Str()
        release_date = fields.Date()
    df = TestSchema()._add_duplicate_counts(input_df)
    rows = [row.asDict(recursive=True) for row in df.collect()]
    # Only full (title, release_date) duplicates are counted as repeats.
    assert rows == [
        {'release_date': '2020-1-11', 'title': 'invalid_1', '__count__title~release_date': 1},
        {'release_date': '2020-31-11', 'title': 'invalid_1', '__count__title~release_date': 1},
        {'release_date': '2020-1-51', 'title': 'invalid_2', '__count__title~release_date': 1},
        {'release_date': '2020-1-51', 'title': 'invalid_2', '__count__title~release_date': 2},
        {'release_date': '2020-1-10', 'title': 'valid_1', '__count__title~release_date': 1}
    ]
    # Multiple unique columns test
    input_data = [
        {"title": "valid_1", "release_date": "2020-1-10"},
        {"title": "invalid_1", "release_date": "2020-1-11"},
        {"title": "invalid_1", "release_date": "2020-31-11"},
        {"title": "invalid_2", "release_date": "2020-1-51"},
        {"title": "invalid_2", "release_date": "2020-1-51"},
    ]
    input_df = spark_session.createDataFrame(input_data)
    class TestSchema(Schema):
        UNIQUE = ["title", "release_date"]
        title = fields.Str()
        release_date = fields.Date()
    df = TestSchema()._add_duplicate_counts(input_df)
    rows = [row.asDict(recursive=True) for row in df.collect()]
    # Two independent counters: one per unique column.
    assert rows == [
        {'release_date': '2020-1-10', 'title': 'valid_1', '__count__title': 1, '__count__release_date': 1},
        {'release_date': '2020-1-11', 'title': 'invalid_1', '__count__title': 1, '__count__release_date': 1},
        {'release_date': '2020-1-51', 'title': 'invalid_2', '__count__title': 1, '__count__release_date': 1},
        {'release_date': '2020-1-51', 'title': 'invalid_2', '__count__title': 2, '__count__release_date': 2},
        {'release_date': '2020-31-11', 'title': 'invalid_1', '__count__title': 2, '__count__release_date': 1}
    ]
def test_validate_df_with_duplicates(spark_session):
    """validate_df must reject repeated occurrences of a UNIQUE key as
    'duplicate row' errors (keeping the first occurrence) and still report
    ordinary deserialization failures. Error cells are raw JSON strings that
    include the internal '__count__...' helper columns.
    """
    # Single unique column test
    input_data = [
        {"title": "title_1", "release_date": "2020-1-10"},
        {"title": "title_2", "release_date": "2020-1-11"},
        {"title": "title_2", "release_date": "2020-3-11"},
        {"title": "title_3", "release_date": "2020-1-51"},
    ]
    input_df = spark_session.createDataFrame(input_data)
    class TestSchema(Schema):
        UNIQUE = ["title"]
        title = fields.Str()
        release_date = fields.Date()
    valid_df, errors_df = TestSchema().validate_df(input_df)
    valid_rows = [row.asDict(recursive=True) for row in valid_df.collect()]
    error_rows = [row.asDict(recursive=True) for row in errors_df.collect()]
    assert valid_rows == [
        {'title': 'title_1', 'release_date': datetime.date(2020, 1, 10)},
        {'title': 'title_2', 'release_date': datetime.date(2020, 1, 11)}
    ]
    # Second 'title_2' is a duplicate; 'title_3' has an invalid date.
    assert error_rows == [
        {'_errors': '{"row": {"release_date": "2020-3-11", "title": "title_2", "__count__title": 2}, '
                    '"errors": ["duplicate row"]}'},
        {'_errors': '{"row": {"release_date": "2020-1-51", "title": "title_3", "__count__title": 1}, '
                    '"errors": {"release_date": ["Not a valid date."]}}'}
    ]
    # Compound unique column test
    input_data = [
        {"title": "title_1", "release_date": "2020-1-10"},
        {"title": "title_2", "release_date": "2020-1-11"},
        {"title": "title_2", "release_date": "2020-3-11"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_4", "release_date": "2020-1-51"},
    ]
    input_df = spark_session.createDataFrame(input_data)
    class TestSchema(Schema):
        UNIQUE = [["title", "release_date"]]
        title = fields.Str()
        release_date = fields.Date()
    valid_df, errors_df = TestSchema().validate_df(input_df)
    valid_rows = [row.asDict(recursive=True) for row in valid_df.collect()]
    error_rows = [row.asDict(recursive=True) for row in errors_df.collect()]
    # Both 'title_2' rows survive: their (title, release_date) pairs differ.
    assert valid_rows == [
        {'title': 'title_1', 'release_date': datetime.date(2020, 1, 10)},
        {'title': 'title_2', 'release_date': datetime.date(2020, 1, 11)},
        {'title': 'title_2', 'release_date': datetime.date(2020, 3, 11)},
        {'title': 'title_3', 'release_date': datetime.date(2020, 1, 21)}
    ]
    assert error_rows == [
        {'_errors': '{"row": {"release_date": "2020-1-21", "title": "title_3", "__count__title~release_date": 2}, '
                    '"errors": ["duplicate row"]}'},
        {'_errors': '{"row": {"release_date": "2020-1-51", "title": "title_4", "__count__title~release_date": 1}, '
                    '"errors": {"release_date": ["Not a valid date."]}}'}
    ]
    # Multiple unique columns test
    input_data = [
        {"title": "title_1", "release_date": "2020-1-10"},
        {"title": "title_2", "release_date": "2020-1-11"},
        {"title": "title_2", "release_date": "2020-3-11"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_4", "release_date": "2020-1-51"},
    ]
    input_df = spark_session.createDataFrame(input_data)
    class TestSchema(Schema):
        UNIQUE = ["title", "release_date"]
        title = fields.Str()
        release_date = fields.Date()
    valid_df, errors_df = TestSchema().validate_df(input_df)
    valid_rows = [row.asDict(recursive=True) for row in valid_df.collect()]
    error_rows = [row.asDict(recursive=True) for row in errors_df.collect()]
    # With per-column uniqueness, the second 'title_2' row is now a duplicate.
    assert valid_rows == [
        {'title': 'title_1', 'release_date': datetime.date(2020, 1, 10)},
        {'title': 'title_2', 'release_date': datetime.date(2020, 1, 11)},
        {'title': 'title_3', 'release_date': datetime.date(2020, 1, 21)}
    ]
    assert error_rows == [
        {'_errors': '{"row": {"release_date": "2020-1-21", "title": "title_3", '
                    '"__count__title": 2, "__count__release_date": 2}, '
                    '"errors": ["duplicate row"]}'},
        {'_errors': '{"row": {"release_date": "2020-1-51", "title": "title_4", '
                    '"__count__title": 1, "__count__release_date": 1}, '
                    '"errors": {"release_date": ["Not a valid date."]}}'},
        {'_errors': '{"row": {"release_date": "2020-3-11", "title": "title_2", '
                    '"__count__title": 2, "__count__release_date": 1}, '
                    '"errors": ["duplicate row"]}'}
    ]
def test_validate_df_invalid_unique(spark_session):
    """validate_df must raise ValueError when UNIQUE references columns that
    do not exist in the schema — for a single column spec, a compound
    (list-of-list) spec, and a multi-column spec.

    The original body repeated the same schema/raise check three times; it is
    factored into a local helper so each case is a single call.
    """

    def _assert_invalid_unique(unique, input_data):
        # Build a schema whose UNIQUE spec names at least one unknown column
        # and check that validation fails fast with ValueError.
        input_df = spark_session.createDataFrame(input_data)

        class TestSchema(Schema):
            UNIQUE = unique
            title = fields.Str()
            release_date = fields.Date()

        with pytest.raises(ValueError):
            TestSchema().validate_df(input_df)

    # Single unique column test: 'title_fake' is not a schema field.
    _assert_invalid_unique(["title_fake"], [
        {"title": "title_1", "release_date": "2020-1-10"},
        {"title": "title_2", "release_date": "2020-1-11"},
        {"title": "title_2", "release_date": "2020-3-11"},
        {"title": "title_3", "release_date": "2020-1-51"},
    ])
    # Compound unique column test: 'date' is not a schema field.
    _assert_invalid_unique([["title", "date"]], [
        {"title": "title_1", "release_date": "2020-1-10"},
        {"title": "title_2", "release_date": "2020-1-11"},
        {"title": "title_2", "release_date": "2020-3-11"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_4", "release_date": "2020-1-51"},
    ])
    # Multiple unique columns test: '_date' is not a schema field.
    _assert_invalid_unique(["title", "_date"], [
        {"title": "title_1", "release_date": "2020-1-10"},
        {"title": "title_2", "release_date": "2020-1-11"},
        {"title": "title_2", "release_date": "2020-3-11"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_3", "release_date": "2020-1-21"},
        {"title": "title_4", "release_date": "2020-1-51"},
    ])
def test_row_validator():
    """_RowValidator.validate_row must deserialize valid rows and, for
    invalid rows, return only an errors cell whose JSON payload echoes the
    original row values alongside the marshmallow error messages.
    """
    input_data = [
        {"title": "valid_1", "release_date": "2020-1-10", "timestamp": datetime.datetime(2021, 5, 5)},
        {"title": "valid_2", "release_date": "2020-1-11", "timestamp": datetime.datetime(2021, 5, 5)},
        {"title": "invalid_1", "release_date": "2020-31-11", "timestamp": datetime.datetime(2021, 5, 5)},
        {"title": "invalid_2", "release_date": "2020-1-51", "timestamp": datetime.datetime(2021, 5, 5)},
    ]
    class TestSchema(Schema):
        title = fields.Str()
        release_date = fields.Date()
        # Raw field with an explicit Spark type override (marshmallow_pyspark extension).
        timestamp = fields.Raw(spark_type=DateType())
    # No UNIQUE constraints for this validator instance.
    validator = _RowValidator(TestSchema(), DEFAULT_ERRORS_COLUMN, [])
    validated_data = [validator.validate_row(Row(**x)) for x in input_data]
    # Decode the JSON error payloads so they can be compared as dicts.
    for row in validated_data:
        if '_errors' in row:
            row['_errors'] = json.loads(row['_errors'])
    assert validated_data == [
        {
            'release_date': datetime.date(2020, 1, 10),
            'timestamp': datetime.datetime(2021, 5, 5, 0, 0),
            'title': 'valid_1'
        },
        {
            'release_date': datetime.date(2020, 1, 11),
            'timestamp': datetime.datetime(2021, 5, 5, 0, 0),
            'title': 'valid_2'
        },
        {'_errors': {"row": {
            "release_date": "2020-31-11",
            'timestamp': '2021-05-05 00:00:00',
            "title": "invalid_1"
        },
            "errors": {"release_date": ["Not a valid date."]}}},
        {'_errors': {"row": {
            "release_date": "2020-1-51",
            'timestamp': '2021-05-05 00:00:00',
            "title": "invalid_2"
        },
            "errors": {"release_date": ["Not a valid date."]}}}
    ]
def test_row_validator_with_duplicates():
    """With UNIQUE constraints, _RowValidator must treat a '__count__<col>'
    value greater than 1 as a 'duplicate row' error and strip the helper
    column from rows that pass validation."""
    # The '__count__title' values simulate what _add_duplicate_counts produces.
    input_data = [
        {"title": "title_1", "release_date": "2020-1-10", '__count__title': 1},
        {"title": "title_2", "release_date": "2020-1-11", '__count__title': 1},
        {"title": "title_2", "release_date": "2020-3-11", '__count__title': 2},
        {"title": "title_3", "release_date": "2020-1-51", '__count__title': 1},
    ]
    class TestSchema(Schema):
        UNIQUE = ["title"]
        title = fields.Str()
        release_date = fields.Date()
    validator = _RowValidator(TestSchema(), DEFAULT_ERRORS_COLUMN, TestSchema.UNIQUE)
    validated_data = [validator.validate_row(Row(**x)) for x in input_data]
    # Decode the JSON error payloads so they can be compared as dicts.
    for row in validated_data:
        if '_errors' in row:
            row['_errors'] = json.loads(row['_errors'])
    assert validated_data == [
        {'release_date': datetime.date(2020, 1, 10), 'title': 'title_1'},
        {'release_date': datetime.date(2020, 1, 11), 'title': 'title_2'},
        {'_errors': {"row": {"__count__title": 2, "release_date": "2020-3-11", "title": "title_2"},
                     "errors": ["duplicate row"]}},
        {'_errors': {"row": {"__count__title": 1, "release_date": "2020-1-51", "title": "title_3"},
                     "errors": {"release_date": ["Not a valid date."]}}}
    ]
|
[
"json.loads",
"marshmallow.fields.Integer",
"marshmallow.fields.DateTime",
"marshmallow.fields.Float",
"marshmallow.fields.Str",
"datetime.date",
"marshmallow.fields.Boolean",
"marshmallow.fields.Number",
"datetime.datetime",
"pytest.raises",
"marshmallow.fields.String",
"marshmallow_pyspark.schema.Schema",
"marshmallow.fields.Date",
"pyspark.sql.Row"
] |
[((309, 317), 'marshmallow_pyspark.schema.Schema', 'Schema', ([], {}), '()\n', (315, 317), False, 'from marshmallow_pyspark.schema import Schema, _RowValidator\n'), ((4668, 4706), 'json.loads', 'json.loads', (['row[DEFAULT_ERRORS_COLUMN]'], {}), '(row[DEFAULT_ERRORS_COLUMN])\n', (4678, 4706), False, 'import json\n'), ((8804, 8816), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (8814, 8816), False, 'from marshmallow import fields\n'), ((8840, 8853), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (8851, 8853), False, 'from marshmallow import fields\n'), ((9840, 9852), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (9850, 9852), False, 'from marshmallow import fields\n'), ((9876, 9889), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (9887, 9889), False, 'from marshmallow import fields\n'), ((11022, 11034), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (11032, 11034), False, 'from marshmallow import fields\n'), ((11058, 11071), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (11069, 11071), False, 'from marshmallow import fields\n'), ((12246, 12258), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (12256, 12258), False, 'from marshmallow import fields\n'), ((12282, 12295), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (12293, 12295), False, 'from marshmallow import fields\n'), ((13621, 13633), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (13631, 13633), False, 'from marshmallow import fields\n'), ((13657, 13670), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (13668, 13670), False, 'from marshmallow import fields\n'), ((15169, 15181), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (15179, 15181), False, 'from marshmallow import fields\n'), ((15205, 15218), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (15216, 15218), False, 'from marshmallow import fields\n'), ((16847, 16859), 'marshmallow.fields.Str', 'fields.Str', ([], 
{}), '()\n', (16857, 16859), False, 'from marshmallow import fields\n'), ((16883, 16896), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (16894, 16896), False, 'from marshmallow import fields\n'), ((16907, 16932), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (16920, 16932), False, 'import pytest\n'), ((17533, 17545), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (17543, 17545), False, 'from marshmallow import fields\n'), ((17569, 17582), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (17580, 17582), False, 'from marshmallow import fields\n'), ((17593, 17618), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (17606, 17618), False, 'import pytest\n'), ((18219, 18231), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (18229, 18231), False, 'from marshmallow import fields\n'), ((18255, 18268), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (18266, 18268), False, 'from marshmallow import fields\n'), ((18279, 18304), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (18292, 18304), False, 'import pytest\n'), ((18866, 18878), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (18876, 18878), False, 'from marshmallow import fields\n'), ((18902, 18915), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (18913, 18915), False, 'from marshmallow import fields\n'), ((20531, 20543), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (20541, 20543), False, 'from marshmallow import fields\n'), ((20567, 20580), 'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (20578, 20580), False, 'from marshmallow import fields\n'), ((493, 508), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (506, 508), False, 'from marshmallow import fields\n'), ((530, 547), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {}), '()\n', (545, 547), False, 'from marshmallow import fields\n'), ((572, 585), 
'marshmallow.fields.Date', 'fields.Date', ([], {}), '()\n', (583, 585), False, 'from marshmallow import fields\n'), ((605, 621), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {}), '()\n', (619, 621), False, 'from marshmallow import fields\n'), ((644, 660), 'marshmallow.fields.Integer', 'fields.Integer', ([], {}), '()\n', (658, 660), False, 'from marshmallow import fields\n'), ((683, 698), 'marshmallow.fields.Number', 'fields.Number', ([], {}), '()\n', (696, 698), False, 'from marshmallow import fields\n'), ((18467, 18496), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(5)', '(5)'], {}), '(2021, 5, 5)\n', (18484, 18496), False, 'import datetime\n'), ((18570, 18599), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(5)', '(5)'], {}), '(2021, 5, 5)\n', (18587, 18599), False, 'import datetime\n'), ((18676, 18705), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(5)', '(5)'], {}), '(2021, 5, 5)\n', (18693, 18705), False, 'import datetime\n'), ((18781, 18810), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(5)', '(5)'], {}), '(2021, 5, 5)\n', (18798, 18810), False, 'import datetime\n'), ((19087, 19095), 'pyspark.sql.Row', 'Row', ([], {}), '(**x)\n', (19090, 19095), False, 'from pyspark.sql import Row\n'), ((19207, 19233), 'json.loads', 'json.loads', (["row['_errors']"], {}), "(row['_errors'])\n", (19217, 19233), False, 'import json\n'), ((20713, 20721), 'pyspark.sql.Row', 'Row', ([], {}), '(**x)\n', (20716, 20721), False, 'from pyspark.sql import Row\n'), ((20833, 20859), 'json.loads', 'json.loads', (["row['_errors']"], {}), "(row['_errors'])\n", (20843, 20859), False, 'import json\n'), ((732, 747), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (745, 747), False, 'from marshmallow import fields\n'), ((12583, 12609), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(10)'], {}), '(2020, 1, 10)\n', (12596, 12609), False, 'import datetime\n'), ((12657, 12683), 'datetime.date', 'datetime.date', (['(2020)', '(1)', 
'(11)'], {}), '(2020, 1, 11)\n', (12670, 12683), False, 'import datetime\n'), ((13958, 13984), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(10)'], {}), '(2020, 1, 10)\n', (13971, 13984), False, 'import datetime\n'), ((14032, 14058), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(11)'], {}), '(2020, 1, 11)\n', (14045, 14058), False, 'import datetime\n'), ((14106, 14132), 'datetime.date', 'datetime.date', (['(2020)', '(3)', '(11)'], {}), '(2020, 3, 11)\n', (14119, 14132), False, 'import datetime\n'), ((14180, 14206), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(21)'], {}), '(2020, 1, 21)\n', (14193, 14206), False, 'import datetime\n'), ((15506, 15532), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(10)'], {}), '(2020, 1, 10)\n', (15519, 15532), False, 'import datetime\n'), ((15580, 15606), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(11)'], {}), '(2020, 1, 11)\n', (15593, 15606), False, 'import datetime\n'), ((15654, 15680), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(21)'], {}), '(2020, 1, 21)\n', (15667, 15680), False, 'import datetime\n'), ((19303, 19329), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(10)'], {}), '(2020, 1, 10)\n', (19316, 19329), False, 'import datetime\n'), ((19356, 19391), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(5)', '(5)', '(0)', '(0)'], {}), '(2021, 5, 5, 0, 0)\n', (19373, 19391), False, 'import datetime\n'), ((19473, 19499), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(11)'], {}), '(2020, 1, 11)\n', (19486, 19499), False, 'import datetime\n'), ((19526, 19561), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(5)', '(5)', '(0)', '(0)'], {}), '(2021, 5, 5, 0, 0)\n', (19543, 19561), False, 'import datetime\n'), ((20916, 20942), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(10)'], {}), '(2020, 1, 10)\n', (20929, 20942), False, 'import datetime\n'), ((20990, 21016), 'datetime.date', 'datetime.date', (['(2020)', '(1)', '(11)'], {}), '(2020, 
1, 11)\n', (21003, 21016), False, 'import datetime\n'), ((1413, 1441), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (1426, 1441), False, 'from marshmallow import fields\n'), ((1466, 1495), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (1480, 1495), False, 'from marshmallow import fields\n'), ((1525, 1552), 'marshmallow.fields.Float', 'fields.Float', ([], {'required': '(True)'}), '(required=True)\n', (1537, 1552), False, 'from marshmallow import fields\n'), ((1582, 1611), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'required': '(True)'}), '(required=True)\n', (1596, 1611), False, 'from marshmallow import fields\n'), ((2694, 2722), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (2707, 2722), False, 'from marshmallow import fields\n'), ((2748, 2774), 'marshmallow.fields.Date', 'fields.Date', ([], {'required': '(True)'}), '(required=True)\n', (2759, 2774), False, 'from marshmallow import fields\n'), ((2805, 2835), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)'}), '(required=True)\n', (2820, 2835), False, 'from marshmallow import fields\n'), ((3149, 3176), 'datetime.date', 'datetime.date', (['(1970)', '(10)', '(15)'], {}), '(1970, 10, 15)\n', (3162, 3176), False, 'import datetime\n'), ((3208, 3245), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(10)', '(15)', '(1)', '(0)'], {}), '(1970, 10, 15, 1, 0)\n', (3225, 3245), False, 'import datetime\n'), ((3456, 3484), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (3469, 3484), False, 'from marshmallow import fields\n'), ((4939, 4967), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (4952, 4967), False, 'from marshmallow import fields\n'), ((4992, 5021), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': 
'(True)'}), '(required=True)\n', (5006, 5021), False, 'from marshmallow import fields\n'), ((5051, 5078), 'marshmallow.fields.Float', 'fields.Float', ([], {'required': '(True)'}), '(required=True)\n', (5063, 5078), False, 'from marshmallow import fields\n'), ((5108, 5137), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'required': '(True)'}), '(required=True)\n', (5122, 5137), False, 'from marshmallow import fields\n'), ((6220, 6248), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (6233, 6248), False, 'from marshmallow import fields\n'), ((6274, 6300), 'marshmallow.fields.Date', 'fields.Date', ([], {'required': '(True)'}), '(required=True)\n', (6285, 6300), False, 'from marshmallow import fields\n'), ((6331, 6361), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)'}), '(required=True)\n', (6346, 6361), False, 'from marshmallow import fields\n'), ((6675, 6702), 'datetime.date', 'datetime.date', (['(1970)', '(10)', '(15)'], {}), '(1970, 10, 15)\n', (6688, 6702), False, 'import datetime\n'), ((6734, 6771), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(10)', '(15)', '(1)', '(0)'], {}), '(1970, 10, 15, 1, 0)\n', (6751, 6771), False, 'import datetime\n'), ((6982, 7010), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (6995, 7010), False, 'from marshmallow import fields\n'), ((821, 836), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (834, 836), False, 'from marshmallow import fields\n'), ((3598, 3626), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (3611, 3626), False, 'from marshmallow import fields\n'), ((3661, 3689), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (3674, 3689), False, 'from marshmallow import fields\n'), ((3723, 3751), 'marshmallow.fields.Number', 'fields.Number', ([], {'required': '(True)'}), 
'(required=True)\n', (3736, 3751), False, 'from marshmallow import fields\n'), ((7124, 7152), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (7137, 7152), False, 'from marshmallow import fields\n'), ((7187, 7215), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (7200, 7215), False, 'from marshmallow import fields\n'), ((7249, 7277), 'marshmallow.fields.Number', 'fields.Number', ([], {'required': '(True)'}), '(required=True)\n', (7262, 7277), False, 'from marshmallow import fields\n')]
|
import pygame, random
def ball_animation():
    """Advance the ball one frame: move it, bounce it off the top/bottom
    walls and both paddles, and record a score when it leaves the screen.

    Mutates the module-level ball speed and score globals. Setting
    score_time here presumably triggers the reset/countdown elsewhere
    (ball_start) — the main loop is not visible here; verify.
    """
    global ball_speed_x, ball_speed_y, left_player_score, right_player_score, score_time
    ball.x += ball_speed_x
    ball.y += ball_speed_y
    # Bounce off the top and bottom walls.
    if ball.top <= 0 or ball.bottom >= screen_height:
        ball_speed_y *= -1
    # Left Player Score
    # NOTE(review): the ball leaving the LEFT edge increments
    # left_player_score; in conventional pong the player on the opposite
    # side scores. Confirm against the score-rendering code before changing.
    if ball.right <= 0:
        score_time = pygame.time.get_ticks()
        left_player_score += 1
    # right player Score
    if ball.left >= screen_width:
        score_time = pygame.time.get_ticks()
        right_player_score += 1
    # Left paddle collision: only when the ball travels leftwards. The 10px
    # tolerance decides whether to flip horizontally (face hit) or
    # vertically (top/bottom edge hit).
    if ball.colliderect(left_player) and ball_speed_x < 0:
        if abs(ball.right - left_player.right) < 10:
            ball_speed_x *= -1
        elif abs(ball.bottom - left_player.top) < 10 and ball_speed_y > 0:
            ball_speed_y *= -1
        elif abs(ball.top - left_player.bottom) < 10 and ball_speed_y < 0:
            ball_speed_y *= -1
    # Right paddle collision: mirror of the above, rightward travel only.
    if ball.colliderect(right_player) and ball_speed_x > 0:
        if abs(ball.left - right_player.left) < 10:
            ball_speed_x *= -1
        elif abs(ball.bottom - right_player.top) < 10 and ball_speed_y > 0:
            ball_speed_y *= -1
        elif abs(ball.top - right_player.bottom) < 10 and ball_speed_y < 0:
            ball_speed_y *= -1
def player_animation():
    """Move both paddles according to the currently held keys.

    Right paddle: UP/DOWN arrows. Left paddle: W/S. Movement is clamped so
    a paddle never crosses the top or bottom edge of the screen.
    """
    pressed = pygame.key.get_pressed()
    # (key, paddle, direction) in the same order the checks originally ran.
    bindings = (
        (pygame.K_DOWN, right_player, 1),
        (pygame.K_UP, right_player, -1),
        (pygame.K_s, left_player, 1),
        (pygame.K_w, left_player, -1),
    )
    for key, paddle, direction in bindings:
        if not pressed[key]:
            continue
        if direction > 0:
            # Moving down: stop before the bottom edge.
            if paddle.bottom + PLAYER_VELOSITY < screen_height:
                paddle.y += PLAYER_VELOSITY
        # Moving up: stop before the top edge.
        elif paddle.top - PLAYER_VELOSITY > 0:
            paddle.y -= PLAYER_VELOSITY
def ball_start():
    """Re-center the ball and run a 3-2-1 countdown before serving it.

    While fewer than 2100 ms have passed since ``score_time`` the ball is
    frozen and the current countdown digit is drawn; afterwards the ball
    is served in a random diagonal direction and ``score_time`` is cleared
    so the main loop stops calling this function.
    """
    global ball_speed_x, ball_speed_y, ball_moving, score_time
    ball.center = (screen_width//2, screen_height//2)
    current_time = pygame.time.get_ticks()
    # Countdown digits: 3 for 0-700 ms, 2 for 700-1400 ms, 1 for 1400-2100 ms.
    if current_time - score_time < 700:
        number_three = basic_font.render("3",False,WHITE)
        screen.blit(number_three,(screen_width/2 - 10, screen_height/2 + 20))
    if 700 < current_time - score_time < 1400:
        number_two = basic_font.render("2",False,WHITE)
        screen.blit(number_two,(screen_width/2 - 10, screen_height/2 + 20))
    if 1400 < current_time - score_time < 2100:
        number_one = basic_font.render("1",False,WHITE)
        screen.blit(number_one,(screen_width/2 - 10, screen_height/2 + 20))
    if current_time - score_time < 2100:
        # Freeze the ball until the countdown finishes.
        ball_speed_y, ball_speed_x = 0,0
    else:
        # Serve in a random diagonal and end the countdown.
        ball_speed_x = 7 * random.choice((1,-1))
        ball_speed_y = 7 * random.choice((1,-1))
        score_time = None
def draw_winner(text):
    """Render *text* centered on the screen, show it, then pause 5 seconds."""
    surface = WINNER_FONT.render(text, 1, WHITE)
    pos_x = screen_width/2 - surface.get_width() / 2
    pos_y = screen_height/2 - surface.get_height()/2
    screen.blit(surface, (pos_x, pos_y))
    pygame.display.update()
    pygame.time.delay(5000)
# General setup
pygame.mixer.pre_init(44100,-16,1, 1024)
pygame.init()
clock = pygame.time.Clock()
# Main Window
screen_width = 1280
screen_height = 960
screen = pygame.display.set_mode((screen_width,screen_height))
pygame.display.set_caption('Pong')
# Colors
WHITE = (255,255,255)
# Game Rectangles
ball = pygame.Rect(screen_width // 2 - 10, screen_height // 2 - 10, 20, 20)
right_player = pygame.Rect(screen_width - 30, screen_height // 2 - 70, 20,100)
left_player = pygame.Rect(10, screen_height // 2 - 70, 20,100)
# Game Variables
ball_speed_x = 7 * random.choice((1,-1))
ball_speed_y = 7 * random.choice((1,-1))
FPS = 60
PLAYER_VELOSITY = 6
ball_moving = False
# score_time being truthy makes the main loop run the serve countdown;
# starting it at True triggers an initial countdown on the first frame.
score_time = True
# Score Text
left_player_score = 0
right_player_score = 0
basic_font = pygame.font.SysFont('comicsans', 40)
WINNER_FONT = pygame.font.SysFont('comicsans', 100)
run = True
# Main game loop: handle events, advance game state, redraw, cap at FPS.
while run:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False
    #Game Logic
    ball_animation()
    player_animation()
    # Visuals
    screen.fill(0)
    pygame.draw.rect(screen, WHITE, left_player)
    pygame.draw.rect(screen, WHITE, right_player)
    pygame.draw.rect(screen, WHITE, ball)
    pygame.draw.line(screen, WHITE, (screen_width / 2, 0),(screen_width / 2, screen_height), 5)
    if score_time:
        ball_start()
    left_player_text = basic_font.render(f'{left_player_score}',False,WHITE)
    screen.blit(left_player_text,(screen_width // 2 + 30, 10))
    right_player_text = basic_font.render(f'{right_player_score}',False,WHITE)
    screen.blit(right_player_text,(screen_width // 2 - 30, 10))
    winner_text = ""
    # First player to 5 points wins and the game ends.
    if left_player_score == 5:
        winner_text = "Left Wins!"
    if right_player_score == 5:
        winner_text = "Right Wins!"
    if winner_text != "":
        draw_winner(winner_text)
        break
    pygame.display.flip()
    clock.tick(FPS)
pygame.quit()
|
[
"pygame.quit",
"pygame.draw.line",
"pygame.font.SysFont",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.draw.rect",
"pygame.Rect",
"pygame.mixer.pre_init",
"pygame.time.delay",
"pygame.init",
"random.choice",
"pygame.display.flip",
"pygame.display.update",
"pygame.time.get_ticks",
"pygame.display.set_caption",
"pygame.time.Clock",
"pygame.key.get_pressed"
] |
[((2741, 2783), 'pygame.mixer.pre_init', 'pygame.mixer.pre_init', (['(44100)', '(-16)', '(1)', '(1024)'], {}), '(44100, -16, 1, 1024)\n', (2762, 2783), False, 'import pygame, random\n'), ((2782, 2795), 'pygame.init', 'pygame.init', ([], {}), '()\n', (2793, 2795), False, 'import pygame, random\n'), ((2804, 2823), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (2821, 2823), False, 'import pygame, random\n'), ((2888, 2942), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(screen_width, screen_height)'], {}), '((screen_width, screen_height))\n', (2911, 2942), False, 'import pygame, random\n'), ((2942, 2976), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Pong"""'], {}), "('Pong')\n", (2968, 2976), False, 'import pygame, random\n'), ((3035, 3103), 'pygame.Rect', 'pygame.Rect', (['(screen_width // 2 - 10)', '(screen_height // 2 - 10)', '(20)', '(20)'], {}), '(screen_width // 2 - 10, screen_height // 2 - 10, 20, 20)\n', (3046, 3103), False, 'import pygame, random\n'), ((3119, 3183), 'pygame.Rect', 'pygame.Rect', (['(screen_width - 30)', '(screen_height // 2 - 70)', '(20)', '(100)'], {}), '(screen_width - 30, screen_height // 2 - 70, 20, 100)\n', (3130, 3183), False, 'import pygame, random\n'), ((3197, 3246), 'pygame.Rect', 'pygame.Rect', (['(10)', '(screen_height // 2 - 70)', '(20)', '(100)'], {}), '(10, screen_height // 2 - 70, 20, 100)\n', (3208, 3246), False, 'import pygame, random\n'), ((3485, 3521), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""comicsans"""', '(40)'], {}), "('comicsans', 40)\n", (3504, 3521), False, 'import pygame, random\n'), ((3536, 3573), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""comicsans"""', '(100)'], {}), "('comicsans', 100)\n", (3555, 3573), False, 'import pygame, random\n'), ((4553, 4566), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (4564, 4566), False, 'import pygame, random\n'), ((1143, 1167), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1165, 1167), 
False, 'import pygame, random\n'), ((1749, 1772), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (1770, 1772), False, 'import pygame, random\n'), ((2672, 2695), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2693, 2695), False, 'import pygame, random\n'), ((2700, 2723), 'pygame.time.delay', 'pygame.time.delay', (['(5000)'], {}), '(5000)\n', (2717, 2723), False, 'import pygame, random\n'), ((3283, 3305), 'random.choice', 'random.choice', (['(1, -1)'], {}), '((1, -1))\n', (3296, 3305), False, 'import pygame, random\n'), ((3324, 3346), 'random.choice', 'random.choice', (['(1, -1)'], {}), '((1, -1))\n', (3337, 3346), False, 'import pygame, random\n'), ((3611, 3629), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (3627, 3629), False, 'import pygame, random\n'), ((3764, 3808), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'WHITE', 'left_player'], {}), '(screen, WHITE, left_player)\n', (3780, 3808), False, 'import pygame, random\n'), ((3810, 3855), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'WHITE', 'right_player'], {}), '(screen, WHITE, right_player)\n', (3826, 3855), False, 'import pygame, random\n'), ((3857, 3894), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'WHITE', 'ball'], {}), '(screen, WHITE, ball)\n', (3873, 3894), False, 'import pygame, random\n'), ((3896, 3992), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'WHITE', '(screen_width / 2, 0)', '(screen_width / 2, screen_height)', '(5)'], {}), '(screen, WHITE, (screen_width / 2, 0), (screen_width / 2,\n screen_height), 5)\n', (3912, 3992), False, 'import pygame, random\n'), ((4513, 4534), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (4532, 4534), False, 'import pygame, random\n'), ((315, 338), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (336, 338), False, 'import pygame, random\n'), ((435, 458), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (456, 458), False, 'import 
pygame, random\n'), ((2366, 2388), 'random.choice', 'random.choice', (['(1, -1)'], {}), '((1, -1))\n', (2379, 2388), False, 'import pygame, random\n'), ((2409, 2431), 'random.choice', 'random.choice', (['(1, -1)'], {}), '((1, -1))\n', (2422, 2431), False, 'import pygame, random\n')]
|
#!/usr/bin/env python
""" Award points for position. """
import benchmark_analysis_utils as bau
import pandas as pd
import sys
def aggregate(df, ratio=False):
    """Score codecs by summing their rank positions over every benchmark cell.

    For each (size, storage, complexity) cell and each metric the codecs
    are ranked best-first; a codec's score is the sum of its positions, so
    a LOWER total means a better codec overall.

    Fixes relative to the original:
    * ``DataFrame.sort`` and ``Series.iteritems`` were removed from pandas
      (0.20 / 2.0) — replaced by ``sort_values`` / ``items``.
    * The ratio branch filtered with ``it.loc[<set>]``, which both is
      rejected by modern pandas and reorders the just-sorted series in
      arbitrary set order, scrambling the ranks — replaced by an
      order-preserving boolean mask on the codec index level.

    Args:
        df: results frame whose MultiIndex starts with size, storage and
            complexity levels; the second-to-last level names the codec.
        ratio: when True, also rank the 'ratio' column and exclude the
            non-codec baselines 'tables' and 'npy'.

    Returns:
        dict mapping the "_"-joined codec index tuple to its total score.
    """
    values = ['compress', 'decompress', 'dc_no_cache']
    if ratio:
        values.append('ratio')
    results = {}
    for size in ('small', 'mid', 'large'):
        for storage in ('ephemeral', 'esb'):
            for complexity in ('arange', 'linspace', 'poisson', 'neuronal', 'bitcoin'):
                for value in values:
                    # Throughput metrics rank descending (bigger is better);
                    # 'ratio' ranks ascending, matching the original logic.
                    it = df.loc[(size, storage, complexity)].sort_values(
                        value, ascending=value == 'ratio')[value]
                    if ratio:
                        # Drop the non-codec baselines while keeping the
                        # sorted order (codec is level -2 of the remaining
                        # index).
                        it = it[~it.index.get_level_values(-2).isin(('tables', 'npy'))]
                    for rank, (idx, _cell) in enumerate(it.items(), start=1):
                        codec = "_".join(map(str, idx))
                        results[codec] = results.get(codec, 0) + rank
    return results
# Rank codecs twice (with and without the compression-ratio metric) and dump
# each ranking to CSV. The results file to load is given as argv[1].
# FIX: `DataFrame.sort()` was removed from pandas — sorting by index is now
# `sort_index()` (needed so `aggregate` can do MultiIndex `.loc` lookups),
# and sorting by the score column is `sort_values(0)`.
df = bau.load_results_file(sys.argv[1]).sort_index()
df_results = pd.DataFrame.from_dict(aggregate(df, ratio=True), orient='index').sort_values(0)
df_results.index.names = ('codec',)
df_results.columns = ('score',)
df_results.to_csv('aggregate_with_ratio.csv')

df = bau.load_results_file(sys.argv[1]).sort_index()
df_results = pd.DataFrame.from_dict(aggregate(df, ratio=False), orient='index').sort_values(0)
df_results.index.names = ('codec',)
df_results.columns = ('score',)
df_results.to_csv('aggregate_without_ratio.csv')
|
[
"benchmark_analysis_utils.load_results_file"
] |
[((1195, 1229), 'benchmark_analysis_utils.load_results_file', 'bau.load_results_file', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1216, 1229), True, 'import benchmark_analysis_utils as bau\n'), ((1444, 1478), 'benchmark_analysis_utils.load_results_file', 'bau.load_results_file', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1465, 1478), True, 'import benchmark_analysis_utils as bau\n')]
|
import os
import pandas as pd
# Preview the first *.json file found in the current working directory.
for file in os.listdir():
    if file.endswith("json"):
        # FIX: the loop matched the first json file but then read a
        # hard-coded "yelp_academic_dataset_review_41.json" regardless;
        # read the file that was actually found.
        reviews_data = pd.read_json(file)
        print(reviews_data.head())
        break
|
[
"os.listdir",
"pandas.read_json"
] |
[((43, 55), 'os.listdir', 'os.listdir', ([], {}), '()\n', (53, 55), False, 'import os\n'), ((110, 162), 'pandas.read_json', 'pd.read_json', (['"""yelp_academic_dataset_review_41.json"""'], {}), "('yelp_academic_dataset_review_41.json')\n", (122, 162), True, 'import pandas as pd\n')]
|
import sqlite3
import pomozne_fun
conn = sqlite3.connect("kosarka_turnir")
def najboljsi_na_turnirju():
    """Return "<first> <last>" of the player with the most total points
    in the tournament.

    NOTE(review): the original docstring claimed that ties are broken
    alphabetically, but the SQL only orders by total points — for tied
    players the result depends on SQLite's row order. Confirm whether an
    explicit alphabetical tie-break should be added to the ORDER BY.
    """
    sql = '''
    SELECT
        ime,
        priimek
    FROM (
        SELECT igralec,
            sum(točke) AS točke,
            sum(skoki) AS skoki,
            sum(podaje) AS podaje
        FROM statistika
        GROUP BY igralec
    )
    JOIN
    igralec ON igralec = igralec.id
    ORDER BY točke DESC;
    '''
    ime_prii = conn.execute(sql).fetchone()
    ime = ime_prii[0]
    priimek = ime_prii[1]
    return '{0} {1}'.format(ime, priimek)
def najboljsi_podajalec():
    """Return "<first> <last>" of the player with the most assists
    (``podaje``) over the whole tournament.

    For tied players the row SQLite returns first is used.
    """
    sql = '''
    SELECT ime,
        priimek
    FROM (
        SELECT igralec,
            sum(točke) AS točke,
            sum(skoki) AS skoki,
            sum(podaje) AS podaje
        FROM statistika
        GROUP BY igralec
    )
    JOIN
    igralec ON igralec = igralec.id
    ORDER BY podaje DESC;
    '''
    ime, priimek = conn.execute(sql).fetchone()
    return '{0} {1}'.format(ime, priimek)
def najboljsi_skakalec():
    """Return "<first> <last>" of the player with the most rebounds
    (``skoki``) over the whole tournament.

    For tied players the row SQLite returns first is used.
    """
    sql = '''
    SELECT ime,
        priimek
    FROM (
        SELECT igralec,
            sum(točke) AS točke,
            sum(skoki) AS skoki,
            sum(podaje) AS podaje
        FROM statistika
        GROUP BY igralec
    )
    JOIN
    igralec ON igralec = igralec.id
    ORDER BY skoki DESC;
    '''
    ime, priimek = conn.execute(sql).fetchone()
    return '{0} {1}'.format(ime, priimek)
def rezultati_tekem():
    """Return a dict of match results.

    Keys are the (home, away) team pairs produced by
    ``pomozne_fun.ekipi_from_tekma``; each value is a two-element list
    [home points, away points] summed from the per-player statistics.
    Matches with ids 1..28 are assumed (hard-coded range).
    """
    slovar_rez = dict()
    for i in range(28): # for every match, look up which two teams played it
        j = i+1
        slovar_rez[j] = list()
        sql = '''
            SELECT domači,
                gosti
            FROM tekma
            WHERE tekma.id = ?;
        '''
        for domači, gosti in conn.execute(sql, [j]): # sum how many points each team scored
            sql = '''
                SELECT SUM(statistika.točke)
                FROM tekma
                JOIN
                statistika ON tekma.id = statistika.tekma
                JOIN
                igralec ON statistika.igralec = igralec.id
                WHERE tekma.id = ? AND
                igralec.ekipa = ?;
            '''
            for točke in conn.execute(sql, [j, domači]): # points of the home team
                slovar_rez[j].append(točke[0])
            sql = '''
                SELECT SUM(statistika.točke)
                FROM tekma
                JOIN
                statistika ON tekma.id = statistika.tekma
                JOIN
                igralec ON statistika.igralec = igralec.id
                WHERE tekma.id = ? AND
                igralec.ekipa = ?;
            '''
            for točke in conn.execute(sql, [j, gosti]): # points of the away team
                slovar_rez[j].append(točke[0])
    koncani_sl = dict() # dict with the final keys
    for kl in slovar_rez.keys(): # translate match ids into (team, team) tuples
        koncani_sl[pomozne_fun.ekipi_from_tekma(kl)] = slovar_rez[kl]
    return koncani_sl # return the dict in the desired shape
def lestvica_rezultatov():
    """Return the league table as a dict ordered best-first.

    Keys are team names (via ``pomozne_fun.ekipa``) and values their
    league points: 3 per win, 1 per draw, 0 per loss, accumulated over
    matches 1..28 (hard-coded range).

    NOTE(review): the final ordering pass is a selection sort that zeroes
    each emitted team's score; a team genuinely on 0 points is
    indistinguishable from an already-emitted one, so ordering among
    0-point teams depends on dict iteration order — confirm intended.
    """
    slovar_rez = dict() # results dict: key is team id, value its league points (3 win / 1 draw / 0 loss)
    sql = '''
    SELECT id
    FROM ekipa;
    '''
    i = 1
    for _ in conn.execute(sql): # initialise every team's points to 0
        slovar_rez[i] = 0
        i += 1
    for i in range(28): # for every match, look up which two teams played it
        j = i+1
        sql = '''
            SELECT domači,
                gosti
            FROM tekma
            WHERE tekma.id = ?;
        '''
        for domači, gosti in conn.execute(sql, [j]): # sum each team's points to decide the winner
            sql = '''
                SELECT SUM(statistika.točke)
                FROM tekma
                JOIN
                statistika ON tekma.id = statistika.tekma
                JOIN
                igralec ON statistika.igralec = igralec.id
                WHERE tekma.id = ? AND
                igralec.ekipa = ?;
            '''
            for točke in conn.execute(sql, [j, domači]): # points of the home team
                domači_rez = točke
            sql = '''
                SELECT SUM(statistika.točke)
                FROM tekma
                JOIN
                statistika ON tekma.id = statistika.tekma
                JOIN
                igralec ON statistika.igralec = igralec.id
                WHERE tekma.id = ? AND
                igralec.ekipa = ?;
            '''
            for točke in conn.execute(sql, [j, gosti]): # points of the away team
                gosti_rez = točke
            if domači_rez > gosti_rez: # award the league points accordingly
                slovar_rez[domači] = slovar_rez[domači] + 3
            elif domači_rez == gosti_rez:
                slovar_rez[domači] = slovar_rez[domači] + 1
                slovar_rez[gosti] = slovar_rez[gosti] + 1
            elif domači_rez < gosti_rez:
                slovar_rez[gosti] = slovar_rez[gosti] + 3
    urejen_sl = {}
    for ključ in slovar_rez:
        maks = max(slovar_rez.values())
        for ključ2 in slovar_rez:
            if slovar_rez[ključ2] == maks:
                urejen_sl[pomozne_fun.ekipa(ključ2)] = maks
                slovar_rez[ključ2] = 0 # reset so this team is not picked again
                break
    return urejen_sl # ordered dict: team names mapped to their final league points
def povprečja():
    """Return per-player averages across the tournament.

    Each row is (first name, last name, team name, avg points, avg
    rebounds, avg assists), with the averages rounded to whole numbers.
    """
    sql = '''
    SELECT igralec.ime,
        priimek,
        ekipa.ime,
        round(avg(točke), 0),
        round(avg(skoki), 0),
        round(avg(podaje), 0)
    FROM (
        statistika
        JOIN
        igralec ON igralec.id = statistika.igralec
    )
    JOIN
    ekipa ON igralec.ekipa = ekipa.id
    GROUP BY igralec;
    '''
    return conn.execute(sql).fetchall()
def seznam_ekip():
    """Return the list of all team names."""
    sql = '''
    SELECT ime
    FROM ekipa;
    '''
    return [vrstica[0] for vrstica in conn.execute(sql)]
def seznam_igralcev(ekipa1, ekipa2):
    """Return the players who appeared in the match between the two named
    teams (in either home/away order).

    Each entry is the string "<first>_<last>_<away id>_<home id>".
    """
    sql1 = '''
    SELECT id
    FROM ekipa
    WHERE ime = ?;
    '''
    sql2 = '''
    SELECT igralec.ime,
        igralec.priimek,
        tekma.gosti,
        tekma.domači
    FROM tekma
    JOIN
    statistika ON tekma.id = statistika.tekma
    JOIN
    igralec ON statistika.igralec = igralec.id
    WHERE (domači = ? AND
        gosti = ?) OR
        (domači = ? AND
        gosti = ?);
    '''
    # Resolve both team names to their ids.
    id1 = conn.execute(sql1, [ekipa1]).fetchone()[0]
    id2 = conn.execute(sql1, [ekipa2]).fetchone()[0]
    # Collect every player who has statistics recorded for that match.
    return [
        ime + '_' + priimek + '_' + str(gosti) + '_' + str(domači)
        for ime, priimek, gosti, domači in conn.execute(sql2, [id1, id2, id2, id1])
    ]
def spremeni_ime_ekipe(ime, novo_ime):
    """Rename the team called *ime* to *novo_ime*.

    The UPDATE is wrapped in an explicit BEGIN/COMMIT issued through
    ``conn.execute``. Returns None.
    """
    sql1 ='''
    BEGIN TRANSACTION;
    '''
    sql2 = '''
    UPDATE ekipa
    SET ime = ?
    WHERE ime = ?
    '''
    sql3 ='''
    COMMIT TRANSACTION;
    '''
    conn.execute(sql1)
    conn.execute(sql2, [novo_ime, ime])
    conn.execute(sql3)
    return None
def spremeni_statistiko_igralca(ime, priimek, id1, id2, tocke, podaje, skoki):
    """Overwrite a player's statistics for the match between the teams
    with ids *id1* and *id2* (in either home/away order).

    The player is identified by first/last name; their points, assists
    and rebounds for that match are set to *tocke*, *podaje*, *skoki*.
    The UPDATE runs inside an explicit BEGIN/COMMIT. Returns None.
    """
    sql1 = '''
    SELECT id
    FROM tekma
    WHERE (domači = ? AND
        gosti = ?) OR
        (domači = ? AND
        gosti = ?);
    '''
    id_tekme = conn.execute(sql1, [id1, id2, id2, id1]).fetchone()
    id_tekme = id_tekme[0]
    sql2 = '''
    SELECT id
    FROM igralec
    WHERE ime = ? AND
        priimek = ?;
    '''
    id_igralca = conn.execute(sql2, [ime, priimek]).fetchone()
    id_igralca = id_igralca[0]
    sql3 ='''
    BEGIN TRANSACTION;
    '''
    sql4 = '''
    UPDATE statistika
    SET točke = ?,
        podaje = ?,
        skoki = ?
    WHERE igralec = ? AND
        tekma = ?;
    '''
    sql5 ='''
    COMMIT TRANSACTION;
    '''
    conn.execute(sql3)
    conn.execute(sql4, [tocke, podaje, skoki, id_igralca, id_tekme])
    conn.execute(sql5)
    return None
def rezultat_igralca_na_tekmi(ime, priimek, id1, id2):
    """Return [first, last, points, assists, rebounds, team1, team2] for
    the named player in the match between teams *id1* and *id2*.

    Returns the empty list when the player has no statistics for that
    match. NOTE(review): if the query yields several rows, the loop below
    keeps only the last one — confirm at most one row is expected.
    """
    sql = '''
    SELECT igralec.ime,
        igralec.priimek,
        statistika.točke,
        statistika.podaje,
        statistika.skoki
    FROM tekma
    JOIN
    statistika ON tekma.id = statistika.tekma
    JOIN
    igralec ON statistika.igralec = igralec.id
    WHERE igralec.ime = ? AND
        igralec.priimek = ? AND
        ( (tekma.domači = ? AND
            tekma.gosti = ?) OR
            (tekma.domači = ? AND
            tekma.gosti = ?) );
    '''
    podatki = [] # elements in order: first, last, points, assists, rebounds, team1, team2
    for ime, priimek, točke, podaje, skoki in conn.execute(sql, [ime, priimek, id1, id2, id2, id1]): # look up the player's match statistics
        ekipa1 = pomozne_fun.ekipa(id1)
        ekipa2 = pomozne_fun.ekipa(id2)
        podatki = [ime, priimek, točke, podaje, skoki, ekipa1, ekipa2]
    return podatki
conn.execute('VACUUM')
|
[
"sqlite3.connect",
"pomozne_fun.ekipi_from_tekma",
"pomozne_fun.ekipa"
] |
[((41, 74), 'sqlite3.connect', 'sqlite3.connect', (['"""kosarka_turnir"""'], {}), "('kosarka_turnir')\n", (56, 74), False, 'import sqlite3\n'), ((11402, 11424), 'pomozne_fun.ekipa', 'pomozne_fun.ekipa', (['id1'], {}), '(id1)\n', (11419, 11424), False, 'import pomozne_fun\n'), ((11442, 11464), 'pomozne_fun.ekipa', 'pomozne_fun.ekipa', (['id2'], {}), '(id2)\n', (11459, 11464), False, 'import pomozne_fun\n'), ((4110, 4142), 'pomozne_fun.ekipi_from_tekma', 'pomozne_fun.ekipi_from_tekma', (['kl'], {}), '(kl)\n', (4138, 4142), False, 'import pomozne_fun\n'), ((6644, 6669), 'pomozne_fun.ekipa', 'pomozne_fun.ekipa', (['ključ2'], {}), '(ključ2)\n', (6661, 6669), False, 'import pomozne_fun\n')]
|
import weakref
import gc
from kivy.uix.screenmanager import WipeTransition, FadeTransition
from mpfmc.config_players.slide_player import McSlidePlayer
from mpfmc.tests.MpfMcTestCase import MpfMcTestCase
from mpfmc.transitions.move_in import MoveInTransition
from mpf.tests.MpfTestCase import MpfTestCase
import mpfmc.core
import os
class TestSlidePlayer(MpfMcTestCase):
    def get_machine_path(self):
        """Return the machine folder used by this test case."""
        return 'tests/machine_files/slide_player'
    def get_config_file(self):
        """Return the machine config file used by this test case."""
        return 'test_slide_player.yaml'
    def test_slide_on_default_display(self):
        """A slide_player event shows its slide on the default display,
        and a second slide at the same priority replaces it."""
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        # now replace that slide at the same priority and make sure it works
        self.mc.events.post('show_slide_4')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_4')
    def test_slide_on_default_display_hardcoded(self):
        """A slide whose target is hard-coded in config lands on display1."""
        self.mc.events.post('show_slide_2')
        self.advance_time()
        self.assertEqual(self.mc.displays['display1'].current_slide_name,
                         'machine_slide_2')
    def test_animation(self):
        """An animated slide shows, removes cleanly, and is garbage
        collected once replaced (no lingering references)."""
        self.mc.events.post("show_slide_with_animations")
        self.advance_time()
        self.assertEqual(self.mc.displays['display1'].current_slide_name,
                         'my_slide')
        # weakref lets us assert the slide object is actually freed later
        slide = weakref.ref(self.mc.targets['display1'].current_slide)
        self.assertTrue(slide())
        self.mc.events.post("remove_slide_with_animations")
        self.advance_time()
        self.assertEqual(self.mc.displays['display1'].current_slide_name,
                         'display1_blank')
        self.mc.events.post('show_slide_1')
        self.advance_time()
        gc.collect()
        self.advance_time()
        self.assertFalse(slide())
    def test_slide_on_second_display(self):
        """A slide targeted at display2 shows up there."""
        self.mc.events.post('show_slide_3')
        self.advance_time()
        self.assertEqual(self.mc.displays['display2'].current_slide_name,
                         'machine_slide_3')
    def test_priority_from_slide_player(self):
        """A priority set in the slide_player entry is applied to the slide."""
        self.mc.events.post('show_slide_4_p200')
        self.advance_time()
        self.assertEqual(self.mc.displays['display1'].current_slide_name,
                         'machine_slide_4')
        self.assertEqual(self.mc.displays['display1'].current_slide.priority,
                         200)
    def test_force_slide(self):
        """A forced slide replaces the current one even at lower priority."""
        self.mc.events.post('show_slide_4_p200')
        self.advance_time()
        self.assertEqual(self.mc.displays['display1'].current_slide_name,
                         'machine_slide_4')
        self.assertEqual(self.mc.displays['display1'].current_slide.priority,
                         200)
        self.mc.events.post('show_slide_1_force')
        self.advance_time()
        self.assertEqual(self.mc.displays['display1'].current_slide_name,
                         'machine_slide_1')
        self.assertEqual(self.mc.displays['display1'].current_slide.priority,
                         0)
    def test_dont_show_slide(self):
        """A slide built but flagged not to show leaves the current slide up,
        even at higher priority."""
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        self.assertEqual(self.mc.displays['display1'].current_slide.priority,
                         0)

        # request a higher priority slide, but don't show it
        self.mc.events.post('show_slide_5_dont_show')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        self.assertEqual(self.mc.displays['display1'].current_slide.priority,
                         0)
    def test_mode_slide_player(self):
        """Mode slide_player entries only act while the mode runs, their
        slides are removed (and freed) on mode stop, and mode slides
        respect priority against machine-wide slides."""
        # set a baseline slide
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        # post the slide_player event from the mode. Should not show the slide
        # since the mode is not running
        self.mc.events.post('show_mode1_slide')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        # start the mode and then post that event again. The slide should
        # switch
        self.mc.modes['mode1'].start()
        self.mc.events.post('show_mode1_slide')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'mode1_slide')

        # weakref so we can verify the mode slide is freed after mode stop
        slide = weakref.ref(self.mc.targets['display1'].current_slide)
        self.assertTrue(slide())

        # stop the mode and make sure the slide is removed
        num_slides = len(self.mc.targets['display1'].slides)
        self.mc.modes['mode1'].stop()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        self.assertEqual(len(self.mc.targets['display1'].slides),
                         num_slides - 1)
        gc.collect()
        self.assertFalse(slide())

        # post the slide_player event from the mode. Should not show the slide
        # since the mode is not running
        self.mc.events.post('show_mode1_slide')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        # show a priority 200 slide from the machine config
        self.mc.events.post('show_slide_4_p200')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_4')
        self.assertEqual(self.mc.targets['display1'].current_slide.priority,
                         200)

        # start the mode again (priority 500)
        self.mc.modes['mode1'].start()

        # show a slide, but priority 150 which means the slide will not be
        # shown
        self.mc.events.post('show_mode1_slide_2')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_4')
        self.assertEqual(self.mc.targets['display1'].current_slide.priority,
                         200)

        # now kill the current slide and the mode slide should show
        self.mc.targets['display1'].remove_slide('machine_slide_4')
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'mode1_slide_2')
        self.assertEqual(self.mc.targets['display1'].current_slide.priority,
                         150)
    def test_from_show_via_bcp(self):
        """A slides_play trigger arriving over BCP is validated and played
        without error."""
        from mpf.core.bcp.bcp_socket_client import encode_command_string

        show_slide_section = dict()
        show_slide_section['widgets'] = list()
        show_slide_section['widgets'].append(dict(
            type='text', text='TEST FROM SHOW'))
        player = McSlidePlayer(self.mc)
        # run the section through the same validation a real show would get
        show_slide_section = player._validate_config_item('slide1', show_slide_section)
        bcp_string = encode_command_string('trigger', name='slides_play', context='test_context', priority=1,
                                           settings=show_slide_section)
        self.mc.bcp_processor.receive_bcp_message(bcp_string)
        self.advance_time()
    def test_slides_created_in_slide_player(self):
        """Anonymous slides defined inline in the slide_player show, and a
        replaced anonymous slide is garbage collected."""
        # Anon slides are where the widgets are listed in the slide_player
        # section of a config file or the slides section of a show
        self.mc.events.post('anon_slide_dict')
        self.advance_time()

        self.mc.events.post('anon_slide_list')
        self.advance_time()

        self.mc.events.post('anon_slide_widgets')
        self.advance_time()
        # weakref so we can verify the replaced slide is freed below
        slide = weakref.ref(self.mc.targets['display1'].current_slide)
        self.assertTrue(slide())

        self.mc.events.post('anon_slide_widgets2')
        self.advance_time(1)
        slide2 = weakref.ref(self.mc.targets['display1'].current_slide)
        gc.collect()
        self.assertFalse(slide())
        self.assertTrue(slide2())
    def test_expire_in_slide(self):
        """A slide with an expire time configured on the slide itself
        reverts to the previous slide when the time elapses."""
        # tests that slide expire time works when configured in a slide
        self.mc.events.post('base_slide_no_expire')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_6')

        self.mc.events.post('show_slide_7')  # expire 1s
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_7')

        self.advance_time(1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_6')
    def test_expire_in_slide_player(self):
        """An expire time configured in the slide_player entry (not the
        slide) also reverts to the previous slide when it elapses."""
        # tests that expire time works when configured in the slide player
        self.mc.events.post('base_slide_no_expire')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_6')

        self.mc.events.post('new_slide_expire')  # expire 1s
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        self.advance_time(1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_6')
    def test_expire_with_transition_out_in_slide(self):
        """An expiring slide with a transition_out plays that transition
        before the previous slide returns."""
        # Tests a slide expiring where the expiring slide has a transition
        self.mc.events.post('base_slide_no_expire')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_6')

        # show a slide which expires in 1 sec, and has a transition out set
        self.mc.events.post('show_slide_8')
        self.advance_time(.1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_8')

        # advance to after this slide_8 expired, transition should be in effect
        self.advance_time(1)
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   WipeTransition))

        # advance to transition done, should be back to the original slide
        self.advance_time(1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_6')
    def test_current_slide_transition_out(self):
        """When the incoming slide has no transition, the outgoing slide's
        transition_out is used."""
        # Tests a new slide with no transition, but the current slide has one,
        # so it uses that

        # show a slide, no expire, but with transition out
        self.mc.events.post('show_slide_9')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_9')

        # show a new slide with no transition
        self.assertIsNone(self.mc.slides['machine_slide_6']['transition'])
        self.mc.events.post('machine_slide_6')
        self.advance_time()

        # transition from first slide should be happening
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   MoveInTransition))
    def test_both_slides_transitions(self):
        """When both slides define transitions, the incoming slide's
        transition wins over the outgoing slide's transition_out."""
        # current slide has transition out, and new slide has transition, so
        # transition of new slide takes precendence

        # show a slide, no expire, but with transition out
        self.assertEqual(
            self.mc.slides['machine_slide_8']['transition_out']['type'],
            'wipe')
        self.mc.events.post('show_slide_8')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_8')

        # show a new slide with a different transition in
        self.assertEqual(
            self.mc.slides['machine_slide_9']['transition']['type'], 'move_in')
        self.mc.events.post('show_slide_9')
        self.advance_time()

        # transition from second slide should be happening
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   MoveInTransition))
    def test_transition_in_slide_player(self):
        """A transition configured only in the slide_player entry is used
        for a slide that has none of its own."""
        # transition is specified in slide player for slide that does not have
        # transition

        # show a base slide with no transition
        self.assertIsNone(self.mc.slides['machine_slide_4']['transition'])
        self.mc.events.post('machine_slide_4')
        self.advance_time()

        # show a second slide where the slide has no transition, but the
        # slide player does have a transition
        self.assertIsNone(self.mc.slides['machine_slide_5']['transition'])
        self.mc.events.post('show_slide_5_with_transition')
        self.advance_time()

        # make sure the transition is happening
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   FadeTransition))
    def test_transition_in_slide_player_override(self):
        """A transition in the slide_player entry overrides the transition
        configured on the slide itself."""
        # transition in slide player for slide that already has a transition.
        # the slide player transition should override the slide one

        # show a base slide with no transition
        self.assertIsNone(self.mc.slides['machine_slide_4']['transition'])
        self.mc.events.post('machine_slide_4')
        self.advance_time()

        # show a second slide where the slide has a transition, but the
        # slide player has a different transition, so the slide player
        # should take precedence
        self.assertEqual(
            self.mc.slides['machine_slide_9']['transition']['type'], 'move_in')
        self.mc.events.post('show_slide_5_with_transition')
        self.advance_time()

        # make sure the transition from the slide player is happening
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   FadeTransition))
    def test_slide_show(self):
        """The slide_player 'show' flag set to False builds the slide
        without displaying it."""
        # tests the 'show' feature of a slide. This is not a slide show, but
        # rather a setting which controls whether a slide is shown right away
        # or not

        # show a base slide
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        # post new slide, but with show=False, so it should not show
        self.mc.events.post('slide_2_dont_show')
        self.advance_time()

        # Should still be slide 1
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
    def test_slide_removal(self):
        """Removing a slide restores the previous one and fires the
        created/active/removed slide events exactly once each."""
        # Also test slide events
        self.mock_event('slide_machine_slide_1_active')
        self.mock_event('slide_machine_slide_1_created')
        self.mock_event('slide_machine_slide_1_removed')
        self.mock_event('slide_machine_slide_4_active')
        self.mock_event('slide_machine_slide_4_created')
        self.mock_event('slide_machine_slide_4_removed')

        # show a base slide
        self.mc.events.post('show_slide_1')
        self.advance_time(0.3)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        self.assertEventCalled('slide_machine_slide_1_created')
        self.assertEventCalled('slide_machine_slide_1_active')
        self.assertEventNotCalled('slide_machine_slide_1_removed')
        self.assertEventNotCalled('slide_machine_slide_4_created')
        self.assertEventNotCalled('slide_machine_slide_4_active')
        self.assertEventNotCalled('slide_machine_slide_4_removed')

        # show another slide
        self.mc.events.post('show_slide_4')
        self.advance_time(0.3)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_4')

        self.assertEventCalled('slide_machine_slide_1_created', 1)
        self.assertEventCalled('slide_machine_slide_1_active', 1)
        self.assertEventNotCalled('slide_machine_slide_1_removed')
        self.assertEventCalled('slide_machine_slide_4_created', 1)
        self.assertEventCalled('slide_machine_slide_4_active', 1)
        self.assertEventNotCalled('slide_machine_slide_4_removed')

        # make sure base slide comes back
        self.mc.events.post('remove_slide_4')
        self.advance_time(0.3)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')

        self.assertEventCalled('slide_machine_slide_1_created', 1)
        self.assertEventCalled('slide_machine_slide_1_active', 2)
        self.assertEventNotCalled('slide_machine_slide_1_removed')
        self.assertEventCalled('slide_machine_slide_4_created', 1)
        self.assertEventCalled('slide_machine_slide_4_active', 1)
        self.assertEventCalled('slide_machine_slide_4_removed', 1)
    def test_slide_removal_new_transition(self):
        """Remove a slide (which has no transition_out of its own) via an
        event that supplies a transition, and verify that transition is
        used while the previous slide returns."""
        # show a base slide
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        # show a slide with not transition out
        self.assertIsNone(self.mc.slides['machine_slide_4']['transition_out'])
        self.mc.events.post('show_slide_4')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_4')
        # remove that slide with a transition
        self.mc.events.post('remove_slide_4_with_transition')
        self.advance_time(.1)
        # make sure the transition is taking effect
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   WipeTransition))
        # original slide is back
        self.advance_time(1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
    def test_slide_removal_existing_transition(self):
        """Remove a slide via an event with no transition of its own, and
        verify the slide's configured transition_out ('wipe') is used."""
        # show a base slide
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        # show a slide which has a transition out
        self.assertEqual(
            self.mc.slides['machine_slide_8']['transition_out']['type'],
            'wipe')
        self.mc.events.post('show_slide_8')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_8')
        # post an event which does not have a transition
        self.mc.events.post('remove_slide_8')
        self.advance_time(.1)
        # make sure the transition is taking effect
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   WipeTransition))
        # original slide is back
        self.advance_time(1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
    def test_slide_removal_override_transition(self):
        """A transition supplied by the removal event ('fade') overrides
        the slide's own configured transition_out ('wipe')."""
        # show a base slide
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        # show a slide which has a wipe transition
        self.assertEqual(
            self.mc.slides['machine_slide_8']['transition_out']['type'],
            'wipe')
        self.mc.events.post('show_slide_8')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_8')
        # remove slide with a fade transition
        self.mc.events.post('remove_slide_8_fade')
        self.advance_time(.1)
        # make sure it uses the fade transition from the slide player
        self.assertTrue(isinstance(self.mc.targets['display1'].transition,
                                   FadeTransition))
        # original slide should be back
        self.advance_time(1)
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
    def test_removing_last_slide(self):
        """Removing the only remaining slide falls back to the display's
        built-in blank slide rather than leaving the target empty."""
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'machine_slide_1')
        self.advance_time()
        self.mc.targets['default'].remove_slide('machine_slide_1')
        self.advance_time()
        # blank placeholder slide is now the only screen on the target
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'display1_blank')
        self.assertEqual(1, len(self.mc.targets['default'].screens))
    def test_expire_non_current_slide(self):
        """A slide's expiry timer keeps running even while it is covered
        by a newer slide, so both expire and the blank slide remains."""
        self.mc.events.post('slide1_expire_1s')
        self.advance_time(.1)
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'machine_slide_1')
        # show slide 2 which should expire in 1s
        self.mc.events.post('slide2_expire_1s')
        self.advance_time(.1)
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'machine_slide_2')
        self.advance_time(1)
        # should be back to blank, because slide1 expired while slide 2 was up
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'display1_blank')
        self.assertEqual(1, len(self.mc.targets['default'].screens))
    def test_remove_already_removed_slide(self):
        """Calling remove() on a slide that already expired must be a
        harmless no-op (no crash, no state change)."""
        self.mc.events.post('slide1_expire_1s')
        self.advance_time(.1)
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'machine_slide_1')
        # grab a reference to this slide
        slide1 = self.mc.targets['default'].current_slide
        self.advance_time(1)
        # should be blank, because slide1 expired
        self.assertEqual(self.mc.targets['default'].current_slide_name,
                         'display1_blank')
        self.assertEqual(1, len(self.mc.targets['default'].screens))
        # now try to call the now-gone slide's remove
        slide1.remove()
        self.advance_time()
    def test_animation_triggers(self):
        """Animation trigger events are registered with BCP only after a
        client connects (register_trigger commands are sent then)."""
        bcp_command1 = ('register_trigger', None, {'event': 'flash_widget_1'})
        bcp_command2 = ('register_trigger', None, {'event': 'flash_widget_2'})
        # not registered yet before the client connects
        self.assertNotIn(bcp_command1, self.sent_bcp_commands)
        self.assertNotIn(bcp_command2, self.sent_bcp_commands)
        self.mc.events.post("client_connected")
        self.advance_time()
        self.assertIn(bcp_command1, self.sent_bcp_commands)
        self.assertIn(bcp_command2, self.sent_bcp_commands)
    def test_play_multiple_times(self):
        """Re-posting a slide's show event repeatedly must not leak slide
        instances: after replaying, only the current slide object stays
        alive (checked via weakref + explicit gc)."""
        # set a baseline slide
        self.mc.events.post('show_slide_1')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name,
                         'machine_slide_1')
        # start the mode and then post that event again. The slide should
        # switch
        self.mc.modes['mode1'].start()
        self.mc.events.post('show_mode1_slide')
        self.advance_time()
        self.assertEqual(self.mc.targets['display1'].current_slide_name, 'mode1_slide')
        slide = weakref.ref(self.mc.targets['display1'].current_slide)
        for i in range(10):
            self.mc.events.post('show_mode1_slide')
            self.advance_time()
            self.assertEqual(self.mc.targets['display1'].current_slide_name, 'mode1_slide')
        # run garbage collector
        gc.collect()
        # weak ref to the slide should be none
        self.assertIsNone(slide())
        # build weak ref to current slide
        slide = weakref.ref(self.mc.targets['display1'].current_slide)
class TestMpfSlidePlayer(MpfTestCase):
    # runs the MPF tests (and not the MPF-MC ones) to test the MPF side of the
    # slide player plugin
    def __init__(self, methodName):
        super().__init__(methodName)
        # remove config patch which disables bcp -- these tests need BCP
        # enabled so MPF can talk to the media controller side
        del self.machine_config_patches['bcp']
    def getAbsoluteMachinePath(self):
        # override the base so we build the path relative to the mpfmc
        # package location (the machine files live in the mpfmc repo)
        return os.path.abspath(os.path.join(
            mpfmc.core.__path__[0], os.pardir, self.getMachinePath()))
    def get_enable_plugins(self):
        # load MPF plugins (including the slide_player plugin under test)
        return True
    def getConfigFile(self):
        return 'test_slide_player.yaml'
    def getMachinePath(self):
        return 'tests/machine_files/slide_player/'
    # TODO: add tests
|
[
"gc.collect",
"weakref.ref",
"mpf.core.bcp.bcp_socket_client.encode_command_string",
"mpfmc.config_players.slide_player.McSlidePlayer"
] |
[((1521, 1575), 'weakref.ref', 'weakref.ref', (["self.mc.targets['display1'].current_slide"], {}), "(self.mc.targets['display1'].current_slide)\n", (1532, 1575), False, 'import weakref\n'), ((1897, 1909), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1907, 1909), False, 'import gc\n'), ((4804, 4858), 'weakref.ref', 'weakref.ref', (["self.mc.targets['display1'].current_slide"], {}), "(self.mc.targets['display1'].current_slide)\n", (4815, 4858), False, 'import weakref\n'), ((5283, 5295), 'gc.collect', 'gc.collect', ([], {}), '()\n', (5293, 5295), False, 'import gc\n'), ((7159, 7181), 'mpfmc.config_players.slide_player.McSlidePlayer', 'McSlidePlayer', (['self.mc'], {}), '(self.mc)\n', (7172, 7181), False, 'from mpfmc.config_players.slide_player import McSlidePlayer\n'), ((7292, 7413), 'mpf.core.bcp.bcp_socket_client.encode_command_string', 'encode_command_string', (['"""trigger"""'], {'name': '"""slides_play"""', 'context': '"""test_context"""', 'priority': '(1)', 'settings': 'show_slide_section'}), "('trigger', name='slides_play', context='test_context',\n priority=1, settings=show_slide_section)\n", (7313, 7413), False, 'from mpf.core.bcp.bcp_socket_client import encode_command_string\n'), ((7986, 8040), 'weakref.ref', 'weakref.ref', (["self.mc.targets['display1'].current_slide"], {}), "(self.mc.targets['display1'].current_slide)\n", (7997, 8040), False, 'import weakref\n'), ((8172, 8226), 'weakref.ref', 'weakref.ref', (["self.mc.targets['display1'].current_slide"], {}), "(self.mc.targets['display1'].current_slide)\n", (8183, 8226), False, 'import weakref\n'), ((8236, 8248), 'gc.collect', 'gc.collect', ([], {}), '()\n', (8246, 8248), False, 'import gc\n'), ((23714, 23768), 'weakref.ref', 'weakref.ref', (["self.mc.targets['display1'].current_slide"], {}), "(self.mc.targets['display1'].current_slide)\n", (23725, 23768), False, 'import weakref\n'), ((24022, 24034), 'gc.collect', 'gc.collect', ([], {}), '()\n', (24032, 24034), False, 'import gc\n'), ((24190, 24244), 
'weakref.ref', 'weakref.ref', (["self.mc.targets['display1'].current_slide"], {}), "(self.mc.targets['display1'].current_slide)\n", (24201, 24244), False, 'import weakref\n')]
|
from distutils.core import setup, Extension
import sys

# C extension module implementing the cuckoo filter.
module1 = Extension('bsvcuckoo',
                    include_dirs=['include'],
                    sources=["src/cuckoo_filter.c", "src/cuckoo_python.c"],
                    # https://cibuildwheel.readthedocs.io/en/stable/faq/#windows-importerror-dll-load-failed-the-specific-module-could-not-be-found
                    extra_compile_args=['/d2FH4-'] if sys.platform == 'win32' else [])

# BUG FIX: read the README with a context manager so the file handle is
# closed deterministically (the original `open('README.md').read()` inside
# the setup() call leaked the handle / raised ResourceWarning).
with open('README.md', 'r') as readme:
    long_description = readme.read()

setup(name='bsvcuckoo',
      version='1.3',
      description='A cuckoo filter implementation.',
      author='<NAME>',
      author_email='<EMAIL>',
      url='https://github.com/electrumsv/libcuckoofilter',
      long_description=long_description,
      long_description_content_type='text/markdown',
      license='MIT Licence',
      # This warns about no `__init__.py` file but seems to install workable types.
      packages=['bsvcuckoo-stubs'],
      package_data={"bsvcuckoo-stubs": ['__init__.pyi']},
      # The actual package.
      ext_modules=[ module1 ])
|
[
"distutils.core.Extension"
] |
[((66, 247), 'distutils.core.Extension', 'Extension', (['"""bsvcuckoo"""'], {'include_dirs': "['include']", 'sources': "['src/cuckoo_filter.c', 'src/cuckoo_python.c']", 'extra_compile_args': "(['/d2FH4-'] if sys.platform == 'win32' else [])"}), "('bsvcuckoo', include_dirs=['include'], sources=[\n 'src/cuckoo_filter.c', 'src/cuckoo_python.c'], extra_compile_args=[\n '/d2FH4-'] if sys.platform == 'win32' else [])\n", (75, 247), False, 'from distutils.core import setup, Extension\n')]
|
from importer import *
import sys, os
sys.path.append('/usr/data/minhas/zpace/stellarmass_pca')
import read_results as from_pca
pca_basedir = '/usr/data/minhas2/zpace/CSPs/CSPs_CKC14_MaNGA_20181026-1'
import numpy as np
import matplotlib.pyplot as plt
import pymc3
import manga_tools as m
import metallicity
import pi_grid
import abundances
from extinction import fitzpatrick99
from astropy.cosmology import WMAP9 as cosmo
cloudy_fsps_grid = pi_grid.load_CloudyFSPS_grid(
linenames_fname='./data/cloudyFSPS/linenames.dat',
data_fname='./data/cloudyFSPS/ZAU_ND_mist.lines',
yaml_cfg_fname='./data/cloudyFSPS/cloudyFSPS.yaml',
elines_tab_key='CloudyFSPS-name', elines_table=pi_grid.elines_table,
lines_used=pi_grid.default_lines)
cloudy_fsps_grid.learnspace_GP()
elines = pi_grid.elines_table.copy()
elines.add_index('name')
line_ls = elines.loc[cloudy_fsps_grid.observable_names]['lvac']
ntest = 1
'''
logZ_real = np.random.uniform(*cloudy_fsps_grid.range('logZ'), ntest)
logU_real = np.random.uniform(*cloudy_fsps_grid.range('logU'), ntest)
age_real = np.random.uniform(*cloudy_fsps_grid.range('Age'), ntest)
AV_real = np.random.exponential(1., ntest)
logQH_real = np.random.uniform(48.5, 51., ntest)
linelums_real = 10.**logQH_real[:, None] * cloudy_fsps_grid.predict(
np.stack([logZ_real, logU_real, age_real], axis=0))
extinction_at_AV1 = fitzpatrick99(wave=line_ls, a_v=1., r_v=3.1)
A_lambda = np.outer(AV_real, extinction_at_AV1)
atten = 10.**(-0.4 * A_lambda)
zdist = .0155
distmod = (4. * np.pi * cosmo.luminosity_distance(zdist)**2.).to('cm2').value
linefluxes_real = linelums_real * atten / distmod / 1.0e-17
snr = np.random.uniform(2., 50., linefluxes_real.shape)
real_unc = linefluxes_real / snr
unc_factor = np.e
linefluxes_noise = real_unc * np.random.randn(*linefluxes_real.shape)
linefluxes_obs = linefluxes_real + linefluxes_noise
obs_unc = real_unc / unc_factor
mask_obs = np.any(linefluxes_obs < 0., axis=1)
print(linefluxes_obs.shape)
'''
'''
fakemodel, faketrace = metallicity.find_ism_params(
grid=cloudy_fsps_grid, dustlaw=fitzpatrick99,
line_obs=[linefluxes_obs[~mask_obs], obs_unc[~mask_obs], mask_obs[~mask_obs]],
line_ls=line_ls, drpall_row={'nsa_zdist': zdist})
'''
#####
drpall = m.load_drpall(metallicity.mpl_v)
drpall.add_index('plateifu')
drpall_row = drpall.loc['9497-9101']
plate, ifu = drpall_row['plateifu'].split('-')
el = metallicity.Elines.DAP_from_plateifu(
plate, ifu, mpl_v, 'SPX-GAU-MILESHC', data_colname='MPL-6-name',
lines_used=cloudy_fsps_grid.observable_names, elines_table=elines)
pcares = from_pca.PCAOutput.from_plateifu(
basedir=os.path.join(pca_basedir, 'results'), plate=plate, ifu=ifu)
#'''
model, trace, f, unc, Rreff = metallicity.find_ism_params(
grid=cloudy_fsps_grid, dustlaw=fitzpatrick99,
obs=el, pca_result=pcares, line_ls=line_ls, drpall_row=drpall_row,
nrad=5, m_at_rad=3, rlim=[0.5, 2.])
#'''
model.profile(model.logpt).summary()
|
[
"sys.path.append",
"pi_grid.elines_table.copy",
"metallicity.Elines.DAP_from_plateifu",
"metallicity.find_ism_params",
"pi_grid.load_CloudyFSPS_grid",
"manga_tools.load_drpall",
"os.path.join"
] |
[((39, 96), 'sys.path.append', 'sys.path.append', (['"""/usr/data/minhas/zpace/stellarmass_pca"""'], {}), "('/usr/data/minhas/zpace/stellarmass_pca')\n", (54, 96), False, 'import sys, os\n'), ((448, 751), 'pi_grid.load_CloudyFSPS_grid', 'pi_grid.load_CloudyFSPS_grid', ([], {'linenames_fname': '"""./data/cloudyFSPS/linenames.dat"""', 'data_fname': '"""./data/cloudyFSPS/ZAU_ND_mist.lines"""', 'yaml_cfg_fname': '"""./data/cloudyFSPS/cloudyFSPS.yaml"""', 'elines_tab_key': '"""CloudyFSPS-name"""', 'elines_table': 'pi_grid.elines_table', 'lines_used': 'pi_grid.default_lines'}), "(linenames_fname=\n './data/cloudyFSPS/linenames.dat', data_fname=\n './data/cloudyFSPS/ZAU_ND_mist.lines', yaml_cfg_fname=\n './data/cloudyFSPS/cloudyFSPS.yaml', elines_tab_key='CloudyFSPS-name',\n elines_table=pi_grid.elines_table, lines_used=pi_grid.default_lines)\n", (476, 751), False, 'import pi_grid\n'), ((798, 825), 'pi_grid.elines_table.copy', 'pi_grid.elines_table.copy', ([], {}), '()\n', (823, 825), False, 'import pi_grid\n'), ((2258, 2290), 'manga_tools.load_drpall', 'm.load_drpall', (['metallicity.mpl_v'], {}), '(metallicity.mpl_v)\n', (2271, 2290), True, 'import manga_tools as m\n'), ((2410, 2586), 'metallicity.Elines.DAP_from_plateifu', 'metallicity.Elines.DAP_from_plateifu', (['plate', 'ifu', 'mpl_v', '"""SPX-GAU-MILESHC"""'], {'data_colname': '"""MPL-6-name"""', 'lines_used': 'cloudy_fsps_grid.observable_names', 'elines_table': 'elines'}), "(plate, ifu, mpl_v, 'SPX-GAU-MILESHC',\n data_colname='MPL-6-name', lines_used=cloudy_fsps_grid.observable_names,\n elines_table=elines)\n", (2446, 2586), False, 'import metallicity\n'), ((2740, 2926), 'metallicity.find_ism_params', 'metallicity.find_ism_params', ([], {'grid': 'cloudy_fsps_grid', 'dustlaw': 'fitzpatrick99', 'obs': 'el', 'pca_result': 'pcares', 'line_ls': 'line_ls', 'drpall_row': 'drpall_row', 'nrad': '(5)', 'm_at_rad': '(3)', 'rlim': '[0.5, 2.0]'}), '(grid=cloudy_fsps_grid, dustlaw=fitzpatrick99,\n obs=el, pca_result=pcares, 
line_ls=line_ls, drpall_row=drpall_row, nrad\n =5, m_at_rad=3, rlim=[0.5, 2.0])\n', (2767, 2926), False, 'import metallicity\n'), ((2644, 2680), 'os.path.join', 'os.path.join', (['pca_basedir', '"""results"""'], {}), "(pca_basedir, 'results')\n", (2656, 2680), False, 'import sys, os\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-22 07:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds RoleAdmin as a multi-table-inheritance
    # subclass of siteEngine.Role, and makes UserProfile.user_auth a
    # cascading one-to-one link to the auth user model.
    dependencies = [
        ('siteEngine', '0004_auto_20160621_1445'),
    ]
    operations = [
        migrations.CreateModel(
            name='RoleAdmin',
            fields=[
                ('role_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='siteEngine.Role')),
            ],
            bases=('siteEngine.role',),
        ),
        migrations.AlterField(
            model_name='userprofile',
            name='user_auth',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"django.db.models.OneToOneField"
] |
[((817, 916), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=\n settings.AUTH_USER_MODEL)\n', (837, 916), False, 'from django.db import migrations, models\n'), ((472, 641), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""siteEngine.Role"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'siteEngine.Role')\n", (492, 641), False, 'from django.db import migrations, models\n')]
|
# WSGI entry point for the `quora` Django project.
import os
# Must be set before any Django machinery below is imported.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "quora.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
from whitenoise.django import DjangoWhiteNoise
# Cling (dj-static) serves static files, then WhiteNoise wraps the app.
# NOTE(review): both layers do static-file serving -- one is likely
# redundant; confirm which the deployment actually relies on.
application = Cling(get_wsgi_application())
application = DjangoWhiteNoise(application)
|
[
"django.core.wsgi.get_wsgi_application",
"os.environ.setdefault",
"whitenoise.django.DjangoWhiteNoise"
] |
[((10, 75), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""quora.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'quora.settings')\n", (31, 75), False, 'import os\n'), ((261, 290), 'whitenoise.django.DjangoWhiteNoise', 'DjangoWhiteNoise', (['application'], {}), '(application)\n', (277, 290), False, 'from whitenoise.django import DjangoWhiteNoise\n'), ((223, 245), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (243, 245), False, 'from django.core.wsgi import get_wsgi_application\n')]
|
import urllib.parse
from notifier.grabbers.base import Base, Internet
class BetterAdvice(object):
    """Grabber that scrapes article links from the BetterAdvice site."""

    @staticmethod
    def sync(obj: Base, *args, **kwargs):
        """Fetch the site's index page and queue one text task per link,
        iterating the links from last to first."""
        page = Internet.html_get(obj.sync_type.base_url)
        anchors = page.html.xpath('/html/body/div[*]/div[*]/div/div[*]/div[*]/section/div[*]/div[*]/div[*]/a')
        for anchor in reversed(anchors):
            # strip the query string, then resolve relative to the base URL
            path = anchor.attrs.get('href').split("?")[0]
            link = urllib.parse.urljoin(obj.sync_type.base_url, path)
            title = anchor.text.strip()
            obj.add_text_task(
                unique_key=link,
                name=title,
                url=link,
                data={'text': link},
            )
|
[
"notifier.grabbers.base.Internet.html_get"
] |
[((194, 235), 'notifier.grabbers.base.Internet.html_get', 'Internet.html_get', (['obj.sync_type.base_url'], {}), '(obj.sync_type.base_url)\n', (211, 235), False, 'from notifier.grabbers.base import Base, Internet\n')]
|
import tensorflow as tf
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Activation, Dense,Flatten, Add, TimeDistributed, Flatten, BatchNormalization
from tensorflow.keras.layers import Conv1D,MaxPooling1D,GlobalAveragePooling1D, GlobalMaxPooling1D
from tensorflow.keras.layers import Conv2D,MaxPooling2D,GlobalAveragePooling2D, GlobalMaxPooling2D
from tensorflow.keras.layers import Conv3D,MaxPooling3D,GlobalAveragePooling3D, GlobalMaxPooling3D
from tensorflow.keras.layers import Layer
from .groupnorm import GroupNormalization
def define_NormLayers(norm):
    """Resolve a normalization name to its layer class.

    Args:
        norm: "BatchNorm" or "GroupNorm".

    Returns:
        BatchNormalization or GroupNormalization (the class, not an instance).

    Raises:
        Exception: for any other value of ``norm``.
    """
    if norm == "BatchNorm":
        return BatchNormalization
    if norm == "GroupNorm":
        return GroupNormalization
    raise Exception("Normalization that you specify is invalid! Current value:", norm)
def define_ConvLayer(mode):
    """Resolve a dimensionality mode to its convolution layer class.

    Args:
        mode: "1D", "2D", "3D", or "TimeD" ("TimeD" uses the 2D layer,
            to be wrapped in TimeDistributed by the caller).

    Returns:
        Conv1D, Conv2D or Conv3D (the class, not an instance).

    Raises:
        Exception: for any other value of ``mode``.
    """
    if mode in ("2D", "TimeD"):
        return Conv2D
    if mode == "1D":
        return Conv1D
    if mode == "3D":
        return Conv3D
    raise Exception("Convolution mode that you specify is invalid! Current value:", mode)
def define_Pooling(mode):
    """Resolve a dimensionality mode to its max-pooling layer class.

    Args:
        mode: "1D", "2D", "3D", or "TimeD" ("TimeD" uses the 2D layer).

    Returns:
        MaxPooling1D, MaxPooling2D or MaxPooling3D (the class).

    Raises:
        Exception: for any other value of ``mode``.
    """
    if mode in ("2D", "TimeD"):
        return MaxPooling2D
    if mode == "1D":
        return MaxPooling1D
    if mode == "3D":
        return MaxPooling3D
    raise Exception("Convolution mode that you specify is invalid! Current value:", mode)
def define_GlobalPooling(mode, pooling):
    """Resolve (mode, pooling) to the matching global pooling layer class.

    Args:
        mode: "1D", "2D", "3D", or "TimeD" ("TimeD" uses the 2D layer).
        pooling: "max" or "ave".

    Returns:
        The corresponding GlobalMaxPooling*/GlobalAveragePooling* class.

    Raises:
        Exception: for any unsupported (mode, pooling) combination.
            BUG FIX: the original fell through and silently returned None
            here, which later made Fin_layer crash with an opaque
            "NoneType is not callable" TypeError; now it fails fast with
            an explicit message, consistent with the other define_* helpers.
    """
    if pooling == "max":
        if mode == "2D" or mode == "TimeD":
            return GlobalMaxPooling2D
        if mode == "1D":
            return GlobalMaxPooling1D
        if mode == "3D":
            return GlobalMaxPooling3D
    elif pooling == "ave":
        if mode == "2D" or mode == "TimeD":
            return GlobalAveragePooling2D
        if mode == "1D":
            return GlobalAveragePooling1D
        if mode == "3D":
            return GlobalAveragePooling3D
    raise Exception("Global pooling that you specify is invalid! Current values:", mode, pooling)
class Conv_stage1_block(tf.keras.Model):
    """ResNet stem: 7x7 stride-2 conv -> norm -> ReLU -> 3x3 stride-2 max pool.

    In "TimeD" mode every layer is wrapped in TimeDistributed so the block
    is applied independently at each time step of the input sequence.
    """
    def __init__(self, filters, strides=2, mode="2D", norm="BatchNorm",kernel_initializer='he_normal',name=None):
        super(Conv_stage1_block, self).__init__(name=name)
        NormLayer = define_NormLayers(norm) # Define Normalization Layers
        ConvLayer = define_ConvLayer(mode) #Define ConvLayer
        MaxPooling = define_Pooling(mode) # Define Pooling
        if mode=="1D" or mode=="2D" or mode=="3D":
            self.conv1 = ConvLayer(filters, kernel_size=7,strides=strides,kernel_initializer=kernel_initializer, padding='same')
            self.bn1 = NormLayer()
            self.act1 = Activation('relu')
            self.pool1 = MaxPooling(pool_size=3, strides=2,padding="same")
        elif mode=="TimeD":
            self.conv1 = TimeDistributed(ConvLayer(filters, kernel_size=7,kernel_initializer=kernel_initializer,strides=strides, padding='same'))
            self.bn1 = TimeDistributed(NormLayer())
            self.act1 = TimeDistributed(Activation('relu'))
            self.pool1 = TimeDistributed(MaxPooling(pool_size=(3,3), strides=(2,2),padding="same"))
    def call(self, x):
        # conv -> norm -> relu -> max pool
        h = self.conv1(x)
        h = self.bn1(h)
        h = self.act1(h)
        output = self.pool1(h)
        return output
class Identity_bottleneck_block(tf.keras.Model):
    def __init__(self, filters, kernel_size=3, mode="2D", norm="BatchNorm",kernel_initializer='he_normal' ,name=None):
        """The identity bottleneck block: no conv layer at the shortcut.
        Main path is 1x1 -> kxk -> 1x1 conv, each followed by norm + ReLU,
        then added to the unchanged input.
        # Arguments
            filters: tuple of 3 ints -- filters of the 1x1, kxk and 1x1
                convs (filters3 must equal the input channel count so the
                residual add is shape-valid)
            kernel_size: default 3, kernel size of the middle conv layer
            mode: "1D", "2D", "3D" or "TimeD" (TimeDistributed 2D layers)
            norm: "BatchNorm" or "GroupNorm"
        # Returns
            Output tensor for the block (same shape as the input).
        """
        super(Identity_bottleneck_block, self).__init__(name=name)
        NormLayer = define_NormLayers(norm) # Define Normalization Layers
        ConvLayer = define_ConvLayer(mode)
        filters1, filters2, filters3 = filters
        if mode=="1D" or mode=="2D" or mode=="3D":
            self.conv1 = ConvLayer(filters1, 1, kernel_initializer=kernel_initializer,padding='same')
            self.bn1 = NormLayer()
            self.relu1 = Activation('relu')
            self.conv2 = ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same')
            self.bn2 = NormLayer()
            self.relu2 = Activation('relu')
            self.conv3 = ConvLayer(filters3, 1, kernel_initializer=kernel_initializer,padding='same')
            self.bn3 = NormLayer()
            self.relu3 = Activation('relu')
        elif mode=="TimeD":
            self.conv1 = TimeDistributed(ConvLayer(filters1, (1,1), kernel_initializer=kernel_initializer,padding='same'))
            self.bn1 = TimeDistributed(NormLayer())
            self.relu1 = TimeDistributed(Activation('relu'))
            self.conv2 = TimeDistributed(ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same'))
            self.bn2 = TimeDistributed(NormLayer())
            self.relu2 = TimeDistributed(Activation('relu'))
            self.conv3 = TimeDistributed(ConvLayer(filters3, (1,1), kernel_initializer=kernel_initializer,padding='same'))
            self.bn3 = TimeDistributed(NormLayer())
            self.relu3 = TimeDistributed(Activation('relu'))
        self.add = Add()
    def call(self, x):
        residual = x
        # main path: (conv -> norm -> relu) three times
        h = self.conv1(x)
        h = self.bn1(h)
        h = self.relu1(h)
        h = self.conv2(h)
        h = self.bn2(h)
        h = self.relu2(h)
        h = self.conv3(h)
        h = self.bn3(h)
        h = self.relu3(h)
        # Merge
        output = self.add([residual, h])
        return output
class Conv_bottleneck_block(tf.keras.Model):
    def __init__(self,filters, kernel_size=3, strides=2, mode="2D",norm="BatchNorm",kernel_initializer='he_normal' , name=None):
        """Bottleneck block with a projection conv layer at the shortcut.
        Main path is 1x1 (strided) -> kxk -> 1x1 conv; the shortcut is a
        1x1 strided conv + norm so both paths downsample identically and
        end with filters3 channels.
        # Arguments
            filters: tuple of 3 ints -- filters of the 1x1, kxk and 1x1
                convs on the main path (the shortcut conv uses filters3)
            kernel_size: default 3, kernel size of the middle conv layer
            strides: stride of the first main-path conv and of the
                shortcut conv
            mode: "1D", "2D", "3D" or "TimeD" (TimeDistributed 2D layers)
            norm: "BatchNorm" or "GroupNorm"
        # Returns
            Output tensor for the block.
        """
        super(Conv_bottleneck_block, self).__init__(name=name)
        NormLayer = define_NormLayers(norm) # Define Normalization Layers
        ConvLayer = define_ConvLayer(mode) # Define ConvLayer
        filters1, filters2, filters3 = filters
        if mode=="1D" or mode=="2D" or mode=="3D":
            # Left
            self.bn1 = NormLayer()
            self.relu1 = Activation('relu')
            self.conv1 = ConvLayer(filters1, 1, strides=strides,kernel_initializer=kernel_initializer,padding='same')
            self.bn2 = NormLayer()
            self.relu2 = Activation('relu')
            self.conv2 = ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same')
            self.bn3 = NormLayer()
            self.relu3 = Activation('relu')
            self.conv3 = ConvLayer(filters3, 1, kernel_initializer=kernel_initializer,padding='same')
            #Right(shortcut)
            self.s_bn = NormLayer()
            self.s_conv = ConvLayer(filters3, 1, strides=strides,
                              kernel_initializer=kernel_initializer,padding='same')
        elif mode == "TimeD":
            # Left
            self.bn1 = TimeDistributed(NormLayer())
            self.relu1 = TimeDistributed(Activation('relu'))
            self.conv1 = TimeDistributed(ConvLayer(filters1, (1,1), strides=strides,kernel_initializer=kernel_initializer,padding='same'))
            self.bn2 = TimeDistributed(NormLayer())
            self.relu2 = TimeDistributed(Activation('relu'))
            self.conv2 = TimeDistributed(ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same'))
            self.bn3 = TimeDistributed(NormLayer())
            self.relu3 = TimeDistributed(Activation('relu'))
            self.conv3 = TimeDistributed(ConvLayer(filters3, (1,1), kernel_initializer=kernel_initializer,padding='same'))
            #Right(shortcut)
            self.s_bn = TimeDistributed(NormLayer())
            self.s_conv = TimeDistributed(ConvLayer(filters3, (1,1), strides=strides, kernel_initializer=kernel_initializer,padding='same'))
        self.add = Add()
    def call(self, x):
        residual = x
        #Left: (conv -> norm -> relu) three times
        h = self.conv1(x)
        h = self.bn1(h)
        h = self.relu1(h)
        h = self.conv2(h)
        h = self.bn2(h)
        h = self.relu2(h)
        h = self.conv3(h)
        h = self.bn3(h)
        h = self.relu3(h)
        #Right: projection conv + norm on the shortcut
        residual = self.s_conv(residual)
        residual = self.s_bn(residual)
        # Merge
        output = self.add([residual, h])
        return output
class Identity_basic_block(tf.keras.Model):
    def __init__(self, filters,kernel_size=3, mode="2D", norm="BatchNorm",kernel_initializer='he_normal' , name=None):
        """The basic identity block: no conv layer at the shortcut.
        Main path is two kxk convs, each followed by norm + ReLU, then
        added to the unchanged input.
        # Arguments
            filters: tuple of 2 ints -- filters of the two convs
                (filters2 must equal the input channel count so the
                residual add is shape-valid)
            kernel_size: default 3, kernel size of both conv layers
            mode: "1D", "2D", "3D" or "TimeD" (TimeDistributed 2D layers)
            norm: "BatchNorm" or "GroupNorm"
        # Returns
            Output tensor for the block (same shape as the input).
        """
        super(Identity_basic_block, self).__init__(name=name)
        NormLayer = define_NormLayers(norm) # Define Normalization Layers
        ConvLayer = define_ConvLayer(mode) # Define ConvLayer
        filters1, filters2 = filters
        if mode=="1D" or mode=="2D" or mode=="3D":
            self.bn1 = NormLayer()
            self.relu1 = Activation('relu')
            self.conv1 = ConvLayer(filters1, kernel_size, kernel_initializer=kernel_initializer,padding='same')
            self.bn2 = NormLayer()
            self.relu2 = Activation('relu')
            self.conv2 = ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same')
        elif mode=="TimeD":
            self.bn1 = TimeDistributed(NormLayer())
            self.relu1 = TimeDistributed(Activation('relu'))
            self.conv1 = TimeDistributed(ConvLayer(filters1, kernel_size, kernel_initializer=kernel_initializer,padding='same'))
            self.bn2 = TimeDistributed(NormLayer())
            self.relu2 = TimeDistributed(Activation('relu'))
            self.conv2 = TimeDistributed(ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same'))
        self.add = Add()
    def call(self, x):
        residual = x
        # main path: (conv -> norm -> relu) twice
        h = self.conv1(x)
        h = self.bn1(h)
        h = self.relu1(h)
        h = self.conv2(h)
        h = self.bn2(h)
        h = self.relu2(h)
        # Merge
        output = self.add([residual, h])
        return output
class Conv_basic_block(tf.keras.Model):
    def __init__(self,filters, kernel_size=3, strides=2, mode="2D", norm="BatchNorm",kernel_initializer='he_normal', name=None):
        """Basic (two-conv) residual block with a conv layer at the shortcut.

        # Arguments
            filters: tuple of 2 ints -- filters of the two main-path convs
                (the shortcut conv uses filters2 so the residual add is valid)
            kernel_size: default 3, kernel size of the second conv layer
            strides: stride of the first main-path conv and of the shortcut
                conv, so both paths downsample identically
            mode: "1D", "2D", "3D" or "TimeD" (TimeDistributed 2D layers)
            norm: "BatchNorm" or "GroupNorm"
        # Returns
            Output tensor for the block.
        """
        super(Conv_basic_block, self).__init__(name=name)
        NormLayer = define_NormLayers(norm) # Define Normalization Layers
        ConvLayer = define_ConvLayer(mode) # Define ConvLayer
        filters1, filters2 = filters
        if mode=="1D" or mode=="2D" or mode=="3D":
            # Left (main path)
            self.bn1 = NormLayer()
            self.relu1 = Activation('relu')
            self.conv1 = ConvLayer(filters1, 1, strides=strides,kernel_initializer=kernel_initializer,padding='same')
            self.bn2 = NormLayer()
            self.relu2 = Activation('relu')
            self.conv2 = ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same')
            # Right (shortcut): projection conv so channels/stride match
            self.s_bn = NormLayer()
            self.s_conv = ConvLayer(filters2, 1, strides=strides,kernel_initializer=kernel_initializer,padding='same')
        elif mode=="TimeD":
            # Left (main path)
            self.bn1 = TimeDistributed(NormLayer())
            self.relu1 = TimeDistributed(Activation('relu'))
            self.conv1 = TimeDistributed(ConvLayer(filters1, (1,1), strides=strides,kernel_initializer=kernel_initializer,padding='same'))
            self.bn2 = TimeDistributed(NormLayer())
            self.relu2 = TimeDistributed(Activation('relu'))
            self.conv2 = TimeDistributed(ConvLayer(filters2, kernel_size, kernel_initializer=kernel_initializer,padding='same'))
            # Right (shortcut)
            self.s_bn = TimeDistributed(NormLayer())
            self.s_conv = TimeDistributed(ConvLayer(filters2, (1,1), strides=strides,kernel_initializer=kernel_initializer,padding='same'))
        self.add = Add()
    def call(self, x):
        # BUG FIX: the original applied bn2/relu2 BEFORE conv2
        # (conv1, bn1, relu1, bn2, relu2, conv2), unlike every sibling block
        # (Identity_basic_block, Conv_bottleneck_block) which normalizes and
        # activates AFTER each conv.  Reordered to conv2 -> bn2 -> relu2.
        # Left (main path): (conv -> norm -> relu) twice
        residual = x
        h = self.conv1(x)
        h = self.bn1(h)
        h = self.relu1(h)
        h = self.conv2(h)
        h = self.bn2(h)
        h = self.relu2(h)
        # Right (shortcut): projection conv + norm
        residual = self.s_conv(residual)
        residual = self.s_bn(residual)
        # Merge
        output = self.add([residual, h])
        return output
class Fin_layer(tf.keras.Model):
    """Network head: global pooling followed by an optional softmax classifier.

    In "TimeD" mode pooling runs per time step and the result is flattened
    before the Dense classifier.

    # Arguments
        mode: "1D", "2D", "3D" or "TimeD".
        class_num: number of output classes for the Dense head.
        include_top: if True, append the softmax Dense classifier.
        pooling: 'ave' or 'max' (forwarded to define_GlobalPooling).
            BUG FIX: the default used to be 'avg', which
            define_GlobalPooling does not recognise (it expects 'ave'),
            so constructing Fin_layer with the default always failed.
            Explicit callers (e.g. ResnetBuilder) pass 'ave'/'max' and
            are unaffected.
        name: optional Keras model name.
    """
    def __init__(self,mode="2D", class_num=1000, include_top=True, pooling='ave', name=None):
        super(Fin_layer, self).__init__(name=name)
        self.include_top = include_top
        self.mode=mode
        GlobalPooling = define_GlobalPooling(mode, pooling)
        if mode=="1D" or mode=="2D" or mode=="3D":
            #Pooling setting
            self.gp = GlobalPooling()
            if self.include_top:
                self.dense = Dense(class_num, 'softmax')
        elif mode=="TimeD":
            self.gp = TimeDistributed(GlobalPooling())
            if self.include_top:
                self.flat = Flatten()
                self.dense = Dense(class_num, 'softmax')
    def call(self, x):
        output = self.gp(x)
        if self.include_top and (self.mode=="1D" or self.mode=="2D" or self.mode=="3D"):
            output = self.dense(output)
        if self.include_top and self.mode=="TimeD":
            output = self.flat(output)
            output = self.dense(output)
        return output
class ResnetBuilder(tf.keras.Model):
    """Build a ResNet (18/34/50/101/152) in 1D/2D/3D/TimeDistributed flavors.

    :param class_num: number of output classes for the classification head
    :param include_top: whether the head includes the final dense layer
    :param pooling: 'ave', 'max' or None (None returns raw feature maps)
    :param mode: '1D', '2D', '3D' or 'TimeD'
    :param norm: normalization layer name forwarded to every block
    :param kernel_initializer: initializer forwarded to every conv layer
    :param name: one of 'ResNet18', 'ResNet34', 'ResNet50', 'ResNet101',
        'ResNet152' -- selects the block type and per-stage repetitions
    """

    # variant name -> (block type, per-stage repetitions); base filters are
    # [64, 128, 256, 512] for every variant.
    _VARIANTS = {
        'ResNet18': ('basic', [2, 2, 2, 2]),
        'ResNet34': ('basic', [3, 4, 6, 3]),
        'ResNet50': ('bottleneck', [3, 4, 6, 3]),
        'ResNet101': ('bottleneck', [3, 4, 23, 3]),
        'ResNet152': ('bottleneck', [3, 8, 36, 3]),
    }

    def __init__(self, class_num=1000, include_top=True, pooling='ave', mode="2D", norm="BatchNorm", kernel_initializer='he_normal', name=None):
        super(ResnetBuilder, self).__init__(name=name)
        # Argument validation (same messages as before; `in` uses == so the
        # accepted values are unchanged).
        if mode not in ("1D", "2D", "3D", "TimeD"):
            raise Exception("'mode' value is invalid. you should use '1D' or '2D' or '3D' or 'TimeD'. Current value :", mode)
        if pooling not in ("ave", "max", None):
            raise Exception("'pooling' value is invalid. you should use 'ave' or 'max' or None. Current value :", pooling)
        if include_top not in (True, False):
            raise Exception("'include_top' value is invalid. you should use bool value. Current value :", include_top)
        self.pooling = pooling
        if name not in self._VARIANTS:
            raise Exception(" Name Error! you can use ResNet18,ResNet34,ResNet50,ResNet101, or ResNet152. Current name:", name)
        self.stage_filters = [64, 128, 256, 512]
        variant = self._VARIANTS[name]
        self.block_type = variant[0]
        self.reptitions = list(variant[1])  # copy: keep the class table immutable
        # Resolves self.IdBlock, self.ConvBlock and self.all_filters.
        self.define_block_type()
        # stage1: the conv/pool stem.
        self.conv1 = Conv_stage1_block(filters=self.all_filters[0][0], mode=mode, norm=norm, kernel_initializer=kernel_initializer)
        # stages 2-5: one conv (projection) block followed by identity blocks.
        # Stage 2 keeps strides=1 because the stem already downsampled; later
        # stages use the conv block's default strides.
        for stage_idx in range(4):
            common = dict(filters=self.all_filters[stage_idx], mode=mode, norm=norm, kernel_initializer=kernel_initializer)
            convs = {}
            if stage_idx == 0:
                convs[0] = self.ConvBlock(strides=1, **common)
            else:
                convs[0] = self.ConvBlock(**common)
            for rep in range(1, self.reptitions[stage_idx]):
                convs[rep] = self.IdBlock(**common)
            # Keep the historical attribute names (stage2_convs..stage5_convs)
            # so external code inspecting the model still works.
            setattr(self, 'stage%d_convs' % (stage_idx + 2), convs)
        # Final classification head, only when pooling is requested.
        if self.pooling is not None:
            self.fin = Fin_layer(mode=mode, include_top=include_top, class_num=class_num, pooling=self.pooling)

    def define_block_type(self):
        '''Resolve block classes and per-stage filter lists from block_type.'''
        # NOTE: removed a leftover debug print("original") present before.
        if self.block_type == "basic":
            self.IdBlock = Identity_basic_block
            self.ConvBlock = Conv_basic_block
            self.all_filters = [[s_f, s_f] for s_f in self.stage_filters]
        elif self.block_type == "bottleneck":
            self.IdBlock = Identity_bottleneck_block
            self.ConvBlock = Conv_bottleneck_block
            # bottleneck blocks expand the last conv by 4x
            self.all_filters = [[s_f, s_f, s_f * 4] for s_f in self.stage_filters]

    def call(self, x):
        """Forward pass; returns head output, or raw features when pooling is None."""
        h = self.conv1(x)
        # stages 2-5
        for stage_idx in range(4):
            convs = getattr(self, 'stage%d_convs' % (stage_idx + 2))
            for rep in range(self.reptitions[stage_idx]):
                h = convs[rep](h)
        if self.pooling is not None:
            return self.fin(h)
        return h
|
[
"tensorflow.keras.layers.Add",
"tensorflow.keras.layers.Activation",
"tensorflow.keras.layers.Flatten",
"tensorflow.keras.layers.Dense"
] |
[((5507, 5512), 'tensorflow.keras.layers.Add', 'Add', ([], {}), '()\n', (5510, 5512), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((8674, 8679), 'tensorflow.keras.layers.Add', 'Add', ([], {}), '()\n', (8677, 8679), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((11028, 11033), 'tensorflow.keras.layers.Add', 'Add', ([], {}), '()\n', (11031, 11033), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((13655, 13660), 'tensorflow.keras.layers.Add', 'Add', ([], {}), '()\n', (13658, 13660), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((2587, 2605), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (2597, 2605), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((4352, 4370), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (4362, 4370), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((4544, 4562), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (4554, 4562), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((4725, 4743), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (4735, 4743), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((6911, 6929), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (6921, 
6929), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((7108, 7126), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (7118, 7126), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((7300, 7318), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (7310, 7318), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((10172, 10190), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (10182, 10190), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((10363, 10381), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (10373, 10381), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((12346, 12364), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (12356, 12364), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((12543, 12561), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (12553, 12561), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((14530, 14557), 'tensorflow.keras.layers.Dense', 'Dense', (['class_num', '"""softmax"""'], {}), "(class_num, 'softmax')\n", (14535, 14557), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((2947, 2965), 'tensorflow.keras.layers.Activation', 
'Activation', (['"""relu"""'], {}), "('relu')\n", (2957, 2965), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((4988, 5006), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (4998, 5006), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((5231, 5249), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (5241, 5249), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((5467, 5485), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (5477, 5485), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((7792, 7810), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (7802, 7810), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((8044, 8062), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (8054, 8062), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((8287, 8305), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (8297, 8305), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((10616, 10634), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (10626, 10634), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((10858, 10876), 
'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (10868, 10876), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((12999, 13017), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (13009, 13017), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((13251, 13269), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (13261, 13269), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((14722, 14731), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (14729, 14731), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n'), ((14761, 14788), 'tensorflow.keras.layers.Dense', 'Dense', (['class_num', '"""softmax"""'], {}), "(class_num, 'softmax')\n", (14766, 14788), False, 'from tensorflow.keras.layers import Input, Activation, Dense, Flatten, Add, TimeDistributed, Flatten, BatchNormalization\n')]
|
#!/usr/bin/env python3
import sys
import os
import inspect

try:
    # Make the parent directory importable so the shared ``logic`` module
    # (one level above this script) resolves regardless of the CWD.
    currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    parentdir = os.path.dirname(currentdir)
    sys.path.insert(0, parentdir)
    from logic import *
except Exception as error:
    # Report *why* the import failed; the previous bare except hid the cause.
    print(f"Could not import: {error}")
    sys.exit(1)
def _check_entailments(pairs):
    """Print tt_entails results for each (premise, conclusion) pair.

    Falls back to CNF conversion when tt_entails rejects the raw sentences.
    """
    for premise, conclusion in pairs:
        print(f"Checking if {premise} entails {conclusion}")
        try:
            print(tt_entails(expr(premise), expr(conclusion)))
        except Exception:
            # tt_entails only accepts propositional sentences; normalize first.
            print(tt_entails(to_cnf(expr(premise)), to_cnf(expr(conclusion))))


def question_b():
    """Answer question (b): entailment, equivalence and satisfiability checks."""
    _check_entailments([
        ["False", "True"],
        ["True", "False"],
        ["A & B", "(A ==> B) & (B ==> A)"],
        ["(A ==> B) & (B ==> A)", "A | B"],
        ["(A ==> B) & (B <== A)", "~A | B"],
        ["(A & B) ==> C", "(A ==> C) | (B ==> C)"],
    ])
    # Equivalence requires entailment in both directions.
    print("Checking for (C ∨ (¬A ∧ ¬B)) ≡ ((A ⇒ C) ∧ (B ⇒ C))")
    b1 = tt_entails(expr("C | (~A | ~B)"), expr("(A ==> C) & (B ==> C)"))
    b2 = tt_entails(expr("(A ==> C) & (B ==> C)"), expr("C | (~A | ~B)"))
    print(b1 and b2)
    _check_entailments([
        ["(A | B) & (~C | ~D | E)", "(A | B)"],
        ["(A | B) & (~C | ~D | E)", "(A | B) & (~D | E)"],
    ])
    print("Checking satisfiability of (A | B) & (~(A ==> B))")
    print(dpll_satisfiable(expr("(A | B) & (~(A ==> B))")))
    print("checking satisfiability of ((A ==> B) & (A <== B)) & (~A | B)")
    print(dpll_satisfiable(expr("((A ==> B) & (A <== B)) & (~A | B)")))
    # A biconditional with an extra symbol should have exactly twice the models.
    n1 = dpll_satisfiable(expr("(((A ==> B) & (A <== B)) ==> C) & (((A ==> B) & (A <== B)) <== C)"))
    n2 = dpll_satisfiable(expr("(A ==> B) & (A <== B)"))
    if len(n1) == (len(n2) * 2):
        print("Both of them have the same number of models")
        print("---->", len(n1), f"\n{n1}", len(n2), f"\n{n2}")


question_b()
|
[
"os.path.dirname",
"sys.path.insert",
"sys.exit",
"inspect.currentframe"
] |
[((167, 194), 'os.path.dirname', 'os.path.dirname', (['currentdir'], {}), '(currentdir)\n', (182, 194), False, 'import os, inspect\n'), ((199, 228), 'sys.path.insert', 'sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (214, 228), False, 'import sys\n'), ((294, 305), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (302, 305), False, 'import sys\n'), ((125, 147), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (145, 147), False, 'import os, inspect\n')]
|
from ov2640_constants import *
# The lores/hires constant modules provide the resolution register tables.
# They are imported inside try/except so a missing module does not abort
# loading; note, however, that camera initialization will fail later if the
# resolution constants it needs were never imported.
try:
from ov2640_lores_constants import *
except Exception as e:
print(e)
try:
from ov2640_hires_constants import *
except Exception as e:
print(e)
try:
from ov2640_config import *
except Exception as e:
print(e)
import machine
import time
import ubinascii
import uos
import gc
class ov2640(object):
    """Driver for the ArduCAM OV2640 camera module.

    Sensor registers are accessed over I2C; the capture FIFO is read over SPI.
    Pin numbers are GPIO numbers (defaults match a common ESP32 wiring).
    """

    def __init__(self, sclpin=22, sdapin=21, cspin=15, sckpin=14,
                 mosipin=13, misopin=12, resolution=OV2640_320x240_JPEG,
                 imagedecode=OV2640_YUV422):
        gc.enable()
        # I2C pins (sensor register access)
        self.sclpin = sclpin
        self.sdapin = sdapin
        # SPI pins (FIFO readout)
        self.sckpin = sckpin
        self.mosipin = mosipin
        self.misopin = misopin
        self.cspin = cspin
        # Camera power-state flag. Renamed from ``standby`` (fix): the old
        # attribute shadowed the standby() method, making it uncallable.
        self.in_standby = False
        # Bus initialization. Fix: use the configured I2C pins instead of the
        # previously hard-coded GPIO 22/21.
        self.hspi = machine.SPI(1, baudrate=80000000, polarity=0, phase=0,
                                sck=machine.Pin(self.sckpin),
                                mosi=machine.Pin(self.mosipin),
                                miso=machine.Pin(self.misopin))
        self.i2c = machine.I2C(scl=machine.Pin(self.sclpin),
                                sda=machine.Pin(self.sdapin), freq=1000000)
        self.hspi.init(baudrate=2000000)
        # Chip-select for SPI; any free GPIO works.
        self.cspin = machine.Pin(self.cspin, machine.Pin.OUT)
        self.cspin.value(1)
        # camera detection
        addrs = self.i2c.scan()
        print('ov2640_init: devices detected on i2c:')
        for a in addrs:
            print('0x%x' % a)
        time.sleep(1)
        # select register bank 1 and issue a system reset
        self.i2c.writeto_mem(SENSORADDR, 0xff, b'\x01')
        self.i2c.writeto_mem(SENSORADDR, 0x12, b'\x80')
        # let it come up
        time.sleep_ms(100)
        # jpg init registers
        cam_write_register_set(self.i2c, SENSORADDR, OV2640_JPEG_INIT)
        cam_write_register_set(self.i2c, SENSORADDR, imagedecode)
        cam_write_register_set(self.i2c, SENSORADDR, OV2640_JPEG)
        self.i2c.writeto_mem(SENSORADDR, 0xff, b'\x01')
        self.i2c.writeto_mem(SENSORADDR, 0x15, b'\x00')
        # Apply the requested resolution. Fix: the ``resolution`` argument was
        # previously ignored and 1600x1200 was always applied.
        cam_write_register_set(self.i2c, SENSORADDR, resolution)
        # SPI register round-trip test
        cam_spi_write(b'\x00', b'\x55', self.hspi, self.cspin)
        res = cam_spi_read(b'\x00', self.hspi, self.cspin)
        print(res)
        print("ov2640 init: register test return bytes %s" % ubinascii.hexlify(res))
        if res == b'\x55':
            print("ov2640_init: register test successful")
        else:
            print("ov2640_init: register test failed!")
        time.sleep_us(10)
        self.i2c.writeto_mem(SENSORADDR, 0xff, b'\x01')
        # check the camera type (product ID should be 0x26/0x42)
        time.sleep_us(50)
        parta = self.i2c.readfrom_mem(SENSORADDR, 0x0a, 1)
        time.sleep_us(50)
        partb = self.i2c.readfrom_mem(SENSORADDR, 0x0b, 1)
        if parta != b'\x26' or partb != b'\x42':
            print("ov2640_init: device type does not appear to be ov2640, bytes: %s/%s" %
                  (ubinascii.hexlify(parta), ubinascii.hexlify(partb)))
        else:
            print("ov2640_init: device type looks correct, bytes: %s/%s" %
                  (ubinascii.hexlify(parta), ubinascii.hexlify(partb)))
        time.sleep_us(50)

    def capture_to_file(self, fn, overwrite):
        """Trigger a capture and stream the JPEG from the FIFO into file ``fn``.

        :param fn: destination file path
        :param overwrite: when True, delete any pre-existing file first
        :return: number of bytes read from the FIFO
        """
        # bit 0 - clear FIFO write done flag
        cam_spi_write(b'\x04', b'\x01', self.hspi, self.cspin)
        # bit 1 - start capture then read status
        cam_spi_write(b'\x04', b'\x02', self.hspi, self.cspin)
        time.sleep_ms(10)
        # poll the status register until the capture-done bit (0x08) is set
        cnt = 0
        while True:
            res = cam_spi_read(b'\x41', self.hspi, self.cspin)
            mask = b'\x08'
            if (res[0] & mask[0]):
                break
            time.sleep_ms(10)
            cnt += 1
        # read the fifo size (24-bit value spread over three registers)
        b1 = cam_spi_read(b'\x44', self.hspi, self.cspin)
        b2 = cam_spi_read(b'\x43', self.hspi, self.cspin)
        b3 = cam_spi_read(b'\x42', self.hspi, self.cspin)
        val = b1[0] << 16 | b2[0] << 8 | b3[0]
        print("ov2640_capture: %d bytes in fifo" % val)
        gc.collect()
        # Read bytes until the JPEG end-of-image marker (0xff 0xd9) appears,
        # buffering PICBUFSIZE bytes between file appends.
        bytebuf = [0, 0]
        picbuf = [b'\x00'] * PICBUFSIZE
        l = 0
        bp = 0
        if overwrite == True:
            try:
                uos.remove(fn)
            except OSError:
                pass
        while (bytebuf[0] != b'\xd9') or (bytebuf[1] != b'\xff'):
            bytebuf[1] = bytebuf[0]
            if bp > (len(picbuf) - 1):
                appendbuf(fn, picbuf, bp)
                bp = 0
            bytebuf[0] = cam_spi_read(b'\x3d', self.hspi, self.cspin)
            l += 1
            picbuf[bp] = bytebuf[0]
            bp += 1
        if bp > 0:
            appendbuf(fn, picbuf, bp)
        print("read %d bytes from fifo, camera said %d were available" % (l, val))
        return (l)

    def set_mode_config(self, mode):
        """Apply an arbitrary register set (e.g. a resolution table)."""
        cam_write_register_set(self.i2c, SENSORADDR, mode)

    def standby(self):
        """Put the sensor into low-power standby."""
        # register set select
        self.i2c.writeto_mem(SENSORADDR, 0xff, b'\x01')
        # standby mode
        self.i2c.writeto_mem(SENSORADDR, 0x09, b'\x10')
        self.in_standby = True

    def wake(self):
        """Wake the sensor from standby."""
        # register set select
        self.i2c.writeto_mem(SENSORADDR, 0xff, b'\x01')
        # normal mode
        self.i2c.writeto_mem(SENSORADDR, 0x09, b'\x00')
        self.in_standby = False
def cam_write_register_set(i, addr, reg_set):
    """Write a (register, value) table to the sensor over I2C.

    The sentinel entry (0xff, b'\\xff') terminates the table early.

    :param i: I2C bus object providing writeto_mem()
    :param addr: sensor I2C address (fix: the previous version ignored this
        and always wrote to the module-level SENSORADDR)
    :param reg_set: iterable of (register, value-bytes) pairs; renamed from
        ``set``, which shadowed the builtin
    """
    for raddr, val in reg_set:
        if raddr == 0xff and val == b'\xff':
            return
        i.writeto_mem(addr, raddr, val)
def cam_spi_write(address, value, hspi, cspin):
    """Write one byte to a camera SPI register (MSB set marks a write)."""
    cspin.value(0)
    # Set the write-mode bit on the register address, then send address + data.
    payload = bytes([address[0] | 0x80, value[0]])
    hspi.write(payload)
    cspin.value(1)
def appendbuf(fn, picbuf, howmany):
    """Append the first ``howmany`` one-byte entries of ``picbuf`` to file ``fn``.

    Each entry of picbuf is a 1-byte bytes object. Write failures are reported
    but not raised (best-effort, matching the previous behavior). Fix: the old
    version leaked the file handle if a write raised; ``with`` closes it.
    """
    try:
        with open(fn, 'ab') as f:
            # Single write of the joined prefix instead of byte-by-byte writes.
            f.write(b''.join(bytes([by[0]]) for by in picbuf[:howmany]))
    except OSError:
        print("error writing file")
    print("write %d bytes from buffer" % howmany)
def cam_spi_read(address, hspi, cspin):
    """Read one byte from a camera SPI register (MSB cleared marks a read)."""
    cspin.value(0)
    # Clear the mode bit so the controller interprets this as a read request.
    hspi.write(bytes([address[0] & 0x7f]))
    result = hspi.read(1)
    cspin.value(1)
    return result
|
[
"uos.remove",
"time.sleep",
"gc.collect",
"time.sleep_us",
"ubinascii.hexlify",
"time.sleep_ms",
"gc.enable",
"machine.Pin"
] |
[((771, 782), 'gc.enable', 'gc.enable', ([], {}), '()\n', (780, 782), False, 'import gc\n'), ((1522, 1562), 'machine.Pin', 'machine.Pin', (['self.cspin', 'machine.Pin.OUT'], {}), '(self.cspin, machine.Pin.OUT)\n', (1533, 1562), False, 'import machine\n'), ((1974, 1992), 'time.sleep_ms', 'time.sleep_ms', (['(100)'], {}), '(100)\n', (1987, 1992), False, 'import time\n'), ((2815, 2832), 'time.sleep_us', 'time.sleep_us', (['(10)'], {}), '(10)\n', (2828, 2832), False, 'import time\n'), ((2930, 2947), 'time.sleep_us', 'time.sleep_us', (['(50)'], {}), '(50)\n', (2943, 2947), False, 'import time\n'), ((3015, 3032), 'time.sleep_us', 'time.sleep_us', (['(50)'], {}), '(50)\n', (3028, 3032), False, 'import time\n'), ((3486, 3503), 'time.sleep_us', 'time.sleep_us', (['(50)'], {}), '(50)\n', (3499, 3503), False, 'import time\n'), ((3780, 3797), 'time.sleep_ms', 'time.sleep_ms', (['(10)'], {}), '(10)\n', (3793, 3797), False, 'import time\n'), ((4726, 4738), 'gc.collect', 'gc.collect', ([], {}), '()\n', (4736, 4738), False, 'import gc\n'), ((1780, 1793), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1790, 1793), False, 'import time\n'), ((4324, 4341), 'time.sleep_ms', 'time.sleep_ms', (['(10)'], {}), '(10)\n', (4337, 4341), False, 'import time\n'), ((1190, 1214), 'machine.Pin', 'machine.Pin', (['self.sckpin'], {}), '(self.sckpin)\n', (1201, 1214), False, 'import machine\n'), ((1221, 1246), 'machine.Pin', 'machine.Pin', (['self.mosipin'], {}), '(self.mosipin)\n', (1232, 1246), False, 'import machine\n'), ((1253, 1278), 'machine.Pin', 'machine.Pin', (['self.misopin'], {}), '(self.misopin)\n', (1264, 1278), False, 'import machine\n'), ((1315, 1330), 'machine.Pin', 'machine.Pin', (['(22)'], {}), '(22)\n', (1326, 1330), False, 'import machine\n'), ((1336, 1351), 'machine.Pin', 'machine.Pin', (['(21)'], {}), '(21)\n', (1347, 1351), False, 'import machine\n'), ((2625, 2647), 'ubinascii.hexlify', 'ubinascii.hexlify', (['res'], {}), '(res)\n', (2642, 2647), False, 'import 
ubinascii\n'), ((4951, 4965), 'uos.remove', 'uos.remove', (['fn'], {}), '(fn)\n', (4961, 4965), False, 'import uos\n'), ((3260, 3284), 'ubinascii.hexlify', 'ubinascii.hexlify', (['parta'], {}), '(parta)\n', (3277, 3284), False, 'import ubinascii\n'), ((3286, 3310), 'ubinascii.hexlify', 'ubinascii.hexlify', (['partb'], {}), '(partb)\n', (3303, 3310), False, 'import ubinascii\n'), ((3425, 3449), 'ubinascii.hexlify', 'ubinascii.hexlify', (['parta'], {}), '(parta)\n', (3442, 3449), False, 'import ubinascii\n'), ((3451, 3475), 'ubinascii.hexlify', 'ubinascii.hexlify', (['partb'], {}), '(partb)\n', (3468, 3475), False, 'import ubinascii\n')]
|
import threading
import os
import logging
import pprint
import traceback
import tempfile
from qtpy import QtCore
LOGGER = logging.getLogger(__name__)
class Executor(QtCore.QObject, threading.Thread):
    """Executor represents a thread of control that runs a python function with
    a single input. Once created with the proper inputs, threading.Thread has
    the following attributes:
        self.module - the loaded module object provided to __init__()
        self.args - the argument to the target function.  Usually a dict.
        self.func_name - the function name that will be called.
        self.log_manager - the LogManager instance managing logs for this script
        self.failed - defaults to False.  Indicates whether the thread raised an
            exception while running.
        self.exception - defaults to None.  If not None, points to the exception
            raised while running the thread.
    The Executor.run() function is an overridden function from threading.Thread
    and is started in the same manner by calling Executor.start().  The run()
    function is extremely simple by design: Print the arguments to the logfile
    and run the specified function.  If an exception is raised, it is printed
    and saved locally for retrieval later on.
    In keeping with convention, a single Executor thread instance is only
    designed to be run once.  To run the same function again, it is best to
    create a new Executor instance and run that."""
    # Qt signal emitted from run() when the target has finished, whether it
    # succeeded or raised; connect slots here to react on the GUI side.
    finished = QtCore.Signal()
    def __init__(self, target, args, kwargs, logfile, tempdir=None):
        # Initialize both bases explicitly: QObject first so the Qt signal
        # machinery exists before the thread can be started.
        QtCore.QObject.__init__(self)
        threading.Thread.__init__(self)
        self.target = target
        # NOTE(review): tempdir is stored but never read in this class --
        # confirm whether callers rely on the attribute.
        self.tempdir = tempdir
        # Normalize falsy args/kwargs to empty containers for the call below.
        if not args:
            args = ()
        self.args = args
        if not kwargs:
            kwargs = {}
        self.kwargs = kwargs
        if logfile is None:
            # Fall back to a log file inside a fresh temporary directory.
            logfile = os.path.join(tempfile.mkdtemp(), 'logfile.txt')
        self.logfile = logfile
        self.failed = False      # set True by run() when the target raised
        self.exception = None    # the exception instance, if any
        self.traceback = None    # formatted traceback string, if any
    def run(self):
        """Run the python script provided by the user with the arguments
        specified.  This function also prints the arguments to the logfile
        handler.  If an exception is raised in either the loading or execution
        of the module or function, a traceback is printed and the exception is
        saved."""
        try:
            self.target(*self.args, **self.kwargs)
        except Exception as error:
            # We deliberately want to catch all possible exceptions.
            LOGGER.exception(error)
            self.failed = True
            self.exception = error
            self.traceback = traceback.format_exc()
        finally:
            LOGGER.info('Execution finished')
            # Always notify listeners, even when the target failed.
            self.finished.emit()
|
[
"threading.Thread.__init__",
"qtpy.QtCore.QObject.__init__",
"tempfile.mkdtemp",
"traceback.format_exc",
"qtpy.QtCore.Signal",
"logging.getLogger"
] |
[((125, 152), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (142, 152), False, 'import logging\n'), ((1508, 1523), 'qtpy.QtCore.Signal', 'QtCore.Signal', ([], {}), '()\n', (1521, 1523), False, 'from qtpy import QtCore\n'), ((1602, 1631), 'qtpy.QtCore.QObject.__init__', 'QtCore.QObject.__init__', (['self'], {}), '(self)\n', (1625, 1631), False, 'from qtpy import QtCore\n'), ((1640, 1671), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1665, 1671), False, 'import threading\n'), ((1942, 1960), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1958, 1960), False, 'import tempfile\n'), ((2740, 2762), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2760, 2762), False, 'import traceback\n')]
|
"""Configuration for mkdocs_mdpo_plugin tests."""
import os
import sys
from tempfile import TemporaryDirectory
import polib
import pytest
import yaml
from mkdocs import config
from mkdocs.commands.build import build
from mkdocs_mdpo_plugin.plugin import MdpoPlugin
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
if ROOT_DIR not in sys.path:
sys.path.append(ROOT_DIR)
def _mkdocs_build(
    input_files_contents,
    translations,
    plugin_config,
    additional_config,
    expected_output_files,
    callback_after_first_build=None,
    insert_plugin_config_at_position=-1,
    interrupt_after_first_build=False,
):
    """Run a full mkdocs-mdpo build cycle inside temporary directories.

    Writes ``input_files_contents`` into a temporary docs dir, builds once so
    the plugin extracts translatable messages into PO files, fills those PO
    files from ``translations``, builds again so the translations are dumped
    into the site, and finally asserts that every file in
    ``expected_output_files`` contains its expected lines.

    :param input_files_contents: mapping of relative doc path -> file content
    :param translations: mapping of PO file path -> {msgid: msgstr} (or
        {msgctxt: {msgid: msgstr}}) translation entries
    :param plugin_config: mdpo plugin options (filtered against the plugin's
        declared config scheme)
    :param additional_config: extra top-level mkdocs configuration to merge
    :param callback_after_first_build: optional hook called with ``locals()``
        after the first build (useful to inspect the generated PO files)
    :param insert_plugin_config_at_position: index in the ``plugins`` list at
        which to insert the mdpo plugin (-1 appends)
    :param interrupt_after_first_build: when True, stop after the first build
    """
    with TemporaryDirectory() as site_dir, TemporaryDirectory() as docs_dir, \
            TemporaryDirectory() as config_dir:

        # build input files
        for input_file_name, content in input_files_contents.items():
            filename = os.path.join(docs_dir, input_file_name)
            # Create intermediate directories for nested doc paths.
            os.makedirs(
                os.path.abspath(os.path.dirname(filename)),
                exist_ok=True,
            )
            with open(filename, 'w') as f:
                f.write(content)
        # Keep only the plugin options that the plugin actually declares.
        mdpo_config = {}
        if plugin_config:
            for mdpo_plugin_config_field, _ in MdpoPlugin.config_scheme:
                if mdpo_plugin_config_field in plugin_config:
                    mdpo_config[mdpo_plugin_config_field] = plugin_config.get(
                        mdpo_plugin_config_field,
                    )
        # Minimal mkdocs configuration pointing at the temporary directories.
        mkdocs_config = {
            'site_name': 'My site',
            'site_url': 'https://foo.bar',
            'docs_dir': docs_dir,
            'site_dir': site_dir,
            'plugins': [],
        }
        if additional_config:
            mkdocs_config.update(additional_config)
        # Plugin order matters to mkdocs; honor the requested position.
        if insert_plugin_config_at_position == -1:
            mkdocs_config['plugins'].append({'mdpo': mdpo_config})
        else:
            mkdocs_config['plugins'].insert(
                insert_plugin_config_at_position,
                {'mdpo': mdpo_config},
            )
        config_filename = os.path.join(config_dir, 'mkdocs.yml')
        with open(config_filename, 'w') as f:
            yaml.dump(mkdocs_config, f)
        # first build, load content to translations (Markdown -> PO files)
        try:
            build(config.load_config(config_filename))
        except Exception:
            # Remove the config before re-raising so nothing leaks on failure.
            os.remove(config_filename)
            raise
        if callback_after_first_build:
            callback_after_first_build(locals())
        if interrupt_after_first_build:
            os.remove(config_filename)
            return
        # translate PO files
        for po_filename, translation_messages in translations.items():
            po_filename = os.path.join(docs_dir, os.path.normpath(po_filename))
            assert os.path.isfile(po_filename)
            po = polib.pofile(po_filename)
            for msgid_or_msgctxt, msgstr in translation_messages.items():
                if isinstance(msgstr, dict):
                    # case when msgctxt is passed as key
                    # and msgid-msgstr as value in a dict
                    msgid = list(msgstr.keys())[0]
                    msgstr = msgstr[msgid]
                    msgctxt = msgid_or_msgctxt
                else:
                    msgid = msgid_or_msgctxt
                    msgctxt = None
                # The msgid must have been extracted by the first build.
                _msgid_in_pofile = False
                for entry in po:
                    if entry.msgid == msgid:
                        _msgid_in_pofile = True
                        break
                assert _msgid_in_pofile, (
                    f"'{msgid}' not found in pofile '{po_filename}'"
                )
                for entry in po:
                    if entry.msgid == msgid:
                        entry.msgstr = msgstr
                        if msgctxt:
                            entry.msgctxt = msgctxt
                        break
            # Sanity check: every extracted message must now be translated.
            for entry in po:
                # 'Home' is the title given to the page by the default
                # Mkdocs theme
                if entry.msgid == 'Home':
                    continue
                assert entry.msgstr, (
                    f"Found '{entry.msgid}' not translated in pofile"
                )
            po.save(po_filename)
        # second build, dump translations in content (PO files -> Markdown)
        try:
            build(config.load_config(config_filename))
        except Exception:
            os.remove(config_filename)
            raise
        # assert that files have been translated
        for filename, expected_lines in expected_output_files.items():
            if not expected_lines:
                raise ValueError(
                    'Expected file defined without output lines',
                )
            filename = os.path.join(site_dir, os.path.normpath(filename))
            with open(filename) as f:
                content = f.read()
                for expected_line in expected_lines:
                    assert expected_line in content
        os.remove(config_filename)
@pytest.fixture
def mkdocs_build():
    """Expose the module-level ``_mkdocs_build`` helper as a pytest fixture."""
    return _mkdocs_build
|
[
"sys.path.append",
"os.remove",
"tempfile.TemporaryDirectory",
"os.path.dirname",
"yaml.dump",
"os.path.isfile",
"os.path.normpath",
"mkdocs.config.load_config",
"polib.pofile",
"os.path.join"
] |
[((375, 400), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (390, 400), False, 'import sys\n'), ((313, 338), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (328, 338), False, 'import os\n'), ((664, 684), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (682, 684), False, 'from tempfile import TemporaryDirectory\n'), ((698, 718), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (716, 718), False, 'from tempfile import TemporaryDirectory\n'), ((746, 766), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (764, 766), False, 'from tempfile import TemporaryDirectory\n'), ((2088, 2126), 'os.path.join', 'os.path.join', (['config_dir', '"""mkdocs.yml"""'], {}), "(config_dir, 'mkdocs.yml')\n", (2100, 2126), False, 'import os\n'), ((5071, 5097), 'os.remove', 'os.remove', (['config_filename'], {}), '(config_filename)\n', (5080, 5097), False, 'import os\n'), ((904, 943), 'os.path.join', 'os.path.join', (['docs_dir', 'input_file_name'], {}), '(docs_dir, input_file_name)\n', (916, 943), False, 'import os\n'), ((2185, 2212), 'yaml.dump', 'yaml.dump', (['mkdocs_config', 'f'], {}), '(mkdocs_config, f)\n', (2194, 2212), False, 'import yaml\n'), ((2582, 2608), 'os.remove', 'os.remove', (['config_filename'], {}), '(config_filename)\n', (2591, 2608), False, 'import os\n'), ((2828, 2855), 'os.path.isfile', 'os.path.isfile', (['po_filename'], {}), '(po_filename)\n', (2842, 2855), False, 'import os\n'), ((2873, 2898), 'polib.pofile', 'polib.pofile', (['po_filename'], {}), '(po_filename)\n', (2885, 2898), False, 'import polib\n'), ((2320, 2355), 'mkdocs.config.load_config', 'config.load_config', (['config_filename'], {}), '(config_filename)\n', (2338, 2355), False, 'from mkdocs import config\n'), ((2395, 2421), 'os.remove', 'os.remove', (['config_filename'], {}), '(config_filename)\n', (2404, 2421), False, 'import os\n'), ((2778, 2807), 'os.path.normpath', 
'os.path.normpath', (['po_filename'], {}), '(po_filename)\n', (2794, 2807), False, 'import os\n'), ((4421, 4456), 'mkdocs.config.load_config', 'config.load_config', (['config_filename'], {}), '(config_filename)\n', (4439, 4456), False, 'from mkdocs import config\n'), ((4496, 4522), 'os.remove', 'os.remove', (['config_filename'], {}), '(config_filename)\n', (4505, 4522), False, 'import os\n'), ((4862, 4888), 'os.path.normpath', 'os.path.normpath', (['filename'], {}), '(filename)\n', (4878, 4888), False, 'import os\n'), ((1001, 1026), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (1016, 1026), False, 'import os\n')]
|
import binascii
import socket
import struct
import sys
string_address = 'fdf8:f53e:61e4::18'
packed = socket.inet_pton(socket.AF_INET6, string_address)
print('Original:', string_address)
print('Packed :', binascii.hexlify(packed))
print('Unpacked:', socket.inet_ntop(socket.AF_INET6, packed))
|
[
"binascii.hexlify",
"socket.inet_pton",
"socket.inet_ntop"
] |
[((103, 152), 'socket.inet_pton', 'socket.inet_pton', (['socket.AF_INET6', 'string_address'], {}), '(socket.AF_INET6, string_address)\n', (119, 152), False, 'import socket\n'), ((208, 232), 'binascii.hexlify', 'binascii.hexlify', (['packed'], {}), '(packed)\n', (224, 232), False, 'import binascii\n'), ((253, 294), 'socket.inet_ntop', 'socket.inet_ntop', (['socket.AF_INET6', 'packed'], {}), '(socket.AF_INET6, packed)\n', (269, 294), False, 'import socket\n')]
|
"""
Licensed Materials - Property of IBM
Restricted Materials of IBM
20190891
© Copyright IBM Corp. 2020 All Rights Reserved.
"""
import logging
import keras
import time
import json
import numpy as np
import tensorflow as tf
from tensorflow.python.keras.backend import set_session
from keras import backend as k
from keras.preprocessing.image import ImageDataGenerator
from keras_preprocessing.image.numpy_array_iterator import NumpyArrayIterator
from ibmfl.util import config
from ibmfl.model.fl_model import FLModel
from ibmfl.model.model_update import ModelUpdate
from ibmfl.exceptions import FLException, LocalTrainingException
import matplotlib.pyplot as plt
from pathlib import Path
logger = logging.getLogger(__name__)
class KerasFLModel(FLModel):
"""
Wrapper class for importing keras and tensorflow.keras models.
"""
def __init__(self, model_name, model_spec, keras_model=None):
"""
Create a `KerasFLModel` instance from a Keras model.
If keras_model is provided, it will use it; otherwise it will take
the model_spec to create the model.
Assumes the `model` passed as argument is compiled.
:param model_name: String specifying the type of model e.g., Keras_CNN
:type model_name: `str`
:param model_spec: Specification of the keras_model
:type model_spec: `dict`
:param keras_model: Compiled keras model.
:type keras_model: `keras.models.Model`
"""
self.graph = tf.get_default_graph()
self.sess = tf.Session()
set_session(self.sess)
if keras_model is None:
if model_spec is None or (not isinstance(model_spec, dict)):
raise ValueError('Initializing model requires '
'a model specification or '
'compiled keras model. '
'None was provided')
# In this case we need to recreate the model from model_spec
self.model = self.load_model_from_spec(model_spec)
else:
if not issubclass(type(keras_model), (keras.models.Model,
tf.keras.models.Model)):
raise ValueError('Compiled keras model needs to be provided '
'(keras.models/tensorflow.keras.models). '
'Type provided' + str(type(keras_model)))
self.model = keras_model
self.model_type = model_name
self.model_name=model_spec['model_name']
# keras flag
if issubclass(type(self.model), keras.models.Model):
self.is_keras = True
else:
self.is_keras = False
# Default values for local training
self.batch_size = 30 # Make this 10 or lower if you get memory errors
self.epochs = 1
self.steps_per_epoch = 100
def fit_model(self, train_data, fit_params=None):
"""
Fits current model with provided training data.
:param train_data: Training data, a tuple given in the form \
(x_train, y_train) or a datagenerator of of type `keras.utils.Sequence`, \
`keras.preprocessing.image.ImageDataGenerator`
:type train_data: `np.ndarray`
:param fit_params: (optional) Dictionary with hyperparameters \
that will be used to call Keras fit function.\
Hyperparameter parameters should match keras expected values \
e.g., `epochs`, which specifies the number of epochs to be run. \
If no `epochs` or `batch_size` are provided, a default value \
will be used (1 and 128, respectively).
:type fit_params: `dict`
:return: None
"""
# Initialized with default values
batch_size = self.batch_size
epochs = self.epochs
steps_per_epoch = self.steps_per_epoch
# Extract x_train and y_train, by default,
# label is stored in the last column
# extract hyperparams from fit_param
if fit_params and ('hyperparams' in fit_params):
hyperparams = fit_params['hyperparams']
try:
training_hp = hyperparams['local']['training']
if 'batch_size' in training_hp:
batch_size = training_hp['batch_size']
else:
# In this case, use default values.
logger.info('Using default hyperparameters: '
' batch_size:' + str(self.batch_size))
if 'epochs' in training_hp:
epochs = training_hp['epochs']
else:
# In this case, use default values.
logger.info('Using default hyperparameters: '
' epochs:' + str(self.epochs))
if 'steps_per_epoch' in training_hp:
steps_per_epoch = training_hp.get('steps_per_epoch')
except Exception as ex:
logger.exception(str(ex))
logger.warning('Hyperparams badly formed.')
# In this case, use default values.
logger.info('Using default hyperparameters: '
'epochs:' + str(self.epochs) +
' batch_size:' + str(self.batch_size))
try:
# if type(train_data) is tuple and type(train_data[0]) is np.ndarray:
self.fit(
train_data, batch_size=batch_size, epochs=epochs)
# else:
# self.fit_generator(
# train_data, batch_size=batch_size, epochs=epochs, steps_per_epoch=steps_per_epoch)
except Exception as e:
logger.exception(str(e))
if epochs is None:
logger.exception('epochs need to be provided')
raise LocalTrainingException(
'Error occurred while performing model.fit')
def fit(self, train_data, batch_size, epochs):
"""
Fits current model using model.fit with provided training data.
:param train_data: Training data, a tuple \
given in the form (x_train, y_train).
:type train_data: `np.ndarray`
:param batch_size: Number of samples per gradient update.
:type batch_size: Integer
:param epochs: Number of epochs to train the model.
:type epochs: Integer
:return: None
"""
filename = f"metrics_{time.time()}.png"
full_path = Path(super().get_model_absolute_path(""))
full_path.joinpath(f"{self.model_name}").mkdir(parents=True, exist_ok=True)
x = train_data[0]
y = train_data[1]
with self.graph.as_default():
set_session(self.sess)
history=self.model.fit(x, y, batch_size=self.batch_size, epochs=epochs)
# for label in self.model.metrics_names:
# plt.plot(history.history[label],label=label)
# plt.plot(history.history["loss"],label="loss")
# plt.legend()
# plt.savefig(full_path.joinpath(filename))
def fit_generator(self, training_generator, batch_size, epochs, steps_per_epoch=None):
"""
Fits current model using model.fit_generator with provided training data generator.
:param train_data: Training datagenerator of of type `keras.utils.Sequence`, \
`keras.preprocessing.image.ImageDataGenerator`
:type train_data: `ImageDataGenerator` or `keras.utils.Sequence`
:param batch_size: Number of samples per gradient update.
:type batch_size: Integer
:param epochs: Number of epochs to train the model.
:type epochs: Integer
:param steps_per_epoch: Total number of steps (batches of samples) \
to yield from `generator` before declaring one epoch. Optional
for `Sequence` data generator`
as a number of steps.
:type steps_per_epoch: `int`
:return: None
"""
if type(training_generator) is NumpyArrayIterator and not steps_per_epoch:
raise LocalTrainingException(
"Variable steps_per_epoch cannot be None for generators not \
of type keras.utils.Sequence!")
with self.graph.as_default():
set_session(self.sess)
self.model.fit_generator(
training_generator, steps_per_epoch=steps_per_epoch, epochs=epochs)
def update_model(self, model_update):
"""
Update keras model with provided model_update, where model_update
should be generated according to `KerasFLModel.get_model_update()`.
:param model_update: `ModelUpdate` object that contains the weight \
that will be used to update the model.
:type model_update: `ModelUpdate`
:return: None
"""
if isinstance(model_update, ModelUpdate):
with self.graph.as_default():
set_session(self.sess)
w = model_update.get("weights")
self.model.set_weights(w)
else:
raise LocalTrainingException('Provided model_update should be of '
'type ModelUpdate. '
'Instead they are:' +
str(type(model_update)))
def get_model_update(self):
"""
Generates a `ModelUpdate` object that will be sent to other entities.
:return: ModelUpdate
:rtype: `ModelUpdate`
"""
w = self.model.get_weights()
return ModelUpdate(weights=w)
def predict(self, x, batch_size=128, **kwargs):
"""
Perform prediction for a batch of inputs. Note that for classification
problems, it returns the resulting probabilities.
:param x: Samples with shape as expected by the model.
:type x: `np.ndarray`
:param batch_size: Size of batches.
:type batch_size: `int`
:param kwargs: Dictionary of keras-specific arguments.
:type kwargs: `dict`
:return: Array of predictions
:rtype: `np.ndarray`
"""
with self.graph.as_default():
set_session(self.sess)
return self.model.predict(x, batch_size=batch_size, **kwargs)
def evaluate(self, test_dataset, **kwargs):
"""
Evaluates the model given testing data.
:param test_dataset: Testing data, a tuple given in the form \
(x_test, test) or a datagenerator of of type `keras.utils.Sequence`,
`keras.preprocessing.image.ImageDataGenerator`
:type test_dataset: `np.ndarray`
:param kwargs: Dictionary of metrics available for the model
:type kwargs: `dict`
"""
if type(test_dataset) is tuple:
x_test = test_dataset[0]
y_test = test_dataset[1]
return self.evaluate_model(x_test, y_test)
else:
return self.evaluate_generator_model(
test_dataset)
def evaluate_model(self, x, y, batch_size=128, **kwargs):
"""
Evaluates the model given x and y.
:param x: Samples with shape as expected by the model.
:type x: `np.ndarray`
:param y: Corresponding labels to x
:type y: `np.ndarray`
:param batch_size: Size of batches.
:type batch_size: `int`
:param kwargs: Dictionary of metrics available for the model
:type kwargs: `dict`
"""
with self.graph.as_default():
set_session(self.sess)
metrics = self.model.evaluate(x, y, batch_size=128, **kwargs)
names = self.model.metrics_names
dict_metrics = {}
if type(metrics) == list:
for metric, name in zip(metrics, names):
dict_metrics[name] = metric
else:
dict_metrics[names[0]] = metrics
filename = f"metrics_{self.model_type}"
full_path = super().get_model_absolute_path(filename)
with open(full_path,"w") as f:
for metric in dict_metrics:
f.write(f"{str(metric)}:{dict_metrics[metric]}\n")
return dict_metrics
def evaluate_generator_model(self, test_generator, batch_size=128, **kwargs):
"""
Evaluates the model based on the provided data generator.
:param test_generator: Testing datagenerator of of type `keras.utils.Sequence`, \
`keras.preprocessing.image.ImageDataGenerator`
:type train_data: `ImageDataGenerator` or `keras.utils.Sequence`
:param batch_size: Number of samples per gradient update.
:type batch_size: Integer
:return: metrics
:rtype: `dict`
"""
batch_size=self.batch_size
steps = self.steps_per_epoch
if 'steps_per_epoch' in kwargs:
steps = kwargs['steps_per_epoch']
if not type(test_generator) is NumpyArrayIterator and not steps:
raise LocalTrainingException(
"Variable steps_per_epoch cannot be None for generator not of type keras.utils.Sequence")
with self.graph.as_default():
metrics = self.model.evaluate_generator(
test_generator, steps=steps)
names = self.model.metrics_names
dict_metrics = {}
if type(metrics) == list:
for metric, name in zip(metrics, names):
dict_metrics[name] = metric
else:
dict_metrics[names[0]] = metrics
return dict_metrics
def save_model(self, filename=None):
"""
Save a model to file in the format specific to the backend framework.
:param filename: Name of the file where to store the model.
:type filename: `str`
:param path: Path of the folder where to store the model. If no path is \
specified, the model will be stored in the default data location of \
the library `DATA_PATH`.
:type path: `str`
:return: filename
"""
if filename is None:
filename = f"model_{time.time()}.h5"
full_path = Path(super().get_model_absolute_path(""))
full_path.joinpath(f"{self.model_name}").mkdir(parents=True, exist_ok=True)
self.model.save(str(full_path.joinpath(filename)))#Would be $MODEL_DIR/filename
logger.info('Model saved in path: %s.', full_path)
return filename
@staticmethod
def load_model(file_name, custom_objects={}):
"""
Loads a model from disk given the specified file_name
:param file_name: Name of the file that contains the model to be loaded.
:type file_name: `str`
:return: Keras model loaded to memory
:rtype: `keras.models.Model`
"""
# try loading model from keras
model = KerasFLModel.load_model_via_keras(file_name,
custom_objects)
if not model:
# try loading model from tf.keras
model = KerasFLModel.load_model_via_tf_keras(file_name,
custom_objects)
if model is None:
logger.error('Loading model failed! '
'An acceptable compiled model should be of type '
'(keras.models/tensorflow.keras.models)!')
raise FLException(
'Unable to load the provided compiled model!')
return model
@staticmethod
def load_model_via_keras(file_name, custom_objects={}):
"""
Loads a model from disk given the specified file_name via keras.
:param file_name: Name of the file that contains the model to be loaded.
:type file_name: `str`
:return: Keras model loaded to memory
:rtype: `keras.models.Model`
"""
# try loading model from keras
model = None
try:
model = keras.models.load_model(
file_name, custom_objects=custom_objects)
model._make_predict_function()
except Exception as ex:
logger.error(
'Loading model via keras.models.load_model failed!')
return model
@staticmethod
def load_model_via_tf_keras(file_name, custom_objects={}):
"""
Loads a model from disk given the specified file_name via tf.keras.
:param file_name: Name of the file that contains the model to be loaded.
:type file_name: `str`
:return: tf.keras model loaded to memory
:rtype: `tf.keras.models.Model`
"""
# try load from tf.keras
model = None
try:
model = tf.keras.models.load_model(
file_name, custom_objects=custom_objects)
model._make_predict_function()
except Exception as ex:
logger.error('Loading model via tf.keras.models.load_model '
'failed!')
return model
@staticmethod
def model_from_json_via_keras(json_file_name):
"""
Loads a model architecture from disk via keras
given the specified json file name.
:param json_file_name: Name of the file that contains \
the model architecture to be loaded.
:type json_file_name: `str`
:return: Keras model with only model architecture loaded to memory
:rtype: `keras.models.Model`
"""
# try loading model from keras
model = None
json_file = open(json_file_name, 'r')
f = json_file.read()
json_file.close()
try:
model = keras.models.model_from_json(f)
except Exception as ex:
logger.error('Loading model via '
'keras.models.model_from_json failed!')
return model
@staticmethod
def model_from_json_via_tf_keras(json_file_name):
"""
Loads a model architecture from disk via tf.keras
given the specified json file name.
:param json_file_name: Name of the file that contains \
the model architecture to be loaded.
:type json_file_name: `str`
:return: tf.keras model with only model architecture loaded to memory
:rtype: `tf.keras.models.Model`
"""
# try loading model from keras
model = None
json_file = open(json_file_name, 'r')
f = json_file.read()
json_file.close()
try:
model = tf.keras.models.model_from_json(f)
except Exception as ex:
logger.error(
'Loading model via tf.keras.models.model_from_json failed! ')
return model
@staticmethod
def load_model_from_spec(model_spec):
"""
Loads model from provided model_spec, where model_spec is a `dict`
that contains two items: model_spec['model_architecture'] has a
pointer to the file where the keras model architecture in stored
in json format, and model_spec['model_weights'] contains
the path where the associated weights are stored as h5.
:return: model
:rtype: `keras.models.Model`
"""
if 'model_definition' in model_spec:
model_file = model_spec['model_definition']
model_absolute_path = config.get_absolute_path(model_file)
custom_objects = {}
if 'custom_objects' in model_spec:
custom_objects_config = model_spec['custom_objects']
for custom_object in custom_objects_config:
key = custom_object['key']
value = custom_object['value']
path = custom_object['path']
custom_objects[key] = config.get_attr_from_path(
path, value)
model = KerasFLModel.load_model(model_absolute_path,
custom_objects=custom_objects)
else:
# Load architecture from json file
try:
model = KerasFLModel.model_from_json_via_keras(
model_spec['model_architecture'])
if not model:
model = KerasFLModel.model_from_json_via_tf_keras(
model_spec['model_architecture'])
if model is None:
logger.error(
'An acceptable compiled model should be of type '
'(keras.models/tensorflow.keras.models)!')
raise FLException(
'Unable to load the provided compiled model!')
except Exception as ex:
logger.error(str(ex))
raise FLException(
'Unable to load the provided compiled model!')
# Load weights from h5 file
if 'model_weights' in model_spec:
model.load_weights(model_spec['model_weights'])
# model.load_weights(weights)
# Compile model with provided parameters:
compiled_option = model_spec['compile_model_options']
try:
if 'optimizer' in compiled_option:
optimizer = compiled_option['optimizer']
else:
logger.warning('No optimizer information was provided '
'in the compile_model_options, '
'set keras optimizer to default: SGD')
optimizer = 'sgd'
if 'loss' in compiled_option:
loss = compiled_option['loss']
else:
logger.warning('No loss function was provided '
'in the compile_model_options.'
'set keras loss function to default: None')
loss = None
if 'metrics' in compiled_option:
metrics = compiled_option['metrics']
metrics = [metrics] if isinstance(
metrics, str) else metrics
else:
logger.warning('No metrics information was provided '
'in the compile_model_options,'
'set keras metrics to default: None')
metrics = None
model.compile(optimizer=optimizer,
loss=loss,
metrics=metrics)
except Exception as ex:
logger.exception(str(ex))
logger.exception('Failed to compiled keras model.')
return model
def expand_model_by_layer_name(self, new_dimension, layer_name="dense"):
"""
Expand the current Keras model with provided dimension of
the hidden layers or model weights.
This method by default expands the dense layer of
the current neural network.
It can be extends to expand other layers specified by `layer_name`,
for example, it can be use to increase the number of CNN filters or
increase the hidden layer size inside LSTM.
:param new_dimension: New number of dimensions for \
the fully connected layers
:type new_dimension: `list`
:param layer_name: layer's name to be expanded
:type layer_name: `str`
:return: None
"""
if new_dimension is None:
raise FLException('No information is provided for '
'the new expanded model. '
'Please provide the new dimension of '
'the resulting expanded model.')
model_config = json.loads(self.model.to_json())
i = 0
for layer in model_config['config']['layers']:
# find the specified layers
if 'class_name' in layer and \
layer['class_name'].strip().lower() == layer_name:
layer['config']['units'] = new_dimension[i]
i += 1
if self.is_keras:
new_model = keras.models.model_from_json(json.dumps(model_config))
else:
new_model = tf.keras.models.model_from_json(
json.dumps(model_config))
metrics = self.model.metrics_names
if 'loss' in metrics:
metrics.remove('loss')
new_model.compile(optimizer=self.model.optimizer,
loss=self.model.loss,
metrics=metrics)
self.model = new_model
def get_gradient(self, train_data):
"""
Compute the gradient with the provided dataset at the current local
model's weights.
:param train_data: Training data, a tuple \
given in the form (x_train, y_train).
:type train_data: `np.ndarray`
:return: gradients
:rtype: `list` of `np.ndarray`
"""
with self.graph.as_default():
set_session(self.sess)
# set up symbolic variables
try:
grads = self.model.optimizer.get_gradients(
self.model.total_loss,
self.model.trainable_weights)
except Exception as ex:
logger.exception(str(ex))
raise FLException('Error occurred when defining '
'gradient expression. ')
symb_inputs = (self.model._feed_inputs +
self.model._feed_targets +
self.model._feed_sample_weights)
# define the symbolic function
if self.is_keras:
from keras import backend as k
else:
from tensorflow.python.keras import backend as k
f = k.function(symb_inputs, grads)
try:
x, y, sample_weight = self.model._standardize_user_data(
train_data[0],
train_data[1])
except Exception as ex:
logger.exception(str(ex))
raise FLException('Error occurred when feeding data samples '
'to compute current gradient.')
return f(x + y + sample_weight)
def is_fitted(self):
"""
Return a boolean value indicating if the model is fitted or not.
In particular, check if the keras model has weights.
If it has, return True; otherwise return false.
:return: res
:rtype: `bool`
"""
try:
self.model.get_weights()
except Exception:
return False
return True
|
[
"keras.models.load_model",
"tensorflow.keras.models.model_from_json",
"ibmfl.exceptions.FLException",
"tensorflow.keras.models.load_model",
"tensorflow.Session",
"ibmfl.model.model_update.ModelUpdate",
"json.dumps",
"time.time",
"ibmfl.exceptions.LocalTrainingException",
"ibmfl.util.config.get_absolute_path",
"ibmfl.util.config.get_attr_from_path",
"keras.models.model_from_json",
"tensorflow.python.keras.backend.function",
"tensorflow.get_default_graph",
"logging.getLogger",
"tensorflow.python.keras.backend.set_session"
] |
[((701, 728), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (718, 728), False, 'import logging\n'), ((1498, 1520), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (1518, 1520), True, 'import tensorflow as tf\n'), ((1541, 1553), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1551, 1553), True, 'import tensorflow as tf\n'), ((1562, 1584), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (1573, 1584), False, 'from tensorflow.python.keras.backend import set_session\n'), ((9652, 9674), 'ibmfl.model.model_update.ModelUpdate', 'ModelUpdate', ([], {'weights': 'w'}), '(weights=w)\n', (9663, 9674), False, 'from ibmfl.model.model_update import ModelUpdate\n'), ((6779, 6801), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (6790, 6801), False, 'from tensorflow.python.keras.backend import set_session\n'), ((8144, 8288), 'ibmfl.exceptions.LocalTrainingException', 'LocalTrainingException', (['"""Variable steps_per_epoch cannot be None for generators not of type keras.utils.Sequence!"""'], {}), "(\n 'Variable steps_per_epoch cannot be None for generators not of type keras.utils.Sequence!'\n )\n", (8166, 8288), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((8349, 8371), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (8360, 8371), False, 'from tensorflow.python.keras.backend import set_session\n'), ((10269, 10291), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (10280, 10291), False, 'from tensorflow.python.keras.backend import set_session\n'), ((11619, 11641), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (11630, 11641), False, 'from tensorflow.python.keras.backend import set_session\n'), ((13078, 13200), 
'ibmfl.exceptions.LocalTrainingException', 'LocalTrainingException', (['"""Variable steps_per_epoch cannot be None for generator not of type keras.utils.Sequence"""'], {}), "(\n 'Variable steps_per_epoch cannot be None for generator not of type keras.utils.Sequence'\n )\n", (13100, 13200), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((16093, 16158), 'keras.models.load_model', 'keras.models.load_model', (['file_name'], {'custom_objects': 'custom_objects'}), '(file_name, custom_objects=custom_objects)\n', (16116, 16158), False, 'import keras\n'), ((16839, 16907), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['file_name'], {'custom_objects': 'custom_objects'}), '(file_name, custom_objects=custom_objects)\n', (16865, 16907), True, 'import tensorflow as tf\n'), ((17776, 17807), 'keras.models.model_from_json', 'keras.models.model_from_json', (['f'], {}), '(f)\n', (17804, 17807), False, 'import keras\n'), ((18630, 18664), 'tensorflow.keras.models.model_from_json', 'tf.keras.models.model_from_json', (['f'], {}), '(f)\n', (18661, 18664), True, 'import tensorflow as tf\n'), ((19454, 19490), 'ibmfl.util.config.get_absolute_path', 'config.get_absolute_path', (['model_file'], {}), '(model_file)\n', (19478, 19490), False, 'from ibmfl.util import config\n'), ((23647, 23792), 'ibmfl.exceptions.FLException', 'FLException', (['"""No information is provided for the new expanded model. Please provide the new dimension of the resulting expanded model."""'], {}), "(\n 'No information is provided for the new expanded model. 
Please provide the new dimension of the resulting expanded model.'\n )\n", (23658, 23792), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((25174, 25196), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (25185, 25196), False, 'from tensorflow.python.keras.backend import set_session\n'), ((25998, 26028), 'tensorflow.python.keras.backend.function', 'k.function', (['symb_inputs', 'grads'], {}), '(symb_inputs, grads)\n', (26008, 26028), True, 'from tensorflow.python.keras import backend as k\n'), ((5900, 5967), 'ibmfl.exceptions.LocalTrainingException', 'LocalTrainingException', (['"""Error occurred while performing model.fit"""'], {}), "('Error occurred while performing model.fit')\n", (5922, 5967), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((6513, 6524), 'time.time', 'time.time', ([], {}), '()\n', (6522, 6524), False, 'import time\n'), ((9008, 9030), 'tensorflow.python.keras.backend.set_session', 'set_session', (['self.sess'], {}), '(self.sess)\n', (9019, 9030), False, 'from tensorflow.python.keras.backend import set_session\n'), ((15526, 15584), 'ibmfl.exceptions.FLException', 'FLException', (['"""Unable to load the provided compiled model!"""'], {}), "('Unable to load the provided compiled model!')\n", (15537, 15584), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((24325, 24349), 'json.dumps', 'json.dumps', (['model_config'], {}), '(model_config)\n', (24335, 24349), False, 'import json\n'), ((24438, 24462), 'json.dumps', 'json.dumps', (['model_config'], {}), '(model_config)\n', (24448, 24462), False, 'import json\n'), ((14208, 14219), 'time.time', 'time.time', ([], {}), '()\n', (14217, 14219), False, 'import time\n'), ((19889, 19927), 'ibmfl.util.config.get_attr_from_path', 'config.get_attr_from_path', (['path', 'value'], {}), '(path, value)\n', (19914, 19927), False, 'from ibmfl.util import config\n'), ((20888, 
20946), 'ibmfl.exceptions.FLException', 'FLException', (['"""Unable to load the provided compiled model!"""'], {}), "('Unable to load the provided compiled model!')\n", (20899, 20946), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((25507, 25572), 'ibmfl.exceptions.FLException', 'FLException', (['"""Error occurred when defining gradient expression. """'], {}), "('Error occurred when defining gradient expression. ')\n", (25518, 25572), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((26289, 26378), 'ibmfl.exceptions.FLException', 'FLException', (['"""Error occurred when feeding data samples to compute current gradient."""'], {}), "(\n 'Error occurred when feeding data samples to compute current gradient.')\n", (26300, 26378), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n'), ((20704, 20762), 'ibmfl.exceptions.FLException', 'FLException', (['"""Unable to load the provided compiled model!"""'], {}), "('Unable to load the provided compiled model!')\n", (20715, 20762), False, 'from ibmfl.exceptions import FLException, LocalTrainingException\n')]
|
# !/usr/bin/python
from tornado import ioloop
async def cal(num):
print('cal called.')
x = await calculator(num)
print(x)
async def calculator(num):
try:
result = 0
for i in range(0, num):
result += i
# print(f'result is {result}')
raise Exception()
return result
except Exception:
pass
async def main():
await cal(100)
print('hh')
if __name__ == '__main__':
# ioloop.IOLoop.current().start()
# main()
ioloop.IOLoop.current().run_sync(main)
|
[
"tornado.ioloop.IOLoop.current"
] |
[((510, 533), 'tornado.ioloop.IOLoop.current', 'ioloop.IOLoop.current', ([], {}), '()\n', (531, 533), False, 'from tornado import ioloop\n')]
|
from pathlib import Path
import os
path = Path(os.getcwd())
npath = path.joinpath('zizi')
# print(path)
# print(npath.parts)
# print(npath.name)
# for idx, dirz in enumerate(path.iterdir()):
# print(idx, dirz)
#
for idx, file in enumerate(path.glob('*.zip')):
print(idx,file)
#
# pp = list(path.glob('*.py'))
# for line in pp[0].open():
# print(line)
|
[
"os.getcwd"
] |
[((48, 59), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (57, 59), False, 'import os\n')]
|
import unittest
import cv2
import numpy as np
from extractor.cropping import clip_to_image_region, \
crop_module, build_merged_index
quadrilaterals = {
('e3e70682-c209-4cac-a29f-6fbed82c07cd',
'frame_000000',
'mask_000000'): {
'quadrilateral': [
[424, 279],
[499, 280],
[499, 327],
[421, 323]
],
'center': (
460.95042812077514,
302.4197085774373
)
},
('f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'frame_000000',
'mask_000001'): {
'quadrilateral': [
[425, 326],
[499, 326],
[499, 377],
[425, 372]
],
'center': (
462.13331381447324,
350.2644805543356
)
},
('eb1167b3-67a9-4378-bc65-c1e582e2e662',
'frame_000000',
'mask_000002'): {
'quadrilateral': [
[164, 358],
[233, 363],
[233, 412],
[164, 408]
],
'center': (
198.48300673606857,
385.4114104919371
)
},
('f7c1bd87-4da5-4709-9471-3d60c8a70639',
'frame_000000',
'mask_000003'): {
'quadrilateral': [
[425, 234],
[497, 231],
[501, 279],
[421, 278]
],
'center': (
461.41970207121716,
255.7820630547903
)
},
('e443df78-9558-467f-9ba9-1faf7a024204',
'frame_000000',
'mask_000004'): {
'quadrilateral': [
[425, 94],
[498, 90],
[502, 136],
[425, 142]
],
'center': (
462.19730041647847,
115.55311355311355
)
}
}
class TestCropping(unittest.TestCase):
def test_clip_to_image_region_no_clip(self):
quad = np.array([
[[424, 279]],
[[499, 280]],
[[499, 327]],
[[421, 323]]
])
image_width = 640
image_height = 512
quad_clipped_gt = quad
quad_clipped = clip_to_image_region(
np.copy(quad), image_width, image_height)
self.assertTrue(
np.allclose(
quad_clipped,
quad_clipped_gt
)
)
def test_clip_to_image_region_clip_max(self):
quad = np.array([
[[424, 279]],
[[499, 280]],
[[499, 327]],
[[421, 323]]
])
image_width = 300
image_height = 200
quad_clipped_gt = np.array([
[[299, 199]],
[[299, 199]],
[[299, 199]],
[[299, 199]]
])
quad_clipped = clip_to_image_region(
np.copy(quad), image_width, image_height)
self.assertTrue(
np.allclose(
quad_clipped,
quad_clipped_gt
)
)
def test_clip_to_image_region_clip_min(self):
quad = np.array([
[[ -1, -1]],
[[100, -1]],
[[100, 100]],
[[ -1, 100]]
])
image_width = 200
image_height = 200
quad_clipped_gt = np.array([
[[ 0, 0]],
[[100, 0]],
[[100, 100]],
[[ 0, 100]]
])
quad_clipped = clip_to_image_region(
np.copy(quad), image_width, image_height)
self.assertTrue(
np.allclose(
quad_clipped,
quad_clipped_gt
)
)
def test_build_merged_index_merged_none(self):
merged_modules = None
merged_index_gt = {
'e3e70682-c209-4cac-a29f-6fbed82c07cd': 'e3e70682-c209-4cac-a29f-6fbed82c07cd',
'e443df78-9558-467f-9ba9-1faf7a024204': 'e443df78-9558-467f-9ba9-1faf7a024204',
'f7c1bd87-4da5-4709-9471-3d60c8a70639': 'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'eb1167b3-67a9-4378-bc65-c1e582e2e662': 'eb1167b3-67a9-4378-bc65-c1e582e2e662',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455'
}
merged_index = build_merged_index(merged_modules, quadrilaterals)
self.assertEqual(merged_index, merged_index_gt)
def test_build_merged_index_merged_empty(self):
merged_modules = []
merged_index_gt = {
'e3e70682-c209-4cac-a29f-6fbed82c07cd': 'e3e70682-c209-4cac-a29f-6fbed82c07cd',
'e443df78-9558-467f-9ba9-1faf7a024204': 'e443df78-9558-467f-9ba9-1faf7a024204',
'f7c1bd87-4da5-4709-9471-3d60c8a70639': 'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'eb1167b3-67a9-4378-bc65-c1e582e2e662': 'eb1167b3-67a9-4378-bc65-c1e582e2e662',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455'
}
merged_index = build_merged_index(merged_modules, quadrilaterals)
self.assertEqual(merged_index, merged_index_gt)
def test_build_merged_index_pair_merged(self):
merged_modules = [[
'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'f7c1bd87-4da5-4709-9471-3d60c8a70639'
]]
merged_index_gt = {
'e3e70682-c209-4cac-a29f-6fbed82c07cd': 'e3e70682-c209-4cac-a29f-6fbed82c07cd',
'e443df78-9558-467f-9ba9-1faf7a024204': 'e443df78-9558-467f-9ba9-1faf7a024204',
'f7c1bd87-4da5-4709-9471-3d60c8a70639': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'eb1167b3-67a9-4378-bc65-c1e582e2e662': 'eb1167b3-67a9-4378-bc65-c1e582e2e662',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455'
}
merged_index = build_merged_index(merged_modules, quadrilaterals)
self.assertEqual(merged_index, merged_index_gt)
def test_build_merged_index_triplet_merged(self):
merged_modules = [[
'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'e3e70682-c209-4cac-a29f-6fbed82c07cd'
]]
merged_index_gt = {
'e3e70682-c209-4cac-a29f-6fbed82c07cd': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'e443df78-9558-467f-9ba9-1faf7a024204': 'e443df78-9558-467f-9ba9-1faf7a024204',
'f7c1bd87-4da5-4709-9471-3d60c8a70639': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'eb1167b3-67a9-4378-bc65-c1e582e2e662': 'eb1167b3-67a9-4378-bc65-c1e582e2e662',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455'
}
merged_index = build_merged_index(merged_modules, quadrilaterals)
self.assertEqual(merged_index, merged_index_gt)
def test_build_merged_index_two_pairs_merged(self):
merged_modules = [
['f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'f7c1bd87-4da5-4709-9471-3d60c8a70639'],
['e3e70682-c209-4cac-a29f-6fbed82c07cd',
'e443df78-9558-467f-9ba9-1faf7a024204']
]
merged_index_gt = {
'e3e70682-c209-4cac-a29f-6fbed82c07cd': 'e3e70682-c209-4cac-a29f-6fbed82c07cd',
'e443df78-9558-467f-9ba9-1faf7a024204': 'e3e70682-c209-4cac-a29f-6fbed82c07cd',
'f7c1bd87-4da5-4709-9471-3d60c8a70639': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'eb1167b3-67a9-4378-bc65-c1e582e2e662': 'eb1167b3-67a9-4378-bc65-c1e582e2e662',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455': 'f728b4fa-4248-4e3a-8a5d-2f346baa9455'
}
merged_index = build_merged_index(merged_modules, quadrilaterals)
self.assertEqual(merged_index, merged_index_gt)
def test_build_merged_index_all_merged(self):
merged_modules = [[
'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455',
'e3e70682-c209-4cac-a29f-6fbed82c07cd',
'e443df78-9558-467f-9ba9-1faf7a024204',
'eb1167b3-67a9-4378-bc65-c1e582e2e662',
]]
merged_index_gt = {
'e3e70682-c209-4cac-a29f-6fbed82c07cd': 'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'e443df78-9558-467f-9ba9-1faf7a024204': 'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'f7c1bd87-4da5-4709-9471-3d60c8a70639': 'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'eb1167b3-67a9-4378-bc65-c1e582e2e662': 'f7c1bd87-4da5-4709-9471-3d60c8a70639',
'f728b4fa-4248-4e3a-8a5d-2f346baa9455': 'f7c1bd87-4da5-4709-9471-3d60c8a70639'
}
merged_index = build_merged_index(merged_modules, quadrilaterals)
self.assertEqual(merged_index, merged_index_gt)
def test_crop_modules_real_data(self):
frame_file = "tests/unit/data/frame_000000.tiff"
frame = cv2.imread(frame_file, cv2.IMREAD_ANYDEPTH)
quad = np.array([
[[424, 279]],
[[499, 280]],
[[499, 327]],
[[421, 323]]
])
patch_file = "tests/unit/data/frame_000000_mask_000000.tiff"
patch_gt = cv2.imread(patch_file, cv2.IMREAD_ANYDEPTH)
patch, _ = crop_module(
frame,
quad,
crop_width=None,
crop_aspect=None,
rotate_mode="portrait"
)
self.assertTrue(np.allclose(patch, patch_gt))
def test_crop_modules_crop_full_frame(self):
frame_file = "tests/unit/data/frame_000000.tiff"
frame = cv2.imread(frame_file, cv2.IMREAD_ANYDEPTH)
quad = np.array([
[[0, 0]],
[[640, 0]],
[[640, 512]],
[[0, 512]]
])
patch, homography = crop_module(
frame,
quad,
crop_width=None,
crop_aspect=None,
rotate_mode="landscape"
)
self.assertTrue(np.allclose(patch, frame[0:-1, 0:-1]))
self.assertTrue(np.allclose(homography, np.eye(3)))
def test_crop_modules_portrait_vs_landscape(self):
frame_file = "tests/unit/data/frame_000000.tiff"
frame = cv2.imread(frame_file, cv2.IMREAD_ANYDEPTH)
quad = np.array([
[[424, 279]],
[[499, 280]],
[[499, 327]],
[[421, 323]]
])
patch, _ = crop_module(
frame,
quad,
crop_width=None,
crop_aspect=None,
rotate_mode="portrait"
)
self.assertEqual(patch.shape, (78, 47))
patch, _ = crop_module(
frame,
quad,
crop_width=None,
crop_aspect=None,
rotate_mode="landscape"
)
self.assertEqual(patch.shape, (47, 78))
patch, _ = crop_module(
frame,
quad,
crop_width=None,
crop_aspect=None,
rotate_mode=None
)
self.assertEqual(patch.shape, (47, 78)) # ?
def test_crop_modules_crop_width_and_aspect(self):
frame_file = "tests/unit/data/frame_000000.tiff"
frame = cv2.imread(frame_file, cv2.IMREAD_ANYDEPTH)
quad = np.array([
[[424, 279]],
[[499, 280]],
[[499, 327]],
[[421, 323]]
])
patch, _ = crop_module(
frame,
quad,
crop_width=50,
crop_aspect=0.625, # 1/1.6
rotate_mode="portrait"
)
self.assertEqual(patch.shape, (50, 31))
patch, _ = crop_module(
frame,
quad,
crop_width=50,
crop_aspect=1,
rotate_mode="portrait"
)
self.assertEqual(patch.shape, (50, 50))
patch, _ = crop_module(
frame,
quad,
crop_width=50,
crop_aspect=0.625, # 1/1.6
rotate_mode="landscape"
)
self.assertEqual(patch.shape, (31, 50))
patch, _ = crop_module(
frame,
quad,
crop_width=300,
crop_aspect=0.625, # 1/1.6
rotate_mode="portrait"
)
self.assertEqual(patch.shape, (300, 187))
|
[
"numpy.copy",
"extractor.cropping.build_merged_index",
"numpy.allclose",
"extractor.cropping.crop_module",
"cv2.imread",
"numpy.array",
"numpy.eye"
] |
[((1901, 1967), 'numpy.array', 'np.array', (['[[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]]'], {}), '([[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]])\n', (1909, 1967), True, 'import numpy as np\n'), ((2458, 2524), 'numpy.array', 'np.array', (['[[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]]'], {}), '([[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]])\n', (2466, 2524), True, 'import numpy as np\n'), ((2665, 2731), 'numpy.array', 'np.array', (['[[[299, 199]], [[299, 199]], [[299, 199]], [[299, 199]]]'], {}), '([[[299, 199]], [[299, 199]], [[299, 199]], [[299, 199]]])\n', (2673, 2731), True, 'import numpy as np\n'), ((3138, 3200), 'numpy.array', 'np.array', (['[[[-1, -1]], [[100, -1]], [[100, 100]], [[-1, 100]]]'], {}), '([[[-1, -1]], [[100, -1]], [[100, 100]], [[-1, 100]]])\n', (3146, 3200), True, 'import numpy as np\n'), ((3345, 3403), 'numpy.array', 'np.array', (['[[[0, 0]], [[100, 0]], [[100, 100]], [[0, 100]]]'], {}), '([[[0, 0]], [[100, 0]], [[100, 100]], [[0, 100]]])\n', (3353, 3403), True, 'import numpy as np\n'), ((4346, 4396), 'extractor.cropping.build_merged_index', 'build_merged_index', (['merged_modules', 'quadrilaterals'], {}), '(merged_modules, quadrilaterals)\n', (4364, 4396), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((5067, 5117), 'extractor.cropping.build_merged_index', 'build_merged_index', (['merged_modules', 'quadrilaterals'], {}), '(merged_modules, quadrilaterals)\n', (5085, 5117), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((5906, 5956), 'extractor.cropping.build_merged_index', 'build_merged_index', (['merged_modules', 'quadrilaterals'], {}), '(merged_modules, quadrilaterals)\n', (5924, 5956), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((6792, 6842), 'extractor.cropping.build_merged_index', 'build_merged_index', (['merged_modules', 
'quadrilaterals'], {}), '(merged_modules, quadrilaterals)\n', (6810, 6842), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((7745, 7795), 'extractor.cropping.build_merged_index', 'build_merged_index', (['merged_modules', 'quadrilaterals'], {}), '(merged_modules, quadrilaterals)\n', (7763, 7795), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((8756, 8806), 'extractor.cropping.build_merged_index', 'build_merged_index', (['merged_modules', 'quadrilaterals'], {}), '(merged_modules, quadrilaterals)\n', (8774, 8806), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((8997, 9040), 'cv2.imread', 'cv2.imread', (['frame_file', 'cv2.IMREAD_ANYDEPTH'], {}), '(frame_file, cv2.IMREAD_ANYDEPTH)\n', (9007, 9040), False, 'import cv2\n'), ((9065, 9131), 'numpy.array', 'np.array', (['[[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]]'], {}), '([[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]])\n', (9073, 9131), True, 'import numpy as np\n'), ((9287, 9330), 'cv2.imread', 'cv2.imread', (['patch_file', 'cv2.IMREAD_ANYDEPTH'], {}), '(patch_file, cv2.IMREAD_ANYDEPTH)\n', (9297, 9330), False, 'import cv2\n'), ((9359, 9447), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': 'None', 'crop_aspect': 'None', 'rotate_mode': '"""portrait"""'}), "(frame, quad, crop_width=None, crop_aspect=None, rotate_mode=\n 'portrait')\n", (9370, 9447), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((9719, 9762), 'cv2.imread', 'cv2.imread', (['frame_file', 'cv2.IMREAD_ANYDEPTH'], {}), '(frame_file, cv2.IMREAD_ANYDEPTH)\n', (9729, 9762), False, 'import cv2\n'), ((9787, 9845), 'numpy.array', 'np.array', (['[[[0, 0]], [[640, 0]], [[640, 512]], [[0, 512]]]'], {}), '([[[0, 0]], [[640, 0]], [[640, 512]], [[0, 512]]])\n', (9795, 9845), True, 'import numpy as np\n'), 
((9941, 10030), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': 'None', 'crop_aspect': 'None', 'rotate_mode': '"""landscape"""'}), "(frame, quad, crop_width=None, crop_aspect=None, rotate_mode=\n 'landscape')\n", (9952, 10030), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((10377, 10420), 'cv2.imread', 'cv2.imread', (['frame_file', 'cv2.IMREAD_ANYDEPTH'], {}), '(frame_file, cv2.IMREAD_ANYDEPTH)\n', (10387, 10420), False, 'import cv2\n'), ((10445, 10511), 'numpy.array', 'np.array', (['[[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]]'], {}), '([[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]])\n', (10453, 10511), True, 'import numpy as np\n'), ((10598, 10686), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': 'None', 'crop_aspect': 'None', 'rotate_mode': '"""portrait"""'}), "(frame, quad, crop_width=None, crop_aspect=None, rotate_mode=\n 'portrait')\n", (10609, 10686), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((10840, 10929), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': 'None', 'crop_aspect': 'None', 'rotate_mode': '"""landscape"""'}), "(frame, quad, crop_width=None, crop_aspect=None, rotate_mode=\n 'landscape')\n", (10851, 10929), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((11083, 11160), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': 'None', 'crop_aspect': 'None', 'rotate_mode': 'None'}), '(frame, quad, crop_width=None, crop_aspect=None, rotate_mode=None)\n', (11094, 11160), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((11442, 11485), 'cv2.imread', 'cv2.imread', (['frame_file', 'cv2.IMREAD_ANYDEPTH'], {}), '(frame_file, cv2.IMREAD_ANYDEPTH)\n', (11452, 11485), False, 'import cv2\n'), 
((11510, 11576), 'numpy.array', 'np.array', (['[[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]]'], {}), '([[[424, 279]], [[499, 280]], [[499, 327]], [[421, 323]]])\n', (11518, 11576), True, 'import numpy as np\n'), ((11663, 11750), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': '(50)', 'crop_aspect': '(0.625)', 'rotate_mode': '"""portrait"""'}), "(frame, quad, crop_width=50, crop_aspect=0.625, rotate_mode=\n 'portrait')\n", (11674, 11750), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((11913, 11991), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': '(50)', 'crop_aspect': '(1)', 'rotate_mode': '"""portrait"""'}), "(frame, quad, crop_width=50, crop_aspect=1, rotate_mode='portrait')\n", (11924, 11991), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((12150, 12238), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': '(50)', 'crop_aspect': '(0.625)', 'rotate_mode': '"""landscape"""'}), "(frame, quad, crop_width=50, crop_aspect=0.625, rotate_mode=\n 'landscape')\n", (12161, 12238), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((12401, 12489), 'extractor.cropping.crop_module', 'crop_module', (['frame', 'quad'], {'crop_width': '(300)', 'crop_aspect': '(0.625)', 'rotate_mode': '"""portrait"""'}), "(frame, quad, crop_width=300, crop_aspect=0.625, rotate_mode=\n 'portrait')\n", (12412, 12489), False, 'from extractor.cropping import clip_to_image_region, crop_module, build_merged_index\n'), ((2179, 2192), 'numpy.copy', 'np.copy', (['quad'], {}), '(quad)\n', (2186, 2192), True, 'import numpy as np\n'), ((2267, 2309), 'numpy.allclose', 'np.allclose', (['quad_clipped', 'quad_clipped_gt'], {}), '(quad_clipped, quad_clipped_gt)\n', (2278, 2309), True, 'import numpy as np\n'), ((2859, 2872), 'numpy.copy', 'np.copy', 
(['quad'], {}), '(quad)\n', (2866, 2872), True, 'import numpy as np\n'), ((2947, 2989), 'numpy.allclose', 'np.allclose', (['quad_clipped', 'quad_clipped_gt'], {}), '(quad_clipped, quad_clipped_gt)\n', (2958, 2989), True, 'import numpy as np\n'), ((3539, 3552), 'numpy.copy', 'np.copy', (['quad'], {}), '(quad)\n', (3546, 3552), True, 'import numpy as np\n'), ((3627, 3669), 'numpy.allclose', 'np.allclose', (['quad_clipped', 'quad_clipped_gt'], {}), '(quad_clipped, quad_clipped_gt)\n', (3638, 3669), True, 'import numpy as np\n'), ((9549, 9577), 'numpy.allclose', 'np.allclose', (['patch', 'patch_gt'], {}), '(patch, patch_gt)\n', (9560, 9577), True, 'import numpy as np\n'), ((10132, 10169), 'numpy.allclose', 'np.allclose', (['patch', 'frame[0:-1, 0:-1]'], {}), '(patch, frame[0:-1, 0:-1])\n', (10143, 10169), True, 'import numpy as np\n'), ((10219, 10228), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (10225, 10228), True, 'import numpy as np\n')]
|
import graphene
from graphene import relay
from graphene_sqlalchemy import SQLAlchemyObjectType
from core.models import Category as CategoryModel
from core.models import CategoryConnector
category_connector = CategoryConnector()
class CategoryNode(SQLAlchemyObjectType):
class Meta:
model = CategoryModel
interfaces = (relay.Node,)
class CreateCategory(graphene.Mutation):
class Arguments:
# TODO: max-length constraint
name = graphene.String(required=True)
Output = CategoryNode
def mutate(self, _, name):
"""
:param _:
:param name:
:return:
"""
category = category_connector.database_helper.create_object(
category_connector.model, name=name
)
category_node = CategoryNode.get_node(_, category.id)
return category_node
class UpdateCategory(graphene.Mutation):
class Arguments:
# TODO: max-length constraint
primary_key = graphene.Int(required=True)
name = graphene.String(required=True)
Output = CategoryNode
def mutate(self, _, primary_key, name):
"""
:param _:
:param primary_key:
:param name:
:return:
"""
category = category_connector.database_helper.update_object(
category_connector.model, primary_key, name=name
)
category_node = CategoryNode.get_node(_, category.id)
return category_node
|
[
"core.models.CategoryConnector",
"graphene.Int",
"graphene.String"
] |
[((211, 230), 'core.models.CategoryConnector', 'CategoryConnector', ([], {}), '()\n', (228, 230), False, 'from core.models import CategoryConnector\n'), ((473, 503), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (488, 503), False, 'import graphene\n'), ((986, 1013), 'graphene.Int', 'graphene.Int', ([], {'required': '(True)'}), '(required=True)\n', (998, 1013), False, 'import graphene\n'), ((1029, 1059), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1044, 1059), False, 'import graphene\n')]
|
from dcim.models import Site, Rack, DeviceRole, DeviceType, Device, Platform
from ipam.models import IPAddress
from startup_script_utils import load_yaml
import sys
devices = load_yaml('/opt/netbox/initializers/dcim_devices.yml')
if devices is None:
sys.exit()
handled_attrs = [
'primary_ip4_id',
'primary_ip6_id'
]
for params in devices:
update = False
new_params = {}
for field in handled_attrs:
if field in params:
update = True
new_params[field] = params[field]
if len(new_params) == 0:
continue
if update:
Device.objects.filter(name=params['name']).update(**new_params)
print("🖥️ Updated device", params['name'])
|
[
"dcim.models.Device.objects.filter",
"startup_script_utils.load_yaml",
"sys.exit"
] |
[((176, 230), 'startup_script_utils.load_yaml', 'load_yaml', (['"""/opt/netbox/initializers/dcim_devices.yml"""'], {}), "('/opt/netbox/initializers/dcim_devices.yml')\n", (185, 230), False, 'from startup_script_utils import load_yaml\n'), ((254, 264), 'sys.exit', 'sys.exit', ([], {}), '()\n', (262, 264), False, 'import sys\n'), ((557, 599), 'dcim.models.Device.objects.filter', 'Device.objects.filter', ([], {'name': "params['name']"}), "(name=params['name'])\n", (578, 599), False, 'from dcim.models import Site, Rack, DeviceRole, DeviceType, Device, Platform\n')]
|
import sqlite3, ast
##############################################
### Login to database
##############################################
def login(dbfile):
conn = sqlite3.connect(dbfile) # create or open db file
curs = conn.cursor()
return conn, curs
##############################################
### Create new database
##############################################
def makedb(dbfile, table, columnFeatures):
#columnFeatures = input("eg: (Column1 char(30), Column2 char(10), Column3 int(4))")
conn, curs = login(dbfile)
try:
curs.execute('drop table ' + table)
print('Dropped table ' + table)
except:
print('database table did not exist')
command = 'create table %s %s' % (table, columnFeatures)
curs.execute(command)
conn.commit()
##############################################
### Load Data
##############################################
def loaddb(table, dbfile, datafile, conn=None, verbose=True):
conn, curs = login(dbfile)
file = open(datafile)
rows = [line.rstrip().split('\t') for line in file] # [[x,x,x], [x,x,x]]
rows = [str(tuple(rec)) for rec in rows[1:]] # ["(x,x,x)", "(x,x,x)"]
for recstr in rows:
curs.execute('insert into ' + table + ' values ' + recstr)
if conn:
conn.commit()
if verbose:
print(len(rows), 'rows loaded')
##############################################
### Remove a table from Database
##############################################
def cleardb(dbfile, table):
conn, curs = login(dbfile)
try:
curs.execute('drop table ' + table)
conn.commit()
print('Dropped table ', table)
except:
print(table, 'table did not exist, creating this table')
|
[
"sqlite3.connect"
] |
[((175, 198), 'sqlite3.connect', 'sqlite3.connect', (['dbfile'], {}), '(dbfile)\n', (190, 198), False, 'import sqlite3, ast\n')]
|
import os
import json
import boto3
ssm = boto3.client('ssm')
def query_association():
query_association_response = ssm.list_associations(
AssociationFilterList = [
{
"key": "AssociationName",
"value": "ssm-patch-portal-scan"
}
],
)
if len(query_association_response['Associations']) > 0:
return query_association_response['Associations'][0]
else:
return None
def handler(event, context):
api_response = {
"headers": {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Headers": "Origin, X-Requested-With, Content-Type, Accept"
}
}
try:
query_association_response = query_association()
if query_association_response is None:
response = None
else:
association = query_association_response
response = {
"associationId": association["AssociationId"] if "AssociationId" in association else None,
"lastExecutionDate": association["LastExecutionDate"].strftime("%Y-%m-%dT%H:%M:%SZ") if "LastExecutionDate" in association else None,
"overview": association["Overview"] if "Overview" in association else None,
}
api_response["statusCode"] = 200
api_response["body"] = json.dumps(response)
except Exception as e:
api_response["statusCode"] = 400
api_response["body"] = json.dumps({
"message": str(e)
})
finally:
return api_response
|
[
"boto3.client",
"json.dumps"
] |
[((43, 62), 'boto3.client', 'boto3.client', (['"""ssm"""'], {}), "('ssm')\n", (55, 62), False, 'import boto3\n'), ((1400, 1420), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (1410, 1420), False, 'import json\n')]
|
# Generated by Django 2.0.7 on 2018-07-09 08:15
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Heartbeat',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('app_name', models.CharField(max_length=50)),
('last_beat', models.DateTimeField()),
],
),
]
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.AutoField"
] |
[((305, 398), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (321, 398), False, 'from django.db import migrations, models\n'), ((426, 457), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (442, 457), False, 'from django.db import migrations, models\n'), ((490, 512), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (510, 512), False, 'from django.db import migrations, models\n')]
|
from django.db import models
import datetime as dt
# Create your models here.
class Category(models.Model):
category_name = models.CharField(max_length = 50)
# image = models.ForeignKey(Image)
def __str__(self):
return self.category_name
class Location(models.Model):
location_name = models.CharField(max_length = 50)
# image = models.ForeignKey(Image)
def __str__(self):
return self.location_name
def save_location(self):
self.save()
class Image(models.Model):
image = models.ImageField(upload_to = 'gallery/', blank = True)
img_name = models.CharField(max_length = 30)
img_description = models.TextField(max_length=50, blank=True)
pub_date = models.DateTimeField(auto_now_add=True)
location = models.ForeignKey(Location)
category = models.ForeignKey(Category)
def __str__(self):
return self.img_name
def save_image(self):
self.save()
def delete_image(self):
self.delete()
@classmethod
def get_image_by_id(cls, id):
specific_image = cls.objects.get(id = id)
return specific_image
@classmethod
def display_image(cls):
today = dt.date.today()
@classmethod
def get_all(cls):
images = cls.objects.order_by('-pub_date')
return images
@classmethod
def filter_location(cls, location):
images = cls.objects.filter(location__location_name__istartswith=location)
return images
@classmethod
def filter_category(cls, category):
images = cls.objects.filter(category__category_name__istartswith=category)
return images
@classmethod
def search_image(cls, search_term):
images = cls.objects.filter(img_name__icontains=search_term)
return images
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"datetime.date.today",
"django.db.models.ImageField",
"django.db.models.DateTimeField"
] |
[((131, 162), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (147, 162), False, 'from django.db import models\n'), ((313, 344), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (329, 344), False, 'from django.db import models\n'), ((534, 585), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""gallery/"""', 'blank': '(True)'}), "(upload_to='gallery/', blank=True)\n", (551, 585), False, 'from django.db import models\n'), ((605, 636), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (621, 636), False, 'from django.db import models\n'), ((661, 704), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(50)', 'blank': '(True)'}), '(max_length=50, blank=True)\n', (677, 704), False, 'from django.db import models\n'), ((720, 759), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (740, 759), False, 'from django.db import models\n'), ((775, 802), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Location'], {}), '(Location)\n', (792, 802), False, 'from django.db import models\n'), ((818, 845), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {}), '(Category)\n', (835, 845), False, 'from django.db import models\n'), ((1203, 1218), 'datetime.date.today', 'dt.date.today', ([], {}), '()\n', (1216, 1218), True, 'import datetime as dt\n')]
|