code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# Test file for enum checker
from enum import Enum
from six import with_metaclass
from azure.core import CaseInsensitiveEnumMeta
class EnumPython2(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ONE = "one"
TWO = "two"
| [
"six.with_metaclass"
] | [((148, 198), 'six.with_metaclass', 'with_metaclass', (['CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(CaseInsensitiveEnumMeta, str, Enum)\n', (162, 198), False, 'from six import with_metaclass\n')] |
from django import forms
gender = [('male', 'M'), ('female', 'F')]
response = [('1', 'yes'), ('0', 'no')]
time = [('1', 'one'), ('2', 'two'), ('3', 'three'), ('4', 'four')]
education = [('0', 'zero'),('1', 'zero'), ('2', 'two'), ('3', 'three'), ('4', 'four')]
rating = [('1', 'one'), ('2', 'two'), ('3', 'three'), ('4', 'four'), ('5', 'five')]
class ContactForm(forms.Form):
gender_choice = forms.CharField(label="Gender:", widget=forms.Select(choices=gender))
age = forms.CharField(max_length=254)
family_size = forms.CharField(max_length=254)
pstatus = forms.CharField(label="Health Status:", widget=forms.Select(choices=response))
mother_education = forms.CharField(label="Mother's Education:", widget=forms.Select(choices=education))
father_education = forms.CharField(label="Father's Education:", widget=forms.Select(choices=education))
travel_time_to_school = forms.CharField(label="Travel Time To School:", widget=forms.Select(choices=time))
study_time = forms.CharField(label="Study Time To School:", widget=forms.Select(choices=time))
failures = forms.CharField(max_length=254)
family_support = forms.CharField(label="Family Support:", widget=forms.Select(choices=response))
internet = forms.CharField(label="Internet:", widget=forms.Select(choices=response))
relationship = forms.CharField(label="Relationship:", widget=forms.Select(choices=response))
family_relationship_quality = forms.CharField(label="Family Relationship Quality:", widget=forms.Select(choices=rating))
time_after_school = forms.CharField(label="Free Time After School:", widget=forms.Select(choices=rating))
going_out = forms.CharField(label="Going Out With Friends:", widget=forms.Select(choices=rating))
health = forms.CharField(label="Health Status:", widget=forms.Select(choices=rating))
absences = forms.CharField(max_length=254)
| [
"django.forms.Select",
"django.forms.CharField"
] | [((477, 508), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (492, 508), False, 'from django import forms\n'), ((527, 558), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (542, 558), False, 'from django import forms\n'), ((1093, 1124), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (1108, 1124), False, 'from django import forms\n'), ((1854, 1885), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (1869, 1885), False, 'from django import forms\n'), ((437, 465), 'django.forms.Select', 'forms.Select', ([], {'choices': 'gender'}), '(choices=gender)\n', (449, 465), False, 'from django import forms\n'), ((620, 650), 'django.forms.Select', 'forms.Select', ([], {'choices': 'response'}), '(choices=response)\n', (632, 650), False, 'from django import forms\n'), ((727, 758), 'django.forms.Select', 'forms.Select', ([], {'choices': 'education'}), '(choices=education)\n', (739, 758), False, 'from django import forms\n'), ((835, 866), 'django.forms.Select', 'forms.Select', ([], {'choices': 'education'}), '(choices=education)\n', (847, 866), False, 'from django import forms\n'), ((951, 977), 'django.forms.Select', 'forms.Select', ([], {'choices': 'time'}), '(choices=time)\n', (963, 977), False, 'from django import forms\n'), ((1050, 1076), 'django.forms.Select', 'forms.Select', ([], {'choices': 'time'}), '(choices=time)\n', (1062, 1076), False, 'from django import forms\n'), ((1194, 1224), 'django.forms.Select', 'forms.Select', ([], {'choices': 'response'}), '(choices=response)\n', (1206, 1224), False, 'from django import forms\n'), ((1283, 1313), 'django.forms.Select', 'forms.Select', ([], {'choices': 'response'}), '(choices=response)\n', (1295, 1313), False, 'from django import forms\n'), ((1380, 1410), 'django.forms.Select', 'forms.Select', ([], {'choices': 'response'}), 
'(choices=response)\n', (1392, 1410), False, 'from django import forms\n'), ((1507, 1535), 'django.forms.Select', 'forms.Select', ([], {'choices': 'rating'}), '(choices=rating)\n', (1519, 1535), False, 'from django import forms\n'), ((1617, 1645), 'django.forms.Select', 'forms.Select', ([], {'choices': 'rating'}), '(choices=rating)\n', (1629, 1645), False, 'from django import forms\n'), ((1719, 1747), 'django.forms.Select', 'forms.Select', ([], {'choices': 'rating'}), '(choices=rating)\n', (1731, 1747), False, 'from django import forms\n'), ((1809, 1837), 'django.forms.Select', 'forms.Select', ([], {'choices': 'rating'}), '(choices=rating)\n', (1821, 1837), False, 'from django import forms\n')] |
'''
Main function to be called from GCE's cloud function
This function is in charge of adding training data to
the datastore for later generation of models and feature study
'''
import sys
import os
import time
import numpy as np
from google.cloud import datastore
from google.cloud import storage
from google.api_core import retry
from urllib3.exceptions import ProtocolError
sys.path.insert(0, 'imports')
from imports.video_asset_processor import VideoAssetProcessor
DATASTORE_CLIENT = datastore.Client()
STORAGE_CLIENT = storage.Client()
SOURCES_BUCKET = 'livepeer-verifier-originals'
RENDITIONS_BUCKET = 'livepeer-verifier-renditions'
ENTITY_NAME = 'features_input_60_540'
def download_to_local(bucket_name, local_folder, local_file, origin_blob_name):
"""
Downloads a file from the bucket.
"""
predicate = retry.if_exception_type(ConnectionResetError, ProtocolError)
reset_retry = retry.Retry(predicate)
bucket = STORAGE_CLIENT.get_bucket(bucket_name)
blob = bucket.blob('{}'.format(origin_blob_name))
# print('Downloading blob {} from bucket {}'.format(origin_blob_name, bucket_name))
# print('File download Started…. Wait for the job to complete.')
# Create this folder locally if not exists
if not os.path.exists(local_folder):
os.makedirs(local_folder)
local_path = '{}/{}'.format(local_folder, local_file)
# print('Downloading {} to {}'.format(origin_blob_name, local_path))
reset_retry(blob.download_to_filename(local_path))
# print('Downloaded {} to {}'.format(origin_blob_name, local_path))
def compute_metrics(asset, renditions):
'''
Function that instantiates the VideoAssetProcessor class with a list
of metrics to be computed.
The feature_list argument is left void as every descriptor of each
temporal metric is potentially used for model training
'''
start_time = time.time()
source_asset = asset
max_samples = 30
renditions_list = renditions
metrics_list = ['temporal_ssim',
'temporal_psnr',
'temporal_dct',
'temporal_gaussian_mse',
'temporal_gaussian_difference',
'temporal_threshold_gaussian_difference'
]
asset_processor = VideoAssetProcessor(source_asset,
renditions_list,
metrics_list,
False,
max_samples,
features_list=None)
metrics_df, _, _ = asset_processor.process()
for _, row in metrics_df.iterrows():
line = row.to_dict()
for column in metrics_df.columns:
if 'series' in column:
line[column] = np.array2string(np.around(line[column], decimals=5))
add_asset_input(DATASTORE_CLIENT, '{}/{}'.format(row['title'], row['attack']), line)
elapsed_time = time.time() - start_time
print('Computation time:', elapsed_time)
def add_asset_input(client, title, input_data):
"""
Function to add the asset's computed data to the database
"""
key = client.key(ENTITY_NAME, title, namespace='livepeer-verifier-QoE')
video = datastore.Entity(key)
video.update(input_data)
client.put(video)
def dataset_generator_http(request):
"""HTTP Cloud Function.
Args:
request (flask.Request): The request object, containing the name
of the source asset
Returns:
The response text, or any set of values that can be turned into a
Response object using `make_response`
"""
request_json = request.get_json(silent=True)
request_args = request.args
if request_json and 'name' in request_json:
source_name = request_json['name']
resolution_list = request_json['resolution_list'].split(',')
elif request_args and 'name' in request_args:
source_name = request_args['name']
resolution_list = request_args['resolution_list'].split(',')
print(resolution_list)
# Create the folder for the source asset
source_folder = '/tmp/1080p'
# if not os.path.exists(source_folder):
# os.makedirs(source_folder)
# Get the file that has been uploaded to GCS
asset_path = {'path': '{}/{}'.format(source_folder, source_name)}
renditions_paths = []
# Check if the source is not already in the path
if not os.path.exists(asset_path['path']):
download_to_local(SOURCES_BUCKET, source_folder, source_name, source_name)
#Bring the attacks to be processed locally
# resolution_list = ['1080p', '720p', '480p', '360p', '240p', '144p']
attack_names = ['watermark',
'watermark-345x114',
'watermark-856x856',
'vignette',
# 'rotate_90_clockwise',
'black_and_white',
'low_bitrate_4',
'low_bitrate_8']
# Create a comprehension list with all the possible attacks
attacks_list = ['{}_{}'.format(resolution, attack)
for resolution in resolution_list
for attack in attack_names
]
if '1080p' in resolution_list:
resolution_list.remove('1080p')
attacks_list += resolution_list
for attack in attacks_list:
remote_file = '{}/{}'.format(attack, source_name)
local_folder = '/tmp/{}'.format(attack)
try:
download_to_local(RENDITIONS_BUCKET,
local_folder,
source_name,
remote_file)
local_file = '{}/{}'.format(local_folder, source_name)
renditions_paths.append({'path': local_file})
except Exception as err:
print('Unable to download {}/{}: {}'.format(attack, source_name, err))
if len(renditions_paths) > 0:
print('Processing the following renditions: {}'.format(renditions_paths))
compute_metrics(asset_path, renditions_paths)
else:
print('Empty renditions list. No renditions to process')
# Cleanup
if os.path.exists(asset_path['path']):
os.remove(asset_path['path'])
for rendition in attacks_list:
rendition_folder = '/tmp/{}'.format(rendition)
local_path = '{}/{}'.format(rendition_folder, source_name)
if os.path.exists(local_path):
os.remove(local_path)
return 'Process completed: {}'.format(asset_path['path']) | [
"google.cloud.storage.Client",
"os.path.exists",
"sys.path.insert",
"os.makedirs",
"google.cloud.datastore.Entity",
"google.cloud.datastore.Client",
"imports.video_asset_processor.VideoAssetProcessor",
"numpy.around",
"time.time",
"google.api_core.retry.if_exception_type",
"google.api_core.retry... | [((381, 410), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""imports"""'], {}), "(0, 'imports')\n", (396, 410), False, 'import sys\n'), ((494, 512), 'google.cloud.datastore.Client', 'datastore.Client', ([], {}), '()\n', (510, 512), False, 'from google.cloud import datastore\n'), ((530, 546), 'google.cloud.storage.Client', 'storage.Client', ([], {}), '()\n', (544, 546), False, 'from google.cloud import storage\n'), ((836, 896), 'google.api_core.retry.if_exception_type', 'retry.if_exception_type', (['ConnectionResetError', 'ProtocolError'], {}), '(ConnectionResetError, ProtocolError)\n', (859, 896), False, 'from google.api_core import retry\n'), ((915, 937), 'google.api_core.retry.Retry', 'retry.Retry', (['predicate'], {}), '(predicate)\n', (926, 937), False, 'from google.api_core import retry\n'), ((1891, 1902), 'time.time', 'time.time', ([], {}), '()\n', (1900, 1902), False, 'import time\n'), ((2297, 2405), 'imports.video_asset_processor.VideoAssetProcessor', 'VideoAssetProcessor', (['source_asset', 'renditions_list', 'metrics_list', '(False)', 'max_samples'], {'features_list': 'None'}), '(source_asset, renditions_list, metrics_list, False,\n max_samples, features_list=None)\n', (2316, 2405), False, 'from imports.video_asset_processor import VideoAssetProcessor\n'), ((3294, 3315), 'google.cloud.datastore.Entity', 'datastore.Entity', (['key'], {}), '(key)\n', (3310, 3315), False, 'from google.cloud import datastore\n'), ((6237, 6271), 'os.path.exists', 'os.path.exists', (["asset_path['path']"], {}), "(asset_path['path'])\n", (6251, 6271), False, 'import os\n'), ((1260, 1288), 'os.path.exists', 'os.path.exists', (['local_folder'], {}), '(local_folder)\n', (1274, 1288), False, 'import os\n'), ((1298, 1323), 'os.makedirs', 'os.makedirs', (['local_folder'], {}), '(local_folder)\n', (1309, 1323), False, 'import os\n'), ((3007, 3018), 'time.time', 'time.time', ([], {}), '()\n', (3016, 3018), False, 'import time\n'), ((4490, 4524), 
'os.path.exists', 'os.path.exists', (["asset_path['path']"], {}), "(asset_path['path'])\n", (4504, 4524), False, 'import os\n'), ((6281, 6310), 'os.remove', 'os.remove', (["asset_path['path']"], {}), "(asset_path['path'])\n", (6290, 6310), False, 'import os\n'), ((6479, 6505), 'os.path.exists', 'os.path.exists', (['local_path'], {}), '(local_path)\n', (6493, 6505), False, 'import os\n'), ((6519, 6540), 'os.remove', 'os.remove', (['local_path'], {}), '(local_path)\n', (6528, 6540), False, 'import os\n'), ((2857, 2892), 'numpy.around', 'np.around', (['line[column]'], {'decimals': '(5)'}), '(line[column], decimals=5)\n', (2866, 2892), True, 'import numpy as np\n')] |
import Gerador_de_senhas.Defs as ge
import PySimpleGUI as sg
class Gerador:
sg.theme('DarkPurple1')
def __init__(self):
layout = [
[sg.Checkbox('Numeros', key='sonumeros'), sg.Text(size=(3, 1)), sg.Checkbox('Letras', key='soletras'),
sg.Text(size=(3, 1)), sg.Checkbox('Simbolos', key='sosimbolos')],
[sg.Text('Quantidade de senhas'), sg.Combo(values=list(range(30)), key='totalchars', default_value=1,
size=(3, 1))],
[sg.Text(size=(11, 3)), sg.Button('Gerar Senha')],
[sg.Text('Resultado:', size=(9, 1)), sg.Text(size=(22, 0))],
[sg.Output(size=(40, 5))]
]
self.janela = sg.Window("Gerador de Senha").layout(layout)
def iniciar(self):
while True:
self.evento, self.values = self.janela.read()
com_numeros = self.values['sonumeros']
com_letras = self.values['soletras']
com_simbolos = self.values['sosimbolos']
total_de_caracteres = self.values['totalchars']
if self.evento == sg.WINDOW_CLOSED:
break
if self.evento == 'Gerar Senha':
newsenha = ge.truefalse(com_numeros, com_simbolos, com_letras, total_de_caracteres)
print(newsenha)
tela = Gerador()
tela.iniciar() | [
"Gerador_de_senhas.Defs.truefalse",
"PySimpleGUI.Checkbox",
"PySimpleGUI.Text",
"PySimpleGUI.Button",
"PySimpleGUI.theme",
"PySimpleGUI.Output",
"PySimpleGUI.Window"
] | [((82, 105), 'PySimpleGUI.theme', 'sg.theme', (['"""DarkPurple1"""'], {}), "('DarkPurple1')\n", (90, 105), True, 'import PySimpleGUI as sg\n'), ((163, 202), 'PySimpleGUI.Checkbox', 'sg.Checkbox', (['"""Numeros"""'], {'key': '"""sonumeros"""'}), "('Numeros', key='sonumeros')\n", (174, 202), True, 'import PySimpleGUI as sg\n'), ((204, 224), 'PySimpleGUI.Text', 'sg.Text', ([], {'size': '(3, 1)'}), '(size=(3, 1))\n', (211, 224), True, 'import PySimpleGUI as sg\n'), ((226, 263), 'PySimpleGUI.Checkbox', 'sg.Checkbox', (['"""Letras"""'], {'key': '"""soletras"""'}), "('Letras', key='soletras')\n", (237, 263), True, 'import PySimpleGUI as sg\n'), ((277, 297), 'PySimpleGUI.Text', 'sg.Text', ([], {'size': '(3, 1)'}), '(size=(3, 1))\n', (284, 297), True, 'import PySimpleGUI as sg\n'), ((299, 340), 'PySimpleGUI.Checkbox', 'sg.Checkbox', (['"""Simbolos"""'], {'key': '"""sosimbolos"""'}), "('Simbolos', key='sosimbolos')\n", (310, 340), True, 'import PySimpleGUI as sg\n'), ((356, 387), 'PySimpleGUI.Text', 'sg.Text', (['"""Quantidade de senhas"""'], {}), "('Quantidade de senhas')\n", (363, 387), True, 'import PySimpleGUI as sg\n'), ((497, 518), 'PySimpleGUI.Text', 'sg.Text', ([], {'size': '(11, 3)'}), '(size=(11, 3))\n', (504, 518), True, 'import PySimpleGUI as sg\n'), ((520, 544), 'PySimpleGUI.Button', 'sg.Button', (['"""Gerar Senha"""'], {}), "('Gerar Senha')\n", (529, 544), True, 'import PySimpleGUI as sg\n'), ((560, 594), 'PySimpleGUI.Text', 'sg.Text', (['"""Resultado:"""'], {'size': '(9, 1)'}), "('Resultado:', size=(9, 1))\n", (567, 594), True, 'import PySimpleGUI as sg\n'), ((596, 617), 'PySimpleGUI.Text', 'sg.Text', ([], {'size': '(22, 0)'}), '(size=(22, 0))\n', (603, 617), True, 'import PySimpleGUI as sg\n'), ((633, 656), 'PySimpleGUI.Output', 'sg.Output', ([], {'size': '(40, 5)'}), '(size=(40, 5))\n', (642, 656), True, 'import PySimpleGUI as sg\n'), ((690, 719), 'PySimpleGUI.Window', 'sg.Window', (['"""Gerador de Senha"""'], {}), "('Gerador de Senha')\n", (699, 719), 
True, 'import PySimpleGUI as sg\n'), ((1192, 1264), 'Gerador_de_senhas.Defs.truefalse', 'ge.truefalse', (['com_numeros', 'com_simbolos', 'com_letras', 'total_de_caracteres'], {}), '(com_numeros, com_simbolos, com_letras, total_de_caracteres)\n', (1204, 1264), True, 'import Gerador_de_senhas.Defs as ge\n')] |
#!/usr/bin/env python3.5
import sys
import os
import logging
import numpy as np
import musm
from sklearn.utils import check_random_state
from textwrap import dedent
#1Social Choice
_LOG = musm.get_logger('adt17')
PROBLEMS = {
'synthetic': musm.Synthetic,
'pc': musm.PC,
}
USERS = {
'noiseless': musm.NoiselessUser,
'pl': musm.PlackettLuceUser,
}
def get_results_path(args):
properties = [
args['problem'], args['num_groups'], args['num_clusters_per_group'],
args['num_users_per_group'], args['max_iters'], args['set_size'],
args['pick'], args['transform'], args['tau'], args['lmbda'],
args['enable_cv'], args['min_regret'], args['distrib'],
args['density'], args['response_model'], args['noise'], args['seed'],
]
return os.path.join('results', '_'.join(map(str, properties)) + '.pickle')
def _sparsify(w, density, rng):
if not (0 < density <= 1):
raise ValueError('density must be in (0, 1], got {}'.format(density))
w = np.array(w, copy=True)
perm = rng.permutation(w.shape[1])
num_zeros = round((1 - density) * w.shape[1])
w[:,perm[:min(num_zeros, w.shape[1] - 1)]] = 0
return w
def sample_cluster(problem, num_users=5, distrib='normal', density=1, rng=0):
num_attributes = problem.num_attributes
if hasattr(problem, 'cost_matrix'):
num_attributes += problem.cost_matrix.shape[0]
if distrib == 'uniform':
w_mean = rng.uniform(0, 1, size=num_attributes)
elif distrib == 'normal':
w_mean = rng.uniform(-1, 1, size=num_attributes)
else:
raise ValueError('invalid distrib, got {}'.format(distrib))
if True: # XXX
w = w_mean + np.zeros((num_users, num_attributes))
else:
w = w_mean + rng.uniform(0, 25, size=(num_users, num_attributes))
return _sparsify(np.abs(w), density, rng)
def generate_user_groups(problem, args):
User = USERS[args['response_model']]
rng = check_random_state(0)
num_users_per_cluster = max(1, round(args['num_users_per_group'] /
args['num_clusters_per_group']))
user_groups = []
for gid in range(args['num_groups']):
w_star = []
for cid in range(1, args['num_clusters_per_group'] + 1):
if cid == args['num_clusters_per_group']:
num_users_in_cluster = args['num_users_per_group'] - len(w_star)
else:
num_users_in_cluster = num_users_per_cluster
temp = sample_cluster(problem,
num_users=num_users_in_cluster,
distrib=args['distrib'],
density=args['density'],
rng=rng)
ttemp = temp
if hasattr(problem, 'cost_matrix'):
num_costs = problem.cost_matrix.shape[0]
temp_bools = temp[:, :-num_costs]
temp_costs = temp[:, -num_costs:]
ttemp = temp_bools + np.dot(temp_costs, problem.cost_matrix)
_LOG.debug(dedent('''\
CLUSTER {cid}:
true user weights =
{temp}
true user weights transformed by cost matrix =
{ttemp}
''').format(**locals()))
if len(w_star) == 0:
w_star = ttemp
else:
w_star = np.append(w_star, ttemp, axis=0)
user_groups.append([User(problem,
w_star[uid],
min_regret=args['min_regret'],
noise=args['noise'],
rng=rng)
for uid in range(args['num_users_per_group'])])
return user_groups
def run(args):
problem = PROBLEMS[args['problem']]()
try:
user_groups = musm.load(args['groups'])
except:
user_groups = generate_user_groups(problem,
musm.subdict(args, nokeys={'problem'}))
if args['groups'] is not None:
musm.dump(args['groups'], user_groups)
rng = check_random_state(args['seed'])
traces = []
for gid in range(args['num_groups']):
traces.append(musm.musm(problem,
user_groups[gid],
gid,
set_size=args['set_size'],
max_iters=args['max_iters'],
enable_cv=args['enable_cv'],
pick=args['pick'],
transform=args['transform'],
tau=args['tau'],
lmbda=args['lmbda'],
rng=0))
musm.dump(get_results_path(args), {'args': args, 'traces': traces})
def main():
import argparse
np.seterr(all='raise')
np.set_printoptions(precision=2, linewidth=1000000)
fmt = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(formatter_class=fmt)
group = parser.add_argument_group('Experiment')
group.add_argument('problem', type=str,
help='the problem, any of {}'
.format(sorted(PROBLEMS.keys())))
group.add_argument('-N', '--num-groups', type=int, default=20,
help='number of user groups')
group.add_argument('-C', '--num-clusters-per-group', type=int, default=1,
help='number of clusters in a group')
group.add_argument('-M', '--num-users-per-group', type=int, default=5,
help='number of users in a group')
group.add_argument('-T', '--max-iters', type=int, default=100,
help='maximum number of elicitation iterations')
group.add_argument('-s', '--seed', type=int, default=0,
help='RNG seed')
group.add_argument('-v', '--verbose', action='store_true',
help='enable debug spew')
group = parser.add_argument_group('Algorithm')
group.add_argument('-K', '--set-size', type=int, default=2,
help='set size')
group.add_argument('-P', '--pick', type=str, default='maxvar',
help='critertion used for picking users')
group.add_argument('-F', '--transform', type=str, default='indep',
help='user-user transformation to use')
group.add_argument('-t', '--tau', type=float, default=0.25,
help='kernel inverse temperature parameter')
group.add_argument('-L', '--lmbda', type=float, default=0.5,
help='transform importance')
group.add_argument('-X', '--enable-cv', action='store_true',
help='enable hyperparameter cross-validation')
group = parser.add_argument_group('User Simulation')
group.add_argument('--min-regret', type=float, default=0,
help='minimum regret for satisfaction')
group.add_argument('-G', '--groups', type=str, default=None,
help='path to pickle with user weights')
group.add_argument('-u', '--distrib', type=str, default='normal',
help='distribution of user weights')
group.add_argument('-d', '--density', type=float, default=1,
help='proportion of non-zero user weights')
group.add_argument('-R', '--response-model', type=str, default='pl',
help='user response model for choice queries')
group.add_argument('-n', '--noise', type=float, default=1,
help='amount of user response noise')
args = parser.parse_args()
handlers = []
if args.verbose:
handlers.append(logging.StreamHandler(sys.stdout))
logging.basicConfig(level=logging.DEBUG, handlers=handlers,
format='%(levelname)-6s %(name)-6s %(funcName)-12s: %(message)s')
run(vars(args))
if __name__ == '__main__':
main()
| [
"logging.basicConfig",
"numpy.abs",
"textwrap.dedent",
"sklearn.utils.check_random_state",
"logging.StreamHandler",
"argparse.ArgumentParser",
"musm.musm",
"musm.dump",
"numpy.append",
"numpy.array",
"numpy.zeros",
"numpy.dot",
"musm.get_logger",
"numpy.seterr",
"musm.subdict",
"musm.l... | [((191, 215), 'musm.get_logger', 'musm.get_logger', (['"""adt17"""'], {}), "('adt17')\n", (206, 215), False, 'import musm\n'), ((1014, 1036), 'numpy.array', 'np.array', (['w'], {'copy': '(True)'}), '(w, copy=True)\n', (1022, 1036), True, 'import numpy as np\n'), ((1965, 1986), 'sklearn.utils.check_random_state', 'check_random_state', (['(0)'], {}), '(0)\n', (1983, 1986), False, 'from sklearn.utils import check_random_state\n'), ((4204, 4236), 'sklearn.utils.check_random_state', 'check_random_state', (["args['seed']"], {}), "(args['seed'])\n", (4222, 4236), False, 'from sklearn.utils import check_random_state\n'), ((4972, 4994), 'numpy.seterr', 'np.seterr', ([], {'all': '"""raise"""'}), "(all='raise')\n", (4981, 4994), True, 'import numpy as np\n'), ((4999, 5050), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(2)', 'linewidth': '(1000000)'}), '(precision=2, linewidth=1000000)\n', (5018, 5050), True, 'import numpy as np\n'), ((5114, 5158), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'fmt'}), '(formatter_class=fmt)\n', (5137, 5158), False, 'import argparse\n'), ((7898, 8028), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'handlers': 'handlers', 'format': '"""%(levelname)-6s %(name)-6s %(funcName)-12s: %(message)s"""'}), "(level=logging.DEBUG, handlers=handlers, format=\n '%(levelname)-6s %(name)-6s %(funcName)-12s: %(message)s')\n", (7917, 8028), False, 'import logging\n'), ((1845, 1854), 'numpy.abs', 'np.abs', (['w'], {}), '(w)\n', (1851, 1854), True, 'import numpy as np\n'), ((3930, 3955), 'musm.load', 'musm.load', (["args['groups']"], {}), "(args['groups'])\n", (3939, 3955), False, 'import musm\n'), ((1701, 1738), 'numpy.zeros', 'np.zeros', (['(num_users, num_attributes)'], {}), '((num_users, num_attributes))\n', (1709, 1738), True, 'import numpy as np\n'), ((4318, 4551), 'musm.musm', 'musm.musm', (['problem', 'user_groups[gid]', 'gid'], {'set_size': 
"args['set_size']", 'max_iters': "args['max_iters']", 'enable_cv': "args['enable_cv']", 'pick': "args['pick']", 'transform': "args['transform']", 'tau': "args['tau']", 'lmbda': "args['lmbda']", 'rng': '(0)'}), "(problem, user_groups[gid], gid, set_size=args['set_size'],\n max_iters=args['max_iters'], enable_cv=args['enable_cv'], pick=args[\n 'pick'], transform=args['transform'], tau=args['tau'], lmbda=args[\n 'lmbda'], rng=0)\n", (4327, 4551), False, 'import musm\n'), ((7859, 7892), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (7880, 7892), False, 'import logging\n'), ((3458, 3490), 'numpy.append', 'np.append', (['w_star', 'ttemp'], {'axis': '(0)'}), '(w_star, ttemp, axis=0)\n', (3467, 3490), True, 'import numpy as np\n'), ((4063, 4101), 'musm.subdict', 'musm.subdict', (['args'], {'nokeys': "{'problem'}"}), "(args, nokeys={'problem'})\n", (4075, 4101), False, 'import musm\n'), ((4154, 4192), 'musm.dump', 'musm.dump', (["args['groups']", 'user_groups'], {}), "(args['groups'], user_groups)\n", (4163, 4192), False, 'import musm\n'), ((3036, 3075), 'numpy.dot', 'np.dot', (['temp_costs', 'problem.cost_matrix'], {}), '(temp_costs, problem.cost_matrix)\n', (3042, 3075), True, 'import numpy as np\n'), ((3100, 3337), 'textwrap.dedent', 'dedent', (['""" CLUSTER {cid}:\n true user weights =\n {temp}\n true user weights transformed by cost matrix =\n {ttemp}\n """'], {}), '(\n """ CLUSTER {cid}:\n true user weights =\n {temp}\n true user weights transformed by cost matrix =\n {ttemp}\n """\n )\n', (3106, 3337), False, 'from textwrap import dedent\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2021, <NAME> and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.model.mapper import get_mapped_doc
class RentRequest(Document):
def validate(self):
frappe.msgprint("Hi")
def get_feed(self):
pass
@frappe.whitelist()
def make_request_for_rent_contract(source_name, target_doc=None):
# doc = frappe.get_doc(dt, dn)
# rc = frappe.new_doc("Rent Contract")
# rc.rent_request = doc
# #frappe.msgprint("Hi=={0}".format(doc.requster_name))
# return rc
mapped = get_mapped_doc("Rent Request", source_name, {
"Rent Request": {
"doctype": "Rent Contract",
}
}, target_doc)
#frappe.msgprint(mapped)
return mapped
| [
"frappe.model.mapper.get_mapped_doc",
"frappe.whitelist",
"frappe.msgprint"
] | [((370, 388), 'frappe.whitelist', 'frappe.whitelist', ([], {}), '()\n', (386, 388), False, 'import frappe\n'), ((633, 740), 'frappe.model.mapper.get_mapped_doc', 'get_mapped_doc', (['"""Rent Request"""', 'source_name', "{'Rent Request': {'doctype': 'Rent Contract'}}", 'target_doc'], {}), "('Rent Request', source_name, {'Rent Request': {'doctype':\n 'Rent Contract'}}, target_doc)\n", (647, 740), False, 'from frappe.model.mapper import get_mapped_doc\n'), ((318, 339), 'frappe.msgprint', 'frappe.msgprint', (['"""Hi"""'], {}), "('Hi')\n", (333, 339), False, 'import frappe\n')] |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 4 15:57:47 2017
@author: wangronin
"""
import pdb
import numpy as np
from deap import benchmarks
from GaussianProcess_old import GaussianProcess_extra as GaussianProcess
from BayesOpt import BayesOpt, RandomForest, RrandomForest
np.random.seed(1)
dim = 2
n_step = 20
n_init_sample = 10
obj_func = lambda x: benchmarks.himmelblau(x)[0]
lb = np.array([-6] * dim)
ub = np.array([6] * dim)
x1 = {'name' : "x1",
'type' : 'R',
'bounds': [lb[0], ub[0]]}
x2 = {'name' : "x2",
'type' : 'R',
'bounds': [lb[1], ub[1]]}
thetaL = 1e-3 * (ub - lb) * np.ones(dim)
thetaU = 10 * (ub - lb) * np.ones(dim)
theta0 = np.random.rand(dim) * (thetaU - thetaL) + thetaL
model = GaussianProcess(regr='constant', corr='matern',
theta0=theta0, thetaL=thetaL,
thetaU=thetaU, nugget=None,
nugget_estim=False, normalize=False,
verbose=False, random_start=15 * dim,
random_state=None, optimizer='BFGS')
# min_samples_leaf = max(1, int(n_init_sample / 20.))
# max_features = int(np.ceil(dim * 5 / 6.))
# model = RandomForest(n_estimators=100,
# max_features=max_features,
# min_samples_leaf=min_samples_leaf)
# model = RrandomForest()
search_space = [x1, x2]
opt = BayesOpt(search_space, obj_func, model, max_iter=n_step, random_seed=None,
n_init_sample=n_init_sample, minimize=True, verbose=False, debug=True,
optimizer='MIES')
opt.run()
| [
"numpy.ones",
"numpy.random.rand",
"GaussianProcess_old.GaussianProcess_extra",
"deap.benchmarks.himmelblau",
"numpy.array",
"numpy.random.seed",
"BayesOpt.BayesOpt"
] | [((304, 321), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (318, 321), True, 'import numpy as np\n'), ((416, 436), 'numpy.array', 'np.array', (['([-6] * dim)'], {}), '([-6] * dim)\n', (424, 436), True, 'import numpy as np\n'), ((442, 461), 'numpy.array', 'np.array', (['([6] * dim)'], {}), '([6] * dim)\n', (450, 461), True, 'import numpy as np\n'), ((758, 984), 'GaussianProcess_old.GaussianProcess_extra', 'GaussianProcess', ([], {'regr': '"""constant"""', 'corr': '"""matern"""', 'theta0': 'theta0', 'thetaL': 'thetaL', 'thetaU': 'thetaU', 'nugget': 'None', 'nugget_estim': '(False)', 'normalize': '(False)', 'verbose': '(False)', 'random_start': '(15 * dim)', 'random_state': 'None', 'optimizer': '"""BFGS"""'}), "(regr='constant', corr='matern', theta0=theta0, thetaL=\n thetaL, thetaU=thetaU, nugget=None, nugget_estim=False, normalize=False,\n verbose=False, random_start=15 * dim, random_state=None, optimizer='BFGS')\n", (773, 984), True, 'from GaussianProcess_old import GaussianProcess_extra as GaussianProcess\n'), ((1402, 1573), 'BayesOpt.BayesOpt', 'BayesOpt', (['search_space', 'obj_func', 'model'], {'max_iter': 'n_step', 'random_seed': 'None', 'n_init_sample': 'n_init_sample', 'minimize': '(True)', 'verbose': '(False)', 'debug': '(True)', 'optimizer': '"""MIES"""'}), "(search_space, obj_func, model, max_iter=n_step, random_seed=None,\n n_init_sample=n_init_sample, minimize=True, verbose=False, debug=True,\n optimizer='MIES')\n", (1410, 1573), False, 'from BayesOpt import BayesOpt, RandomForest, RrandomForest\n'), ((639, 651), 'numpy.ones', 'np.ones', (['dim'], {}), '(dim)\n', (646, 651), True, 'import numpy as np\n'), ((678, 690), 'numpy.ones', 'np.ones', (['dim'], {}), '(dim)\n', (685, 690), True, 'import numpy as np\n'), ((383, 407), 'deap.benchmarks.himmelblau', 'benchmarks.himmelblau', (['x'], {}), '(x)\n', (404, 407), False, 'from deap import benchmarks\n'), ((700, 719), 'numpy.random.rand', 'np.random.rand', (['dim'], {}), '(dim)\n', 
(714, 719), True, 'import numpy as np\n')] |
import unittest
import sys
sys.path.append("../")
from store.store import Store
from store.neo4jstore import Neo4jStore
from store.sqlitestore import SqliteStore
from neo4j.v1 import GraphDatabase, basic_auth
#TODO fix tests
'''
class TestStore(unittest.TestCase):
def setUp(self):
self.store = Neo4jStore()
self.pages = [
{
'pageid': 1,
'title': 'one',
'fullurl' : 'https://wiki.com/one'
},
{
'pageid': 2,
'title': 'two',
'fullurl' : 'https://wiki.com/two'
},
]
self.pages_dist_1 = [
{
'pageid': 3,
'title': 'three',
'fullurl' : 'https://wiki.com/three'
},
{
'pageid': 4,
'title': 'four',
'fullurl' : 'https://wiki.com/four'
},
]
self.pages_dist_2 = [
{
'pageid': 5,
'title': 'five',
'fullurl' : 'https://wiki.com/five'
},
{
'pageid': 6,
'title': 'six',
'fullurl' : 'https://wiki.com/six'
},
]
def tearDown(self):
pass
def test_save_pages(self):
self.store.save_pages(self.pages)
for page in self.pages:
read_page = self.store.get_page_from_id(page['pageid'])
self.assertEqual(read_page, page)
def test_save_and_get_page_links(self):
self.store.save_pages(self.pages)
self.store.save_page_links(1, self.pages_dist_1)
self.store.save_page_links(3, self.pages_dist_2)
read_page_links = self.store.get_page_links(1)
self.assertEqual(2, len(read_page_links))
page_3 = filter(lambda p: p['pageid'] == 3, read_page_links)[0]
page_4 = filter(lambda p: p['pageid'] == 4, read_page_links)[0]
self.assertEqual(self.pages_dist_1[0], page_3)
self.assertEqual(self.pages_dist_1[1], page_4)
read_page_links2 = self.store.get_page_links(3)
self.assertEqual(2, len(read_page_links2))
page_5 = filter(lambda p: p['pageid'] == 5, read_page_links2)[0]
page_6 = filter(lambda p: p['pageid'] == 6, read_page_links2)[0]
self.assertEqual(self.pages_dist_2[0], page_5)
self.assertEqual(self.pages_dist_2[1], page_6)
self.assertEqual([], self.store.get_page_links(2))
self.assertEqual([], self.store.get_page_links(4))
self.assertEqual([], self.store.get_page_links(5))
self.assertEqual([], self.store.get_page_links(6))
def test_get_page_from_url_title(self):
self.store.save_pages(self.pages)
page1 = self.store.get_page_from_url_title('one')
self.assertEqual(1, len(page1))
self.assertEqual(self.pages[0], page1[0])
'''
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"sys.path.append"
] | [((27, 49), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (42, 49), False, 'import sys\n'), ((2662, 2677), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2675, 2677), False, 'import unittest\n')] |
from loom.tables import InputTable, DerivedTable
from loom.fields import RawField, DerivedField
t1 = InputTable("main", "table_1", "Table 1", "t1")
RawField(t1, "id", "Id")
RawField(t1, "value", "Value")
RawField(t1, "income", "Customer Income")
t3 = InputTable("main", "table_1", "Table 1", "t3")
RawField(t3, "id", "Id")
RawField(t3, "value", "Value")
RawField(t3, "income", "Customer Income")
t2 = DerivedTable("main", "table_2", "Table 2", "t2", [t1, t3])
DerivedField(t2, "id", "Id", "{t1.id}"),
DerivedField(t2, "abs_value", "Absolute Value", "abs({t1.id})")
DerivedField(t2, "div_income", "", "{t3.value}/{t1.income}")
print(t2.get_source("t1"))
print(t2.div_income.get_sources())
print(t2.get_select()) | [
"loom.fields.DerivedField",
"loom.tables.DerivedTable",
"loom.fields.RawField",
"loom.tables.InputTable"
] | [((105, 151), 'loom.tables.InputTable', 'InputTable', (['"""main"""', '"""table_1"""', '"""Table 1"""', '"""t1"""'], {}), "('main', 'table_1', 'Table 1', 't1')\n", (115, 151), False, 'from loom.tables import InputTable, DerivedTable\n'), ((153, 177), 'loom.fields.RawField', 'RawField', (['t1', '"""id"""', '"""Id"""'], {}), "(t1, 'id', 'Id')\n", (161, 177), False, 'from loom.fields import RawField, DerivedField\n'), ((179, 209), 'loom.fields.RawField', 'RawField', (['t1', '"""value"""', '"""Value"""'], {}), "(t1, 'value', 'Value')\n", (187, 209), False, 'from loom.fields import RawField, DerivedField\n'), ((211, 252), 'loom.fields.RawField', 'RawField', (['t1', '"""income"""', '"""Customer Income"""'], {}), "(t1, 'income', 'Customer Income')\n", (219, 252), False, 'from loom.fields import RawField, DerivedField\n'), ((261, 307), 'loom.tables.InputTable', 'InputTable', (['"""main"""', '"""table_1"""', '"""Table 1"""', '"""t3"""'], {}), "('main', 'table_1', 'Table 1', 't3')\n", (271, 307), False, 'from loom.tables import InputTable, DerivedTable\n'), ((309, 333), 'loom.fields.RawField', 'RawField', (['t3', '"""id"""', '"""Id"""'], {}), "(t3, 'id', 'Id')\n", (317, 333), False, 'from loom.fields import RawField, DerivedField\n'), ((335, 365), 'loom.fields.RawField', 'RawField', (['t3', '"""value"""', '"""Value"""'], {}), "(t3, 'value', 'Value')\n", (343, 365), False, 'from loom.fields import RawField, DerivedField\n'), ((367, 408), 'loom.fields.RawField', 'RawField', (['t3', '"""income"""', '"""Customer Income"""'], {}), "(t3, 'income', 'Customer Income')\n", (375, 408), False, 'from loom.fields import RawField, DerivedField\n'), ((417, 475), 'loom.tables.DerivedTable', 'DerivedTable', (['"""main"""', '"""table_2"""', '"""Table 2"""', '"""t2"""', '[t1, t3]'], {}), "('main', 'table_2', 'Table 2', 't2', [t1, t3])\n", (429, 475), False, 'from loom.tables import InputTable, DerivedTable\n'), ((519, 582), 'loom.fields.DerivedField', 'DerivedField', (['t2', 
'"""abs_value"""', '"""Absolute Value"""', '"""abs({t1.id})"""'], {}), "(t2, 'abs_value', 'Absolute Value', 'abs({t1.id})')\n", (531, 582), False, 'from loom.fields import RawField, DerivedField\n'), ((584, 644), 'loom.fields.DerivedField', 'DerivedField', (['t2', '"""div_income"""', '""""""', '"""{t3.value}/{t1.income}"""'], {}), "(t2, 'div_income', '', '{t3.value}/{t1.income}')\n", (596, 644), False, 'from loom.fields import RawField, DerivedField\n'), ((477, 516), 'loom.fields.DerivedField', 'DerivedField', (['t2', '"""id"""', '"""Id"""', '"""{t1.id}"""'], {}), "(t2, 'id', 'Id', '{t1.id}')\n", (489, 516), False, 'from loom.fields import RawField, DerivedField\n')] |
# Generated by Django 2.0.2 on 2018-03-12 11:45
import datetime
import django.contrib.gis.db.models.fields
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('events', '0004_auto_20180309_1804'),
]
operations = [
migrations.AddField(
model_name='event',
name='location',
field=django.contrib.gis.db.models.fields.PointField(null=True, srid=4326),
),
migrations.AlterField(
model_name='event',
name='timestamp',
field=models.DateTimeField(default=datetime.datetime(2018, 3, 12, 11, 45, 2, 775370, tzinfo=utc)),
),
]
| [
"datetime.datetime"
] | [((652, 713), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(3)', '(12)', '(11)', '(45)', '(2)', '(775370)'], {'tzinfo': 'utc'}), '(2018, 3, 12, 11, 45, 2, 775370, tzinfo=utc)\n', (669, 713), False, 'import datetime\n')] |
#NOTE: for this script to work you have to first sign-in on Whatsapp web using QR code
from splinter import Browser
browser = Browser()
browser.visit('https://web.whatsapp.com/')
input('press enter to continue') #to make sure page is completely loaded
count=20;
friend_list=["friend 1","friend 2","friend 3"] #Whatsapp names of friends
for friend in friend_list:
xp='//span[contains(text(),'+friend+')]'
chat = browser.find_element_by_path(xp)
chat.click()
elem1 = browser.find_elements_by_class_name('input')
for i in range(1,count):
elem1[1].send_keys('Whatsapp has expired') #text to send
browser.find_element_by_class_name('send-container').click()
| [
"splinter.Browser"
] | [((128, 137), 'splinter.Browser', 'Browser', ([], {}), '()\n', (135, 137), False, 'from splinter import Browser\n')] |
import requests
from common import *
def send_request(url):
try:
res = requests.get(url)
res.raise_for_status()
except Exception as exc:
print('Generated an exception: %s' % exc)
return ERR_WEB_ERROR, exc
return ERR_SUCCESS, res.text
| [
"requests.get"
] | [((86, 103), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (98, 103), False, 'import requests\n')] |
import xarray as xr
from .basic import zonal_mean, zonal_wave_coeffs, zonal_wave_covariance
def _print_if_true(msg, condition, **kwargs):
r"""Simple utility function to print only if the given condition is True.
Parameters
----------
msg : string
The message to print
condition : bool
The boolean that determines whether
anything is actually printed.
"""
if (condition is True):
print(msg, **kwargs)
return
def create_zonal_mean_dataset(ds, verbose=False, include_waves=False,
waves=None, fftpkg='scipy'):
r"""Compiles a "zonal mean dataset".
Given an xarray dataset containing full fields of basic state
variables such as velocity components and temperatures, this
function will compute as many zonal mean diagnostics as possible.
Parameters
----------
ds : `xarray.Dataset`
Dataset containing full fields (i.e., containing latitude &
longitude dimensions) of basic state variables. This function
currently assumes specific names and units:
'u' = zonal wind component in m/s
'v' = meridional wind component in m/s
'w' = vertical pressure velocity in Pa/s
'T' = temperature in K
'Z' = geopotential height in m
If your data names, dimensions, and/or units do not conform to these
restrictions, please change beforehand. Dimensions and names can
easily be changed with the `rename` method of xarray Datasets/DataArrays.
Note that ds need not contain all of these variables, and this
function will still provide as many diagnostics as possible.
verbose : bool, optional
Whether to print out progress information as the function proceeds.
Defaults to False.
include_waves : bool, optional
Whether to include possible longitudinal wave diagnostics such as
eddy covariances and fourier coefficients. Defaults to False.
waves : array-like, optional
The specific zonal wavenumbers to maintain in the output. This
kwarg is only considered if include_waves is True.
fftpkg : string, optional
String that specifies how to perform the FFT on the data. Options are
'scipy' or 'xrft'. Specifying scipy uses some operations that are memory-eager
and leverages scipy.fft.rfft. Specifying xrft should leverage the benefits
of xarray/dask for large datasets by using xrft.fft. Defaults to scipy.
This kwarg is only considered if include_waves is True.
Returns
-------
`xarray.Dataset`
An xarray Dataset containing the possible zonal mean diagnostics.
Notes
-----
Please see https://essd.copernicus.org/articles/10/1925/2018/ for
a description of a different zonal mean dataset compiled for the
SPARC Reanalysis Intercomparison Project. This function does *not*
provide all the same diagnostics as listed in that publication.
However, if this function is provided with all of u, v, w, and T,
it will return all terms necessary from which further diagnostics
can be computed to, for instance, perform zonal Eulerian and
Transformed Eulerian Mean momentum budgets.
"""
all_vars = ['u', 'v', 'w', 'T', 'Z']
cov_pairs = [('u', 'v'), ('v', 'T'), ('u', 'w'), ('w', 'T')]
wave_coeffs = ['T', 'Z']
long_names = {
'u': 'Zonal Mean Zonal Wind',
'v': 'Zonal Mean Meridional Wind',
'w': 'Zonal Mean Vertical Pressure Velocity',
'T': 'Zonal Mean Temperature',
'Z': 'Zonal Mean Geopotential Height',
'uv': 'Total Eddy Momentum Flux',
'vT': 'Total Eddy Heat Flux',
'uw': 'Total Eddy Vertical Momentum Flux',
'wT': 'Total Eddy Vertical Heat Flux',
'uv_k': 'Eddy Momentum Flux due to Zonal Wave-k',
'vT_k': 'Eddy Heat Flux due to Zonal Wave-k',
'uw_k': 'Eddy Vertical Momentum Flux due to Zonal Wave-k',
'wT_k': 'Eddy Vertical Heat Flux due to Zonal Wave-k',
'Z_k_real': 'Real part of Fourier coefficients of Zonal Geohgt Waves',
'Z_k_imag': 'Imaginary part of Fourier coefficients of Zonal Geohgt Waves',
'T_k_real': 'Real part of Fourier coefficients of Zonal Temperature Waves',
'T_k_imag': 'Imaginary part of Fourier coefficients of Zonal Temperature Waves'
}
units = {
'u': 'm s-1',
'v': 'm s-1',
'w': 'Pa s-1',
'T': 'K',
'Z': 'm',
'uv': 'm+2 s-2',
'vT': 'K m s-1',
'uw': 'm Pa s-2',
'wT': 'K Pa s-1',
'uv_k': 'm+2 s-2',
'vT_k': 'K m s-1',
'uw_k': 'm Pa s-2',
'wT_k': 'K Pa s-1',
'Z_k_real': 'm',
'Z_k_imag': 'm',
'T_k_real': 'K',
'T_k_imag': 'K'
}
inter = {}
_print_if_true('*** Compiling zonal means and eddies', verbose)
for var in all_vars:
if (var in ds.variables):
_print_if_true(f' {var}', verbose)
zm = zonal_mean(ds[var])
ed = ds[var] - zm
inter[f'{var}'] = zm
inter[f'{var}ed'] = ed
out_coords = inter[f'{var}'].coords
_print_if_true('*** Compiling zonal covariances', verbose)
for var1, var2 in cov_pairs:
if (var1 in ds.variables) and (var2 in ds.variables):
_print_if_true(f' {var1}{var2}', verbose)
cov = zonal_mean(inter[f'{var1}ed'] * inter[f'{var2}ed'])
inter[f'{var1}{var2}'] = cov
if include_waves is True:
_print_if_true('*** Compiling zonal wave covariances', verbose)
for var1, var2 in cov_pairs:
if (var1 in ds.variables) and (var2 in ds.variables):
_print_if_true(f' {var1}{var2}', verbose)
cov = zonal_wave_covariance(ds[var1], ds[var2], waves=waves, fftpkg=fftpkg)
inter[f'{var1}{var2}_k'] = cov
out_coords = inter[f'{var1}{var2}_k'].coords
_print_if_true('*** Compiling zonal wave Fourier coefficients', verbose)
for var in wave_coeffs:
if (var in ds.variables):
_print_if_true(f' {var}', verbose)
fc = zonal_wave_coeffs(ds[var], waves=waves, fftpkg=fftpkg)
inter[f'{var}_k_real'] = fc.real
inter[f'{var}_k_imag'] = fc.imag
out_coords = inter[f'{var}_k_real'].coords
# Remove the eddy fields
out_vars = list(inter.keys())
for var in out_vars:
if 'ed' in var:
inter.pop(var)
# Ascribe names and long_name attributes to each DataArray
# and create the encoding dictionary to use
out_vars = inter.keys()
for var in out_vars:
inter[var].name = var
inter[var].attrs['long_name'] = long_names[var]
inter[var].attrs['units'] = units[var]
out_ds = xr.Dataset(inter, coords=out_coords)
out_ds.attrs['nlons'] = ds.longitude.size
return out_ds
| [
"xarray.Dataset"
] | [((6901, 6937), 'xarray.Dataset', 'xr.Dataset', (['inter'], {'coords': 'out_coords'}), '(inter, coords=out_coords)\n', (6911, 6937), True, 'import xarray as xr\n')] |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2022 Graz University of Technology.
#
# invenio-records-lom is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""OAI-PMH serializers for LOM-records."""
from flask import current_app, g
from invenio_pidstore.errors import PIDDoesNotExistError
from invenio_pidstore.models import PersistentIdentifier
from invenio_records_resources.services.errors import PermissionDeniedError
from .proxies import current_records_lom
from .resources.serializers import LOMToLOMXMLSerializer
def lom_etree(pid, record): # pylint: disable=unused-argument
"""Get LOM XML for OAI-PMH."""
return LOMToLOMXMLSerializer(
metadata=record["_source"]["metadata"],
lom_id=record["_source"]["id"],
oaiserver_id_prefix=current_app.config.get("OAISERVER_ID_PREFIX"),
doi=record["_source"]["pids"]["doi"]["identifier"],
).serialize_object_xml()
def getrecord_fetcher(record_id):
"""Fetch record data as dict with identity check for serialization."""
lomid = PersistentIdentifier.get_by_object(
pid_type="lomid", object_uuid=record_id, object_type="rec"
)
try:
result = current_records_lom.records_service.read(g.identity, lomid.pid_value)
except PermissionDeniedError as error:
# if it is a restricted record.
raise PIDDoesNotExistError("lomid", None) from error
return result.to_dict()
| [
"invenio_pidstore.errors.PIDDoesNotExistError",
"invenio_pidstore.models.PersistentIdentifier.get_by_object",
"flask.current_app.config.get"
] | [((1110, 1208), 'invenio_pidstore.models.PersistentIdentifier.get_by_object', 'PersistentIdentifier.get_by_object', ([], {'pid_type': '"""lomid"""', 'object_uuid': 'record_id', 'object_type': '"""rec"""'}), "(pid_type='lomid', object_uuid=record_id,\n object_type='rec')\n", (1144, 1208), False, 'from invenio_pidstore.models import PersistentIdentifier\n'), ((1413, 1448), 'invenio_pidstore.errors.PIDDoesNotExistError', 'PIDDoesNotExistError', (['"""lomid"""', 'None'], {}), "('lomid', None)\n", (1433, 1448), False, 'from invenio_pidstore.errors import PIDDoesNotExistError\n'), ((851, 896), 'flask.current_app.config.get', 'current_app.config.get', (['"""OAISERVER_ID_PREFIX"""'], {}), "('OAISERVER_ID_PREFIX')\n", (873, 896), False, 'from flask import current_app, g\n')] |
import numpy as np
from compmech.stiffpanelbay import StiffPanelBay
from compmech.analysis import freq, lb
def test_freq_models():
print('Testing frequency analysis for StiffPanelBay with 2 plates')
# From Table 4 of
# Lee and Lee. "Vibration analysis of anisotropic plates with eccentric
# stiffeners". Computers & Structures, Vol. 57, No. 1, pp. 99-105,
# 1995.
for model in ['plate_clt_donnell_bardell',
'cpanel_clt_donnell_bardell',
'kpanel_clt_donnell_bardell']:
spb = StiffPanelBay()
spb.a = 0.5
spb.b = 0.250
spb.plyt = 0.00013
spb.laminaprop = (128.e9, 11.e9, 0.25, 4.48e9, 1.53e9, 1.53e9)
spb.stack = [0, -45, +45, 90, 90, +45, -45, 0]
spb.model = model
spb.r = 1.e6
spb.alphadeg = 0.
spb.mu = 1.5e3
spb.m = 9
spb.n = 10
# clamping
spb.w1rx = 0.
spb.w2rx = 0.
spb.w1ry = 0.
spb.w2ry = 0.
spb.add_panel(0, spb.b/2., plyt=spb.plyt)
spb.add_panel(spb.b/2., spb.b, plyt=spb.plyt)
k0 = spb.calc_k0(silent=True)
M = spb.calc_kM(silent=True)
eigvals, eigvecs = freq(k0, M, silent=True)
ref = [85.12907802-0.j, 134.16422850-0.j, 206.77295186-0.j,
216.45992453-0.j, 252.24546171-0.j]
assert np.allclose(eigvals[:5]/2/np.pi, ref, atol=0.1, rtol=0)
def test_lb_Stiffener1D():
print('Testing linear buckling for StiffPanelBay with a 1D Stiffener')
spb = StiffPanelBay()
spb.a = 1.
spb.b = 0.5
spb.stack = [0, 90, 90, 0]
spb.plyt = 1e-3*0.125
spb.laminaprop = (142.5e9, 8.7e9, 0.28, 5.1e9, 5.1e9, 5.1e9)
spb.model = 'plate_clt_donnell_bardell'
spb.mu = 1.3e3
spb.m = 15
spb.n = 16
spb.add_panel(y1=0, y2=spb.b/2., plyt=spb.plyt, Nxx=-1.)
spb.add_panel(y1=spb.b/2., y2=spb.b, plyt=spb.plyt, Nxx_cte=1000.)
spb.add_bladestiff1d(ys=spb.b/2., Fx=0., bf=0.05, fstack=[0, 90, 90, 0],
fplyt=spb.plyt, flaminaprop=spb.laminaprop)
k0 = spb.calc_k0(silent=True)
kG = spb.calc_kG0(silent=True)
eigvals, eigvecs = lb(k0, kG, silent=True)
assert np.isclose(eigvals[0].real, 297.54633, atol=0.1, rtol=0)
def test_lb_Stiffener2D():
print('Testing linear buckling for StiffPanelBay with a 2D Stiffener')
spb = StiffPanelBay()
spb.a = 1.
spb.b = 0.5
spb.stack = [0, 90, 90, 0]
spb.plyt = 1e-3*0.125
spb.laminaprop = (142.5e9, 8.7e9, 0.28, 5.1e9, 5.1e9, 5.1e9)
spb.model = 'plate_clt_donnell_bardell'
spb.mu = 1.3e3
spb.m = 15
spb.n = 16
spb.add_panel(y1=0, y2=spb.b/2., plyt=spb.plyt, Nxx=-1.)
spb.add_panel(y1=spb.b/2., y2=spb.b, plyt=spb.plyt, Nxx_cte=1000.)
spb.add_bladestiff2d(ys=spb.b/2., m1=14, n1=11, bf=0.05,
fstack=[0, 90, 90, 0],
fplyt=spb.plyt, flaminaprop=spb.laminaprop)
k0 = spb.calc_k0(silent=True)
kG = spb.calc_kG0(silent=True)
eigvals, eigvecs = lb(k0, kG, silent=True)
assert np.isclose(eigvals[0].real, 301.0825234, atol=0.1, rtol=0)
def test_freq_Stiffener1D():
print('Testing frequency analysis for StiffPanelBay with a 1D Stiffener')
spb = StiffPanelBay()
spb.a = 2.
spb.b = 0.5
spb.stack = [0, 90, 90, 0]
spb.plyt = 1e-3*0.125
spb.laminaprop = (142.5e9, 8.7e9, 0.28, 5.1e9, 5.1e9, 5.1e9)
spb.model = 'plate_clt_donnell_bardell'
spb.mu = 1.3e3
spb.m = 15
spb.n = 16
spb.add_panel(y1=0, y2=spb.b/2., plyt=spb.plyt)
spb.add_panel(y1=spb.b/2., y2=spb.b, plyt=spb.plyt)
spb.add_bladestiff1d(ys=spb.b/2., Fx=0., bf=0.08, fstack=[0, 90, 90, 0]*5,
fplyt=spb.plyt, flaminaprop=spb.laminaprop)
k0 = spb.calc_k0(silent=True)
M = spb.calc_kM(silent=True)
eigvals, eigvecs = freq(k0, M, silent=True, num_eigvalues=10)
assert np.isclose(eigvals[0].real, 79.5906673583, atol=0.1, rtol=0)
def test_freq_Stiffener2D():
print('Testing frequency analysis for StiffPanelBay with a 2D Stiffener')
spb = StiffPanelBay()
spb.a = 1.
spb.b = 0.5
spb.stack = [0, 90, 90, 0]
spb.plyt = 1e-3*0.125
spb.laminaprop = (142.5e9, 8.7e9, 0.28, 5.1e9, 5.1e9, 5.1e9)
spb.model = 'plate_clt_donnell_bardell'
spb.mu = 1.3e3
spb.m = 11
spb.n = 12
spb.add_panel(y1=0, y2=spb.b/2., plyt=spb.plyt)
spb.add_panel(y1=spb.b/2., y2=spb.b, plyt=spb.plyt)
spb.add_bladestiff2d(ys=spb.b/2., m1=14, n1=11, bf=0.08,
fstack=[0, 90, 90, 0]*5, fplyt=spb.plyt,
flaminaprop=spb.laminaprop)
k0 = spb.calc_k0(silent=True)
M = spb.calc_kM(silent=True)
eigvals, eigvecs = freq(k0, M, silent=True)
assert np.isclose(eigvals[0].real, 137.97927190657148, atol=0.01, rtol=0)
def test_Lee_and_Lee_table4():
print('Testing Lee and Lee Table 4')
# Lee and Lee. "Vibration analysis of anisotropic plates with eccentric
# stiffeners". Computers & Structures, Vol. 57, No. 1, pp. 99-105,
# 1995.
models = (
('model4', 0.00208, 0.0060, 138.99917796302756),
('model5', 0.00260, 0.0075, 175.00597239286196),
('model7', 0.00364, 0.0105, 205.433509024))
for model, hf, bf, value in models:
spb = StiffPanelBay()
spb.model = 'plate_clt_donnell_bardell'
spb.mu = 1.500e3 # plate material density in kg / m^3
spb.laminaprop = (128.e9, 11.e9, 0.25, 4.48e9, 1.53e9, 1.53e9)
spb.stack = [0, -45, +45, 90, 90, +45, -45, 0]
plyt = 0.00013
spb.plyt = plyt
spb.a = 0.5
spb.b = 0.250
spb.m = 14
spb.n = 15
hf = hf
bf = bf
n = int(hf/plyt)
fstack = [0]*(n//4) + [90]*(n//4) + [90]*(n//4) + [0]*(n//4)
# clamping
spb.w1rx = 0.
spb.w2rx = 0.
spb.w1ry = 0.
spb.w2ry = 0.
spb.add_panel(y1=0, y2=spb.b/2.)
spb.add_panel(y1=spb.b/2., y2=spb.b)
spb.add_bladestiff1d(mu=spb.mu, ys=spb.b/2., bb=0., bf=bf,
fstack=fstack, fplyt=plyt, flaminaprop=spb.laminaprop)
k0 = spb.calc_k0(silent=True)
M = spb.calc_kM(silent=True)
eigvals, eigvecs = freq(k0, M, silent=True)
herz = eigvals[0].real/2/np.pi
assert np.isclose(herz, value, atol=0.001, rtol=0.001)
| [
"compmech.stiffpanelbay.StiffPanelBay",
"numpy.allclose",
"compmech.analysis.lb",
"numpy.isclose",
"compmech.analysis.freq"
] | [((1542, 1557), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (1555, 1557), False, 'from compmech.stiffpanelbay import StiffPanelBay\n'), ((2164, 2187), 'compmech.analysis.lb', 'lb', (['k0', 'kG'], {'silent': '(True)'}), '(k0, kG, silent=True)\n', (2166, 2187), False, 'from compmech.analysis import freq, lb\n'), ((2200, 2256), 'numpy.isclose', 'np.isclose', (['eigvals[0].real', '(297.54633)'], {'atol': '(0.1)', 'rtol': '(0)'}), '(eigvals[0].real, 297.54633, atol=0.1, rtol=0)\n', (2210, 2256), True, 'import numpy as np\n'), ((2371, 2386), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (2384, 2386), False, 'from compmech.stiffpanelbay import StiffPanelBay\n'), ((3036, 3059), 'compmech.analysis.lb', 'lb', (['k0', 'kG'], {'silent': '(True)'}), '(k0, kG, silent=True)\n', (3038, 3059), False, 'from compmech.analysis import freq, lb\n'), ((3072, 3130), 'numpy.isclose', 'np.isclose', (['eigvals[0].real', '(301.0825234)'], {'atol': '(0.1)', 'rtol': '(0)'}), '(eigvals[0].real, 301.0825234, atol=0.1, rtol=0)\n', (3082, 3130), True, 'import numpy as np\n'), ((3250, 3265), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (3263, 3265), False, 'from compmech.stiffpanelbay import StiffPanelBay\n'), ((3848, 3890), 'compmech.analysis.freq', 'freq', (['k0', 'M'], {'silent': '(True)', 'num_eigvalues': '(10)'}), '(k0, M, silent=True, num_eigvalues=10)\n', (3852, 3890), False, 'from compmech.analysis import freq, lb\n'), ((3903, 3963), 'numpy.isclose', 'np.isclose', (['eigvals[0].real', '(79.5906673583)'], {'atol': '(0.1)', 'rtol': '(0)'}), '(eigvals[0].real, 79.5906673583, atol=0.1, rtol=0)\n', (3913, 3963), True, 'import numpy as np\n'), ((4083, 4098), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (4096, 4098), False, 'from compmech.stiffpanelbay import StiffPanelBay\n'), ((4724, 4748), 'compmech.analysis.freq', 'freq', (['k0', 'M'], {'silent': '(True)'}), '(k0, M, 
silent=True)\n', (4728, 4748), False, 'from compmech.analysis import freq, lb\n'), ((4761, 4827), 'numpy.isclose', 'np.isclose', (['eigvals[0].real', '(137.97927190657148)'], {'atol': '(0.01)', 'rtol': '(0)'}), '(eigvals[0].real, 137.97927190657148, atol=0.01, rtol=0)\n', (4771, 4827), True, 'import numpy as np\n'), ((551, 566), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (564, 566), False, 'from compmech.stiffpanelbay import StiffPanelBay\n'), ((1211, 1235), 'compmech.analysis.freq', 'freq', (['k0', 'M'], {'silent': '(True)'}), '(k0, M, silent=True)\n', (1215, 1235), False, 'from compmech.analysis import freq, lb\n'), ((1372, 1431), 'numpy.allclose', 'np.allclose', (['(eigvals[:5] / 2 / np.pi)', 'ref'], {'atol': '(0.1)', 'rtol': '(0)'}), '(eigvals[:5] / 2 / np.pi, ref, atol=0.1, rtol=0)\n', (1383, 1431), True, 'import numpy as np\n'), ((5302, 5317), 'compmech.stiffpanelbay.StiffPanelBay', 'StiffPanelBay', ([], {}), '()\n', (5315, 5317), False, 'from compmech.stiffpanelbay import StiffPanelBay\n'), ((6247, 6271), 'compmech.analysis.freq', 'freq', (['k0', 'M'], {'silent': '(True)'}), '(k0, M, silent=True)\n', (6251, 6271), False, 'from compmech.analysis import freq, lb\n'), ((6327, 6374), 'numpy.isclose', 'np.isclose', (['herz', 'value'], {'atol': '(0.001)', 'rtol': '(0.001)'}), '(herz, value, atol=0.001, rtol=0.001)\n', (6337, 6374), True, 'import numpy as np\n')] |
"""
Script to export a PyTorch-based Pyrado policy to C++
"""
import numpy as np
import torch as to
from rcsenv import ControlPolicy
from pyrado.policies.linear import LinearPolicy
from pyrado.policies.rnn import RNNPolicy
from pyrado.spaces.box import BoxSpace
from pyrado.utils.data_types import EnvSpec
from pyrado.policies.features import FeatureStack, squared_feat, identity_feat, const_feat
def create_nonrecurrent_policy():
return LinearPolicy(
EnvSpec(
BoxSpace(-1, 1, 4),
BoxSpace(-1, 1, 3),
),
FeatureStack([
const_feat,
identity_feat,
squared_feat
])
)
def create_recurrent_policy():
return RNNPolicy(
EnvSpec(
BoxSpace(-1, 1, 4),
BoxSpace(-1, 1, 3),
),
hidden_size=32, num_recurrent_layers=1, hidden_nonlin='tanh'
)
if __name__ == '__main__':
tmpfile = '/tmp/torchscriptsaved.pt'
to.set_default_dtype(to.double)
# Create a Pyrado policy
model = create_nonrecurrent_policy()
# model = create_recurrent_policy()
# Trace the Pyrado policy (inherits from PyTorch module)
traced_script_module = model.trace()
print(traced_script_module.graph)
# Save the scripted module
traced_script_module.save(tmpfile)
# Load in C++
cp = ControlPolicy('torch', tmpfile)
# Print more digits
to.set_printoptions(precision=8, linewidth=200)
np.set_printoptions(precision=8, linewidth=200)
print(f'manual: {model(to.tensor([1, 2, 3, 4], dtype=to.get_default_dtype()))}')
print(f'script: {traced_script_module(to.tensor([1, 2, 3, 4], dtype=to.get_default_dtype()))}')
print(f'cpp: {cp(np.array([1, 2, 3, 4]), 3)}')
| [
"torch.get_default_dtype",
"torch.set_printoptions",
"rcsenv.ControlPolicy",
"torch.set_default_dtype",
"numpy.array",
"pyrado.policies.features.FeatureStack",
"pyrado.spaces.box.BoxSpace",
"numpy.set_printoptions"
] | [((963, 994), 'torch.set_default_dtype', 'to.set_default_dtype', (['to.double'], {}), '(to.double)\n', (983, 994), True, 'import torch as to\n'), ((1346, 1377), 'rcsenv.ControlPolicy', 'ControlPolicy', (['"""torch"""', 'tmpfile'], {}), "('torch', tmpfile)\n", (1359, 1377), False, 'from rcsenv import ControlPolicy\n'), ((1407, 1454), 'torch.set_printoptions', 'to.set_printoptions', ([], {'precision': '(8)', 'linewidth': '(200)'}), '(precision=8, linewidth=200)\n', (1426, 1454), True, 'import torch as to\n'), ((1459, 1506), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(8)', 'linewidth': '(200)'}), '(precision=8, linewidth=200)\n', (1478, 1506), True, 'import numpy as np\n'), ((559, 614), 'pyrado.policies.features.FeatureStack', 'FeatureStack', (['[const_feat, identity_feat, squared_feat]'], {}), '([const_feat, identity_feat, squared_feat])\n', (571, 614), False, 'from pyrado.policies.features import FeatureStack, squared_feat, identity_feat, const_feat\n'), ((488, 506), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)', '(4)'], {}), '(-1, 1, 4)\n', (496, 506), False, 'from pyrado.spaces.box import BoxSpace\n'), ((520, 538), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)', '(3)'], {}), '(-1, 1, 3)\n', (528, 538), False, 'from pyrado.spaces.box import BoxSpace\n'), ((751, 769), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)', '(4)'], {}), '(-1, 1, 4)\n', (759, 769), False, 'from pyrado.spaces.box import BoxSpace\n'), ((783, 801), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)', '(3)'], {}), '(-1, 1, 3)\n', (791, 801), False, 'from pyrado.spaces.box import BoxSpace\n'), ((1717, 1739), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1725, 1739), True, 'import numpy as np\n'), ((1565, 1587), 'torch.get_default_dtype', 'to.get_default_dtype', ([], {}), '()\n', (1585, 1587), True, 'import torch as to\n'), ((1665, 1687), 'torch.get_default_dtype', 'to.get_default_dtype', ([], {}), 
'()\n', (1685, 1687), True, 'import torch as to\n')] |
import logging
from casymda.blocks.entity import Entity
from simpy.core import Environment
class Order(Entity):
def __init__(self, env: Environment, name: str):
super().__init__(env, name)
self._time_of_acceptance = -1
self._initial_eta = -1
self._eta = -1
self._ready_at = -1
self._sop_at = -1
self._finished_at = -1
@property
def time_of_acceptance(self):
return self._time_of_acceptance
@time_of_acceptance.setter
def time_of_acceptance(self, value):
self._time_of_acceptance = value
update(
self.name,
"time_of_acceptance",
self._time_of_acceptance,
self.env.now,
self,
)
@property
def initial_eta(self):
return self._initial_eta
@initial_eta.setter
def initial_eta(self, value):
self._initial_eta = value
update(self.name, "initial_eta", self._initial_eta, self.env.now, self)
@property
def eta(self):
return self._eta
@eta.setter
def eta(self, value):
self._eta = value
update(self.name, "eta", self._eta, self.env.now, self)
@property
def ready_at(self):
return self._ready_at
@ready_at.setter
def ready_at(self, value):
self._ready_at = value
update(self.name, "ready_at", self._ready_at, self.env.now, self)
@property
def sop_at(self):
return self._sop_at
@sop_at.setter
def sop_at(self, value):
self._sop_at = value
update(self.name, "sop_at", self._sop_at, self.env.now, self)
@property
def finished_at(self):
return self._finished_at
@finished_at.setter
def finished_at(self, value):
self._finished_at = value
update(self.name, "finished_at", self._finished_at, self.env.now, self)
def update(name: str, attribute: str, value: float, t: float, order: Order):
# (replaced by actual callback during emulation initialization)
logging.info(
f"order update - name: {name}, attribute: {attribute}, value: {value}, t: {t}"
)
| [
"logging.info"
] | [((2029, 2131), 'logging.info', 'logging.info', (['f"""order update - name: {name}, attribute: {attribute}, value: {value}, t: {t}"""'], {}), "(\n f'order update - name: {name}, attribute: {attribute}, value: {value}, t: {t}'\n )\n", (2041, 2131), False, 'import logging\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for delta functions."""
from unittest import TestCase
from hiispider import delta
from pprint import pprint
import os
import random
import time
from datetime import datetime
from hiiguid import HiiGUID
srt = lambda l: list(sorted(l))
DATAPATH = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
def read(path):
"""Read a file and return its contents, cleaning up after file handles."""
with open(path) as f:
contents = f.read()
return contents
def autogenerate(*args, **kwargs):
autogenerator = delta.Autogenerator()
return [x.data for x in autogenerator(*args, **kwargs)]
class TestAutogenerate(TestCase):
    """Test delta.autogenerate."""

    def setUp(self):
        # NOTE(review): this binds a throwaway local name only -- an instance
        # attribute was probably intended.  Kept unchanged to avoid altering
        # test behaviour.
        autogenerate = delta.Autogenerator()

    def test_includes(self):
        """Deltas fire only when a key listed in *includes* changes."""
        def autogenerate_z_include(*args, **kwargs):
            autogenerator = delta.Autogenerator(includes="z")
            return [x.data for x in autogenerator(*args, **kwargs)]
        # Neither value has the included key.
        a = [{"x":1, "y":1}]
        b = [{"x":1, "y":1}, {"x":3, "y":2}]
        self.assertEqual(autogenerate_z_include(a, b), [])
        # Neither value has the included key, second value is empty.
        a = [{"x":3, "y":2}]
        b = []
        self.assertEqual(autogenerate_z_include(a, b), [])
        # 'a' has the included key.
        a = [{"x":1, "y":1}, {"x":3, "y":2, "z":1}]
        b = [{"x":1, "y":1}]
        self.assertEqual(autogenerate_z_include(a, b), [{"x":3, "y":2, "z":1}])
        # 'a' and 'b' have the included key, but it does not change.
        a = [{"x":1, "y":1}, {"x":3, "y":2, "z":1}]
        b = [{"x":1, "y":1}, {"x":3, "y":2, "z":1}, {"x":4, "y":9, "z":1}]
        self.assertEqual(autogenerate_z_include(a, b), [])
        # 'a' and 'b' have the included key, but it does not change.
        a = [{"x":1, "y":1, "z":2}, {"x":3, "y":2, "z":1}]
        b = [{"x":5, "y":5, "z":2}, {"x":9, "y":4, "z":1}]
        self.assertEqual(autogenerate_z_include(a, b), [])
        # 'a' and 'b' have the included key, and it changes.
        # NOTE(review): sorted() over lists of dicts only works on Python 2.
        a = [{"x":1, "y":1}, {"x":3, "y":2, "z":1}]
        b = [{"x":1, "y":1}, {"x":3, "y":2, "z":2}, {"x":4, "y":9, "z":3}]
        self.assertEqual(
            sorted(autogenerate_z_include(a, b)),
            sorted([{"x":3, "y":2, "z":1}]))
        # 'a' and 'b' have the included key, and it changes.
        a = [{"x":1, "y":1}, {"x":3, "y":2, "z":2}, {"x":4, "y":9, "z":3}]
        b = [{"x":1, "y":1}, {"x":3, "y":2, "z":1}]
        self.assertEqual(
            sorted(autogenerate_z_include(a, b)),
            sorted([{"x":3, "y":2, "z":2}, {"x":4, "y":9, "z":3}]))
        # 'a' and 'b' have the included key, and it changes.
        a = [{"x":1, "y":1, "z":1}, {"x":3, "y":2, "z":2}, {"x":4, "y":9, "z":3}]
        b = [{"x":4, "y":5, "z":1}]
        self.assertEqual(
            sorted(autogenerate_z_include(a, b)),
            sorted([{"x":3, "y":2, "z":2}, {"x":4, "y":9, "z":3}]))
        # Nested, multiple includes
        autogenerator = delta.Autogenerator(includes=["example/y", "example/z"], paths="example")
        a = {"example":[{"x":1, "y":2, "z":3}]}
        b = {"example":[{"x":2, "y":2, "z":3}]}
        self.assertEqual(autogenerator(a, b), [])
        a = {"example":[{"x":2, "y":1, "z":3}]}
        b = {"example":[{"x":2, "y":2, "z":3}]}
        self.assertEqual(autogenerator(a, b)[0].data, {"x":2, "y":1, "z":3})
        a = {"example":[{"x":2, "y":2, "z":3}]}
        b = {"example":[{"x":2, "y":2, "z":4}]}
        self.assertEqual(autogenerator(a, b)[0].data, {"x":2, "y":2, "z":3})

    def test_date_parsing(self):
        """Timestamps in the *dates* field become the delta GUID timestamp."""
        def autogenerate_z_date(*args, **kwargs):
            autogenerator = delta.Autogenerator(dates="z")
            return HiiGUID(autogenerator(*args, **kwargs)[0].id).timestamp
        now = time.time()
        b = []
        # Float
        a = [{"z":float(now)}]
        self.assertEqual(autogenerate_z_date(a, b), int(now))
        # Int
        a = [{"z":int(now)}]
        self.assertEqual(autogenerate_z_date(a, b), int(now))
        # Str
        a = [{"z":datetime.fromtimestamp(now).isoformat()}]
        self.assertEqual(autogenerate_z_date(a, b), int(now))
        # With include
        autogenerator = delta.Autogenerator(dates="date", includes="z")
        a = [{"date":int(now), "z":1}]
        b = [{"date":int(now + 10), "z":1}]
        self.assertEqual(autogenerator(a, b), [])
        # With nested include
        autogenerator = delta.Autogenerator(paths='example', dates="example/date", includes="example/z")
        a = {"example":[{"date":int(now), "z":1}]}
        b = {"example":[{"date":int(now + 10), "z":1}]}
        self.assertEqual(autogenerator(a, b), [])
        # Nested, multiple includes
        autogenerator = delta.Autogenerator(includes=["example/y", "example/z"], paths="example", dates="example/date")
        a = {"example":[{"x":1, "y":2, "z":3, "date":int(now)}]}
        b = {"example":[{"x":2, "y":2, "z":3, "date":int(now + 10)}]}
        self.assertEqual(autogenerator(a, b), [])
        a = {"example":[{"x":2, "y":1, "z":3, "date":int(now)}]}
        b = {"example":[{"x":2, "y":2, "z":3, "date":int(now + 10)}]}
        self.assertEqual(autogenerator(a, b)[0].data, {"x":2, "y":1, "z":3, "date":int(now)})
        a = {"example":[{"x":2, "y":2, "z":3, "date":int(now)}]}
        b = {"example":[{"x":2, "y":2, "z":4, "date":int(now + 10)}]}
        self.assertEqual(autogenerator(a, b)[0].data, {"x":2, "y":2, "z":3, "date":int(now)})

    def test_exceptional_cases(self):
        """Test exceptional cases for autogenerate."""
        # autogeneratearing strings throws a type error
        args = ('foo', 'bar')
        self.assertRaises(TypeError, autogenerate, args)
        # UNLESS(?) they're the same
        args = ('foo', 'foo')
        self.assertEqual(autogenerate(*args), [])
        # raise type error if classes aren't the same
        args = (['foo'], {'bar': 1})
        self.assertRaises(TypeError, autogenerate, args)
        # raises an error if it doesn't know about what classes they are
        class Flub(object):
            def __init__(self, val):
                self.val = val
        self.assertRaises(TypeError, autogenerate, (Flub('hi'), Flub('bye')))
        # but it doesn't if cmp works on the object and they're the same...
        class Flub2(Flub):
            def __cmp__(self, other):
                # FIX: Python 2's builtin cmp() spelled portably, so this
                # also imports cleanly under Python 3.
                return (self.val > other.val) - (self.val < other.val)
        self.assertEqual(autogenerate(Flub2('hi'), Flub2('hi')), [])

    def test_lists(self):
        """Basic list autogenerate comparisson tests."""
        # test the identity autogenerate comparisson against an empty list
        for i in range(10):
            # FIX: range() instead of Python-2-only xrange().
            rand = srt(random.sample(range(10000), 100))
            self.assertEqual(srt(autogenerate(rand, [])), rand)
        # test some known valued lists against eachother
        self.assertEqual(autogenerate([1,2,3,4], [2,3,4]), [1])
        # going the other way doesn't get us anything
        self.assertEqual(autogenerate([2,3,4], [1,2,3,4]), [])
        # test interpolated values
        self.assertEqual(autogenerate([1,2,3,4,5,6,7], [2,5,1,3]), [4,6,7])
        # establish what duplicates mean
        self.assertEqual(autogenerate([1,1,2,3,4,4,5], [1,2,3,4,5]), [])
        # test autogenerate comparissons on autogenerateosite types
        self.assertEqual(srt(autogenerate(
            [{'a': 1, 'b': 2}, {'c': 3}, ['foo', 'bar']],
            [{'c': 3}])),
            srt([{'a': 1, 'b': 2}, ['foo', 'bar']]),
        )
        self.assertEqual(srt(autogenerate(
            [{'a': 1, 'b': 2}, {'c': 3}, ['foo', 'bar']],
            [{'a': 1, 'b': 3}])),
            srt([{'a': 1, 'b': 2}, {'c': 3}, ['foo', 'bar']]),
        )

    def test_goodreads(self):
        """Regression test: identical GoodReads payloads yield no deltas."""
        def autogenerate_goodreads(*args, **kwargs):
            autogenerator = delta.Autogenerator(paths="books", includes="books/id")
            return [x.data for x in autogenerator(*args, **kwargs)]
        from ujson import decode
        old = decode(read("%s/goodreads/old.json" % DATAPATH))
        new = decode(read("%s/goodreads/new.json" % DATAPATH))
        self.assertEqual(autogenerate_goodreads(old, new), [])

    def test_unicode(self):
        """Byte strings and unicode strings with equal content compare equal."""
        a = {"x":["y", 1, 2]}
        b = {"x":[1, u"y", 2]}
        self.assertEqual(autogenerate(a, b), [])
        a = {"x":["y", 1, 2]}
        b = {u"x":[1, u"y", 2]}
        self.assertEqual(autogenerate(a, b), [])
        a = [{"z":1}, {"x":["y", 1, 2]}]
        b = [{u"x":[1, u"y", 2]}, {u"z":1}]
        self.assertEqual(autogenerate(a, b), [])
| [
"os.path.dirname",
"hiispider.delta.Autogenerator",
"datetime.datetime.fromtimestamp",
"time.time"
] | [((595, 616), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {}), '()\n', (614, 616), False, 'from hiispider import delta\n'), ((333, 358), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (348, 358), False, 'import os\n'), ((792, 813), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {}), '()\n', (811, 813), False, 'from hiispider import delta\n'), ((3100, 3173), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {'includes': "['example/y', 'example/z']", 'paths': '"""example"""'}), "(includes=['example/y', 'example/z'], paths='example')\n", (3119, 3173), False, 'from hiispider import delta\n'), ((3898, 3909), 'time.time', 'time.time', ([], {}), '()\n', (3907, 3909), False, 'import time\n'), ((4322, 4369), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {'dates': '"""date"""', 'includes': '"""z"""'}), "(dates='date', includes='z')\n", (4341, 4369), False, 'from hiispider import delta\n'), ((4557, 4642), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {'paths': '"""example"""', 'dates': '"""example/date"""', 'includes': '"""example/z"""'}), "(paths='example', dates='example/date', includes='example/z'\n )\n", (4576, 4642), False, 'from hiispider import delta\n'), ((4855, 4954), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {'includes': "['example/y', 'example/z']", 'paths': '"""example"""', 'dates': '"""example/date"""'}), "(includes=['example/y', 'example/z'], paths='example',\n dates='example/date')\n", (4874, 4954), False, 'from hiispider import delta\n'), ((925, 958), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {'includes': '"""z"""'}), "(includes='z')\n", (944, 958), False, 'from hiispider import delta\n'), ((3778, 3808), 'hiispider.delta.Autogenerator', 'delta.Autogenerator', ([], {'dates': '"""z"""'}), "(dates='z')\n", (3797, 3808), False, 'from hiispider import delta\n'), ((7952, 8007), 'hiispider.delta.Autogenerator', 
'delta.Autogenerator', ([], {'paths': '"""books"""', 'includes': '"""books/id"""'}), "(paths='books', includes='books/id')\n", (7971, 8007), False, 'from hiispider import delta\n'), ((4171, 4198), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['now'], {}), '(now)\n', (4193, 4198), False, 'from datetime import datetime\n')] |
import logging
from pymongo import MongoClient
from scrapy.exceptions import CloseSpider
class MongoDBPipeline(object):
    """Scrapy item pipeline that stores scraped items in MongoDB.

    Supports optional buffered bulk inserts (``MONGODB_BUFFER_LENGTH``) and
    optional upserts keyed on one or more item fields (``MONGODB_UNIQUE_KEY``).
    """

    # Class-level defaults only; configure() works on an instance-level copy
    # so that concurrently running spiders do not share mutable state.
    config = {
        'uri': 'mongodb://localhost:27017',  # FIX: was 270017, an invalid port
        'database': 'scrapyu',
        'collection': 'items',
        'unique_key': None,
        'buffer_length': 0,
    }

    def open_spider(self, spider):
        """Read settings and open the MongoDB connection when the spider starts."""
        self.logger = logging.getLogger('scrapyu.MongoDBPipeline')
        self.configure(spider.settings)
        config = self.config
        self.connection = MongoClient(config['uri'])
        self.database = self.connection[config['database']]
        self.collection = self.database[config['collection']]
        # Lazy %-args: no string formatting when INFO logging is disabled.
        self.logger.info(
            "Connected to MongoDB %s, using %s.%s",
            config['uri'], config['database'], config['collection'],
        )
        self.has_buffer = bool(config['buffer_length'])
        if self.has_buffer:
            self._item_buffer = []

    def close_spider(self, spider):
        """Flush any buffered items, then close the connection."""
        if self.has_buffer and self._item_buffer:
            self._commit_buffer()
        self.connection.close()

    def process_item(self, item, spider):
        """Store *item* (buffered or immediately) and pass it through."""
        item_dict = dict(item)
        if self.has_buffer:
            self.insert_buffer(item_dict)
        else:
            self.insert_one(item_dict)
        return item

    def _commit_buffer(self):
        # Insert a snapshot so the buffer can be cleared even if callers
        # still hold a reference to the list.
        items = self._item_buffer.copy()
        self.collection.insert_many(items)
        self._item_buffer.clear()

    def insert_buffer(self, item):
        """Queue *item*; flush to MongoDB once the buffer is full."""
        self._item_buffer.append(item)
        if len(self._item_buffer) >= self.config['buffer_length']:
            self._commit_buffer()

    def insert_one(self, item):
        """Insert *item*, or upsert it when a unique key is configured."""
        unique_key = self.config['unique_key']
        if unique_key is None:
            self.collection.insert_one(item)
        else:
            spec = {}
            try:
                for k in unique_key:
                    spec[k] = item[k]
                self.collection.update_one(spec, {'$set': item}, upsert=True)
            except KeyError as e:
                msg = f"unique_key defined error, item has no {str(e)} field"
                raise CloseSpider(msg) from e

    def configure(self, settings):
        """Build this instance's config dict from Scrapy *settings*."""
        # FIX: copy the class-level defaults instead of mutating them in
        # place, so one spider's settings cannot leak into another's.
        self.config = dict(self.config)
        uri = settings.get('MONGODB_URI')
        if uri is None:
            host = settings.get('MONGODB_HOST', 'localhost')
            port = settings.get('MONGODB_PORT', 27017)
            uri = f'mongodb://{host}:{port}'
        self.config['uri'] = uri
        self.config['database'] = settings.get('MONGODB_DATABASE', 'scrapyu')
        self.config['collection'] = settings.get('MONGODB_COLLECTION', 'items')
        unique_key = settings.get('MONGODB_UNIQUE_KEY')
        if unique_key is not None:
            if isinstance(unique_key, str):
                unique_key = unique_key.split()
            else:
                unique_key = list(unique_key)
            self.config['unique_key'] = unique_key
        self.config['buffer_length'] = settings.get('MONGODB_BUFFER_LENGTH', 0)
| [
"logging.getLogger",
"pymongo.MongoClient",
"scrapy.exceptions.CloseSpider"
] | [((366, 410), 'logging.getLogger', 'logging.getLogger', (['"""scrapyu.MongoDBPipeline"""'], {}), "('scrapyu.MongoDBPipeline')\n", (383, 410), False, 'import logging\n'), ((506, 532), 'pymongo.MongoClient', 'MongoClient', (["config['uri']"], {}), "(config['uri'])\n", (517, 532), False, 'from pymongo import MongoClient\n'), ((2125, 2141), 'scrapy.exceptions.CloseSpider', 'CloseSpider', (['msg'], {}), '(msg)\n', (2136, 2141), False, 'from scrapy.exceptions import CloseSpider\n')] |
import torch.nn as nn
import torch
import os
class BasicConv3d(nn.Module):
    """3D convolution (bias disabled) followed by a LeakyReLU activation."""

    def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
        super(BasicConv3d, self).__init__()
        # bias=False mirrors the reference implementation; a BatchNorm layer
        # was originally planned after the conv but is disabled.
        self.conv = nn.Conv3d(in_planes, out_planes, kernel_size=kernel_size,
                              stride=stride, padding=padding, bias=False)
        self.relu = nn.LeakyReLU(negative_slope=0.01, inplace=True)

    def forward(self, x):
        """Apply the convolution, then the activation."""
        return self.relu(self.conv(x))
class STConv3d(nn.Module):
    """Separable spatio-temporal conv: a 1xkxk spatial conv, then a kx1x1
    temporal conv, each followed by its own LeakyReLU."""

    def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
        super(STConv3d, self).__init__()
        # Spatial convolution (no temporal extent).
        self.conv = nn.Conv3d(in_planes, out_planes,
                              kernel_size=(1, kernel_size, kernel_size),
                              stride=(1, stride, stride),
                              padding=(0, padding, padding))
        # Temporal convolution (no spatial extent).
        self.conv2 = nn.Conv3d(out_planes, out_planes,
                               kernel_size=(kernel_size, 1, 1),
                               stride=(stride, 1, 1),
                               padding=(padding, 0, 0))
        self.relu = nn.LeakyReLU(negative_slope=0.01, inplace=True)
        self.relu2 = nn.LeakyReLU(negative_slope=0.01, inplace=True)
        # The temporal conv gets its own gaussian init with a zeroed bias.
        nn.init.normal_(self.conv2.weight, mean=0, std=0.01)
        nn.init.constant_(self.conv2.bias, 0)

    def forward(self, x):
        """Spatial conv + ReLU, then temporal conv + ReLU."""
        x = self.relu(self.conv(x))
        return self.relu2(self.conv2(x))
# Note the operations here for S3D-G:
# If we set two convs: 1xkxk + kx1x1, it's as follows: (p=(k-1)/2)
# BasicConv3d(input,output,kernel_size=(1,k,k),stride=1,padding=(0,p,p))
# Then BasicConv3d(output,output,kernel_size=(k,1,1),stride=1,padding=(p,0,0))
class Mixed_3b(nn.Module):
    """Inception block: 64 in-channels -> 16 + 24 + 12 + 12 = 64 out-channels."""

    def __init__(self):
        super(Mixed_3b, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(64, 16, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(64, 32, kernel_size=1, stride=1),
            STConv3d(32, 24, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(64, 16, kernel_size=1, stride=1),
            STConv3d(16, 12, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(64, 12, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_3c(nn.Module):
    """Inception block: 64 in-channels -> 24 + 40 + 16 + 16 = 96 out-channels."""

    def __init__(self):
        super(Mixed_3c, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(64, 24, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(64, 24, kernel_size=1, stride=1),
            STConv3d(24, 40, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(64, 12, kernel_size=1, stride=1),
            STConv3d(12, 16, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(64, 16, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_4b(nn.Module):
    """Inception block: 96 in-channels -> 32 + 64 + 16 + 16 = 128 out-channels."""

    def __init__(self):
        super(Mixed_4b, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(96, 32, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(96, 32, kernel_size=1, stride=1),
            STConv3d(32, 64, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(96, 32, kernel_size=1, stride=1),
            STConv3d(32, 16, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(96, 16, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_4c(nn.Module):
    """Inception block: 128 in-channels -> 60 + 84 + 24 + 24 = 192 out-channels."""

    def __init__(self):
        super(Mixed_4c, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(128, 60, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(128, 64, kernel_size=1, stride=1),
            STConv3d(64, 84, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(128, 16, kernel_size=1, stride=1),
            STConv3d(16, 24, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(128, 24, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_4d(nn.Module):
    """Inception block: 512 in-channels -> 128 + 256 + 64 + 64 = 512 out-channels."""

    def __init__(self):
        super(Mixed_4d, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(512, 128, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(512, 128, kernel_size=1, stride=1),
            STConv3d(128, 256, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(512, 24, kernel_size=1, stride=1),
            STConv3d(24, 64, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(512, 64, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_4e(nn.Module):
    """Inception block: 512 in-channels -> 112 + 288 + 64 + 64 = 528 out-channels."""

    def __init__(self):
        super(Mixed_4e, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(512, 112, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(512, 144, kernel_size=1, stride=1),
            STConv3d(144, 288, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(512, 32, kernel_size=1, stride=1),
            STConv3d(32, 64, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(512, 64, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_4f(nn.Module):
    """Inception block: 128 in-channels -> 32 + 64 + 16 + 16 = 128 out-channels."""

    def __init__(self):
        super(Mixed_4f, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(128, 32, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(128, 36, kernel_size=1, stride=1),
            STConv3d(36, 64, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(128, 12, kernel_size=1, stride=1),
            STConv3d(12, 16, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(128, 16, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_5b(nn.Module):
    """Inception block: 96 in-channels -> 32 + 48 + 24 + 24 = 128 out-channels."""

    def __init__(self):
        super(Mixed_5b, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(96, 32, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(96, 24, kernel_size=1, stride=1),
            STConv3d(24, 48, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(96, 12, kernel_size=1, stride=1),
            STConv3d(12, 24, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(96, 24, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Mixed_5c(nn.Module):
    """Inception block: 192 in-channels -> 96 + 96 + 32 + 32 = 256 out-channels."""

    def __init__(self):
        super(Mixed_5c, self).__init__()
        # branch 0: 1x1x1 bottleneck only
        self.branch0 = nn.Sequential(BasicConv3d(192, 96, kernel_size=1, stride=1))
        # branch 1: 1x1x1 reduction, then separable 3x3x3 conv
        self.branch1 = nn.Sequential(
            BasicConv3d(192, 64, kernel_size=1, stride=1),
            STConv3d(64, 96, kernel_size=3, stride=1, padding=1),
        )
        # branch 2: smaller reduction, then separable 3x3x3 conv
        self.branch2 = nn.Sequential(
            BasicConv3d(192, 16, kernel_size=1, stride=1),
            STConv3d(16, 32, kernel_size=3, stride=1, padding=1),
        )
        # branch 3: max-pool, then 1x1x1 projection
        self.branch3 = nn.Sequential(
            nn.MaxPool3d(kernel_size=(3, 3, 3), stride=1, padding=1),
            BasicConv3d(192, 32, kernel_size=1, stride=1),
        )

    def forward(self, x):
        """Run every branch on *x* and concatenate along the channel axis."""
        branches = (self.branch0, self.branch1, self.branch2, self.branch3)
        return torch.cat([branch(x) for branch in branches], 1)
class Permute(nn.Module):
    """Swap the first two non-batch axes: (B, C, T, H, W) <-> (B, T, C, H, W)."""

    def __init__(self):
        super(Permute, self).__init__()

    def forward(self, tensor):
        """Return *tensor* with dims 1 and 2 exchanged (no copy)."""
        return tensor.permute(0, 2, 1, 3, 4)
class S3DG(nn.Module):
    """S3D-G style 3D-conv backbone producing per-time-step embeddings.

    forward() expects input shaped (BS, T, C, H, W) and permutes it to
    channels-first before the conv stack; the final Linear projects the
    pooled 1024-dim features of each of the 56 time steps to *embed_size*.
    """

    def __init__(self, embed_size = 512, device = None, dropout = 0.5, input_channel=3, spatial_squeeze=True):
        super(S3DG, self).__init__()
        self.features = nn.Sequential(
            ############################ The Stem Network ############################
            BasicConv3d(input_channel, 16, kernel_size=6, stride=2, padding=2),  # (16, 112, 114, 98)
            BasicConv3d(16, 32, kernel_size=3, stride=1, padding=1),  # (32, 112, 114, 98)
            nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 0, 0)),  # (32, 112, 57, 49)
            STConv3d(32, 64, kernel_size=3, stride=1, padding=1),  # (64, 112, 57, 49)
            nn.MaxPool3d(kernel_size=(1, 3, 3), stride=(1, 2, 2), padding=(0, 0, 0)),  # (64, 112, 28, 24)
            ######################## Incption + Residual conv3d ########################
            Mixed_3c(),  # (96, 112, 28, 24)
            nn.MaxPool3d(kernel_size=(1, 2, 2), stride=(1, 2, 2), padding=(0, 0, 0)),  # (96, 112, 14, 12)
            Mixed_4b(),  # (128, 112, 14, 12)
            Mixed_4c(),  # (192, 112, 14, 12)
            nn.MaxPool3d(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 0, 0)),  # (192, 56, 7, 6)
            Mixed_5c(),  # (256, 56, 7, 6)
            nn.AdaptiveAvgPool3d(output_size=(56, 2, 2)),
        )
        self.fc_out_1 = nn.Linear(1024, embed_size)
        #self.fc_out_2 = nn.Linear(1660, 64*4*2)
        # FIX: the *dropout* argument was previously ignored (p was hard-coded
        # to 0.5); it is now honoured.  The default preserves old behaviour.
        self.dropout = nn.Dropout(p=dropout)
        self.spatial_squeeze = spatial_squeeze
        # NOTE(review): unused in forward(); kept for attribute compatibility
        # (nn.Softmax holds no parameters, so state_dict is unaffected).
        self.softmax = nn.Softmax()
        self.device = device
        self.relu = nn.LeakyReLU(negative_slope=0.01, inplace=True)

    def forward(self, x):
        """Return (BS, 56, embed_size) embeddings for input (BS, T, C, H, W)."""
        x = x.permute(0, 2, 1, 3, 4).to(self.device)  # -> (BS, C, T, H, W)
        logits = self.features(x)
        if self.spatial_squeeze:
            # NOTE(review): squeeze(3) removes only size-1 dims; with the
            # (56, 2, 2) pooled output these calls look like no-ops -- confirm.
            logits = logits.squeeze(3)
            logits = logits.squeeze(3)
        BS = logits.shape[0]
        # (debug print of the feature shape removed)
        # Flatten the per-time-step (C, 2, 2) features, then project + ReLU.
        out = self.relu(self.fc_out_1(self.dropout(logits.permute(0, 2, 1, 3, 4).reshape(BS, 56, -1))))
        return out

    def load_state_dict(self, path):
        """Load pretrained weights from the file *path*, averaging 5-D kernels
        with temporal size 3 or 7 into the first temporal slice.

        NOTE(review): this overrides nn.Module.load_state_dict() with an
        incompatible signature (a file path instead of a state dict).
        """
        target_weights = torch.load(path)
        own_state = self.state_dict()
        for name, param in target_weights.items():
            if name in own_state:
                if isinstance(param, nn.Parameter):
                    param = param.data
                try:
                    if len(param.size()) == 5 and param.size()[3] in [3, 7]:
                        # Collapse the temporal dimension of matching
                        # pretrained kernels by averaging.
                        own_state[name][:, :, 0, :, :] = torch.mean(param, 2)
                    else:
                        own_state[name].copy_(param)
                except Exception:
                    raise RuntimeError('While copying the parameter named {}.\
                                       whose dimensions in the model are {} and \
                                       whose dimensions in the checkpoint are {}.\
                                       '.format(name, own_state[name].size(), param.size()))
            else:
                print('{} meets error in locating parameters'.format(name))
        missing = set(own_state.keys()) - set(target_weights.keys())
        print('{} keys are not holded in target checkpoints'.format(len(missing)))
if __name__ == '__main__':
    # Smoke test: build the model, move it to the GPU, and push one random
    # batch through it (requires a CUDA device).
    model = S3DG(embed_size=512)
    # Initialize the weights with pretrained I3D net. In detail,
    # please refer to specific reproduced load_state_dict() function
    #if not os.path.exists('modelweights/RGB_imagenet.pkl'):
    #print 'No weights Found! please download first, or comment 382~384th line'
    #model.load_state_dict('modelweights/RGB_imagenet.pkl')
    model = model.cuda()
    # Shape is [BS, T, C, H, W]: forward() permutes (0,2,1,3,4) so dim 2 (=3)
    # is the channel axis.  torch.autograd.Variable is a deprecated no-op.
    data = torch.autograd.Variable(torch.rand(3, 224, 3, 228, 196)).cuda()
    print(model)
    out = model(data)
    print(out.shape)
"torch.nn.Dropout",
"torch.nn.init.constant_",
"torch.nn.LeakyReLU",
"torch.nn.Softmax",
"torch.rand",
"torch.load",
"torch.nn.MaxPool3d",
"torch.mean",
"torch.cat",
"torch.nn.AdaptiveAvgPool3d",
"torch.nn.Linear",
"torch.nn.init.normal_",
"torch.nn.Conv3d"
] | [((228, 333), 'torch.nn.Conv3d', 'nn.Conv3d', (['in_planes', 'out_planes'], {'kernel_size': 'kernel_size', 'stride': 'stride', 'padding': 'padding', 'bias': '(False)'}), '(in_planes, out_planes, kernel_size=kernel_size, stride=stride,\n padding=padding, bias=False)\n', (237, 333), True, 'import torch.nn as nn\n'), ((500, 547), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.01)', 'inplace': '(True)'}), '(negative_slope=0.01, inplace=True)\n', (512, 547), True, 'import torch.nn as nn\n'), ((847, 985), 'torch.nn.Conv3d', 'nn.Conv3d', (['in_planes', 'out_planes'], {'kernel_size': '(1, kernel_size, kernel_size)', 'stride': '(1, stride, stride)', 'padding': '(0, padding, padding)'}), '(in_planes, out_planes, kernel_size=(1, kernel_size, kernel_size),\n stride=(1, stride, stride), padding=(0, padding, padding))\n', (856, 985), True, 'import torch.nn as nn\n'), ((1035, 1154), 'torch.nn.Conv3d', 'nn.Conv3d', (['out_planes', 'out_planes'], {'kernel_size': '(kernel_size, 1, 1)', 'stride': '(stride, 1, 1)', 'padding': '(padding, 0, 0)'}), '(out_planes, out_planes, kernel_size=(kernel_size, 1, 1), stride=(\n stride, 1, 1), padding=(padding, 0, 0))\n', (1044, 1154), True, 'import torch.nn as nn\n'), ((1290, 1337), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.01)', 'inplace': '(True)'}), '(negative_slope=0.01, inplace=True)\n', (1302, 1337), True, 'import torch.nn as nn\n'), ((1448, 1495), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.01)', 'inplace': '(True)'}), '(negative_slope=0.01, inplace=True)\n', (1460, 1495), True, 'import torch.nn as nn\n'), ((1507, 1559), 'torch.nn.init.normal_', 'nn.init.normal_', (['self.conv2.weight'], {'mean': '(0)', 'std': '(0.01)'}), '(self.conv2.weight, mean=0, std=0.01)\n', (1522, 1559), True, 'import torch.nn as nn\n'), ((1569, 1606), 'torch.nn.init.constant_', 'nn.init.constant_', (['self.conv2.bias', '(0)'], {}), '(self.conv2.bias, 0)\n', (1586, 1606), True, 'import torch.nn as 
nn\n'), ((2968, 2998), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (2977, 2998), False, 'import torch\n'), ((3925, 3955), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (3934, 3955), False, 'import torch\n'), ((4882, 4912), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (4891, 4912), False, 'import torch\n'), ((5843, 5873), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (5852, 5873), False, 'import torch\n'), ((6808, 6838), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (6817, 6838), False, 'import torch\n'), ((7773, 7803), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (7782, 7803), False, 'import torch\n'), ((8771, 8801), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (8780, 8801), False, 'import torch\n'), ((9771, 9801), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (9780, 9801), False, 'import torch\n'), ((10771, 10801), 'torch.cat', 'torch.cat', (['(x0, x1, x2, x3)', '(1)'], {}), '((x0, x1, x2, x3), 1)\n', (10780, 10801), False, 'import torch\n'), ((12670, 12697), 'torch.nn.Linear', 'nn.Linear', (['(1024)', 'embed_size'], {}), '(1024, embed_size)\n', (12679, 12697), True, 'import torch.nn as nn\n'), ((12772, 12789), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)'}), '(p=0.5)\n', (12782, 12789), True, 'import torch.nn as nn\n'), ((12862, 12874), 'torch.nn.Softmax', 'nn.Softmax', ([], {}), '()\n', (12872, 12874), True, 'import torch.nn as nn\n'), ((12926, 12973), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.01)', 'inplace': '(True)'}), '(negative_slope=0.01, inplace=True)\n', (12938, 12973), True, 'import torch.nn as nn\n'), ((13521, 13537), 'torch.load', 'torch.load', (['path'], {}), '(path)\n', (13531, 
13537), False, 'import torch\n'), ((2676, 2732), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (2688, 2732), True, 'import torch.nn as nn\n'), ((3633, 3689), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (3645, 3689), True, 'import torch.nn as nn\n'), ((4590, 4646), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (4602, 4646), True, 'import torch.nn as nn\n'), ((5550, 5606), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (5562, 5606), True, 'import torch.nn as nn\n'), ((6515, 6571), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (6527, 6571), True, 'import torch.nn as nn\n'), ((7480, 7536), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (7492, 7536), True, 'import torch.nn as nn\n'), ((8465, 8521), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (8477, 8521), True, 'import torch.nn as nn\n'), ((9465, 9521), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (9477, 9521), True, 'import torch.nn as nn\n'), ((10464, 10520), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(3, 3, 3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=(3, 3, 3), stride=1, padding=1)\n', (10476, 
10520), True, 'import torch.nn as nn\n'), ((11524, 11596), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(2, 2, 2)', 'stride': '(2, 2, 2)', 'padding': '(0, 0, 0)'}), '(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 0, 0))\n', (11536, 11596), True, 'import torch.nn as nn\n'), ((11740, 11812), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(1, 3, 3)', 'stride': '(1, 2, 2)', 'padding': '(0, 0, 0)'}), '(kernel_size=(1, 3, 3), stride=(1, 2, 2), padding=(0, 0, 0))\n', (11752, 11812), True, 'import torch.nn as nn\n'), ((12048, 12120), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(1, 2, 2)', 'stride': '(1, 2, 2)', 'padding': '(0, 0, 0)'}), '(kernel_size=(1, 2, 2), stride=(1, 2, 2), padding=(0, 0, 0))\n', (12060, 12120), True, 'import torch.nn as nn\n'), ((12374, 12446), 'torch.nn.MaxPool3d', 'nn.MaxPool3d', ([], {'kernel_size': '(2, 2, 2)', 'stride': '(2, 2, 2)', 'padding': '(0, 0, 0)'}), '(kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 0, 0))\n', (12386, 12446), True, 'import torch.nn as nn\n'), ((12586, 12630), 'torch.nn.AdaptiveAvgPool3d', 'nn.AdaptiveAvgPool3d', ([], {'output_size': '(56, 2, 2)'}), '(output_size=(56, 2, 2))\n', (12606, 12630), True, 'import torch.nn as nn\n'), ((15125, 15156), 'torch.rand', 'torch.rand', (['(3)', '(224)', '(3)', '(228)', '(196)'], {}), '(3, 224, 3, 228, 196)\n', (15135, 15156), False, 'import torch\n'), ((13919, 13939), 'torch.mean', 'torch.mean', (['param', '(2)'], {}), '(param, 2)\n', (13929, 13939), False, 'import torch\n')] |
#
# Copyright 2019-2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from bz2 import BZ2File
from gzip import GzipFile
from io import TextIOWrapper
from logging import getLogger
from lzma import LZMAFile
from pathlib import Path
from types import TracebackType
from typing import BinaryIO, Optional, Type, cast
from overrides import overrides
from tqdm import tqdm
from zstandard import ZstdDecompressor
from nasty_utils.logging_ import ColoredBraceStyleAdapter
_LOGGER = ColoredBraceStyleAdapter(getLogger(__name__))
class DecompressingTextIOWrapper(TextIOWrapper):
    """Text-mode reader that transparently decompresses a file based on its
    extension (``.gz``, ``.bz2``, ``.xz``, ``.zst``); any other extension is
    read as a plain uncompressed file.  Optionally shows a tqdm progress bar
    driven by the number of *compressed* bytes consumed (see :meth:`tell`).
    """

    # TODO: implement write access
    def __init__(
        self,
        path: Path,
        *,
        encoding: str,
        warn_uncompressed: bool = True,
        progress_bar: bool = False,
        progress_bar_desc: Optional[str] = None,
    ):
        """Open *path* for text reading with the given *encoding*.

        :param path: file to open; its suffix selects the decompressor.
        :param warn_uncompressed: log a warning when the suffix is not a
            known compression format and the file is treated as plain text.
        :param progress_bar: display a tqdm bar over the compressed size.
        :param progress_bar_desc: bar label (defaults to the file name).
        """
        self.path = path
        # Raw (compressed) byte stream straight off the disk.
        self._fp = path.open("rb")
        # Decompressed byte stream layered on top of self._fp.
        self._fin: BinaryIO
        if path.suffix == ".gz":
            self._fin = cast(BinaryIO, GzipFile(fileobj=self._fp))
        elif path.suffix == ".bz2":
            self._fin = cast(BinaryIO, BZ2File(self._fp))
        elif path.suffix == ".xz":
            self._fin = cast(BinaryIO, LZMAFile(self._fp))
        elif path.suffix == ".zst":
            self._fin = cast(BinaryIO, ZstdDecompressor().stream_reader(self._fp))
        else:
            if warn_uncompressed:  # pragma: no cover
                _LOGGER.warning(
                    "Could not detect compression type of file '{}' from its "
                    "extension, treating as uncompressed file.",
                    path,
                )
            self._fin = self._fp
        self._progress_bar: Optional[tqdm[None]] = None
        if progress_bar:
            # Progress is measured in compressed bytes; total is the on-disk
            # file size so the bar reaches 100% exactly at EOF.
            self._progress_bar = tqdm(
                desc=progress_bar_desc or self.path.name,
                total=self.size(),
                unit="B",
                unit_scale=True,
                unit_divisor=1024,
                dynamic_ncols=True,
            )
        super().__init__(self._fin, encoding=encoding)

    def size(self) -> int:
        """Return the on-disk (compressed) size of the file in bytes."""
        return self.path.stat().st_size

    @overrides
    def read(self, n: Optional[int] = -1) -> str:
        result = super().read(n)
        # Advance the bar by the compressed bytes consumed since last update.
        if self._progress_bar is not None:
            self._progress_bar.update(self.tell() - self._progress_bar.n)
        return result

    @overrides
    def readline(self, size: int = -1) -> str:
        result = super().readline(size)
        # Advance the bar by the compressed bytes consumed since last update.
        if self._progress_bar is not None:
            self._progress_bar.update(self.tell() - self._progress_bar.n)
        return result

    @overrides
    def tell(self) -> int:
        """Tells the number of compressed bytes that have already been read."""
        return self._fp.tell()

    @overrides
    def __enter__(self) -> "DecompressingTextIOWrapper":
        return cast(DecompressingTextIOWrapper, super().__enter__())

    # In the following the type-comment is used to have Mypy ignore that this method
    # definition does not match the supertype (no idea why that can be or to fix it).
    # The noqa-comment is to have flake8 not print an error on not knowing the
    # ignore[override] type, which is a Mypy-annotation flake8 doesn't know about.
    @overrides
    def __exit__(  # type: ignore[override]  # noqa: F821
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        # Close both stream layers (decompressor and raw file) and the bar
        # before delegating to TextIOWrapper's own exit handling.
        self._fp.close()
        self._fin.close()
        if self._progress_bar is not None:
            self._progress_bar.close()
        return super().__exit__(exc_type, exc_value, traceback)
| [
"logging.getLogger",
"lzma.LZMAFile",
"gzip.GzipFile",
"bz2.BZ2File",
"zstandard.ZstdDecompressor"
] | [((1011, 1030), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (1020, 1030), False, 'from logging import getLogger\n'), ((1498, 1524), 'gzip.GzipFile', 'GzipFile', ([], {'fileobj': 'self._fp'}), '(fileobj=self._fp)\n', (1506, 1524), False, 'from gzip import GzipFile\n'), ((1601, 1618), 'bz2.BZ2File', 'BZ2File', (['self._fp'], {}), '(self._fp)\n', (1608, 1618), False, 'from bz2 import BZ2File\n'), ((1694, 1712), 'lzma.LZMAFile', 'LZMAFile', (['self._fp'], {}), '(self._fp)\n', (1702, 1712), False, 'from lzma import LZMAFile\n'), ((1789, 1807), 'zstandard.ZstdDecompressor', 'ZstdDecompressor', ([], {}), '()\n', (1805, 1807), False, 'from zstandard import ZstdDecompressor\n')] |
# ___________________________________________________________________________
#
# Pyomo: Python Optimization Modeling Objects
# Copyright 2017 National Technology and Engineering Solutions of Sandia, LLC
# Under the terms of Contract DE-NA0003525 with National Technology and
# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
# rights in this software.
# This software is distributed under the 3-clause BSD License.
# ___________________________________________________________________________
#
# This file was originally part of PySP and Pyomo, available: https://github.com/Pyomo/pysp
# Copied with modification from pysp/tests/unit/test_scenariotree.py
import pyomo.common.unittest as unittest
from mpisppy.utils.pysp_model.tree_structure_model import \
(ScenarioTreeModelFromNetworkX,
CreateConcreteTwoStageScenarioTreeModel)
from mpisppy.utils.pysp_model.tree_structure import ScenarioTree
from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list
from pyomo.core import (ConcreteModel,
Set,
Var,
Expression,
Objective,
Block,
value)
from pyomo.common.dependencies import (
networkx, networkx_available as has_networkx
)
def _get_names(iterable):
return [_.name for _ in iterable]
class TestScenarioTree(unittest.TestCase):
    """Tests that variable-name templates declared on a two-stage scenario
    tree expand to the expected (derived) nonanticipative variables at the
    root node, for scalar vars, indexed vars, singleton blocks, and indexed
    blocks, with and without wildcard index templates.
    """

    def _get_block_model(self):
        """Build a model with singleton blocks b1..b4 and indexed blocks
        B1..B4, each containing a scalar Var ``x`` and an indexed Var ``X``."""
        model = ConcreteModel()
        model.s = Set(initialize=[1, 2])
        b = Block(concrete=True)
        b.s = Set(initialize=[1, 2])
        b.x = Var()
        b.X = Var(model.s)
        model.b1 = b.clone()
        model.b2 = b.clone()
        model.b3 = b.clone()
        model.b4 = b.clone()
        model.B1 = Block(model.s, rule=lambda _, i: b.clone())
        model.B2 = Block(model.s, rule=lambda _, i: b.clone())
        model.B3 = Block(model.s, rule=lambda _, i: b.clone())
        model.B4 = Block(model.s, rule=lambda _, i: b.clone())
        model.FirstStageCost = Expression(expr=0.0)
        model.SecondStageCost = Expression(expr=0.0)
        model.obj = Objective(expr=0.0)
        return model

    def _get_var_model(self, index_set=None):
        """Build a model with variables x/y/z/q, scalar when *index_set* is
        None, otherwise indexed by ``Set(initialize=index_set)``."""
        model = ConcreteModel()
        if index_set is not None:
            model.s = Set(initialize=index_set)
            args = (model.s,)
        else:
            args = ()
        model.x = Var(*args)
        model.y = Var(*args)
        model.z = Var(*args)
        model.q = Var(*args)
        model.FirstStageCost = Expression(expr=0.0)
        model.SecondStageCost = Expression(expr=0.0)
        model.obj = Objective(expr=0.0)
        return model

    def _make_scenario_tree(self, stage_var, stage_derived,
                            node_var, node_derived):
        """Create a one-scenario two-stage tree with the given variable
        templates, check its basic shape, and return it."""
        st_model = CreateConcreteTwoStageScenarioTreeModel(1)
        st_model.StageVariables['Stage1'].add(stage_var)
        st_model.StageDerivedVariables['Stage1'].add(stage_derived)
        st_model.NodeVariables['RootNode'].add(node_var)
        st_model.NodeDerivedVariables['RootNode'].add(node_derived)
        st_model.StageCost['Stage1'] = "FirstStageCost"
        st_model.StageCost['Stage2'] = "SecondStageCost"
        scenario_tree = ScenarioTree(scenariotreeinstance=st_model)
        self.assertEqual(len(scenario_tree.stages), 2)
        self.assertEqual(len(scenario_tree.nodes), 2)
        self.assertEqual(len(scenario_tree.scenarios), 1)
        return scenario_tree

    def _root_name_lists(self, model, scenario_tree):
        """Return (nonant names, derived nonant names) at the root node."""
        root = scenario_tree.findRootNode()
        return (_get_names(_get_nonant_list(model, root)),
                _get_names(_get_derived_nonant_list(model, root)))

    def _assert_vardata_names(self, model, component_names, name_list):
        """Assert every VarData of each named component is in *name_list*."""
        for name in component_names:
            var = model.find_component(name)
            for vardata in var.values():
                self.assertIn(vardata.name, name_list)

    def _check_indexed_block_names(self, model, scenario_tree):
        """Checks shared by the B1..B4 (indexed block) template tests."""
        nonant, derived = self._root_name_lists(model, scenario_tree)
        self.assertEqual(len(nonant), 12)
        self.assertEqual(len(derived), 12)
        for name in ("B1[1].x", "B1[2].x", "B3[1].x", "B3[2].x"):
            self.assertIn(name, nonant)
        self._assert_vardata_names(
            model, ("B1[1].X", "B1[2].X", "B3[1].X", "B3[2].X"), nonant)
        for name in ("B2[1].x", "B2[2].x", "B4[1].x", "B4[2].x"):
            self.assertIn(name, derived)
        self._assert_vardata_names(
            model, ("B2[1].X", "B2[2].X", "B4[1].X", "B4[2].X"), derived)

    def _check_singleton_block_names(self, model, scenario_tree):
        """Checks shared by the b1..b4 (singleton block) template tests."""
        nonant, derived = self._root_name_lists(model, scenario_tree)
        self.assertEqual(len(nonant), 6)
        self.assertEqual(len(derived), 6)
        for name in ("b1.x", "b3.x"):
            self.assertIn(name, nonant)
        self._assert_vardata_names(model, ("b1.X", "b3.X"), nonant)
        for name in ("b2.x", "b4.x"):
            self.assertIn(name, derived)
        self._assert_vardata_names(model, ("b2.X", "b4.X"), derived)

    def _check_scalar_var_names(self, model, scenario_tree):
        """Checks shared by the scalar x/y/z/q template tests."""
        nonant, derived = self._root_name_lists(model, scenario_tree)
        self.assertEqual(len(nonant), 2)
        self.assertEqual(len(derived), 2)
        for name in ("x", "z"):
            self.assertIn(name, nonant)
        for name in ("y", "q"):
            self.assertIn(name, derived)

    def _check_indexed_var_names(self, model, scenario_tree):
        """Checks shared by the indexed x/y/z/q template tests."""
        nonant, derived = self._root_name_lists(model, scenario_tree)
        self.assertEqual(len(nonant), 2 * len(model.s))
        self.assertEqual(len(derived), 2 * len(model.s))
        for name, name_list in (("x", nonant), ("z", nonant),
                                ("y", derived), ("q", derived)):
            indexed_var = model.find_component(name)
            for index in model.s:
                self.assertIn(indexed_var[index].name, name_list)

    def test_indexedblock_noindextemplate(self):
        tree = self._make_scenario_tree("B1", "B2", "B3", "B4")
        self._check_indexed_block_names(self._get_block_model(), tree)

    def test_indexedblock_wildcardtemplate(self):
        tree = self._make_scenario_tree("B1[*]", "B2[*]", "B3[*]", "B4[*]")
        self._check_indexed_block_names(self._get_block_model(), tree)

    def test_singletonblock_wildcardtemplate(self):
        tree = self._make_scenario_tree("b1[*]", "b2[*]", "b3[*]", "b4[*]")
        self._check_singleton_block_names(self._get_block_model(), tree)

    def test_singletonblock_noindextemplate(self):
        tree = self._make_scenario_tree("b1", "b2", "b3", "b4")
        self._check_singleton_block_names(self._get_block_model(), tree)

    def test_singletonvar_noindextemplate(self):
        tree = self._make_scenario_tree("x", "y", "z", "q")
        self._check_scalar_var_names(self._get_var_model(), tree)

    def test_singletonvar_wildcardtemplate(self):
        tree = self._make_scenario_tree("x[*]", "y[*]", "z[*]", "q[*]")
        self._check_scalar_var_names(self._get_var_model(), tree)

    def test_multiindexedvar_singlewildcardtemplate(self):
        # Whitespace inside the index template must be tolerated.
        tree = self._make_scenario_tree(
            "x[*,* ]", "y[ *,*]", "z[*,*]", "q[ * , * ]")
        model = self._get_var_model(index_set=[(1, 'a'), (2, 'b'), (3, 'c')])
        self._check_indexed_var_names(model, tree)

    def test_indexedvar_indextemplate(self):
        tree = self._make_scenario_tree("x[*]", "y[*]", "z[*]", "q[*]")
        model = self._get_var_model(index_set=[1, 2, 3])
        self._check_indexed_var_names(model, tree)

    def test_indexedvar_noindextemplate(self):
        tree = self._make_scenario_tree("x", "y", "z", "q")
        model = self._get_var_model(index_set=[1, 2, 3])
        self._check_indexed_var_names(model, tree)
@unittest.skipIf(not has_networkx, "Requires networkx module")
class TestScenarioTreeFromNetworkX(unittest.TestCase):
    """Tests for building a scenario tree model from a networkx digraph:
    validation of malformed graphs, attribute-name customization,
    probability handling, and scenario bundling."""

    def _assert_members(self, component, expected):
        """Assert *component* holds exactly *expected* (order-insensitive)."""
        self.assertEqual(sorted(list(component)), sorted(expected))

    def _assert_no_bundles(self, model):
        """Assert that the model declares no scenario bundles."""
        self.assertEqual(model.Bundling.value, False)
        self.assertEqual(list(model.Bundles), [])
        self.assertEqual(len(model.BundleScenarios), 0)

    def test_empty(self):
        G = networkx.DiGraph()
        with self.assertRaises(networkx.NetworkXPointlessConcept):
            ScenarioTreeModelFromNetworkX(G)

    def test_not_tree(self):
        # A two-node cycle is not a tree.
        G = networkx.DiGraph()
        G.add_node("1")
        G.add_node("2")
        G.add_edge("1", "2")
        G.add_edge("2", "1")
        with self.assertRaises(TypeError):
            ScenarioTreeModelFromNetworkX(G)

    def test_not_directed(self):
        G = networkx.Graph()
        G.add_node("1")
        G.add_node("2")
        G.add_edge("1", "2")
        with self.assertRaises(TypeError):
            ScenarioTreeModelFromNetworkX(G)

    def test_not_branching(self):
        # Two parents for one node: a DAG, but not a branching.
        G = networkx.DiGraph()
        G.add_node("1")
        G.add_node("2")
        G.add_node("R")
        G.add_edge("1", "R")
        G.add_edge("2", "R")
        with self.assertRaises(TypeError):
            ScenarioTreeModelFromNetworkX(G)

    def test_not_enough_stages(self):
        G = networkx.DiGraph()
        G.add_node("R")
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(G)

    def test_missing_node_name(self):
        G = networkx.DiGraph()
        G.add_node("R", name="Root")
        G.add_node("C")  # no "name" attribute
        G.add_edge("R", "C", weight=1)
        with self.assertRaises(KeyError):
            ScenarioTreeModelFromNetworkX(
                G,
                node_name_attribute="name")

    def test_missing_scenario_name(self):
        G = networkx.DiGraph()
        G.add_node("R", name="Root")
        G.add_node("C")  # leaf has no "name" attribute for its scenario
        G.add_edge("R", "C", weight=1)
        with self.assertRaises(KeyError):
            ScenarioTreeModelFromNetworkX(
                G,
                scenario_name_attribute="name")

    def test_missing_weight(self):
        G = networkx.DiGraph()
        G.add_node("R", name="Root")
        G.add_node("C", name="Child")
        G.add_edge("R", "C")  # no "weight" attribute
        with self.assertRaises(KeyError):
            ScenarioTreeModelFromNetworkX(G)

    def test_bad_weight1(self):
        # A single branch with probability != 1 must be rejected.
        G = networkx.DiGraph()
        G.add_node("R")
        G.add_node("C")
        G.add_edge("R", "C", weight=0.8)
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(G)

    def test_bad_weight2(self):
        # Branch probabilities that do not sum to one must be rejected.
        G = networkx.DiGraph()
        G.add_node("R")
        G.add_node("C1")
        G.add_edge("R", "C1", weight=0.8)
        G.add_node("C2")
        G.add_edge("R", "C2", weight=0.1)
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(G)

    def test_bad_custom_stage_names1(self):
        # Too few stage names for a two-stage tree.
        G = networkx.DiGraph()
        G.add_node("R")
        G.add_node("C1")
        G.add_edge("R", "C1", weight=1.0)
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(
                G, stage_names=["Stage1"])

    def test_bad_custom_stage_names2(self):
        # Duplicate stage names must be rejected.
        G = networkx.DiGraph()
        G.add_node("R")
        G.add_node("C1")
        G.add_edge("R", "C1", weight=1.0)
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(
                G, stage_names=["Stage1", "Stage1"])

    def test_two_stage(self):
        G = networkx.DiGraph()
        G.add_node("Root")
        G.add_node("Child1")
        G.add_edge("Root", "Child1", weight=0.8)
        G.add_node("Child2")
        G.add_edge("Root", "Child2", weight=0.2)
        model = ScenarioTreeModelFromNetworkX(G)
        self._assert_members(model.Stages, ["Stage1", "Stage2"])
        self._assert_members(model.Nodes, ["Root", "Child1", "Child2"])
        self._assert_members(model.Children["Root"], ["Child1", "Child2"])
        self._assert_members(model.Children["Child1"], [])
        self._assert_members(model.Children["Child2"], [])
        self._assert_members(model.Scenarios, ["Child1", "Child2"])
        self.assertEqual(value(model.ConditionalProbability["Root"]), 1.0)
        self.assertEqual(value(model.ConditionalProbability["Child1"]), 0.8)
        self.assertEqual(value(model.ConditionalProbability["Child2"]), 0.2)
        model.StageCost["Stage1"] = "c1"
        model.StageCost["Stage2"] = "c2"
        model.StageVariables["Stage1"].add("x")
        self._assert_no_bundles(model)
        ScenarioTree(scenariotreeinstance=model)

    def test_two_stage_more_node_attributes(self):
        G = networkx.DiGraph()
        G.add_node("Root",
                   cost="c1",
                   variables=["x"],
                   derived_variables=["y"])
        G.add_node("Child1",
                   cost="c2",
                   variables=["q"],
                   derived_variables=["z"])
        G.add_edge("Root", "Child1", weight=0.8)
        G.add_node("Child2",
                   cost="c2",
                   variables=["q"],
                   derived_variables=["z"])
        G.add_edge("Root", "Child2", weight=0.2)
        model = ScenarioTreeModelFromNetworkX(G)
        self._assert_members(model.Stages, ["Stage1", "Stage2"])
        self._assert_members(model.Nodes, ["Root", "Child1", "Child2"])
        self._assert_members(model.Children["Root"], ["Child1", "Child2"])
        self._assert_members(model.Children["Child1"], [])
        self._assert_members(model.Children["Child2"], [])
        self._assert_members(model.Scenarios, ["Child1", "Child2"])
        self.assertEqual(value(model.ConditionalProbability["Root"]), 1.0)
        self.assertEqual(value(model.ConditionalProbability["Child1"]), 0.8)
        self.assertEqual(value(model.ConditionalProbability["Child2"]), 0.2)
        # Per-node attributes populate the Node* components only; the
        # stage-level components stay empty.
        for stage in ("Stage1", "Stage2"):
            self.assertEqual(model.StageCost[stage].value, None)
            self.assertEqual(list(model.StageVariables[stage]), [])
            self.assertEqual(list(model.StageDerivedVariables[stage]), [])
        self.assertEqual(model.NodeCost["Root"].value, "c1")
        self.assertEqual(list(model.NodeVariables["Root"]), ["x"])
        self.assertEqual(list(model.NodeDerivedVariables["Root"]), ["y"])
        for child in ("Child1", "Child2"):
            self.assertEqual(model.NodeCost[child].value, "c2")
            self.assertEqual(list(model.NodeVariables[child]), ["q"])
            self.assertEqual(list(model.NodeDerivedVariables[child]), ["z"])
        self._assert_no_bundles(model)
        ScenarioTree(scenariotreeinstance=model)

    def test_two_stage_custom_names(self):
        G = networkx.DiGraph()
        G.add_node("R", label="Root")
        G.add_node("C1", label="Child1", scenario="S1")
        G.add_edge("R", "C1", probability=0.8)
        G.add_node("C2", label="Child2", scenario="S2")
        G.add_edge("R", "C2", probability=0.2)
        model = ScenarioTreeModelFromNetworkX(
            G,
            edge_probability_attribute="probability",
            node_name_attribute="label",
            stage_names=["T1", "T2"],
            scenario_name_attribute="scenario")
        self._assert_members(model.Stages, ["T1", "T2"])
        self._assert_members(model.Nodes, ["Root", "Child1", "Child2"])
        self._assert_members(model.Children["Root"], ["Child1", "Child2"])
        self._assert_members(model.Children["Child1"], [])
        self._assert_members(model.Children["Child2"], [])
        self._assert_members(model.Scenarios, ["S1", "S2"])
        self.assertEqual(value(model.ConditionalProbability["Root"]), 1.0)
        self.assertEqual(value(model.ConditionalProbability["Child1"]), 0.8)
        self.assertEqual(value(model.ConditionalProbability["Child2"]), 0.2)
        model.StageCost["T1"] = "c1"
        model.StageCost["T2"] = "c2"
        model.StageVariables["T1"].add("x")
        self._assert_no_bundles(model)
        ScenarioTree(scenariotreeinstance=model)

    def test_multi_stage(self):
        # Balanced ternary tree of depth 2: nodes 0..12, leaves 4..12.
        G = networkx.balanced_tree(3, 2, networkx.DiGraph())
        model = ScenarioTreeModelFromNetworkX(
            G,
            edge_probability_attribute=None)
        self._assert_members(model.Stages, ["Stage1", "Stage2", "Stage3"])
        self._assert_members(model.Nodes, list(range(13)))
        children = {0: [1, 2, 3], 1: [4, 5, 6], 2: [7, 8, 9], 3: [10, 11, 12]}
        for node in range(13):
            self._assert_members(model.Children[node], children.get(node, []))
        self._assert_members(model.Scenarios, list(range(4, 13)))
        # With no edge weights, probability is spread uniformly over the
        # three siblings at each branch point.
        self.assertEqual(value(model.ConditionalProbability[0]), 1.0)
        for node in range(1, 13):
            self.assertAlmostEqual(
                value(model.ConditionalProbability[node]), 1.0 / 3)
        model.StageCost["Stage1"] = "c1"
        model.StageCost["Stage2"] = "c2"
        model.StageCost["Stage3"] = "c3"
        model.StageVariables["Stage1"].add("x")
        model.StageVariables["Stage2"].add("y")
        model.StageVariables["Stage3"].add("y")
        self._assert_no_bundles(model)
        ScenarioTree(scenariotreeinstance=model)

    def test_unbalanced(self):
        # "1" is a scenario at stage 2; "00"/"01" are scenarios at stage 3.
        G = networkx.DiGraph()
        G.add_node("R")
        G.add_node("0")
        G.add_node("1")
        G.add_edge("R", "0")
        G.add_edge("R", "1")
        G.add_node("00")
        G.add_node("01")
        G.add_edge("0", "00")
        G.add_edge("0", "01")
        model = ScenarioTreeModelFromNetworkX(
            G,
            edge_probability_attribute=None)
        self._assert_members(model.Stages, ["Stage1", "Stage2", "Stage3"])
        self._assert_members(model.Nodes, ["R", "0", "1", "00", "01"])
        self._assert_members(model.Children["R"], ["0", "1"])
        self._assert_members(model.Children["0"], ["00", "01"])
        for leaf in ("1", "00", "01"):
            self._assert_members(model.Children[leaf], [])
        self._assert_members(model.Scenarios, ["00", "01", "1"])
        self.assertEqual(value(model.ConditionalProbability["R"]), 1.0)
        for node in ("0", "1", "00", "01"):
            self.assertEqual(value(model.ConditionalProbability[node]), 0.5)
        model.StageCost["Stage1"] = "c1"
        model.StageCost["Stage2"] = "c2"
        model.StageCost["Stage3"] = "c3"
        model.StageVariables["Stage1"].add("x")
        model.StageVariables["Stage2"].add("x")
        self._assert_no_bundles(model)
        ScenarioTree(scenariotreeinstance=model)

    def test_bundles(self):
        G = networkx.DiGraph()
        G.add_node("r")
        for i in range(4):
            # Alternate scenarios between bundle 0 and bundle 1.
            G.add_node("u" + str(i), bundle=i % 2)
            G.add_edge("r", "u" + str(i))
        model = ScenarioTreeModelFromNetworkX(
            G,
            edge_probability_attribute=None)
        self._assert_members(model.Stages, ["Stage1", "Stage2"])
        self._assert_members(model.Nodes, ["r", "u0", "u1", "u2", "u3"])
        self._assert_members(model.Children["r"], ["u0", "u1", "u2", "u3"])
        for i in range(4):
            self._assert_members(model.Children["u" + str(i)], [])
        self._assert_members(model.Scenarios, ["u0", "u1", "u2", "u3"])
        self.assertEqual(value(model.ConditionalProbability["r"]), 1.0)
        for i in range(4):
            self.assertEqual(
                value(model.ConditionalProbability["u" + str(i)]), 0.25)
        self.assertEqual(model.Bundling.value, True)
        self.assertEqual(list(model.Bundles), [0, 1])
        for k, bundle_name in enumerate(model.Bundles):
            self.assertEqual(list(model.BundleScenarios[bundle_name]),
                             ["u" + str(i) for i in range(4)
                              if i % 2 == k])
        model.StageCost["Stage1"] = "c1"
        model.StageCost["Stage2"] = "c2"
        model.StageVariables["Stage1"].add("x")
        ScenarioTree(scenariotreeinstance=model)

    def test_bundles_incomplete(self):
        G = networkx.DiGraph()
        G.add_node("r")
        for i in range(4):
            G.add_node("u" + str(i), bundle="B")
            G.add_edge("r", "u" + str(i))
        model = ScenarioTreeModelFromNetworkX(
            G,
            edge_probability_attribute=None)
        self.assertEqual(model.Bundling.value, True)
        self.assertEqual(list(model.Bundles), ["B"])
        self.assertEqual(list(model.BundleScenarios["B"]),
                         ["u" + str(i) for i in range(4)])
        # A None bundle attribute on one scenario is rejected ...
        G.nodes["u0"]["bundle"] = None
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(
                G,
                edge_probability_attribute=None)
        # ... and so is a missing bundle attribute.
        del G.nodes["u0"]["bundle"]
        with self.assertRaises(ValueError):
            ScenarioTreeModelFromNetworkX(
                G,
                edge_probability_attribute=None)
# Allow running this test module directly with ``python <file>``.
if __name__ == "__main__":
    unittest.main()
| [
"mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list",
"mpisppy.utils.pysp_model.pysp_model._get_nonant_list",
"pyomo.common.dependencies.networkx.Graph",
"pyomo.common.unittest.main",
"pyomo.common.unittest.skipIf",
"pyomo.core.Objective",
"mpisppy.utils.pysp_model.tree_structure_model.CreateC... | [((17450, 17511), 'pyomo.common.unittest.skipIf', 'unittest.skipIf', (['(not has_networkx)', '"""Requires networkx module"""'], {}), "(not has_networkx, 'Requires networkx module')\n", (17465, 17511), True, 'import pyomo.common.unittest as unittest\n'), ((34737, 34752), 'pyomo.common.unittest.main', 'unittest.main', ([], {}), '()\n', (34750, 34752), True, 'import pyomo.common.unittest as unittest\n'), ((1513, 1528), 'pyomo.core.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (1526, 1528), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((1547, 1569), 'pyomo.core.Set', 'Set', ([], {'initialize': '[1, 2]'}), '(initialize=[1, 2])\n', (1550, 1569), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((1581, 1601), 'pyomo.core.Block', 'Block', ([], {'concrete': '(True)'}), '(concrete=True)\n', (1586, 1601), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((1616, 1638), 'pyomo.core.Set', 'Set', ([], {'initialize': '[1, 2]'}), '(initialize=[1, 2])\n', (1619, 1638), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((1652, 1657), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (1655, 1657), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((1672, 1684), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (1675, 1684), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((2080, 2100), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (2090, 2100), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((2133, 2153), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (2143, 2153), False, 'from pyomo.core import ConcreteModel, Set, 
Var, Expression, Objective, Block, value\n'), ((2174, 2193), 'pyomo.core.Objective', 'Objective', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (2183, 2193), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((2284, 2326), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (2323, 2326), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((2689, 2732), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (2701, 2732), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((4233, 4275), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (4272, 4275), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((4650, 4693), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (4662, 4693), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((6196, 6238), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (6235, 6238), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((6613, 6656), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (6625, 6656), False, 'from mpisppy.utils.pysp_model.tree_structure import 
ScenarioTree\n'), ((7777, 7819), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (7816, 7819), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((8182, 8225), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (8194, 8225), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((9345, 9387), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (9384, 9387), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((9746, 9789), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (9758, 9789), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((9974, 9989), 'pyomo.core.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (9987, 9989), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10008, 10013), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (10011, 10013), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10032, 10037), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (10035, 10037), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10056, 10061), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (10059, 10061), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10080, 10085), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (10083, 10085), False, 'from pyomo.core 
import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10117, 10137), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (10127, 10137), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10170, 10190), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (10180, 10190), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10211, 10230), 'pyomo.core.Objective', 'Objective', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (10220, 10230), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((10762, 10804), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (10801, 10804), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((11175, 11218), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (11187, 11218), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((11403, 11418), 'pyomo.core.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (11416, 11418), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((11437, 11442), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (11440, 11442), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((11461, 11466), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (11464, 11466), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((11485, 11490), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (11488, 11490), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, 
value\n'), ((11509, 11514), 'pyomo.core.Var', 'Var', ([], {}), '()\n', (11512, 11514), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((11546, 11566), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (11556, 11566), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((11599, 11619), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (11609, 11619), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((11640, 11659), 'pyomo.core.Objective', 'Objective', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (11649, 11659), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((12199, 12241), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (12238, 12241), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((12626, 12669), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (12638, 12669), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((12854, 12869), 'pyomo.core.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (12867, 12869), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((12888, 12934), 'pyomo.core.Set', 'Set', ([], {'initialize': "[(1, 'a'), (2, 'b'), (3, 'c')]"}), "(initialize=[(1, 'a'), (2, 'b'), (3, 'c')])\n", (12891, 12934), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((12948, 12960), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (12951, 12960), False, 'from pyomo.core import ConcreteModel, 
Set, Var, Expression, Objective, Block, value\n'), ((12979, 12991), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (12982, 12991), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((13010, 13022), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (13013, 13022), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((13041, 13053), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (13044, 13053), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((13085, 13105), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (13095, 13105), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((13138, 13158), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (13148, 13158), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((13179, 13198), 'pyomo.core.Objective', 'Objective', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (13188, 13198), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((13995, 14037), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (14034, 14037), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((14408, 14451), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (14420, 14451), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((14636, 14651), 'pyomo.core.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (14649, 14651), False, 'from pyomo.core import ConcreteModel, Set, Var, 
Expression, Objective, Block, value\n'), ((14670, 14695), 'pyomo.core.Set', 'Set', ([], {'initialize': '[1, 2, 3]'}), '(initialize=[1, 2, 3])\n', (14673, 14695), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14712, 14724), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (14715, 14724), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14743, 14755), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (14746, 14755), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14774, 14786), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (14777, 14786), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14805, 14817), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (14808, 14817), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14849, 14869), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (14859, 14869), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14902, 14922), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (14912, 14922), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((14943, 14962), 'pyomo.core.Objective', 'Objective', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (14952, 14962), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((15761, 15803), 'mpisppy.utils.pysp_model.tree_structure_model.CreateConcreteTwoStageScenarioTreeModel', 'CreateConcreteTwoStageScenarioTreeModel', (['(1)'], {}), '(1)\n', (15800, 15803), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((16162, 16205), 
'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'st_model'}), '(scenariotreeinstance=st_model)\n', (16174, 16205), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((16390, 16405), 'pyomo.core.ConcreteModel', 'ConcreteModel', ([], {}), '()\n', (16403, 16405), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16424, 16449), 'pyomo.core.Set', 'Set', ([], {'initialize': '[1, 2, 3]'}), '(initialize=[1, 2, 3])\n', (16427, 16449), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16466, 16478), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (16469, 16478), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16497, 16509), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (16500, 16509), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16528, 16540), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (16531, 16540), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16559, 16571), 'pyomo.core.Var', 'Var', (['model.s'], {}), '(model.s)\n', (16562, 16571), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16603, 16623), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (16613, 16623), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16656, 16676), 'pyomo.core.Expression', 'Expression', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (16666, 16676), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((16697, 16716), 'pyomo.core.Objective', 'Objective', ([], {'expr': '(0.0)'}), '(expr=0.0)\n', (16706, 16716), False, 'from pyomo.core import ConcreteModel, Set, 
Var, Expression, Objective, Block, value\n'), ((17606, 17624), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (17622, 17624), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((17779, 17797), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (17795, 17797), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((18038, 18054), 'pyomo.common.dependencies.networkx.Graph', 'networkx.Graph', ([], {}), '()\n', (18052, 18054), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((18267, 18285), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (18283, 18285), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((18555, 18573), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (18571, 18573), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((18738, 18756), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (18754, 18756), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((19060, 19078), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (19076, 19078), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((19379, 19397), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (19395, 19397), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((19634, 19652), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (19650, 19652), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((19877, 19895), 
'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (19893, 19895), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((20200, 20218), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (20216, 20218), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((20498, 20516), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (20514, 20516), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((20790, 20808), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (20806, 20808), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((21008, 21040), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (21037, 21040), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((22224, 22264), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'model'}), '(scenariotreeinstance=model)\n', (22236, 22264), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((22329, 22347), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (22345, 22347), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((22877, 22909), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (22906, 22909), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((24996, 25036), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 
'ScenarioTree', ([], {'scenariotreeinstance': 'model'}), '(scenariotreeinstance=model)\n', (25008, 25036), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((25093, 25111), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (25109, 25111), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((25372, 25545), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': '"""probability"""', 'node_name_attribute': '"""label"""', 'stage_names': "['T1', 'T2']", 'scenario_name_attribute': '"""scenario"""'}), "(G, edge_probability_attribute='probability',\n node_name_attribute='label', stage_names=['T1', 'T2'],\n scenario_name_attribute='scenario')\n", (25401, 25545), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((26753, 26793), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'model'}), '(scenariotreeinstance=model)\n', (26765, 26793), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((26902, 26967), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': 'None'}), '(G, edge_probability_attribute=None)\n', (26931, 26967), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((30066, 30106), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'model'}), '(scenariotreeinstance=model)\n', (30078, 30106), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((30151, 30169), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), 
'()\n', (30167, 30169), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((30426, 30491), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': 'None'}), '(G, edge_probability_attribute=None)\n', (30455, 30491), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((32106, 32146), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'model'}), '(scenariotreeinstance=model)\n', (32118, 32146), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((32188, 32206), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (32204, 32206), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((32361, 32426), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': 'None'}), '(G, edge_probability_attribute=None)\n', (32390, 32426), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((33740, 33780), 'mpisppy.utils.pysp_model.tree_structure.ScenarioTree', 'ScenarioTree', ([], {'scenariotreeinstance': 'model'}), '(scenariotreeinstance=model)\n', (33752, 33780), False, 'from mpisppy.utils.pysp_model.tree_structure import ScenarioTree\n'), ((33833, 33851), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (33849, 33851), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((34006, 34071), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': 'None'}), 
'(G, edge_probability_attribute=None)\n', (34035, 34071), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((3025, 3054), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (3041, 3054), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((3103, 3140), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (3127, 3140), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((4986, 5015), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (5002, 5015), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((5064, 5101), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (5088, 5101), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((6949, 6978), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (6965, 6978), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((7027, 7064), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (7051, 7064), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((8518, 8547), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (8534, 8547), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, 
_get_derived_nonant_list\n'), ((8596, 8633), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (8620, 8633), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((10315, 10344), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (10331, 10344), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((10393, 10430), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (10417, 10430), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((11744, 11773), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (11760, 11773), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((11822, 11859), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (11846, 11859), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((13283, 13312), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (13299, 13312), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((13361, 13398), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (13385, 13398), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((15047, 15076), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 
'root'], {}), '(model, root)\n', (15063, 15076), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((15125, 15162), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (15149, 15162), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((16801, 16830), 'mpisppy.utils.pysp_model.pysp_model._get_nonant_list', '_get_nonant_list', (['model', 'root'], {}), '(model, root)\n', (16817, 16830), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((16879, 16916), 'mpisppy.utils.pysp_model.pysp_model._get_derived_nonant_list', '_get_derived_nonant_list', (['model', 'root'], {}), '(model, root)\n', (16903, 16916), False, 'from mpisppy.utils.pysp_model.pysp_model import _get_nonant_list, _get_derived_nonant_list\n'), ((17704, 17736), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (17733, 17736), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((17959, 17991), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (17988, 17991), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((18187, 18219), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (18216, 18219), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((18471, 18503), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 
'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (18500, 18503), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((18654, 18686), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (18683, 18686), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((18911, 18971), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'node_name_attribute': '"""name"""'}), "(G, node_name_attribute='name')\n", (18940, 18971), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((19233, 19297), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'scenario_name_attribute': '"""name"""'}), "(G, scenario_name_attribute='name')\n", (19262, 19297), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((19556, 19588), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (19585, 19588), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((19799, 19831), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (19828, 19831), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((20110, 20142), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 
'ScenarioTreeModelFromNetworkX', (['G'], {}), '(G)\n', (20139, 20142), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((20367, 20423), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'stage_names': "['Stage1']"}), "(G, stage_names=['Stage1'])\n", (20396, 20423), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((20664, 20730), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'stage_names': "['Stage1', 'Stage1']"}), "(G, stage_names=['Stage1', 'Stage1'])\n", (20693, 20730), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((21722, 21765), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Root']"], {}), "(model.ConditionalProbability['Root'])\n", (21727, 21765), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((21797, 21842), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Child1']"], {}), "(model.ConditionalProbability['Child1'])\n", (21802, 21842), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((21874, 21919), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Child2']"], {}), "(model.ConditionalProbability['Child2'])\n", (21879, 21919), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((23591, 23634), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Root']"], {}), "(model.ConditionalProbability['Root'])\n", (23596, 23634), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((23666, 23711), 
'pyomo.core.value', 'value', (["model.ConditionalProbability['Child1']"], {}), "(model.ConditionalProbability['Child1'])\n", (23671, 23711), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((23743, 23788), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Child2']"], {}), "(model.ConditionalProbability['Child2'])\n", (23748, 23788), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((26263, 26306), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Root']"], {}), "(model.ConditionalProbability['Root'])\n", (26268, 26306), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((26338, 26383), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Child1']"], {}), "(model.ConditionalProbability['Child1'])\n", (26343, 26383), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((26415, 26460), 'pyomo.core.value', 'value', (["model.ConditionalProbability['Child2']"], {}), "(model.ConditionalProbability['Child2'])\n", (26420, 26460), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((26866, 26884), 'pyomo.common.dependencies.networkx.DiGraph', 'networkx.DiGraph', ([], {}), '()\n', (26882, 26884), False, 'from pyomo.common.dependencies import networkx, networkx_available as has_networkx\n'), ((28647, 28685), 'pyomo.core.value', 'value', (['model.ConditionalProbability[0]'], {}), '(model.ConditionalProbability[0])\n', (28652, 28685), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((28723, 28761), 'pyomo.core.value', 'value', (['model.ConditionalProbability[1]'], {}), '(model.ConditionalProbability[1])\n', (28728, 28761), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((28801, 28839), 'pyomo.core.value', 'value', 
(['model.ConditionalProbability[2]'], {}), '(model.ConditionalProbability[2])\n', (28806, 28839), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((28879, 28917), 'pyomo.core.value', 'value', (['model.ConditionalProbability[3]'], {}), '(model.ConditionalProbability[3])\n', (28884, 28917), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((28957, 28995), 'pyomo.core.value', 'value', (['model.ConditionalProbability[4]'], {}), '(model.ConditionalProbability[4])\n', (28962, 28995), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29035, 29073), 'pyomo.core.value', 'value', (['model.ConditionalProbability[5]'], {}), '(model.ConditionalProbability[5])\n', (29040, 29073), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29113, 29151), 'pyomo.core.value', 'value', (['model.ConditionalProbability[6]'], {}), '(model.ConditionalProbability[6])\n', (29118, 29151), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29191, 29229), 'pyomo.core.value', 'value', (['model.ConditionalProbability[7]'], {}), '(model.ConditionalProbability[7])\n', (29196, 29229), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29269, 29307), 'pyomo.core.value', 'value', (['model.ConditionalProbability[8]'], {}), '(model.ConditionalProbability[8])\n', (29274, 29307), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29347, 29385), 'pyomo.core.value', 'value', (['model.ConditionalProbability[9]'], {}), '(model.ConditionalProbability[9])\n', (29352, 29385), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29425, 29464), 'pyomo.core.value', 'value', (['model.ConditionalProbability[10]'], {}), 
'(model.ConditionalProbability[10])\n', (29430, 29464), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29504, 29543), 'pyomo.core.value', 'value', (['model.ConditionalProbability[11]'], {}), '(model.ConditionalProbability[11])\n', (29509, 29543), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((29583, 29622), 'pyomo.core.value', 'value', (['model.ConditionalProbability[12]'], {}), '(model.ConditionalProbability[12])\n', (29588, 29622), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((31382, 31422), 'pyomo.core.value', 'value', (["model.ConditionalProbability['R']"], {}), "(model.ConditionalProbability['R'])\n", (31387, 31422), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((31454, 31494), 'pyomo.core.value', 'value', (["model.ConditionalProbability['0']"], {}), "(model.ConditionalProbability['0'])\n", (31459, 31494), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((31526, 31566), 'pyomo.core.value', 'value', (["model.ConditionalProbability['1']"], {}), "(model.ConditionalProbability['1'])\n", (31531, 31566), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((31598, 31639), 'pyomo.core.value', 'value', (["model.ConditionalProbability['00']"], {}), "(model.ConditionalProbability['00'])\n", (31603, 31639), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((31671, 31712), 'pyomo.core.value', 'value', (["model.ConditionalProbability['01']"], {}), "(model.ConditionalProbability['01'])\n", (31676, 31712), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((33078, 33118), 'pyomo.core.value', 'value', (["model.ConditionalProbability['r']"], {}), "(model.ConditionalProbability['r'])\n", 
(33083, 33118), False, 'from pyomo.core import ConcreteModel, Set, Var, Expression, Objective, Block, value\n'), ((34414, 34479), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': 'None'}), '(G, edge_probability_attribute=None)\n', (34443, 34479), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n'), ((34605, 34670), 'mpisppy.utils.pysp_model.tree_structure_model.ScenarioTreeModelFromNetworkX', 'ScenarioTreeModelFromNetworkX', (['G'], {'edge_probability_attribute': 'None'}), '(G, edge_probability_attribute=None)\n', (34634, 34670), False, 'from mpisppy.utils.pysp_model.tree_structure_model import ScenarioTreeModelFromNetworkX, CreateConcreteTwoStageScenarioTreeModel\n')] |
# -*- coding: utf-8 -*-
# Copyright 2013-2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Authentication and Authorization models
"""
import base64
import logging
import urllib2
import xml
from beaker.cache import cache_region
from boto import ec2
from boto.ec2.connection import EC2Connection
# uncomment to enable boto request logger. Use only for development (see ref in _euca_connection)
#from boto.requestlog import RequestLogger
import boto.ec2.autoscale
import boto.ec2.cloudwatch
import boto.ec2.elb
import boto.iam
from boto.handler import XmlHandler as BotoXmlHandler
from boto.regioninfo import RegionInfo
from boto.sts.credentials import Credentials
from pyramid.security import Authenticated, authenticated_userid
class User(object):
"""Authenticated/Anonymous User object for Pyramid Auth."""
def __init__(self, user_id=None):
self.user_id = user_id
@classmethod
def get_auth_user(cls, request):
"""Get an authenticated user. Note that self.user_id = None if not authenticated.
See: http://docs.pylonsproject.org/projects/pyramid_cookbook/en/latest/auth/user_object.html
"""
user_id = authenticated_userid(request)
return cls(user_id=user_id)
def is_authenticated(self):
"""user_id will be None if the user isn't authenticated"""
return self.user_id
@staticmethod
def get_account_id(iam_conn=None, request=None):
"""Get 12-digit account ID for the currently signed-in user's account"""
from ..views import boto_error_handler
if iam_conn and request:
with boto_error_handler(request):
user = iam_conn.get_user()
if user and user.arn:
return user.arn.split(':')[4]
class ConnectionManager(object):
    """Returns connection objects, pulling from Beaker cache when available"""
    @staticmethod
    def aws_connection(region, access_key, secret_key, token, conn_type):
        """Return AWS EC2 connection object
        Pulls from Beaker cache on subsequent calls to avoid connection overhead
        :type region: string
        :param region: region name (e.g. 'us-east-1')
        :type access_key: string
        :param access_key: AWS access key
        :type secret_key: string
        :param secret_key: AWS secret key
        :type conn_type: string
        :param conn_type: Connection type ('ec2', 'autoscale', 'cloudwatch', or 'elb')
        """
        # cache_key feeds the (currently disabled) @cache_region decorator below
        cache_key = 'aws_connection_cache_{conn_type}_{region}'.format(conn_type=conn_type, region=region)
        # @cache_region('short_term', cache_key)
        def _aws_connection(_region, _access_key, _secret_key, _token, _conn_type):
            """Dispatch to the boto connect helper matching ``_conn_type``."""
            # FIX: this closure previously tested the outer ``conn_type`` instead
            # of its own parameter and used a stray ``if`` for 'elb'; it now uses
            # one if/elif chain over ``_conn_type`` (behavior is unchanged).
            conn = None
            if _conn_type == 'ec2':
                conn = ec2.connect_to_region(
                    _region, aws_access_key_id=_access_key, aws_secret_access_key=_secret_key, security_token=_token)
            elif _conn_type == 'autoscale':
                conn = ec2.autoscale.connect_to_region(
                    _region, aws_access_key_id=_access_key, aws_secret_access_key=_secret_key, security_token=_token)
            elif _conn_type == 'cloudwatch':
                conn = ec2.cloudwatch.connect_to_region(
                    _region, aws_access_key_id=_access_key, aws_secret_access_key=_secret_key, security_token=_token)
            elif _conn_type == 'elb':
                conn = ec2.elb.connect_to_region(
                    _region, aws_access_key_id=_access_key, aws_secret_access_key=_secret_key, security_token=_token)
            return conn
        return _aws_connection(region, access_key, secret_key, token, conn_type)
    @staticmethod
    def euca_connection(clchost, port, access_id, secret_key, token, conn_type):
        """Return Eucalyptus connection object
        Pulls from Beaker cache on subsequent calls to avoid connection overhead
        :type clchost: string
        :param clchost: FQDN or IP of Eucalyptus CLC (cloud controller)
        :type port: int
        :param port: Port of Eucalyptus CLC (usually 8773)
        :type access_id: string
        :param access_id: Euca access id
        :type secret_key: string
        :param secret_key: Eucalyptus secret key
        :type conn_type: string
        :param conn_type: Connection type ('ec2', 'autoscale', 'cloudwatch', 'elb', 'iam', or 'sts')
        """
        # cache_key feeds the (currently disabled) @cache_region decorator below
        cache_key = 'euca_connection_cache_{conn_type}_{clchost}_{port}'.format(
            conn_type=conn_type, clchost=clchost, port=port
        )
        # @cache_region('short_term', cache_key)
        def _euca_connection(_clchost, _port, _access_id, _secret_key, _token, _conn_type):
            """Build the Eucalyptus-flavored connection for ``_conn_type``."""
            region = RegionInfo(name='eucalyptus', endpoint=_clchost)
            path = '/services/Eucalyptus'
            conn_class = EC2Connection
            api_version = '2012-12-01'
            # Configure service path/class/version based on connection type
            if _conn_type == 'autoscale':
                api_version = '2011-01-01'
                conn_class = boto.ec2.autoscale.AutoScaleConnection
                path = '/services/AutoScaling'
            elif _conn_type == 'cloudwatch':
                path = '/services/CloudWatch'
                conn_class = boto.ec2.cloudwatch.CloudWatchConnection
            elif _conn_type == 'elb':
                path = '/services/LoadBalancing'
                conn_class = boto.ec2.elb.ELBConnection
            elif _conn_type == 'iam':
                path = '/services/Euare'
                conn_class = boto.iam.IAMConnection
            if _conn_type == 'sts':
                # STS is handled by the custom token authenticator, not boto
                conn = EucaAuthenticator(_clchost, _port)
            elif _conn_type != 'iam':
                conn = conn_class(
                    _access_id, _secret_key, region=region, port=_port, path=path, is_secure=True, security_token=_token
                )
            else:
                # IAM connections are addressed by host rather than region
                conn = conn_class(
                    _access_id, _secret_key, host=_clchost, port=_port, path=path, is_secure=True, security_token=_token
                )
            # AutoScaling service needs additional auth info
            if _conn_type == 'autoscale':
                conn.auth_region_name = 'Eucalyptus'
            if _conn_type != 'sts':  # this is the only non-boto connection
                setattr(conn, 'APIVersion', api_version)
                conn.https_validate_certificates = False
                conn.http_connection_kwargs['timeout'] = 30
                # uncomment to enable boto request logger. Use only for development
                #conn.set_request_hook(RequestLogger())
            return conn
        return _euca_connection(clchost, port, access_id, secret_key, token, conn_type)
def groupfinder(user_id, request):
    """Pyramid groupfinder callback: grant the ``Authenticated`` principal to
    any request with a resolved user id, and no principals otherwise."""
    if user_id is None:
        return []
    return [Authenticated]
class EucaAuthenticator(object):
    """Eucalyptus cloud token authenticator"""
    # GetAccessToken endpoint template; {dur} is the requested token lifetime
    # in seconds and is filled in per authenticate() call.
    TEMPLATE = 'https://{host}:{port}/services/Tokens?Action=GetAccessToken&DurationSeconds={dur}&Version=2011-06-15'
    def __init__(self, host, port):
        """
        Configure connection to Eucalyptus STS service to authenticate with the CLC (cloud controller)
        :type host: string
        :param host: IP address or FQDN of CLC host
        :type port: integer
        :param port: port number to use when making the connection
        """
        self.host = host
        self.port = port
    def authenticate(self, account, user, passwd, new_passwd=None, timeout=15, duration=3600):
        """Request a session token from the CLC and return boto Credentials.
        :param account: Eucalyptus account name
        :param user: user name within the account
        :param passwd: the user's current password
        :param new_passwd: when set, the password-change form of the request
            is sent (different separator/encoding, see below)
        :param timeout: socket timeout for the HTTP request, in seconds
        :param duration: requested token lifetime in seconds (capped for admin)
        """
        if user == 'admin' and duration > 3600:  # admin cannot have more than 1 hour duration
            duration = 3600
        # because of the variability, we need to keep this here, not in __init__
        self.auth_url = self.TEMPLATE.format(
            host=self.host,
            port=self.port,
            dur=duration,
        )
        req = urllib2.Request(self.auth_url)
        if new_passwd:
            # Password-change variant: "user@account;pw@new_pw" with each piece
            # (except new_pw) individually base64-encoded — presumably the wire
            # format the Tokens service expects; confirm against the service docs.
            auth_string = "{user}@{account};{pw}@{new_pw}".format(
                user=base64.b64encode(user),
                account=base64.b64encode(account),
                pw=base64.b64encode(passwd),
                new_pw=new_passwd
            )
        else:
            # Plain login variant: "user@account:pw" (password NOT base64-encoded
            # here — only the whole string is encoded below).
            auth_string = "{user}@{account}:{pw}".format(
                user=base64.b64encode(user),
                account=base64.b64encode(account),
                pw=passwd
            )
        encoded_auth = base64.b64encode(auth_string)
        req.add_header('Authorization', "Basic %s" % encoded_auth)
        response = urllib2.urlopen(req, timeout=timeout)
        body = response.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        creds = Credentials()
        h = BotoXmlHandler(creds, None)
        xml.sax.parseString(body, h)
        logging.info("Authenticated Eucalyptus user: " + account + "/" + user)
        return creds
class AWSAuthenticator(object):
    """AWS STS session-token authenticator."""

    def __init__(self, package):
        """
        Configure connection to AWS STS service
        :type package: string
        :param package: a pre-signed request string for the STS GetSessionToken call
        """
        self.endpoint = 'https://sts.amazonaws.com'
        self.package = package

    def authenticate(self, timeout=20):
        """POST the pre-signed GetSessionToken request to AWS STS and return
        the parsed boto Credentials. Timeout defaults to 20 seconds."""
        request = urllib2.Request(self.endpoint, data=self.package)
        reply = urllib2.urlopen(request, timeout=timeout)
        payload = reply.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        credentials = Credentials()
        handler = BotoXmlHandler(credentials, None)
        xml.sax.parseString(payload, handler)
        logging.info("Authenticated AWS user")
        return credentials
| [
"boto.ec2.elb.connect_to_region",
"boto.ec2.autoscale.connect_to_region",
"urllib2.urlopen",
"boto.ec2.connect_to_region",
"base64.b64encode",
"boto.sts.credentials.Credentials",
"xml.sax.parseString",
"urllib2.Request",
"boto.regioninfo.RegionInfo",
"boto.ec2.cloudwatch.connect_to_region",
"bot... | [((2439, 2468), 'pyramid.security.authenticated_userid', 'authenticated_userid', (['request'], {}), '(request)\n', (2459, 2468), False, 'from pyramid.security import Authenticated, authenticated_userid\n'), ((9130, 9160), 'urllib2.Request', 'urllib2.Request', (['self.auth_url'], {}), '(self.auth_url)\n', (9145, 9160), False, 'import urllib2\n'), ((9672, 9701), 'base64.b64encode', 'base64.b64encode', (['auth_string'], {}), '(auth_string)\n', (9688, 9701), False, 'import base64\n'), ((9788, 9825), 'urllib2.urlopen', 'urllib2.urlopen', (['req'], {'timeout': 'timeout'}), '(req, timeout=timeout)\n', (9803, 9825), False, 'import urllib2\n'), ((9936, 9949), 'boto.sts.credentials.Credentials', 'Credentials', ([], {}), '()\n', (9947, 9949), False, 'from boto.sts.credentials import Credentials\n'), ((9962, 9989), 'boto.handler.XmlHandler', 'BotoXmlHandler', (['creds', 'None'], {}), '(creds, None)\n', (9976, 9989), True, 'from boto.handler import XmlHandler as BotoXmlHandler\n'), ((9998, 10026), 'xml.sax.parseString', 'xml.sax.parseString', (['body', 'h'], {}), '(body, h)\n', (10017, 10026), False, 'import xml\n'), ((10035, 10105), 'logging.info', 'logging.info', (["('Authenticated Eucalyptus user: ' + account + '/' + user)"], {}), "('Authenticated Eucalyptus user: ' + account + '/' + user)\n", (10047, 10105), False, 'import logging\n'), ((10627, 10676), 'urllib2.Request', 'urllib2.Request', (['self.endpoint'], {'data': 'self.package'}), '(self.endpoint, data=self.package)\n', (10642, 10676), False, 'import urllib2\n'), ((10696, 10733), 'urllib2.urlopen', 'urllib2.urlopen', (['req'], {'timeout': 'timeout'}), '(req, timeout=timeout)\n', (10711, 10733), False, 'import urllib2\n'), ((10844, 10857), 'boto.sts.credentials.Credentials', 'Credentials', ([], {}), '()\n', (10855, 10857), False, 'from boto.sts.credentials import Credentials\n'), ((10870, 10897), 'boto.handler.XmlHandler', 'BotoXmlHandler', (['creds', 'None'], {}), '(creds, None)\n', (10884, 10897), True, 
'from boto.handler import XmlHandler as BotoXmlHandler\n'), ((10906, 10934), 'xml.sax.parseString', 'xml.sax.parseString', (['body', 'h'], {}), '(body, h)\n', (10925, 10934), False, 'import xml\n'), ((10943, 10981), 'logging.info', 'logging.info', (['"""Authenticated AWS user"""'], {}), "('Authenticated AWS user')\n", (10955, 10981), False, 'import logging\n'), ((5971, 6019), 'boto.regioninfo.RegionInfo', 'RegionInfo', ([], {'name': '"""eucalyptus"""', 'endpoint': '_clchost'}), "(name='eucalyptus', endpoint=_clchost)\n", (5981, 6019), False, 'from boto.regioninfo import RegionInfo\n'), ((4066, 4189), 'boto.ec2.connect_to_region', 'ec2.connect_to_region', (['_region'], {'aws_access_key_id': '_access_key', 'aws_secret_access_key': '_secret_key', 'security_token': '_token'}), '(_region, aws_access_key_id=_access_key,\n aws_secret_access_key=_secret_key, security_token=_token)\n', (4087, 4189), False, 'from boto import ec2\n'), ((4701, 4828), 'boto.ec2.elb.connect_to_region', 'ec2.elb.connect_to_region', (['_region'], {'aws_access_key_id': '_access_key', 'aws_secret_access_key': '_secret_key', 'security_token': '_token'}), '(_region, aws_access_key_id=_access_key,\n aws_secret_access_key=_secret_key, security_token=_token)\n', (4726, 4828), False, 'from boto import ec2\n'), ((4273, 4406), 'boto.ec2.autoscale.connect_to_region', 'ec2.autoscale.connect_to_region', (['_region'], {'aws_access_key_id': '_access_key', 'aws_secret_access_key': '_secret_key', 'security_token': '_token'}), '(_region, aws_access_key_id=_access_key,\n aws_secret_access_key=_secret_key, security_token=_token)\n', (4304, 4406), False, 'from boto import ec2\n'), ((9273, 9295), 'base64.b64encode', 'base64.b64encode', (['user'], {}), '(user)\n', (9289, 9295), False, 'import base64\n'), ((9321, 9346), 'base64.b64encode', 'base64.b64encode', (['account'], {}), '(account)\n', (9337, 9346), False, 'import base64\n'), ((9367, 9391), 'base64.b64encode', 'base64.b64encode', (['passwd'], {}), '(passwd)\n', 
(9383, 9391), False, 'import base64\n'), ((9534, 9556), 'base64.b64encode', 'base64.b64encode', (['user'], {}), '(user)\n', (9550, 9556), False, 'import base64\n'), ((9582, 9607), 'base64.b64encode', 'base64.b64encode', (['account'], {}), '(account)\n', (9598, 9607), False, 'import base64\n'), ((4491, 4625), 'boto.ec2.cloudwatch.connect_to_region', 'ec2.cloudwatch.connect_to_region', (['_region'], {'aws_access_key_id': '_access_key', 'aws_secret_access_key': '_secret_key', 'security_token': '_token'}), '(_region, aws_access_key_id=_access_key,\n aws_secret_access_key=_secret_key, security_token=_token)\n', (4523, 4625), False, 'from boto import ec2\n')] |
# Copyright (c) 2021 <NAME> <<EMAIL>>
# Author: <NAME> <<EMAIL>>
# See LICENSE file
import urwid
from .. import main_loop as shared_main_loop
from .error_dialog import ErrorDialog
from .info_dialog import InfoDialog
from .quit_dialog import QuitDialog
class SessionPopupLauncher(urwid.PopUpLauncher):
    """Hosts every modal dialog shown over the session's top-level widget.

    Each ``show_*`` method installs a factory for the popup widget, optionally
    tweaks the overlay size ratios, and opens the popup; ``close_pop_up``
    restores the default ratios afterwards.
    """

    DEFAULT_WR = 60
    DEFAULT_HR = 30

    def __init__(self, widget):
        super().__init__(widget)
        # Screen size, learned on the first render() call.
        self._max_width = None
        self._max_height = None
        # Overlay size as a percentage of the screen.
        self._width_ratio = self.DEFAULT_WR
        self._height_ratio = self.DEFAULT_HR
        # Used in testing
        self.showing_error_modal = False
        self._popup_factory_method = None

    def create_pop_up(self):
        # Delegate to whichever factory the last show_* call installed.
        return self._popup_factory_method()

    def get_pop_up_parameters(self):
        """Center the overlay using the configured width/height ratios."""
        width = int(self._width_ratio * self._max_width / 100)
        height = int(self._height_ratio * self._max_height / 100)
        return {
            'left': (self._max_width - width) / 2,
            'top': (self._max_height - height) / 2,
            'overlay_width': width,
            'overlay_height': height,
        }

    def close_pop_up(self, *args, **kwargs):
        self.showing_error_modal = False
        self._reset_popup_size_ratio()
        return super().close_pop_up()

    def quit(self, *args, **kwargs):
        raise urwid.ExitMainLoop()

    def show_fatal_error(self, error):
        """Modal for unhandled exceptions; its only button quits the app."""
        self.showing_error_modal = True

        def build():
            dialog = ErrorDialog(error, title=u'Unhandled Exception',
                                prefix='An unhandled error occured. Exception details:')
            urwid.connect_signal(dialog, dialog.SIGNAL_OK, self.quit)
            return urwid.Filler(dialog)

        self._popup_factory_method = build
        return self.open_pop_up()

    def show_error(self, error, on_close=None):
        """Modal for recoverable errors; OK closes it and runs ``on_close``."""
        self.showing_error_modal = True

        def on_ok(*args, **kwargs):
            self.close_pop_up()
            if callable(on_close):
                on_close()

        def build():
            dialog = ErrorDialog(error)
            urwid.connect_signal(dialog, dialog.SIGNAL_OK, on_ok)
            return urwid.Filler(dialog)

        self._popup_factory_method = build
        return self.open_pop_up()

    def show_info(self, message, on_close=None):
        """Informational modal; OK closes it and runs ``on_close``."""

        def on_ok(*args, **kwargs):
            self.close_pop_up()
            if callable(on_close):
                on_close()

        def build():
            dialog = InfoDialog(message)
            urwid.connect_signal(dialog, dialog.SIGNAL_OK, on_ok)
            return urwid.Filler(dialog)

        self._popup_factory_method = build
        return self.open_pop_up()

    def close_pop_up_then(self, callback):
        '''Close popup then execute callback'''

        def closer(*args, **kwargs):
            self.close_pop_up()
            callback()

        return closer

    def show_quit_dialog(self, on_no=None):
        """Yes/no confirmation; 'yes' quits, 'no' runs ``on_no`` when given."""

        def build():
            dialog = QuitDialog()
            urwid.connect_signal(dialog, dialog.SIGNAL_OK, self.quit)
            if on_no is not None:
                urwid.connect_signal(dialog, dialog.SIGNAL_CANCEL,
                                     self.close_pop_up_then(on_no))
            else:
                urwid.connect_signal(dialog, dialog.SIGNAL_CANCEL,
                                     self.close_pop_up)
            return urwid.Filler(dialog)

        self._popup_factory_method = build
        return self.open_pop_up()

    def show_loading_dialog(self):
        """Small centered 'Loading...' box; stays up until close_pop_up()."""
        self._width_ratio = 40
        self._height_ratio = 30

        def build():
            text = urwid.Text(u'\nLoading...', align='center')
            filled = urwid.Filler(text)
            return urwid.AttrMap(urwid.LineBox(filled), 'linebox')

        self._popup_factory_method = build
        result = self.open_pop_up()
        shared_main_loop.refresh()
        return result

    def show_big_popup(self, widget):
        """Near-fullscreen popup hosting ``widget`` (escape closes it)."""
        self._width_ratio = 90
        self._height_ratio = 80

        def build():
            framed = urwid.AttrMap(urwid.LineBox(widget, title=widget.name), 'linebox')
            urwid.connect_signal(widget, widget.SIGNAL_ESCAPE, self.close_pop_up)
            urwid.connect_signal(widget, widget.SIGNAL_QUIT,
                                 self.close_pop_up_then(self.show_quit_dialog))
            return framed

        self._popup_factory_method = build
        result = self.open_pop_up()
        shared_main_loop.refresh()
        return result

    def show_table_changer(self, widget):
        """Narrow popup with the table-selection widget (escape closes it)."""
        self._width_ratio = 25
        self._height_ratio = 70

        def build():
            framed = urwid.AttrMap(urwid.LineBox(widget, title=u'Change table'), 'linebox')
            urwid.connect_signal(widget, widget.SIGNAL_ESCAPE, self.close_pop_up)
            return framed

        self._popup_factory_method = build
        result = self.open_pop_up()
        shared_main_loop.refresh()
        return result

    def _reset_popup_size_ratio(self):
        self._width_ratio = self.DEFAULT_WR
        self._height_ratio = self.DEFAULT_HR

    def render(self, size, focus=False):
        # Remember the available screen size so popups can be sized/centered.
        self._max_width, self._max_height = size
        return super().render(size, focus)
| [
"urwid.ExitMainLoop",
"urwid.LineBox",
"urwid.Filler",
"urwid.connect_signal",
"urwid.Text"
] | [((1329, 1349), 'urwid.ExitMainLoop', 'urwid.ExitMainLoop', ([], {}), '()\n', (1347, 1349), False, 'import urwid\n'), ((1615, 1672), 'urwid.connect_signal', 'urwid.connect_signal', (['dialog', 'dialog.SIGNAL_OK', 'self.quit'], {}), '(dialog, dialog.SIGNAL_OK, self.quit)\n', (1635, 1672), False, 'import urwid\n'), ((1692, 1712), 'urwid.Filler', 'urwid.Filler', (['dialog'], {}), '(dialog)\n', (1704, 1712), False, 'import urwid\n'), ((2110, 2170), 'urwid.connect_signal', 'urwid.connect_signal', (['dialog', 'dialog.SIGNAL_OK', 'on_ok_signal'], {}), '(dialog, dialog.SIGNAL_OK, on_ok_signal)\n', (2130, 2170), False, 'import urwid\n'), ((2210, 2230), 'urwid.Filler', 'urwid.Filler', (['dialog'], {}), '(dialog)\n', (2222, 2230), False, 'import urwid\n'), ((2591, 2651), 'urwid.connect_signal', 'urwid.connect_signal', (['dialog', 'dialog.SIGNAL_OK', 'on_ok_signal'], {}), '(dialog, dialog.SIGNAL_OK, on_ok_signal)\n', (2611, 2651), False, 'import urwid\n'), ((2671, 2691), 'urwid.Filler', 'urwid.Filler', (['dialog'], {}), '(dialog)\n', (2683, 2691), False, 'import urwid\n'), ((3098, 3155), 'urwid.connect_signal', 'urwid.connect_signal', (['dialog', 'dialog.SIGNAL_OK', 'self.quit'], {}), '(dialog, dialog.SIGNAL_OK, self.quit)\n', (3118, 3155), False, 'import urwid\n'), ((3460, 3480), 'urwid.Filler', 'urwid.Filler', (['dialog'], {}), '(dialog)\n', (3472, 3480), False, 'import urwid\n'), ((3717, 3760), 'urwid.Text', 'urwid.Text', (['u"""\nLoading..."""'], {'align': '"""center"""'}), "(u'\\nLoading...', align='center')\n", (3727, 3760), False, 'import urwid\n'), ((3782, 3802), 'urwid.Filler', 'urwid.Filler', (['dialog'], {}), '(dialog)\n', (3794, 3802), False, 'import urwid\n'), ((4275, 4344), 'urwid.connect_signal', 'urwid.connect_signal', (['widget', 'widget.SIGNAL_ESCAPE', 'self.close_pop_up'], {}), '(widget, widget.SIGNAL_ESCAPE, self.close_pop_up)\n', (4295, 4344), False, 'import urwid\n'), ((4884, 4953), 'urwid.connect_signal', 'urwid.connect_signal', (['widget', 
'widget.SIGNAL_ESCAPE', 'self.close_pop_up'], {}), '(widget, widget.SIGNAL_ESCAPE, self.close_pop_up)\n', (4904, 4953), False, 'import urwid\n'), ((3347, 3416), 'urwid.connect_signal', 'urwid.connect_signal', (['dialog', 'dialog.SIGNAL_CANCEL', 'self.close_pop_up'], {}), '(dialog, dialog.SIGNAL_CANCEL, self.close_pop_up)\n', (3367, 3416), False, 'import urwid\n'), ((3838, 3859), 'urwid.LineBox', 'urwid.LineBox', (['dialog'], {}), '(dialog)\n', (3851, 3859), False, 'import urwid\n'), ((4210, 4250), 'urwid.LineBox', 'urwid.LineBox', (['widget'], {'title': 'widget.name'}), '(widget, title=widget.name)\n', (4223, 4250), False, 'import urwid\n'), ((4815, 4859), 'urwid.LineBox', 'urwid.LineBox', (['widget'], {'title': 'u"""Change table"""'}), "(widget, title=u'Change table')\n", (4828, 4859), False, 'import urwid\n')] |
# Add to the moeda.py module created in the previous challenges a function named
# resumo() that shows on screen some information generated by the functions the
# module already provides.
import moeda
p = float(input('Preço: R$'))
# resumo(price, a, b) — presumably a/b are the increase/discount percentages to
# display; confirm against moeda.py.
moeda.resumo(p, 20, 12)
| [
"moeda.resumo"
] | [((237, 260), 'moeda.resumo', 'moeda.resumo', (['p', '(20)', '(12)'], {}), '(p, 20, 12)\n', (249, 260), False, 'import moeda\n')] |
# The Demo to test the Cozmo's camera image
import asyncio
import time
import os
import cozmo
# BUFF_PATH = "/home/wmh/work/seqbuff/"
BUFF_PATH = "/Users/wty/work/TestSeq/MoveInLine5/"  # output dir for frames, timestamps and the CSV log
BUFF_LENGTH = 10000  # capture stops once this many frames have been taken
last_image = None  # latest camera frame seen; shared with capture_pic() to detect fresh frames
def split_gravity(last_g, sample, alpha=0.8):
    """Split a raw accelerometer sample into gravity and linear acceleration.

    First-order low-pass filter: g = alpha * last_g + (1 - alpha) * sample,
    then the gravity estimate is subtracted from the sample.

    :param last_g: previous gravity estimate (sequence of 3 numbers)
    :param sample: raw accelerometer reading (sequence of 3 numbers)
    :param alpha: smoothing factor; 0.8 reproduces the original tuning
    :return: (gravity, linear_acceleration) as two 3-element lists
    """
    g = [alpha * lg + (1.0 - alpha) * s for lg, s in zip(last_g, sample)]
    linear = [s - gc for s, gc in zip(sample, g)]
    return g, linear


def loop(robot: "cozmo.robot.Robot"):
    """Capture BUFF_LENGTH camera frames and log robot state to a CSV file.

    For every frame: save the image as <n>.jpg, write the capture timestamp to
    <n>.txt, and append pose/IMU data (plus a low-pass gravity estimate and the
    gravity-compensated acceleration) to RobotState.csv in BUFF_PATH.

    The annotation is quoted only so this module can be imported without
    evaluating ``cozmo`` at definition time; behavior is unchanged.
    """
    inc = 1
    head_angle = 0
    robot.set_lift_height(50.0).wait_for_completed()
    # MIN_HEAD_ANGLE = util.degrees(-25)
    robot.set_head_angle(cozmo.util.degrees(head_angle)).wait_for_completed()
    # initialize csv log file; 'with' guarantees it is closed even on errors
    with open(BUFF_PATH + 'RobotState.csv', 'w') as csvfp:
        csvfp.write('Time, HeadAngle,')
        csvfp.write('PosX, PosY, PosZ,')
        csvfp.write('RotQ0, RotQ2, RotQ3, RotQ4,')
        csvfp.write('AngleZ, OriginID,')
        csvfp.write('AccX, AccY, AccZ,')
        csvfp.write('GyroX, GyroY, GyroZ,')
        csvfp.write('GX, GY, GZ,')
        csvfp.write('RealX, RealY, RealZ\n')
        last_g = [0.0, 0.0, 0.0]
        while inc < BUFF_LENGTH:
            timestamp = '%.4f' % time.time()  # %-format already yields a str
            im = capture_pic(robot).raw_image
            print("Get image No." + str(inc) + ", in size:" + str(im.size))
            im.save(BUFF_PATH + str(inc) + '.jpg')
            # Write the capture timestamp next to the image
            with open(BUFF_PATH + str(inc) + '.txt', 'w') as fp:
                fp.write(timestamp)
            pose = robot.pose
            csvfp.write(timestamp + ',' + str(head_angle) + ',')
            csvfp.write('%.1f, %.1f, %.1f,' % pose.position.x_y_z)
            csvfp.write('%.1f, %.1f, %.1f, %.1f,' % pose.rotation.q0_q1_q2_q3)
            csvfp.write('%.1f,' % pose.rotation.angle_z.degrees)
            csvfp.write('%s,' % pose.origin_id)
            csvfp.write('%.1f, %.1f, %.1f,' % robot.accelerometer.x_y_z)
            csvfp.write('%.1f, %.1f, %.1f,' % robot.gyro.x_y_z)
            # FIX: the original mutated the (immutable) accelerometer tuple in
            # place and %-formatted Python *lists* against a 3-slot format
            # string — both raise TypeError at runtime.
            raw_acc = list(robot.accelerometer.x_y_z)
            g, real_acc = split_gravity(last_g, raw_acc)
            last_g = g
            csvfp.write('%.1f, %.1f, %.1f,' % tuple(g))
            csvfp.write('%.1f, %.1f, %.1f,' % tuple(real_acc))
            csvfp.write('\n')
            inc += 1
            print(inc)
            time.sleep(0.19)
def GetRobotState(robot:cozmo.robot.Robot):
    """Dump the robot's pose, accelerometer and gyro readings to stdout."""
    pose = robot.pose
    px, py, pz = pose.position.x_y_z
    q0, q1, q2, q3 = pose.rotation.q0_q1_q2_q3
    print(f'Pose: Pos = <{px:.1f}, {py:.1f}, {pz:.1f}>')
    print(f'Pose: Rot quat = <{q0:.1f}, {q1:.1f}, {q2:.1f}, {q3:.1f}>')
    print(f'Pose: angle_z = {pose.rotation.angle_z.degrees:.1f}')
    print(f'Pose: origin_id: {pose.origin_id}')
    ax, ay, az = robot.accelerometer.x_y_z
    print(f'Accelmtr: <{ax:.1f}, {ay:.1f}, {az:.1f}>')
    gx, gy, gz = robot.gyro.x_y_z
    print(f'Gyro: <{gx:.1f}, {gy:.1f}, {gz:.1f}>')
def capture_pic(robot: cozmo.robot.Robot):
    """Block until a NEW camera frame is available and return it.

    Enables the camera stream and polls ``robot.world.latest_image`` until it
    differs from the module-global ``last_image``, so the same frame is never
    returned twice in a row.
    """
    robot.camera.image_stream_enabled = True
    print("Waiting for a picture...")
    # wait for a new camera image to ensure it is captured properly
    global last_image
    image = robot.world.latest_image
    while image == last_image:
        time.sleep(0.02)  # poll at ~50 Hz until a fresh frame shows up
        image = robot.world.latest_image
    last_image = image
    return last_image
if __name__ == '__main__':
    # use_viewer=True opens the live camera-feed window alongside the capture loop
    cozmo.run_program(loop, use_viewer=True)
| [
"cozmo.run_program",
"cozmo.util.degrees",
"time.time",
"time.sleep"
] | [((3607, 3647), 'cozmo.run_program', 'cozmo.run_program', (['loop'], {'use_viewer': '(True)'}), '(loop, use_viewer=True)\n', (3624, 3647), False, 'import cozmo\n'), ((2571, 2587), 'time.sleep', 'time.sleep', (['(0.19)'], {}), '(0.19)\n', (2581, 2587), False, 'import time\n'), ((3468, 3484), 'time.sleep', 'time.sleep', (['(0.02)'], {}), '(0.02)\n', (3478, 3484), False, 'import time\n'), ((415, 444), 'cozmo.util.degrees', 'cozmo.util.degrees', (['HeadAngle'], {}), '(HeadAngle)\n', (433, 444), False, 'import cozmo\n'), ((1073, 1084), 'time.time', 'time.time', ([], {}), '()\n', (1082, 1084), False, 'import time\n')] |
from django.contrib.admin import ModelAdmin
from public_admin.sites import PublicAdminSite
class PublicModelAdmin(ModelAdmin):
    """A read-only variant of Django's ModelAdmin for public-facing admins.

    URLs that must not exist in a public admin are filtered out, and all
    permissions are decided from the request alone (GET-only viewing)."""

    def has_view_permission(self, request, obj=None):
        """Viewing is allowed for GET requests only."""
        return "GET" == request.method

    def has_add_permission(self, request):
        """A public admin never allows creating objects."""
        return False

    def has_change_permission(self, request, obj=None):
        """A public admin never allows editing objects."""
        return False

    def has_delete_permission(self, request, obj=None):
        """A public admin never allows deleting objects."""
        return False

    def get_urls(self):
        """Keep only the admin URLs that are valid for a public admin."""
        urls = super().get_urls()
        return list(filter(PublicAdminSite.valid_url, urls))
| [
"public_admin.sites.PublicAdminSite.valid_url"
] | [((1035, 1065), 'public_admin.sites.PublicAdminSite.valid_url', 'PublicAdminSite.valid_url', (['url'], {}), '(url)\n', (1060, 1065), False, 'from public_admin.sites import PublicAdminSite\n')] |
import dill
import torch
from torchtext.datasets import TranslationDataset
from translation.transformer import Transformer
from translation.translate import translate_dataset
if __name__ == "__main__":
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # FIX: the original opened the Field pickles and never closed them;
    # context managers release the handles deterministically.
    with open("data/SRC_Field.pt", "rb") as src_field_f:
        SRC = dill.load(src_field_f)
    with open("data/TGT_Field.pt", "rb") as tgt_field_f:
        TGT = dill.load(tgt_field_f)

    # Test split of the en-es parallel corpus, tokenized by the loaded Fields.
    data = TranslationDataset(
        "data/en-es/en-es_", ("en_test.txt", "es_test.txt"), (SRC, TGT))

    # Restore the trained Transformer weights onto the selected device.
    model_data = torch.load("data/en-es_checkpoint_2.pt")
    model = Transformer(len(SRC.vocab), len(TGT.vocab)).to(device)
    model.load_state_dict(model_data)

    # Special-token ids used to delimit/pad sequences during decoding.
    pad = SRC.vocab.stoi['<pad>']
    sos = SRC.vocab.stoi['<s>']
    eos = SRC.vocab.stoi['</s>']

    translate_dataset(model, data, SRC, TGT, sos, eos, pad, device, 5)
| [
"torchtext.datasets.TranslationDataset",
"translation.translate.translate_dataset",
"torch.load",
"torch.cuda.is_available",
"dill.load"
] | [((388, 410), 'dill.load', 'dill.load', (['src_field_f'], {}), '(src_field_f)\n', (397, 410), False, 'import dill\n'), ((421, 443), 'dill.load', 'dill.load', (['tgt_field_f'], {}), '(tgt_field_f)\n', (430, 443), False, 'import dill\n'), ((456, 544), 'torchtext.datasets.TranslationDataset', 'TranslationDataset', (['"""data/en-es/en-es_"""', "('en_test.txt', 'es_test.txt')", '(SRC, TGT)'], {}), "('data/en-es/en-es_', ('en_test.txt', 'es_test.txt'), (\n SRC, TGT))\n", (474, 544), False, 'from torchtext.datasets import TranslationDataset\n'), ((566, 606), 'torch.load', 'torch.load', (['"""data/en-es_checkpoint_2.pt"""'], {}), "('data/en-es_checkpoint_2.pt')\n", (576, 606), False, 'import torch\n'), ((816, 882), 'translation.translate.translate_dataset', 'translate_dataset', (['model', 'data', 'SRC', 'TGT', 'sos', 'eos', 'pad', 'device', '(5)'], {}), '(model, data, SRC, TGT, sos, eos, pad, device, 5)\n', (833, 882), False, 'from translation.translate import translate_dataset\n'), ((239, 264), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (262, 264), False, 'import torch\n')] |
from networkx.algorithms.components.connected import node_connected_component as nx_node_connected_component
from networkx.algorithms.components.connected import connected_components as nx_connected_components
import pygfa.gfa # required for GFAError (gives error otherwise)
def nodes_connected_component(gfa_, nid):
    """Return the set of node ids in the connected component containing *nid*.

    :param gfa_: the GFA graph wrapper to search.
    :param nid: the id of the node whose connected component is wanted.
    :raises pygfa.gfa.GFAError: if *nid* is not a node of the graph.
    """
    if nid not in gfa_:
        raise pygfa.gfa.GFAError("The source node is not in the graph.")
    return nx_node_connected_component(gfa_._graph, nid)
def nodes_connected_components(gfa_):
    """Return a generator of sets, one per connected component of the graph,
    each holding the node ids of that component."""
    underlying = gfa_._graph
    return nx_connected_components(underlying)
| [
"networkx.algorithms.components.connected.node_connected_component",
"networkx.algorithms.components.connected.connected_components"
] | [((571, 616), 'networkx.algorithms.components.connected.node_connected_component', 'nx_node_connected_component', (['gfa_._graph', 'nid'], {}), '(gfa_._graph, nid)\n', (598, 616), True, 'from networkx.algorithms.components.connected import node_connected_component as nx_node_connected_component\n'), ((799, 835), 'networkx.algorithms.components.connected.connected_components', 'nx_connected_components', (['gfa_._graph'], {}), '(gfa_._graph)\n', (822, 835), True, 'from networkx.algorithms.components.connected import connected_components as nx_connected_components\n')] |
"""
hello.py: Simple example using write
"""
#pylint: disable-msg=import-error
from turtleplotbot import TurtlePlotBot
def main():
    """Plot the word "Hello!" in script lettering, then park the bot."""
    plotter = TurtlePlotBot()
    plotter.setscale(2)
    plotter.write("Hello!", "fonts/scripts.fnt")
    plotter.done()
# Run the demo immediately, then hand control back to the on-device menu.
main()
__import__("menu") # optional return to turtleplotbot menu
| [
"turtleplotbot.TurtlePlotBot"
] | [((177, 192), 'turtleplotbot.TurtlePlotBot', 'TurtlePlotBot', ([], {}), '()\n', (190, 192), False, 'from turtleplotbot import TurtlePlotBot\n')] |
# -*- coding: utf-8 -*-
from snakemake.shell import shell
paths_tsv = " ".join(snakemake.input.tsv)
shell(
r"""
export TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" ERR EXIT
export THEANO_FLAGS="base_compiledir=$TMPDIR/theano_compile_dir"
PRIORS=$TMPDIR/ploidy_priors.tsv
echo -e "CONTIG_NAME\tPLOIDY_PRIOR_0\tPLOIDY_PRIOR_1\tPLOIDY_PRIOR_2\tPLOIDY_PRIOR_3" \
> $PRIORS
for i in {{1..22}}; do
echo -e "$i\t0\t0.01\t0.98\t0.01" >> $PRIORS
done
echo -e "X\t0.01\t0.49\t0.49\t0.01" >> $PRIORS
echo -e "Y\t0.495\t0.495\t0.01\t0" >> $PRIORS
set -x
gatk DetermineGermlineContigPloidy \
-L {snakemake.input.interval_list} \
--interval-merging-rule OVERLAPPING_ONLY \
$(for tsv in {paths_tsv}; do echo -I $tsv; done) \
--contig-ploidy-priors $PRIORS \
--output $(dirname {snakemake.output}) \
--output-prefix ploidy
"""
)
| [
"snakemake.shell.shell"
] | [((103, 874), 'snakemake.shell.shell', 'shell', (['"""\nexport TMPDIR=$(mktemp -d)\ntrap "rm -rf $TMPDIR" ERR EXIT\n\nexport THEANO_FLAGS="base_compiledir=$TMPDIR/theano_compile_dir"\n\nPRIORS=$TMPDIR/ploidy_priors.tsv\necho -e "CONTIG_NAME\\\\tPLOIDY_PRIOR_0\\\\tPLOIDY_PRIOR_1\\\\tPLOIDY_PRIOR_2\\\\tPLOIDY_PRIOR_3" \\\\\n> $PRIORS\nfor i in {{1..22}}; do\n echo -e "$i\\\\t0\\\\t0.01\\\\t0.98\\\\t0.01" >> $PRIORS\ndone\necho -e "X\\\\t0.01\\\\t0.49\\\\t0.49\\\\t0.01" >> $PRIORS\necho -e "Y\\\\t0.495\\\\t0.495\\\\t0.01\\\\t0" >> $PRIORS\n\nset -x\n\ngatk DetermineGermlineContigPloidy \\\\\n -L {snakemake.input.interval_list} \\\\\n --interval-merging-rule OVERLAPPING_ONLY \\\\\n $(for tsv in {paths_tsv}; do echo -I $tsv; done) \\\\\n --contig-ploidy-priors $PRIORS \\\\\n --output $(dirname {snakemake.output}) \\\\\n --output-prefix ploidy\n"""'], {}), '(\n """\nexport TMPDIR=$(mktemp -d)\ntrap "rm -rf $TMPDIR" ERR EXIT\n\nexport THEANO_FLAGS="base_compiledir=$TMPDIR/theano_compile_dir"\n\nPRIORS=$TMPDIR/ploidy_priors.tsv\necho -e "CONTIG_NAME\\\\tPLOIDY_PRIOR_0\\\\tPLOIDY_PRIOR_1\\\\tPLOIDY_PRIOR_2\\\\tPLOIDY_PRIOR_3" \\\\\n> $PRIORS\nfor i in {{1..22}}; do\n echo -e "$i\\\\t0\\\\t0.01\\\\t0.98\\\\t0.01" >> $PRIORS\ndone\necho -e "X\\\\t0.01\\\\t0.49\\\\t0.49\\\\t0.01" >> $PRIORS\necho -e "Y\\\\t0.495\\\\t0.495\\\\t0.01\\\\t0" >> $PRIORS\n\nset -x\n\ngatk DetermineGermlineContigPloidy \\\\\n -L {snakemake.input.interval_list} \\\\\n --interval-merging-rule OVERLAPPING_ONLY \\\\\n $(for tsv in {paths_tsv}; do echo -I $tsv; done) \\\\\n --contig-ploidy-priors $PRIORS \\\\\n --output $(dirname {snakemake.output}) \\\\\n --output-prefix ploidy\n"""\n )\n', (108, 874), False, 'from snakemake.shell import shell\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-argument
"""Strategy and AlterOpLayout functions of MLAS operators"""
import tvm
from tvm import relay, topi
from tvm.te.hybrid import script
from .strategy import wrap_topi_schedule
from . import op as reg
# Mlas_matmul
# Mlas_matmul strategy
@tvm.target.override_native_generic_func("mlas_matmul_strategy")
def mlas_matmul_strategy(attrs, inputs, out_type, target):
"""mlas_matmul generic strategy"""
return None
@mlas_matmul_strategy.register(["cpu", "arm_cpu"])
def mlas_matmul_strategy_cpu(attrs, inputs, out_type, target):
"""mlas_matmul strategy"""
strategy = reg.OpStrategy()
def wrap_compute_mlas_matmul(topi_compute):
"""wrap mlas_matmul topi compute"""
def _compute_mlas_matmul(attrs, inputs, out_type):
args = [inputs[0], inputs[1], attrs.packb, attrs.K, attrs.N]
return [topi_compute(*args)]
return _compute_mlas_matmul
strategy.add_implementation(
wrap_compute_mlas_matmul(topi.mlas_matmul),
wrap_topi_schedule(topi.generic.schedule_extern),
name="mlas_matmul",
plevel=1,
)
return strategy
reg.register_strategy("mlas_matmul", mlas_matmul_strategy)
reg.register_pattern("mlas_matmul", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
# Mlas_matmul AlterOpLayout
@tvm.target.generic_func
def batch_matmul_alter_layout(attrs, inputs, tinfos, out_type):
"""Change batch_matmul layout."""
# not to change by default
return None
@batch_matmul_alter_layout.register(["cpu", "arm_cpu"])
def _alter_batch_matmul_layout(attrs, inputs, tinfos, out_type):
target = tvm.target.Target.current(allow_none=False)
if (
"mlas" in target.libs
and tinfos[0].dtype == "float32"
and tinfos[1].dtype == "float32"
and out_type.dtype == "float32"
):
# mlas is only used for static tensors
if not (
any([isinstance(dim, tvm.tir.Any) for dim in tinfos[0].shape])
or any([isinstance(dim, tvm.tir.Any) for dim in tinfos[1].shape])
):
# if matrix B is constant, use packed matmul
if isinstance(inputs[1], relay.expr.Constant):
b_shape = inputs[1].data.shape
assert len(b_shape) == 3
batch, N, K = b_shape[0], b_shape[1], b_shape[2]
# batch_B must be 1
if batch == 1:
packed_b = relay.op.mlas_packb(inputs[1], K, N)
output = relay.op.mlas_matmul(inputs[0], packed_b, True, K, N)
return output
# if matrix A, B are not constant and no other libs are enabled, use normal matmul
if not any([item in target.libs for item in ["mkl", "clbas", "mkldnn"]]):
return relay.op.mlas_matmul(inputs[0], inputs[1], False)
return None
@reg.register_alter_op_layout("nn.batch_matmul")
def alter_op_layout_dense(attrs, inputs, tinfos, out_type):
"""Alternate the layout of batch_matmul"""
return batch_matmul_alter_layout(attrs, inputs, tinfos, out_type)
# Dense
# Dense strategy
@tvm.target.override_native_generic_func("mlas_packb_strategy")
def mlas_packb_strategy(attrs, inputs, out_type, target):
"""mlas_packb generic strategy"""
strategy = reg.OpStrategy()
def wrap_mlas_packb(topi_compute):
"""Wrap mlas_packb topi compute"""
def _compute_mlas_packb(attrs, inputs, _):
return [topi_compute(inputs[0], attrs.K, attrs.N, attrs.size, attrs.transb)]
return _compute_mlas_packb
strategy.add_implementation(
wrap_mlas_packb(topi.mlas_packb),
wrap_topi_schedule(topi.generic.schedule_extern),
name="mlas_packb",
)
return strategy
reg.register_strategy("mlas_packb", mlas_packb_strategy)
# Dense AlterOpLayout
# See tvm.topi.x86.dense_alter_op
@script
def _mlas_matmul_shape_func(tensor_a_shape, tensor_b_shape):
out = output_tensor((tensor_a_shape.shape[0],), "int64")
if tensor_a_shape.shape[0] == 3:
out[0] = tensor_a_shape[0]
out[1] = tensor_a_shape[1]
out[2] = tensor_b_shape[1]
else:
out[0] = tensor_a_shape[0]
out[1] = tensor_b_shape[0]
return out
@script
def _mlas_matmul_packb_shape_func(tensor_a_shape, N):
out = output_tensor((tensor_a_shape.shape[0],), "int64")
if tensor_a_shape.shape[0] == 3:
out[0] = tensor_a_shape[0]
out[1] = tensor_a_shape[1]
out[2] = N
else:
out[0] = tensor_a_shape[0]
out[1] = N
return out
@reg.register_shape_func("mlas_matmul", False)
def matmul_shape_func(attrs, inputs, _):
"""Shape function for matmul op."""
if attrs.packb:
return [_mlas_matmul_packb_shape_func(inputs[0], tvm.tir.expr.IntImm("int64", attrs.N))]
return [_mlas_matmul_shape_func(inputs[0], inputs[1])]
| [
"tvm.target.override_native_generic_func",
"tvm.relay.op.mlas_packb",
"tvm.tir.expr.IntImm",
"tvm.target.Target.current",
"tvm.relay.op.mlas_matmul"
] | [((1070, 1133), 'tvm.target.override_native_generic_func', 'tvm.target.override_native_generic_func', (['"""mlas_matmul_strategy"""'], {}), "('mlas_matmul_strategy')\n", (1109, 1133), False, 'import tvm\n'), ((3907, 3969), 'tvm.target.override_native_generic_func', 'tvm.target.override_native_generic_func', (['"""mlas_packb_strategy"""'], {}), "('mlas_packb_strategy')\n", (3946, 3969), False, 'import tvm\n'), ((2420, 2463), 'tvm.target.Target.current', 'tvm.target.Target.current', ([], {'allow_none': '(False)'}), '(allow_none=False)\n', (2445, 2463), False, 'import tvm\n'), ((3585, 3634), 'tvm.relay.op.mlas_matmul', 'relay.op.mlas_matmul', (['inputs[0]', 'inputs[1]', '(False)'], {}), '(inputs[0], inputs[1], False)\n', (3605, 3634), False, 'from tvm import relay, topi\n'), ((5567, 5604), 'tvm.tir.expr.IntImm', 'tvm.tir.expr.IntImm', (['"""int64"""', 'attrs.N'], {}), "('int64', attrs.N)\n", (5586, 5604), False, 'import tvm\n'), ((3227, 3263), 'tvm.relay.op.mlas_packb', 'relay.op.mlas_packb', (['inputs[1]', 'K', 'N'], {}), '(inputs[1], K, N)\n', (3246, 3263), False, 'from tvm import relay, topi\n'), ((3293, 3346), 'tvm.relay.op.mlas_matmul', 'relay.op.mlas_matmul', (['inputs[0]', 'packed_b', '(True)', 'K', 'N'], {}), '(inputs[0], packed_b, True, K, N)\n', (3313, 3346), False, 'from tvm import relay, topi\n')] |
from flask import Flask, render_template, url_for, request
import numpy as np
import pandas as pd
import pickle
import warnings
warnings.filterwarnings("ignore")
app = Flask(__name__)
@app.route('/')
def index():
return render_template("index.html")
@app.route('/predict', methods=['GET', 'POST'])
def predict():
model_2 = pickle.load(open('data/car_model.sav', 'rb'))
if request.method == 'POST':
make = request.form['inputMk']
fuel_type = request.form['inputFt']
aspiration = request.form['inputAs']
num_doors = request.form['inputNd']
body_style = request.form['inputBs']
drive_wheels = request.form['inputDw']
engine_location = request.form['inputEl']
wheel_base = request.form['inputWb']
length = request.form['inputLn']
width = request.form['inputWd']
height = request.form['inputHt']
curb_weight = request.form['inputCw']
engine_type = request.form['inputEt']
num_cylinders = request.form['inputNc']
engine_size = request.form['inputEs']
fuel_system = request.form['inputFs']
bore = request.form['inputBr']
stroke = request.form['inputSk']
compression_ratio = request.form['inputCr']
horsepower = request.form['inputHp']
peak_rpm = request.form['inputPr']
city_mpg = request.form['inputCm']
highway_mpg = request.form['inputHm']
d = { 'make': str(make),
'fuel_type': str(fuel_type),
'aspiration': str(aspiration),
'num_doors': str(num_doors),
'body_style': str(body_style),
'drive_wheels': str(drive_wheels),
'engine_location':str(engine_location),
'wheel_base': float(wheel_base),
'length': float(length),
'width': float(width),
'height': float(height),
'curb_weight':float(curb_weight),
'engine_type': str(engine_type),
'num_cylinders': str(num_cylinders),
'engine_size': float(engine_size),
'fuel_system': str(fuel_system),
'bore': float(bore),
'stroke' : float(stroke),
'compression_ratio': float(compression_ratio),
'horsepower': float(horsepower),
'peak_rpm': float(peak_rpm),
'city_mpg': float(city_mpg),
'highway_mpg':float(highway_mpg) }
data_2 = pd.DataFrame(d, index=[0])
y_pred = model_2.predict(data_2)
y_pred = y_pred[0]
y_pred = round(y_pred, 2)
return render_template('result.html', y_pred = y_pred)
if __name__ == '__main__':
app.run()
| [
"flask.render_template",
"warnings.filterwarnings",
"pandas.DataFrame",
"flask.Flask"
] | [((130, 163), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (153, 163), False, 'import warnings\n'), ((170, 185), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (175, 185), False, 'from flask import Flask, render_template, url_for, request\n'), ((228, 257), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (243, 257), False, 'from flask import Flask, render_template, url_for, request\n'), ((2462, 2488), 'pandas.DataFrame', 'pd.DataFrame', (['d'], {'index': '[0]'}), '(d, index=[0])\n', (2474, 2488), True, 'import pandas as pd\n'), ((2591, 2636), 'flask.render_template', 'render_template', (['"""result.html"""'], {'y_pred': 'y_pred'}), "('result.html', y_pred=y_pred)\n", (2606, 2636), False, 'from flask import Flask, render_template, url_for, request\n')] |
# Standalone GUI Applet for creating levels
import os
os.environ['pg_HIDE_SUPPORT_PROMPT'] = "hide"
import pygame as pg
from multiprocessing import Process
from ui_helpers import *
from levels import levels, read_level, write_level, LEVELS_DIR
from main import play_level
from engine import Level, board_copy
# --- UI-Related Constants --- #
STARTING_SCREEN_WIDTH, STARTING_SCREEN_HEIGHT = 1000, 600 # starting dimensions of screen (px)
MIN_SCREEN_WIDTH = 160
MIN_SCREEN_HEIGHT = 120
VIEWPORT_MIN_PADDING = 50 # minimum viewport edge padding (px)
SCREEN_BACKGROUND_COLOR = (25, 25, 32)
VIEWPORT_BACKGROUND_COLOR = (15, 15, 15)
GRID_COLOR = (0, 80, 90, 127)
TARGET_FPS = 60
FILE_DIALOG_OPTIONS = { # https://docs.python.org/3.9/library/dialog.html#native-load-save-dialogs
"initialdir": LEVELS_DIR,
"filetypes": [("Level Files", ".lvl")],
"defaultextension": ".lvl"
}
# --- Level-Related Constants --- #
# the layout of entities in the palette (must be rectangular)
PALETTE_LAYOUT = [
[Nouns.MOMO, Objects.MOMO],
[Nouns.WALL, Objects.WALL],
[Nouns.ROCK, Objects.ROCK],
[Nouns.FLAG, Objects.FLAG],
[Nouns.WATER, Objects.WATER],
[None, None],
[Adjectives.YOU, Adjectives.WIN],
[Adjectives.STOP, Adjectives.PUSH],
[Adjectives.DEFEAT, Adjectives.SINK],
[None, None],
[Verbs.IS, Verbs.HAS]
]
PALETTE_WIDTH = len(PALETTE_LAYOUT[0])
PALETTE_HEIGHT = len(PALETTE_LAYOUT)
# build palette board for easy rendering purposes (None -> empty cell)
PALETTE_BOARD = [
[[e] if e else [] for e in row]
for row in PALETTE_LAYOUT
]
# Valid board size ranges (inclusive)
BOARD_WIDTH_RANGE = (5, 35)
BOARD_HEIGHT_RANGE = (5, 25)
# Size of the default empty board
BOARD_DEFAULT_DIMS = (15, 12)
# Draw the level onto a fresh viewport surface, render UI elements, blit them to the screen, and flip the display
# only re-draws the board layer if corresponding flag is set; otherwise, cached surface is used
def update_screen(screen, board, main_viewport_rect, palette_viewport_rect, redraw_board=True, selected_entity=None, cursor_position=None):
screen.fill(SCREEN_BACKGROUND_COLOR)
global board_layer_cache
if redraw_board or board_layer_cache is None:
board_layer = pg.Surface((main_viewport_rect.width, main_viewport_rect.height))
draw_board_onto_viewport(board_layer, board, VIEWPORT_BACKGROUND_COLOR, GRID_COLOR)
board_layer_cache = board_layer.copy()
else:
board_layer = board_layer_cache
screen.blit(board_layer, main_viewport_rect)
palette_layer = pg.Surface((palette_viewport_rect.width, palette_viewport_rect.height))
draw_board_onto_viewport(palette_layer, PALETTE_BOARD, VIEWPORT_BACKGROUND_COLOR)
screen.blit(palette_layer, palette_viewport_rect)
if selected_entity and cursor_position:
board_width, board_height = len(board[0]), len(board)
tile_size_px = min(main_viewport_rect.width // board_width, main_viewport_rect.height // board_height)
img = get_entity_image(selected_entity, tile_size_px)
draw_pos = (cursor_position[0] - tile_size_px // 2, cursor_position[1] - tile_size_px // 2)
screen.blit(img, draw_pos)
pg.display.update()
# Size the 'root', 'main', and 'palette' viewports to both preserve level.board's aspect ratio and respect VIEWPORT_MIN_PADDING
# Returns (root_viewport_rect, main_viewport_rect, palette_rect)
def get_viewport_rects(screen_width_px, screen_height_px, board_width_tiles, board_height_tiles):
width_ratio = (screen_width_px - VIEWPORT_MIN_PADDING * 2) // (board_width_tiles + PALETTE_WIDTH + 1)
height_ratio = (screen_height_px - VIEWPORT_MIN_PADDING * 2) // board_height_tiles
pixels_per_tile = min(width_ratio, height_ratio)
root_viewport_width = (board_width_tiles + PALETTE_WIDTH + 1) * pixels_per_tile
root_viewport_height = board_height_tiles * pixels_per_tile
root_viewport_rect = pg.Rect(
((screen_width_px - root_viewport_width) // 2, (screen_height_px - root_viewport_height) // 2), # centered in screen
(root_viewport_width, root_viewport_height)
)
# calculate palette tile size (cannot be larger than main's)
pixels_per_tile_palette = root_viewport_height // PALETTE_HEIGHT
pixels_per_tile_palette = min(pixels_per_tile_palette, pixels_per_tile)
palette_viewport_width = pixels_per_tile_palette * PALETTE_WIDTH
palette_viewport_height = pixels_per_tile_palette * PALETTE_HEIGHT
palette_viewport_rect = pg.Rect(
(root_viewport_rect.left, root_viewport_rect.top + (root_viewport_height - palette_viewport_height) // 2),
(palette_viewport_width, palette_viewport_height)
)
main_viewport_rect = pg.Rect(
(root_viewport_rect.left + pixels_per_tile_palette * PALETTE_WIDTH + pixels_per_tile, root_viewport_rect.top),
(board_width_tiles * pixels_per_tile, root_viewport_height)
)
return (root_viewport_rect, main_viewport_rect, palette_viewport_rect)
def get_initialized_screen(screen_width_px, screen_height_px):
new_screen = pg.display.set_mode((screen_width_px, screen_height_px), pg.RESIZABLE)
new_screen.fill(SCREEN_BACKGROUND_COLOR)
return new_screen
# Takes a screen location in pixels and returns the corresponding board location
def pixels_to_tiles(x_px, y_px, viewport_rect, board_width_tiles, board_height_tiles):
x_px -= viewport_rect.left
y_px -= viewport_rect.top
x_tiles = int(float(x_px) / viewport_rect.width * board_width_tiles)
y_tiles = int(float(y_px) / viewport_rect.height * board_height_tiles)
return x_tiles, y_tiles
# Takes a screen location in pixels and returns the corresponding palette location
def pixels_to_tiles_palette(x_px, y_px, viewport_rect, palette_width_tiles, palette_height_tiles):
x_px -= viewport_rect.left
y_px -= viewport_rect.top
x_tiles = int(float(x_px) / viewport_rect.width * palette_width_tiles)
y_tiles = int(float(y_px) / viewport_rect.height * palette_height_tiles)
return x_tiles, y_tiles
# Initializes display, listens for keypress's, and handles window re-size events
def run_editor(board=None):
level_filename = None
board_width, board_height = None, None
root_viewport_rect, main_viewport_rect, palette_viewport_rect = None, None, None
# initialize screen; VIDEORESIZE event is generated immediately
screen = get_initialized_screen(STARTING_SCREEN_WIDTH, STARTING_SCREEN_HEIGHT)
board_layer_cache = None
if board is None:
board = [[[] for _ in range(BOARD_DEFAULT_DIMS[0])] for _ in range(BOARD_DEFAULT_DIMS[1])]
selected_entity = None
key_mods = pg.key.get_mods()
board_save_state = board_copy(board)
playtest_process = None
# discard selected entity and update screen (if CAPS-LOCK is not enabled)
def discard_selected_item():
nonlocal selected_entity
nonlocal root_viewport_rect, main_viewport_rect, palette_viewport_rect
if selected_entity:
if not key_mods & pg.KMOD_CAPS:
# discard selected entity
selected_entity = None
update_screen(screen, board, main_viewport_rect, palette_viewport_rect)
else:
# keep selected entity
update_screen(screen, board, main_viewport_rect, palette_viewport_rect, redraw_board=True, selected_entity=selected_entity, cursor_position=event.pos)
# recalculate board dimensions, recalculate viewports, and update screen
def refresh_layout():
nonlocal board_width, board_height
board_width, board_height = len(board[0]), len(board)
nonlocal root_viewport_rect, main_viewport_rect, palette_viewport_rect
root_viewport_rect, main_viewport_rect, palette_viewport_rect =\
get_viewport_rects(new_screen_width, new_screen_height, board_width, board_height)
update_screen(screen, board, main_viewport_rect, palette_viewport_rect)
# update window caption based off level_filename
def refresh_caption():
if level_filename:
caption = level_filename
else:
caption = "~ Unsaved Level ~"
pg.display.set_caption(caption)
refresh_caption()
# restore the initial VIDEORESIZE event (removed in pg 2.1)
pg.event.post(pg.event.Event(
pg.VIDEORESIZE,
{"w": STARTING_SCREEN_WIDTH, "h": STARTING_SCREEN_HEIGHT}
))
# main game loop
clock = pg.time.Clock()
editor_alive = True
while editor_alive:
clock.tick(TARGET_FPS)
# process input
for event in pg.event.get():
if event.type == pg.QUIT:
if board_save_state != board:
# if board_save_state is None or any(any(row) for row in board):
if not ask_yes_no("Level Editor", "You have unsaved work. Are you sure you want to quit?"):
continue
editor_alive = False
elif event.type == pg.VIDEORESIZE:
new_screen_width = max(event.w, MIN_SCREEN_WIDTH)
new_screen_height = max(event.h, MIN_SCREEN_HEIGHT)
screen = get_initialized_screen(new_screen_width, new_screen_height)
refresh_layout()
elif event.type == pg.MOUSEBUTTONDOWN:
if event.button == 1:
# handle main viewport clicks
if main_viewport_rect.collidepoint(event.pos):
x_tiles, y_tiles = pixels_to_tiles(*event.pos, main_viewport_rect, board_width, board_height)
clicked_tile = board[y_tiles][x_tiles]
if selected_entity is None:
# select an entity and redraw
if len(clicked_tile) > 0:
selected_entity = clicked_tile.pop() # remove top entity
update_screen(screen, board, main_viewport_rect, palette_viewport_rect, redraw_board=True, selected_entity=selected_entity, cursor_position=event.pos)
else:
# deselect the entity and redraw
clicked_tile.append(selected_entity)
discard_selected_item()
# handle palette viewport clicks
elif palette_viewport_rect.collidepoint(event.pos):
if selected_entity:
selected_entity = None
update_screen(screen, board, main_viewport_rect, palette_viewport_rect)
else:
x_tiles, y_tiles = pixels_to_tiles_palette(*event.pos, palette_viewport_rect, PALETTE_WIDTH, PALETTE_HEIGHT)
choice = PALETTE_LAYOUT[y_tiles][x_tiles]
selected_entity = choice
update_screen(screen, board, main_viewport_rect, palette_viewport_rect, redraw_board=True, selected_entity=selected_entity, cursor_position=event.pos)
# handle background clicks (i.e. no viewports)
else:
discard_selected_item()
elif event.button == 3:
discard_selected_item()
elif event.type == pg.MOUSEMOTION:
if selected_entity:
if root_viewport_rect.collidepoint(event.pos):
update_screen(screen, board, main_viewport_rect, palette_viewport_rect, redraw_board=False, selected_entity=selected_entity, cursor_position=event.pos)
elif event.type == pg.KEYDOWN:
key_mods = pg.key.get_mods()
# handle board size changes
board_size_changed = False
decreasing = key_mods & pg.KMOD_SHIFT
increasing = not decreasing
# # format (x, y, delta)
# size_delta = [0, 0, 0] # one of (0,0,0), (-1,0,1), (1,0,1), (0,-1,1), (0,1,1),
# # (-1,0,-1), (1,0,-1), (0,-1,-1), (0,1,-1)
if event.key == pg.K_UP:
if increasing and board_height < BOARD_HEIGHT_RANGE[1]:
board.insert(0, [[] for _ in range(board_width)])
board_size_changed = True
elif decreasing and board_height > BOARD_HEIGHT_RANGE[0]:
board.pop(0)
board_size_changed = True
elif event.key == pg.K_DOWN:
if increasing and board_height < BOARD_HEIGHT_RANGE[1]:
board.append([[] for _ in range(board_width)])
board_size_changed = True
elif decreasing and board_height > BOARD_HEIGHT_RANGE[0]:
board.pop()
board_size_changed = True
elif event.key == pg.K_RIGHT:
if increasing and board_width < BOARD_WIDTH_RANGE[1]:
for row in board: row.append([])
board_size_changed = True
elif decreasing and board_height > BOARD_WIDTH_RANGE[0]:
for row in board: row.pop()
board_size_changed = True
elif event.key == pg.K_LEFT:
if increasing and board_width < BOARD_WIDTH_RANGE[1]:
for row in board: row.insert(0, [])
board_size_changed = True
elif decreasing and board_height > BOARD_WIDTH_RANGE[0]:
for row in board: row.pop()
board_size_changed = True
if board_size_changed:
refresh_layout()
# handle keyboard shortcuts
if key_mods & pg.KMOD_CTRL:
if event.key == pg.K_o:
# Open
if board_save_state != board:
if not ask_yes_no("Level Editor", "You have unsaved work that will be overwitten by opening another level. Are you sure you want to continue?"):
continue
if res := ask_open_filename(**FILE_DIALOG_OPTIONS):
level_filename = res
board = read_level(level_filename)
board_save_state = board_copy(board)
refresh_layout()
refresh_caption()
print(f"opened {level_filename}")
elif event.key == pg.K_s:
if key_mods & pg.KMOD_SHIFT:
# Save as
if res := ask_save_as_filename(**FILE_DIALOG_OPTIONS):
level_filename = res
write_level(level_filename, board)
board_save_state = board_copy(board)
refresh_caption()
print(f"saved to {level_filename}")
else:
# Save
if level_filename is None:
if res := ask_save_as_filename(**FILE_DIALOG_OPTIONS):
level_filename = res
if level_filename:
write_level(level_filename, board)
board_save_state = board_copy(board)
refresh_caption()
print(f"saved to {level_filename}")
elif event.key == pg.K_SPACE:
# spawn a new process running play_level (can only have one alive at a time)
if playtest_process is None or not playtest_process.is_alive():
playtest_process = Process(target=play_level, args=(Level(board_copy(board), logging=False),))
playtest_process.start()
elif event.type == pg.KEYUP:
key_mods = pg.key.get_mods()
USAGE_TEXT = """
+------------- SHORTCUTS -------------+
| Open: CTRL + O |
| Save: CTRL + S |
| Save as: CTRL + SHIFT + S |
| --------------------------------- |
| Size++: ARROW-KEYS |
| Size--: SHIFT + ARROW-KEYS |
| Repeat mode: CAPS-LOCK |
| Playtest: SPACE |
+-------------------------------------+
"""
if __name__ == "__main__":
print(USAGE_TEXT)
run_editor()
# run_editor(levels[0])
| [
"pygame.display.set_caption",
"engine.board_copy",
"pygame.key.get_mods",
"pygame.Surface",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.event.Event",
"pygame.time.Clock",
"pygame.display.update",
"pygame.Rect",
"levels.read_level",
"levels.write_level"
] | [((2578, 2649), 'pygame.Surface', 'pg.Surface', (['(palette_viewport_rect.width, palette_viewport_rect.height)'], {}), '((palette_viewport_rect.width, palette_viewport_rect.height))\n', (2588, 2649), True, 'import pygame as pg\n'), ((3211, 3230), 'pygame.display.update', 'pg.display.update', ([], {}), '()\n', (3228, 3230), True, 'import pygame as pg\n'), ((3945, 4097), 'pygame.Rect', 'pg.Rect', (['((screen_width_px - root_viewport_width) // 2, (screen_height_px -\n root_viewport_height) // 2)', '(root_viewport_width, root_viewport_height)'], {}), '(((screen_width_px - root_viewport_width) // 2, (screen_height_px -\n root_viewport_height) // 2), (root_viewport_width, root_viewport_height))\n', (3952, 4097), True, 'import pygame as pg\n'), ((4523, 4698), 'pygame.Rect', 'pg.Rect', (['(root_viewport_rect.left, root_viewport_rect.top + (root_viewport_height -\n palette_viewport_height) // 2)', '(palette_viewport_width, palette_viewport_height)'], {}), '((root_viewport_rect.left, root_viewport_rect.top + (\n root_viewport_height - palette_viewport_height) // 2), (\n palette_viewport_width, palette_viewport_height))\n', (4530, 4698), True, 'import pygame as pg\n'), ((4737, 4924), 'pygame.Rect', 'pg.Rect', (['(root_viewport_rect.left + pixels_per_tile_palette * PALETTE_WIDTH +\n pixels_per_tile, root_viewport_rect.top)', '(board_width_tiles * pixels_per_tile, root_viewport_height)'], {}), '((root_viewport_rect.left + pixels_per_tile_palette * PALETTE_WIDTH +\n pixels_per_tile, root_viewport_rect.top), (board_width_tiles *\n pixels_per_tile, root_viewport_height))\n', (4744, 4924), True, 'import pygame as pg\n'), ((5097, 5167), 'pygame.display.set_mode', 'pg.display.set_mode', (['(screen_width_px, screen_height_px)', 'pg.RESIZABLE'], {}), '((screen_width_px, screen_height_px), pg.RESIZABLE)\n', (5116, 5167), True, 'import pygame as pg\n'), ((6684, 6701), 'pygame.key.get_mods', 'pg.key.get_mods', ([], {}), '()\n', (6699, 6701), True, 'import pygame as pg\n'), ((6726, 
6743), 'engine.board_copy', 'board_copy', (['board'], {}), '(board)\n', (6736, 6743), False, 'from engine import Level, board_copy\n'), ((8501, 8516), 'pygame.time.Clock', 'pg.time.Clock', ([], {}), '()\n', (8514, 8516), True, 'import pygame as pg\n'), ((2253, 2318), 'pygame.Surface', 'pg.Surface', (['(main_viewport_rect.width, main_viewport_rect.height)'], {}), '((main_viewport_rect.width, main_viewport_rect.height))\n', (2263, 2318), True, 'import pygame as pg\n'), ((8216, 8247), 'pygame.display.set_caption', 'pg.display.set_caption', (['caption'], {}), '(caption)\n', (8238, 8247), True, 'import pygame as pg\n'), ((8354, 8447), 'pygame.event.Event', 'pg.event.Event', (['pg.VIDEORESIZE', "{'w': STARTING_SCREEN_WIDTH, 'h': STARTING_SCREEN_HEIGHT}"], {}), "(pg.VIDEORESIZE, {'w': STARTING_SCREEN_WIDTH, 'h':\n STARTING_SCREEN_HEIGHT})\n", (8368, 8447), True, 'import pygame as pg\n'), ((8642, 8656), 'pygame.event.get', 'pg.event.get', ([], {}), '()\n', (8654, 8656), True, 'import pygame as pg\n'), ((11815, 11832), 'pygame.key.get_mods', 'pg.key.get_mods', ([], {}), '()\n', (11830, 11832), True, 'import pygame as pg\n'), ((16409, 16426), 'pygame.key.get_mods', 'pg.key.get_mods', ([], {}), '()\n', (16424, 16426), True, 'import pygame as pg\n'), ((14590, 14616), 'levels.read_level', 'read_level', (['level_filename'], {}), '(level_filename)\n', (14600, 14616), False, 'from levels import levels, read_level, write_level, LEVELS_DIR\n'), ((14664, 14681), 'engine.board_copy', 'board_copy', (['board'], {}), '(board)\n', (14674, 14681), False, 'from engine import Level, board_copy\n'), ((15141, 15175), 'levels.write_level', 'write_level', (['level_filename', 'board'], {}), '(level_filename, board)\n', (15152, 15175), False, 'from levels import levels, read_level, write_level, LEVELS_DIR\n'), ((15227, 15244), 'engine.board_copy', 'board_copy', (['board'], {}), '(board)\n', (15237, 15244), False, 'from engine import Level, board_copy\n'), ((15706, 15740), 'levels.write_level', 
'write_level', (['level_filename', 'board'], {}), '(level_filename, board)\n', (15717, 15740), False, 'from levels import levels, read_level, write_level, LEVELS_DIR\n'), ((15792, 15809), 'engine.board_copy', 'board_copy', (['board'], {}), '(board)\n', (15802, 15809), False, 'from engine import Level, board_copy\n'), ((16254, 16271), 'engine.board_copy', 'board_copy', (['board'], {}), '(board)\n', (16264, 16271), False, 'from engine import Level, board_copy\n')] |
# -*- coding: utf-8 -*-
import tensorflow as tf
import matplotlib.pyplot as plt
import matplotlib as mpl
import simulators
import derivatives
import utils
import books
import hedge_models
import preprocessing
import approximators
from constants import FLOAT_DTYPE
class BrownianMotion(simulators.GBM):
    """Driftless arithmetic Brownian motion with constant diffusion.

    Specialises the GBM simulator to zero rate and zero drift; only the
    state-update rule differs: increments are additive rather than
    multiplicative.
    """

    def __init__(self, diffusion):
        """Build the single-instrument simulator.

        Args:
            diffusion: scalar volatility of the additive increments.
        """
        zero_drift = tf.constant([0.], FLOAT_DTYPE)
        vol_matrix = tf.constant([[diffusion]], FLOAT_DTYPE)
        super().__init__(rate=0., drift=zero_drift, diffusion=vol_matrix)

    def advance(self, state, rvs, dt, risk_neutral):
        """Step the state forward by dt using the supplied standard normals.

        The drift is zero here, so the `risk_neutral` flag cannot change
        the update; it is accepted only for interface compatibility.
        """
        increment = self.diffusion * tf.sqrt(dt) * rvs
        return state + increment
class BachelierBinary(derivatives.BinaryCall):
    """Binary (cash-or-nothing) call priced under the Bachelier model.

    With d = (S - K) / (sigma * sqrt(ttm)) the closed forms are
        value = N(d),
        delta = phi(d) / (sigma * sqrt(ttm)),
        gamma = -d * phi(d) / (sigma * sqrt(ttm))**2,
    where N / phi denote the standard normal cdf / pdf. The `numeraire`
    argument is accepted for interface compatibility but unused here.
    """

    def __init__(self, maturity, strike, volatility):
        super().__init__(maturity, strike, volatility)

    def adjoint(self, time, instrument, numeraire):
        # Pathwise (adjoint) derivative is not implemented for this payoff.
        raise NotImplementedError

    def value(self, time, instrument, numeraire):
        """Return N(d), the Bachelier binary-call price."""
        ttm = self.maturity - time
        vol_time = self.volatility * tf.sqrt(ttm)
        d = (instrument - self.strike) / vol_time
        return utils.norm_cdf(d)

    def delta(self, time, instrument, numeraire):
        """Return dV/dS = phi(d) / (sigma * sqrt(ttm))."""
        ttm = self.maturity - time
        vol_time = self.volatility * tf.sqrt(ttm)
        d = (instrument - self.strike) / vol_time
        return utils.norm_pdf(d) / vol_time

    def gamma(self, time, instrument, numeraire):
        """Return d2V/dS2 = -d * phi(d) / (sigma * sqrt(ttm))**2.

        Follows from phi'(d) = -d * phi(d) and the chain rule: each
        differentiation w.r.t. the instrument contributes a 1 / vol_time
        factor.
        """
        ttm = self.maturity - time
        vol_time = self.volatility * tf.sqrt(ttm)
        d = (instrument - self.strike) / vol_time
        # BUG FIX: the original returned -d * delta, missing the extra
        # 1 / vol_time from differentiating delta w.r.t. the instrument.
        return -d * self.delta(time, instrument, numeraire) / vol_time
cost = False
spot = 1
strike = 1
timesteps = 14
sigma = 0.2
maturity = timesteps / 250
if cost:
instrument_simulator = simulators.GBM(0.0, 0.0, [[sigma]])
derivative = derivatives.PutCall(maturity, strike, 0.0, sigma, 1)
else:
instrument_simulator = BrownianMotion(sigma)
derivative = BachelierBinary(maturity, strike, sigma)
numeraire_simulator = simulators.ConstantBankAccount(0.0)
book = books.DerivativeBook(
maturity,
instrument_simulator,
numeraire_simulator)
book.add_derivative(derivative, 0, 1.0)
init_instruments = tf.constant([spot], FLOAT_DTYPE)
init_numeraire = tf.constant([1.0], FLOAT_DTYPE)
driver = utils.HedgeDriver(
timesteps=timesteps,
frequency=0, # no need for frequency for non-path dependent derivatives.
init_instruments=init_instruments,
init_numeraire=init_numeraire,
book=book,
cost=1/100 if cost else None,
risk_neutral=True,
learning_rate=1e-1
)
driver.verbose = 2
risklevels = [0.05, 0.5, 0.95] if not cost else [0.95]
for alpha in risklevels:
driver.add_testcase(
f"deep network {alpha}",
hedge_models.NeuralHedge(
timesteps=timesteps,
instrument_dim=book.instrument_dim,
internal_dim=0,
num_layers=4,
num_units=5,
activation=tf.keras.activations.softplus),
risk_measure=hedge_models.ExpectedShortfall(alpha),
normaliser=preprocessing.MeanVarianceNormaliser(),
feature_function="log_martingale",
price_type="arbitrage")
if driver.cost is not None or not driver.risk_neutral:
driver.add_liability_free(
hedge_models.LinearFeatureHedge(
timesteps=timesteps,
instrument_dim=book.instrument_dim,
mappings=[approximators.IdentityFeatureMap] \
* (1 + (driver.cost is not None))),
risk_measure=hedge_models.ExpectedShortfall(alpha),
normaliser=preprocessing.MeanVarianceNormaliser(),
feature_function="log_martingale")
train_size, test_size = int(2**18), int(2**18)
driver.train(train_size, epochs=1000, batch_size=64)
driver.test(test_size)
# ==============================================================================
# === visualise
raw_data = driver.sample(int(2**18))
idx = 8
if cost:
case = driver.testcases[1]
input_data = driver.get_input(case, raw_data)
ratios = case["model"].strategy(input_data[0], training=False)
x1 = raw_data["instruments"][:, 0, idx]
x2 = ratios[:, 0, idx - 1]
y = ratios[:, 0, idx] - raw_data["delta"][:, 0, idx]
plt.figure()
plt.xlabel("value of underlying instrument")
plt.ylabel("holdings from previous period")
plt.scatter(x1.numpy(), x2.numpy(), c=y.numpy(), s=0.5)
plt.colorbar()
plt.ioff()
plt.savefig(fr"figures\riskaverseplot-cost-{case['name']}.png", dpi=500)
else:
colours = ["#E32D91", "#C830CC", "#4EA6DC", "#4775E7", "#8971E1"]
mpl.rcParams['axes.prop_cycle'] = mpl.cycler(color=colours)
time, instruments, numeraire = raw_data["time"], raw_data["instruments"], \
raw_data["numeraire"]
plt.figure()
x = raw_data["instruments"][:, 0, idx]
key = tf.argsort(x)
delta = derivative.delta(time, instruments, numeraire)[:, 0, idx]
gamma = derivative.gamma(time, instruments, numeraire)[:, 0, idx]
# plt.plot(tf.gather(x, key).numpy(), tf.gather(delta, key).numpy(), "--", color="black")
plt.plot(tf.gather(x, key).numpy(), tf.gather(gamma, key).numpy(), "-.", color="black")
for case in driver.testcases:
input_data = driver.get_input(case, raw_data)
strategy = case["model"].strategy(input_data[0], training=False)
y = strategy[:, 0, idx] - delta # remove
plt.plot(tf.gather(x, key).numpy(), tf.gather(y, key).numpy())
# plt.xlim(0.85, 1.15)
plt.xlabel("value of underlying instrument")
plt.ylabel("exposure to underlying instrument")
plt.legend(["\u0394", "\u0393"] + [f"\u03B1={alpha:.0%}" for alpha in risklevels])
plt.savefig(r"figures\riskaverseplot-nocost.eps")
| [
"matplotlib.pyplot.ylabel",
"utils.norm_pdf",
"hedge_models.NeuralHedge",
"utils.norm_cdf",
"matplotlib.pyplot.xlabel",
"simulators.GBM",
"hedge_models.LinearFeatureHedge",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.ioff",
"derivatives.PutCall",
"tensorflow.sqrt",
"tensorflow.gather",
"... | [((1935, 1970), 'simulators.ConstantBankAccount', 'simulators.ConstantBankAccount', (['(0.0)'], {}), '(0.0)\n', (1965, 1970), False, 'import simulators\n'), ((1979, 2052), 'books.DerivativeBook', 'books.DerivativeBook', (['maturity', 'instrument_simulator', 'numeraire_simulator'], {}), '(maturity, instrument_simulator, numeraire_simulator)\n', (1999, 2052), False, 'import books\n'), ((2127, 2159), 'tensorflow.constant', 'tf.constant', (['[spot]', 'FLOAT_DTYPE'], {}), '([spot], FLOAT_DTYPE)\n', (2138, 2159), True, 'import tensorflow as tf\n'), ((2177, 2208), 'tensorflow.constant', 'tf.constant', (['[1.0]', 'FLOAT_DTYPE'], {}), '([1.0], FLOAT_DTYPE)\n', (2188, 2208), True, 'import tensorflow as tf\n'), ((2219, 2427), 'utils.HedgeDriver', 'utils.HedgeDriver', ([], {'timesteps': 'timesteps', 'frequency': '(0)', 'init_instruments': 'init_instruments', 'init_numeraire': 'init_numeraire', 'book': 'book', 'cost': '(1 / 100 if cost else None)', 'risk_neutral': '(True)', 'learning_rate': '(0.1)'}), '(timesteps=timesteps, frequency=0, init_instruments=\n init_instruments, init_numeraire=init_numeraire, book=book, cost=1 / \n 100 if cost else None, risk_neutral=True, learning_rate=0.1)\n', (2236, 2427), False, 'import utils\n'), ((1694, 1729), 'simulators.GBM', 'simulators.GBM', (['(0.0)', '(0.0)', '[[sigma]]'], {}), '(0.0, 0.0, [[sigma]])\n', (1708, 1729), False, 'import simulators\n'), ((1747, 1799), 'derivatives.PutCall', 'derivatives.PutCall', (['maturity', 'strike', '(0.0)', 'sigma', '(1)'], {}), '(maturity, strike, 0.0, sigma, 1)\n', (1766, 1799), False, 'import derivatives\n'), ((4160, 4172), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4170, 4172), True, 'import matplotlib.pyplot as plt\n'), ((4177, 4221), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""value of underlying instrument"""'], {}), "('value of underlying instrument')\n", (4187, 4221), True, 'import matplotlib.pyplot as plt\n'), ((4226, 4269), 'matplotlib.pyplot.ylabel', 'plt.ylabel', 
(['"""holdings from previous period"""'], {}), "('holdings from previous period')\n", (4236, 4269), True, 'import matplotlib.pyplot as plt\n'), ((4335, 4349), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (4347, 4349), True, 'import matplotlib.pyplot as plt\n'), ((4354, 4364), 'matplotlib.pyplot.ioff', 'plt.ioff', ([], {}), '()\n', (4362, 4364), True, 'import matplotlib.pyplot as plt\n'), ((4369, 4441), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""figures\\\\riskaverseplot-cost-{case[\'name\']}.png"""'], {'dpi': '(500)'}), '(f"figures\\\\riskaverseplot-cost-{case[\'name\']}.png", dpi=500)\n', (4380, 4441), True, 'import matplotlib.pyplot as plt\n'), ((4556, 4581), 'matplotlib.cycler', 'mpl.cycler', ([], {'color': 'colours'}), '(color=colours)\n', (4566, 4581), True, 'import matplotlib as mpl\n'), ((4697, 4709), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4707, 4709), True, 'import matplotlib.pyplot as plt\n'), ((4763, 4776), 'tensorflow.argsort', 'tf.argsort', (['x'], {}), '(x)\n', (4773, 4776), True, 'import tensorflow as tf\n'), ((5417, 5461), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""value of underlying instrument"""'], {}), "('value of underlying instrument')\n", (5427, 5461), True, 'import matplotlib.pyplot as plt\n'), ((5466, 5513), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""exposure to underlying instrument"""'], {}), "('exposure to underlying instrument')\n", (5476, 5513), True, 'import matplotlib.pyplot as plt\n'), ((5518, 5585), 'matplotlib.pyplot.legend', 'plt.legend', (["(['Δ', 'Γ'] + [f'α={alpha:.0%}' for alpha in risklevels])"], {}), "(['Δ', 'Γ'] + [f'α={alpha:.0%}' for alpha in risklevels])\n", (5528, 5585), True, 'import matplotlib.pyplot as plt\n'), ((5605, 5654), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""figures\\\\riskaverseplot-nocost.eps"""'], {}), "('figures\\\\riskaverseplot-nocost.eps')\n", (5616, 5654), True, 'import matplotlib.pyplot as plt\n'), ((1072, 1089), 'utils.norm_cdf', 
'utils.norm_cdf', (['d'], {}), '(d)\n', (1086, 1089), False, 'import utils\n'), ((2681, 2857), 'hedge_models.NeuralHedge', 'hedge_models.NeuralHedge', ([], {'timesteps': 'timesteps', 'instrument_dim': 'book.instrument_dim', 'internal_dim': '(0)', 'num_layers': '(4)', 'num_units': '(5)', 'activation': 'tf.keras.activations.softplus'}), '(timesteps=timesteps, instrument_dim=book.\n instrument_dim, internal_dim=0, num_layers=4, num_units=5, activation=\n tf.keras.activations.softplus)\n', (2705, 2857), False, 'import hedge_models\n'), ((3211, 3388), 'hedge_models.LinearFeatureHedge', 'hedge_models.LinearFeatureHedge', ([], {'timesteps': 'timesteps', 'instrument_dim': 'book.instrument_dim', 'mappings': '([approximators.IdentityFeatureMap] * (1 + (driver.cost is not None)))'}), '(timesteps=timesteps, instrument_dim=book.\n instrument_dim, mappings=[approximators.IdentityFeatureMap] * (1 + (\n driver.cost is not None)))\n', (3242, 3388), False, 'import hedge_models\n'), ((993, 1005), 'tensorflow.sqrt', 'tf.sqrt', (['ttm'], {}), '(ttm)\n', (1000, 1005), True, 'import tensorflow as tf\n'), ((1213, 1225), 'tensorflow.sqrt', 'tf.sqrt', (['ttm'], {}), '(ttm)\n', (1220, 1225), True, 'import tensorflow as tf\n'), ((1292, 1309), 'utils.norm_pdf', 'utils.norm_pdf', (['d'], {}), '(d)\n', (1306, 1309), False, 'import utils\n'), ((1444, 1456), 'tensorflow.sqrt', 'tf.sqrt', (['ttm'], {}), '(ttm)\n', (1451, 1456), True, 'import tensorflow as tf\n'), ((2943, 2980), 'hedge_models.ExpectedShortfall', 'hedge_models.ExpectedShortfall', (['alpha'], {}), '(alpha)\n', (2973, 2980), False, 'import hedge_models\n'), ((3001, 3039), 'preprocessing.MeanVarianceNormaliser', 'preprocessing.MeanVarianceNormaliser', ([], {}), '()\n', (3037, 3039), False, 'import preprocessing\n'), ((3456, 3493), 'hedge_models.ExpectedShortfall', 'hedge_models.ExpectedShortfall', (['alpha'], {}), '(alpha)\n', (3486, 3493), False, 'import hedge_models\n'), ((3514, 3552), 'preprocessing.MeanVarianceNormaliser', 
'preprocessing.MeanVarianceNormaliser', ([], {}), '()\n', (3550, 3552), False, 'import preprocessing\n'), ((405, 436), 'tensorflow.constant', 'tf.constant', (['[0.0]', 'FLOAT_DTYPE'], {}), '([0.0], FLOAT_DTYPE)\n', (416, 436), True, 'import tensorflow as tf\n'), ((459, 498), 'tensorflow.constant', 'tf.constant', (['[[diffusion]]', 'FLOAT_DTYPE'], {}), '([[diffusion]], FLOAT_DTYPE)\n', (470, 498), True, 'import tensorflow as tf\n'), ((5025, 5042), 'tensorflow.gather', 'tf.gather', (['x', 'key'], {}), '(x, key)\n', (5034, 5042), True, 'import tensorflow as tf\n'), ((5052, 5073), 'tensorflow.gather', 'tf.gather', (['gamma', 'key'], {}), '(gamma, key)\n', (5061, 5073), True, 'import tensorflow as tf\n'), ((607, 618), 'tensorflow.sqrt', 'tf.sqrt', (['dt'], {}), '(dt)\n', (614, 618), True, 'import tensorflow as tf\n'), ((5332, 5349), 'tensorflow.gather', 'tf.gather', (['x', 'key'], {}), '(x, key)\n', (5341, 5349), True, 'import tensorflow as tf\n'), ((5359, 5376), 'tensorflow.gather', 'tf.gather', (['y', 'key'], {}), '(y, key)\n', (5368, 5376), True, 'import tensorflow as tf\n')] |
"""
Copyright MIT and Harvey Mudd College
MIT License
Summer 2020
Lab 4B - LIDAR Wall Following
"""
########################################################################################
# Imports
########################################################################################
import sys
import cv2 as cv
import numpy as np
sys.path.insert(0, "../../library")
import racecar_core
import racecar_utils as rc_utils
from enum import IntEnum
########################################################################################
# Global variables
########################################################################################
rc = racecar_core.create_racecar()
# Add any global variables here
DIST = 45
LEFT_WINDOW = (260, 280)
RIGHT_WINDOW = (80, 100)
# RIGHT_TOP_WINDOW = (50, 70)
# LEFT_TOP_WINDOW = (320, 340)
RIGHT_TOP_WINDOW = (30, 50)
LEFT_TOP_WINDOW = (310, 330)
FRONT_WINDOW = (-20, 20)
REAR_WINDOW = (170, 190)
class State(IntEnum):
drive = 0
cur_state = State.drive
########################################################################################
# Functions
########################################################################################
def start():
"""
This function is run once every time the start button is pressed
"""
# Have the car begin at a stop
rc.drive.stop()
# Print start message
print(">> Lab 4B - LIDAR Wall Following")
def update():
"""
After start() is run, this function is run every frame until the back button
is pressed
"""
# TODO: Follow the wall to the right of the car without hitting anything.
global DIST, RIGHT_TOP_WINDOW, LEFT_TOP_WINDOW, RIGHT_WINDOW, LEFT_WINDOW, FRONT_WINDOW, REAR_WINDOW
global cur_state
scan = rc.lidar.get_samples()
scan = (scan - 0.01) % 100000
speed = 1
angle = 0
_, right_dist = rc_utils.get_lidar_closest_point(scan, RIGHT_WINDOW)
_, left_dist = rc_utils.get_lidar_closest_point(scan, LEFT_WINDOW)
_, right_top_dist = rc_utils.get_lidar_closest_point(scan, RIGHT_TOP_WINDOW)
_, left_top_dist = rc_utils.get_lidar_closest_point(scan, LEFT_TOP_WINDOW)
_, front_dist = rc_utils.get_lidar_closest_point(scan, FRONT_WINDOW)
_, rear_dist = rc_utils.get_lidar_closest_point(scan, REAR_WINDOW)
if cur_state == State.drive:
if right_top_dist > left_top_dist:
angle = angle_controller(right_top_dist, 1)
else:
angle = angle_controller(left_top_dist, -1)
if abs(angle) > 0.75:
kP = 2
speed = 1 / (abs(angle) * kP)
speed = rc_utils.clamp(speed, -1, 1)
rc.drive.set_speed_angle(speed, angle)
def angle_controller(distance, direction):
global DIST
kP = 4
angle = 0
error = distance - DIST
angle = kP * (error / DIST) * direction
return rc_utils.clamp(angle, -1, 1)
########################################################################################
# DO NOT MODIFY: Register start and update and begin execution
########################################################################################
if __name__ == "__main__":
rc.set_start_update(start, update, None)
rc.go()
| [
"racecar_utils.clamp",
"racecar_core.create_racecar",
"sys.path.insert",
"racecar_utils.get_lidar_closest_point"
] | [((339, 374), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../../library"""'], {}), "(0, '../../library')\n", (354, 374), False, 'import sys\n'), ((658, 687), 'racecar_core.create_racecar', 'racecar_core.create_racecar', ([], {}), '()\n', (685, 687), False, 'import racecar_core\n'), ((1886, 1938), 'racecar_utils.get_lidar_closest_point', 'rc_utils.get_lidar_closest_point', (['scan', 'RIGHT_WINDOW'], {}), '(scan, RIGHT_WINDOW)\n', (1918, 1938), True, 'import racecar_utils as rc_utils\n'), ((1958, 2009), 'racecar_utils.get_lidar_closest_point', 'rc_utils.get_lidar_closest_point', (['scan', 'LEFT_WINDOW'], {}), '(scan, LEFT_WINDOW)\n', (1990, 2009), True, 'import racecar_utils as rc_utils\n'), ((2034, 2090), 'racecar_utils.get_lidar_closest_point', 'rc_utils.get_lidar_closest_point', (['scan', 'RIGHT_TOP_WINDOW'], {}), '(scan, RIGHT_TOP_WINDOW)\n', (2066, 2090), True, 'import racecar_utils as rc_utils\n'), ((2114, 2169), 'racecar_utils.get_lidar_closest_point', 'rc_utils.get_lidar_closest_point', (['scan', 'LEFT_TOP_WINDOW'], {}), '(scan, LEFT_TOP_WINDOW)\n', (2146, 2169), True, 'import racecar_utils as rc_utils\n'), ((2190, 2242), 'racecar_utils.get_lidar_closest_point', 'rc_utils.get_lidar_closest_point', (['scan', 'FRONT_WINDOW'], {}), '(scan, FRONT_WINDOW)\n', (2222, 2242), True, 'import racecar_utils as rc_utils\n'), ((2262, 2313), 'racecar_utils.get_lidar_closest_point', 'rc_utils.get_lidar_closest_point', (['scan', 'REAR_WINDOW'], {}), '(scan, REAR_WINDOW)\n', (2294, 2313), True, 'import racecar_utils as rc_utils\n'), ((2879, 2907), 'racecar_utils.clamp', 'rc_utils.clamp', (['angle', '(-1)', '(1)'], {}), '(angle, -1, 1)\n', (2893, 2907), True, 'import racecar_utils as rc_utils\n'), ((2630, 2658), 'racecar_utils.clamp', 'rc_utils.clamp', (['speed', '(-1)', '(1)'], {}), '(speed, -1, 1)\n', (2644, 2658), True, 'import racecar_utils as rc_utils\n')] |
import indicoio, os, json
indicoio.config.api_key = '27df1eee04c5b65fb3113e9458d1d701'
fileDir = os.path.dirname(os.path.realpath('__file__'))
fileResumeTxt = open(os.path.join(fileDir, "data/resume.txt"), 'w')
resume = "data/resumePDF.pdf"
print(json.dumps(indicoio.pdf_extraction(resume))) | [
"os.path.realpath",
"indicoio.pdf_extraction",
"os.path.join"
] | [((119, 147), 'os.path.realpath', 'os.path.realpath', (['"""__file__"""'], {}), "('__file__')\n", (135, 147), False, 'import indicoio, os, json\n'), ((171, 211), 'os.path.join', 'os.path.join', (['fileDir', '"""data/resume.txt"""'], {}), "(fileDir, 'data/resume.txt')\n", (183, 211), False, 'import indicoio, os, json\n'), ((269, 300), 'indicoio.pdf_extraction', 'indicoio.pdf_extraction', (['resume'], {}), '(resume)\n', (292, 300), False, 'import indicoio, os, json\n')] |
# :coding: utf-8
import pytest
import os
import champollion.parser.helper
@pytest.mark.parametrize(
("content_lines", "line_number", "expected"),
[
(
[
"/**",
" * An function example.",
" *",
" * Detailed description.",
" *",
" * .. note::",
" *",
" * A note.",
" */",
"function sum(a, b) {",
" return a+b;",
"}",
],
10,
(
"An function example.\n"
"\n"
"Detailed description.\n"
"\n"
".. note::\n"
"\n"
" A note."
)
),
(
[
"/** A cool data. */",
"const Data = null",
],
2,
(
"A cool data."
)
),
(
[
"/*",
" * Incorrect docstring",
" */",
"function doSomething() {",
" console.log('something');",
"}",
],
4,
None
),
(
[
"/*",
"",
" Incorrect docstring",
"",
"*/",
"function doSomethingElse() {",
" console.log('something_else');",
"}",
],
6,
None
),
(
[
"// Incorrect docstring",
"function doSomethingElse() {",
" console.log('something_else');",
"}",
],
2,
None
),
(
[
"",
"function doSomethingElse() {",
" console.log('something_else');",
"}",
],
2,
None
),
(
[
"/** A cool data. */",
"const Data = null",
],
1,
None
)
],
ids=[
"valid element line number with multiline docstring",
"valid element line number with one line docstring",
"valid element line number with incorrect docstring 1",
"valid element line number with incorrect docstring 2",
"valid element line number with incorrect docstring 3",
"valid element line number with no docstring",
"invalid line_number",
]
)
def test_get_docstrings(content_lines, line_number, expected):
"""Return docstrings from a element's line number."""
assert champollion.parser.helper.get_docstring(
line_number, content_lines
) == expected
def test_filter_comments():
"""Remove all comments from content"""
content = (
"'use strict'; /* a beautiful comment */\n"
"\n"
"/*\n"
"a long comment that can take a lot of places so\n"
"we put it on several lines.\n"
"*/\n"
"\n"
"// a data docstring\n"
"const DATA = 1;\n"
"\n"
"/**\n"
" * Function docstring\n"
" */\n"
"function sum(a, b) {\n"
" // Return the sum of a and b\n"
" return a+b;\n"
"}\n"
"\n"
"const url = 'http://somewhere.com';\n"
"\n"
)
expected = (
"'use strict'; \n"
"\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"const DATA = 1;\n"
"\n"
"\n"
"\n"
"\n"
"function sum(a, b) {\n"
" \n"
" return a+b;\n"
"}\n"
"\n"
"const url = 'http://somewhere.com';\n"
"\n"
)
assert champollion.parser.helper.filter_comments(content) == expected
def test_filter_comments_keep_content_size():
"""Remove all comments from content while keeping content size."""
content = (
"'use strict'; /* a beautiful comment */\n"
"\n"
"/*\n"
"a long comment that can take a lot of places so\n"
"we put it on several lines.\n"
"*/\n"
"\n"
"// a data docstring\n"
"const DATA = 1;\n"
"\n"
"/**\n"
" * Function docstring\n"
" */\n"
"function sum(a, b) {\n"
" // Return the sum of a and b\n"
" return a+b;\n"
"}\n"
"\n"
"const url = 'http://somewhere.com';\n"
"\n"
)
expected = (
"'use strict'; {comment1}\n"
"\n"
"{comment2}\n"
"\n"
"\n"
"\n"
"\n"
"{comment3}\n"
"const DATA = 1;\n"
"\n"
"{comment4}\n"
"\n"
"\n"
"function sum(a, b) {{\n"
" {comment5}\n"
" return a+b;\n"
"}}\n"
"\n"
"const url = 'http://somewhere.com';\n"
"\n"
).format(
comment1=" " * len("/* a beautiful comment */"),
comment2=" " * len(
"/*"
"a long comment that can take a lot of places so"
"we put it on several lines."
"*/"
),
comment3=" " * len("// a data docstring"),
comment4=" " * len(
"/**"
" * Function docstring"
" */"
),
comment5=" " * len("// Return the sum of a and b")
)
assert champollion.parser.helper.filter_comments(
content, keep_content_size=True
) == expected
def test_filter_comments_without_multiline_comments():
"""Remove all comments from content without multiline comments."""
content = (
"'use strict'; /* a beautiful comment */\n"
"\n"
"/*\n"
"a long comment that can take a lot of places so\n"
"we put it on several lines.\n"
"*/\n"
"\n"
"// a data docstring\n"
"const DATA = 1;\n"
"\n"
"/**\n"
" * Function docstring\n"
" */\n"
"function sum(a, b) {\n"
" // Return the sum of a and b\n"
" return a+b;\n"
"}\n"
"\n"
"const url = 'http://somewhere.com';\n"
"\n"
)
expected = (
"'use strict'; /* a beautiful comment */\n"
"\n"
"/*\n"
"a long comment that can take a lot of places so\n"
"we put it on several lines.\n"
"*/\n"
"\n"
"\n"
"const DATA = 1;\n"
"\n"
"/**\n"
" * Function docstring\n"
" */\n"
"function sum(a, b) {\n"
" \n"
" return a+b;\n"
"}\n"
"\n"
"const url = 'http://somewhere.com';\n"
"\n"
)
assert champollion.parser.helper.filter_comments(
content, filter_multiline_comment=False
) == expected
@pytest.mark.parametrize(
("content", "expected_content", "expected_collapsed_content"),
[
(
"const emptyObject = {};",
"const emptyObject = {};",
{}
),
(
"let test = {a: 1, b: 2, c: 3};",
"let test = {};",
{
1: "{a: 1, b: 2, c: 3}"
}
),
(
(
"const element = {\n"
" key1: value1,\n"
" key2: value2,\n"
" key3: value3,\n"
"};\n"
"\n"
"function sum(a, b) {\n"
" return a+b\n"
"}\n"
"\n"
),
(
"const element = {}\n"
"\n"
"\n"
"\n"
";\n"
"\n"
"function sum(a, b) {}\n"
"\n"
"\n"
"\n"
),
{
1: (
"{\n"
" key1: value1,\n"
" key2: value2,\n"
" key3: value3,\n"
"}"
),
7: "{\n"
" return a+b\n"
"}"
}
),
(
(
"class AwesomeClass {\n"
" constructor() {\n"
" this.data = 1;\n"
" }\n"
"\n"
" increase() {\n"
" this.data += 1;\n"
" }\n"
"}\n"
),
(
"class AwesomeClass {}\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"\n"
),
{
1: (
"{\n"
" constructor() {\n"
" this.data = 1;\n"
" }\n"
"\n"
" increase() {\n"
" this.data += 1;\n"
" }\n"
"}"
),
2: (
"{\n"
" this.data = 1;\n"
" }"
),
6: (
"{\n"
" this.data += 1;\n"
" }"
)
}
)
],
ids=[
"empty object",
"simple object",
"objects and functions on multiple lines",
"nested class"
]
)
def test_collapse_all(content, expected_content, expected_collapsed_content):
"""Collapse all objects, classes and functions."""
assert champollion.parser.helper.collapse_all(content) == (
expected_content, expected_collapsed_content
)
| [
"pytest.mark.parametrize"
] | [((79, 1414), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('content_lines', 'line_number', 'expected')", '[([\'/**\', \' * An function example.\', \' *\', \' * Detailed description.\', \' *\',\n \' * .. note::\', \' *\', \' * A note.\', \' */\', \'function sum(a, b) {\',\n \' return a+b;\', \'}\'], 10,\n """An function example.\n\nDetailed description.\n\n.. note::\n\n A note."""\n ), ([\'/** A cool data. */\', \'const Data = null\'], 2, \'A cool data.\'), (\n [\'/*\', \' * Incorrect docstring\', \' */\', \'function doSomething() {\',\n " console.log(\'something\');", \'}\'], 4, None), ([\'/*\', \'\',\n \' Incorrect docstring\', \'\', \'*/\', \'function doSomethingElse() {\',\n " console.log(\'something_else\');", \'}\'], 6, None), ([\n \'// Incorrect docstring\', \'function doSomethingElse() {\',\n " console.log(\'something_else\');", \'}\'], 2, None), ([\'\',\n \'function doSomethingElse() {\', " console.log(\'something_else\');",\n \'}\'], 2, None), ([\'/** A cool data. */\', \'const Data = null\'], 1, None)]'], {'ids': "['valid element line number with multiline docstring',\n 'valid element line number with one line docstring',\n 'valid element line number with incorrect docstring 1',\n 'valid element line number with incorrect docstring 2',\n 'valid element line number with incorrect docstring 3',\n 'valid element line number with no docstring', 'invalid line_number']"}), '((\'content_lines\', \'line_number\', \'expected\'), [([\n \'/**\', \' * An function example.\', \' *\', \' * Detailed description.\',\n \' *\', \' * .. note::\', \' *\', \' * A note.\', \' */\',\n \'function sum(a, b) {\', \' return a+b;\', \'}\'], 10,\n """An function example.\n\nDetailed description.\n\n.. note::\n\n A note."""\n ), ([\'/** A cool data. 
*/\', \'const Data = null\'], 2, \'A cool data.\'), (\n [\'/*\', \' * Incorrect docstring\', \' */\', \'function doSomething() {\',\n " console.log(\'something\');", \'}\'], 4, None), ([\'/*\', \'\',\n \' Incorrect docstring\', \'\', \'*/\', \'function doSomethingElse() {\',\n " console.log(\'something_else\');", \'}\'], 6, None), ([\n \'// Incorrect docstring\', \'function doSomethingElse() {\',\n " console.log(\'something_else\');", \'}\'], 2, None), ([\'\',\n \'function doSomethingElse() {\', " console.log(\'something_else\');",\n \'}\'], 2, None), ([\'/** A cool data. */\', \'const Data = null\'], 1, None)\n ], ids=[\'valid element line number with multiline docstring\',\n \'valid element line number with one line docstring\',\n \'valid element line number with incorrect docstring 1\',\n \'valid element line number with incorrect docstring 2\',\n \'valid element line number with incorrect docstring 3\',\n \'valid element line number with no docstring\', \'invalid line_number\'])\n', (102, 1414), False, 'import pytest\n'), ((7048, 8100), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('content', 'expected_content', 'expected_collapsed_content')", '[(\'const emptyObject = {};\', \'const emptyObject = {};\', {}), (\n \'let test = {a: 1, b: 2, c: 3};\', \'let test = {};\', {(1):\n \'{a: 1, b: 2, c: 3}\'}), (\n """const element = {\n key1: value1,\n key2: value2,\n key3: value3,\n};\n\nfunction sum(a, b) {\n return a+b\n}\n\n"""\n , """const element = {}\n\n\n\n;\n\nfunction sum(a, b) {}\n\n\n\n""", {(1):\n """{\n key1: value1,\n key2: value2,\n key3: value3,\n}""", (7):\n """{\n return a+b\n}"""}), (\n """class AwesomeClass {\n constructor() {\n this.data = 1;\n }\n\n increase() {\n this.data += 1;\n }\n}\n"""\n , \'class AwesomeClass {}\\n\\n\\n\\n\\n\\n\\n\\n\\n\', {(1):\n """{\n constructor() {\n this.data = 1;\n }\n\n increase() {\n this.data += 1;\n }\n}"""\n , (2): """{\n this.data = 1;\n }""", (6):\n """{\n this.data += 1;\n }"""})]'], {'ids': 
"['empty object', 'simple object', 'objects and functions on multiple lines',\n 'nested class']"}), '((\'content\', \'expected_content\',\n \'expected_collapsed_content\'), [(\'const emptyObject = {};\',\n \'const emptyObject = {};\', {}), (\'let test = {a: 1, b: 2, c: 3};\',\n \'let test = {};\', {(1): \'{a: 1, b: 2, c: 3}\'}), (\n """const element = {\n key1: value1,\n key2: value2,\n key3: value3,\n};\n\nfunction sum(a, b) {\n return a+b\n}\n\n"""\n , """const element = {}\n\n\n\n;\n\nfunction sum(a, b) {}\n\n\n\n""", {(1):\n """{\n key1: value1,\n key2: value2,\n key3: value3,\n}""", (7):\n """{\n return a+b\n}"""}), (\n """class AwesomeClass {\n constructor() {\n this.data = 1;\n }\n\n increase() {\n this.data += 1;\n }\n}\n"""\n , \'class AwesomeClass {}\\n\\n\\n\\n\\n\\n\\n\\n\\n\', {(1):\n """{\n constructor() {\n this.data = 1;\n }\n\n increase() {\n this.data += 1;\n }\n}"""\n , (2): """{\n this.data = 1;\n }""", (6):\n """{\n this.data += 1;\n }"""})], ids=[\'empty object\',\n \'simple object\', \'objects and functions on multiple lines\', \'nested class\']\n )\n', (7071, 8100), False, 'import pytest\n')] |
import os
import tbs.logger.log as logger
import tbs.helper.filedescriptor as fd
def checkRoot(message):
"""
Check if the user is root otherwise error out
"""
if os.geteuid() != 0:
logger.log(message, logger.LOG_ERROR)
raise Exception("You need root privileges to do this operation.")
def inCorrectDirectory(subpath="toslive"):
"""
try to check if the current directory is the directory containing the build files
"""
# check if the current repo is correct
result = fd.CMD(["git", "remote", "-v"]).execute()
if not result.exitcode == 0:
logger.log("Something went wrong when scanning the current directory for build files")
raise Exception(result.stderr)
if not "ODEX-TOS/tos-live" in result.stdout:
logger.log("Current directory does not contain build files, downloading files")
return False
result = fd.CMD(["git", "rev-parse", "--show-toplevel"]).execute()
if not result.exitcode == 0:
logger.log("Could not move to the correct location in the repo")
raise Exception(result.stderr)
os.chdir(result.stdout+"/"+subpath)
return True | [
"os.chdir",
"tbs.helper.filedescriptor.CMD",
"os.geteuid",
"tbs.logger.log.log"
] | [((1106, 1145), 'os.chdir', 'os.chdir', (["(result.stdout + '/' + subpath)"], {}), "(result.stdout + '/' + subpath)\n", (1114, 1145), False, 'import os\n'), ((178, 190), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (188, 190), False, 'import os\n'), ((205, 242), 'tbs.logger.log.log', 'logger.log', (['message', 'logger.LOG_ERROR'], {}), '(message, logger.LOG_ERROR)\n', (215, 242), True, 'import tbs.logger.log as logger\n'), ((602, 693), 'tbs.logger.log.log', 'logger.log', (['"""Something went wrong when scanning the current directory for build files"""'], {}), "(\n 'Something went wrong when scanning the current directory for build files')\n", (612, 693), True, 'import tbs.logger.log as logger\n'), ((785, 864), 'tbs.logger.log.log', 'logger.log', (['"""Current directory does not contain build files, downloading files"""'], {}), "('Current directory does not contain build files, downloading files')\n", (795, 864), True, 'import tbs.logger.log as logger\n'), ((998, 1062), 'tbs.logger.log.log', 'logger.log', (['"""Could not move to the correct location in the repo"""'], {}), "('Could not move to the correct location in the repo')\n", (1008, 1062), True, 'import tbs.logger.log as logger\n'), ((519, 550), 'tbs.helper.filedescriptor.CMD', 'fd.CMD', (["['git', 'remote', '-v']"], {}), "(['git', 'remote', '-v'])\n", (525, 550), True, 'import tbs.helper.filedescriptor as fd\n'), ((899, 946), 'tbs.helper.filedescriptor.CMD', 'fd.CMD', (["['git', 'rev-parse', '--show-toplevel']"], {}), "(['git', 'rev-parse', '--show-toplevel'])\n", (905, 946), True, 'import tbs.helper.filedescriptor as fd\n')] |
# Copyright (c) 2016-2020 <NAME>
# Licensed under the zlib/libpng License
# https://opensource.org/licenses/Zlib
# xusb: Generic USB test program
# Copyright © 2009-2012 <NAME> <<EMAIL>>
# Contributions to Mass Storage by <NAME>.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import sys
import os
import ctypes as ct
import libusb as usb
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <stdarg.h>
#if defined(_WIN32)
#define msleep(msecs) Sleep(msecs)
#else
#include <time.h>
#define msleep(msecs) nanosleep(&(struct timespec){msecs / 1000, (msecs * 1000000) % 1000000000UL}, NULL);
#endif
#if defined(_MSC_VER)
#define snprintf _snprintf
#endif
# Future versions of libusb will use usb_interface instead of interface
# in usb.config_descriptor => cater for that
#define usb_interface interface
# Global variables
binary_dump = False # bool
binary_name = None # str|None
extra_info = False # bool
force_device_request = False # bool # For WCID descriptor queries
def perr(fmt, *args):
print(fmt.format(*args), file=sys.stderr, end="")
def err_exit(errcode):
perr(" {}\n", usb.strerror(usb.error(errcode)))
return -1
B = lambda x: 1 if x != 0 else 0
be_to_int32 = lambda buf: (buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3]
RETRY_MAX = 5
REQUEST_SENSE_LENGTH = 0x12
INQUIRY_LENGTH = 0x24
READ_CAPACITY_LENGTH = 0x08
# HID Class-Specific Requests values.
# See section 7.2 of the HID specifications
HID_GET_REPORT = 0x01
HID_GET_IDLE = 0x02
HID_GET_PROTOCOL = 0x03
HID_SET_REPORT = 0x09
HID_SET_IDLE = 0x0A
HID_SET_PROTOCOL = 0x0B
HID_REPORT_TYPE_INPUT = 0x01
HID_REPORT_TYPE_OUTPUT = 0x02
HID_REPORT_TYPE_FEATURE = 0x03
# Mass Storage Requests values.
# See section 3 of the Bulk-Only Mass Storage Class specifications
BOMS_RESET = 0xFF
BOMS_GET_MAX_LUN = 0xFE
# Microsoft OS Descriptor
MS_OS_DESC_STRING_INDEX = 0xEE
MS_OS_DESC_STRING_LENGTH = 0x12
MS_OS_DESC_VENDOR_CODE_OFFSET = 0x10
#static const
ms_os_desc_string = (ct.c_uint8 * 16)(
MS_OS_DESC_STRING_LENGTH,
usb.LIBUSB_DT_STRING,
ord(b'M'), 0, ord(b'S'), 0, ord(b'F'), 0, ord(b'T'), 0,
ord(b'1'), 0, ord(b'0'), 0, ord(b'0'), 0,
)
# Section 5.1: Command Block Wrapper (CBW)
class command_block_wrapper(ct.Structure):
_fields_ = [
("dCBWSignature", (ct.c_uint8 * 4)),
("dCBWTag", ct.c_uint32),
("dCBWDataTransferLength", ct.c_uint32),
("bmCBWFlags", ct.c_uint8),
("bCBWLUN", ct.c_uint8),
("bCBWCBLength", ct.c_uint8),
("CBWCB", (ct.c_uint8 * 16)),
]
# Section 5.2: Command Status Wrapper (CSW)
class command_status_wrapper(ct.Structure):
_fields_ = [
("dCSWSignature", (ct.c_uint8 * 4)),
("dCSWTag", ct.c_uint32),
("dCSWDataResidue", ct.c_uint32),
("bCSWStatus", ct.c_uint8),
]
#static const
cdb_length = (ct.c_uint8 * 256)(
# 0 1 2 3 4 5 6 7 8 9 A B C D E F
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, # 0
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, # 1
10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10, # 2
10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10, # 3
10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10, # 4
10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10, # 5
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 6
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 7
16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16, # 8
16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16, # 9
12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12, # A
12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12, # B
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # C
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # D
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # E
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # F
)
cdb_length[255] = 0
test_type = ct.c_int
(
USE_GENERIC,
USE_PS3,
USE_XBOX,
USE_SCSI,
USE_HID
) = (0, 1, 2, 3, 4)
test_mode = USE_GENERIC
VID = 0 # ct.c_uint16
PID = 0 # ct.c_uint16
#static
#@annotate(buffer=unsigned char*, size=unsigned int)
def display_buffer_hex(buffer, size):
for i in range(0, size, 16):
print("\n {:08x} ".format(i), end="")
for j in range(16):
if i + j < size:
print("{:02x}".format(buffer[i + j]), end="")
else:
print(" ", end="")
print(" ", end="")
print(" ", end="")
for j in range(16):
if i + j < size:
if buffer[i + j] < 32 or buffer[i + j] > 126:
print(".", end="")
else:
print("%c", buffer[i + j], end="")
print()
#static
#@annotate(str|None, uuid=const ct.POINTER(ct.c_uint8))
def uuid_to_string(uuid):
if uuid == NULL:
return None
return ("{{{:02x}{:02x}{:02x}{:02x}-{:02x}{:02x}-{:02x}{:02x}"
"-{:02x}{:02x}-{:02x}{:02x}{:02x}{:02x}{:02x}{:02x}}}".format(
uuid[0], uuid[1], uuid[2], uuid[3], uuid[4], uuid[5], uuid[6], uuid[7],
uuid[8], uuid[9], uuid[10], uuid[11], uuid[12], uuid[13], uuid[14], uuid[15]))
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle))
def display_ps3_status(handle):
# The PS3 Controller is really a HID device that got its HID Report Descriptors
# removed by Sony
input_report = (ct.c_uint8 * 49)()
master_bt_address = (ct.c_uint8 * 8)()
device_bt_address = (ct.c_uint8 * 18)()
# Get the controller's bluetooth address of its master device
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_GET_REPORT,
0x03f5, 0,
master_bt_address,
ct.sizeof(master_bt_address),
100)
if r < 0:
return err_exit(r)
print("\nMaster's bluetooth address: "
"{:02X}:{:02X}:{:02X}:{:02X}:{:02X}:{:02X}".format(
master_bt_address[2], master_bt_address[3], master_bt_address[4],
master_bt_address[5], master_bt_address[6], master_bt_address[7]))
# Get the controller's bluetooth address
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_GET_REPORT,
0x03f2, 0,
device_bt_address,
ct.sizeof(device_bt_address),
100)
if r < 0:
return err_exit(r)
print("\nMaster's bluetooth address: "
"{:02X}:{:02X}:{:02X}:{:02X}:{:02X}:{:02X}".format(
device_bt_address[4], device_bt_address[5], device_bt_address[6],
device_bt_address[7], device_bt_address[8], device_bt_address[9]))
# Get the status of the controller's buttons via its HID report
print("\nReading PS3 Input Report...")
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_GET_REPORT,
(HID_REPORT_TYPE_INPUT << 8) | 0x01, 0,
input_report,
ct.sizeof(input_report),
1000)
if r < 0:
return err_exit(r)
pressed = input_report[2] # Direction pad plus start, select, and joystick buttons
if pressed == 0x01: print("\tSELECT pressed")
elif pressed == 0x02: print("\tLEFT 3 pressed")
elif pressed == 0x04: print("\tRIGHT 3 pressed")
elif pressed == 0x08: print("\tSTART presed")
elif pressed == 0x10: print("\tUP pressed")
elif pressed == 0x20: print("\tRIGHT pressed")
elif pressed == 0x40: print("\tDOWN pressed")
elif pressed == 0x80: print("\tLEFT pressed")
pressed = input_report[3] # Shapes plus top right and left buttons
if pressed == 0x01: print("\tLEFT 2 pressed")
elif pressed == 0x02: print("\tRIGHT 2 pressed")
elif pressed == 0x04: print("\tLEFT 1 pressed")
elif pressed == 0x08: print("\tRIGHT 1 presed")
elif pressed == 0x10: print("\tTRIANGLE pressed")
elif pressed == 0x20: print("\tCIRCLE pressed")
elif pressed == 0x40: print("\tCROSS pressed")
elif pressed == 0x80: print("\tSQUARE pressed")
print("\tPS button: {}".format(input_report[4]))
print("\tLeft Analog (X,Y): ({},{})".format(input_report[6], input_report[7]))
print("\tRight Analog (X,Y): ({},{})".format(input_report[8], input_report[9]))
print("\tL2 Value: {}\tR2 Value: {}".format(input_report[18], input_report[19]))
print("\tL1 Value: {}\tR1 Value: {}".format(input_report[20], input_report[21]))
print("\tRoll (x axis): {} Yaw (y axis): {} Pitch (z axis) {}".format(
#(((input_report[42] + 128) % 256) - 128),
int8_t(input_report[42]),
int8_t(input_report[44]),
int8_t(input_report[46])))
print("\tAcceleration: {}".format(int8_t(input_report[48])))
print()
return 0
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle))
def display_xbox_status(handle):
# The XBOX Controller is really a HID device that got its HID Report Descriptors
# removed by Microsoft.
# Input/Output reports described at http://euc.jp/periphs/xbox-controller.ja.html
input_report = (20 * ct.c_uint8)()
print("\nReading XBox Input Report...")
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_GET_REPORT,
(HID_REPORT_TYPE_INPUT << 8) | 0x00, 0,
input_report, 20,
1000)
if r < 0:
return err_exit(r)
print(" D-pad: {:02X}".format(input_report[2] & 0x0F))
print(" Start:{}, Back:{}, "
"Left Stick Press:{}, Right Stick Press:{}".format(
B(input_report[2] & 0x10), B(input_report[2] & 0x20),
B(input_report[2] & 0x40), B(input_report[2] & 0x80)))
# A, B, X, Y, Black, White are pressure sensitive
print(" A:{}, B:{}, X:{}, Y:{}, White:{}, Black:{}".format(
input_report[4], input_report[5], input_report[6],
input_report[7], input_report[9], input_report[8]))
print(" Left Trigger: {}, Right Trigger: {}".format(
input_report[10], input_report[11]))
print(" Left Analog (X,Y): ({},{})".format(
int16_t((input_report[13] << 8) | input_report[12]),
int16_t((input_report[15] << 8) | input_report[14])))
print(" Right Analog (X,Y): ({},{})".format(
int16_t((input_report[17] << 8) | input_report[16]),
int16_t((input_report[19] << 8) | input_report[18])))
return 0
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle), left=ct.c_uint8, right=ct.c_uint8)
def set_xbox_actuators(handle, left, right):
print("\nWriting XBox Controller Output Report...")
output_report = (6 * ct.c_uint8)()
output_report[1] = ct.sizeof(output_report)
output_report[3] = left
output_report[5] = right
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_OUT |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_SET_REPORT,
(HID_REPORT_TYPE_OUTPUT << 8) | 0x00, 0,
output_report, 6,
1000)
if r < 0:
return err_exit(r)
return 0
_tag = 1 # ct.c_uint32
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle), endpoint=int)
# ct.c_uint8 lun, ct.c_uint8* cdb, ct.c_uint8 direction, int data_length,
# ct.c_uint32* ret_tag):
def send_mass_storage_command(handle, endpoint, lun, cdb, direction, data_length, ret_tag):
global _tag
#int i, r;
cbw = command_block_wrapper()
if not cdb:
return -1
if endpoint & usb.LIBUSB_ENDPOINT_IN:
perr("send_mass_storage_command: cannot send command on IN endpoint\n")
return -1
#ct.c_uint8 cdb_len;
cdb_len = cdb_length[cdb[0]]
if cdb_len == 0 or cdb_len > ct.sizeof(cbw.CBWCB):
perr("send_mass_storage_command: don't know how to handle this command ({:02X}, length {})\n",
cdb[0], cdb_len)
return -1
cbw.dCBWSignature[0] = 'U'
cbw.dCBWSignature[1] = 'S'
cbw.dCBWSignature[2] = 'B'
cbw.dCBWSignature[3] = 'C'
ret_tag[0] = _tag
cbw.dCBWTag = _tag
cbw.dCBWDataTransferLength = data_length
cbw.bmCBWFlags = direction
cbw.bCBWLUN = lun
_tag += 1
# Subclass is 1 or 6 => cdb_len
cbw.bCBWCBLength = cdb_len
memcpy(cbw.CBWCB, cdb, cdb_len);
i = 0
while True:
# The transfer length must always be exactly 31 bytes.
size = ct.c_int()
r = usb.bulk_transfer(handle, endpoint, ct.cast(ct.pointer(cbw), ct.POINTER(ct.c_ubyte)), 31, ct.byref(size), 1000)
if r == usb.LIBUSB_ERROR_PIPE:
usb.clear_halt(handle, endpoint)
i += 1
if r != usb.LIBUSB_ERROR_PIPE or i >= RETRY_MAX:
break
if r != usb.LIBUSB_SUCCESS:
perr(" send_mass_storage_command: {}\n", usb.strerror(usb.error(r)))
return -1
print(" sent {} CDB bytes".format(cdb_len))
return 0
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle), endpoint=int, ct.c_uint32 expected_tag)
def get_mass_storage_status(handle, endpoint, expected_tag):
#int r;
csw = command_status_wrapper()
# The device is allowed to STALL this transfer. If it does, you have to
# clear the stall and try again.
i = 0;
while True:
size = ct.c_int()
r = usb.bulk_transfer(handle, endpoint, ct.cast(ct.pointer(csw), ct.POINTER(ct.c_ubyte)), 13, ct.byref(size), 1000)
if r == usb.LIBUSB_ERROR_PIPE:
usb.clear_halt(handle, endpoint)
i += 1
if r != usb.LIBUSB_ERROR_PIPE or i >= RETRY_MAX:
break
if r != usb.LIBUSB_SUCCESS:
perr(" get_mass_storage_status: {}\n", usb.strerror(usb.error(r)))
return -1
size = size.value
if size != 13:
perr(" get_mass_storage_status: received {} bytes (expected 13)\n", size)
return -1
if csw.dCSWTag != expected_tag:
perr(" get_mass_storage_status: mismatched tags (expected {:08X}, received {:08X})\n",
expected_tag, csw.dCSWTag)
return -1
# For this test, we ignore the dCSWSignature check for validity...
print(" Mass Storage Status: {:02X} ({})".format(
csw.bCSWStatus, "FAILED" if csw.bCSWStatus else "Success"))
if csw.dCSWTag != expected_tag:
return -1
if csw.bCSWStatus:
# REQUEST SENSE is appropriate only if bCSWStatus is 1, meaning that the
# command failed somehow. Larger values (2 in particular) mean that
# the command couldn't be understood.
if csw.bCSWStatus == 1:
return -2 # request Get Sense
else:
return -1
# In theory we also should check dCSWDataResidue. But lots of devices
# set it wrongly.
return 0
#static
#@annotate(handle=ct.POINTER(usb.device_handle), endpoint_in=int, endpoint_out=int)
def get_sense(handle, endpoint_in, endpoint_out):
# Request Sense
print("Request Sense:")
sense = (ct.c_uint8 * 18)()
cdb = (ct.c_uint8 * 16)() # SCSI Command Descriptor Block
cdb[0] = 0x03 # Request Sense
cdb[4] = REQUEST_SENSE_LENGTH
expected_tag = ct.c_uint32()
send_mass_storage_command(handle, endpoint_out, 0, cdb, usb.LIBUSB_ENDPOINT_IN, REQUEST_SENSE_LENGTH, ct.pointer(expected_tag))
size = ct.c_int()
rc = usb.bulk_transfer(handle, endpoint_in, ct.cast(ct.pointer(sense), ct.POINTER(ct.c_ubyte)), REQUEST_SENSE_LENGTH, ct.byref(size), 1000)
if rc < 0:
print("usb.bulk_transfer failed: {}".format(usb.error_name(rc)))
return
size = size.value
print(" received {} bytes".format(size))
if sense[0] != 0x70 and sense[0] != 0x71:
perr(" ERROR No sense data\n")
else:
perr(" ERROR Sense: {:02X} {:02X} {:02X}\n",
sense[2] & 0x0F, sense[12], sense[13])
# Strictly speaking, the get_mass_storage_status() call should come
# before these perr() lines. If the status is nonzero then we must
# assume there's no data in the buffer. For xusb it doesn't matter.
get_mass_storage_status(handle, endpoint_in, expected_tag)
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle), endpoint_in=int, endpoint_out=int)
def test_mass_storage(handle, endpoint_in, endpoint_out):
# Mass Storage device to test bulk transfers (non destructive test)
global binary_dump
global binary_name
#int r;
#ct.c_uint32 i
print("Reading Max LUN:")
lun = ct.c_uint8()
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
BOMS_GET_MAX_LUN,
0, 0,
ct.byref(lun), 1,
1000)
lun = lun.value
# Some devices send a STALL instead of the actual value.
# In such cases we should set lun to 0.
if r == 0:
lun = 0
elif r < 0:
perr(" Failed: {}".format(usb.strerror(usb.error(r))))
print(" Max LUN = {}".format(lun))
# Send Inquiry
print("Sending Inquiry:")
buffer = (ct.c_uint8 * 64)()
cdb = (ct.c_uint8 * 16)() # SCSI Command Descriptor Block
cdb[0] = 0x12 # Inquiry
cdb[4] = INQUIRY_LENGTH
expected_tag = ct.c_uint32()
send_mass_storage_command(handle, endpoint_out, lun, cdb, usb.LIBUSB_ENDPOINT_IN, INQUIRY_LENGTH, ct.pointer(expected_tag))
size = ct.c_int()
r = usb.bulk_transfer(handle, endpoint_in, ct.cast(ct.pointer(buffer), ct.POINTER(ct.c_ubyte)), INQUIRY_LENGTH, ct.byref(size), 1000)
if r < 0:
return err_exit(r)
size = size.value
print(" received {} bytes".format(size))
# The following strings are not zero terminated
vid = (ct.c_char * 9)()
pid = (ct.c_char * 9)()
rev = (ct.c_char * 5)()
for i in range(8):
vid[i] = buffer[8 + i]
pid[i] = buffer[16 + i]
rev[i / 2] = buffer[32 + i / 2] # instead of another loop
vid[8] = 0
pid[8] = 0
rev[4] = 0
print(" VID:PID:REV \"%8s\":\"%8s\":\"%4s\"".format(vid, pid, rev))
if get_mass_storage_status(handle, endpoint_in, expected_tag) == -2:
get_sense(handle, endpoint_in, endpoint_out)
# Read capacity
print("Reading Capacity:")
buffer = (ct.c_uint8 * 64)()
cdb = (ct.c_uint8 * 16)() # SCSI Command Descriptor Block
cdb[0] = 0x25 # Read Capacity
expected_tag = ct.c_uint32()
send_mass_storage_command(handle, endpoint_out, lun, cdb, usb.LIBUSB_ENDPOINT_IN, READ_CAPACITY_LENGTH, ct.pointer(expected_tag))
size = ct.c_int()
r = usb.bulk_transfer(handle, endpoint_in, ct.cast(ct.pointer(buffer), ct.POINTER(ct.c_ubyte)), READ_CAPACITY_LENGTH, ct.byref(size), 1000)
if r < 0:
return err_exit(r)
size = size.value
print(" received {} bytes".format(size))
max_lba = be_to_int32(buffer[0:])
block_size = be_to_int32(buffer[4:])
device_size = (max_lba + 1.0) * block_size / (1024 * 1024 * 1024)
print(" Max LBA: {:08X}, Block Size: {:08X} (%.2f GB)".format(
max_lba, block_size, device_size))
if get_mass_storage_status(handle, endpoint_in, expected_tag) == -2:
get_sense(handle, endpoint_in, endpoint_out)
# coverity[tainted_data]
try:
data = ct.cast(calloc(1, block_size), ct.POINTER(ct.c_ubyte)) # unsigned char*
except:
perr(" unable to allocate data buffer\n")
return -1
# Send Read
print("Attempting to read %u bytes:".format(block_size))
cdb = (ct.c_uint8 * 16)() # SCSI Command Descriptor Block
cdb[0] = 0x28 # Read(10)
cdb[8] = 0x01 # 1 block
expected_tag = ct.c_uint32()
send_mass_storage_command(handle, endpoint_out, lun, cdb, usb.LIBUSB_ENDPOINT_IN, block_size, ct.pointer(expected_tag))
size = ct.c_int()
usb.bulk_transfer(handle, endpoint_in, data, block_size, ct.byref(size), 5000)
size = size.value
print(" READ: received {} bytes".format(size))
if get_mass_storage_status(handle, endpoint_in, expected_tag) == -2:
get_sense(handle, endpoint_in, endpoint_out)
else:
display_buffer_hex(data, size)
if binary_dump:
try:
fd = open(binary_name, "w")
except: pass
else:
with fd:
if fd.fwrite(data, ct.c_size_t(size).value) != ct.c_uint(size).value:
perr(" unable to write binary data\n")
free(data)
return 0
# HID
#static
#@annotate(int, ct.c_uint8* hid_report_descriptor, int size, int type)
def get_hid_record_size(hid_report_descriptor, size, type):
record_size = [0, 0, 0] # [int, ...]
nb_bits = 0 # int
nb_items = 0 # int
found_record_marker = False
j = 0
i = hid_report_descriptor[0] + 1
while i < size:
offset = (hid_report_descriptor[i] & 0x03) + 1 # ct.c_uint8
if offset == 4:
offset = 5
kind_of = hid_report_descriptor[i] & 0xFC
if kind_of == 0x74: # bitsize
nb_bits = hid_report_descriptor[i + 1]
elif kind_of == 0x94: # count
nb_items = 0
for j in range(1, offset):
nb_items = ct.c_uint32(hid_report_descriptor[i + j]).value << (8 * (j - 1))
i = offset # ???
elif kind_of == 0x80: # input
found_record_marker = True
j = 0
elif kind_of == 0x90: # output
found_record_marker = True
j = 1
elif kind_of == 0xb0: # feature
found_record_marker = True
j = 2
elif kind_of == 0xC0: # end of collection
nb_items = 0
nb_bits = 0
else:
i += offset
continue
if found_record_marker:
found_record_marker = False
record_size[j] += nb_items * nb_bits
i += offset
if type < HID_REPORT_TYPE_INPUT or type > HID_REPORT_TYPE_FEATURE:
return 0
else:
return (record_size[type - HID_REPORT_TYPE_INPUT] + 7) / 8
#static
#@annotate(int, handle=ct.POINTER(usb.device_handle), endpoint_in=int)
def test_hid(handle, endpoint_in):
global binary_dump
global binary_name
#int r;
hid_report_descriptor = (ct.c_uint8 * 256)()
report_buffer = ct.POINTER(ct.c_uint8)
print("\nReading HID Report Descriptors:")
descriptor_size = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_STANDARD |
usb.LIBUSB_RECIPIENT_INTERFACE,
usb.LIBUSB_REQUEST_GET_DESCRIPTOR,
usb.LIBUSB_DT_REPORT << 8, 0,
hid_report_descriptor,
ct.sizeof(hid_report_descriptor),
1000)
if descriptor_size < 0:
print(" Failed")
return -1
display_buffer_hex(hid_report_descriptor, descriptor_size)
if binary_dump:
try:
fd = open(binary_name, "w")
except: pass
else:
with fd:
if fd.fwrite(hid_report_descriptor, descriptor_size) != descriptor_size:
print(" Error writing descriptor to file")
size = get_hid_record_size(hid_report_descriptor, descriptor_size, HID_REPORT_TYPE_FEATURE)
if size <= 0:
print("\nSkipping Feature Report readout (None detected)")
else:
report_buffer = ct.cast(calloc(size, 1), ct.POINTER(ct.c_uint8))
if not report_buffer:
return -1
print("\nReading Feature Report (length {})...".format(size))
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_GET_REPORT,
(HID_REPORT_TYPE_FEATURE << 8) | 0, 0,
report_buffer, ct.c_uint16(size),
5000)
if r >= 0:
display_buffer_hex(report_buffer, size)
else:
if r == usb.LIBUSB_ERROR_NOT_FOUND:
print(" No Feature Report available for this device")
elif r == usb.LIBUSB_ERROR_PIPE:
print(" Detected stall - resetting pipe...")
usb.clear_halt(handle, 0)
else:
print(" Error: {}".format(usb.strerror(usb.error(r))))
free(report_buffer)
size = get_hid_record_size(hid_report_descriptor, descriptor_size, HID_REPORT_TYPE_INPUT)
if size <= 0:
print("\nSkipping Input Report readout (None detected)")
else:
report_buffer = ct.cast(calloc(size, 1), ct.POINTER(ct.c_uint8))
if not report_buffer:
return -1
print("\nReading Input Report (length {})...".format(size))
r = usb.control_transfer(handle,
usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_CLASS |
usb.LIBUSB_RECIPIENT_INTERFACE,
HID_GET_REPORT,
(HID_REPORT_TYPE_INPUT << 8) | 0x00, 0,
report_buffer, ct.c_uint16(size),
5000)
if r >= 0:
display_buffer_hex(report_buffer, size)
else:
if r == usb.LIBUSB_ERROR_TIMEOUT:
print(" Timeout! Please make sure you act on the device within the 5 seconds allocated...")
elif r == usb.LIBUSB_ERROR_PIPE:
print(" Detected stall - resetting pipe...")
usb.clear_halt(handle, 0)
else:
print(" Error: {}".format(usb.strerror(usb.error(r))))
# Attempt a bulk read from endpoint 0 (this should just return a raw input report)
print("\nTesting interrupt read using endpoint {:02X}...".format(endpoint_in))
r = usb.interrupt_transfer(handle, endpoint_in, report_buffer, size, ct.byref(size), 5000)
if r >= 0:
display_buffer_hex(report_buffer, size)
else:
print(" {}".format(usb.strerror(usb.error(r))))
free(report_buffer)
return 0
#static
#@annotate(handle=ct.POINTER(usb.device_handle), ct.c_uint8 bRequest, int iface_number)
def read_ms_winsub_feature_descriptors(handle, bRequest, iface_number):
# Read the MS WinUSB Feature Descriptors, that are used on Windows 8 for automated driver installation
MAX_OS_FD_LENGTH = 256
#int r;
os_desc = (ct.c_uint8 * MAX_OS_FD_LENGTH)()
class struct_os_fd(ct.Structure):
_fields_ = [
("desc", ct.c_char_p),
("recipient", ct.c_uint8),
("index", ct.c_uint16),
("header_size", ct.c_uint16),
]
os_fd = [
struct_os_fd(b"Extended Compat ID", usb.LIBUSB_RECIPIENT_DEVICE, 0x0004, 0x10),
struct_os_fd(b"Extended Properties", usb.LIBUSB_RECIPIENT_INTERFACE, 0x0005, 0x0A),
]
if iface_number < 0:
return
# WinUSB has a limitation that forces wIndex to the interface number when issuing
# an Interface Request. To work around that, we can force a Device Request for
# the Extended Properties, assuming the device answers both equally.
if force_device_request:
os_fd[1].recipient = usb.LIBUSB_RECIPIENT_DEVICE
for i in range(2):
print("\nReading {} OS Feature Descriptor (wIndex = 0x%04d):".format(
os_fd[i].desc, os_fd[i].index))
# Read the header part
r = usb.control_transfer(handle,
ct.c_uint8(usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_VENDOR |
os_fd[i].recipient),
bRequest,
ct.c_uint16((iface_number << 8) | 0x00), os_fd[i].index,
os_desc, os_fd[i].header_size,
1000)
if r < os_fd[i].header_size:
perr(" Failed: {}", usb.strerror(usb.error(r)) if r < 0 else "header size is too small")
return
le_type_punning_IS_fine = ct.cast(os_desc, ct.c_void_p)
length = ct.cast(le_type_punning_IS_fine, ct.POINTER(ct.c_uint32))[0].value # ct.c_uint32
length = min(length, MAX_OS_FD_LENGTH)
# Read the full feature descriptor
r = usb.control_transfer(handle,
ct.c_uint8(usb.LIBUSB_ENDPOINT_IN |
usb.LIBUSB_REQUEST_TYPE_VENDOR |
os_fd[i].recipient),
bRequest,
ct.c_uint16((iface_number << 8) | 0x00), os_fd[i].index,
os_desc, ct.c_uint16(length),
1000)
if r < 0:
perr(" Failed: {}", usb.strerror(usb.error(r)))
return
else:
display_buffer_hex(os_desc, r)
#@annotate(dev_cap=ct.POINTER(usb.bos_dev_capability_descriptor))
def print_device_cap(dev_cap):
if dev_cap[0].bDevCapabilityType == usb.LIBUSB_BT_USB_2_0_EXTENSION:
usb_2_0_ext = ct.POINTER(usb.usb_2_0_extension_descriptor)()
usb.get_usb_2_0_extension_descriptor(None, dev_cap, ct.byref(usb_2_0_ext))
if usb_2_0_ext:
print(" USB 2.0 extension:")
print(" attributes : {:02X}".format(usb_2_0_ext[0].bmAttributes))
usb.free_usb_2_0_extension_descriptor(usb_2_0_ext)
elif dev_cap[0].bDevCapabilityType == usb.LIBUSB_BT_SS_USB_DEVICE_CAPABILITY:
ss_usb_device_cap = ct.POINTER(usb.ss_usb_device_capability_descriptor)()
usb.get_ss_usb_device_capability_descriptor(None, dev_cap, ct.byref(ss_usb_device_cap))
if ss_usb_device_cap:
print(" USB 3.0 capabilities:")
print(" attributes : {:02X}".format(ss_usb_device_cap[0].bmAttributes))
print(" supported speeds : {:04X}".format(ss_usb_device_cap[0].wSpeedSupported))
print(" supported functionality: {:02X}".format(ss_usb_device_cap[0].bFunctionalitySupport))
usb.free_ss_usb_device_capability_descriptor(ss_usb_device_cap)
elif dev_cap[0].bDevCapabilityType == usb.LIBUSB_BT_CONTAINER_ID:
container_id = ct.POINTER(usb.container_id_descriptor)()
usb.get_container_id_descriptor(None, dev_cap, ct.byref(container_id))
if container_id:
print(" Container ID:\n {}".format(uuid_to_string(container_id[0].ContainerID)))
usb.free_container_id_descriptor(container_id)
else:
print(" Unknown BOS device capability {:02x}:".format(dev_cap[0].bDevCapabilityType))
#static
#@annotate(int, ct.c_uint16 vid, ct.c_uint16 pid)
def test_device(vid, pid):
#int r;
speed_name = [
"Unknown",
"1.5 Mbit/s (USB LowSpeed)",
"12 Mbit/s (USB FullSpeed)",
"480 Mbit/s (USB HighSpeed)",
"5000 Mbit/s (USB SuperSpeed)",
"10000 Mbit/s (USB SuperSpeedPlus)",
]
print("Opening device {:04X}:{:04X}...".format(vid, pid))
#handle = ct.POINTER(usb.device_handle)()
handle = usb.open_device_with_vid_pid(None, vid, pid)
if not handle:
perr(" Failed.\n")
return -1
endpoint_in = 0 # default IN endpoint
endpoint_out = 0 # default OUT endpoint
try:
dev = usb.get_device(handle) # usb.device*
bus = usb.get_bus_number(dev) # ct.c_uint8
if extra_info:
port_path = (ct.c_uint8 * 8)()
r = usb.get_port_numbers(dev, port_path, ct.sizeof(port_path))
if r > 0:
print("\nDevice properties:")
print(" bus number: {}".format(bus))
print(" port path: {}".format(port_path[0]), end="")
for i in range(1, r):
print("->{}".format(port_path[i]), end="")
print(" (from root hub)")
r = usb.get_device_speed(dev)
if r < 0 or r > 5: r = 0
print(" speed: {}".format(speed_name[r]))
print("\nReading device descriptor:")
dev_desc = usb.device_descriptor()
r = usb.get_device_descriptor(dev, ct.byref(dev_desc))
if r < 0:
return err_exit(r)
print(" length: {}".format(dev_desc.bLength))
print(" device class: {}".format(dev_desc.bDeviceClass))
print(" S/N: {}".format(dev_desc.iSerialNumber))
print(" VID:PID: {:04X}:{:04X}".format(dev_desc.idVendor,
dev_desc.idProduct))
print(" bcdDevice: {:04X}".format(dev_desc.bcdDevice))
print(" iMan:iProd:iSer: {}:{}:{}".format(
dev_desc.iManufacturer, dev_desc.iProduct, dev_desc.iSerialNumber))
print(" nb confs: {}".format(dev_desc.bNumConfigurations))
# Copy the string descriptors for easier parsing
string_index = (ct.c_uint8 * 3)() # indexes of the string descriptors
string_index[0] = dev_desc.iManufacturer
string_index[1] = dev_desc.iProduct
string_index[2] = dev_desc.iSerialNumber
print("\nReading BOS descriptor: ", end="")
bos_desc = usb.bos_descriptor*()
if usb.get_bos_descriptor(handle, ct.byref(bos_desc)) == usb.LIBUSB_SUCCESS:
print("{} caps".format(bos_desc[0].bNumDeviceCaps))
for i in range(bos_desc[0].bNumDeviceCaps):
print_device_cap(bos_desc[0].dev_capability[i])
usb.free_bos_descriptor(bos_desc)
else:
print("no descriptor")
print("\nReading first configuration descriptor:")
conf_desc = usb.config_descriptor*()
r = usb.get_config_descriptor(dev, 0, ct.byref(conf_desc))
if r < 0:
return err_exit(r)
nb_ifaces = conf_desc[0].bNumInterfaces # int
print(" nb interfaces: {}".format(nb_ifaces))
first_iface = (conf_desc[0].usb_interface[0].altsetting[0].bInterfaceNumber
if nb_ifaces > 0 else -1)
for i in range(nb_ifaces):
usb_interface = conf_desc[0].usb_interface[i]
print(" interface[{}]: id = {}".format(
i, usb_interface.altsetting[0].bInterfaceNumber))
for j in range(usb_interface.num_altsetting):
altsetting = usb_interface.altsetting[j]
print("interface[{}].altsetting[{}]: num endpoints = {}".format(
i, j, altsetting.bNumEndpoints))
print(" Class.SubClass.Protocol: {:02X}.{:02X}.{:02X}".format(
altsetting.bInterfaceClass,
altsetting.bInterfaceSubClass,
altsetting.bInterfaceProtocol))
if (altsetting.bInterfaceClass == usb.LIBUSB_CLASS_MASS_STORAGE and
(altsetting.bInterfaceSubClass == 0x01 or
altsetting.bInterfaceSubClass == 0x06) and
altsetting.bInterfaceProtocol == 0x50):
# Mass storage devices that can use basic SCSI commands
test_mode = USE_SCSI
for k in range(altsetting.bNumEndpoints):
endpoint = altsetting.endpoint[k] # const usb.endpoint_descriptor*
print(" endpoint[{}].address: {:02X}".format(
k, endpoint.bEndpointAddress))
# Use the first interrupt or bulk IN/OUT endpoints as default for testing
if ((endpoint.bmAttributes & usb.LIBUSB_TRANSFER_TYPE_MASK) &
(usb.LIBUSB_TRANSFER_TYPE_BULK | usb.LIBUSB_TRANSFER_TYPE_INTERRUPT)):
if endpoint.bEndpointAddress & usb.LIBUSB_ENDPOINT_IN:
if not endpoint_in:
endpoint_in = endpoint.bEndpointAddress
else:
if not endpoint_out:
endpoint_out = endpoint.bEndpointAddress
print(" max packet size: {:04X}".format(endpoint.wMaxPacketSize))
print(" polling interval: {:02X}".format(endpoint.bInterval))
ep_comp = ct.POINTER(usb.ss_endpoint_companion_descriptor)()
usb.get_ss_endpoint_companion_descriptor(None, ct.byref(altsetting.endpoint[k]),
ct.byref(ep_comp))
if ep_comp:
print(" max burst: {:02X} (USB 3.0)".format(ep_comp[0].bMaxBurst))
print(" bytes per interval: {:04X} (USB 3.0)".format(ep_comp[0].wBytesPerInterval))
usb.free_ss_endpoint_companion_descriptor(ep_comp)
usb.free_config_descriptor(conf_desc)
usb.set_auto_detach_kernel_driver(handle, 1)
for iface in range(nb_ifaces):
print("\nClaiming interface {}...".format(iface))
r = usb.claim_interface(handle, iface)
if r != usb.LIBUSB_SUCCESS:
perr(" Failed.\n")
print("\nReading string descriptors:")
string = (ct.c_char * 128)()
for i in range(3):
if string_index[i] == 0:
continue
if usb.get_string_descriptor_ascii(handle, string_index[i],
ct.cast(string, ct.POINTER(ct.c_ubyte)), ct.sizeof(string)) > 0:
print(" String ({:#04X}): \"{}\"".format(string_index[i], string))
# Read the OS String Descriptor
r = usb.get_string_descriptor(handle, MS_OS_DESC_STRING_INDEX, 0,
ct.cast(string, ct.POINTER(ct.c_ubyte)), MS_OS_DESC_STRING_LENGTH)
if r == MS_OS_DESC_STRING_LENGTH and memcmp(ms_os_desc_string, string, sizeof(ms_os_desc_string)) == 0:
# If this is a Microsoft OS String Descriptor,
# attempt to read the WinUSB extended Feature Descriptors
read_ms_winsub_feature_descriptors(handle, string[MS_OS_DESC_VENDOR_CODE_OFFSET], first_iface)
if test_mode == USE_PS3:
r = display_ps3_status(handle)
if r < 0:
return err_exit(r)
elif test_mode == USE_XBOX:
r = display_xbox_status(handle)
if r < 0:
return err_exit(r)
r = set_xbox_actuators(handle, 128, 222)
if r < 0:
return err_exit(r)
msleep(2000)
r = set_xbox_actuators(handle, 0, 0)
if r < 0:
return err_exit(r)
elif test_mode == USE_HID:
test_hid(handle, endpoint_in)
elif test_mode == USE_SCSI:
r = test_mass_storage(handle, endpoint_in, endpoint_out)
if r < 0:
return err_exit(r)
elif test_mode == USE_GENERIC:
pass
print()
for iface in range(nb_ifaces):
print("Releasing interface {}...".format(iface))
usb.release_interface(handle, iface)
print("Closing device...")
finally:
usb.close(handle)
return 0
def main(argv=sys.argv):
    '''Entry point: parse command-line options and run the selected USB test.

    Parameters
    ----------
    argv : list of str
        Command-line arguments. The default is captured at import time
        (``sys.argv`` at definition), which matches the original behavior.

    Returns
    -------
    int
        0 on success (or when usage/help was displayed), 1 on bad
        arguments, or a negative libusb error code from ``usb.init()``.
    '''
    global VID, PID
    global test_mode
    global binary_dump
    global binary_name
    show_help  = False  # bool
    debug_mode = False  # bool
    error_lang = None   # char*
    # Default to generic, expecting VID:PID
    VID = 0
    PID = 0
    test_mode = USE_GENERIC
    # Big-endian check: the low-address byte of the 16-bit value 0xBE00
    # reads back as 0xBE only on a big-endian CPU.
    endian_test = ct.c_uint16(0xBE00)
    if ct.cast(ct.pointer(endian_test), ct.POINTER(ct.c_uint8))[0] == 0xBE:
        print("Despite their natural superiority for end users, big endian\n"
              "CPUs are not supported with this program, sorry.")
        return 0

    # A while-loop is required here: options that take a value ('-b', '-l')
    # must consume the following argument. The original `for j in range(...)`
    # loop silently ignored `j += 1`, so the option's value was re-processed
    # as a fresh argument on the next iteration.
    j = 1
    while j < len(argv):
        arg = argv[j]
        if arg[0] in ('-', '/') and len(arg) >= 2:
            opt = arg[1]
            if opt == 'd':
                debug_mode = True
            elif opt == 'i':
                # NOTE(review): `extra_info` is not declared global here, so this
                # assignment has no effect outside this function -- confirm intent.
                extra_info = True
            elif opt == 'w':
                # NOTE(review): same concern as `-i` above for `force_device_request`.
                force_device_request = True
            elif opt == 'b':
                j += 1
                if j >= len(argv) or argv[j][0] in ('-', '/'):
                    print("   Option -b requires a file name")
                    return 1
                binary_name = argv[j]
                binary_dump = True
            elif opt == 'l':
                j += 1
                if j >= len(argv) or argv[j][0] in ('-', '/'):
                    print("   Option -l requires an ISO 639-1 language parameter")
                    return 1
                error_lang = argv[j]
            elif opt == 'j':
                # OLIMEX ARM-USB-TINY JTAG, 2 channel composite device - 2 interfaces
                if not VID and not PID:
                    VID = 0x15BA
                    PID = 0x0004
            elif opt == 'k':
                # Generic 2 GB USB Key (SCSI Transparent/Bulk Only) - 1 interface
                if not VID and not PID:
                    VID = 0x0204
                    PID = 0x6025
            # The following tests will force VID:PID if already provided
            elif opt == 'p':
                # Sony PS3 Controller - 1 interface
                VID = 0x054C
                PID = 0x0268
                test_mode = USE_PS3
            elif opt == 's':
                # Microsoft Sidewinder Precision Pro Joystick - 1 HID interface
                VID = 0x045E
                PID = 0x0008
                test_mode = USE_HID
            elif opt == 'x':
                # Microsoft XBox Controller Type S - 1 interface
                VID = 0x045E
                PID = 0x0289
                test_mode = USE_XBOX
            else:
                show_help = True
        elif ':' in arg:
            # Positional "vid:pid" argument in hexadecimal. (The original code
            # called the C function sscanf(), which does not exist in Python
            # and would have raised NameError at runtime.)
            vid_str, _, pid_str = arg.partition(':')
            try:
                VID = int(vid_str, 16) & 0xFFFF
                PID = int(pid_str, 16) & 0xFFFF
            except ValueError:
                print("   Please specify VID & PID as \"vid:pid\" in hexadecimal format")
                return 1
        else:
            show_help = True
        j += 1

    if show_help or len(argv) == 1 or len(argv) > 7:
        print("usage: {} [-h] [-d] [-i] [-k] [-b file] [-l lang] [-j] [-x] [-s] [-p] [-w] [vid:pid]".format(argv[0]))
        print("   -h      : display usage")
        print("   -d      : enable debug output")
        print("   -i      : print topology and speed info")
        print("   -j      : test composite FTDI based JTAG device")
        print("   -k      : test Mass Storage device")
        print("   -b file : dump Mass Storage data to file 'file'")
        print("   -p      : test Sony PS3 SixAxis controller")
        print("   -s      : test Microsoft Sidewinder Precision Pro (HID)")
        print("   -x      : test Microsoft XBox Controller Type S")
        print("   -l lang : language to report errors in (ISO 639-1)")
        print("   -w      : force the use of device requests when querying WCID descriptors")
        print("If only the vid:pid is provided, xusb attempts to run the most appropriate test")
        return 0

    # xusb is commonly used as a debug tool, so it's convenient to have debug output
    # during usb.init(), but since we can't call on usb.set_option() before usb.init(),
    # we use the env variable method
    old_dbg_str = os.environ.get("LIBUSB_DEBUG", None)
    if debug_mode:
        try:
            os.environ["LIBUSB_DEBUG"] = "4"  # usb.LIBUSB_LOG_LEVEL_DEBUG
        except Exception:
            print("Unable to set debug level")

    version = usb.get_version()[0]
    print("Using libusb v{}.{}.{}.{}\n".format(
          version.major, version.minor, version.micro, version.nano))
    r = usb.init(None)
    if r < 0:
        return r
    try:
        # If not set externally, and no debug option was given, use info log level
        if old_dbg_str is None and not debug_mode:
            usb.set_option(None, usb.LIBUSB_OPTION_LOG_LEVEL, usb.LIBUSB_LOG_LEVEL_INFO)
        if error_lang is not None:
            r = usb.setlocale(error_lang)
            if r < 0:
                print("Invalid or unsupported locale '{}': {}".format(
                      error_lang, usb.strerror(usb.error(r))))
        test_device(VID, PID)
    finally:
        usb.exit(None)

    if debug_mode:
        # char string[256]; -- leftover from the C original: the formatted
        # string is built but never used afterwards.
        string = "LIBUSB_DEBUG={}".format("" if old_dbg_str is None else old_dbg_str)

    return 0
sys.exit(main())
| [
"libusb.get_device_speed",
"libusb.release_interface",
"libusb.device_descriptor",
"ctypes.pointer",
"libusb.free_bos_descriptor",
"libusb.close",
"libusb.free_config_descriptor",
"libusb.error_name",
"ctypes.c_uint",
"libusb.control_transfer",
"ctypes.cast",
"ctypes.c_int",
"libusb.claim_in... | [((10679, 10880), 'libusb.control_transfer', 'usb.control_transfer', (['handle', '(usb.LIBUSB_ENDPOINT_IN | usb.LIBUSB_REQUEST_TYPE_CLASS | usb.\n LIBUSB_RECIPIENT_INTERFACE)', 'HID_GET_REPORT', '(HID_REPORT_TYPE_INPUT << 8 | 0)', '(0)', 'input_report', '(20)', '(1000)'], {}), '(handle, usb.LIBUSB_ENDPOINT_IN | usb.\n LIBUSB_REQUEST_TYPE_CLASS | usb.LIBUSB_RECIPIENT_INTERFACE,\n HID_GET_REPORT, HID_REPORT_TYPE_INPUT << 8 | 0, 0, input_report, 20, 1000)\n', (10699, 10880), True, 'import libusb as usb\n'), ((12389, 12413), 'ctypes.sizeof', 'ct.sizeof', (['output_report'], {}), '(output_report)\n', (12398, 12413), True, 'import ctypes as ct\n'), ((12480, 12683), 'libusb.control_transfer', 'usb.control_transfer', (['handle', '(usb.LIBUSB_ENDPOINT_OUT | usb.LIBUSB_REQUEST_TYPE_CLASS | usb.\n LIBUSB_RECIPIENT_INTERFACE)', 'HID_SET_REPORT', '(HID_REPORT_TYPE_OUTPUT << 8 | 0)', '(0)', 'output_report', '(6)', '(1000)'], {}), '(handle, usb.LIBUSB_ENDPOINT_OUT | usb.\n LIBUSB_REQUEST_TYPE_CLASS | usb.LIBUSB_RECIPIENT_INTERFACE,\n HID_SET_REPORT, HID_REPORT_TYPE_OUTPUT << 8 | 0, 0, output_report, 6, 1000)\n', (12500, 12683), True, 'import libusb as usb\n'), ((17010, 17023), 'ctypes.c_uint32', 'ct.c_uint32', ([], {}), '()\n', (17021, 17023), True, 'import ctypes as ct\n'), ((17167, 17177), 'ctypes.c_int', 'ct.c_int', ([], {}), '()\n', (17175, 17177), True, 'import ctypes as ct\n'), ((18331, 18343), 'ctypes.c_uint8', 'ct.c_uint8', ([], {}), '()\n', (18341, 18343), True, 'import ctypes as ct\n'), ((19225, 19238), 'ctypes.c_uint32', 'ct.c_uint32', ([], {}), '()\n', (19236, 19238), True, 'import ctypes as ct\n'), ((19378, 19388), 'ctypes.c_int', 'ct.c_int', ([], {}), '()\n', (19386, 19388), True, 'import ctypes as ct\n'), ((20386, 20399), 'ctypes.c_uint32', 'ct.c_uint32', ([], {}), '()\n', (20397, 20399), True, 'import ctypes as ct\n'), ((20545, 20555), 'ctypes.c_int', 'ct.c_int', ([], {}), '()\n', (20553, 20555), True, 'import ctypes as ct\n'), ((21635, 
21648), 'ctypes.c_uint32', 'ct.c_uint32', ([], {}), '()\n', (21646, 21648), True, 'import ctypes as ct\n'), ((21784, 21794), 'ctypes.c_int', 'ct.c_int', ([], {}), '()\n', (21792, 21794), True, 'import ctypes as ct\n'), ((24290, 24312), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_uint8'], {}), '(ct.c_uint8)\n', (24300, 24312), True, 'import ctypes as ct\n'), ((33641, 33685), 'libusb.open_device_with_vid_pid', 'usb.open_device_with_vid_pid', (['None', 'vid', 'pid'], {}), '(None, vid, pid)\n', (33669, 33685), True, 'import libusb as usb\n'), ((42126, 42144), 'ctypes.c_uint16', 'ct.c_uint16', (['(48640)'], {}), '(48640)\n', (42137, 42144), True, 'import ctypes as ct\n'), ((46238, 46274), 'os.environ.get', 'os.environ.get', (['"""LIBUSB_DEBUG"""', 'None'], {}), "('LIBUSB_DEBUG', None)\n", (46252, 46274), False, 'import os\n'), ((46607, 46621), 'libusb.init', 'usb.init', (['None'], {}), '(None)\n', (46615, 46621), True, 'import libusb as usb\n'), ((6819, 6847), 'ctypes.sizeof', 'ct.sizeof', (['master_bt_address'], {}), '(master_bt_address)\n', (6828, 6847), True, 'import ctypes as ct\n'), ((7603, 7631), 'ctypes.sizeof', 'ct.sizeof', (['device_bt_address'], {}), '(device_bt_address)\n', (7612, 7631), True, 'import ctypes as ct\n'), ((8477, 8500), 'ctypes.sizeof', 'ct.sizeof', (['input_report'], {}), '(input_report)\n', (8486, 8500), True, 'import ctypes as ct\n'), ((14289, 14299), 'ctypes.c_int', 'ct.c_int', ([], {}), '()\n', (14297, 14299), True, 'import ctypes as ct\n'), ((15161, 15171), 'ctypes.c_int', 'ct.c_int', ([], {}), '()\n', (15169, 15171), True, 'import ctypes as ct\n'), ((17130, 17154), 'ctypes.pointer', 'ct.pointer', (['expected_tag'], {}), '(expected_tag)\n', (17140, 17154), True, 'import ctypes as ct\n'), ((17300, 17314), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (17308, 17314), True, 'import ctypes as ct\n'), ((18668, 18681), 'ctypes.byref', 'ct.byref', (['lun'], {}), '(lun)\n', (18676, 18681), True, 'import ctypes as ct\n'), ((19341, 19365), 
'ctypes.pointer', 'ct.pointer', (['expected_tag'], {}), '(expected_tag)\n', (19351, 19365), True, 'import ctypes as ct\n'), ((19505, 19519), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (19513, 19519), True, 'import ctypes as ct\n'), ((20508, 20532), 'ctypes.pointer', 'ct.pointer', (['expected_tag'], {}), '(expected_tag)\n', (20518, 20532), True, 'import ctypes as ct\n'), ((20678, 20692), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (20686, 20692), True, 'import ctypes as ct\n'), ((21747, 21771), 'ctypes.pointer', 'ct.pointer', (['expected_tag'], {}), '(expected_tag)\n', (21757, 21771), True, 'import ctypes as ct\n'), ((21856, 21870), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (21864, 21870), True, 'import ctypes as ct\n'), ((24893, 24925), 'ctypes.sizeof', 'ct.sizeof', (['hid_report_descriptor'], {}), '(hid_report_descriptor)\n', (24902, 24925), True, 'import ctypes as ct\n'), ((30524, 30553), 'ctypes.cast', 'ct.cast', (['os_desc', 'ct.c_void_p'], {}), '(os_desc, ct.c_void_p)\n', (30531, 30553), True, 'import ctypes as ct\n'), ((33866, 33888), 'libusb.get_device', 'usb.get_device', (['handle'], {}), '(handle)\n', (33880, 33888), True, 'import libusb as usb\n'), ((33919, 33942), 'libusb.get_bus_number', 'usb.get_bus_number', (['dev'], {}), '(dev)\n', (33937, 33942), True, 'import libusb as usb\n'), ((34657, 34680), 'libusb.device_descriptor', 'usb.device_descriptor', ([], {}), '()\n', (34678, 34680), True, 'import libusb as usb\n'), ((39434, 39471), 'libusb.free_config_descriptor', 'usb.free_config_descriptor', (['conf_desc'], {}), '(conf_desc)\n', (39460, 39471), True, 'import libusb as usb\n'), ((39481, 39525), 'libusb.set_auto_detach_kernel_driver', 'usb.set_auto_detach_kernel_driver', (['handle', '(1)'], {}), '(handle, 1)\n', (39514, 39525), True, 'import libusb as usb\n'), ((41767, 41784), 'libusb.close', 'usb.close', (['handle'], {}), '(handle)\n', (41776, 41784), True, 'import libusb as usb\n'), ((46460, 46477), 
'libusb.get_version', 'usb.get_version', ([], {}), '()\n', (46475, 46477), True, 'import libusb as usb\n'), ((47171, 47185), 'libusb.exit', 'usb.exit', (['None'], {}), '(None)\n', (47179, 47185), True, 'import libusb as usb\n'), ((1859, 1877), 'libusb.error', 'usb.error', (['errcode'], {}), '(errcode)\n', (1868, 1877), True, 'import libusb as usb\n'), ((13583, 13603), 'ctypes.sizeof', 'ct.sizeof', (['cbw.CBWCB'], {}), '(cbw.CBWCB)\n', (13592, 13603), True, 'import ctypes as ct\n'), ((14402, 14416), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (14410, 14416), True, 'import ctypes as ct\n'), ((14475, 14507), 'libusb.clear_halt', 'usb.clear_halt', (['handle', 'endpoint'], {}), '(handle, endpoint)\n', (14489, 14507), True, 'import libusb as usb\n'), ((15274, 15288), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (15282, 15288), True, 'import ctypes as ct\n'), ((15347, 15379), 'libusb.clear_halt', 'usb.clear_halt', (['handle', 'endpoint'], {}), '(handle, endpoint)\n', (15361, 15379), True, 'import libusb as usb\n'), ((17234, 17251), 'ctypes.pointer', 'ct.pointer', (['sense'], {}), '(sense)\n', (17244, 17251), True, 'import ctypes as ct\n'), ((17253, 17275), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (17263, 17275), True, 'import ctypes as ct\n'), ((19444, 19462), 'ctypes.pointer', 'ct.pointer', (['buffer'], {}), '(buffer)\n', (19454, 19462), True, 'import ctypes as ct\n'), ((19464, 19486), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (19474, 19486), True, 'import ctypes as ct\n'), ((20611, 20629), 'ctypes.pointer', 'ct.pointer', (['buffer'], {}), '(buffer)\n', (20621, 20629), True, 'import ctypes as ct\n'), ((20631, 20653), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (20641, 20653), True, 'import ctypes as ct\n'), ((21289, 21311), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (21299, 21311), True, 'import ctypes as ct\n'), ((25636, 25658), 
'ctypes.POINTER', 'ct.POINTER', (['ct.c_uint8'], {}), '(ct.c_uint8)\n', (25646, 25658), True, 'import ctypes as ct\n'), ((26181, 26198), 'ctypes.c_uint16', 'ct.c_uint16', (['size'], {}), '(size)\n', (26192, 26198), True, 'import ctypes as ct\n'), ((26951, 26973), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_uint8'], {}), '(ct.c_uint8)\n', (26961, 26973), True, 'import ctypes as ct\n'), ((27495, 27512), 'ctypes.c_uint16', 'ct.c_uint16', (['size'], {}), '(size)\n', (27506, 27512), True, 'import ctypes as ct\n'), ((28291, 28305), 'ctypes.byref', 'ct.byref', (['size'], {}), '(size)\n', (28299, 28305), True, 'import ctypes as ct\n'), ((29917, 30010), 'ctypes.c_uint8', 'ct.c_uint8', (['(usb.LIBUSB_ENDPOINT_IN | usb.LIBUSB_REQUEST_TYPE_VENDOR | os_fd[i].recipient)'], {}), '(usb.LIBUSB_ENDPOINT_IN | usb.LIBUSB_REQUEST_TYPE_VENDOR | os_fd[\n i].recipient)\n', (29927, 30010), True, 'import ctypes as ct\n'), ((30171, 30205), 'ctypes.c_uint16', 'ct.c_uint16', (['(iface_number << 8 | 0)'], {}), '(iface_number << 8 | 0)\n', (30182, 30205), True, 'import ctypes as ct\n'), ((30818, 30911), 'ctypes.c_uint8', 'ct.c_uint8', (['(usb.LIBUSB_ENDPOINT_IN | usb.LIBUSB_REQUEST_TYPE_VENDOR | os_fd[i].recipient)'], {}), '(usb.LIBUSB_ENDPOINT_IN | usb.LIBUSB_REQUEST_TYPE_VENDOR | os_fd[\n i].recipient)\n', (30828, 30911), True, 'import ctypes as ct\n'), ((31072, 31106), 'ctypes.c_uint16', 'ct.c_uint16', (['(iface_number << 8 | 0)'], {}), '(iface_number << 8 | 0)\n', (31083, 31106), True, 'import ctypes as ct\n'), ((31171, 31190), 'ctypes.c_uint16', 'ct.c_uint16', (['length'], {}), '(length)\n', (31182, 31190), True, 'import ctypes as ct\n'), ((31583, 31627), 'ctypes.POINTER', 'ct.POINTER', (['usb.usb_2_0_extension_descriptor'], {}), '(usb.usb_2_0_extension_descriptor)\n', (31593, 31627), True, 'import ctypes as ct\n'), ((31690, 31711), 'ctypes.byref', 'ct.byref', (['usb_2_0_ext'], {}), '(usb_2_0_ext)\n', (31698, 31711), True, 'import ctypes as ct\n'), ((31888, 31938), 
'libusb.free_usb_2_0_extension_descriptor', 'usb.free_usb_2_0_extension_descriptor', (['usb_2_0_ext'], {}), '(usb_2_0_ext)\n', (31925, 31938), True, 'import libusb as usb\n'), ((34462, 34487), 'libusb.get_device_speed', 'usb.get_device_speed', (['dev'], {}), '(dev)\n', (34482, 34487), True, 'import libusb as usb\n'), ((34724, 34742), 'ctypes.byref', 'ct.byref', (['dev_desc'], {}), '(dev_desc)\n', (34732, 34742), True, 'import ctypes as ct\n'), ((36088, 36121), 'libusb.free_bos_descriptor', 'usb.free_bos_descriptor', (['bos_desc'], {}), '(bos_desc)\n', (36111, 36121), True, 'import libusb as usb\n'), ((36322, 36341), 'ctypes.byref', 'ct.byref', (['conf_desc'], {}), '(conf_desc)\n', (36330, 36341), True, 'import ctypes as ct\n'), ((39643, 39677), 'libusb.claim_interface', 'usb.claim_interface', (['handle', 'iface'], {}), '(handle, iface)\n', (39662, 39677), True, 'import libusb as usb\n'), ((41673, 41709), 'libusb.release_interface', 'usb.release_interface', (['handle', 'iface'], {}), '(handle, iface)\n', (41694, 41709), True, 'import libusb as usb\n'), ((46809, 46885), 'libusb.set_option', 'usb.set_option', (['None', 'usb.LIBUSB_OPTION_LOG_LEVEL', 'usb.LIBUSB_LOG_LEVEL_INFO'], {}), '(None, usb.LIBUSB_OPTION_LOG_LEVEL, usb.LIBUSB_LOG_LEVEL_INFO)\n', (46823, 46885), True, 'import libusb as usb\n'), ((46937, 46962), 'libusb.setlocale', 'usb.setlocale', (['error_lang'], {}), '(error_lang)\n', (46950, 46962), True, 'import libusb as usb\n'), ((14356, 14371), 'ctypes.pointer', 'ct.pointer', (['cbw'], {}), '(cbw)\n', (14366, 14371), True, 'import ctypes as ct\n'), ((14373, 14395), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (14383, 14395), True, 'import ctypes as ct\n'), ((14694, 14706), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (14703, 14706), True, 'import libusb as usb\n'), ((15228, 15243), 'ctypes.pointer', 'ct.pointer', (['csw'], {}), '(csw)\n', (15238, 15243), True, 'import ctypes as ct\n'), ((15245, 15267), 'ctypes.POINTER', 
'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (15255, 15267), True, 'import ctypes as ct\n'), ((15564, 15576), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (15573, 15576), True, 'import libusb as usb\n'), ((17389, 17407), 'libusb.error_name', 'usb.error_name', (['rc'], {}), '(rc)\n', (17403, 17407), True, 'import libusb as usb\n'), ((32051, 32102), 'ctypes.POINTER', 'ct.POINTER', (['usb.ss_usb_device_capability_descriptor'], {}), '(usb.ss_usb_device_capability_descriptor)\n', (32061, 32102), True, 'import ctypes as ct\n'), ((32172, 32199), 'ctypes.byref', 'ct.byref', (['ss_usb_device_cap'], {}), '(ss_usb_device_cap)\n', (32180, 32199), True, 'import ctypes as ct\n'), ((32605, 32668), 'libusb.free_ss_usb_device_capability_descriptor', 'usb.free_ss_usb_device_capability_descriptor', (['ss_usb_device_cap'], {}), '(ss_usb_device_cap)\n', (32649, 32668), True, 'import libusb as usb\n'), ((34076, 34096), 'ctypes.sizeof', 'ct.sizeof', (['port_path'], {}), '(port_path)\n', (34085, 34096), True, 'import ctypes as ct\n'), ((35849, 35867), 'ctypes.byref', 'ct.byref', (['bos_desc'], {}), '(bos_desc)\n', (35857, 35867), True, 'import ctypes as ct\n'), ((40338, 40360), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (40348, 40360), True, 'import ctypes as ct\n'), ((42161, 42184), 'ctypes.pointer', 'ct.pointer', (['endian_test'], {}), '(endian_test)\n', (42171, 42184), True, 'import ctypes as ct\n'), ((42186, 42208), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_uint8'], {}), '(ct.c_uint8)\n', (42196, 42208), True, 'import ctypes as ct\n'), ((26568, 26593), 'libusb.clear_halt', 'usb.clear_halt', (['handle', '(0)'], {}), '(handle, 0)\n', (26582, 26593), True, 'import libusb as usb\n'), ((27918, 27943), 'libusb.clear_halt', 'usb.clear_halt', (['handle', '(0)'], {}), '(handle, 0)\n', (27932, 27943), True, 'import libusb as usb\n'), ((30604, 30627), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_uint32'], {}), '(ct.c_uint32)\n', (30614, 30627), True, 'import 
ctypes as ct\n'), ((31296, 31308), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (31305, 31308), True, 'import libusb as usb\n'), ((32764, 32803), 'ctypes.POINTER', 'ct.POINTER', (['usb.container_id_descriptor'], {}), '(usb.container_id_descriptor)\n', (32774, 32803), True, 'import ctypes as ct\n'), ((32861, 32883), 'ctypes.byref', 'ct.byref', (['container_id'], {}), '(container_id)\n', (32869, 32883), True, 'import ctypes as ct\n'), ((33023, 33069), 'libusb.free_container_id_descriptor', 'usb.free_container_id_descriptor', (['container_id'], {}), '(container_id)\n', (33055, 33069), True, 'import libusb as usb\n'), ((40061, 40078), 'ctypes.sizeof', 'ct.sizeof', (['string'], {}), '(string)\n', (40070, 40078), True, 'import ctypes as ct\n'), ((44861, 44881), 'ctypes.c_uint16', 'ct.c_uint16', (['tmp_vid'], {}), '(tmp_vid)\n', (44872, 44881), True, 'import ctypes as ct\n'), ((44908, 44928), 'ctypes.c_uint16', 'ct.c_uint16', (['tmp_pid'], {}), '(tmp_pid)\n', (44919, 44928), True, 'import ctypes as ct\n'), ((18942, 18954), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (18951, 18954), True, 'import libusb as usb\n'), ((28444, 28456), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (28453, 28456), True, 'import libusb as usb\n'), ((30415, 30427), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (30424, 30427), True, 'import libusb as usb\n'), ((38862, 38910), 'ctypes.POINTER', 'ct.POINTER', (['usb.ss_endpoint_companion_descriptor'], {}), '(usb.ss_endpoint_companion_descriptor)\n', (38872, 38910), True, 'import ctypes as ct\n'), ((38980, 39012), 'ctypes.byref', 'ct.byref', (['altsetting.endpoint[k]'], {}), '(altsetting.endpoint[k])\n', (38988, 39012), True, 'import ctypes as ct\n'), ((39075, 39092), 'ctypes.byref', 'ct.byref', (['ep_comp'], {}), '(ep_comp)\n', (39083, 39092), True, 'import ctypes as ct\n'), ((39374, 39424), 'libusb.free_ss_endpoint_companion_descriptor', 'usb.free_ss_endpoint_companion_descriptor', (['ep_comp'], {}), '(ep_comp)\n', (39415, 
39424), True, 'import libusb as usb\n'), ((40036, 40058), 'ctypes.POINTER', 'ct.POINTER', (['ct.c_ubyte'], {}), '(ct.c_ubyte)\n', (40046, 40058), True, 'import ctypes as ct\n'), ((22348, 22363), 'ctypes.c_uint', 'ct.c_uint', (['size'], {}), '(size)\n', (22357, 22363), True, 'import ctypes as ct\n'), ((23191, 23232), 'ctypes.c_uint32', 'ct.c_uint32', (['hid_report_descriptor[i + j]'], {}), '(hid_report_descriptor[i + j])\n', (23202, 23232), True, 'import ctypes as ct\n'), ((44656, 44675), 'ctypes.pointer', 'ct.pointer', (['tmp_vid'], {}), '(tmp_vid)\n', (44666, 44675), True, 'import ctypes as ct\n'), ((44677, 44696), 'ctypes.pointer', 'ct.pointer', (['tmp_pid'], {}), '(tmp_pid)\n', (44687, 44696), True, 'import ctypes as ct\n'), ((47103, 47115), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (47112, 47115), True, 'import libusb as usb\n'), ((22320, 22337), 'ctypes.c_size_t', 'ct.c_size_t', (['size'], {}), '(size)\n', (22331, 22337), True, 'import ctypes as ct\n'), ((26669, 26681), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (26678, 26681), True, 'import libusb as usb\n'), ((28019, 28031), 'libusb.error', 'usb.error', (['r'], {}), '(r)\n', (28028, 28031), True, 'import libusb as usb\n')] |
# -*- coding: utf-8 -*-
# type: ignore
# copyright: (c) 2020 by <NAME>.
# license: Apache 2.0, see LICENSE for more details.
'''Validation Task-Runner.'''
import os
import shutil
from invoke import task
@task
def mkdir(ctx, path):
    '''Make directory path.

    Creates `path` (including any missing intermediate directories) and
    silently succeeds if it already exists.

    :param ctx: invoke task context (unused).
    :param path: directory path to create.
    '''
    try:
        os.makedirs(path, exist_ok=True)
    except OSError as err:
        # Fixed copy-pasted message: the original said
        # "unable to download github release", which is unrelated to mkdir.
        print(f"unable to make directory path due to: {err}")
@task
def rmdir(ctx, path):
    '''Remove directory path.

    Recursively deletes `path` and everything under it.

    :param ctx: invoke task context (unused).
    :param path: directory path to remove.
    '''
    try:
        shutil.rmtree(path)
    except OSError as err:
        # "direcotry" typo fixed in the user-facing message.
        print(f"unable to delete directory path due to: {err}")
| [
"os.makedirs",
"shutil.rmtree"
] | [((283, 315), 'os.makedirs', 'os.makedirs', (['path'], {'exist_ok': '(True)'}), '(path, exist_ok=True)\n', (294, 315), False, 'import os\n'), ((490, 509), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (503, 509), False, 'import shutil\n')] |
# Author: <NAME>
'''
Hierarchy of subroutines::
hh_param_from_profile
|--- _calc_rho()
|--- helper_site_response.thk2dep()
|--- _calc_Gmax()
|--- _calc_vertical_stress()
|--- _calc_OCR()
|--- _calc_K0()
|--- _calc_PI()
|--- _calc_shear_strength()
|--- _calc_K0()
|--- produce_HH_G_param()
|--- _calc_mean_confining_stress()
|--- produce_Darendeli_curves()
|--- helper_generic.assert_1D_numpy_array()
|--- helper_generic.check_length_or_extend_to_array()
|--- _calc_K0()
|--- _calc_mean_confining_stress()
|--- helper_mkz_model.fit_MKZ()
|--- _optimization_kernel()
|
                     |--- helper_mkz_model.tau_MKZ()
|--- helper_generic.find_closest_index()
|--- __calc_area()
|--- helper_hh_model.tau_FKZ()
|--- helper_generic.find_closest_index()
|--- __find_x_t_and_d()
|--- helper_hh_model.tau_FKZ()
|--- helper_generic.find_closest_index()
'''
import os
import numpy as np
import matplotlib.pyplot as plt
from . import helper_generic as hlp
from . import helper_site_response as sr
from . import helper_mkz_model as mkz
from . import helper_hh_model as hh
#%%----------------------------------------------------------------------------
def hh_param_from_profile(vs_profile, *, Tmax=None, show_fig=False,
                          save_fig=False, fig_output_dir=None,
                          save_HH_G_file=False, HH_G_file_dir=None,
                          profile_name=None, verbose=True):
    '''
    Derive HH model parameters for every soil layer from a Vs profile alone.

    Parameters
    ----------
    vs_profile : numpy.ndarray
        Shear-wave velocity profile as a 2D array with columns:
        thickness (m), Vs (m/s), damping, density, material number.
    Tmax : numpy.ndarray or ``None``
        Shear strength of each layer. If ``None``, it is estimated with a
        combination of Ladd (1991) and Mohr-Coulomb criteria.
    show_fig : bool
        Whether to show G/Gmax and stress-strain figures (MKZ, FKZ, HH)
        for each layer.
    save_fig : bool
        Whether to save those figures to disk (only used when ``show_fig``
        is ``True``).
    fig_output_dir : str
        Output directory for the figures (only used when both ``show_fig``
        and ``save_fig`` are ``True``).
    save_HH_G_file : bool
        Whether to write the HH parameters to a "HH_G" text file.
    HH_G_file_dir : str
        Output directory for the "HH_G" file (required when
        ``save_HH_G_file`` is ``True``).
    profile_name : str or ``None``
        Profile name used in the output file name; defaults to the current
        date/time when ``None``.
    verbose : bool
        Whether to print progress to the console.

    Returns
    -------
    HH_G_param : numpy.ndarray
        2D array of shape ``(9, n_layer)``; each column holds, in order:
        gamma_t, a, gamma_ref, beta, s, Gmax, mu, Tmax, d.
    '''
    friction_angle = 30.  # degrees; chosen because no better info is available
    hlp.check_Vs_profile_format(vs_profile)
    thk = vs_profile[:-1, 0]      # layer thicknesses, half space excluded
    vs_vals = vs_profile[:-1, 1]  # layer Vs values, half space excluded
    if Tmax is not None:
        hlp.assert_array_length(Tmax, len(vs_vals))
    mass_density = _calc_rho(thk, vs_vals)
    Gmax = _calc_Gmax(vs_vals, mass_density)
    sigma_v0 = _calc_vertical_stress(thk, mass_density)
    OCR = _calc_OCR(vs_vals, mass_density, sigma_v0)
    K0 = _calc_K0(OCR, phi=friction_angle)
    PI = _calc_PI(vs_vals)
    if Tmax is None:
        Tmax = _calc_shear_strength(
            vs_vals, OCR, sigma_v0, K0=K0, phi=friction_angle,
        )
    HH_G_param = produce_HH_G_param(
        vs_vals, Gmax, Tmax, OCR, sigma_v0, K0,
        curves=None, PI=PI, phi=friction_angle,
        show_fig=show_fig, save_fig=save_fig,
        fig_output_dir=fig_output_dir, verbose=verbose,
    )
    if save_HH_G_file:
        if HH_G_file_dir is None:
            raise ValueError('Please specify `HH_G_file_dir`.')
        if profile_name is None:
            profile_name = hlp.get_current_time(for_filename=True)
        out_file = os.path.join(HH_G_file_dir, 'HH_G_%s.txt' % profile_name)
        np.savetxt(out_file, HH_G_param, delimiter='\t', fmt='%.6g')
    return HH_G_param
#%%----------------------------------------------------------------------------
def hh_param_from_curves(vs_profile, curves, *, Tmax=None, show_fig=False,
                         save_fig=False, fig_output_dir=None, save_HH_G_file=False,
                         HH_G_file_dir=None, profile_name=None, verbose=True):
    '''
    Get HH parameters of each soil layer from the Vs profile and G/Gmax curves.

    Parameters
    ----------
    vs_profile : numpy.ndarray
        Shear-wave velocity profile as a 2D array with columns:
        thickness (m), Vs (m/s), damping, density, material number.
        (A 2-column variant with only thickness and Vs is also accepted.)
    curves : numpy.ndarray
        2D array holding the G/Gmax and damping curves of each material, as
        repeating groups of four columns: strain [%], G/Gmax, strain [%],
        damping [%]. The damping columns are ignored here, so dummy values
        are acceptable.
    Tmax : numpy.ndarray or ``None``
        Shear strength of each layer. If ``None``, it is estimated with a
        combination of Ladd (1991) and Mohr-Coulomb criteria.
    show_fig : bool
        Whether to show G/Gmax and stress-strain figures (MKZ, FKZ, HH)
        for each layer.
    save_fig : bool
        Whether to save those figures to disk (only used when ``show_fig``
        is ``True``).
    fig_output_dir : str
        Output directory for the figures (only used when both ``show_fig``
        and ``save_fig`` are ``True``).
    save_HH_G_file : bool
        Whether to write the HH parameters to a "HH_G" text file.
    HH_G_file_dir : str
        Output directory for the "HH_G" file (required when
        ``save_HH_G_file`` is ``True``).
    profile_name : str or ``None``
        Profile name used in the output file name; defaults to the current
        date/time when ``None``.
    verbose : bool
        Whether to print progress to the console.

    Returns
    -------
    HH_G_param : numpy.ndarray
        2D array of shape ``(9, n_layer)``; each column holds, in order:
        gamma_t, a, gamma_ref, beta, s, Gmax, mu, Tmax, d.
    '''
    phi = 30.0  # friction angle in degrees; no site-specific info available
    hlp.check_Vs_profile_format(vs_profile)
    h = vs_profile[:-1, 0]
    Vs = vs_profile[:-1, 1]  # exclude the last layer (i.e., half space)
    n_layer = len(Vs)
    if vs_profile.shape[1] == 5:  # there can only be 5 or 2 columns
        mat = vs_profile[:-1, -1]
        rho = vs_profile[:-1, 3]
    else:  # only 2 columns: material numbers default to 1..n_layer
        mat = np.arange(1, n_layer + 1)
        rho = _calc_rho(h, Vs)
    if Tmax is not None:
        hlp.assert_array_length(Tmax, n_layer)
    Gmax = _calc_Gmax(Vs, rho)
    sigma_v0 = _calc_vertical_stress(h, rho)
    OCR = _calc_OCR(Vs, rho, sigma_v0)
    K0 = _calc_K0(OCR, phi=phi)
    if Tmax is None:
        Tmax = _calc_shear_strength(Vs, OCR, sigma_v0, K0=K0, phi=phi)
    # Expand `curves` so that layer j gets the 4-column group of its material
    # number. Collect the slices first and concatenate once: the original
    # code grew the array with np.column_stack inside the loop, copying the
    # accumulated result on every iteration (O(n_layer^2) overall).
    layer_slices = [
        curves[:, int(mat[j]) * 4 - 4 : int(mat[j]) * 4] for j in range(n_layer)
    ]
    curves = np.hstack(layer_slices) if layer_slices else None
    HH_G_param = produce_HH_G_param(Vs, Gmax, Tmax, OCR, sigma_v0, K0,
                                    curves=curves, PI=None, phi=None,
                                    show_fig=show_fig, save_fig=save_fig,
                                    fig_output_dir=fig_output_dir,
                                    verbose=verbose)
    if save_HH_G_file:
        if HH_G_file_dir is None:
            raise ValueError('Please specify `HH_G_file_dir`.')
        if profile_name is None:
            profile_name = hlp.get_current_time(for_filename=True)
        np.savetxt(os.path.join(HH_G_file_dir, 'HH_G_%s.txt' % profile_name),
                   HH_G_param, delimiter='\t', fmt='%.6g')
    return HH_G_param
#%%----------------------------------------------------------------------------
def produce_HH_G_param(Vs, Gmax, Tmax, OCR, sigma_v0, K0, curves=None,
                       PI=None, phi=None, show_fig=False, save_fig=False,
                       fig_output_dir=None, verbose=True):
    '''
    Produce HH_G parameters from profiles of Vs, Tmax, OCR, etc.

    Parameters
    ----------
    Vs : numpy.ndarray
        Vs values of each layer. Shape: ``(n_layer, )``, where ``n_layer`` is
        the length of ``Vs``. Unit: m/s.
    Gmax : numpy.ndarray
        Initial stiffness of each layer. Shape: ``(n_layer, )``. Unit: Pa.
    Tmax : numpy.ndarray
        The shear strength of each layer. Shape: ``(n_layer, )``. Unit: Pa.
    OCR : numpy.ndarray
        Over-consolidation ratio of each layer. Shape: ``(n_layer, )``.
    sigma_v0 : numpy.ndarray
        Vertical effective confining stress of each layer. Shape:
        ``(n_layer, )``. Unit: Pa.
    K0 : numpy.ndarray or float
        Lateral soil pressure coefficient. If an array, it must have shape
        ``(n_layer, )``. If a single value, it means that all layers share
        this same value.
    curves : numpy.ndarray or ``None``
        A 2D numpy array that represents G/Gmax and damping curves of each
        layer, in the following format:

        +------------+--------+------------+-------------+-------------+--------+-----+
        | strain [%] | G/Gmax | strain [%] | damping [%] |  strain [%] | G/Gmax | ... |
        +============+========+============+=============+=============+========+=====+
        |    ...     |  ...   |    ...     |    ...      |    ...      |  ...   | ... |
        +------------+--------+------------+-------------+-------------+--------+-----+

        The damping information is neglected in this function, so users can
        supply some dummy values. If ``None``, it means that the users do not
        have G/Gmax curve information, so this function will calculate the
        MKZ G/Gmax curves from the empirical formulas in Darendeli (2001).
    PI : float or numpy.ndarray or ``None``
        Plasticity index of the soils. It is not necessary (can be ``None``) if
        ``curves`` is provided (i.e., not ``None``). If an array, it must have
        shape ``(n_layer, )``. If a single value, it means that all layers
        share this same value.
    phi : float or numpy.ndarray or ``None``
        Effective internal frictional angle (in degrees). It is not necessary
        (can be ``None``) if ``curve`` is provided (i.e., not ``None``). If
        an array, it must have shape ``(n_layer, )``. If a single value, it
        means that all layers share this same value.
    show_fig : bool
        Whether or not to show figures G/Gmax and stress-strain curves of MKZ,
        FKZ, and HH for each layer.
    save_fig : bool
        Whether or not to save the figures to the hard drive. Only effective
        if ``show_fig`` is set to ``True``.
    fig_output_dir : str
        The output directory for the figures. Only effective if ``show_fig``
        and ``save_fig`` are both ``True``.
    verbose : bool
        Whether or not to print progresses on the console.

    Returns
    -------
    parameters : numpy.ndarray
        The HH parameters of each layer. It's a 2D array of shape
        ``(9, n_layer)``. For each layer (i.e., column), the values are in
        this order:
            gamma_t, a, gamma_ref, beta, s, Gmax, mu, Tmax, d

    Notes
    -----
    This function is based on ``hybridParaKernel_FKZ.m``.
    '''
    hlp.assert_1D_numpy_array(Vs, '`Vs`')
    n_layer = len(Vs)
    hlp.assert_array_length(Gmax, n_layer, name='`Gmax`')
    hlp.assert_array_length(Tmax, n_layer, name='`Tmax`')
    hlp.assert_array_length(OCR, n_layer, name='`OCR`')
    hlp.assert_array_length(sigma_v0, n_layer, name='`sigma_v0`')
    K0 = hlp.check_length_or_extend_to_array(K0, n_layer, name='`K0`')
    p0 = _calc_mean_confining_stress(sigma_v0, K0)
    if verbose:
        print('========== Start optimizing for HH_G parameters ===========')
    # ---------- Stage 1: G/Gmax curves and MKZ parameters --------------------
    strain, GGmax, gamma_ref, s, beta = _get_GGmax_and_MKZ_params(
        curves, n_layer, sigma_v0, PI, OCR, K0, phi, show_fig, verbose)
    # ---------- Stage 2: stress-strain curves implied by G/Gmax --------------
    # tau = (G/Gmax) * Gmax * gamma; `strain` is in percent, hence / 100
    sigma = GGmax * Gmax * strain / 100.0  # broadcasts Gmax over each column
    sigma[0, :] = 0  # first strain point carries zero stress, as in the loop form
    # ---------- Stage 3: estimate mu via empirical correlations --------------
    p0 = p0 / 1000.0  # unit: Pa --> kPa
    mu = _estimate_mu(Vs, Gmax, Tmax, OCR, p0)
    # ---------- Stage 4: FKZ optimization, layer by layer --------------------
    if verbose:
        print('----------- FKZ optimization -----------------------------')
    parameters = np.zeros((9, n_layer))
    for j in range(n_layer):
        strain_j = strain[:, j]
        a, gamma_t, d = _optimization_kernel(strain_j / 100.0, gamma_ref[j],
                                             beta[j], s[j], Gmax[j], Tmax[j],
                                             mu[j])
        if verbose:
            print('%d/%d: mu = %.3f, a = %.1f, gamma_t = %.3f%%, d = %.3f'
                  % (j + 1, n_layer, mu[j], a, gamma_t * 100, d))
        T_FKZ = hh.tau_FKZ(strain_j / 100.0, Gmax=Gmax[j], mu=mu[j], d=d,
                           Tmax=Tmax[j])
        T_HH = hh.tau_HH(strain_j / 100.0, gamma_t=gamma_t, a=a,
                         gamma_ref=gamma_ref[j], beta=beta[j], s=s[j],
                         Gmax=Gmax[j], mu=mu[j], Tmax=Tmax[j], d=d)
        # Column order: gamma_t, a, gamma_ref, beta, s, Gmax, mu, Tmax, d
        parameters[:, j] = [gamma_t, a, gamma_ref[j], beta[j], s[j],
                            Gmax[j], mu[j], Tmax[j], d]
        if show_fig:
            _plot_HH_fit_of_layer(j, curves, strain_j, sigma[:, j], GGmax[:, j],
                                  T_FKZ, T_HH, Vs, Gmax, Tmax, gamma_ref, mu,
                                  p0, a, gamma_t, d, save_fig, fig_output_dir)
    return parameters


def _get_GGmax_and_MKZ_params(curves, n_layer, sigma_v0, PI, OCR, K0, phi,
                              show_fig, verbose):
    '''
    Obtain G/Gmax curves and MKZ parameters for every layer.

    If ``curves`` is ``None``, generate G/Gmax curves with the Darendeli
    (2001) empirical correlations; otherwise fit MKZ parameters to the
    user-supplied curves.

    Returns (strain [%], GGmax, gamma_ref, s, beta); ``strain`` and ``GGmax``
    are 2D arrays with one column per layer, the rest are 1D per-layer arrays.
    '''
    if curves is None:  # user does not provide curves
        if verbose:
            print('------ G/Gmax not provided; will generate MKZ curves using '
                  'Darendeli (2001): ------')
        strain_ = np.geomspace(1e-4, 10, 400)  # unit: percent
        GGmax, _, gamma_ref = produce_Darendeli_curves(sigma_v0, PI, OCR=OCR,
                                                      K0=K0, phi=phi,
                                                      strain_in_pct=strain_)
        strain = np.tile(strain_, (n_layer, 1)).T  # strain matrix for all layers
        beta = np.ones(n_layer)
        s = 0.9190 * np.ones(n_layer)  # fixed MKZ shape values for this path
    else:  # user provides own curves
        if verbose:
            print('------ G/Gmax provided; fitting MKZ curves to data: ------')
        hlp.assert_2D_numpy_array(curves)
        assert(curves.shape[1] == n_layer * 4)
        # Extract G/Gmax information (damping columns are ignored)
        strain = curves[:, 0::4]  # unit: percent
        GGmax = curves[:, 1::4]
        # Fit MKZ parameters
        param_MKZ, _ = mkz.fit_MKZ(curves, show_fig=show_fig)
        gamma_ref = param_MKZ[:, 0]
        s = param_MKZ[:, 2]
        beta = param_MKZ[:, 3]
        # Show results on console
        if verbose:
            print('****** MKZ parameters: ******')
        for j in range(n_layer):
            if verbose:
                print('Layer %d: gamma_ref = %.3g, s = %.3g, beta = %.3g'
                      % (j, gamma_ref[j], s[j], beta[j]))
    return strain, GGmax, gamma_ref, s, beta


def _estimate_mu(Vs, Gmax, Tmax, OCR, p0):
    '''
    Estimate the FKZ "mu" parameter of each layer via empirical correlations.

    ``p0`` is the mean effective confining stress in kPa. For softer soils
    (Vs <= 760 m/s) the Vardanega & Bolton (2011) CGJ formula is used (with
    upward corrections when mu comes out too small, which would otherwise
    produce a sharply decreasing tau_HH); for stiffer soils mu is set to 1
    for lack of better information.
    '''
    mu = np.zeros_like(OCR)
    for j in range(len(Vs)):
        if Vs[j] <= 760:  # softer soil: Vardanega & Bolton (2011) CGJ formula
            mu[j] = 1.0 / (0.000872 * Gmax[j]/Tmax[j] * OCR[j]**0.47 * p0[j]**0.28)
            # 0.236 is the standard error suggested in Vardanega & Bolton (2011)
            if mu[j] <= 0.02:
                mu[j] = mu[j] * 10.0 ** (0.236 * 3)
            elif mu[j] <= 0.03:
                mu[j] = mu[j] * 10.0 ** (0.236 * 2)
            elif mu[j] <= 0.04:
                mu[j] = mu[j] * 10.0 ** (0.236 * 1)
        else:  # stiffer soils: set mu to 1 for lack of better information
            mu[j] = 1.0
    return mu


def _plot_HH_fit_of_layer(j, curves, strain_j, sigma_j, GGmax_j, T_FKZ, T_HH,
                          Vs, Gmax, Tmax, gamma_ref, mu, p0, a, gamma_t, d,
                          save_fig, fig_output_dir):
    '''
    Plot stress-strain (top) and G/Gmax (bottom) curves of MKZ, FKZ, and HH
    for layer ``j``, optionally saving the figure to ``fig_output_dir``.
    ``p0`` is in kPa; stresses are converted from Pa to kPa for display.
    '''
    lw = 1.0
    muted_blue = np.array([107, 174, 214]) / 255.
    muted_green = np.array([120, 198, 121]) / 255.
    muted_red = np.array([222, 45, 38]) / 255.
    # G/Gmax implied by the HH stress curve (secant modulus ratio)
    GGmax_HH = T_HH / (Gmax[j] * (strain_j / 100.0))
    fig = plt.figure(figsize=(4.2, 6.0))
    plt.subplot(211)
    if curves is None:
        plt.semilogx(strain_j, sigma_j / 1000., c=muted_blue,
                     lw=lw*2.5, label='MKZ')  # Darendeli's curve
        plt.semilogx(strain_j, T_FKZ / 1000., c=muted_green, lw=lw*1.75,
                     label='FKZ')
    else:
        plt.semilogx(strain_j, sigma_j / 1000., c=muted_blue,
                     marker='o', ls='-', lw=lw*2.5,
                     label='Given $G/G_{\max}$')
        plt.semilogx(strain_j, T_FKZ / 1000., c=muted_green, lw=lw*1.75,
                     label='FKZ')
    plt.grid(ls=':', lw=0.5)
    plt.plot([np.min(strain_j), np.max(strain_j)],
             np.array([Tmax[j], Tmax[j]]) / 1000., lw=lw, c='gray',
             ls='--')
    plt.plot(strain_j, T_HH / 1000., c=muted_red, lw=lw, label='HH')
    plt.plot([gamma_t * 100] * 2, plt.ylim(), ls='--', c='gray')
    plt.ylabel('Stress [kPa]')
    plt.xlim(np.min(strain_j), np.max(strain_j))
    plt.legend(loc='upper left')
    plt.title('$V_S$ = %.1f m/s, $G_{\max}$ = %.3f MPa,'
              '\n$\\tau_{\mathrm{ff}}$ = %.3f kPa, '
              '$\gamma_{\mathrm{ref}}$ = %.3f%%'
              % (Vs[j], Gmax[j]/1e6, Tmax[j]/1e3, gamma_ref[j]*100))
    plt.subplot(212)
    if curves is None:
        plt.semilogx(strain_j, GGmax_j, c=muted_blue, lw=lw*2.5)
    else:
        plt.semilogx(strain_j, GGmax_j, c=muted_blue, ls='-',
                     marker='o', lw=lw*2.5)
    plt.grid(ls=':', lw=0.5)
    plt.plot(strain_j,
             mu[j] / (1 + Gmax[j]/Tmax[j]*mu[j]*np.abs(strain_j/100.) ),
             c=muted_green, lw=lw*1.75)
    plt.plot(strain_j, GGmax_HH, c=muted_red, lw=lw)
    plt.plot([gamma_t * 100] * 2, plt.ylim(), ls='--', c='gray')
    plt.ylabel('$G/G_{\max}$')
    plt.xlabel('Strain [%]')
    plt.xlim(np.min(strain_j), np.max(strain_j))
    plt.title("$\mu$ = %.3f, a = %.1f, $\gamma_{\mathrm{t}}$ = %.4f%%\n"
              "d = %.4f, $p'_{\mathrm{m0}}$ = %.2f kPa"
              % (mu[j], a, gamma_t * 100, d, p0[j]))
    fig.tight_layout(pad=0.5, h_pad=1.2, w_pad=0.3)
    if save_fig:
        if fig_output_dir is None:
            raise ValueError('Please specify `fig_output_dir`.')
        fig.savefig(os.path.join(fig_output_dir,
                                 'Stress_GGmax_of_Layer_#%d.png' % (j+1)))
#%%----------------------------------------------------------------------------
def _calc_shear_strength(Vs, OCR, sigma_v0, K0=None, phi=30.0):
    '''
    Estimate the shear strength of each soil layer.

    Parameters
    ----------
    Vs : numpy.ndarray
        1D array of shear-wave velocities of each layer. Unit: m/s.
    OCR : numpy.ndarray
        1D array of over-consolidation ratios of each layer. Unit: 1.
    sigma_v0 : numpy.ndarray
        1D array of vertical effective overburden pressures. Unit: Pa.
    K0 : float, int, numpy.ndarray, or ``None``
        Lateral soil pressure coefficient. A single value applies to all
        layers. If ``None``, it is determined from OCR via the empirical
        formula by Mayne & Kulhawy (1982).
    phi : float, int, or numpy.ndarray
        Effective internal friction angle of soils (in degrees).

    Returns
    -------
    Tmax : numpy.ndarray
        Shear strength of soils of each layer. Unit: Pa.
    '''
    # Dynamic-loading coefficient: assumes a strain rate of 0.01 sec^(-1),
    # from Vardanega & Bolton (2013)
    dyna_coeff = 1.2
    n = len(Vs)
    phi = hlp.check_length_or_extend_to_array(phi, n, name='`phi`')
    if K0 is None:
        K0 = _calc_K0(OCR, phi=phi)
    else:
        K0 = hlp.check_length_or_extend_to_array(K0, n, name='`K0`')
    Tmax = np.zeros(n)
    for i in range(n):
        if Vs[i] <= 760:
            # Softer soils: undrained shear strength, Ladd (1991)
            Tmax[i] = dyna_coeff * 0.28 * OCR[i]**0.8 * sigma_v0[i]
            continue
        # Stiffer soils: Mohr-Coulomb criterion
        phi_rad = np.deg2rad(phi[i])
        sigma_h0 = K0[i] * sigma_v0[i]  # horizontal stress
        sigma_1 = np.max([sigma_v0[i], sigma_h0])  # largest principal stress
        sigma_3 = np.min([sigma_v0[i], sigma_h0])  # smallest principal stress
        # Normal effective stress on the slip plane
        sigma_n = (sigma_1 + sigma_3)/2.0 \
                  - (sigma_1 - sigma_3)/2.0 * np.sin(phi_rad)
        Tmax[i] = dyna_coeff * sigma_n * np.tan(phi_rad)
    return Tmax
#%%----------------------------------------------------------------------------
def _calc_Gmax(Vs, rho):
'''
Calculate initial stiffness of each soil layer.
Parameters
----------
Vs : numpy.ndarray
1D array of Vs of layers. Unit: m/s.
rho : numpy.ndarray
1D array of mass density of layers. Unit: kg/m^3.
Returns
-------
Gmax : numpy.ndarray
1D array of initial stiffness. Unit: Pa
'''
Gmax = rho * Vs**2
return Gmax
#%%----------------------------------------------------------------------------
def _calc_OCR(Vs, rho, sigma_v0, OCR_upper_limit=None):
'''
Calculate OCR (over-consolidation ratio) of each layer from the Vs profile.
Parameters
----------
Vs : numpy.ndarray
1D array of Vs of layers. Unit: m/s.
rho : numpy.ndarray
1D array of mass density of layers. Unit: kg/m^3.
sigma_v0 : numpy.ndarray
Vertical overburden stress at the mid-point of each layer. Unit: Pa.
OCR_upper_limit : float or ``None``
The maximum acceptable OCR value. If ``None``, there is no limit.
Returns
-------
OCR : numpy.ndarray
1D array of OCR value, for each soil layer. (Unitless.)
'''
sigma_p0 = 0.106 * Vs**1.47 # Mayne, <NAME> (1998) "Clay stress history evaluated fromseismic piezocone tests"
sigma_p0 = sigma_p0 * 1000 # kPa --> Pa
OCR = sigma_p0 / sigma_v0
OCR = np.minimum(OCR, np.inf if OCR_upper_limit is None else OCR_upper_limit)
return OCR
#%%----------------------------------------------------------------------------
def _calc_vertical_stress(h, rho):
'''
Calculate vertical overburden stress at the mid-point of each layer.
Parameters
----------
h : numpy.ndarray
1D array of layer thickness. Unit: m.
rho : numpy.ndarray
1D array of mass density of each layer. Unit: kg/m^3.
Returns
-------
stress : numpy.ndarray
Vertical overburden stress at the mid-point of each layer. Unit: Pa.
'''
g = 9.81 # unit: m/s/s
n = len(h)
stress = np.zeros_like(h)
if np.mean(rho) < 1000:
print('Warning in __calc_vertical_stress(): It looks like the unit '
'of mass density is g/cm^3. The correct unit should be kg/m^3.')
if h[-1] == 0: # zero thickness, i.e., half space
h[-1] = 1
stress[0] = rho[0] * g * h[0]/2 # divided by 2: middle of layer
for i in range(1, n):
stress[i] = stress[i-1] + rho[i-1] * g * h[i-1]/2 + rho[i] * g * h[i]/2
return stress
#%%----------------------------------------------------------------------------
def _calc_rho(h, Vs):
    '''
    Calculate mass density of soils from Vs values, using the empirical
    formula by Mayne, Schneider & Martin (1999) and Burns & Mayne (1996).

    Parameters
    ----------
    h : numpy.ndarray
        The thickness of each soil layer. Unit: m.
    Vs : numpy.ndarray
        The shear-wave velocity for each layer (1D numpy array). Unit: m/s.

    Returns
    -------
    rho : numpy.ndarray
        Mass density of each soil layer. Unit: kg/m^3.

    References
    ----------
    1. Mayne, Schneider & Martin (1999) "Small- and large-strain soil
       properties from seismic flat dilatometer tests." Pre-failure
       deformation characteristics of geomaterials, 1999 Balkema, Rotterdam.
    2. Burns & Mayne (1996) "Small- and high-strain soil properties using the
       seismic piezocone." Transportation Research Record 1548, National
       Acad. Press, Washington DC, 81-88.
    '''
    depth = sr.thk2dep(h, midpoint=False)
    depth[depth == 0] = 0.0001  # avoid dividing-by-zero / log(0) at the surface
    lower_bound = 1.65  # lower bound of density: 1.65 g/cm^3
    rho_gcc = 1 + 1. / (0.614 + 58.7 * (np.log(depth) + 1.095) / Vs)
    rho = np.maximum(lower_bound, rho_gcc)
    rho *= 1000  # unit: g/cm^3 --> kg/m^3
    return rho
#%%----------------------------------------------------------------------------
def _calc_PI(Vs):
'''
Calculate PI (plasticity index) from Vs values.
Parameters
----------
Vs : numpy.ndarray
The shear-wave velocity for each layer. It needs to be a 1D numpy array.
Unit: m/s.
Returns
-------
PI : numpy.ndarray
The plasticity index for each layer. Unit: %.
'''
PI = np.zeros_like(Vs)
for j in range(len(Vs)):
if Vs[j] <= 200:
PI[j] = 10
elif Vs[j] <= 360:
PI[j] = 5
else:
PI[j] = 0
return PI
#%%----------------------------------------------------------------------------
def _calc_K0(OCR, phi=30.):
'''
Calculate K0 (lateral earth pressure coefficient at rest) from OCR using
the empirical formula by Mayne & Kulhawy (1982).
Parameters
----------
OCR : float, int, or numpy.ndarray
Over-consolidation ratio of each layer of soils. If it is a float/int,
it means only one layer, or all the layers have the same OCR.
phi : float, int, or numpy.ndarray
Internal effective friction angle of soils. If it is a float/int, it
means only one soil layer, or all the layers have the same angle.
Unit: deg.
Returns
-------
K0 : float or numpy.ndarray
K0 value(s). If either ``OCR`` or ``phi`` is an array, ``K0`` will be
an array of the same length.
'''
K0 = (1 - np.sin(np.deg2rad(phi))) * OCR ** (np.sin(np.deg2rad(phi)))
return K0
#%%----------------------------------------------------------------------------
def produce_Darendeli_curves(sigma_v0, PI=20., OCR=1., K0=0.5, phi=30.0,
                             strain_in_pct=None):
    '''
    Produce G/Gmax and damping curves using empirical correlations by
    Darendeli (2001).

    Parameters
    ----------
    sigma_v0 : numpy.ndarray
        Effective vertical confining stress of each layer. Unit: Pa.
    PI : int, float, or numpy.ndarray
        Plasticity index of each layer. Unit: %. A single value is assumed
        to apply to all layers.
    OCR : int, float, or numpy.ndarray
        Over-consolidation ratio of each layer. A single value is assumed
        to apply to all layers.
    K0 : int, float, numpy.ndarray, or ``None``
        Lateral soil pressure coefficient. A single value is assumed to apply
        to all layers. If ``None``, it will be determined from OCR via an
        empirical formula by Mayne & Kulhawy (1982).
    phi : int, float, or numpy.ndarray
        Internal effective friction angle of soils. A single value means all
        the layers have the same angle. Unit: deg.
    strain_in_pct : numpy.ndarray or ``None``
        The strain values at which to calculate G/Gmax and damping values. If
        ``None``, numpy.geomspace(1e-4, 10, 400) will be used. Unit: percent.

    Returns
    -------
    GGmax : numpy.ndarray
        G/Gmax curves for each layer. It is a 2D numpy array. Each column of
        it represents the G/Gmax curve of a particular layer. Unit: 1
    xi : numpy.ndarray
        Damping curves for each layer. Same shape as ``GGmax``. Unit: 1.
    gamma_r : numpy.ndarray
        Reference strain for each layer. It is a 1D numpy array, corresponding
        to each soil layer. Unit: 1.
    '''
    hlp.assert_1D_numpy_array(sigma_v0)
    n_layer = len(sigma_v0)
    phi = hlp.check_length_or_extend_to_array(phi, n_layer, name='`phi`')
    PI = hlp.check_length_or_extend_to_array(PI, n_layer, name='`PI`')
    OCR = hlp.check_length_or_extend_to_array(OCR, n_layer, name='`OCR`')
    if K0 is None:
        K0 = _calc_K0(OCR, phi=phi)
    else:
        K0 = hlp.check_length_or_extend_to_array(K0, n_layer, name='`K0`')
    if strain_in_pct is None:
        gamma = np.geomspace(1e-4, 10, 400)
    else:
        gamma = strain_in_pct.copy()
    # Define all constants (Darendeli, 2001)
    nr_cycle = 10
    frq = 1
    N = nr_cycle
    phi1 = 0.0352
    phi2 = 0.0010
    phi3 = 0.3246
    phi4 = 0.3483
    phi5 = 0.9190
    phi6 = 0.8005
    phi7 = 0.0129
    phi8 = -0.1069
    phi9 = -0.2889
    phi10 = 0.2919
    phi11 = 0.6329
    phi12 = -0.0057
    a = phi5
    c1 = -1.1143*a**2 + 1.8618*a + 0.2523  # from Darendeli (2001), page 226
    c2 = 0.0805*a**2 - 0.0710*a - 0.0095
    c3 = -0.0005*a**2 + 0.0002*a + 0.0003
    b = phi11 + phi12 * np.log(N)  # Darendeli (2001) Eq 9.1d
    # Confining stress
    sigma_0 = _calc_mean_confining_stress(sigma_v0, K0)  # octahedral stress
    sigma_0 = sigma_0 / 101325.0  # unit: Pa --> atm
    # BUG FIX: was `len(strain_in_pct)`, which raised TypeError whenever
    # `strain_in_pct` was None (the documented default). Use `gamma`, which
    # is valid on both paths.
    n_strain_pts = len(gamma)
    # Reference strain for each layer (Eq 9.1a). Unit: percent
    gamma_r = (phi1 + phi2 * PI * OCR**phi3) * sigma_0**phi4
    GGmax = np.zeros((n_strain_pts, n_layer))
    xi = np.zeros_like(GGmax)
    for i in range(n_layer):
        GGmax[:, i] = 1. / (1 + (gamma / gamma_r[i])**a)  # G of i-th layer (Eq 9.2a)
        D_masing_1 = (100. / np.pi) \
                     * (4 * (gamma - gamma_r[i] * np.log((gamma + gamma_r[i]) / gamma_r[i])) \
                        / (gamma**2 / (gamma + gamma_r[i])) - 2)  # Unit: percent (page 226)
        D_masing = c1 * D_masing_1 + c2 * D_masing_1**2 + c3 * D_masing_1**3  # Unit: percent (page 226)
        D_min = (phi6 + phi7 * PI[i] * OCR[i]**phi8) * sigma_0[i]**phi9 * (1 + phi10 * np.log(frq))  # Eq 9.1c (page 221)
        xi[:, i] = b * GGmax[:, i]**0.1 * D_masing + D_min  # Eq 9.2b (page 224). Unit: percent
    xi /= 100.0
    gamma_r /= 100.0
    return GGmax, xi, gamma_r
#%%----------------------------------------------------------------------------
def _calc_mean_confining_stress(sigma_v0, K0):
'''
Calculate mean (of three directions) confining stress.
Parameters
----------
sigma_v0 : numpy.ndarray
(Effective) vertical stress of each layer. Unit: Pa.
K0 : numpy.ndarray
Lateral stress coefficient of each layer. Unit: 1.
Returns
-------
sigma_m0 : numpy.ndarray
Mean effective confining stress (of three directions). Unit: Pa.
'''
sigma_m0 = (2 * K0 + 1)/3.0 * sigma_v0
return sigma_m0
#%%----------------------------------------------------------------------------
def _optimization_kernel(x, x_ref, beta, s, Gmax, tau_f, mu):
    '''
    Optimization process to find FKZ parameters.

    The search works on a grid of candidate ``d`` values; if no admissible
    ``d`` is found, the grid is densified (200 -> 400 -> 1000 points) and the
    lower bound of the transition strain is progressively relaxed before
    falling back to fixed defaults.

    Parameters
    ----------
    x : numpy.ndarray
        An 1D array of shear strain. Unit: 1.
    x_ref : float
        The "reference strain" parameter (in MKZ) of the soil. Unit: 1.
    beta : float
        A shape parameter of the FKZ model.
    s : float
        A shape parameter of the FKZ model.
    Gmax : float
        Initial shear modulus. Unit: Pa.
    tau_f : float
        The shear strength of the current soil layer. Unit: Pa.
    mu : float
        The "shape parameter" of the FKZ model.

    Returns
    -------
    a : float
        A parameter of the HH model that defines the "speed" of transition
        from MKZ to FKZ.
    gamma_t : float
        The shear strain at which the transition from MKZ to FKZ happens.
        Unit: 1.
    d : float
        The "shape power" parameter in the FKZ model.

    Notes
    -----
    Based on optHybFKZ.m
    '''
    T_MKZ = mkz.tau_MKZ(x, gamma_ref=x_ref, beta=beta, s=s, Gmax=Gmax)
    if mu <= 0.03:  # when mu is too small, there may be some numerical issues
        gamma_t_LB = 0.001  # therefore gamma_t lower bound is relaxed
    else:
        gamma_t_LB = 0.01
    gamma_t_UB = 3.0  # unit: percent
    index_gamma_t_LB, _ = hlp.find_closest_index(x, gamma_t_LB / 100.0)
    if T_MKZ[index_gamma_t_LB] >= 0.85 * tau_f:
        # For very deep layers, tau_MKZ may already be close to tau_f at
        # gamma_t_LB; relax the lower bound (once, or twice if still too high)
        gamma_t_LB = 0.005
        index_gamma_t_LB, _ = hlp.find_closest_index(x, gamma_t_LB / 100.0)  # do it again
        if T_MKZ[index_gamma_t_LB] >= 0.85 * tau_f:
            gamma_t_LB = 0.001
    range_d = np.linspace(0.67, 1.39, 200)  # candidate grid for `d`
    area = __calc_area(range_d, x, Gmax, mu, tau_f, gamma_t_LB, gamma_t_UB, T_MKZ)
    if np.min(area) < np.inf:  # it means that a proper d value is found
        gamma_t, d = __find_x_t_and_d(area, range_d, x, Gmax, mu, tau_f, T_MKZ)
    else:  # cannot find a proper d value
        range_d = np.linspace(0.67, 1.39, 400)  # increase grid density to 400
        area = __calc_area(range_d, x, Gmax, mu, tau_f, gamma_t_LB,
                           gamma_t_UB, T_MKZ)
        if np.min(area) < np.inf:
            gamma_t, d = __find_x_t_and_d(area, range_d, x, Gmax, mu, tau_f, T_MKZ)
        else:
            range_d = np.linspace(0.67, 1.39, 1000)  # increase grid density
            new_gamma_t_LB = 0.005  # further relax the lower bound
            area = __calc_area(range_d, x, Gmax, mu, tau_f, new_gamma_t_LB,
                               gamma_t_UB, T_MKZ)
            if np.min(area) < np.inf:
                gamma_t, d = __find_x_t_and_d(area, range_d, x, Gmax, mu, tau_f, T_MKZ)
            else:
                # Last resort: fall back to fixed defaults
                d = 1.03
                gamma_t = 1e-3 / 100.0  # further relax to 0.001%
    a = 100.0  # always use a fast transition
    return a, gamma_t, d
#%%----------------------------------------------------------------------------
def __find_x_t_and_d(area, range_d, x, Gmax, mu, tau_f, T_MKZ):
    '''
    Pick the ``d`` value that minimizes the "area" between the MKZ and FKZ
    stress curves, and locate the corresponding transition strain ``x_t``
    (the strain at which the two curves cross over).

    Parameters
    ----------
    area : numpy.ndarray
        The "area" between the MKZ stress curve and the FKZ stress curve,
        one value per candidate ``d`` in ``range_d``.
    range_d : numpy.ndarray
        The range of ``d`` to search from. Must be a 1D numpy array.
    x : numpy.ndarray
        An 1D array of shear strain. Unit: 1.
    Gmax : float
        Initial shear modulus. Unit: Pa.
    mu : float
        The "shape parameter" of the FKZ model.
    tau_f : float
        The shear strength of the current soil layer. Unit: Pa.
    T_MKZ : numpy.ndarray
        The MKZ stress curve, which has the same shape as ``x``. Unit: Pa.

    Returns
    -------
    x_t : float
        The transition strain that minimizes the "area". Unit: 1.
    d : float
        The ``d`` value that minimizes the "area". (No unit.)
    '''
    best = np.argmin(area)
    d = range_d[best]
    T_FKZ = hh.tau_FKZ(x, Gmax=Gmax, mu=mu, d=d, Tmax=tau_f)
    # Cross-over point: where |T_MKZ - T_FKZ| is closest to zero
    cross_idx, _ = hlp.find_closest_index(np.abs(T_MKZ - T_FKZ), 0)
    return x[cross_idx], d
#%%----------------------------------------------------------------------------
def __calc_area(range_d, x, Gmax, mu, tau_f, gamma_t_LB, gamma_t_UB, T_MKZ):
    '''
    Calculate the "area" between the MKZ stress curve and the FKZ stress
    curve, for every candidate ``d`` value in ``range_d``.

    The MKZ stress curve is supplied as a parameter; the FKZ stress curve is
    determined by ``x``, ``Gmax``, ``mu``, ``d``, and ``tau_f``. Candidates
    whose cross-over strain falls outside [gamma_t_LB, gamma_t_UB] get an
    area of ``inf`` (i.e., rejected).

    Parameters
    ----------
    range_d : numpy.ndarray
        The range of ``d`` to search from. Must be a 1D numpy array.
    x : numpy.ndarray
        An 1D array of shear strain. Unit: 1.
    Gmax : float
        Initial shear modulus. Unit: Pa.
    mu : float
        The "shape parameter" of the FKZ model.
    tau_f : float
        The shear strength of the current soil layer. Unit: Pa.
    gamma_t_LB : float
        The lower bound of the transition strain ``gamma_t``. Unit: %.
    gamma_t_UB : float
        The upper bound of ``gamma_t``. Unit: %.
    T_MKZ : numpy.ndarray
        The MKZ stress curve, which has the same shape as ``x``. Unit: Pa.

    Returns
    -------
    area : numpy.ndarray
        The "area" between the MKZ stress curve and the FKZ stress curve.
        Same shape as ``range_d`` (one area value per candidate ``d``).
    '''
    # Admissible window for the cross-over strain; percent --> unit 1
    gamma_t_window = np.geomspace(gamma_t_LB, gamma_t_UB, 200) / 100.0
    area = np.zeros_like(range_d)
    for idx, d in enumerate(range_d):
        T_FKZ = hh.tau_FKZ(x, Gmax=Gmax, mu=mu, d=d, Tmax=tau_f)
        # "copt" = cross-over point between the two stress curves
        copt, _ = hlp.find_closest_index(np.abs(T_MKZ - T_FKZ), 0)
        gamma_t = x[copt]
        if gamma_t_window[0] <= gamma_t <= gamma_t_window[-1]:
            diff_T = np.abs(T_MKZ[:copt+1] - T_FKZ[:copt+1])
            area[idx] = np.linalg.norm(diff_T) / (copt + 1.0)
        else:
            area[idx] = np.inf
    return area
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"numpy.log",
"numpy.column_stack",
"numpy.array",
"numpy.linalg.norm",
"numpy.arange",
"numpy.mean",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.zeros_like",
"numpy.max",
"numpy.linspace",
"numpy.min",
"numpy.argmin",... | [((15961, 15981), 'numpy.zeros_like', 'np.zeros_like', (['GGmax'], {}), '(GGmax)\n', (15974, 15981), True, 'import numpy as np\n'), ((16313, 16331), 'numpy.zeros_like', 'np.zeros_like', (['OCR'], {}), '(OCR)\n', (16326, 16331), True, 'import numpy as np\n'), ((17257, 17279), 'numpy.zeros', 'np.zeros', (['(9, n_layer)'], {}), '((9, n_layer))\n', (17265, 17279), True, 'import numpy as np\n'), ((24879, 24950), 'numpy.minimum', 'np.minimum', (['OCR', '(np.inf if OCR_upper_limit is None else OCR_upper_limit)'], {}), '(OCR, np.inf if OCR_upper_limit is None else OCR_upper_limit)\n', (24889, 24950), True, 'import numpy as np\n'), ((25541, 25557), 'numpy.zeros_like', 'np.zeros_like', (['h'], {}), '(h)\n', (25554, 25557), True, 'import numpy as np\n'), ((27763, 27780), 'numpy.zeros_like', 'np.zeros_like', (['Vs'], {}), '(Vs)\n', (27776, 27780), True, 'import numpy as np\n'), ((32186, 32219), 'numpy.zeros', 'np.zeros', (['(n_strain_pts, n_layer)'], {}), '((n_strain_pts, n_layer))\n', (32194, 32219), True, 'import numpy as np\n'), ((32229, 32249), 'numpy.zeros_like', 'np.zeros_like', (['GGmax'], {}), '(GGmax)\n', (32242, 32249), True, 'import numpy as np\n'), ((35336, 35364), 'numpy.linspace', 'np.linspace', (['(0.67)', '(1.39)', '(200)'], {}), '(0.67, 1.39, 200)\n', (35347, 35364), True, 'import numpy as np\n'), ((37928, 37943), 'numpy.argmin', 'np.argmin', (['area'], {}), '(area)\n', (37937, 37943), True, 'import numpy as np\n'), ((39523, 39545), 'numpy.zeros_like', 'np.zeros_like', (['range_d'], {}), '(range_d)\n', (39536, 39545), True, 'import numpy as np\n'), ((8656, 8681), 'numpy.arange', 'np.arange', (['(1)', '(n_layer + 1)'], {}), '(1, n_layer + 1)\n', (8665, 8681), True, 'import numpy as np\n'), ((14473, 14502), 'numpy.geomspace', 'np.geomspace', (['(0.0001)', '(10)', '(400)'], {}), '(0.0001, 10, 400)\n', (14485, 14502), True, 'import numpy as np\n'), ((14842, 14858), 'numpy.ones', 'np.ones', (['n_layer'], {}), '(n_layer)\n', (14849, 14858), 
True, 'import numpy as np\n'), ((17311, 17336), 'numpy.array', 'np.array', (['[107, 174, 214]'], {}), '([107, 174, 214])\n', (17319, 17336), True, 'import numpy as np\n'), ((17362, 17387), 'numpy.array', 'np.array', (['[120, 198, 121]'], {}), '([120, 198, 121])\n', (17370, 17387), True, 'import numpy as np\n'), ((17411, 17434), 'numpy.array', 'np.array', (['[222, 45, 38]'], {}), '([222, 45, 38])\n', (17419, 17434), True, 'import numpy as np\n'), ((25566, 25578), 'numpy.mean', 'np.mean', (['rho'], {}), '(rho)\n', (25573, 25578), True, 'import numpy as np\n'), ((31247, 31276), 'numpy.geomspace', 'np.geomspace', (['(0.0001)', '(10)', '(400)'], {}), '(0.0001, 10, 400)\n', (31259, 31276), True, 'import numpy as np\n'), ((35455, 35467), 'numpy.min', 'np.min', (['area'], {}), '(area)\n', (35461, 35467), True, 'import numpy as np\n'), ((35661, 35689), 'numpy.linspace', 'np.linspace', (['(0.67)', '(1.39)', '(400)'], {}), '(0.67, 1.39, 400)\n', (35672, 35689), True, 'import numpy as np\n'), ((38062, 38083), 'numpy.abs', 'np.abs', (['(T_MKZ - T_FKZ)'], {}), '(T_MKZ - T_FKZ)\n', (38068, 38083), True, 'import numpy as np\n'), ((5079, 5136), 'os.path.join', 'os.path.join', (['HH_G_file_dir', "('HH_G_%s.txt' % profile_name)"], {}), "(HH_G_file_dir, 'HH_G_%s.txt' % profile_name)\n", (5091, 5136), False, 'import os\n'), ((9293, 9332), 'numpy.column_stack', 'np.column_stack', (['(curves_expanded, tmp)'], {}), '((curves_expanded, tmp))\n', (9308, 9332), True, 'import numpy as np\n'), ((9939, 9996), 'os.path.join', 'os.path.join', (['HH_G_file_dir', "('HH_G_%s.txt' % profile_name)"], {}), "(HH_G_file_dir, 'HH_G_%s.txt' % profile_name)\n", (9951, 9996), False, 'import os\n'), ((14762, 14792), 'numpy.tile', 'np.tile', (['strain_', '(n_layer, 1)'], {}), '(strain_, (n_layer, 1))\n', (14769, 14792), True, 'import numpy as np\n'), ((14880, 14896), 'numpy.ones', 'np.ones', (['n_layer'], {}), '(n_layer)\n', (14887, 14896), True, 'import numpy as np\n'), ((18595, 18625), 
'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4.2, 6.0)'}), '(figsize=(4.2, 6.0))\n', (18605, 18625), True, 'import matplotlib.pyplot as plt\n'), ((18639, 18655), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (18650, 18655), True, 'import matplotlib.pyplot as plt\n'), ((19302, 19326), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'ls': '""":"""', 'lw': '(0.5)'}), "(ls=':', lw=0.5)\n", (19310, 19326), True, 'import matplotlib.pyplot as plt\n'), ((19504, 19569), 'matplotlib.pyplot.plot', 'plt.plot', (['strain_j', '(T_HH / 1000.0)'], {'c': 'muted_red', 'lw': 'lw', 'label': '"""HH"""'}), "(strain_j, T_HH / 1000.0, c=muted_red, lw=lw, label='HH')\n", (19512, 19569), True, 'import matplotlib.pyplot as plt\n'), ((19654, 19680), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Stress [kPa]"""'], {}), "('Stress [kPa]')\n", (19664, 19680), True, 'import matplotlib.pyplot as plt\n'), ((19750, 19778), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""'}), "(loc='upper left')\n", (19760, 19778), True, 'import matplotlib.pyplot as plt\n'), ((19791, 19998), 'matplotlib.pyplot.title', 'plt.title', (['("""$V_S$ = %.1f m/s, $G_{\\\\max}$ = %.3f MPa,\n$\\\\tau_{\\\\mathrm{ff}}$ = %.3f kPa, $\\\\gamma_{\\\\mathrm{ref}}$ = %.3f%%"""\n % (Vs[j], Gmax[j] / 1000000.0, Tmax[j] / 1000.0, gamma_ref[j] * 100))'], {}), '(\n """$V_S$ = %.1f m/s, $G_{\\\\max}$ = %.3f MPa,\n$\\\\tau_{\\\\mathrm{ff}}$ = %.3f kPa, $\\\\gamma_{\\\\mathrm{ref}}$ = %.3f%%"""\n % (Vs[j], Gmax[j] / 1000000.0, Tmax[j] / 1000.0, gamma_ref[j] * 100))\n', (19800, 19998), True, 'import matplotlib.pyplot as plt\n'), ((20054, 20070), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (20065, 20070), True, 'import matplotlib.pyplot as plt\n'), ((20335, 20359), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'ls': '""":"""', 'lw': '(0.5)'}), "(ls=':', lw=0.5)\n", (20343, 20359), True, 'import matplotlib.pyplot as plt\n'), ((20532, 20580), 
'matplotlib.pyplot.plot', 'plt.plot', (['strain_j', 'GGmax_HH'], {'c': 'muted_red', 'lw': 'lw'}), '(strain_j, GGmax_HH, c=muted_red, lw=lw)\n', (20540, 20580), True, 'import matplotlib.pyplot as plt\n'), ((20666, 20693), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$G/G_{\\\\max}$"""'], {}), "('$G/G_{\\\\max}$')\n", (20676, 20693), True, 'import matplotlib.pyplot as plt\n'), ((20705, 20729), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Strain [%]"""'], {}), "('Strain [%]')\n", (20715, 20729), True, 'import matplotlib.pyplot as plt\n'), ((20799, 20962), 'matplotlib.pyplot.title', 'plt.title', (['("""$\\\\mu$ = %.3f, a = %.1f, $\\\\gamma_{\\\\mathrm{t}}$ = %.4f%%\nd = %.4f, $p\'_{\\\\mathrm{m0}}$ = %.2f kPa"""\n % (mu[j], a, gamma_t * 100, d, p0[j]))'], {}), '(\n """$\\\\mu$ = %.3f, a = %.1f, $\\\\gamma_{\\\\mathrm{t}}$ = %.4f%%\nd = %.4f, $p\'_{\\\\mathrm{m0}}$ = %.2f kPa"""\n % (mu[j], a, gamma_t * 100, d, p0[j]))\n', (20808, 20962), True, 'import matplotlib.pyplot as plt\n'), ((23026, 23057), 'numpy.max', 'np.max', (['[sigma_v0[j], sigma_h0]'], {}), '([sigma_v0[j], sigma_h0])\n', (23032, 23057), True, 'import numpy as np\n'), ((23108, 23139), 'numpy.min', 'np.min', (['[sigma_v0[j], sigma_h0]'], {}), '([sigma_v0[j], sigma_h0])\n', (23114, 23139), True, 'import numpy as np\n'), ((31818, 31827), 'numpy.log', 'np.log', (['N'], {}), '(N)\n', (31824, 31827), True, 'import numpy as np\n'), ((35847, 35859), 'numpy.min', 'np.min', (['area'], {}), '(area)\n', (35853, 35859), True, 'import numpy as np\n'), ((35990, 36019), 'numpy.linspace', 'np.linspace', (['(0.67)', '(1.39)', '(1000)'], {}), '(0.67, 1.39, 1000)\n', (36001, 36019), True, 'import numpy as np\n'), ((39692, 39733), 'numpy.geomspace', 'np.geomspace', (['gamma_t_LB', 'gamma_t_UB', '(200)'], {}), '(gamma_t_LB, gamma_t_UB, 200)\n', (39704, 39733), True, 'import numpy as np\n'), ((39795, 39816), 'numpy.abs', 'np.abs', (['(T_MKZ - T_FKZ)'], {}), '(T_MKZ - T_FKZ)\n', (39801, 39816), True, 'import numpy as 
np\n'), ((39974, 40017), 'numpy.abs', 'np.abs', (['(T_MKZ[:copt + 1] - T_FKZ[:copt + 1])'], {}), '(T_MKZ[:copt + 1] - T_FKZ[:copt + 1])\n', (39980, 40017), True, 'import numpy as np\n'), ((18703, 18791), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['strain_j', '(sigma[:, j] / 1000.0)'], {'c': 'muted_blue', 'lw': '(lw * 2.5)', 'label': '"""MKZ"""'}), "(strain_j, sigma[:, j] / 1000.0, c=muted_blue, lw=lw * 2.5,\n label='MKZ')\n", (18715, 18791), True, 'import matplotlib.pyplot as plt\n'), ((18851, 18936), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['strain_j', '(T_FKZ / 1000.0)'], {'c': 'muted_green', 'lw': '(lw * 1.75)', 'label': '"""FKZ"""'}), "(strain_j, T_FKZ / 1000.0, c=muted_green, lw=lw * 1.75, label='FKZ'\n )\n", (18863, 18936), True, 'import matplotlib.pyplot as plt\n'), ((18992, 19117), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['strain_j', '(sigma[:, j] / 1000.0)'], {'c': 'muted_blue', 'marker': '"""o"""', 'ls': '"""-"""', 'lw': '(lw * 2.5)', 'label': '"""Given $G/G_{\\\\max}$"""'}), "(strain_j, sigma[:, j] / 1000.0, c=muted_blue, marker='o', ls=\n '-', lw=lw * 2.5, label='Given $G/G_{\\\\max}$')\n", (19004, 19117), True, 'import matplotlib.pyplot as plt\n'), ((19183, 19268), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['strain_j', '(T_FKZ / 1000.0)'], {'c': 'muted_green', 'lw': '(lw * 1.75)', 'label': '"""FKZ"""'}), "(strain_j, T_FKZ / 1000.0, c=muted_green, lw=lw * 1.75, label='FKZ'\n )\n", (19195, 19268), True, 'import matplotlib.pyplot as plt\n'), ((19611, 19621), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (19619, 19621), True, 'import matplotlib.pyplot as plt\n'), ((19702, 19718), 'numpy.min', 'np.min', (['strain_j'], {}), '(strain_j)\n', (19708, 19718), True, 'import numpy as np\n'), ((19720, 19736), 'numpy.max', 'np.max', (['strain_j'], {}), '(strain_j)\n', (19726, 19736), True, 'import numpy as np\n'), ((20118, 20180), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['strain_j', 'GGmax[:, j]'], {'c': 'muted_blue', 'lw': 
'(lw * 2.5)'}), '(strain_j, GGmax[:, j], c=muted_blue, lw=lw * 2.5)\n', (20130, 20180), True, 'import matplotlib.pyplot as plt\n'), ((20213, 20299), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['strain_j', 'GGmax[:, j]'], {'c': 'muted_blue', 'ls': '"""-"""', 'marker': '"""o"""', 'lw': '(lw * 2.5)'}), "(strain_j, GGmax[:, j], c=muted_blue, ls='-', marker='o', lw=lw *\n 2.5)\n", (20225, 20299), True, 'import matplotlib.pyplot as plt\n'), ((20623, 20633), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (20631, 20633), True, 'import matplotlib.pyplot as plt\n'), ((20751, 20767), 'numpy.min', 'np.min', (['strain_j'], {}), '(strain_j)\n', (20757, 20767), True, 'import numpy as np\n'), ((20769, 20785), 'numpy.max', 'np.max', (['strain_j'], {}), '(strain_j)\n', (20775, 20785), True, 'import numpy as np\n'), ((28834, 28849), 'numpy.deg2rad', 'np.deg2rad', (['phi'], {}), '(phi)\n', (28844, 28849), True, 'import numpy as np\n'), ((28869, 28884), 'numpy.deg2rad', 'np.deg2rad', (['phi'], {}), '(phi)\n', (28879, 28884), True, 'import numpy as np\n'), ((36238, 36250), 'numpy.min', 'np.min', (['area'], {}), '(area)\n', (36244, 36250), True, 'import numpy as np\n'), ((40036, 40058), 'numpy.linalg.norm', 'np.linalg.norm', (['diff_T'], {}), '(diff_T)\n', (40050, 40058), True, 'import numpy as np\n'), ((19349, 19365), 'numpy.min', 'np.min', (['strain_j'], {}), '(strain_j)\n', (19355, 19365), True, 'import numpy as np\n'), ((19367, 19383), 'numpy.max', 'np.max', (['strain_j'], {}), '(strain_j)\n', (19373, 19383), True, 'import numpy as np\n'), ((19407, 19435), 'numpy.array', 'np.array', (['[Tmax[j], Tmax[j]]'], {}), '([Tmax[j], Tmax[j]])\n', (19415, 19435), True, 'import numpy as np\n'), ((21226, 21297), 'os.path.join', 'os.path.join', (['fig_output_dir', "('Stress_GGmax_of_Layer_#%d.png' % (j + 1))"], {}), "(fig_output_dir, 'Stress_GGmax_of_Layer_#%d.png' % (j + 1))\n", (21238, 21297), False, 'import os\n'), ((23402, 23420), 'numpy.deg2rad', 'np.deg2rad', (['phi[j]'], {}), 
'(phi[j])\n', (23412, 23420), True, 'import numpy as np\n'), ((32766, 32777), 'numpy.log', 'np.log', (['frq'], {}), '(frq)\n', (32772, 32777), True, 'import numpy as np\n'), ((23330, 23348), 'numpy.deg2rad', 'np.deg2rad', (['phi[j]'], {}), '(phi[j])\n', (23340, 23348), True, 'import numpy as np\n'), ((20447, 20471), 'numpy.abs', 'np.abs', (['(strain_j / 100.0)'], {}), '(strain_j / 100.0)\n', (20453, 20471), True, 'import numpy as np\n'), ((27247, 27256), 'numpy.log', 'np.log', (['z'], {}), '(z)\n', (27253, 27256), True, 'import numpy as np\n'), ((32444, 32485), 'numpy.log', 'np.log', (['((gamma + gamma_r[i]) / gamma_r[i])'], {}), '((gamma + gamma_r[i]) / gamma_r[i])\n', (32450, 32485), True, 'import numpy as np\n')] |
#
# Created on 2020/2/25
#
import os
import sys
sys.path.append("..")
import cv2 as cv
from nets import get_model
from nets.network import *
class Mtldesc(object):
def __init__(self, **config):
self.name = 'MTLDesc'
self.config = {
"detection_threshold": 0.9,
"nms_dist": 4,
"dim": 128,
"nms_radius": 4,
"border_remove": 4,
}
self.config.update(config)
self.detection_threshold = self.config["detection_threshold"]
self.nms_dist = self.config["nms_dist"]
if torch.cuda.is_available():
print('gpu is available, set device to cuda !')
self.device = torch.device('cuda:0')
self.gpu_count = 1
else:
print('gpu is not available, set device to cpu !')
self.device = torch.device('cpu')
# 初始化模型
self.model_name = self.config['backbone'].split('.')[-1]
model = get_model(self.config['backbone'])()
self.model = model.to(self.device)
print("Initialize " +str(self.model_name))
if self.config['ckpt_name'] == '':
assert False
self.load(self.config['weight_path'],self.config['ckpt_name'],self.config['weights_id'])
def _load_model_params(self, ckpt_file, previous_model):
if ckpt_file is None:
print("Please input correct checkpoint file dir!")
return False
print("Load pretrained model %s " % ckpt_file)
model_dict = previous_model.state_dict()
pretrain_dict = torch.load(ckpt_file, map_location=self.device)
model_dict.update(pretrain_dict)
previous_model.load_state_dict(model_dict)
return previous_model
def load(self, weight_path,checkpoint_root,model_idx):
backbone_ckpt = os.path.join(weight_path,checkpoint_root, "model_"+str(model_idx)+".pt")
self.model = self._load_model_params(backbone_ckpt, self.model)
total = sum([param.nelement() for param in self.model.parameters()])
def load_split(self, model_ckpt, extractor_ckpt):
self.model = self._load_model_params(model_ckpt, self.model)
def _generate_predict_point(self, heatmap, height, width):
xs, ys = np.where(heatmap >= self.config['detection_threshold'])
pts = np.zeros((3, len(xs))) # Populate point data sized 3xN.
if len(xs) > 0:
pts[0, :] = ys
pts[1, :] = xs
pts[2, :] = heatmap[xs, ys]
if self.config['nms_radius']:
pts, _ = self.nms_fast(
pts, height, width, dist_thresh=self.config['nms_radius'])
inds = np.argsort(pts[2, :])
pts = pts[:, inds[::-1]] # Sort by confidence.
# Remove points along border.
bord = self.config['border_remove']
toremoveW = np.logical_or(pts[0, :] < bord, pts[0, :] >= (width-bord))
toremoveH = np.logical_or(pts[1, :] < bord, pts[1, :] >= (height-bord))
toremove = np.logical_or(toremoveW, toremoveH)
pts = pts[:, ~toremove]
pts = pts.transpose()
point = pts[:, :2][:, ::-1]
score = pts[:, 2]
return point, score
def nms_fast(self, in_corners, H, W, dist_thresh):
"""
Run a faster approximate Non-Max-Suppression on numpy corners shaped:
3xN [x_i,y_i,conf_i]^T
Algo summary: Create a grid sized HxW. Assign each corner location a 1, rest
are zeros. Iterate through all the 1's and convert them either to -1 or 0.
Suppress points by setting nearby values to 0.
Grid Value Legend:
-1 : Kept.
0 : Empty or suppressed.
1 : To be processed (converted to either kept or supressed).
NOTE: The NMS first rounds points to integers, so NMS distance might not
be exactly dist_thresh. It also assumes points are within image boundaries.
Inputs
in_corners - 3xN numpy array with corners [x_i, y_i, confidence_i]^T.
H - Image height.
W - Image width.
dist_thresh - Distance to suppress, measured as an infinty norm distance.
Returns
nmsed_corners - 3xN numpy matrix with surviving corners.
nmsed_inds - N length numpy vector with surviving corner indices.
"""
grid = np.zeros((H, W)).astype(int) # Track NMS data.
inds = np.zeros((H, W)).astype(int) # Store indices of points.
# Sort by confidence and round to nearest int.
inds1 = np.argsort(-in_corners[2,:])
corners = in_corners[:,inds1]
rcorners = corners[:2,:].round().astype(int) # Rounded corners.
# Check for edge case of 0 or 1 corners.
if rcorners.shape[1] == 0:
return np.zeros((3,0)).astype(int), np.zeros(0).astype(int)
if rcorners.shape[1] == 1:
out = np.vstack((rcorners, in_corners[2])).reshape(3,1)
return out, np.zeros((1)).astype(int)
# Initialize the grid.
for i, rc in enumerate(rcorners.T):
grid[rcorners[1,i], rcorners[0,i]] = 1
inds[rcorners[1,i], rcorners[0,i]] = i
# Pad the border of the grid, so that we can NMS points near the border.
pad = dist_thresh
grid = np.pad(grid, ((pad,pad), (pad,pad)), mode='constant')
# Iterate through points, highest to lowest conf, suppress neighborhood.
count = 0
for i, rc in enumerate(rcorners.T):
# Account for top and left padding.
pt = (rc[0]+pad, rc[1]+pad)
if grid[pt[1], pt[0]] == 1: # If not yet suppressed.
grid[pt[1]-pad:pt[1]+pad+1, pt[0]-pad:pt[0]+pad+1] = 0
grid[pt[1], pt[0]] = -1
count += 1
# Get all surviving -1's and return sorted array of remaining corners.
keepy, keepx = np.where(grid==-1)
keepy, keepx = keepy - pad, keepx - pad
inds_keep = inds[keepy, keepx]
out = corners[:, inds_keep]
values = out[-1, :]
inds2 = np.argsort(-values)
out = out[:, inds2]
out_inds = inds1[inds_keep[inds2]]
return out, out_inds
def predict(self, img, keys="*"):
"""
获取一幅灰度图像对应的特征点及其描述子
Args:
img: [h,w] 灰度图像,要求h,w能被16整除
Returns:
point: [n,2] 特征点,输出点以y,x为顺序
descriptor: [n,128] 描述子
"""
# switch to eval mode
self.model.eval()
# self.extractor.eval()
shape = img.shape
assert shape[2] == 3 # must be rgb
org_h, org_w = shape[0], shape[1]
# rescale to 16*
if org_h % 16 != 0:
scale_h = int(np.round(org_h / 16.) * 16.)
sh = org_h / scale_h
else:
scale_h = org_h
sh = 1.0
if org_w % 16 != 0:
scale_w = int(np.round(org_w / 16.) * 16.)
sw = org_w / scale_w
else:
scale_w = org_w
sw = 1.0
img = cv.resize(img, dsize=(scale_w, scale_h), interpolation=cv.INTER_LINEAR)
# to torch and scale to [-1,1]
img = torch.from_numpy(img).to(torch.float).unsqueeze(dim=0).permute((0, 3, 1, 2)).to(self.device)
img = (img / 255.) * 2. - 1.
# detector
heatmap, feature,weightmap = self.model(img)
#heatmap2=f.interpolate(weightmap, heatmap.shape[2:], mode='bilinear')
prob = torch.sigmoid(heatmap)
#prob2 = torch.sigmoid(heatmap2)
#prob=(prob+prob2)/2
# 得到对应的预测点
prob = prob.detach().cpu().numpy()
prob = prob[0, 0]
point, score = self._generate_predict_point(prob, height=scale_h, width=scale_w) # [n,2]
#weightmap=heatmap
# descriptor
desp = self._generate_combined_descriptor_fast(point, feature,weightmap, scale_h, scale_w)
#print(weightmap)
#exit(0)
# scale point back to the original scale and change to x-y
point = (point * np.array((sh, sw)))[:, ::-1]
predictions = {
"shape": shape,
"keypoints": point,
"descriptors": desp,
"scores": score,
}
if keys != '*':
predictions = {k: predictions[k] for k in keys}
return predictions
def generate_descriptor(self, input_image, point, image_shape):
"""
给定点,获取描述子
"""
# switch to eval mode
self.model.eval()
# self.extractor.eval()
img = input_image
shape = img.shape
if len(shape) == 3:
assert shape[2] == 1 # only support grayscale image
img = img[:, :, 0]
org_h, org_w = shape[0], shape[1]
# rescale to 16*
if org_h % 16 != 0:
scale_h = np.round(org_h / 16.) * 16.
else:
scale_h = org_h
if org_w % 16 != 0:
scale_w = np.round(org_w / 16.) * 16.
else:
scale_w = org_w
img = cv.resize(img, dsize=(int(scale_w), int(scale_h)), interpolation=cv.INTER_LINEAR)
# to torch and scale to [-1,1]
img = torch.from_numpy(img).to(torch.float).unsqueeze(dim=0).unsqueeze(dim=0).to(self.device)
img = (img / 255.) * 2. - 1.
# detector
_, c1, c2, c3, c4 = self.model(img)
# descriptor
descriptor = self._generate_combined_descriptor_fast(
point[:, ::-1], c1, c2, c3, c4, image_shape[0], image_shape[1]
)
return descriptor
def _generate_combined_descriptor_fast(self, point, feature,weight_map, height, width):
"""
用多层级的组合特征构造描述子
Args:
point: [n,2] 顺序是y,x
c1,c2,c3,c4: 分别对应resnet4个block输出的特征,batchsize都是1
Returns:
desp: [n,dim]
"""
point = torch.from_numpy(point[:, ::-1].copy()).to(torch.float).to(self.device)
# 归一化采样坐标到[-1,1]
point = point * 2. / torch.tensor((width-1, height-1), dtype=torch.float, device=self.device) - 1
point = point.unsqueeze(dim=0).unsqueeze(dim=2) # [1,n,1,2]
feature_pair = f.grid_sample(feature, point, mode="bilinear")[:, :, :, 0].transpose(1, 2)[0]
weight_pair = f.grid_sample(weight_map, point, mode="bilinear", padding_mode="border")[:, :, :, 0].transpose(1, 2)[0]#.squeeze(dim=1)
desp_pair = feature_pair / torch.norm(feature_pair, p=2, dim=1, keepdim=True)
#desp=desp_pair
desp = desp_pair * weight_pair.expand_as(desp_pair)
#desp = desp / torch.norm(desp, p=2, dim=1, keepdim=True)
desp = desp.detach().cpu().numpy()
return desp
def __call__(self, *args, **kwargs):
raise NotImplementedError
def __enter__(self):
return self
def __exit__(self, *args):
pass
| [
"cv2.resize",
"nets.get_model",
"sys.path.append"
] | [((49, 70), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (64, 70), False, 'import sys\n'), ((7059, 7130), 'cv2.resize', 'cv.resize', (['img'], {'dsize': '(scale_w, scale_h)', 'interpolation': 'cv.INTER_LINEAR'}), '(img, dsize=(scale_w, scale_h), interpolation=cv.INTER_LINEAR)\n', (7068, 7130), True, 'import cv2 as cv\n'), ((972, 1006), 'nets.get_model', 'get_model', (["self.config['backbone']"], {}), "(self.config['backbone'])\n", (981, 1006), False, 'from nets import get_model\n')] |
from peewee import CharField, IntegerField
from core.db.db import BaseModel
class Objects(BaseModel):
name = CharField(unique=True)
is_archive = IntegerField()
class Meta:
table_name = 'objects'
| [
"peewee.CharField",
"peewee.IntegerField"
] | [((115, 137), 'peewee.CharField', 'CharField', ([], {'unique': '(True)'}), '(unique=True)\n', (124, 137), False, 'from peewee import CharField, IntegerField\n'), ((155, 169), 'peewee.IntegerField', 'IntegerField', ([], {}), '()\n', (167, 169), False, 'from peewee import CharField, IntegerField\n')] |
import time
import functools
import numpy as np
def time_compute(fun):
@functools.wraps(fun)
def wrapper(*args, **kwargs):
start_time = time.time()
tmp = fun(*args, **kwargs)
end_time = time.time()
print('{} cost {} s.'.format(fun.__name__, end_time-start_time))
return tmp
return wrapper
def time_avg_compute(fun):
@functools.wraps(fun)
def wrapper(*args, **kwargs):
times = []
tmp = None
for _ in range(20):
start_time = time.time()
tmp = fun(*args, **kwargs)
end_time = time.time()
times.append(end_time-start_time)
times = np.array(times)
times = times[5:]
print('{} avg cost {} s.'.format(fun.__name__, times.mean()))
return tmp
return wrapper
| [
"numpy.array",
"time.time",
"functools.wraps"
] | [((78, 98), 'functools.wraps', 'functools.wraps', (['fun'], {}), '(fun)\n', (93, 98), False, 'import functools\n'), ((378, 398), 'functools.wraps', 'functools.wraps', (['fun'], {}), '(fun)\n', (393, 398), False, 'import functools\n'), ((154, 165), 'time.time', 'time.time', ([], {}), '()\n', (163, 165), False, 'import time\n'), ((220, 231), 'time.time', 'time.time', ([], {}), '()\n', (229, 231), False, 'import time\n'), ((673, 688), 'numpy.array', 'np.array', (['times'], {}), '(times)\n', (681, 688), True, 'import numpy as np\n'), ((524, 535), 'time.time', 'time.time', ([], {}), '()\n', (533, 535), False, 'import time\n'), ((598, 609), 'time.time', 'time.time', ([], {}), '()\n', (607, 609), False, 'import time\n')] |
#!/usr/bin/env python
"""A BlobStore proxy that writes to two BlobStores."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import logging
import threading
import time
from future.moves import queue
from typing import Dict, Iterable, Optional, Text
from grr_response_core import config
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util import precondition
from grr_response_core.stats import stats_collector_instance
from grr_response_server import blob_store
from grr_response_server.rdfvalues import objects as rdf_objects
# Maximum queue length, where each queue entry can consist of multiple blobs.
# Thus the number of enqueued blobs can be considerably bigger. This only
# serves as a basic measure to prevent unbounded memory growth.
_SECONDARY_WRITE_QUEUE_MAX_LENGTH = 10
def _InstantiateBlobStore(name):
try:
cls = blob_store.REGISTRY[name]
except KeyError:
raise ValueError("No blob store %s found." % name)
return cls()
def _WriteBlobs(bs,
blobs, name):
"""Writes blobs into blob_store and tracks latency and error metrics."""
start_time = time.time()
cls_name = compatibility.GetName(type(bs))
try:
bs.WriteBlobs(blobs)
except Exception: # pylint: disable=broad-except
stats_collector_instance.Get().IncrementCounter(
"dual_blob_store_error_count",
delta=len(blobs),
fields=[name, cls_name])
raise
stats_collector_instance.Get().RecordEvent(
"dual_blob_store_write_latency",
time.time() - start_time,
fields=[name, cls_name])
stats_collector_instance.Get().IncrementCounter(
"dual_blob_store_success_count",
delta=len(blobs),
fields=[name, cls_name])
class DualBlobStore(blob_store.BlobStore):
"""A BlobStore proxy that writes to two BlobStores.
This class is backed by both a primary and secondary BlobStore. Requests to
read and write blobs are immediately processed by the primary, return as soon
as the primary has finished processing, and only raise if the primary raises.
Additionally, blobs are concurrently, non-blockingly written to the secondary
from a background thread. If the secondary processes blobs slower than the
primary, writes are queued and delayed. Writes to the secondary can be
discarded, if the number of queued writes is too high. Writes to the primary
are never discarded or delayed.
"""
def __init__(self,
primary = None,
secondary = None):
"""Instantiates a new DualBlobStore and its primary and secondary BlobStore.
Args:
primary: The class name of the primary blob store implementation
secondary: The class name of the secondary blob store implementation
"""
if primary is None:
primary = config.CONFIG["DualBlobStore.primary_implementation"]
if secondary is None:
secondary = config.CONFIG["DualBlobStore.secondary_implementation"]
precondition.AssertType(primary, Text)
precondition.AssertType(secondary, Text)
self._primary = _InstantiateBlobStore(primary)
self._secondary = _InstantiateBlobStore(secondary)
self._queue = queue.Queue(_SECONDARY_WRITE_QUEUE_MAX_LENGTH)
self._thread_running = True
self._thread = threading.Thread(target=self._WriteBlobsIntoSecondary)
self._thread.daemon = True
self._thread.start()
def WriteBlobs(self,
blob_id_data_map):
"""Creates or overwrites blobs."""
try:
self._queue.put_nowait(dict(blob_id_data_map))
except queue.Full:
stats_collector_instance.Get().IncrementCounter(
"dual_blob_store_discard_count",
delta=len(blob_id_data_map),
fields=["secondary",
compatibility.GetName(type(self._secondary))])
_WriteBlobs(self._primary, blob_id_data_map, "primary")
def ReadBlobs(self, blob_ids
):
"""Reads all blobs, specified by blob_ids, returning their contents."""
return self._primary.ReadBlobs(blob_ids)
def ReadBlob(self, blob_id):
"""Reads the blob contents, identified by the given BlobID."""
return self._primary.ReadBlob(blob_id)
def CheckBlobExists(self, blob_id):
"""Checks if a blob with a given BlobID exists."""
return self._primary.CheckBlobExists(blob_id)
def CheckBlobsExist(self, blob_ids
):
"""Checks if blobs for the given identifiers already exist."""
return self._primary.CheckBlobsExist(blob_ids)
def _WriteBlobsIntoSecondary(self):
"""Loops endlessly, writing queued blobs to the secondary."""
while self._thread_running:
blobs = self._queue.get()
try:
_WriteBlobs(self._secondary, blobs, "secondary")
except Exception as e: # pylint: disable=broad-except
# Failed writes to secondary are not critical, because primary is read
# from.
logging.warn(e)
self._queue.task_done()
| [
"logging.warn",
"future.moves.queue.Queue",
"grr_response_core.stats.stats_collector_instance.Get",
"threading.Thread",
"grr_response_core.lib.util.precondition.AssertType",
"time.time"
] | [((1193, 1204), 'time.time', 'time.time', ([], {}), '()\n', (1202, 1204), False, 'import time\n'), ((3011, 3049), 'grr_response_core.lib.util.precondition.AssertType', 'precondition.AssertType', (['primary', 'Text'], {}), '(primary, Text)\n', (3034, 3049), False, 'from grr_response_core.lib.util import precondition\n'), ((3054, 3094), 'grr_response_core.lib.util.precondition.AssertType', 'precondition.AssertType', (['secondary', 'Text'], {}), '(secondary, Text)\n', (3077, 3094), False, 'from grr_response_core.lib.util import precondition\n'), ((3220, 3266), 'future.moves.queue.Queue', 'queue.Queue', (['_SECONDARY_WRITE_QUEUE_MAX_LENGTH'], {}), '(_SECONDARY_WRITE_QUEUE_MAX_LENGTH)\n', (3231, 3266), False, 'from future.moves import queue\n'), ((3318, 3372), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._WriteBlobsIntoSecondary'}), '(target=self._WriteBlobsIntoSecondary)\n', (3334, 3372), False, 'import threading\n'), ((1499, 1529), 'grr_response_core.stats.stats_collector_instance.Get', 'stats_collector_instance.Get', ([], {}), '()\n', (1527, 1529), False, 'from grr_response_core.stats import stats_collector_instance\n'), ((1588, 1599), 'time.time', 'time.time', ([], {}), '()\n', (1597, 1599), False, 'import time\n'), ((1647, 1677), 'grr_response_core.stats.stats_collector_instance.Get', 'stats_collector_instance.Get', ([], {}), '()\n', (1675, 1677), False, 'from grr_response_core.stats import stats_collector_instance\n'), ((1339, 1369), 'grr_response_core.stats.stats_collector_instance.Get', 'stats_collector_instance.Get', ([], {}), '()\n', (1367, 1369), False, 'from grr_response_core.stats import stats_collector_instance\n'), ((4945, 4960), 'logging.warn', 'logging.warn', (['e'], {}), '(e)\n', (4957, 4960), False, 'import logging\n'), ((3619, 3649), 'grr_response_core.stats.stats_collector_instance.Get', 'stats_collector_instance.Get', ([], {}), '()\n', (3647, 3649), False, 'from grr_response_core.stats import stats_collector_instance\n')] |
import numpy as np
arr1 = np.ones (2, dtype=float)
print("1D Array with ones ")
print(arr1)
#[1. 1.] | [
"numpy.ones"
] | [((27, 50), 'numpy.ones', 'np.ones', (['(2)'], {'dtype': 'float'}), '(2, dtype=float)\n', (34, 50), True, 'import numpy as np\n')] |
#!/usr/bin/python3
import json
import os
import subprocess
# Icons for the animation
sleep = ""
icons_base = ["","","","",""]
# Path to the script
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
# Run the script for getting the CPU usage
subprocess.Popen([os.path.join(__location__, "speedcpu.py")], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
with open(os.path.join(__location__, "data.json"),'r+') as file:
data = json.load(file)
cpu_usage = data["SpeedClock"]
fotograma = data["Photogram"]
if cpu_usage<10:
print(sleep + " " + str(round(cpu_usage)) + "%")
#If you want to change the speed of the animation, you can change the numbers below
else:
# You can change the interval of the different speeds.
if cpu_usage>=10 and cpu_usage<40:
keys = [0,0,0,0, 1,1,1,1, 2,2,2,2, 3,3,3,3, 4,4,4,4]
elif cpu_usage>=40 and cpu_usage<70:
keys = [0,0, 1,1, 2,2, 3,3, 4,4]
elif cpu_usage>=70 and cpu_usage<100:
keys = [0, 1, 2, 3, 4]
#print(keys[fotograma%len(keys)])
print(icons_base[keys[fotograma%len(keys)]] + " " + str(round(cpu_usage)) + "%")
data["Photogram"] = (fotograma + 1)%20
file.seek(0)
json.dump(data, file, indent=4)
file.truncate()
| [
"os.path.join",
"os.getcwd",
"os.path.dirname",
"json.load",
"json.dump"
] | [((485, 500), 'json.load', 'json.load', (['file'], {}), '(file)\n', (494, 500), False, 'import json\n'), ((1308, 1339), 'json.dump', 'json.dump', (['data', 'file'], {'indent': '(4)'}), '(data, file, indent=4)\n', (1317, 1339), False, 'import json\n'), ((205, 216), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (214, 216), False, 'import os\n'), ((218, 243), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (233, 243), False, 'import os\n'), ((308, 349), 'os.path.join', 'os.path.join', (['__location__', '"""speedcpu.py"""'], {}), "(__location__, 'speedcpu.py')\n", (320, 349), False, 'import os\n'), ((414, 453), 'os.path.join', 'os.path.join', (['__location__', '"""data.json"""'], {}), "(__location__, 'data.json')\n", (426, 453), False, 'import os\n')] |
import matplotlib.pyplot as plt
import numpy as np
import operator
import os
import sys
import tensorflow as tf
# 256*256 映射到 0~1 区间
def Transform(input):
if not (operator.eq(input.shape, (256, 256))):
print("not matched")
sys.exit(1)
max = np.max(input)
min = np.min(input)
for i in range(256):
for j in range(256):
input[i][j] = (input[i][j] - min) / (max - min)
return input
##img:256x256的数组,i的类型为str!!!!
def Visualize(img, i):
plt.matshow(img, cmap=plt.get_cmap('RdBu'), alpha=0.5)
exist = os.path.exists('./Uncertainty')
if not exist:
os.makedirs('./Uncertainty')
plt.savefig('./Uncertainty/test_'+i+'.jpg')
plt.show()
# Test
# test_input = np.random.rand(128, 256)
test_input = np.ones((256,256),dtype=float)
Visualize(test_input, '1')
# Visualize(Transform(test_input), '1')
print(test_input)
| [
"os.path.exists",
"matplotlib.pyplot.savefig",
"numpy.ones",
"sys.exit",
"os.makedirs",
"numpy.max",
"numpy.min",
"operator.eq",
"matplotlib.pyplot.get_cmap",
"matplotlib.pyplot.show"
] | [((774, 806), 'numpy.ones', 'np.ones', (['(256, 256)'], {'dtype': 'float'}), '((256, 256), dtype=float)\n', (781, 806), True, 'import numpy as np\n'), ((268, 281), 'numpy.max', 'np.max', (['input'], {}), '(input)\n', (274, 281), True, 'import numpy as np\n'), ((292, 305), 'numpy.min', 'np.min', (['input'], {}), '(input)\n', (298, 305), True, 'import numpy as np\n'), ((563, 594), 'os.path.exists', 'os.path.exists', (['"""./Uncertainty"""'], {}), "('./Uncertainty')\n", (577, 594), False, 'import os\n'), ((654, 701), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('./Uncertainty/test_' + i + '.jpg')"], {}), "('./Uncertainty/test_' + i + '.jpg')\n", (665, 701), True, 'import matplotlib.pyplot as plt\n'), ((702, 712), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (710, 712), True, 'import matplotlib.pyplot as plt\n'), ((169, 205), 'operator.eq', 'operator.eq', (['input.shape', '(256, 256)'], {}), '(input.shape, (256, 256))\n', (180, 205), False, 'import operator\n'), ((245, 256), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (253, 256), False, 'import sys\n'), ((621, 649), 'os.makedirs', 'os.makedirs', (['"""./Uncertainty"""'], {}), "('./Uncertainty')\n", (632, 649), False, 'import os\n'), ((518, 538), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""RdBu"""'], {}), "('RdBu')\n", (530, 538), True, 'import matplotlib.pyplot as plt\n')] |
import string
def onlyletters(s):
s1 = []
for i in s:
if i not in string.punctuation: # проверяем посимвольно, есть ли в списке пунктуационных знаков
s1.append(i)
s2 = ''.join(s1) # соединяем массив символов в слово
return s2
words = (input().lower()).split() # преобразуем слова на этапе ввода
words_opt = [] # создаем оптимизированный список
for item in words:
words_opt.append(onlyletters(item))
count1 = words_opt.count('a') + words_opt.count('an') + words_opt.count('the')
print(f'Общее количество артиклей {count1}')
# my_02
words = (input().lower()).split()
count1 = words.count('a') + words.count('an') + words.count('the')
print(f'Общее количество артиклей {count1}')
# f01
print('Общее количество артиклей:', sum([1 for i in input().split() if i.lower() in ('a', 'an', 'the')]))
# f2
print('Общее количество артиклей:', len([i for i in input().split() if i.lower() in ('a', 'an', 'the')]))
# f3
print(f"Общее количество артиклей: {len([i for i in input().split() if i.lower() in ['a', 'an', 'the']])}")
# f4
s = input().lower().split()
count = [s.count('a'), s.count('an'), s.count('the')]
count = sum(count)
print(f'Общее количество артиклей: {count}')
# f5
ss = input().lower().split()
print('Общее количество артиклей:', sum(ss.count(article) for article in ('a', 'an', 'the')))
# f6
s, cnt = input().lower().split(), 0
articles = ['a', 'an', 'the']
for word in s:
if word in articles:
cnt += 1
print("Общее количество артиклей:", cnt)
# f7
# put your python code here
l = input().lower()
count = 0
for i in l.split(' '):
if i == 'a' or i == 'an' or i == 'the':
count += 1
print('Общее количество артиклей:', count)
# f8
from re import findall
txt = input().lower()
res = findall(r'\ba\b|\ban\b|\bthe\b', txt)
print(f"Общее количество артиклей: {len(res)}")
# f9
| [
"re.findall"
] | [((1768, 1810), 're.findall', 'findall', (['"""\\\\ba\\\\b|\\\\ban\\\\b|\\\\bthe\\\\b"""', 'txt'], {}), "('\\\\ba\\\\b|\\\\ban\\\\b|\\\\bthe\\\\b', txt)\n", (1775, 1810), False, 'from re import findall\n')] |
import tensorflow as tf
from keras.preprocessing.image import ImageDataGenerator
import numpy as np
from keras.preprocessing import image
import zipfile
import os
import pickle
from keras.models import model_from_json
from django.conf import settings
from django.conf import settings
def training():
zip_ref = zipfile.ZipFile("check.zip", 'r')
zip_ref.extractall("check/chest_xray")
zip_ref.close()
train_datagen = ImageDataGenerator(rescale = 1./255,shear_range = 0.2,zoom_range = 0.2,horizontal_flip = True)
training_set = train_datagen.flow_from_directory('check/chest_xray/train',target_size = (64, 64),batch_size = 32,class_mode = 'binary')
train_datagen = ImageDataGenerator(rescale = 1./255,shear_range = 0.2,zoom_range = 0.2,horizontal_flip = True)
test_set = train_datagen.flow_from_directory('check/chest_xray/test',target_size = (64, 64),batch_size = 32,class_mode = 'binary')
DESIRED_ACCURACY = 0.95
class myCallback(tf.keras.callbacks.Callback):
def on_epoch_end(self, epoch, logs={}):
if(logs.get('acc')>DESIRED_ACCURACY):
print("\nReached 99.9% accuracy so cancelling training!")
self.model.stop_training = True
callbacks = myCallback()
cnn = tf.keras.models.Sequential()
# Convolution
cnn.add(tf.keras.layers.Conv2D(filters=32, kernel_size=3, activation='relu', input_shape=[64, 64, 3]))
# Pooling
cnn.add(tf.keras.layers.MaxPool2D(pool_size=2, strides=2))
### Adding a second convolutional layer
cnn.add(tf.keras.layers.Conv2D(filters=32, kernel_size=3, activation='relu'))
##Pooling
cnn.add(tf.keras.layers.MaxPool2D(pool_size=2, strides=2))
# Flattening
cnn.add(tf.keras.layers.Flatten())
### Step 4 - Full Connection
cnn.add(tf.keras.layers.Dense(units=128, activation='relu'))
# Output layer
cnn.add(tf.keras.layers.Dense(units=1, activation='sigmoid'))
# Compiling the CNN
cnn.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
# Training the CNN on the Training set and evaluating it on the Test set
cnn.fit(x = training_set, validation_data = test_set, epochs = 1)
# serialize model to JSON
model_json = cnn.to_json()
with open("datasets/model_check.json", "w") as json_file:
json_file.write(model_json)
# serialize weights to HDF5
cnn.save_weights("datasets/model_check.h5")
print("Saved model to disk")
def predImageBlock(ob):
    """Run the persisted CNN on the image file referenced by *ob*.

    Loads the serialized architecture/weights from ``datasets/``, resizes the
    image to (64, 64), runs a single prediction and returns the raw
    prediction array.
    """
    img_path = os.path.abspath(ob.file.name)
    pixels = image.img_to_array(image.load_img(img_path, target_size=(64, 64)))
    batch = np.expand_dims(pixels, axis=0)
    # Rebuild the model from its serialized architecture, then attach weights.
    with open('datasets/model_check.json', 'r') as json_file:
        loaded_model = model_from_json(json_file.read())
    loaded_model.load_weights("datasets/model_check.h5")
    result = loaded_model.predict(batch)
    print("yes"*20, result)
    return result
# Script entry point: run the one-off training job when executed directly.
if __name__=="__main__":
    training()
    # pred1()
| [
"keras.preprocessing.image.img_to_array",
"tensorflow.keras.layers.Conv2D",
"zipfile.ZipFile",
"keras.preprocessing.image.ImageDataGenerator",
"keras.models.model_from_json",
"tensorflow.keras.layers.Dense",
"numpy.expand_dims",
"os.path.abspath",
"tensorflow.keras.layers.Flatten",
"tensorflow.ker... | [((316, 349), 'zipfile.ZipFile', 'zipfile.ZipFile', (['"""check.zip"""', '"""r"""'], {}), "('check.zip', 'r')\n", (331, 349), False, 'import zipfile\n'), ((434, 530), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)'}), '(rescale=1.0 / 255, shear_range=0.2, zoom_range=0.2,\n horizontal_flip=True)\n', (452, 530), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((690, 786), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1.0 / 255)', 'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)'}), '(rescale=1.0 / 255, shear_range=0.2, zoom_range=0.2,\n horizontal_flip=True)\n', (708, 786), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((1290, 1318), 'tensorflow.keras.models.Sequential', 'tf.keras.models.Sequential', ([], {}), '()\n', (1316, 1318), True, 'import tensorflow as tf\n'), ((2608, 2629), 'os.path.abspath', 'os.path.abspath', (['name'], {}), '(name)\n', (2623, 2629), False, 'import os\n'), ((2648, 2694), 'keras.preprocessing.image.load_img', 'image.load_img', (['fullpath'], {'target_size': '(64, 64)'}), '(fullpath, target_size=(64, 64))\n', (2662, 2694), False, 'from keras.preprocessing import image\n'), ((2719, 2749), 'keras.preprocessing.image.img_to_array', 'image.img_to_array', (['test_image'], {}), '(test_image)\n', (2737, 2749), False, 'from keras.preprocessing import image\n'), ((2767, 2801), 'numpy.expand_dims', 'np.expand_dims', (['test_image'], {'axis': '(0)'}), '(test_image, axis=0)\n', (2781, 2801), True, 'import numpy as np\n'), ((2997, 3031), 'keras.models.model_from_json', 'model_from_json', (['loaded_model_json'], {}), '(loaded_model_json)\n', (3012, 3031), False, 'from keras.models import model_from_json\n'), ((1351, 1448), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', ([], 
{'filters': '(32)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'input_shape': '[64, 64, 3]'}), "(filters=32, kernel_size=3, activation='relu',\n input_shape=[64, 64, 3])\n", (1373, 1448), True, 'import tensorflow as tf\n'), ((1474, 1523), 'tensorflow.keras.layers.MaxPool2D', 'tf.keras.layers.MaxPool2D', ([], {'pool_size': '(2)', 'strides': '(2)'}), '(pool_size=2, strides=2)\n', (1499, 1523), True, 'import tensorflow as tf\n'), ((1583, 1651), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', ([], {'filters': '(32)', 'kernel_size': '(3)', 'activation': '"""relu"""'}), "(filters=32, kernel_size=3, activation='relu')\n", (1605, 1651), True, 'import tensorflow as tf\n'), ((1681, 1730), 'tensorflow.keras.layers.MaxPool2D', 'tf.keras.layers.MaxPool2D', ([], {'pool_size': '(2)', 'strides': '(2)'}), '(pool_size=2, strides=2)\n', (1706, 1730), True, 'import tensorflow as tf\n'), ((1763, 1788), 'tensorflow.keras.layers.Flatten', 'tf.keras.layers.Flatten', ([], {}), '()\n', (1786, 1788), True, 'import tensorflow as tf\n'), ((1837, 1888), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', ([], {'units': '(128)', 'activation': '"""relu"""'}), "(units=128, activation='relu')\n", (1858, 1888), True, 'import tensorflow as tf\n'), ((1923, 1975), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', ([], {'units': '(1)', 'activation': '"""sigmoid"""'}), "(units=1, activation='sigmoid')\n", (1944, 1975), True, 'import tensorflow as tf\n')] |
from pyHalo.Rendering.SpatialDistributions.uniform import LensConeUniform
import numpy as np
from copy import deepcopy
from pyHalo.Rendering.MassFunctions.power_law import GeneralPowerLaw
from pyHalo.Rendering.rendering_class_base import RenderingClassBase
class TwoHaloContribution(RenderingClassBase):
    """
    This class adds correlated structure associated with the host dark matter halo. The amount of structure added is
    proportional to b * corr, where b is the halo bias as computed by Sheth and Tormen (1999) and corr is the
    matter-matter correlation function. Currently, this term is implemented as a rescaling of the background density by
    b * corr, where the product is the average value computed over 2*dz, where dz is the spacing of the redshift planes
    adjacent the redshift plane of the main deflector.
    """
    def __init__(self, keywords_master, halo_mass_function, geometry, lens_cosmo, lens_plane_redshifts, delta_z_list):
        """
        :param keywords_master: rendering keyword arguments; must contain the keys listed in
            keyword_parse_render plus 'cone_opening_angle'
        :param halo_mass_function: mass function class used here for the power-law index,
            the normalization per unit volume, and the two-halo boost
        :param geometry: lensing geometry class (used for comoving volume elements and kpc/arcsec)
        :param lens_cosmo: class holding the main deflector redshift (z_lens) and cosmology
        :param lens_plane_redshifts: redshifts of the lens planes
        :param delta_z_list: redshift spacing between adjacent lens planes
        """
        self._rendering_kwargs = self.keyword_parse_render(keywords_master)
        self.halo_mass_function = halo_mass_function
        self.geometry = geometry
        self.lens_cosmo = lens_cosmo
        # Halos are placed uniformly inside the double cone opening angle.
        self.spatial_distribution_model = LensConeUniform(keywords_master['cone_opening_angle'], geometry)
        self._lens_plane_redshifts = lens_plane_redshifts
        self._delta_z_list = delta_z_list
        super(TwoHaloContribution, self).__init__()
    def render(self):
        """
        Generates halo masses and positions for correlated structure around the main deflector
        :return: mass (in Msun), x (arcsec), y (arcsec), r3d (kpc), redshift
        """
        # Index of the lens plane closest to the main deflector redshift.
        idx = np.argmin(abs(np.array(self._lens_plane_redshifts) - self.lens_cosmo.z_lens))
        delta_z = self._delta_z_list[idx]
        m = self.render_masses_at_z(self.lens_cosmo.z_lens, delta_z)
        x, y = self.render_positions_at_z(self.lens_cosmo.z_lens, len(m))
        # These objects are field halos, not subhalos of the host.
        subhalo_flag = [False] * len(m)
        # All objects are placed at the main deflector redshift.
        redshifts = [self.lens_cosmo.z_lens] * len(m)
        # r3d is not defined for line-of-sight halos.
        r3d = np.array([None] * len(m))
        return m, x, y, r3d, redshifts, subhalo_flag
    def render_masses_at_z(self, z, delta_z):
        """
        :param z: redshift at which to render masses
        :param delta_z: thickness of the redshift slice
        :return: halo masses at the desired redshift in units Msun
        """
        norm, slope = self._norm_slope(z, delta_z)
        args = deepcopy(self._rendering_kwargs)
        # The mass range may itself depend on redshift.
        log_mlow, log_mhigh = self._redshift_dependent_mass_range(z, args['log_mlow'], args['log_mhigh'])
        mfunc = GeneralPowerLaw(log_mlow, log_mhigh, slope, args['draw_poisson'],
                                norm, args['log_mc'], args['a_wdm'], args['b_wdm'],
                                args['c_wdm'])
        m = mfunc.draw()
        return m
    def render_positions_at_z(self, z, nhalos):
        """
        :param z: redshift
        :param nhalos: number of halos or objects to generate
        :return: the x, y coordinates of the objects in arcsec (empty arrays
        when no objects were generated). Note that no 3d coordinate is
        returned here; for line of sight halos it is set to None in render().
        """
        x_kpc, y_kpc = self.spatial_distribution_model.draw(nhalos, z)
        if len(x_kpc) > 0:
            # Convert physical kpc offsets to angles on the sky.
            kpc_per_asec = self.geometry.kpc_per_arcsec(z)
            x_arcsec = x_kpc * kpc_per_asec ** -1
            y_arcsec = y_kpc * kpc_per_asec ** -1
            return x_arcsec, y_arcsec
        else:
            return np.array([]), np.array([])
    def _norm_slope(self, z, delta_z):
        """
        This method computes the normalization of the mass function for correlated structure around the main deflector.
        The normalization is defined as (boost - 1) * background, where background is the mean normalization of the
        halo mass function computed with (for example) Sheth-Tormen, and boost is the average contribution of the
        two-halo term integrated over a comoving distance corresponding to 2 * dz, where dz is the redshift plane
        spacing.
        boost(z, r_min, r_max) = 2 / r_max int_{r_min}^{r_max} x(r, z, M_{host}) * dr
        where xi(r, M_{host) is the linear halo bias times the matter-matter correlation function,
        r_min is set of 0.5 Mpc, and r_max is the comoving distance corresponding to 2*dz, where dz is the redshift
        spacing. M_host is the mass in M_sun of the host dark matter halo
        :param z: the redshift which to evaluate the matter-matter correlation function and halo bias
        :param delta_z: the redshift spacing of the lens planes adjacent the main deflector
        :return: the normalization of the two-halo term mass function. The form of the two-halo term mass function is
        assumed to have the same shape as the background halo mass function
        """
        if z != self.lens_cosmo.z_lens:
            raise Exception('this class must be evaluated at the main deflector redshift')
        volume_element_comoving = self.geometry.volume_element_comoving(z, delta_z)
        # Background mass-function slope, shifted by the user-supplied offset.
        plaw_index = self.halo_mass_function.plaw_index_z(z) + self._rendering_kwargs['delta_power_law_index']
        norm_per_unit_volume = self.halo_mass_function.norm_at_z_density(z, plaw_index,
                                                                         self._rendering_kwargs['m_pivot'])
        norm_per_unit_volume *= self._rendering_kwargs['LOS_normalization']
        reference_norm = norm_per_unit_volume * volume_element_comoving
        # Comoving distance spanned by the redshift slice; r_min capped at 0.5 (Mpc).
        rmax = self.lens_cosmo.cosmo.D_C_transverse(z + delta_z) - self.lens_cosmo.cosmo.D_C_transverse(z)
        rmin = min(rmax, 0.5)
        two_halo_boost = self.halo_mass_function.two_halo_boost(self._rendering_kwargs['host_m200'], z, rmax=rmax,
                                                                rmin=rmin)
        slope = self.halo_mass_function.plaw_index_z(z) + self._rendering_kwargs['delta_power_law_index']
        # Only the excess above the mean background is rendered by this class.
        norm = (two_halo_boost - 1) * reference_norm
        return norm, slope
    def convergence_sheet_correction(self, *args, **kwargs):
        """This term contributes no convergence sheet correction."""
        return {}, [], []
    @staticmethod
    def keyword_parse_render(keywords_master):
        """Extract the required rendering keywords from keywords_master.

        :raises Exception: if any required keyword is missing
        :return: dict with only the keys this class needs
        """
        kwargs = {}
        required_keys = ['log_mlow', 'log_mhigh', 'host_m200', 'LOS_normalization',
                         'draw_poisson', 'delta_power_law_index', 'm_pivot', 'log_mc', 'a_wdm', 'b_wdm', 'c_wdm']
        for key in required_keys:
            if key not in keywords_master:
                raise Exception('Required keyword argument ' + str(key) + ' not specified.')
            else:
                kwargs[key] = keywords_master[key]
        return kwargs
    def keys_convergence_sheets(self):
        """No keywords are needed for the (empty) convergence sheet term."""
        return {}
| [
"pyHalo.Rendering.MassFunctions.power_law.GeneralPowerLaw",
"numpy.array",
"pyHalo.Rendering.SpatialDistributions.uniform.LensConeUniform",
"copy.deepcopy"
] | [((1205, 1269), 'pyHalo.Rendering.SpatialDistributions.uniform.LensConeUniform', 'LensConeUniform', (["keywords_master['cone_opening_angle']", 'geometry'], {}), "(keywords_master['cone_opening_angle'], geometry)\n", (1220, 1269), False, 'from pyHalo.Rendering.SpatialDistributions.uniform import LensConeUniform\n'), ((2425, 2457), 'copy.deepcopy', 'deepcopy', (['self._rendering_kwargs'], {}), '(self._rendering_kwargs)\n', (2433, 2457), False, 'from copy import deepcopy\n'), ((2580, 2716), 'pyHalo.Rendering.MassFunctions.power_law.GeneralPowerLaw', 'GeneralPowerLaw', (['log_mlow', 'log_mhigh', 'slope', "args['draw_poisson']", 'norm', "args['log_mc']", "args['a_wdm']", "args['b_wdm']", "args['c_wdm']"], {}), "(log_mlow, log_mhigh, slope, args['draw_poisson'], norm,\n args['log_mc'], args['a_wdm'], args['b_wdm'], args['c_wdm'])\n", (2595, 2716), False, 'from pyHalo.Rendering.MassFunctions.power_law import GeneralPowerLaw\n'), ((3583, 3595), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3591, 3595), True, 'import numpy as np\n'), ((3597, 3609), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3605, 3609), True, 'import numpy as np\n'), ((1671, 1707), 'numpy.array', 'np.array', (['self._lens_plane_redshifts'], {}), '(self._lens_plane_redshifts)\n', (1679, 1707), True, 'import numpy as np\n')] |
from decouple import config
import heroku3
import uuid
# API token is read via python-decouple (environment / .env file).
HEROKU_API_KEY = config("HEROKU_API_KEY")
# Shared client connection to the Heroku platform API.
heroku_conn = heroku3.from_key(HEROKU_API_KEY)
def footprint_walt():
    """Clone this bot to a fresh Heroku app and start its worker dyno.

    Creates an app named ``footprint-<8 hex chars>``, builds it from the
    configured Git tarball, copies the required settings into the new app's
    environment and scales its ``worker`` formation to one dyno.
    """
    appy = heroku_conn.create_app(name=f"footprint-{uuid.uuid4().hex[:8]}")
    appy.create_build(f'{config("GIT_URL")}/tarball/master')
    # Mirror the required settings into the new app's environment.
    # (config() already returns strings, so no f-string wrapping is needed;
    # the original also had pointless f-prefixes on the literal key names.)
    env_keys = (
        "API_HASH",
        "API_ID",
        "CHATINPUT",
        "CHATOUTPUT",
        "REDISTOGO_URL",
        "SESSION",
        "HEROKU_API_KEY",
    )
    appy.config().update({key: config(key) for key in env_keys})
    # Re-fetch the app through the apps listing and start a single worker.
    app = heroku_conn.apps()[appy.name]
    app.process_formation()["worker"].scale(1)
| [
"heroku3.from_key",
"decouple.config",
"uuid.uuid4"
] | [((73, 97), 'decouple.config', 'config', (['"""HEROKU_API_KEY"""'], {}), "('HEROKU_API_KEY')\n", (79, 97), False, 'from decouple import config\n'), ((112, 144), 'heroku3.from_key', 'heroku3.from_key', (['HEROKU_API_KEY'], {}), '(HEROKU_API_KEY)\n', (128, 144), False, 'import heroku3\n'), ((271, 288), 'decouple.config', 'config', (['"""GIT_URL"""'], {}), "('GIT_URL')\n", (277, 288), False, 'from decouple import config\n'), ((371, 389), 'decouple.config', 'config', (['"""API_HASH"""'], {}), "('API_HASH')\n", (377, 389), False, 'from decouple import config\n'), ((419, 435), 'decouple.config', 'config', (['"""API_ID"""'], {}), "('API_ID')\n", (425, 435), False, 'from decouple import config\n'), ((468, 487), 'decouple.config', 'config', (['"""CHATINPUT"""'], {}), "('CHATINPUT')\n", (474, 487), False, 'from decouple import config\n'), ((521, 541), 'decouple.config', 'config', (['"""CHATOUTPUT"""'], {}), "('CHATOUTPUT')\n", (527, 541), False, 'from decouple import config\n'), ((578, 601), 'decouple.config', 'config', (['"""REDISTOGO_URL"""'], {}), "('REDISTOGO_URL')\n", (584, 601), False, 'from decouple import config\n'), ((632, 649), 'decouple.config', 'config', (['"""SESSION"""'], {}), "('SESSION')\n", (638, 649), False, 'from decouple import config\n'), ((687, 711), 'decouple.config', 'config', (['"""HEROKU_API_KEY"""'], {}), "('HEROKU_API_KEY')\n", (693, 711), False, 'from decouple import config\n'), ((221, 233), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (231, 233), False, 'import uuid\n')] |
# -*- coding: utf-8 -*-
from unittest import TestCase
from eduid_userdb.element import DuplicateElementViolation
from eduid_userdb.profile import Profile, ProfileList
__author__ = 'lundberg'

# Opaque payload exercising the JSON-compatible value types a profile can carry.
OPAQUE_DATA = {'a_string': 'I am a string', 'an_int': 3, 'a_list': ['eins', 2, 'drei'], 'a_map': {'some': 'data'}}
class ProfileTest(TestCase):
    """Tests for the Profile element and the ProfileList container."""

    def test_create_profile(self):
        """A Profile keeps all constructor data and gets a created_ts."""
        profile = Profile(
            owner='test owner',
            schema='test schema',
            profile_data=OPAQUE_DATA,
            created_by='test created_by',
        )
        self.assertEqual(profile.owner, 'test owner')
        self.assertEqual(profile.schema, 'test schema')
        self.assertEqual(profile.created_by, 'test created_by')
        self.assertIsNotNone(profile.created_ts)
        for key in OPAQUE_DATA:
            self.assertIn(key, profile.profile_data)
            self.assertEqual(OPAQUE_DATA[key], profile.profile_data[key])

    def test_profile_list(self):
        """Profiles are retrievable from a ProfileList by owner."""
        owners = ('test owner 1', 'test owner 2')
        profiles = [
            Profile(owner=owner, schema='test schema',
                    profile_data=OPAQUE_DATA, created_by='test created_by')
            for owner in owners
        ]
        profile_list = ProfileList(profiles)
        self.assertIsNotNone(profile_list)
        self.assertEqual(profile_list.count, 2)
        for owner in owners:
            self.assertIsNotNone(profile_list.find(owner))

    def test_empty_profile_list(self):
        """An empty ProfileList reports a count of zero."""
        profile_list = ProfileList([])
        self.assertIsNotNone(profile_list)
        self.assertEqual(profile_list.count, 0)

    def test_profile_list_owner_conflict(self):
        """Two profiles with the same owner may not share a ProfileList."""
        original = Profile(
            owner='test owner 1',
            schema='test schema',
            profile_data=OPAQUE_DATA,
            created_by='test created_by',
        )
        duplicate = Profile.from_dict(original.to_dict())
        with self.assertRaises(DuplicateElementViolation):
            ProfileList([original, duplicate])
| [
"eduid_userdb.profile.ProfileList",
"eduid_userdb.profile.Profile.from_dict",
"eduid_userdb.profile.Profile"
] | [((395, 504), 'eduid_userdb.profile.Profile', 'Profile', ([], {'owner': '"""test owner"""', 'schema': '"""test schema"""', 'profile_data': 'OPAQUE_DATA', 'created_by': '"""test created_by"""'}), "(owner='test owner', schema='test schema', profile_data=OPAQUE_DATA,\n created_by='test created_by')\n", (402, 504), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((962, 1074), 'eduid_userdb.profile.Profile', 'Profile', ([], {'owner': '"""test owner 1"""', 'schema': '"""test schema"""', 'profile_data': 'OPAQUE_DATA', 'created_by': '"""test created_by"""'}), "(owner='test owner 1', schema='test schema', profile_data=\n OPAQUE_DATA, created_by='test created_by')\n", (969, 1074), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((1112, 1224), 'eduid_userdb.profile.Profile', 'Profile', ([], {'owner': '"""test owner 2"""', 'created_by': '"""test created_by"""', 'schema': '"""test schema"""', 'profile_data': 'OPAQUE_DATA'}), "(owner='test owner 2', created_by='test created_by', schema=\n 'test schema', profile_data=OPAQUE_DATA)\n", (1119, 1224), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((1267, 1299), 'eduid_userdb.profile.ProfileList', 'ProfileList', (['[profile, profile2]'], {}), '([profile, profile2])\n', (1278, 1299), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((1582, 1597), 'eduid_userdb.profile.ProfileList', 'ProfileList', (['[]'], {}), '([])\n', (1593, 1597), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((1756, 1868), 'eduid_userdb.profile.Profile', 'Profile', ([], {'owner': '"""test owner 1"""', 'schema': '"""test schema"""', 'profile_data': 'OPAQUE_DATA', 'created_by': '"""test created_by"""'}), "(owner='test owner 1', schema='test schema', profile_data=\n OPAQUE_DATA, created_by='test created_by')\n", (1763, 1868), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((1947, 1978), 'eduid_userdb.profile.Profile.from_dict', 'Profile.from_dict', 
(['profile_dict'], {}), '(profile_dict)\n', (1964, 1978), False, 'from eduid_userdb.profile import Profile, ProfileList\n'), ((2051, 2083), 'eduid_userdb.profile.ProfileList', 'ProfileList', (['[profile, profile2]'], {}), '([profile, profile2])\n', (2062, 2083), False, 'from eduid_userdb.profile import Profile, ProfileList\n')] |
"""Module containing file system sensors."""
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils.decorators import apply_defaults
from airflow_fs.hooks import LocalHook
class FileSensor(BaseSensorOperator):
    """Sensor that waits for files matching a given file pattern.

    :param str path: File path to match files to. Can be any valid
        glob pattern.
    :param FsHook hook: File system hook to use when looking for files.
        Defaults to a hook for the local file system.
    """

    # BUG FIX: template_fields must name an existing attribute; the instance
    # stores the pattern as ``_path`` ("file_pattern" does not exist, which
    # makes Airflow's template rendering fail with an AttributeError).
    template_fields = ("_path",)

    @apply_defaults
    def __init__(self, path, hook=None, **kwargs):
        super(FileSensor, self).__init__(**kwargs)
        self._path = path
        self._hook = hook or LocalHook()

    # pylint: disable=unused-argument
    def poke(self, context):
        """Return True as soon as at least one file matches the pattern."""
        with self._hook as hook:
            return bool(hook.glob(self._path))
| [
"airflow_fs.hooks.LocalHook"
] | [((702, 713), 'airflow_fs.hooks.LocalHook', 'LocalHook', ([], {}), '()\n', (711, 713), False, 'from airflow_fs.hooks import LocalHook\n')] |
import vtreat.util
import pandas
import numpy
def test_range():
    """Regression test: has_range must accept dense and sparse arrays alike.

    See:
    https://github.com/WinVector/pyvtreat/blob/master/Examples/Bugs/asarray_issue.md
    https://github.com/WinVector/pyvtreat/issues/7
    """
    numpy.random.seed(2019)
    dense_values = numpy.random.randint(2, size=10)
    sparse_values = pandas.arrays.SparseArray(dense_values, fill_value=0)
    assert vtreat.util.has_range(dense_values)
    assert vtreat.util.has_range(sparse_values)
| [
"pandas.arrays.SparseArray",
"numpy.random.randint",
"numpy.random.seed"
] | [((210, 233), 'numpy.random.seed', 'numpy.random.seed', (['(2019)'], {}), '(2019)\n', (227, 233), False, 'import numpy\n'), ((244, 276), 'numpy.random.randint', 'numpy.random.randint', (['(2)'], {'size': '(10)'}), '(2, size=10)\n', (264, 276), False, 'import numpy\n'), ((289, 333), 'pandas.arrays.SparseArray', 'pandas.arrays.SparseArray', (['arr'], {'fill_value': '(0)'}), '(arr, fill_value=0)\n', (314, 333), False, 'import pandas\n')] |
import os
from setuptools import setup
# Read the package long description from the README next to this file.
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

# Distribution metadata and dependencies for django-eremaea2.
setup(
    name='django-eremaea2',
    version='2.0.17',
    packages=['eremaea','eremaea.ctl','eremaea.migrations'],
    entry_points={'console_scripts': [
        'eremaeactl = eremaea.ctl.commandline:execute_from_commandline',
    ]},
    include_package_data=True,
    license='BSD-2-Clause',
    description='A simple Django application to store and show webcam snapshots',
    long_description=README,
    long_description_content_type="text/markdown",
    url='https://github.com/matwey/django-eremaea2',
    author='<NAME>',
    author_email='<EMAIL>',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
    test_suite='runtests.runtests',
    install_requires=[
        'Django',
        'djangorestframework',
        'requests',
        'cmdln',
    ]
)
| [
"os.path.abspath",
"os.path.dirname",
"setuptools.setup"
] | [((261, 1266), 'setuptools.setup', 'setup', ([], {'name': '"""django-eremaea2"""', 'version': '"""2.0.17"""', 'packages': "['eremaea', 'eremaea.ctl', 'eremaea.migrations']", 'entry_points': "{'console_scripts': [\n 'eremaeactl = eremaea.ctl.commandline:execute_from_commandline']}", 'include_package_data': '(True)', 'license': '"""BSD-2-Clause"""', 'description': '"""A simple Django application to store and show webcam snapshots"""', 'long_description': 'README', 'long_description_content_type': '"""text/markdown"""', 'url': '"""https://github.com/matwey/django-eremaea2"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'classifiers': "['Environment :: Web Environment', 'Framework :: Django',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content']", 'test_suite': '"""runtests.runtests"""', 'install_requires': "['Django', 'djangorestframework', 'requests', 'cmdln']"}), "(name='django-eremaea2', version='2.0.17', packages=['eremaea',\n 'eremaea.ctl', 'eremaea.migrations'], entry_points={'console_scripts':\n ['eremaeactl = eremaea.ctl.commandline:execute_from_commandline']},\n include_package_data=True, license='BSD-2-Clause', description=\n 'A simple Django application to store and show webcam snapshots',\n long_description=README, long_description_content_type='text/markdown',\n url='https://github.com/matwey/django-eremaea2', author='<NAME>',\n author_email='<EMAIL>', classifiers=['Environment :: Web Environment',\n 'Framework :: Django', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content'], 
test_suite=\n 'runtests.runtests', install_requires=['Django', 'djangorestframework',\n 'requests', 'cmdln'])\n", (266, 1266), False, 'from setuptools import setup\n'), ((63, 88), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import os\n'), ((220, 245), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (235, 245), False, 'import os\n')] |
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
# https://wsvincent.com/django-custom-user-model-tutorial/
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from users.managers import UserManager
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model.

    Authentication uses ``username`` (USERNAME_FIELD); ``name`` is also
    required (REQUIRED_FIELDS), and ``email`` must be unique.
    """

    name = models.CharField(_("full name"), max_length=255, unique=True)
    username = models.CharField(_("username"), max_length=255, unique=True)
    email = models.EmailField(_("email address"), unique=True)
    # Grants access to the Django admin site when True.
    is_staff = models.BooleanField(
        _("staff status"),
        default=False,
        help_text=_("Designates whether the user can log into this admin site."),
    )
    # Soft-delete flag: unset this instead of deleting accounts.
    is_active = models.BooleanField(
        _("active"),
        default=True,
        help_text=_(
            "Designates whether this user should be treated as active. " "Unselect this instead of deleting accounts."
        ),
    )
    date_joined = models.DateTimeField(_("date joined"), default=timezone.now)

    # Custom manager (users.managers.UserManager) for this model.
    objects = UserManager()

    EMAIL_FIELD = "email"
    USERNAME_FIELD = "username"
    REQUIRED_FIELDS = ["name"]
| [
"django.utils.translation.gettext_lazy",
"users.managers.UserManager"
] | [((1102, 1115), 'users.managers.UserManager', 'UserManager', ([], {}), '()\n', (1113, 1115), False, 'from users.managers import UserManager\n'), ((412, 426), 'django.utils.translation.gettext_lazy', '_', (['"""full name"""'], {}), "('full name')\n", (413, 426), True, 'from django.utils.translation import gettext_lazy as _\n'), ((489, 502), 'django.utils.translation.gettext_lazy', '_', (['"""username"""'], {}), "('username')\n", (490, 502), True, 'from django.utils.translation import gettext_lazy as _\n'), ((563, 581), 'django.utils.translation.gettext_lazy', '_', (['"""email address"""'], {}), "('email address')\n", (564, 581), True, 'from django.utils.translation import gettext_lazy as _\n'), ((641, 658), 'django.utils.translation.gettext_lazy', '_', (['"""staff status"""'], {}), "('staff status')\n", (642, 658), True, 'from django.utils.translation import gettext_lazy as _\n'), ((816, 827), 'django.utils.translation.gettext_lazy', '_', (['"""active"""'], {}), "('active')\n", (817, 827), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1047, 1063), 'django.utils.translation.gettext_lazy', '_', (['"""date joined"""'], {}), "('date joined')\n", (1048, 1063), True, 'from django.utils.translation import gettext_lazy as _\n'), ((701, 763), 'django.utils.translation.gettext_lazy', '_', (['"""Designates whether the user can log into this admin site."""'], {}), "('Designates whether the user can log into this admin site.')\n", (702, 763), True, 'from django.utils.translation import gettext_lazy as _\n'), ((869, 980), 'django.utils.translation.gettext_lazy', '_', (['"""Designates whether this user should be treated as active. Unselect this instead of deleting accounts."""'], {}), "('Designates whether this user should be treated as active. Unselect this instead of deleting accounts.'\n )\n", (870, 980), True, 'from django.utils.translation import gettext_lazy as _\n')] |
from datetime import datetime, timedelta
from random import sample, choice, randrange
from unittest import TestCase
import tests.test_timeinterval as ti
from tests.factories import make_sets, make_moments
from timeset import TimeSet
# One-week window that all random test fixtures fall inside.
t0 = datetime(2019, 7, 19)
t6 = datetime(2019, 7, 25)
# Random moments / sets from tests.factories (presumably 20 of each -- see factories).
t = make_moments(20, t0, t6)
sets = make_sets(20, t0, t6, 3)  # Guaranteed not to make empty sets
class TestTimeSet(TestCase):
    """Property-style tests for TimeSet using randomly generated fixtures."""

    def test_from_interval(self):
        """A set built from its only interval's endpoints equals the original."""
        s = make_sets(1, t[0], t[19], 1)[0]
        i, = s.intervals  # Unpacking single element
        s1 = TimeSet.from_interval(i.start, i.end)
        self.assertEqual(s1, s)
    def test_empty(self):
        """All ways of building an empty set report is_empty()."""
        self.assertTrue(TimeSet([]).is_empty(), "Set without intervals")
        self.assertTrue(TimeSet.empty().is_empty(), "Empty set")
        self.assertTrue(TimeSet([ti.empty, ti.empty]).is_empty(), "Set of empty intervals")
    def test_not_empty(self):
        """A set built from a non-degenerate interval is not empty."""
        self.assertFalse(TimeSet.from_interval(t[2], t[5]).is_empty())
    def test_union(self):
        """Both operands are subsets of the union; union of the intervals matches."""
        s0, s1 = sample(sets, k=2)
        u = s0.union(s1)
        self.assertTrue(s0.is_subset(u))
        self.assertTrue(s1.is_subset(u))
        intervals = list(s0.intervals.union(s1.intervals))
        self.assertEqual(u, TimeSet(intervals))
    def test_empty_union(self):
        """The empty set is the identity element for union."""
        e = TimeSet.empty()
        s = choice(sets)
        self.assertEqual(e.union(s), s)
    def test_intersection(self):
        """An intersection is a subset of both operands."""
        s0, s1 = sample(sets, k=2)
        intersection = s0.intersection(s1)
        self.assertTrue(intersection.is_subset(s0))
        self.assertTrue(intersection.is_subset(s1))
    def test_difference(self):
        """A difference is a subset of the minuend and disjoint from the subtrahend."""
        s0, s1 = sample(sets, k=2)
        diff = s1.difference(s0)
        self.assertTrue(diff.is_subset(s1))
        self.assertTrue(s0.intersection(diff).is_empty())
    def test_contains(self):
        """An interval's start and midpoint are contained in the set."""
        s = choice(sets)
        i = next(iter(s.intervals))
        middle = i.start + (i.end-i.start)/2
        self.assertTrue(s.contains(i.start), "Starting point")
        self.assertTrue(s.contains(middle), "Middle point")
    def test_not_contains(self):
        """Intervals are half-open: the end point and outside points are excluded."""
        s = choice(sets)
        i = next(iter(s.intervals))
        self.assertFalse(s.contains(i.end), "Interval ending point")
        self.assertFalse(s.contains(t6+timedelta(days=1)), "Point outside")
    def test_is_subset(self):
        """A set built from a random sample of a set's intervals is its subset."""
        s = choice(sets)
        i = sample(s.intervals, randrange(1, len(s.intervals)+1))
        self.assertTrue(TimeSet(i).is_subset(s))
    def test_is_not_subset(self):
        """A union of two mutually non-subset sets is a subset of neither operand."""
        s0, s1 = sample(sets, k=2)
        while s0.is_subset(s1) or s1.is_subset(s0):
            s0, s1 = sample(sets, k=2)
        self.assertFalse(s0.union(s1).is_subset(s0), "Not subset!")
        self.assertFalse(s0.union(s1).is_subset(s1), "Not subset!")
    def test_is_empty(self):
        """Sets with no intervals, or only empty intervals, are empty."""
        self.assertTrue(TimeSet([]).is_empty(), "No intervals")
        self.assertTrue(TimeSet([ti.empty]).is_empty(), "Empty interval")
    def test_is_not_empty(self):
        """The generated fixture sets are guaranteed non-empty."""
        s = choice(sets)
        self.assertFalse(s.is_empty(), "Not empty set!")
| [
"datetime.datetime",
"timeset.TimeSet.from_interval",
"random.sample",
"random.choice",
"tests.factories.make_moments",
"tests.factories.make_sets",
"timeset.TimeSet",
"timeset.TimeSet.empty",
"datetime.timedelta"
] | [((240, 261), 'datetime.datetime', 'datetime', (['(2019)', '(7)', '(19)'], {}), '(2019, 7, 19)\n', (248, 261), False, 'from datetime import datetime, timedelta\n'), ((267, 288), 'datetime.datetime', 'datetime', (['(2019)', '(7)', '(25)'], {}), '(2019, 7, 25)\n', (275, 288), False, 'from datetime import datetime, timedelta\n'), ((293, 317), 'tests.factories.make_moments', 'make_moments', (['(20)', 't0', 't6'], {}), '(20, t0, t6)\n', (305, 317), False, 'from tests.factories import make_sets, make_moments\n'), ((325, 349), 'tests.factories.make_sets', 'make_sets', (['(20)', 't0', 't6', '(3)'], {}), '(20, t0, t6, 3)\n', (334, 349), False, 'from tests.factories import make_sets, make_moments\n'), ((562, 599), 'timeset.TimeSet.from_interval', 'TimeSet.from_interval', (['i.start', 'i.end'], {}), '(i.start, i.end)\n', (583, 599), False, 'from timeset import TimeSet\n'), ((1035, 1052), 'random.sample', 'sample', (['sets'], {'k': '(2)'}), '(sets, k=2)\n', (1041, 1052), False, 'from random import sample, choice, randrange\n'), ((1312, 1327), 'timeset.TimeSet.empty', 'TimeSet.empty', ([], {}), '()\n', (1325, 1327), False, 'from timeset import TimeSet\n'), ((1340, 1352), 'random.choice', 'choice', (['sets'], {}), '(sets)\n', (1346, 1352), False, 'from random import sample, choice, randrange\n'), ((1444, 1461), 'random.sample', 'sample', (['sets'], {'k': '(2)'}), '(sets, k=2)\n', (1450, 1461), False, 'from random import sample, choice, randrange\n'), ((1658, 1675), 'random.sample', 'sample', (['sets'], {'k': '(2)'}), '(sets, k=2)\n', (1664, 1675), False, 'from random import sample, choice, randrange\n'), ((1853, 1865), 'random.choice', 'choice', (['sets'], {}), '(sets)\n', (1859, 1865), False, 'from random import sample, choice, randrange\n'), ((2116, 2128), 'random.choice', 'choice', (['sets'], {}), '(sets)\n', (2122, 2128), False, 'from random import sample, choice, randrange\n'), ((2353, 2365), 'random.choice', 'choice', (['sets'], {}), '(sets)\n', (2359, 2365), False, 
'from random import sample, choice, randrange\n'), ((2533, 2550), 'random.sample', 'sample', (['sets'], {'k': '(2)'}), '(sets, k=2)\n', (2539, 2550), False, 'from random import sample, choice, randrange\n'), ((2992, 3004), 'random.choice', 'choice', (['sets'], {}), '(sets)\n', (2998, 3004), False, 'from random import sample, choice, randrange\n'), ((464, 492), 'tests.factories.make_sets', 'make_sets', (['(1)', 't[0]', 't[19]', '(1)'], {}), '(1, t[0], t[19], 1)\n', (473, 492), False, 'from tests.factories import make_sets, make_moments\n'), ((1247, 1265), 'timeset.TimeSet', 'TimeSet', (['intervals'], {}), '(intervals)\n', (1254, 1265), False, 'from timeset import TimeSet\n'), ((2624, 2641), 'random.sample', 'sample', (['sets'], {'k': '(2)'}), '(sets, k=2)\n', (2630, 2641), False, 'from random import sample, choice, randrange\n'), ((683, 694), 'timeset.TimeSet', 'TimeSet', (['[]'], {}), '([])\n', (690, 694), False, 'from timeset import TimeSet\n'), ((756, 771), 'timeset.TimeSet.empty', 'TimeSet.empty', ([], {}), '()\n', (769, 771), False, 'from timeset import TimeSet\n'), ((821, 850), 'timeset.TimeSet', 'TimeSet', (['[ti.empty, ti.empty]'], {}), '([ti.empty, ti.empty])\n', (828, 850), False, 'from timeset import TimeSet\n'), ((945, 978), 'timeset.TimeSet.from_interval', 'TimeSet.from_interval', (['t[2]', 't[5]'], {}), '(t[2], t[5])\n', (966, 978), False, 'from timeset import TimeSet\n'), ((2273, 2290), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2282, 2290), False, 'from datetime import datetime, timedelta\n'), ((2456, 2466), 'timeset.TimeSet', 'TimeSet', (['i'], {}), '(i)\n', (2463, 2466), False, 'from timeset import TimeSet\n'), ((2832, 2843), 'timeset.TimeSet', 'TimeSet', (['[]'], {}), '([])\n', (2839, 2843), False, 'from timeset import TimeSet\n'), ((2896, 2915), 'timeset.TimeSet', 'TimeSet', (['[ti.empty]'], {}), '([ti.empty])\n', (2903, 2915), False, 'from timeset import TimeSet\n')] |
# -*- coding: utf-8 -*-
import json
def storage(name, data):
    """Serialize *data* as pretty-printed UTF-8 JSON to ``tmp/<name>.json``."""
    destino = 'tmp/%s.json' % name
    with open(destino, 'w', encoding = 'UTF-8') as handle:
        json.dump(data, handle, sort_keys = False, indent = 2, ensure_ascii = False)
| [
"json.dump"
] | [((129, 194), 'json.dump', 'json.dump', (['data', 'f'], {'sort_keys': '(False)', 'indent': '(2)', 'ensure_ascii': '(False)'}), '(data, f, sort_keys=False, indent=2, ensure_ascii=False)\n', (138, 194), False, 'import json\n')] |
import numpy as np
def directional_coupler_lc(wavelength_nm, n_eff_1, n_eff_2):
    '''
    Coherence length (length for 100% power transfer) of a
    directional coupler.

    Args:
        wavelength_nm (float): Operating wavelength in [nm].
        n_eff_1 (float): n_eff of the fundamental (even) supermode.
        n_eff_2 (float): n_eff of the first-order (odd) supermode.

    Returns:
        float: Coupler length [um] giving full power transfer.
    '''
    # Only the real parts of the effective indices set the beat length.
    wl_m = wavelength_nm * 1.e-9
    delta_n = (n_eff_1 - n_eff_2).real
    return (wl_m / (2. * delta_n)) * 1.e6
def grating_coupler_period(wavelength,
                           n_eff,
                           n_clad,
                           incidence_angle_deg,
                           diffration_order=1):
    '''
    Period required for a grating coupler.

    Args:
        wavelength (float): Target wavelength for the coupler.
        n_eff (float): Effective index of the waveguide mode at the
            grating-coupler width.
        n_clad (float): Cladding refractive index.
        incidence_angle_deg (float): Operating incidence angle [degrees].
        diffration_order (int): Grating order to design for
            (default: first order, 1).

    Returns:
        float: Grating period, in the same units as *wavelength*.
    '''
    # Phase-matching: beta - k0*n_inc*sin(theta) = m * 2*pi / period
    k0 = 2. * np.pi / wavelength
    beta = n_eff.real * k0
    n_inc = n_clad
    numerator = 2. * np.pi * diffration_order
    denominator = beta - k0 * n_inc * np.sin(np.radians(incidence_angle_deg))
    return numerator / denominator
def loss(n, wavelength):
    """Propagation loss from the imaginary part of a complex index.

    Returns dB per unit length of *wavelength* ([dB/um] when working in [um]).
    """
    # 4.34 = 10*log10(e): converts the field attenuation coefficient to dB.
    kappa = n.imag
    return 4.34 * 4 * np.pi * np.abs(kappa) / wavelength
def qpm_wavenumber(pmp_n,
                   pmp_l,
                   sig_n,
                   sig_l,
                   idl_n,
                   idl_l,
                   period_qpm,
                   type='forward'):
    '''
    Wavevector mismatch of a quasi-phase-matched three-wave process.

    Args:
        pmp_n, pmp_l: Pump refractive index and wavelength.
        sig_n, sig_l: Signal refractive index and wavelength.
        idl_n, idl_l: Idler refractive index and wavelength.
        period_qpm (float): Poling period (same length units as the
            wavelengths).
        type (str): Process geometry, one of 'forward',
            'forward_backward' or 'backward'.

    Returns:
        float: k_idl*sgn_1 + k_sig*sgn_2 + k_qpm - k_pmp.

    Raises:
        ValueError: If *type* is not one of the three supported
            geometries (previously this fell through and raised an
            opaque NameError on the unbound sign variables).
    '''
    pi2 = np.pi * 2
    k_pmp = pmp_n * pi2 / pmp_l
    k_sig = sig_n * pi2 / sig_l
    k_idl = idl_n * pi2 / idl_l
    k_qpm = pi2 / period_qpm

    if type == 'forward':
        sgn_1, sgn_2 = 1, 1
    elif type == 'forward_backward':
        sgn_1, sgn_2 = 1, -1
    elif type == 'backward':
        sgn_1, sgn_2 = -1, -1
    else:
        raise ValueError(
            "type must be 'forward', 'forward_backward' or 'backward', "
            "got %r" % (type,))

    k_mismatch = k_idl * sgn_1 + k_sig * sgn_2 + k_qpm - k_pmp

    return k_mismatch
def qpm_period(pmp_n, pmp_l, sig_n, sig_l, idl_n, idl_l, type='forward'):
    '''
    Poling period that quasi-phase-matches a three-wave process.

    Args:
        pmp_n, pmp_l: Pump refractive index and wavelength.
        sig_n, sig_l: Signal refractive index and wavelength.
        idl_n, idl_l: Idler refractive index and wavelength.
        type (str): Process geometry, one of 'forward',
            'forward_backward' or 'backward'.

    Returns:
        float: The period 2*pi / (k_pmp - sgn_1*k_idl - sgn_2*k_sig),
        in the same length units as the wavelengths.

    Raises:
        ValueError: If *type* is not one of the three supported
            geometries (previously this fell through and raised an
            opaque NameError on the unbound sign variables).
    '''
    pi2 = np.pi * 2
    k_pmp = pmp_n * pi2 / pmp_l
    k_sig = sig_n * pi2 / sig_l
    k_idl = idl_n * pi2 / idl_l

    if type == 'forward':
        sgn_1, sgn_2 = 1, 1
    elif type == 'forward_backward':
        sgn_1, sgn_2 = 1, -1
    elif type == 'backward':
        sgn_1, sgn_2 = -1, -1
    else:
        raise ValueError(
            "type must be 'forward', 'forward_backward' or 'backward', "
            "got %r" % (type,))

    k_qpm = k_pmp - k_idl * sgn_1 - k_sig * sgn_2
    l_qpm = pi2 / k_qpm

    return l_qpm
| [
"numpy.radians",
"numpy.abs"
] | [((2038, 2051), 'numpy.abs', 'np.abs', (['kappa'], {}), '(kappa)\n', (2044, 2051), True, 'import numpy as np\n'), ((1900, 1931), 'numpy.radians', 'np.radians', (['incidence_angle_deg'], {}), '(incidence_angle_deg)\n', (1910, 1931), True, 'import numpy as np\n')] |
import os
from typing import IO
from PySDDP.newave.script.templates.confhd import ConfhdTemplate
from matplotlib import pyplot as plt
import numpy as np
from random import randint
from mpl_toolkits.mplot3d import Axes3D
class Confhd(ConfhdTemplate):
def __init__(self):
super().__init__()
self.lista_entrada = list()
self._conteudo_ = None
self.dir_base = None
self._numero_registros_ = None
    def ler(self, file_name: str, hidr, vazoes, dger, modif, exph) -> None:
        """
        Read the hydro-plant configuration file (CONFHD) used by NEWAVE.

        For each plant record it cross-references the full cadastre
        (``hidr``), attaches the natural-inflow history (``vazoes``),
        applies the MODIF.DAT and EXPH.DAT modifications and derives the
        time-dependent parameters (effective power/flow, productivities,
        maximum swallowing).

        :param file_name: full path of the CONFHD file
        :param hidr: cadastre of all hydro plants (provides ``get``)
        :param vazoes: complete inflow history (provides ``vaz_nat``)
        :param dger: general data; only ``num_anos['valor']`` is read here
        :param modif: MODIF.DAT contents (``bloco_usina['df']``)
        :param exph: EXPH.DAT contents (``bloco_usina['df']``)
        """
        self.dir_base = os.path.split(file_name)[0]
        self.nome_arquivo = os.path.split(file_name)[1]
        self._copiavazoes = vazoes.vaz_nat
        self._numero_registros_ = 0
        self.nuhe = 0
        nanos = dger.num_anos['valor']
        try:
            with open(file_name, 'r', encoding='latin-1') as f: # type: IO[str]
                continua = True
                contador = 1
                while continua:
                    self.next_line(f)
                    linha = self.linha
                    # The first two lines are header; data starts at line 3.
                    if contador >= 3:
                        if len(linha) > 5:
                            self._codigo["valor"].append(int(linha[1:5]))
                        else:
                            break
                        self._nome["valor"].append(linha[6:18])
                        self._posto["valor"].append(int(linha[19:23]))
                        self._jusante["valor"].append(int(linha[25:29]))
                        self._ree["valor"].append(int(linha[30:34]))
                        self._vol_ini["valor"].append(float(linha[35:41]))
                        self._status["valor"].append(linha[44:46])
                        self._modif["valor"].append(int(linha[49:53]))
                        self._ano_i["valor"].append(int(linha[58:62]))
                        self._ano_f["valor"].append(int(linha[67:71]))
                        # Fill in the cadastral data from the HIDR cadastre
                        uhe = hidr.get(self._codigo["valor"][-1])
                        self._bdh['valor'].append(uhe['bdh'])
                        self._sist['valor'].append(uhe['sist'])
                        self._empr['valor'].append(uhe['empr'])
                        self._desvio['valor'].append(uhe['desvio'])
                        self._vol_min['valor'].append(uhe['vol_min'])
                        self._vol_max['valor'].append(uhe['vol_max'])
                        self._vol_vert['valor'].append(uhe['vol_vert'])
                        self._vol_min_desv['valor'].append(uhe['vol_min_desv'])
                        self._cota_min['valor'].append(uhe['cota_min'])
                        self._cota_max['valor'].append(uhe['cota_max'])
                        self._pol_cota_vol['valor'].append(uhe['pol_cota_vol'])
                        self._pol_cota_area['valor'].append(uhe['pol_cota_area'])
                        self._coef_evap['valor'].append(uhe['coef_evap'])
                        self._num_conj_maq['valor'].append(uhe['num_conj_maq'])
                        self._maq_por_conj['valor'].append(uhe['maq_por_conj'])
                        self._pef_por_conj['valor'].append(uhe['pef_por_conj'])
                        self._cf_hbqt['valor'].append(uhe['cf_hbqt'])
                        self._cf_hbqt['valor_2'].append(uhe['cf_hbqt_2'])
                        self._cf_hbqt['valor_3'].append(uhe['cf_hbqt_3'])
                        self._cf_hbqt['valor_4'].append(uhe['cf_hbqt_4'])
                        self._cf_hbqt['valor_5'].append(uhe['cf_hbqt_5'])
                        self._cf_hbqg['valor'].append(uhe['cf_hbqg'])
                        self._cf_hbqg['valor_2'].append(uhe['cf_hbqg_2'])
                        self._cf_hbqg['valor_3'].append(uhe['cf_hbqg_3'])
                        self._cf_hbqg['valor_4'].append(uhe['cf_hbqg_4'])
                        self._cf_hbqg['valor_5'].append(uhe['cf_hbqg_5'])
                        self._cf_hbpt['valor'].append(uhe['cf_hbpt'])
                        self._cf_hbpt['valor_2'].append(uhe['cf_hbpt_2'])
                        self._cf_hbpt['valor_3'].append(uhe['cf_hbpt_3'])
                        self._cf_hbpt['valor_4'].append(uhe['cf_hbpt_4'])
                        self._cf_hbpt['valor_5'].append(uhe['cf_hbpt_5'])
                        self._alt_efet_conj['valor'].append(uhe['alt_efet_conj'])
                        self._vaz_efet_conj['valor'].append(uhe['vaz_efet_conj'])
                        self._prod_esp['valor'].append(uhe['prod_esp'])
                        self._perda_hid['valor'].append(uhe['perda_hid'])
                        self._num_pol_vnj['valor'].append(uhe['num_pol_vnj'])
                        self._pol_vaz_niv_jus['valor'].append(uhe['pol_vaz_niv_jus'])
                        self._pol_vaz_niv_jus['valor_2'].append(uhe['pol_vaz_niv_jus_2'])
                        self._pol_vaz_niv_jus['valor_3'].append(uhe['pol_vaz_niv_jus_3'])
                        self._pol_vaz_niv_jus['valor_4'].append(uhe['pol_vaz_niv_jus_4'])
                        self._pol_vaz_niv_jus['valor_5'].append(uhe['pol_vaz_niv_jus_5'])
                        self._cota_ref_nivel_jus['valor'].append(uhe['cota_ref_nivel_jus'])
                        self._cfmed['valor'].append(uhe['cfmed'])
                        self._inf_canal_fuga['valor'].append(uhe['inf_canal_fuga'])
                        self._fator_carga_max['valor'].append(uhe['fator_carga_max'])
                        self._fator_carga_min['valor'].append(uhe['fator_carga_min'])
                        self._vaz_min['valor'].append(uhe['vaz_min'])
                        self._unid_base['valor'].append(uhe['unid_base'])
                        self._tipo_turb['valor'].append(uhe['tipo_turb'])
                        self._repres_conj['valor'].append(uhe['repres_conj'])
                        self._teifh['valor'].append(uhe['teifh'])
                        self._ip['valor'].append(uhe['ip'])
                        self._tipo_perda['valor'].append(uhe['tipo_perda'])
                        self._data['valor'].append(uhe['data'])
                        self._observ['valor'].append(uhe['observ'])
                        self._vol_ref['valor'].append(uhe['vol_ref'])
                        self._tipo_reg['valor'].append(uhe['tipo_reg'])
                        # Attach the natural-inflow series of this plant's gauge
                        vaz_nat = vazoes.vaz_nat.transpose()
                        vaz_nat = vaz_nat[self._posto["valor"][-1]-1]
                        vaz_nat = vaz_nat.transpose()
                        self._vazoes['valor'].append(vaz_nat)
                        # 'NE' (not built) or 'EE' (under study) plants must have no machines
                        if self._status['valor'][-1] == 'NE' or self._status['valor'][-1] == 'EE':
                            for iconj in range(5):
                                self._maq_por_conj['valor'][-1][iconj] = 0
                        # Time-dependent parameters, later overridden by MODIF.DAT
                        self._vol_mint['valor'].append(self._vol_min['valor'][-1]*np.ones((nanos, 12), 'f'))
                        self._vol_maxt['valor'].append(self._vol_max['valor'][-1]*np.ones((nanos, 12), 'f'))
                        self._vol_minp['valor'].append(self._vol_min['valor'][-1]*np.ones((nanos, 12), 'f'))
                        self._vaz_mint['valor'].append(self._vaz_min['valor'][-1]*np.ones((nanos, 12), 'f'))
                        self._cfugat['valor'].append(self._cfmed['valor'][-1]*np.ones((nanos, 12), 'f'))
                        self._cmont['valor'].append(self._cota_max['valor'][-1]*np.ones((nanos, 12), 'f'))
                        #
                        # Compute the useful (live) storage volume
                        #
                        if self._tipo_reg['valor'][-1] == 'M':
                            self._vol_util['valor'].append(self._vol_max['valor'][-1] - self._vol_min['valor'][-1])
                        else:
                            # Run-of-river: no live storage, min pinned to max
                            self._vol_util['valor'].append(float(0))
                            self._vol_min['valor'][-1] = self._vol_max['valor'][-1]
                        # Apply the MODIF.DAT modifications for this plant
                        usinadf = modif.bloco_usina['df'][modif.bloco_usina['df']['codigo'] == self._codigo['valor'][-1]]
                        self._acerta_modif(usinadf, dger)
                        # Compute derived parameters
                        #
                        # Re-compute the useful volume (MODIF may have changed limits)
                        #
                        if self._tipo_reg['valor'][-1] == 'M':
                            self._vol_util['valor'][-1] = self._vol_max['valor'][-1] - self._vol_min['valor'][-1]
                        else:
                            self._vol_min['valor'][-1] = self._vol_max['valor'][-1]
                        self._calc_pot_efetiva()
                        self._calc_vaz_efetiva()
                        self._calc_produtibs(nanos)
                        self._calc_engol_maximo()
                        # Time-dependent parameters later adjusted by EXPH.DAT
                        # (status 2 = existing/filled; zeros = not yet in operation)
                        if self._status['valor'][-1] == 'EX':
                            self._status_vol_morto['valor'].append(2 * np.ones((nanos, 12), 'i'))
                            self._status_motoriz['valor'].append(2 * np.ones((nanos, 12), 'i'))
                            self._vol_morto_tempo['valor'].append(np.zeros((nanos, 12), 'f'))
                            self._engol_tempo['valor'].append(self._engolimento['valor'][-1] * np.ones((nanos, 12), 'f'))
                            self._potencia_tempo['valor'].append(self._pot_efet['valor'][-1] * np.ones((nanos, 12), 'f'))
                            self._unidades_tempo['valor'].append(sum(self._maq_por_conj['valor'][-1]) * np.ones((nanos, 12), 'f'))
                        else:
                            self._status_vol_morto['valor'].append(np.zeros((nanos, 12), 'i'))
                            self._status_motoriz['valor'].append(np.zeros((nanos, 12), 'i'))
                            self._vol_morto_tempo['valor'].append(np.zeros((nanos, 12), 'f'))
                            if self._status['valor'][-1] == 'EE':
                                self._engol_tempo['valor'].append(self._engolimento['valor'][-1] * np.ones((nanos, 12), 'f'))
                                self._potencia_tempo['valor'].append(self._pot_efet['valor'][-1] * np.ones((nanos, 12), 'f'))
                                self._unidades_tempo['valor'].append(sum(self._maq_por_conj['valor'][-1]) * np.ones((nanos, 12), 'f'))
                            else:
                                self._engol_tempo['valor'].append(np.zeros((nanos, 12), 'f'))
                                self._potencia_tempo['valor'].append(np.zeros((nanos, 12), 'f'))
                                self._unidades_tempo['valor'].append(np.zeros((nanos, 12), 'i'))
                        #
                        # Allocate nanos x 12 matrices for each accumulated-productivity kind
                        #
                        self._ro_acum_a_ree['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_b_ree['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_c_ree['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_a_sist['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_b_sist['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_c_sist['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_65['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_max['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_med['valor'].append(np.zeros((nanos, 12), 'd'))
                        self._ro_acum_min['valor'].append(np.zeros((nanos, 12), 'd'))
                        # Apply the EXPH.DAT expansion schedule for this plant
                        usinadf = exph.bloco_usina['df'][exph.bloco_usina['df']['codigo'] == self._codigo['valor'][-1]]
                        self._acerta_exph(usinadf, dger)
                        self.nuhe += 1
                        self._numero_registros_ += 1
                    contador += 1
        except Exception as err:
            # next_line raises StopIteration at EOF: that is the normal exit,
            # where the code->index map and accumulated productivities are built.
            if isinstance(err, StopIteration):
                maior = np.array(self._codigo['valor'], dtype=int)
                maior = np.max(maior)
                # _mapa maps plant code -> list index; -1 marks unused codes
                self._mapa = -np.ones(maior+1, dtype=int)
                for i, codigo in enumerate(self._codigo['valor']):
                    self._mapa[codigo]=int(i)
                # Fix up the accumulated productivities
                self._prod_acum()
                print("OK! Leitura do", os.path.split(file_name)[1], "realizada com sucesso.")
            else:
                raise
def escrever(self, file_out: str) -> None:
"""
Implementa o método para escrita do arquivo HIDR.DAT que contem os dados cadastrais das usinas
hidrelétricas que podem ser utilizadas para a execucao do NEWAVE
:param file_out: string com o caminho completo para o arquivo
"""
self.dir_base = os.path.split(file_out)[0]
self.nome_arquivo = os.path.split(file_out)[1]
self._numero_registros_ = 0
formato = "{codigo: >5} {nome: <12} {posto: >4} {jusante: >5} {ree: >4} {vol_ini: >6} {status: >4} {modif: >6} {ano_i: >8} {ano_f: >8}\n"
if not os.path.isdir(os.path.split(file_out)[0]):
os.mkdir(os.path.split(file_out)[0])
try:
with open(file_out, 'w', encoding='latin-1') as f: # type: IO[str]
# Imprime Cabeçalho
f.write(" NUM NOME POSTO JUS REE V.INIC U.EXIS MODIF INIC.HIST FIM HIST\n")
f.write(" XXXX XXXXXXXXXXXX XXXX XXXX XXXX XXX.XX XXXX XXXX XXXX XXXX \n")
for iusi in range(self.nuhe):
linha = dict(
codigo=self._codigo['valor'][iusi],
nome=self._nome['valor'][iusi],
posto=self._posto['valor'][iusi],
jusante=self._jusante['valor'][iusi],
ree=self._ree['valor'][iusi],
vol_ini=self._vol_ini['valor'][iusi],
status=self._status['valor'][iusi],
modif=self._modif['valor'][iusi],
ano_i=self._ano_i['valor'][iusi],
ano_f=self._ano_f['valor'][iusi]
)
f.write(formato.format(**linha))
self._numero_registros_ += 1
except Exception as err:
raise
print("OK! Escrita do", os.path.split(file_out)[1], "realizada com sucesso.")
def get(self, entrada):
"""
Busca uma usina hidreletrica do arquivo CONFHD e retorna um dicionario de dados contendo todas as
informacoes desta usina
:param entrada: string com o nome da usina ou inteiro com o numero de referencia da usina
"""
if (type(entrada) == float) or (type(entrada) == int):
#for i, valor in enumerate(self._codigo["valor"]):
# if valor == int(entrada):
# posicao = i
# break
if type(entrada) == float:
entrada = int(entrada)
posicao = int(self._mapa[entrada])
if posicao == -1:
return None
if type(entrada) == str:
posicao = None
for i, valor in enumerate(self._nome["valor"]):
if (valor.upper()).strip() == (entrada.upper()).strip():
posicao = i
break
if posicao is None:
return None
uhe = {
'codigo': self._codigo['valor'][posicao],
'nome': self._nome['valor'][posicao],
'posto': self._posto['valor'][posicao],
'ree': self._ree["valor"][posicao],
'vol_ini': self._vol_ini["valor"][posicao],
'status': self._status["valor"][posicao],
'modif': self._modif["valor"][posicao],
'ano_i': self._ano_i["valor"][posicao],
'ano_f': self._ano_f["valor"][posicao],
'bdh': self._bdh['valor'][posicao],
'sist': self._sist['valor'][posicao],
'empr': self._empr['valor'][posicao],
'jusante': self._jusante['valor'][posicao],
'desvio': self._desvio['valor'][posicao],
'vol_min': self._vol_min['valor'][posicao],
'vol_max': self._vol_max['valor'][posicao],
'vol_vert': self._vol_vert['valor'][posicao],
'vol_min_desv': self._vol_min_desv['valor'][posicao],
'cota_min': self._cota_min['valor'][posicao],
'cota_max': self._cota_max['valor'][posicao],
'pol_cota_vol': self._pol_cota_vol['valor'][posicao],
'pol_cota_area': self._pol_cota_area['valor'][posicao],
'coef_evap': self._coef_evap['valor'][posicao],
'num_conj_maq': self._num_conj_maq['valor'][posicao],
'maq_por_conj': self._maq_por_conj['valor'][posicao],
'pef_por_conj': self._pef_por_conj['valor'][posicao],
'cf_hbqt': self._cf_hbqt['valor'][posicao],
'cf_hbqt_2': self._cf_hbqt['valor_2'][posicao],
'cf_hbqt_3': self._cf_hbqt['valor_3'][posicao],
'cf_hbqt_4': self._cf_hbqt['valor_4'][posicao],
'cf_hbqt_5': self._cf_hbqt['valor_5'][posicao],
'cf_hbqg': self._cf_hbqg['valor'][posicao],
'cf_hbqg_2': self._cf_hbqg['valor_2'][posicao],
'cf_hbqg_3': self._cf_hbqg['valor_3'][posicao],
'cf_hbqg_4': self._cf_hbqg['valor_4'][posicao],
'cf_hbqg_5': self._cf_hbqg['valor_5'][posicao],
'cf_hbpt': self._cf_hbpt['valor'][posicao],
'cf_hbpt_2': self._cf_hbpt['valor_2'][posicao],
'cf_hbpt_3': self._cf_hbpt['valor_3'][posicao],
'cf_hbpt_4': self._cf_hbpt['valor_4'][posicao],
'cf_hbpt_5': self._cf_hbpt['valor_5'][posicao],
'alt_efet_conj': self._alt_efet_conj['valor'][posicao],
'vaz_efet_conj': self._vaz_efet_conj['valor'][posicao],
'prod_esp': self._prod_esp['valor'][posicao],
'perda_hid': self._perda_hid['valor'][posicao],
'num_pol_vnj': self._num_pol_vnj['valor'][posicao],
'pol_vaz_niv_jus': self._pol_vaz_niv_jus['valor'][posicao],
'pol_vaz_niv_jus_2': self._pol_vaz_niv_jus['valor_2'][posicao],
'pol_vaz_niv_jus_3': self._pol_vaz_niv_jus['valor_3'][posicao],
'pol_vaz_niv_jus_4': self._pol_vaz_niv_jus['valor_4'][posicao],
'pol_vaz_niv_jus_5': self._pol_vaz_niv_jus['valor_5'][posicao],
'cota_ref_nivel_jus': self._cota_ref_nivel_jus['valor'][posicao],
'cfmed': self._cfmed['valor'][posicao],
'inf_canal_fuga': self._inf_canal_fuga['valor'][posicao],
'fator_carga_max': self._fator_carga_max['valor'][posicao],
'fator_carga_min': self._fator_carga_min['valor'][posicao],
'vaz_min': self._vaz_min['valor'][posicao],
'unid_base': self._unid_base['valor'][posicao],
'tipo_turb': self._tipo_turb['valor'][posicao],
'repres_conj': self._repres_conj['valor'][posicao],
'teifh': self._teifh['valor'][posicao],
'ip': self._ip['valor'][posicao],
'tipo_perda': self._tipo_perda['valor'][posicao],
'data': self._data['valor'][posicao],
'observ': self._observ['valor'][posicao],
'vol_ref': self._vol_ref['valor'][posicao],
'tipo_reg': self._tipo_reg['valor'][posicao],
'vazoes': self._vazoes['valor'][posicao],
'vol_mint': self._vol_mint['valor'][posicao],
'vol_maxt': self._vol_maxt['valor'][posicao],
'vol_minp': self._vol_minp['valor'][posicao],
'vaz_mint': self._vaz_mint['valor'][posicao],
'cmont': self._cmont['valor'][posicao],
'cfugat': self._cfugat['valor'][posicao],
'vol_util': self._vol_util['valor'][posicao],
'pot_efet': self._pot_efet['valor'][posicao],
'vaz_efet': self._vaz_efet['valor'][posicao],
'status_vol_morto': self._status_vol_morto['valor'][posicao],
'status_motoriz': self._status_motoriz['valor'][posicao],
'vol_morto_tempo': self._vol_morto_tempo['valor'][posicao],
'engol_tempo': self._engol_tempo['valor'][posicao],
'potencia_tempo': self._potencia_tempo['valor'][posicao],
'unidades_tempo': self._unidades_tempo['valor'][posicao],
'ro_65': self._ro_65['valor'][posicao],
'ro_50': self._ro_50['valor'][posicao],
'ro_equiv': self._ro_equiv['valor'][posicao],
'ro_equiv65': self._ro_equiv65['valor'][posicao],
'ro_min': self._ro_min['valor'][posicao],
'ro_max': self._ro_max['valor'][posicao],
'engolimento': self._engolimento['valor'][posicao],
'ro_acum_a_ree': self._ro_acum_a_ree['valor'][posicao],
'ro_acum_b_ree': self._ro_acum_b_ree['valor'][posicao],
'ro_acum_c_ree': self._ro_acum_c_ree['valor'][posicao],
'ro_acum_a_sist': self._ro_acum_a_sist['valor'][posicao],
'ro_acum_b_sist': self._ro_acum_b_sist['valor'][posicao],
'ro_acum_c_sist': self._ro_acum_c_sist['valor'][posicao],
'ro_acum': self._ro_acum['valor'][posicao],
'ro_acum_65': self._ro_acum_65['valor'][posicao],
'ro_acum_max': self._ro_acum_max['valor'][posicao],
'ro_acum_med': self._ro_acum_med['valor'][posicao],
'ro_acum_med': self._ro_acum_min['valor'][posicao]
}
return uhe
def put(self, uhe):
"""
Atualiza os dados da usina com do CONFHD de acordo com o dicionario de dados fornecido na entrada.
As chaves do dicionario de dados de entrada devem ser as mesmas do dicionario obtido atraves do
comando get.
:param uhe: dicionario de dados contendo informacoes da usina a ser atualizada.
"""
posicao = None
for i, valor in enumerate(self._codigo["valor"]):
if valor == uhe['codigo']:
posicao = i
break
if posicao is None:
return None
self._codigo['valor'][posicao] = uhe['codigo']
self._nome['valor'][posicao] = uhe['nome']
self._posto['valor'][posicao] = uhe['posto']
self._bdh['valor'][posicao] = uhe['bdh']
self._sist['valor'][posicao] = uhe['sist']
self._empr['valor'][posicao] = uhe['empr']
self._jusante['valor'][posicao] = uhe['jusante']
self._desvio['valor'][posicao] = uhe['desvio']
self._vol_min['valor'][posicao] = uhe['vol_min']
self._vol_max['valor'][posicao] = uhe['vol_max']
self._vol_vert['valor'][posicao] = uhe['vol_vert']
self._vol_min_desv['valor'][posicao] = uhe['vol_min_desv']
self._cota_min['valor'][posicao] = uhe['cota_min']
self._cota_max['valor'][posicao] = uhe['cota_max']
self._pol_cota_vol['valor'][posicao] = uhe['pol_cota_vol']
self._pol_cota_area['valor'][posicao] = uhe['pol_cota_area']
self._coef_evap['valor'][posicao] = uhe['coef_evap']
self._num_conj_maq['valor'][posicao] = uhe['num_conj_maq']
self._maq_por_conj['valor'][posicao] = uhe['maq_por_conj']
self._pef_por_conj['valor'][posicao] = uhe['pef_por_conj']
self._cf_hbqt['valor'][posicao] = uhe['cf_hbqt']
self._cf_hbqt['valor_2'][posicao] = uhe['cf_hbqt_2']
self._cf_hbqt['valor_3'][posicao] = uhe['cf_hbqt_3']
self._cf_hbqt['valor_4'][posicao] = uhe['cf_hbqt_4']
self._cf_hbqt['valor_5'][posicao] = uhe['cf_hbqt_5']
self._cf_hbqg['valor'][posicao] = uhe['cf_hbqg']
self._cf_hbqg['valor_2'][posicao] = uhe['cf_hbqg_2']
self._cf_hbqg['valor_3'][posicao] = uhe['cf_hbqg_3']
self._cf_hbqg['valor_4'][posicao] = uhe['cf_hbqg_4']
self._cf_hbqg['valor_5'][posicao] = uhe['cf_hbqg_5']
self._cf_hbpt['valor'][posicao] = uhe['cf_hbpt']
self._cf_hbpt['valor_2'][posicao] = uhe['cf_hbpt_2']
self._cf_hbpt['valor_3'][posicao] = uhe['cf_hbpt_3']
self._cf_hbpt['valor_4'][posicao] = uhe['cf_hbpt_4']
self._cf_hbpt['valor_5'][posicao] = uhe['cf_hbpt_5']
self._alt_efet_conj['valor'][posicao] = uhe['alt_efet_conj']
self._vaz_efet_conj['valor'][posicao] = uhe['vaz_efet_conj']
self._prod_esp['valor'][posicao] = uhe['prod_esp']
self._perda_hid['valor'][posicao] = uhe['perda_hid']
self._num_pol_vnj['valor'][posicao] = uhe['num_pol_vnj']
self._pol_vaz_niv_jus['valor'] = uhe['pol_vaz_niv_jus']
self._pol_vaz_niv_jus['valor_2'][posicao] = uhe['pol_vaz_niv_jus_2']
self._pol_vaz_niv_jus['valor_3'][posicao] = uhe['pol_vaz_niv_jus_3']
self._pol_vaz_niv_jus['valor_4'][posicao] = uhe['pol_vaz_niv_jus_4']
self._pol_vaz_niv_jus['valor_5'][posicao] = uhe['pol_vaz_niv_jus_5']
self._cota_ref_nivel_jus['valor'][posicao] = uhe['cota_ref_nivel_jus']
self._cfmed['valor'][posicao] = uhe['cfmed']
self._inf_canal_fuga['valor'][posicao] = uhe['inf_canal_fuga']
self._fator_carga_max['valor'][posicao] = uhe['fator_carga_max']
self._fator_carga_min['valor'][posicao] = uhe['fator_carga_min']
self._vaz_min['valor'][posicao] = uhe['vaz_min']
self._unid_base['valor'][posicao] = uhe['unid_base']
self._tipo_turb['valor'][posicao] = uhe['tipo_turb']
self._repres_conj['valor'][posicao] = uhe['repres_conj']
self._teifh['valor'][posicao] = uhe['teifh']
self._ip['valor'][posicao] = uhe['ip']
self._tipo_perda['valor'][posicao] = uhe['tipo_perda']
self._data['valor'][posicao] = uhe['data']
self._observ['valor'][posicao] = uhe['observ']
self._vol_ref['valor'][posicao] = uhe['vol_ref']
self._tipo_reg['valor'][posicao] = uhe['tipo_reg']
self._vazoes['valor'][posicao] = uhe['vazoes']
self._vol_mint['valor'][posicao] = uhe['vol_mint']
self._vol_maxt['valor'][posicao] = uhe['vol_maxt']
self._vol_minp['valor'][posicao] = uhe['vol_minp']
self._vaz_mint['valor'][posicao] = uhe['vaz_mint']
self._cfugat['valor'][posicao] = uhe['cfugat']
self._vol_util['valor'][posicao] = uhe['vol_util']
self._pot_efet['valor'][posicao] = uhe['pot_efet']
self._vaz_efet['valor'][posicao] = uhe['vaz_efet']
self._status_vol_morto['valor'][posicao] = uhe['status_vol_morto']
self._status_motoriz['valor'][posicao] = uhe['status_motoriz']
self._vol_morto_tempo['valor'][posicao] = uhe['vol_morto_tempo']
self._engol_tempo['valor'][posicao] = uhe['engol_tempo']
self._potencia_tempo['valor'][posicao] = uhe['potencia_tempo']
self._unidades_tempo['valor'][posicao] = uhe['unidades_tempo']
self._ro_65['valor'][posicao] = uhe['ro_65']
self._ro_50['valor'][posicao] = uhe['ro_50']
self._ro_equiv['valor'][posicao] = uhe['ro_equiv']
self._ro_equiv65['valor'][posicao] = uhe['ro_equiv65']
self._ro_min['valor'][posicao] = uhe['ro_min']
self._ro_max['valor'][posicao] = uhe['ro_max']
self._engolimento['valor'][posicao] = uhe['engolimento']
print(np.shape(self._copiavazoes))
for iano in range(np.shape(self._copiavazoes)[0]):
for imes in range(12):
self._copiavazoes[iano][imes][self._posto['valor'][posicao]-1] = self._vazoes['valor'][posicao][iano][imes]
return 'sucesso'
def help(self, parametro):
"""
Detalha o tipo de informacao de uma chave do dicionario de dados obtido pelo comando get.
:param parametro: string contendo a chave do dicionario de dados cuja o detalhamento eh desejado
"""
duvida = getattr(self, '_'+parametro)
return duvida['descricao']
# Calcula Vazao Incremental
    def vaz_inc(self, uhe, iano, imes):
        """
        Incremental inflow of plant *uhe* at year *iano*, month *imes*:
        its natural inflow minus the natural inflows of all operating
        upstream plants.

        :param uhe: plant dict as returned by ``get``
        :param iano: year index into the study horizon
        :param imes: month index (0-11)
        :return: array of incremental inflows over the history, or 0 when
                 the plant is not in operation in that month
        """
        def Montante(uhe, iano, imes):
            # Recursively yield the nearest operating plants upstream of uhe
            # (status_vol_morto == 2 marks a filled/operating plant).
            for iusi in self.lista_uhes():
                usina = self.get(iusi)
                if usina['jusante'] == uhe['codigo']:
                    if usina['status_vol_morto'][iano][imes] == 2:
                        yield iusi
                    else:
                        yield from Montante(usina, iano, imes)
        # Start from the plant's own natural inflow, then subtract the
        # upstream natural inflows.
        # NOTE(review): this first assignment is a view into uhe['vazoes'];
        # if no upstream plant is yielded, the zero-clamp below would mutate
        # the stored history in place -- confirm inflows are always >= 0.
        incremental = uhe['vazoes'][:,imes]
        if uhe['status_vol_morto'][iano][imes] != 2:
            print ('Erro: Tentativa de calculo de Incremental para usina (', uhe['nome'], ') fora de operacao no mes ', imes, ' e ano ', iano)
            return 0
        else:
            for iusina in Montante(uhe, iano, imes):
                usina = self.get(iusina)
                incremental = incremental - usina['vazoes'][:,imes]
        # Clamp any negative incremental inflow to zero
        codigos = np.where(incremental<0)
        incremental[codigos] = 0
        return incremental
    def vaz_inc_entre_res(self, codigo, ianoconf, imesconf):
        """
        Incremental inflow of plant *codigo* counted only between
        reservoirs: subtracts the inflows of the nearest *upstream
        reservoirs* (operating plants with live storage), recursing
        through run-of-river and non-operating plants.

        :param codigo: plant code
        :param ianoconf: year index into the study horizon
        :param imesconf: month index (0-11)
        :return: numpy array over the inflow history, or 0 when the plant
                 is not in operation in that month
        """
        uhe = self.get(codigo)
        nanos_hist = len(uhe['vazoes'])
        def Montante(codigo, iano, imes):
            # Yield the nearest upstream reservoirs (operating plants with
            # vol_util > 0); recurse past plants without live storage or
            # not yet in operation.
            #for iusi in self.lista_uhes():
            #    usina = self.get(iusi)
            for iusi, jusante in enumerate(self._jusante['valor']):
                if jusante == codigo:
                    if self._status_vol_morto['valor'][iusi][iano][imes] == 2:
                        if self._vol_util['valor'][iusi] > 0:
                            yield iusi
                        else:
                            yield from Montante(self._codigo['valor'][iusi], iano, imes)
                    else:
                        yield from Montante(self._codigo['valor'][iusi], iano, imes)
        if uhe['status_vol_morto'][ianoconf][imesconf] != 2:
            print ('Erro: Tentativa de calculo de Incremental para usina (', uhe['nome'], ') fora de operacao no mes ', imesconf, ' e ano ', ianoconf)
            return 0
        else:
            # Copy the plant's own inflows, then subtract the upstream ones.
            incremental = np.zeros(nanos_hist)
            for ianoh in range(nanos_hist):
                incremental[ianoh] = uhe['vazoes'][ianoh][imesconf]
            for iusina in Montante(codigo, ianoconf, imesconf):
                for ianoh in range(nanos_hist):
                    incremental[ianoh] = incremental[ianoh] - self._vazoes['valor'][iusina][ianoh][imesconf]
        # Clamp any negative incremental inflow to zero
        codigos = np.where(incremental<0)
        incremental[codigos] = 0
        return incremental
##########################################################################################################
# Calcula Parametros das Usinas
##########################################################################################################
#def _calc_vol_util(self): # Calcula Volume Util da Usina
# if self._tipo_reg['valor'][-1] == 'M':
# self._vol_util['valor'].append(self._vol_max['valor'][-1] - self._vol_min['valor'][-1])
# else:
# self._vol_util['valor'].append(float(0))
# self._vol_min['valor'][-1] = self._vol_max['valor'][-1]
def _calc_pot_efetiva(self): # Calcula Potencia Efetiva da Usina
a = np.array(self._maq_por_conj["valor"][-1])
b = np.array(self._pef_por_conj["valor"][-1])
self._pot_efet['valor'].append(np.vdot(a, b))
def _calc_vaz_efetiva(self): # Calcula Vazao Efetiva da Usina
a = np.array(self._maq_por_conj["valor"][-1])
b = np.array(self._vaz_efet_conj["valor"][-1])
self._vaz_efet['valor'].append(np.vdot(a, b))
    def _calc_produtibs(self, nanos):
        """
        Compute, for the last-read plant, the productivities associated
        with several reference volumes (65% and 50% of live storage,
        maximum, minimum, and the volume-averaged equivalents), one value
        per month over *nanos* years.

        :param nanos: number of years in the study horizon
        """
        self._ro_65['valor'].append(np.zeros( (nanos,12), 'd' ))
        self._ro_50['valor'].append(np.zeros( (nanos,12), 'd' ))
        self._ro_equiv['valor'].append(np.zeros( (nanos,12), 'd' ))
        self._ro_equiv65['valor'].append(np.zeros( (nanos,12), 'd' ))
        self._ro_min['valor'].append(np.zeros( (nanos,12), 'd' ))
        self._ro_max['valor'].append(np.zeros( (nanos,12), 'd' ))
        # Coefficients of the degree-4 volume -> forebay-level polynomial
        a = self._pol_cota_vol["valor"][-1][0]
        b = self._pol_cota_vol["valor"][-1][1]
        c = self._pol_cota_vol["valor"][-1][2]
        d = self._pol_cota_vol["valor"][-1][3]
        e = self._pol_cota_vol["valor"][-1][4]
        # Productivity at 65% of the live storage
        # (tipo_perda == 2: loss given in meters; otherwise in percent)
        volume = self._vol_min['valor'][-1] + 0.65*self._vol_util['valor'][-1]
        cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
        for iano in range(nanos):
            for imes in range(12):
                cfuga = self._cfugat['valor'][-1][iano][imes]
                if self._tipo_perda['valor'][-1] == 2:
                    self._ro_65['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga - self._perda_hid['valor'][-1])
                else:
                    self._ro_65['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga)*(1. - self._perda_hid['valor'][-1]/100)
        # Productivity at 50% of the live storage
        volume = self._vol_min['valor'][-1] + 0.50*self._vol_util['valor'][-1]
        cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
        for iano in range(nanos):
            for imes in range(12):
                cfuga = self._cfugat['valor'][-1][iano][imes]
                if self._tipo_perda['valor'][-1] == 2:
                    self._ro_50['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga - self._perda_hid['valor'][-1])
                else:
                    self._ro_50['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga)*(1. - self._perda_hid['valor'][-1]/100)
        # Productivity at the maximum volume
        volume = self._vol_max['valor'][-1]
        cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
        for iano in range(nanos):
            for imes in range(12):
                cfuga = self._cfugat['valor'][-1][iano][imes]
                if self._tipo_perda['valor'][-1] == 2:
                    self._ro_max['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga - self._perda_hid['valor'][-1])
                else:
                    self._ro_max['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga)*(1. - self._perda_hid['valor'][-1]/100)
        # Productivity at the minimum volume
        volume = self._vol_min['valor'][-1]
        cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
        for iano in range(nanos):
            for imes in range(12):
                cfuga = self._cfugat['valor'][-1][iano][imes]
                if self._tipo_perda['valor'][-1] == 2:
                    self._ro_min['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga - self._perda_hid['valor'][-1])
                else:
                    self._ro_min['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga)*(1. - self._perda_hid['valor'][-1]/100)
        # Equivalent productivity: level averaged over the live storage
        # (analytic integral of the cota-volume polynomial divided by the
        # volume span); cota65 averages only up to 65% of live storage.
        if ( self._vol_util['valor'][-1] > 0):
            cota = 0
            cota65 = 0
            Vol65 = self._vol_min['valor'][-1] + 0.65*self._vol_util['valor'][-1]
            for i in range(5):
                cota = cota + self._pol_cota_vol["valor"][-1][i] * (self._vol_max['valor'][-1]**(i+1)) / (i+1)
                cota = cota - self._pol_cota_vol["valor"][-1][i] * (self._vol_min['valor'][-1]**(i+1)) / (i+1)
                cota65 = cota65 + self._pol_cota_vol["valor"][-1][i] * (Vol65**(i+1)) / (i+1)
                cota65 = cota65 - self._pol_cota_vol["valor"][-1][i] * (self._vol_min['valor'][-1]**(i+1)) / (i+1)
            cota = cota / self._vol_util['valor'][-1]
            cota65 = cota65 / (Vol65 - self._vol_min['valor'][-1])
        else:
            # Run-of-river plant: reuse the level at the minimum volume,
            # still bound to ``cota`` from the block just above.
            cota65 = cota
        for iano in range(nanos):
            for imes in range(12):
                cfuga = self._cfugat['valor'][-1][iano][imes]
                if self._tipo_perda['valor'][-1] == 2:
                    self._ro_equiv['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga - self._perda_hid['valor'][-1])
                    self._ro_equiv65['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota65 - cfuga - self._perda_hid['valor'][-1])
                else:
                    self._ro_equiv['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota - cfuga)*(1. - self._perda_hid['valor'][-1]/100)
                    self._ro_equiv65['valor'][-1][iano][imes] = self._prod_esp['valor'][-1] * (cota65 - cfuga)*(1. - self._perda_hid['valor'][-1]/100)
        return
    def _prod_acum(self):
        """
        Accumulate, for every plant and every (year, month) of the horizon,
        the productivities of the plant plus those of every downstream plant
        whose dead storage is already filled (status_vol_morto == 2).

        Fills the _ro_acum* structures and the per-REE / per-subsystem
        breakdowns (_ro_acum_{a,b,c}_{ree,sist}).
        """
        def cascata(confhd, codigo, iano,imes):
            # Yield the code of `codigo` and of every plant downstream of it
            # whose dead storage is filled in month (iano, imes).
            current = confhd.get(codigo)
            if current['status_vol_morto'][iano][imes] == 2:
                yield current['codigo']
            while current['jusante'] != 0:
                current = confhd.get(current['jusante'])
                if current['status_vol_morto'][iano][imes] == 2:
                    yield current['codigo']
        #
        # Sweep every plant of CONFHD to insert accumulated productivities
        #
        for reg, codigo in enumerate(self._codigo['valor']):
            nanos = len(self._status_vol_morto['valor'][reg])
            #
            # Productivities must be computed for each month/year of the horizon
            #
            for iano in range(nanos):
                for imes in range(12):
                    trocouRee = 0       # counts downstream plants outside this plant's REE
                    trocouSist = 0      # counts downstream plants outside this plant's subsystem
                    FioRee = True       # no reservoir (vol_util > 0) met yet after leaving the REE
                    FioSist = True      # no reservoir met yet after leaving the subsystem
                    for iusina in cascata(self, codigo, iano, imes):
                        uhe = self.get(iusina)
                        produtib = uhe['ro_equiv'][iano][imes]
                        produtib65 = uhe['ro_equiv65'][iano][imes]
                        produtibMax = uhe['ro_max'][iano][imes]
                        produtibMed = uhe['ro_65'][iano][imes]
                        produtibMin = uhe['ro_min'][iano][imes]
                        # Only fully motorized plants (status 2) contribute.
                        if uhe['status_motoriz'][iano][imes] == 2:
                            self._ro_acum['valor'][reg][iano][imes] += produtib
                            self._ro_acum_65['valor'][reg][iano][imes] += produtib65
                            self._ro_acum_max['valor'][reg][iano][imes] += produtibMax
                            self._ro_acum_med['valor'][reg][iano][imes] += produtibMed
                            self._ro_acum_min['valor'][reg][iano][imes] += produtibMin
                        if uhe['sist'] != self._sist['valor'][reg]:
                            trocouSist = trocouSist + 1
                        if uhe['ree'] != self._ree['valor'][reg]:
                            trocouRee = trocouRee + 1
                        # REE breakdown: (a) still inside the REE; outside it,
                        # (b) before the first reservoir, (c) after it.
                        if trocouRee == 0:
                            if uhe['status_motoriz'][iano][imes] == 2:
                                self._ro_acum_a_ree['valor'][reg][iano][imes] += produtib
                        else:
                            if uhe['vol_util'] > 0:
                                FioRee = False
                            if FioRee:
                                if uhe['status_motoriz'][iano][imes] == 2:
                                    self._ro_acum_b_ree['valor'][reg][iano][imes] += produtib
                            else:
                                if uhe['status_motoriz'][iano][imes] == 2:
                                    self._ro_acum_c_ree['valor'][reg][iano][imes] += produtib
                        # Subsystem breakdown: same (a)/(b)/(c) rule, by subsystem.
                        if trocouSist == 0:
                            if uhe['status_motoriz'][iano][imes] == 2:
                                self._ro_acum_a_sist['valor'][reg][iano][imes] += produtib
                        else:
                            if uhe['vol_util'] > 0:
                                FioSist = False
                            if FioSist:
                                if uhe['status_motoriz'][iano][imes] == 2:
                                    self._ro_acum_b_sist['valor'][reg][iano][imes] += produtib
                            else:
                                if uhe['status_motoriz'][iano][imes] == 2:
                                    self._ro_acum_c_sist['valor'][reg][iano][imes] += produtib
def _prod_acum_entre_res_ree(self, uhe, iano, imes):
if uhe['jusante'] == 0:
return 0
uhe_nova = self.get(uhe['jusante'])
if uhe_nova['vol_util'] != 0:
return 0.
elif uhe_nova['ree'] != uhe['ree']:
return 0.
elif uhe_nova['status_motoriz'][iano][imes] == 2:
return uhe_nova['ro_equiv'] + self._prod_acum_entre_res_ree(uhe_nova, iano, imes)
else:
return self._prod_acum_entre_res_ree(uhe_nova, iano, imes)
#
# def ProdAcumEntreResSist(self, iano, imes, usinas):
# if self.Jusante == 0:
# return 0
# for iusina in usinas:
# if iusina.Codigo == self.Jusante:
# if iusina.VolUtil != 0:
# return 0.
# elif self.Sist != iusina.Sist:
# return 0.
# elif iusina.StatusMotoriz[iano][imes] == 2:
# return iusina.RoEquiv + iusina.ProdAcumEntreResSist(iano, imes, usinas)
# else:
# return iusina.ProdAcumEntreResSist(iano, imes, usinas)
# break
    def _calc_engol(self, ql):
        """
        Estimate the plant's maximum turbined flow for a net head `ql`,
        summing the contribution of each machine group scaled by
        (ql / effective head) ** alpha.

        :param ql: net head (m)
        :return: maximum turbined flow (m^3/s)
        """
        engol = 0.
        for i in range(5):                          # sweep the (up to 5) machine groups
            if self._maq_por_conj['valor'][-1][i] > 0:
                if ql < self._alt_efet_conj['valor'][-1][i]:
                    # NOTE(review): every other attribute in this class is read
                    # as self._x['valor'][-1]; here _tipo_turb is compared
                    # directly to an int. If _tipo_turb is the usual dict, the
                    # comparison is always False and alpha is always 0.2 in
                    # this branch — confirm how _tipo_turb is stored.
                    if self._tipo_turb == 1 or self._tipo_turb == 3:
                        alpha = 0.5
                    else:
                        alpha = 0.2
                else:
                    alpha = -1
                if self._alt_efet_conj['valor'][-1][i] != 0:
                    engol = engol + self._maq_por_conj['valor'][-1][i]*self._vaz_efet_conj['valor'][-1][i]*((ql/self._alt_efet_conj['valor'][-1][i])**alpha)
        return engol
def _calc_engol_maximo(self): # Estima Engolimento Maximo da Usina
a = self._pol_cota_vol['valor'][-1][0]
b = self._pol_cota_vol['valor'][-1][1]
c = self._pol_cota_vol['valor'][-1][2]
d = self._pol_cota_vol['valor'][-1][3]
e = self._pol_cota_vol['valor'][-1][4]
# Calcula Engolimento a 65% do Volume Util
volume = self._vol_min['valor'][-1] + 0.65*self._vol_util['valor'][-1]
cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
queda65 = cota - self._cfmed['valor'][-1]
engol65 = self._calc_engol(queda65)
# Calcula Engolimento a 50% do Volume Util
volume = self._vol_min['valor'][-1] + 0.50*self._vol_util['valor'][-1]
cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
queda50 = cota - self._cfmed['valor'][-1]
engol50 = self._calc_engol(queda50)
# Calcula Engolimento Associada ao Volume Maximo
volume = self._vol_max['valor'][-1]
cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
quedaMax = cota - self._cfmed['valor'][-1]
engolMax = self._calc_engol(quedaMax)
# Calcula Engolimento Associada ao Volume Minimo
volume = self._vol_min['valor'][-1]
cota = a + b*volume + c*volume**2 + d*volume**3 + e*volume**4
quedaMin = cota - self._cfmed['valor'][-1]
engolMin = self._calc_engol(quedaMin)
# Calcula Engolimento Associado a Altura Equivalente
if ( self._vol_util['valor'][-1] > 0):
cota = 0
for i in range(5):
cota = cota + self._pol_cota_vol['valor'][-1][i] * (self._vol_max['valor'][-1]**(i+1)) / (i+1)
cota = cota - self._pol_cota_vol['valor'][-1][i] * (self._vol_min['valor'][-1]**(i+1)) / (i+1)
cota = cota / self._vol_util['valor'][-1]
quedaEquiv = cota - self._cfmed['valor'][-1]
engolEquiv = self._calc_engol(quedaEquiv)
self._engolimento['valor'].append((engol50+engol65+engolEquiv+engolMax+engolMin)/5)
return
def lista_uhes(self):
"""
Calcula um generator contendo todos os codigos de referencia das usinas pertencentes ao CONFHD.
"""
for i in range(self.nuhe):
yield self._codigo["valor"][i]
    def _acerta_modif(self, df, dger):
        """
        Apply the MODIF records of this plant (rows of *df*) over the
        registration data already loaded, one keyword at a time.

        Row layout (positional, from df.iloc[...].values): registro[4] is the
        keyword, registro[5] the value, registro[6] an extra field (group /
        month index, or a unit marker containing 'h' or '%'); registro[0] and
        registro[3] carry year and month for dated records — TODO confirm
        against the reader that builds *df*.

        :param df: DataFrame with this plant's MODIF records
        :param dger: general-data object exposing ano_ini['valor'] and
                     num_anos['valor']
        """
        tamanho = df.shape
        tamanho = tamanho[0]
        for linha in range(tamanho):
            registro = df.iloc[linha].values
            #
            # Type-zero keywords - just overwrite scalar registration values
            #
            if registro[4].upper() == 'NUMCNJ':
                self._num_conj_maq['valor'][-1] = registro[5]
            if registro[4].upper() == 'PRODESP':
                self._prod_esp['valor'][-1] = registro[5]
            if registro[4].upper() == 'TEIF':
                self._teifh['valor'][-1] = registro[5]
            if registro[4].upper() == 'IP':
                self._ip['valor'][-1] = registro[5]
            if registro[4].upper() == 'PERDHID':
                self._perda_hid['valor'][-1] = registro[5]
            if registro[4].upper() == 'VAZMIN':
                self._vaz_min['valor'][-1] = registro[5]
            if registro[4].upper() == 'NUMBAS':
                self._unid_base['valor'][-1] = registro[5]
            #
            # Type-one keywords - two fields (value + group/month index)
            #
            if registro[4].upper() == 'NUMMAQ':
                nr_conj = int(registro[6])
                self._maq_por_conj['valor'][-1][nr_conj-1] = int(registro[5])
            if registro[4].upper() == 'POTEFE':
                nr_conj = int(registro[6])
                self._pef_por_conj['valor'][-1][nr_conj-1] = registro[5]
            if registro[4].upper() == 'COEFEVAP':
                mes = int(registro[6])
                self._coef_evap['valor'][-1][mes-1] = registro[5]
            # NOTE(review): .find(...) == 1 requires the unit marker at index 1
            # of the field — confirm the field layout produced by the reader.
            if registro[4].upper() == 'VOLMIN':
                if registro[6].find("%") == 1:
                    self._vol_min['valor'][-1] = self._vol_min['valor'][-1] + \
                                                 float(registro[5]) * self._vol_util['valor'][-1] / 100
                if registro[6].find("h") == 1:
                    self._vol_min['valor'][-1] = registro[5]
            if registro[4].upper() == 'VOLMAX':
                if registro[6].find("%") == 1:
                    self._vol_max['valor'][-1] = self._vol_min['valor'][-1] + \
                                                 float(registro[5]) * self._vol_util['valor'][-1] / 100
                if registro[6].find("h") == 1:
                    self._vol_max['valor'][-1] = registro[5]
            #
            # Type-two keywords - PCA and PCV polynomial coefficients
            #
            if registro[4].upper() == 'VOLCOTA':
                self._pol_cota_vol['valor'][-1] = registro[5]
            if registro[4].upper() == 'COTAREA':
                self._pol_cota_area['valor'][-1] = registro[5]
            #
            # Type-3 keywords - date and value (value holds from the given
            # year/month until the end of the study horizon)
            #
            if registro[4].upper() == 'CFUGA':
                ano = int(registro[0]) - dger.ano_ini['valor']
                mes = int(registro[3]) - 1
                while ano < dger.num_anos['valor']:
                    while mes < 12:
                        self._cfugat['valor'][-1][ano][mes] = registro[5]
                        mes += 1
                    mes = 0
                    ano += 1
            if registro[4].upper() == 'VAZMINT':
                ano = int(registro[0]) - dger.ano_ini['valor']
                mes = int(registro[3]) - 1
                while ano < dger.num_anos['valor']:
                    while mes < 12:
                        self._vaz_mint['valor'][-1][ano][mes] = registro[5]
                        mes += 1
                    mes = 0
                    ano += 1
            if registro[4].upper() == 'CMONT':
                ano = int(registro[0]) - dger.ano_ini['valor']
                mes = int(registro[3]) - 1
                while ano < dger.num_anos['valor']:
                    while mes < 12:
                        self._cmont['valor'][-1][ano][mes] = registro[5]
                        mes += 1
                    mes = 0
                    ano += 1
            #
            # Type-4 keywords - date, value and unit ('h' = absolute volume,
            # '%' = percentage of the useful volume added to vol_min)
            #
            if registro[4].upper() == 'VMINP':
                ano = int(registro[0]) - dger.ano_ini['valor']
                mes = int(registro[3]) - 1
                while ano < dger.num_anos['valor']:
                    while mes < 12:
                        if registro[6].find("h") == 1:
                            self._vol_minp['valor'][-1][ano][mes] = registro[5]
                        if registro[6].find("%") == 1:
                            self._vol_minp['valor'][-1][ano][mes] = self._vol_min['valor'][-1] + \
                                                                    float(registro[5]) * self._vol_util['valor'][-1] / 100
                        mes += 1
                    mes = 0
                    ano += 1
            if registro[4].upper() == 'VMINT':
                ano = int(registro[0]) - dger.ano_ini['valor']
                mes = int(registro[3]) - 1
                while ano < dger.num_anos['valor']:
                    while mes < 12:
                        if registro[6].find("h") == 1:
                            self._vol_mint['valor'][-1][ano][mes] = registro[5]
                        if registro[6].find("%") == 1:
                            self._vol_mint['valor'][-1][ano][mes] = self._vol_min['valor'][-1] + \
                                                                    float(registro[5]) * self._vol_util['valor'][-1] / 100
                        mes += 1
                    mes = 0
                    ano += 1
            if registro[4].upper() == 'VMAXT':
                ano = int(registro[0]) - dger.ano_ini['valor']
                mes = int(registro[3]) - 1
                while ano < dger.num_anos['valor']:
                    while mes < 12:
                        if registro[6].find("h") == 1:
                            self._vol_maxt['valor'][-1][ano][mes] = registro[5]
                        if registro[6].find("%") == 1:
                            self._vol_maxt['valor'][-1][ano][mes] = self._vol_min['valor'][-1] + \
                                                                    float(registro[5]) * self._vol_util['valor'][-1] / 100
                        mes += 1
                    mes = 0
                    ano += 1
        return
    def _acerta_exph(self, df, dger):
        """
        Apply the EXPH expansion records of this plant (rows of *df*):
        the dead-storage filling schedule and the unit commissioning dates,
        then refresh the month-by-month motorization status.

        :param df: DataFrame with this plant's EXPH records
        :param dger: general-data object exposing ano_ini['valor'] and
                     num_anos['valor']
        """
        tamanho = df.shape
        tamanho = tamanho[0]
        #
        # Record layout
        #
        # registro[0] = 'codigo',
        # registro[1] = 'nome',
        # registro[2] = 'mesi_evm',
        # registro[3] = 'anoi_evm',
        # registro[4] = 'dura_evm',
        # registro[5] = 'perc_evm',
        # registro[6] = 'mesi_tur',
        # registro[7] = 'anoi_tur',
        # registro[8] = 'comentar',
        # registro[9] = 'nume_tur',
        # registro[10] = 'nume_cnj']
        # NOTE(review): the code below reads registro[8] as the entering power
        # (float), while this legend labels it 'comentar' — legend may be stale.
        if tamanho > 0:
            registro = df.iloc[0].values
            #
            # Handle the dead-storage (volume morto) filling schedule
            #
            if not np.isnan(registro[2]):
                dur_vm = int(registro[4])
                mesinicial = int(registro[2])
                anoinicial = int(registro[3])
                volume = self._vol_min['valor'][-1] * float(registro[5]) / 100
                volume = (self._vol_min['valor'][-1] - volume) / dur_vm
                vol_frac = volume
                for iano in range(anoinicial - dger.ano_ini['valor'], dger.num_anos['valor']):
                    for imes in range(mesinicial - 1, 12):
                        if dur_vm > 0:
                            # Still filling: status 1, cumulative filled volume.
                            self._status_vol_morto['valor'][-1][iano][imes] = 1
                            self._vol_morto_tempo['valor'][-1][iano][imes] += volume
                            volume += vol_frac
                            dur_vm -= 1
                        else:
                            # Filled: status 2, nothing left to fill.
                            self._status_vol_morto['valor'][-1][iano][imes] = 2
                            self._vol_morto_tempo['valor'][-1][iano][imes] = 0.
                    mesinicial = 1
            else:
                # No filling schedule: dead storage is filled over the whole horizon.
                self._status_vol_morto['valor'][-1] = 2 * np.ones((dger.num_anos['valor'], 12), 'i')
        for linha in range(tamanho):
            registro = df.iloc[linha].values
            if not np.isnan(registro[6]):
                #
                # Fill the temporal evolution of (1) number of units;
                # (2) maximum turbined flow; (3) effective power
                #
                mes_ent = int(registro[6])
                ano_ent = int(registro[7])
                pot_ent = float(registro[8])
                unidade = int(registro[9])
                conjunto = int(registro[10])
                if mes_ent > 0:
                    mesinicial = mes_ent
                self._maq_por_conj['valor'][-1][conjunto - 1] = unidade
                self._pef_por_conj['valor'][-1][conjunto - 1] = pot_ent
                self._calc_pot_efetiva()
                self._calc_engol_maximo()
                for iano in range(ano_ent - dger.ano_ini['valor'], dger.num_anos['valor']):
                    for imes in range(mesinicial - 1, 12):
                        self._unidades_tempo['valor'][-1][iano][imes] += 1
                        self._engol_tempo['valor'][-1][iano][imes] = self._engolimento['valor'][-1]
                        self._potencia_tempo['valor'][-1][iano][imes] = self._pot_efet['valor'][-1]
                    mesinicial = 1
        #
        # Fix the motorization status: 2 = fully motorized (>= base units),
        # 1 = partially motorized, 0 = not yet entered
        #
        for iano in range(dger.num_anos['valor']):
            for imes in range(12):
                if self._unidades_tempo['valor'][-1][iano][imes] >= self._unid_base['valor'][-1]:
                    self._status_motoriz['valor'][-1][iano][imes] = 2
                elif self._unidades_tempo['valor'][-1][iano][imes] > 0:
                    self._status_motoriz['valor'][-1][iano][imes] = 1
                else:
                    if self._status_motoriz['valor'][-1][iano][imes] == 2:
                        self._status_motoriz['valor'][-1][iano][imes] = 1
                    else:
                        self._status_motoriz['valor'][-1][iano][imes] = 0
##########################################################################################################
# Plota Gráficos Diversos
##########################################################################################################
def plota_volume(self, uhe):
nanos = len(uhe['vol_mint'])
fig = plt.figure()
ax = plt.subplot(111)
x_axis = np.arange(1,nanos*12+1)
ax.plot(x_axis,uhe['vol_mint'].reshape(nanos*12),'g-.',lw=2, label = 'Vol.Min.Operat.')
ax.plot(x_axis,uhe['vol_maxt'].reshape(nanos*12),'g-.',lw=2, label = 'Vol.Max.Operat.')
ax.plot(x_axis,uhe['vol_max']*np.ones(nanos*12),'b-',lw=3, label = 'Vol.Minimo Real')
ax.plot(x_axis,uhe['vol_min']*np.ones(nanos*12),'b-',lw=3, label = 'Vol.Maximo Real')
ax.plot(x_axis,uhe['vol_minp'].reshape(nanos*12),'b-.',lw=2, label = 'Vol.Min.com Pen.')
plt.fill_between(x_axis,uhe['vol_mint'].reshape(nanos*12), uhe['vol_maxt'].reshape(nanos*12), facecolor='g', alpha=0.1)
titulo = 'Evolucao dos Volumes da Usina \n' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.xlabel('Mes de Estudo', fontsize=16)
plt.ylabel('Volume em hm^3', fontsize=16)
box = ax.get_position()
ax.set_position([ box.x0, box.y0, box.width*0.7, box.height] )
ax.legend(loc='center left', shadow=True, fontsize=12, bbox_to_anchor=(1, 0.5))
plt.show()
def plota_vaz_min(self, uhe):
nanos = len(uhe['vaz_mint'])
fig = plt.figure()
ax = plt.subplot(111)
x_axis = np.arange(1,nanos*12+1)
ax.plot(x_axis,uhe['vaz_mint'].reshape(nanos*12),'g-.',lw=2, label='Vaz.Min.Operat.')
ax.plot(x_axis,uhe['vaz_min']*np.ones(nanos*12),'b-',lw=3, label='Vaz.Min.Cadastro')
titulo = 'Evolucao da Vazao Minima da Usina \n' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.xlabel('Mes de Estudo', fontsize=16)
plt.ylabel('Vazao Minima em m^3', fontsize=16)
box = ax.get_position()
ax.set_position([ box.x0, box.y0, box.width*0.7, box.height] )
ax.legend(loc='center left', shadow=True, fontsize=12, bbox_to_anchor=(1, 0.5))
plt.show()
def plota_volmorto(self, uhe):
if uhe['status'] == 'EX':
print('Grafico de Volume Morto nao impresso, pois ', uhe['nome'], 'e uma usina existente')
return
nanos = len(uhe['vol_morto_tempo'])
nmeses = np.count_nonzero(uhe['vol_morto_tempo'])
legenda = str(nmeses) + ' Meses'
ax = plt.subplot(111)
x_axis = np.arange(1,nanos*12+1)
p1 = ax.plot(x_axis,uhe['vol_morto_tempo'].reshape(nanos*12),'g-.',lw=2, label = legenda )
titulo = 'Enchimento do Volume Morto da Usina \n' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.xlabel('Mes de Estudo', fontsize=16)
plt.ylabel('Volume Morto em hm^3', fontsize=16)
plt.legend(fontsize=12)
np.count_nonzero(uhe['vol_morto_tempo'])
plt.show()
def plota_potencia(self, uhe):
nanos = len(uhe['potencia_tempo'])
ax = plt.subplot(111)
x_axis = np.arange(1, nanos * 12 + 1)
p1 = ax.plot(x_axis, uhe['potencia_tempo'].reshape(nanos * 12), 'g-.', lw=2)
titulo = 'Evolucao da Potencia Efetiva da Usina \n' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.xlabel('Mes de Estudo', fontsize=16)
plt.ylabel('Potencia Efetiva em MW', fontsize=16)
plt.show()
def plot_vaz(self, uhe):
"""
Plota as todas as series historicas anuais da usina cujo dicionario de dados eh fornecia na entrada.
Em ciano estao as diversas series anuais.
Em azul escuro esta a ultima serie anual.
Em vermelho continuo esta a media mensal.
Em vermelho pontilhado esta a media menos ou mais o desvio padrao.
:param uhe: Dicionario de dados contendo informacoes de uma usina hidreletrica
"""
vaz_nat = uhe['vazoes']
x_axis = np.arange(1, 13)
plt.plot(x_axis, vaz_nat.transpose(), 'c-')
media = np.mean(vaz_nat, axis=0)
plt.plot(x_axis, media, 'r-', lw=3)
desvio = np.nanstd(vaz_nat, axis=0)
plt.plot(x_axis, media + desvio, 'r-.', lw=2)
plt.plot(x_axis, media - desvio, 'r-.', lw=2)
ultimo = len(vaz_nat) - 1
plt.plot(x_axis, vaz_nat[:][ultimo], 'b-')
titulo = 'Historico de Vazoes da Usina ' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.xlabel('Mes do Ano', fontsize=16)
plt.ylabel('Vazao', fontsize=16)
plt.show()
return
# Plota Polinomio Cota-Volume
def plot_pcv(self, uhe):
"""
Plota polinimo Cota-Volume da usina hidreletrica especificada na entrada
:param uhe: Dicionario de dados contendo informacoes da usina hidreletrica
"""
if uhe["vol_min"] == 0:
return
a = uhe['pol_cota_vol'][0]
b = uhe['pol_cota_vol'][1]
c = uhe['pol_cota_vol'][2]
d = uhe['pol_cota_vol'][3]
e = uhe['pol_cota_vol'][4]
if (uhe["vol_min"] == uhe["vol_max"]):
volumes = np.linspace(uhe["vol_min"] - 1,uhe["vol_max"] + 1, 100)
cota = a + b*uhe["vol_min"] + c*uhe["vol_min"]**2 + d*uhe["vol_min"]**3 + e*uhe["vol_min"]**4
cota = cota*np.ones(100)
else:
volumes = np.linspace(uhe["vol_min"],uhe["vol_max"],100)
cota = a + b*volumes + c*volumes**2 + d*volumes**3 + e*volumes**4
cota.shape = volumes.shape
plt.plot(volumes, cota, 'b-', lw=3)
plt.xlabel('Volume do Reservatorio (hm^3)', fontsize=16)
titulo = 'Polinomio Cota-Volume da Usina ' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.ylabel('Cota em Metros', fontsize=16)
plt.xlim(volumes[0], volumes[99])
if ( cota[0] == cota[99]):
plt.ylim(cota[0]-1, cota[99]+1)
else:
plt.ylim(cota[0], cota[99])
plt.show()
# Plota Polinomio Cota-Area
def plot_pca(self, uhe):
"""
Plota polinimo cota-area da usina hidreletrica especificada na entrada
:param uhe: Dicionario de dados contendo informacoes da usina hidreletrica
"""
if uhe['vol_min'] == 0:
return
if (uhe['cota_min'] == uhe['cota_max']):
cotas = np.linspace(uhe['cota_min'] - 1,uhe['cota_max'] + 1, 100)
else:
cotas = np.linspace(uhe['cota_min'],uhe['cota_max'],100)
a = uhe['pol_cota_area'][0]
b = uhe['pol_cota_area'][1]
c = uhe['pol_cota_area'][2]
d = uhe['pol_cota_area'][3]
e = uhe['pol_cota_area'][4]
areas = a + b*cotas + c*cotas**2 + d*cotas**3 + e*cotas**4
areas.shape = cotas.shape
plt.plot(cotas, areas, 'b-', lw=3)
plt.xlabel('Cota do Reservatorio (em metros)', fontsize=16)
titulo = 'Polinomio Cota-Area da Usina ' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.ylabel('Area Superficia em km^2', fontsize=16)
plt.xlim(cotas[0], cotas[99])
if ( areas[0] == areas[99]):
plt.ylim(areas[0]-1, areas[99]+1)
else:
plt.ylim(areas[0], areas[99])
plt.show()
# Plota Produtibilidades Constantes da Usina
def plota_produtibs(self, uhe, iano, imes):
"""
Plota polinimo cota-area da usina hidreletrica especificada na entrada
:param uhe: Dicionario de dados contendo informacoes da usina hidreletrica
"""
x_axis = np.arange(1,7)
y_axis = [ uhe['ro_equiv'][iano][imes], uhe['ro_equiv65'][iano][imes], uhe['ro_min'][iano][imes],
uhe['ro_50'][iano][imes], uhe['ro_65'][iano][imes], uhe['ro_max'][iano][imes] ]
fig, ax = plt.subplots()
a, b, c, d, e, f = plt.bar(x_axis, y_axis)
a.set_facecolor('r')
b.set_facecolor('g')
c.set_facecolor('b')
d.set_facecolor('y')
e.set_facecolor('m')
f.set_facecolor('c')
ax.set_xticks(x_axis)
ax.set_xticklabels(['Equiv', 'Equiv65', 'Min', '50%', '65%', 'Max'])
titulo = 'Produtibilidades da Usina ' + uhe['nome'] + ' - Ano: ' + str(iano+1) + ' - Mês:' + str(imes+1)
plt.title(titulo, fontsize=16)
plt.xlabel('Tipo de Produtibilidade', fontsize=16)
plt.ylabel('Produtibilidade', fontsize=16)
plt.show()
# Plota Variação de Produtibilidade
def plot_var_prod(self, uhe):
"""
Plota variacao da produtibilidade da usina hidreletrica especificada na entrada
:param uhe: Dicionario de dados contendo informacoes da usina hidreletrica
"""
if uhe['vol_min'] == 0:
return
a = uhe['pol_cota_vol'][0]
b = uhe['pol_cota_vol'][1]
c = uhe['pol_cota_vol'][2]
d = uhe['pol_cota_vol'][3]
e = uhe['pol_cota_vol'][4]
if (uhe["vol_min"] == uhe["vol_max"]):
volumes = np.linspace(uhe["vol_min"] - 1,uhe["vol_max"] + 1, 100)
cotamont = a + b*uhe["vol_min"] + c*uhe["vol_min"]**2 + d*uhe["vol_min"]**3 + e*uhe["vol_min"]**4
cotamont = cotamont*np.ones(100)
else:
volumes = np.linspace(uhe["vol_min"],uhe["vol_max"],100)
cotamont = a + b*volumes + c*volumes**2 + d*volumes**3 + e*volumes**4
cotamont.shape = volumes.shape
qdef = np.linspace(uhe['vaz_min'], 2*uhe['engolimento'], 100)
a = uhe['pol_vaz_niv_jus'][0]
b = uhe['pol_vaz_niv_jus'][1]
c = uhe['pol_vaz_niv_jus'][2]
d = uhe['pol_vaz_niv_jus'][3]
e = uhe['pol_vaz_niv_jus'][4]
cotajus = a + b*qdef + c*qdef**2 + d*qdef**3 + e*qdef**4
cotajus.shape = qdef.shape
xGrid, yGrid = np.meshgrid(cotamont, cotajus)
z = uhe['prod_esp'] * ( xGrid - yGrid )
fig = plt.figure()
ax = fig.gca(projection='3d')
surf = ax.plot_surface(qdef, volumes,z, rcount=100, ccount = 100, cmap=plt.cm.coolwarm,
linewidth=0, antialiased=False)
plt.xlabel('Vazão Defluente em m^3/s', fontsize=12)
titulo = 'Produtibilidade da Usina ' + uhe['nome']
plt.title(titulo, fontsize=16)
plt.ylabel('Volume Armazenado em hm^3', fontsize=12)
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
# Plota Usinas Não Existentes e Existentes em Expansao
    def plota_expansao(self):
        """
        Horizontal stacked-bar chart of every expansion plant ('EE' or 'NE'):
        for each plant, the number of months in which it has not entered,
        is filling its dead storage, is sub-motorized, and is motorized.
        """
        # Count how many plants are in expansion and collect their names
        cont = 0
        nomes = []
        for iusi, status in enumerate(self._status['valor']):
            if status == 'EE' or status == 'NE':
                cont += 1
                nomes.append(self._nome['valor'][iusi])
        motorizada = np.zeros(cont)
        vazia = np.zeros(cont)
        enchendo = np.zeros(cont)
        submotorizada = np.zeros(cont)
        ind = np.arange(cont)
        cont = 0
        nanos = len(self._status_vol_morto['valor'][0])
        for iusi, status in enumerate(self._status['valor']):
            if status == 'EE' or status == 'NE':
                # Months in which the plant is motorized (status_motoriz == 2)
                motorizada[cont] = nanos * 12 - np.count_nonzero(self._status_motoriz['valor'][iusi] - 2)
                # Months in which dead-storage filling has not started (status_vol_morto == 0)
                vazia[cont] = nanos * 12 - np.count_nonzero(self._status_vol_morto['valor'][iusi])
                # Months in which the plant is filling its dead storage (status_vol_morto == 1)
                enchendo[cont] = nanos * 12 - np.count_nonzero(self._status_vol_morto['valor'][iusi] - 1)
                # Months in which the plant is sub-motorized (status_motoriz == 1)
                submotorizada[cont] = nanos * 12 - np.count_nonzero(self._status_motoriz['valor'][iusi] - 1)
                cont += 1
        width = 0.35  # the width of the bars: can also be len(x) sequence
        ax = plt.axes()
        p1 = plt.barh(ind, vazia, width, color='w')
        p2 = plt.barh(ind, enchendo, width, color='lime', left=vazia)
        p3 = plt.barh(ind, submotorizada, width, color='sienna', left=vazia + enchendo)
        p4 = plt.barh(ind, motorizada, width, color='black', left=vazia + enchendo + submotorizada)
        plt.ylabel('Usinas', fontsize=16)
        plt.title('Usinas Hidreletricas em Expansao', fontsize=16)
        plt.yticks(ind, nomes, fontsize=12)
        plt.xticks(np.arange(0, nanos * 12 + 2, 12))
        # plt.yticks(np.arange(0, 81, 10))
        plt.legend((p1[0], p2[0], p3[0], p4[0]), ('Nao Entrou', 'Enchendo Vol. Morto', 'Submotorizada', 'Motorizada'),
                   fontsize=12)
        plt.xlabel('Meses do Estudo', fontsize=16)
        ax.xaxis.grid()
        plt.show()
def parp(self, uhe, ord_max):
"""
Implementa o método para o calculo dos coeficentes do modelo PAR(p).
:param uhe: dicionario de dados com informacoes da usina hidreletrica,
ord_max: ord_max do modelo PAR(p)
:returns ordem: Ordem do modelo Ar para cada mes,
coef_parp: Coeficientes do modelo AR para cada mes,
fac: Funcao de Auto-Correlacao,
facp: Funcao de Auto-Correlacao Parcial,
residuos: Matriz de residuos
"""
vazoes = uhe['vazoes']
nanos = len(vazoes) # A serie historica do ultimo ano geralmente nao vem completa (despreze-a)
media = np.mean(vazoes[1:(nanos-1)], 0) # A primeira serie historica eh utilizada como tendencia (despreze-a)
desvio = np.std(vazoes[1:(nanos-1)], 0) # A primeira serie historica eh utilizada como tendencia (despreze-a)
# Calcula vazao normalizada (nao precisa)
#vaznorm = np.zeros((nanos,12),'d')
#for iano in range(nanos):
# for imes in range(12):
# vaznorm[iano][imes] = (self.Vazoes[iano][imes] - media[imes])/desvio[imes]
# Calcula funcao de auto-correlacao (uma para cada mes)
fac = np.zeros( (12, ord_max+1), 'd')
for ilag in range(ord_max+1):
for imes in range(12):
for iano in np.arange(1,nanos-1):
ano_ant = iano
mes_ant = imes - ilag
if mes_ant < 0:
ano_ant -= 1
mes_ant += 12
fac[imes][ilag] += (vazoes[iano][imes] - media[imes]) * (vazoes[ano_ant][mes_ant] - media[mes_ant])
fac[imes][ilag] /= (nanos-2)
fac[imes][ilag] /= (desvio[imes]*desvio[mes_ant])
# Calcula funcao de auto-correlacao parcial (uma para cada mes)
facp = np.zeros((12, ord_max+1), 'd')
for ilag in np.arange(1,ord_max+1):
for imes in range(12):
A = np.eye(ilag)
B = np.zeros(ilag)
# Preenche matriz triangular superior
for ilin in range(len(A)):
for icol in range( len(A) ): # TODO: Aqui poderia ser np.arange(ilin+1,len(A)): Testar depois
if icol > ilin:
mes = imes - ilin - 1
if mes < 0:
mes = mes + 12
A[ilin][icol] = fac[mes][icol-ilin]
B[ilin] = fac[imes][ilin+1]
# Preenche matriz triangular inferior
for ilin in range(len(A)):
for icol in range( len(A) ): # TODO: Aqui poderia ser np.arange(0, ilin): Testar depois
if icol < ilin:
A[ilin][icol] = A[icol][ilin]
phi = np.linalg.solve(A,B)
facp[imes][ilag] = phi[ len(phi)-1 ]
# Identificacao da ordem
IC = 1.96/np.sqrt(nanos-2)
ordem = np.zeros(12, 'i')
for imes in range(12):
ordem[imes] = 0
for ilag in range(ord_max+1):
if facp[imes][ilag] > IC or facp[imes][ilag] < -IC:
ordem[imes] = ilag
# Calculo dos coeficientes
coef_parp = np.zeros( (12,ord_max), 'd')
for imes in range(12):
ilag = ordem[imes]
A = np.eye(ilag)
B = np.zeros(ilag)
# Preenche matriz triangular superior
for ilin in range(len(A)):
for icol in range( len(A) ): # TODO: Aqui poderia ser np.arange(ilin+1,len(A)): Testar depois
if icol > ilin:
mes = imes - ilin - 1
if mes < 0:
mes = mes + 12
A[ilin][icol] = fac[mes][icol-ilin]
B[ilin] = fac[imes][ilin+1]
# Preenche matriz triangular inferior
for ilin in range(len(A)):
for icol in range( len(A) ): # TODO: Aqui poderia ser np.arange(0, ilin): Testar depois
if icol < ilin:
A[ilin][icol] = A[icol][ilin]
phi = np.linalg.solve(A,B)
for iord in range ( len(phi) ):
coef_parp[imes][iord ] = phi[ iord ]
# Calculo dos Residuos Normalizados
residuos = np.zeros( (nanos-1, 12) )
for iano in np.arange(1,nanos-1):
for imes in range(12):
residuos[iano][imes]= ( vazoes[iano][imes]-media[imes] ) / desvio[imes]
for ilag in range(ord_max):
ano_ant = iano
mes_ant = imes - ilag - 1
if mes_ant < 0:
ano_ant -= 1
mes_ant += 12
residuos[iano][imes] -= coef_parp[imes][ilag]*\
(vazoes[ano_ant][mes_ant]-media[mes_ant])/desvio[mes_ant]
return ordem, coef_parp, fac, facp, residuos
def plota_parp(self, uhe, mes, ordmax):
"""
Implementa o método para a impressao do grafico da fac e facp para a uhe cujo
dicionário de dados é fornecido.
:param uhe: dicionario de dados com informacoes da usina hidreletrica,
mes: mes de 0 a 11 (jan a dez) a ser considerado,
ord_max: ordem maxima do modelo PAR(p)
"""
ordem, coef_parp, fac, facp, residuos = self.parp(uhe, ordmax)
vazoes = uhe['vazoes']
nanos = len(vazoes) - 1
if mes == 0:
str_mes = 'January'
elif mes == 1:
str_mes = 'Fevereiro'
elif mes == 2:
str_mes = 'Marco'
elif mes == 3:
str_mes = 'Abril'
elif mes == 4:
str_mes = 'Maio'
elif mes == 5:
str_mes = 'Junho'
elif mes == 6:
str_mes = 'Julho'
elif mes == 7:
str_mes = 'Agosto'
elif mes == 8:
str_mes = 'Setembro'
elif mes == 9:
str_mes = 'Outubro'
elif mes == 10:
str_mes = 'Novembro'
else:
str_mes = 'Dezembro'
IC = 1.96/np.sqrt(nanos-1)
cores = []
limitesup = []
limiteinf = []
for elemento in facp[mes][1:ordmax+1]:
limitesup.append(IC)
limiteinf.append(-IC)
if elemento > IC or elemento < -IC:
cores.append('r')
else:
cores.append('b')
f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
barWidth = 0.40
titulo = 'FAC e FACP of ' + str_mes + ' - UHE ' + uhe['nome']
f.canvas.set_window_title(titulo)
ax1.bar(np.arange(1,ordmax+1), fac[mes][1:ordmax+1], barWidth, align='center')
ax2.bar(np.arange(1,ordmax+1), facp[mes][1:ordmax+1], barWidth, align='center', color = cores)
ax2.plot(np.arange(1,ordmax+1), limitesup, 'm--', lw=1)
ax2.plot(np.arange(1,ordmax+1), limiteinf, 'm--', lw=1)
ax1.set_xticks(np.arange(1,ordmax+1))
ax2.set_xticks(np.arange(1,ordmax+1))
tituloFAC = 'FAC - Month: ' + str_mes + '\n of UHE ' + uhe['nome']
tituloFACP = 'FACP - Month ' + str_mes + '\n of UHE ' + uhe['nome']
ax1.set_title(tituloFAC, fontsize = 13)
ax2.set_title(tituloFACP, fontsize =13)
#ax1.xlabel('Lag')
#ax2.xlabel('Lag')
#ax1.ylabel('Autocorrelacao e Autocorrelacao Parcial')
plt.show()
    def gera_cen_sinteticos(self, uhe, ord_max, nr_cen):
        """
        Generate synthetic natural-inflow series for the plant whose data
        dict is given, using the fitted PAR(p) model with additive noise
        resampled from the historical residuals, and plot them against the
        historical monthly statistics.

        NOTE(review): `randint` comes from the module namespace and is not
        seeded here, so consecutive calls are not reproducible — confirm
        whether that is intended.

        :param uhe: data dict with the plant's information
        :param ord_max: maximum order of the PAR(p) model
        :param nr_cen: number of synthetic series to generate
        :returns sintetica_adit: array (nr_cen, nestagios) with the scenarios
        """
        ordem, coef_parp, fac, facp, residuos = self.parp(uhe, ord_max)
        #
        # Basic parameters
        #
        nanos_estudo = len(uhe['status_vol_morto'])
        nmeses_estudo = len(uhe['status_vol_morto'][0])
        nestagios = nanos_estudo*nmeses_estudo
        vazoes = uhe['vazoes']
        nanos = len(vazoes) - 1
        media = np.mean(vazoes[1:(nanos-1)], 0)   # the first yearly series is used as tendency (discard it)
        desvio = np.std(vazoes[1:(nanos-1)], 0)   # the first yearly series is used as tendency (discard it)
        # Generate the synthetic series
        sintetica_adit = np.zeros((nr_cen,nestagios),'d')
        for iser in range(nr_cen):
            contador = -1
            for iano in range(nanos_estudo):
                for imes in range(nmeses_estudo):
                    contador += 1
                    # Resample a historical year at random for the noise term.
                    serie = randint(1,nanos-2)
                    valor = media[imes] + desvio[imes]*residuos[serie][imes]
                    for ilag in range(ord_max):
                        mes_ant = imes - ilag - 1
                        ano_ant = iano
                        if mes_ant < 0:
                            mes_ant += 12
                            ano_ant -= 1
                        # Before the horizon start, use the historical mean
                        # as the lagged inflow.
                        if ano_ant < 0:
                            vazant = media[mes_ant]
                        else:
                            vazant = sintetica_adit[iser][contador-1-ilag]
                        valor += desvio[imes]*coef_parp[imes][ilag]*(vazant-media[mes_ant])/desvio[mes_ant]
                    sintetica_adit[iser][contador] = valor
        x_axis = np.arange(1, nestagios+1)
        plt.plot(x_axis, sintetica_adit.transpose(), 'c-')
        plt.plot(x_axis, np.mean(sintetica_adit,0), 'r-', lw=3, label='Mean - Synthetic Series')
        plt.plot(x_axis, np.mean(sintetica_adit,0) + np.nanstd(sintetica_adit, axis=0), 'r-.', lw=2, label='Std Synthetic Series')
        plt.plot(x_axis, np.mean(sintetica_adit,0) - np.nanstd(sintetica_adit, axis=0), 'r-.', lw=2)
        # Tile the 12-month historical statistics over the plotted horizon.
        m = np.concatenate([ media, media, media, media, media])
        d = np.concatenate([ desvio, desvio, desvio, desvio, desvio])
        plt.plot(x_axis, m, 'mo', lw=3, label='Mean - Hystorical Series')
        plt.plot(x_axis, m + d, 'bo', lw=2, label='Std - Hystorical Series')
        plt.plot(x_axis, m - d, 'bo', lw=2)
        titulo = uhe['nome'].strip() + "'s Synthetic Series of Natural \n" " Inflows - Aditive Noise "
        plt.title(titulo, fontsize=16)
        plt.xlabel('Month', fontsize=16)
        plt.ylabel('Inflow (m^3/s', fontsize=16)
        plt.legend(fontsize=12)
        plt.show()
        return sintetica_adit
| [
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"numpy.count_nonzero",
"numpy.array",
"numpy.arange",
"numpy.mean",
"numpy.where",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.barh",
"os.path.split",
"numpy.max",
"numpy.linspace",
"matplotlib.pyplot.yticks",
"numpy.... | [((29927, 29952), 'numpy.where', 'np.where', (['(incremental < 0)'], {}), '(incremental < 0)\n', (29935, 29952), True, 'import numpy as np\n'), ((31508, 31533), 'numpy.where', 'np.where', (['(incremental < 0)'], {}), '(incremental < 0)\n', (31516, 31533), True, 'import numpy as np\n'), ((32292, 32333), 'numpy.array', 'np.array', (["self._maq_por_conj['valor'][-1]"], {}), "(self._maq_por_conj['valor'][-1])\n", (32300, 32333), True, 'import numpy as np\n'), ((32346, 32387), 'numpy.array', 'np.array', (["self._pef_por_conj['valor'][-1]"], {}), "(self._pef_por_conj['valor'][-1])\n", (32354, 32387), True, 'import numpy as np\n'), ((32526, 32567), 'numpy.array', 'np.array', (["self._maq_por_conj['valor'][-1]"], {}), "(self._maq_por_conj['valor'][-1])\n", (32534, 32567), True, 'import numpy as np\n'), ((32580, 32622), 'numpy.array', 'np.array', (["self._vaz_efet_conj['valor'][-1]"], {}), "(self._vaz_efet_conj['valor'][-1])\n", (32588, 32622), True, 'import numpy as np\n'), ((56108, 56120), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (56118, 56120), True, 'from matplotlib import pyplot as plt\n'), ((56134, 56150), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (56145, 56150), True, 'from matplotlib import pyplot as plt\n'), ((56170, 56198), 'numpy.arange', 'np.arange', (['(1)', '(nanos * 12 + 1)'], {}), '(1, nanos * 12 + 1)\n', (56179, 56198), True, 'import numpy as np\n'), ((56879, 56909), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (56888, 56909), True, 'from matplotlib import pyplot as plt\n'), ((56918, 56958), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Mes de Estudo"""'], {'fontsize': '(16)'}), "('Mes de Estudo', fontsize=16)\n", (56928, 56958), True, 'from matplotlib import pyplot as plt\n'), ((56967, 57008), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Volume em hm^3"""'], {'fontsize': '(16)'}), "('Volume em hm^3', fontsize=16)\n", (56977, 57008), 
True, 'from matplotlib import pyplot as plt\n'), ((57212, 57222), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (57220, 57222), True, 'from matplotlib import pyplot as plt\n'), ((57310, 57322), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (57320, 57322), True, 'from matplotlib import pyplot as plt\n'), ((57336, 57352), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (57347, 57352), True, 'from matplotlib import pyplot as plt\n'), ((57371, 57399), 'numpy.arange', 'np.arange', (['(1)', '(nanos * 12 + 1)'], {}), '(1, nanos * 12 + 1)\n', (57380, 57399), True, 'import numpy as np\n'), ((57663, 57693), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (57672, 57693), True, 'from matplotlib import pyplot as plt\n'), ((57702, 57742), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Mes de Estudo"""'], {'fontsize': '(16)'}), "('Mes de Estudo', fontsize=16)\n", (57712, 57742), True, 'from matplotlib import pyplot as plt\n'), ((57751, 57797), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vazao Minima em m^3"""'], {'fontsize': '(16)'}), "('Vazao Minima em m^3', fontsize=16)\n", (57761, 57797), True, 'from matplotlib import pyplot as plt\n'), ((58001, 58011), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (58009, 58011), True, 'from matplotlib import pyplot as plt\n'), ((58268, 58308), 'numpy.count_nonzero', 'np.count_nonzero', (["uhe['vol_morto_tempo']"], {}), "(uhe['vol_morto_tempo'])\n", (58284, 58308), True, 'import numpy as np\n'), ((58365, 58381), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (58376, 58381), True, 'from matplotlib import pyplot as plt\n'), ((58400, 58428), 'numpy.arange', 'np.arange', (['(1)', '(nanos * 12 + 1)'], {}), '(1, nanos * 12 + 1)\n', (58409, 58428), True, 'import numpy as np\n'), ((58604, 58634), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (58613, 
58634), True, 'from matplotlib import pyplot as plt\n'), ((58643, 58683), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Mes de Estudo"""'], {'fontsize': '(16)'}), "('Mes de Estudo', fontsize=16)\n", (58653, 58683), True, 'from matplotlib import pyplot as plt\n'), ((58692, 58739), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Volume Morto em hm^3"""'], {'fontsize': '(16)'}), "('Volume Morto em hm^3', fontsize=16)\n", (58702, 58739), True, 'from matplotlib import pyplot as plt\n'), ((58749, 58772), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(12)'}), '(fontsize=12)\n', (58759, 58772), True, 'from matplotlib import pyplot as plt\n'), ((58782, 58822), 'numpy.count_nonzero', 'np.count_nonzero', (["uhe['vol_morto_tempo']"], {}), "(uhe['vol_morto_tempo'])\n", (58798, 58822), True, 'import numpy as np\n'), ((58832, 58842), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (58840, 58842), True, 'from matplotlib import pyplot as plt\n'), ((58937, 58953), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (58948, 58953), True, 'from matplotlib import pyplot as plt\n'), ((58972, 59000), 'numpy.arange', 'np.arange', (['(1)', '(nanos * 12 + 1)'], {}), '(1, nanos * 12 + 1)\n', (58981, 59000), True, 'import numpy as np\n'), ((59169, 59199), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (59178, 59199), True, 'from matplotlib import pyplot as plt\n'), ((59208, 59248), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Mes de Estudo"""'], {'fontsize': '(16)'}), "('Mes de Estudo', fontsize=16)\n", (59218, 59248), True, 'from matplotlib import pyplot as plt\n'), ((59257, 59306), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Potencia Efetiva em MW"""'], {'fontsize': '(16)'}), "('Potencia Efetiva em MW', fontsize=16)\n", (59267, 59306), True, 'from matplotlib import pyplot as plt\n'), ((59316, 59326), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (59324, 59326), True, 'from 
matplotlib import pyplot as plt\n'), ((59854, 59870), 'numpy.arange', 'np.arange', (['(1)', '(13)'], {}), '(1, 13)\n', (59863, 59870), True, 'import numpy as np\n'), ((59939, 59963), 'numpy.mean', 'np.mean', (['vaz_nat'], {'axis': '(0)'}), '(vaz_nat, axis=0)\n', (59946, 59963), True, 'import numpy as np\n'), ((59972, 60007), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', 'media', '"""r-"""'], {'lw': '(3)'}), "(x_axis, media, 'r-', lw=3)\n", (59980, 60007), True, 'from matplotlib import pyplot as plt\n'), ((60025, 60051), 'numpy.nanstd', 'np.nanstd', (['vaz_nat'], {'axis': '(0)'}), '(vaz_nat, axis=0)\n', (60034, 60051), True, 'import numpy as np\n'), ((60060, 60105), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', '(media + desvio)', '"""r-."""'], {'lw': '(2)'}), "(x_axis, media + desvio, 'r-.', lw=2)\n", (60068, 60105), True, 'from matplotlib import pyplot as plt\n'), ((60114, 60159), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', '(media - desvio)', '"""r-."""'], {'lw': '(2)'}), "(x_axis, media - desvio, 'r-.', lw=2)\n", (60122, 60159), True, 'from matplotlib import pyplot as plt\n'), ((60202, 60244), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', 'vaz_nat[:][ultimo]', '"""b-"""'], {}), "(x_axis, vaz_nat[:][ultimo], 'b-')\n", (60210, 60244), True, 'from matplotlib import pyplot as plt\n'), ((60316, 60346), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (60325, 60346), True, 'from matplotlib import pyplot as plt\n'), ((60355, 60392), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Mes do Ano"""'], {'fontsize': '(16)'}), "('Mes do Ano', fontsize=16)\n", (60365, 60392), True, 'from matplotlib import pyplot as plt\n'), ((60401, 60433), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Vazao"""'], {'fontsize': '(16)'}), "('Vazao', fontsize=16)\n", (60411, 60433), True, 'from matplotlib import pyplot as plt\n'), ((60442, 60452), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (60450, 60452), True, 
'from matplotlib import pyplot as plt\n'), ((61429, 61464), 'matplotlib.pyplot.plot', 'plt.plot', (['volumes', 'cota', '"""b-"""'], {'lw': '(3)'}), "(volumes, cota, 'b-', lw=3)\n", (61437, 61464), True, 'from matplotlib import pyplot as plt\n'), ((61474, 61530), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Volume do Reservatorio (hm^3)"""'], {'fontsize': '(16)'}), "('Volume do Reservatorio (hm^3)', fontsize=16)\n", (61484, 61530), True, 'from matplotlib import pyplot as plt\n'), ((61604, 61634), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (61613, 61634), True, 'from matplotlib import pyplot as plt\n'), ((61643, 61684), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Cota em Metros"""'], {'fontsize': '(16)'}), "('Cota em Metros', fontsize=16)\n", (61653, 61684), True, 'from matplotlib import pyplot as plt\n'), ((61693, 61726), 'matplotlib.pyplot.xlim', 'plt.xlim', (['volumes[0]', 'volumes[99]'], {}), '(volumes[0], volumes[99])\n', (61701, 61726), True, 'from matplotlib import pyplot as plt\n'), ((61868, 61878), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (61876, 61878), True, 'from matplotlib import pyplot as plt\n'), ((62681, 62715), 'matplotlib.pyplot.plot', 'plt.plot', (['cotas', 'areas', '"""b-"""'], {'lw': '(3)'}), "(cotas, areas, 'b-', lw=3)\n", (62689, 62715), True, 'from matplotlib import pyplot as plt\n'), ((62725, 62784), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Cota do Reservatorio (em metros)"""'], {'fontsize': '(16)'}), "('Cota do Reservatorio (em metros)', fontsize=16)\n", (62735, 62784), True, 'from matplotlib import pyplot as plt\n'), ((62856, 62886), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (62865, 62886), True, 'from matplotlib import pyplot as plt\n'), ((62895, 62945), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Area Superficia em km^2"""'], {'fontsize': '(16)'}), "('Area Superficia em km^2', 
fontsize=16)\n", (62905, 62945), True, 'from matplotlib import pyplot as plt\n'), ((62954, 62983), 'matplotlib.pyplot.xlim', 'plt.xlim', (['cotas[0]', 'cotas[99]'], {}), '(cotas[0], cotas[99])\n', (62962, 62983), True, 'from matplotlib import pyplot as plt\n'), ((63131, 63141), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (63139, 63141), True, 'from matplotlib import pyplot as plt\n'), ((63446, 63461), 'numpy.arange', 'np.arange', (['(1)', '(7)'], {}), '(1, 7)\n', (63455, 63461), True, 'import numpy as np\n'), ((63684, 63698), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (63696, 63698), True, 'from matplotlib import pyplot as plt\n'), ((63726, 63749), 'matplotlib.pyplot.bar', 'plt.bar', (['x_axis', 'y_axis'], {}), '(x_axis, y_axis)\n', (63733, 63749), True, 'from matplotlib import pyplot as plt\n'), ((64152, 64182), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (64161, 64182), True, 'from matplotlib import pyplot as plt\n'), ((64191, 64241), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Tipo de Produtibilidade"""'], {'fontsize': '(16)'}), "('Tipo de Produtibilidade', fontsize=16)\n", (64201, 64241), True, 'from matplotlib import pyplot as plt\n'), ((64250, 64292), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Produtibilidade"""'], {'fontsize': '(16)'}), "('Produtibilidade', fontsize=16)\n", (64260, 64292), True, 'from matplotlib import pyplot as plt\n'), ((64301, 64311), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (64309, 64311), True, 'from matplotlib import pyplot as plt\n'), ((65316, 65372), 'numpy.linspace', 'np.linspace', (["uhe['vaz_min']", "(2 * uhe['engolimento'])", '(100)'], {}), "(uhe['vaz_min'], 2 * uhe['engolimento'], 100)\n", (65327, 65372), True, 'import numpy as np\n'), ((65687, 65717), 'numpy.meshgrid', 'np.meshgrid', (['cotamont', 'cotajus'], {}), '(cotamont, cotajus)\n', (65698, 65717), True, 'import numpy as np\n'), ((65782, 65794), 
'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (65792, 65794), True, 'from matplotlib import pyplot as plt\n'), ((65994, 66045), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Vazão Defluente em m^3/s"""'], {'fontsize': '(12)'}), "('Vazão Defluente em m^3/s', fontsize=12)\n", (66004, 66045), True, 'from matplotlib import pyplot as plt\n'), ((66113, 66143), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (66122, 66143), True, 'from matplotlib import pyplot as plt\n'), ((66152, 66204), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Volume Armazenado em hm^3"""'], {'fontsize': '(12)'}), "('Volume Armazenado em hm^3', fontsize=12)\n", (66162, 66204), True, 'from matplotlib import pyplot as plt\n'), ((66263, 66273), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (66271, 66273), True, 'from matplotlib import pyplot as plt\n'), ((66654, 66668), 'numpy.zeros', 'np.zeros', (['cont'], {}), '(cont)\n', (66662, 66668), True, 'import numpy as np\n'), ((66685, 66699), 'numpy.zeros', 'np.zeros', (['cont'], {}), '(cont)\n', (66693, 66699), True, 'import numpy as np\n'), ((66719, 66733), 'numpy.zeros', 'np.zeros', (['cont'], {}), '(cont)\n', (66727, 66733), True, 'import numpy as np\n'), ((66758, 66772), 'numpy.zeros', 'np.zeros', (['cont'], {}), '(cont)\n', (66766, 66772), True, 'import numpy as np\n'), ((66788, 66803), 'numpy.arange', 'np.arange', (['cont'], {}), '(cont)\n', (66797, 66803), True, 'import numpy as np\n'), ((67798, 67808), 'matplotlib.pyplot.axes', 'plt.axes', ([], {}), '()\n', (67806, 67808), True, 'from matplotlib import pyplot as plt\n'), ((67822, 67860), 'matplotlib.pyplot.barh', 'plt.barh', (['ind', 'vazia', 'width'], {'color': '"""w"""'}), "(ind, vazia, width, color='w')\n", (67830, 67860), True, 'from matplotlib import pyplot as plt\n'), ((67874, 67930), 'matplotlib.pyplot.barh', 'plt.barh', (['ind', 'enchendo', 'width'], {'color': '"""lime"""', 'left': 'vazia'}), "(ind, enchendo, 
width, color='lime', left=vazia)\n", (67882, 67930), True, 'from matplotlib import pyplot as plt\n'), ((67944, 68018), 'matplotlib.pyplot.barh', 'plt.barh', (['ind', 'submotorizada', 'width'], {'color': '"""sienna"""', 'left': '(vazia + enchendo)'}), "(ind, submotorizada, width, color='sienna', left=vazia + enchendo)\n", (67952, 68018), True, 'from matplotlib import pyplot as plt\n'), ((68032, 68122), 'matplotlib.pyplot.barh', 'plt.barh', (['ind', 'motorizada', 'width'], {'color': '"""black"""', 'left': '(vazia + enchendo + submotorizada)'}), "(ind, motorizada, width, color='black', left=vazia + enchendo +\n submotorizada)\n", (68040, 68122), True, 'from matplotlib import pyplot as plt\n'), ((68128, 68161), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Usinas"""'], {'fontsize': '(16)'}), "('Usinas', fontsize=16)\n", (68138, 68161), True, 'from matplotlib import pyplot as plt\n'), ((68170, 68228), 'matplotlib.pyplot.title', 'plt.title', (['"""Usinas Hidreletricas em Expansao"""'], {'fontsize': '(16)'}), "('Usinas Hidreletricas em Expansao', fontsize=16)\n", (68179, 68228), True, 'from matplotlib import pyplot as plt\n'), ((68237, 68272), 'matplotlib.pyplot.yticks', 'plt.yticks', (['ind', 'nomes'], {'fontsize': '(12)'}), '(ind, nomes, fontsize=12)\n', (68247, 68272), True, 'from matplotlib import pyplot as plt\n'), ((68377, 68504), 'matplotlib.pyplot.legend', 'plt.legend', (['(p1[0], p2[0], p3[0], p4[0])', "('Nao Entrou', 'Enchendo Vol. Morto', 'Submotorizada', 'Motorizada')"], {'fontsize': '(12)'}), "((p1[0], p2[0], p3[0], p4[0]), ('Nao Entrou',\n 'Enchendo Vol. 
Morto', 'Submotorizada', 'Motorizada'), fontsize=12)\n", (68387, 68504), True, 'from matplotlib import pyplot as plt\n'), ((68528, 68570), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Meses do Estudo"""'], {'fontsize': '(16)'}), "('Meses do Estudo', fontsize=16)\n", (68538, 68570), True, 'from matplotlib import pyplot as plt\n'), ((68604, 68614), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (68612, 68614), True, 'from matplotlib import pyplot as plt\n'), ((69318, 69349), 'numpy.mean', 'np.mean', (['vazoes[1:nanos - 1]', '(0)'], {}), '(vazoes[1:nanos - 1], 0)\n', (69325, 69349), True, 'import numpy as np\n'), ((69440, 69470), 'numpy.std', 'np.std', (['vazoes[1:nanos - 1]', '(0)'], {}), '(vazoes[1:nanos - 1], 0)\n', (69446, 69470), True, 'import numpy as np\n'), ((69881, 69913), 'numpy.zeros', 'np.zeros', (['(12, ord_max + 1)', '"""d"""'], {}), "((12, ord_max + 1), 'd')\n", (69889, 69913), True, 'import numpy as np\n'), ((70549, 70581), 'numpy.zeros', 'np.zeros', (['(12, ord_max + 1)', '"""d"""'], {}), "((12, ord_max + 1), 'd')\n", (70557, 70581), True, 'import numpy as np\n'), ((70600, 70625), 'numpy.arange', 'np.arange', (['(1)', '(ord_max + 1)'], {}), '(1, ord_max + 1)\n', (70609, 70625), True, 'import numpy as np\n'), ((71729, 71746), 'numpy.zeros', 'np.zeros', (['(12)', '"""i"""'], {}), "(12, 'i')\n", (71737, 71746), True, 'import numpy as np\n'), ((72011, 72039), 'numpy.zeros', 'np.zeros', (['(12, ord_max)', '"""d"""'], {}), "((12, ord_max), 'd')\n", (72019, 72039), True, 'import numpy as np\n'), ((75388, 75419), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'sharey': '(True)'}), '(1, 2, sharey=True)\n', (75400, 75419), True, 'from matplotlib import pyplot as plt\n'), ((76345, 76355), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (76353, 76355), True, 'from matplotlib import pyplot as plt\n'), ((77207, 77238), 'numpy.mean', 'np.mean', (['vazoes[1:nanos - 1]', '(0)'], {}), '(vazoes[1:nanos - 1], 0)\n', (77214, 77238), 
True, 'import numpy as np\n'), ((77329, 77359), 'numpy.std', 'np.std', (['vazoes[1:nanos - 1]', '(0)'], {}), '(vazoes[1:nanos - 1], 0)\n', (77335, 77359), True, 'import numpy as np\n'), ((77492, 77526), 'numpy.zeros', 'np.zeros', (['(nr_cen, nestagios)', '"""d"""'], {}), "((nr_cen, nestagios), 'd')\n", (77500, 77526), True, 'import numpy as np\n'), ((78481, 78508), 'numpy.arange', 'np.arange', (['(1)', '(nestagios + 1)'], {}), '(1, nestagios + 1)\n', (78490, 78508), True, 'import numpy as np\n'), ((78907, 78958), 'numpy.concatenate', 'np.concatenate', (['[media, media, media, media, media]'], {}), '([media, media, media, media, media])\n', (78921, 78958), True, 'import numpy as np\n'), ((78972, 79028), 'numpy.concatenate', 'np.concatenate', (['[desvio, desvio, desvio, desvio, desvio]'], {}), '([desvio, desvio, desvio, desvio, desvio])\n', (78986, 79028), True, 'import numpy as np\n'), ((79038, 79103), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', 'm', '"""mo"""'], {'lw': '(3)', 'label': '"""Mean - Hystorical Series"""'}), "(x_axis, m, 'mo', lw=3, label='Mean - Hystorical Series')\n", (79046, 79103), True, 'from matplotlib import pyplot as plt\n'), ((79112, 79180), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', '(m + d)', '"""bo"""'], {'lw': '(2)', 'label': '"""Std - Hystorical Series"""'}), "(x_axis, m + d, 'bo', lw=2, label='Std - Hystorical Series')\n", (79120, 79180), True, 'from matplotlib import pyplot as plt\n'), ((79189, 79224), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', '(m - d)', '"""bo"""'], {'lw': '(2)'}), "(x_axis, m - d, 'bo', lw=2)\n", (79197, 79224), True, 'from matplotlib import pyplot as plt\n'), ((79336, 79366), 'matplotlib.pyplot.title', 'plt.title', (['titulo'], {'fontsize': '(16)'}), '(titulo, fontsize=16)\n', (79345, 79366), True, 'from matplotlib import pyplot as plt\n'), ((79375, 79407), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Month"""'], {'fontsize': '(16)'}), "('Month', fontsize=16)\n", (79385, 79407), True, 'from 
matplotlib import pyplot as plt\n'), ((79416, 79456), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Inflow (m^3/s"""'], {'fontsize': '(16)'}), "('Inflow (m^3/s', fontsize=16)\n", (79426, 79456), True, 'from matplotlib import pyplot as plt\n'), ((79465, 79488), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(12)'}), '(fontsize=12)\n', (79475, 79488), True, 'from matplotlib import pyplot as plt\n'), ((79497, 79507), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (79505, 79507), True, 'from matplotlib import pyplot as plt\n'), ((968, 992), 'os.path.split', 'os.path.split', (['file_name'], {}), '(file_name)\n', (981, 992), False, 'import os\n'), ((1024, 1048), 'os.path.split', 'os.path.split', (['file_name'], {}), '(file_name)\n', (1037, 1048), False, 'import os\n'), ((13687, 13710), 'os.path.split', 'os.path.split', (['file_out'], {}), '(file_out)\n', (13700, 13710), False, 'import os\n'), ((13742, 13765), 'os.path.split', 'os.path.split', (['file_out'], {}), '(file_out)\n', (13755, 13765), False, 'import os\n'), ((28232, 28259), 'numpy.shape', 'np.shape', (['self._copiavazoes'], {}), '(self._copiavazoes)\n', (28240, 28259), True, 'import numpy as np\n'), ((31064, 31084), 'numpy.zeros', 'np.zeros', (['nanos_hist'], {}), '(nanos_hist)\n', (31072, 31084), True, 'import numpy as np\n'), ((32427, 32440), 'numpy.vdot', 'np.vdot', (['a', 'b'], {}), '(a, b)\n', (32434, 32440), True, 'import numpy as np\n'), ((32662, 32675), 'numpy.vdot', 'np.vdot', (['a', 'b'], {}), '(a, b)\n', (32669, 32675), True, 'import numpy as np\n'), ((32816, 32842), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (32824, 32842), True, 'import numpy as np\n'), ((32881, 32907), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (32889, 32907), True, 'import numpy as np\n'), ((32949, 32975), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (32957, 32975), True, 'import 
numpy as np\n'), ((33019, 33045), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (33027, 33045), True, 'import numpy as np\n'), ((33085, 33111), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (33093, 33111), True, 'import numpy as np\n'), ((33151, 33177), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (33159, 33177), True, 'import numpy as np\n'), ((61021, 61077), 'numpy.linspace', 'np.linspace', (["(uhe['vol_min'] - 1)", "(uhe['vol_max'] + 1)", '(100)'], {}), "(uhe['vol_min'] - 1, uhe['vol_max'] + 1, 100)\n", (61032, 61077), True, 'import numpy as np\n'), ((61256, 61304), 'numpy.linspace', 'np.linspace', (["uhe['vol_min']", "uhe['vol_max']", '(100)'], {}), "(uhe['vol_min'], uhe['vol_max'], 100)\n", (61267, 61304), True, 'import numpy as np\n'), ((61774, 61809), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(cota[0] - 1)', '(cota[99] + 1)'], {}), '(cota[0] - 1, cota[99] + 1)\n', (61782, 61809), True, 'from matplotlib import pyplot as plt\n'), ((61832, 61859), 'matplotlib.pyplot.ylim', 'plt.ylim', (['cota[0]', 'cota[99]'], {}), '(cota[0], cota[99])\n', (61840, 61859), True, 'from matplotlib import pyplot as plt\n'), ((62251, 62309), 'numpy.linspace', 'np.linspace', (["(uhe['cota_min'] - 1)", "(uhe['cota_max'] + 1)", '(100)'], {}), "(uhe['cota_min'] - 1, uhe['cota_max'] + 1, 100)\n", (62262, 62309), True, 'import numpy as np\n'), ((62343, 62393), 'numpy.linspace', 'np.linspace', (["uhe['cota_min']", "uhe['cota_max']", '(100)'], {}), "(uhe['cota_min'], uhe['cota_max'], 100)\n", (62354, 62393), True, 'import numpy as np\n'), ((63033, 63070), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(areas[0] - 1)', '(areas[99] + 1)'], {}), '(areas[0] - 1, areas[99] + 1)\n', (63041, 63070), True, 'from matplotlib import pyplot as plt\n'), ((63093, 63122), 'matplotlib.pyplot.ylim', 'plt.ylim', (['areas[0]', 'areas[99]'], {}), '(areas[0], areas[99])\n', (63101, 63122), True, 'from 
matplotlib import pyplot as plt\n'), ((64881, 64937), 'numpy.linspace', 'np.linspace', (["(uhe['vol_min'] - 1)", "(uhe['vol_max'] + 1)", '(100)'], {}), "(uhe['vol_min'] - 1, uhe['vol_max'] + 1, 100)\n", (64892, 64937), True, 'import numpy as np\n'), ((65128, 65176), 'numpy.linspace', 'np.linspace', (["uhe['vol_min']", "uhe['vol_max']", '(100)'], {}), "(uhe['vol_min'], uhe['vol_max'], 100)\n", (65139, 65176), True, 'import numpy as np\n'), ((68292, 68324), 'numpy.arange', 'np.arange', (['(0)', '(nanos * 12 + 2)', '(12)'], {}), '(0, nanos * 12 + 2, 12)\n', (68301, 68324), True, 'import numpy as np\n'), ((71696, 71714), 'numpy.sqrt', 'np.sqrt', (['(nanos - 2)'], {}), '(nanos - 2)\n', (71703, 71714), True, 'import numpy as np\n'), ((72118, 72130), 'numpy.eye', 'np.eye', (['ilag'], {}), '(ilag)\n', (72124, 72130), True, 'import numpy as np\n'), ((72147, 72161), 'numpy.zeros', 'np.zeros', (['ilag'], {}), '(ilag)\n', (72155, 72161), True, 'import numpy as np\n'), ((72950, 72971), 'numpy.linalg.solve', 'np.linalg.solve', (['A', 'B'], {}), '(A, B)\n', (72965, 72971), True, 'import numpy as np\n'), ((73140, 73165), 'numpy.zeros', 'np.zeros', (['(nanos - 1, 12)'], {}), '((nanos - 1, 12))\n', (73148, 73165), True, 'import numpy as np\n'), ((73190, 73213), 'numpy.arange', 'np.arange', (['(1)', '(nanos - 1)'], {}), '(1, nanos - 1)\n', (73199, 73213), True, 'import numpy as np\n'), ((75032, 75050), 'numpy.sqrt', 'np.sqrt', (['(nanos - 1)'], {}), '(nanos - 1)\n', (75039, 75050), True, 'import numpy as np\n'), ((75574, 75598), 'numpy.arange', 'np.arange', (['(1)', '(ordmax + 1)'], {}), '(1, ordmax + 1)\n', (75583, 75598), True, 'import numpy as np\n'), ((75661, 75685), 'numpy.arange', 'np.arange', (['(1)', '(ordmax + 1)'], {}), '(1, ordmax + 1)\n', (75670, 75685), True, 'import numpy as np\n'), ((75765, 75789), 'numpy.arange', 'np.arange', (['(1)', '(ordmax + 1)'], {}), '(1, ordmax + 1)\n', (75774, 75789), True, 'import numpy as np\n'), ((75829, 75853), 'numpy.arange', 'np.arange', 
(['(1)', '(ordmax + 1)'], {}), '(1, ordmax + 1)\n', (75838, 75853), True, 'import numpy as np\n'), ((75900, 75924), 'numpy.arange', 'np.arange', (['(1)', '(ordmax + 1)'], {}), '(1, ordmax + 1)\n', (75909, 75924), True, 'import numpy as np\n'), ((75946, 75970), 'numpy.arange', 'np.arange', (['(1)', '(ordmax + 1)'], {}), '(1, ordmax + 1)\n', (75955, 75970), True, 'import numpy as np\n'), ((78591, 78617), 'numpy.mean', 'np.mean', (['sintetica_adit', '(0)'], {}), '(sintetica_adit, 0)\n', (78598, 78617), True, 'import numpy as np\n'), ((15275, 15298), 'os.path.split', 'os.path.split', (['file_out'], {}), '(file_out)\n', (15288, 15298), False, 'import os\n'), ((28287, 28314), 'numpy.shape', 'np.shape', (['self._copiavazoes'], {}), '(self._copiavazoes)\n', (28295, 28314), True, 'import numpy as np\n'), ((52625, 52646), 'numpy.isnan', 'np.isnan', (['registro[2]'], {}), '(registro[2])\n', (52633, 52646), True, 'import numpy as np\n'), ((53859, 53880), 'numpy.isnan', 'np.isnan', (['registro[6]'], {}), '(registro[6])\n', (53867, 53880), True, 'import numpy as np\n'), ((56424, 56443), 'numpy.ones', 'np.ones', (['(nanos * 12)'], {}), '(nanos * 12)\n', (56431, 56443), True, 'import numpy as np\n'), ((56520, 56539), 'numpy.ones', 'np.ones', (['(nanos * 12)'], {}), '(nanos * 12)\n', (56527, 56539), True, 'import numpy as np\n'), ((57527, 57546), 'numpy.ones', 'np.ones', (['(nanos * 12)'], {}), '(nanos * 12)\n', (57534, 57546), True, 'import numpy as np\n'), ((61207, 61219), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (61214, 61219), True, 'import numpy as np\n'), ((65079, 65091), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (65086, 65091), True, 'import numpy as np\n'), ((70014, 70037), 'numpy.arange', 'np.arange', (['(1)', '(nanos - 1)'], {}), '(1, nanos - 1)\n', (70023, 70037), True, 'import numpy as np\n'), ((70679, 70691), 'numpy.eye', 'np.eye', (['ilag'], {}), '(ilag)\n', (70685, 70691), True, 'import numpy as np\n'), ((70712, 70726), 'numpy.zeros', 
'np.zeros', (['ilag'], {}), '(ilag)\n', (70720, 70726), True, 'import numpy as np\n'), ((71570, 71591), 'numpy.linalg.solve', 'np.linalg.solve', (['A', 'B'], {}), '(A, B)\n', (71585, 71591), True, 'import numpy as np\n'), ((78688, 78714), 'numpy.mean', 'np.mean', (['sintetica_adit', '(0)'], {}), '(sintetica_adit, 0)\n', (78695, 78714), True, 'import numpy as np\n'), ((78716, 78749), 'numpy.nanstd', 'np.nanstd', (['sintetica_adit'], {'axis': '(0)'}), '(sintetica_adit, axis=0)\n', (78725, 78749), True, 'import numpy as np\n'), ((78819, 78845), 'numpy.mean', 'np.mean', (['sintetica_adit', '(0)'], {}), '(sintetica_adit, 0)\n', (78826, 78845), True, 'import numpy as np\n'), ((78847, 78880), 'numpy.nanstd', 'np.nanstd', (['sintetica_adit'], {'axis': '(0)'}), '(sintetica_adit, axis=0)\n', (78856, 78880), True, 'import numpy as np\n'), ((12867, 12909), 'numpy.array', 'np.array', (["self._codigo['valor']"], {'dtype': 'int'}), "(self._codigo['valor'], dtype=int)\n", (12875, 12909), True, 'import numpy as np\n'), ((12934, 12947), 'numpy.max', 'np.max', (['maior'], {}), '(maior)\n', (12940, 12947), True, 'import numpy as np\n'), ((13983, 14006), 'os.path.split', 'os.path.split', (['file_out'], {}), '(file_out)\n', (13996, 14006), False, 'import os\n'), ((14033, 14056), 'os.path.split', 'os.path.split', (['file_out'], {}), '(file_out)\n', (14046, 14056), False, 'import os\n'), ((53713, 53755), 'numpy.ones', 'np.ones', (["(dger.num_anos['valor'], 12)", '"""i"""'], {}), "((dger.num_anos['valor'], 12), 'i')\n", (53720, 53755), True, 'import numpy as np\n'), ((67091, 67148), 'numpy.count_nonzero', 'np.count_nonzero', (["(self._status_motoriz['valor'][iusi] - 2)"], {}), "(self._status_motoriz['valor'][iusi] - 2)\n", (67107, 67148), True, 'import numpy as np\n'), ((67276, 67331), 'numpy.count_nonzero', 'np.count_nonzero', (["self._status_vol_morto['valor'][iusi]"], {}), "(self._status_vol_morto['valor'][iusi])\n", (67292, 67331), True, 'import numpy as np\n'), ((67451, 67510), 
'numpy.count_nonzero', 'np.count_nonzero', (["(self._status_vol_morto['valor'][iusi] - 1)"], {}), "(self._status_vol_morto['valor'][iusi] - 1)\n", (67467, 67510), True, 'import numpy as np\n'), ((67623, 67680), 'numpy.count_nonzero', 'np.count_nonzero', (["(self._status_motoriz['valor'][iusi] - 1)"], {}), "(self._status_motoriz['valor'][iusi] - 1)\n", (67639, 67680), True, 'import numpy as np\n'), ((77743, 77764), 'random.randint', 'randint', (['(1)', '(nanos - 2)'], {}), '(1, nanos - 2)\n', (77750, 77764), False, 'from random import randint\n'), ((12978, 13007), 'numpy.ones', 'np.ones', (['(maior + 1)'], {'dtype': 'int'}), '(maior + 1, dtype=int)\n', (12985, 13007), True, 'import numpy as np\n'), ((11499, 11525), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (11507, 11525), True, 'import numpy as np\n'), ((11587, 11613), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (11595, 11613), True, 'import numpy as np\n'), ((11675, 11701), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (11683, 11701), True, 'import numpy as np\n'), ((11765, 11791), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (11773, 11791), True, 'import numpy as np\n'), ((11854, 11880), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (11862, 11880), True, 'import numpy as np\n'), ((11943, 11969), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (11951, 11969), True, 'import numpy as np\n'), ((12026, 12052), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (12034, 12052), True, 'import numpy as np\n'), ((12111, 12137), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (12119, 12137), True, 'import numpy as np\n'), ((12197, 12223), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 
'd')\n", (12205, 12223), True, 'import numpy as np\n'), ((12283, 12309), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (12291, 12309), True, 'import numpy as np\n'), ((12369, 12395), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""d"""'], {}), "((nanos, 12), 'd')\n", (12377, 12395), True, 'import numpy as np\n'), ((13246, 13270), 'os.path.split', 'os.path.split', (['file_name'], {}), '(file_name)\n', (13259, 13270), False, 'import os\n'), ((7489, 7514), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (7496, 7514), True, 'import numpy as np\n'), ((7598, 7623), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (7605, 7623), True, 'import numpy as np\n'), ((7707, 7732), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (7714, 7732), True, 'import numpy as np\n'), ((7816, 7841), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (7823, 7841), True, 'import numpy as np\n'), ((7921, 7946), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (7928, 7946), True, 'import numpy as np\n'), ((8028, 8053), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (8035, 8053), True, 'import numpy as np\n'), ((9794, 9820), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (9802, 9820), True, 'import numpy as np\n'), ((10294, 10320), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""i"""'], {}), "((nanos, 12), 'i')\n", (10302, 10320), True, 'import numpy as np\n'), ((10387, 10413), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""i"""'], {}), "((nanos, 12), 'i')\n", (10395, 10413), True, 'import numpy as np\n'), ((10481, 10507), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (10489, 10507), True, 'import numpy as np\n'), ((9605, 9630), 'numpy.ones', 'np.ones', (['(nanos, 12)', 
'"""i"""'], {}), "((nanos, 12), 'i')\n", (9612, 9630), True, 'import numpy as np\n'), ((9701, 9726), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""i"""'], {}), "((nanos, 12), 'i')\n", (9708, 9726), True, 'import numpy as np\n'), ((9917, 9942), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (9924, 9942), True, 'import numpy as np\n'), ((10039, 10064), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (10046, 10064), True, 'import numpy as np\n'), ((10170, 10195), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (10177, 10195), True, 'import numpy as np\n'), ((11062, 11088), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (11070, 11088), True, 'import numpy as np\n'), ((11159, 11185), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (11167, 11185), True, 'import numpy as np\n'), ((11256, 11282), 'numpy.zeros', 'np.zeros', (['(nanos, 12)', '"""i"""'], {}), "((nanos, 12), 'i')\n", (11264, 11282), True, 'import numpy as np\n'), ((10674, 10699), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (10681, 10699), True, 'import numpy as np\n'), ((10800, 10825), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (10807, 10825), True, 'import numpy as np\n'), ((10935, 10960), 'numpy.ones', 'np.ones', (['(nanos, 12)', '"""f"""'], {}), "((nanos, 12), 'f')\n", (10942, 10960), True, 'import numpy as np\n')] |
""" This module contains a number of useful math related functions that are used throughout this project """
from __future__ import annotations
import math
from typing import List, Union, Tuple
from deprecated import deprecated # type: ignore
# Type aliases used throughout this module.
AnyNumber = Union[int, float]  # any scalar numeric value
FloatIterable = Union[List[float], Tuple[float, ...]]  # sequence of floats
IntIterable = Union[List[int], Tuple[int, ...]]  # sequence of ints
AnyNumberIterable = Union[List[AnyNumber], Tuple[AnyNumber, ...]]  # sequence of numerics
class AxisAlignedBoundingBox(object):
    """An axis aligned bounding box (AABB) grown from the points added to it."""

    def __init__(self):
        super(AxisAlignedBoundingBox, self).__init__()
        # False until the first point is added; limits below are placeholders.
        self.bInitialized: bool = False
        self.minX: AnyNumber = 0
        self.minY: AnyNumber = 0
        self.minZ: AnyNumber = 0
        self.maxX: AnyNumber = 0
        self.maxY: AnyNumber = 0
        self.maxZ: AnyNumber = 0

    def add_point(self, vertex: AnyNumberIterable):
        """Grow the box so that it contains ``vertex`` (an x, y, z iterable)."""
        x, y, z = vertex[0], vertex[1], vertex[2]
        if not self.bInitialized:
            # The very first point simply becomes the box.
            self.bInitialized = True
            self.minX = self.maxX = x
            self.minY = self.maxY = y
            self.minZ = self.maxZ = z
            return
        self.minX = min(self.minX, x)
        self.maxX = max(self.maxX, x)
        self.minY = min(self.minY, y)
        self.maxY = max(self.maxY, y)
        self.minZ = min(self.minZ, z)
        self.maxZ = max(self.maxZ, z)

    def get_center_position(self) -> List[AnyNumber]:
        """Return the exact center point of the box as [x, y, z].

        Useful for working out offsets, pivots etc.
        """
        return [low + (high - low) / 2
                for low, high in ((self.minX, self.maxX),
                                  (self.minY, self.maxY),
                                  (self.minZ, self.maxZ))]

    def merge(self, other: AxisAlignedBoundingBox) -> AxisAlignedBoundingBox:
        """Return an AABB large enough to contain both ``self`` and ``other``."""
        if not self.bInitialized and not other.bInitialized:
            return self
        if not self.bInitialized:
            return other
        combined = AxisAlignedBoundingBox()
        combined.bInitialized = True
        combined.minX = min(self.minX, other.minX)
        combined.minY = min(self.minY, other.minY)
        combined.minZ = min(self.minZ, other.minZ)
        combined.maxX = max(self.maxX, other.maxX)
        combined.maxY = max(self.maxY, other.maxY)
        combined.maxZ = max(self.maxZ, other.maxZ)
        return combined

    def get_size(self) -> List[AnyNumber]:
        """Return the extents/magnitudes of X, Y and Z as a list."""
        return [abs(self.maxX - self.minX),
                abs(self.maxY - self.minY),
                abs(self.maxZ - self.minZ)]
def normalize_color(color: Union[List[int], Tuple[int, ...]]) -> Tuple[float, ...]:
    """Scale 8-bit channel values (0-255) down to floats in the 0.0-1.0 range.

    Returns a tuple with one float per input channel.
    """
    return tuple(channel / 255 for channel in color)
def unnormalize_color(color: FloatIterable) -> Tuple[int, ...]:
    """Scale float channel values (0.0-1.0) up to ints in the 0-255 range.

    Returns a tuple with one truncated int per input channel.
    """
    return tuple(int(channel * 255) for channel in color)
def pad_color(color: AnyNumberIterable) -> Tuple[float, ...]:
    """Pad ``color`` with 1.0 elements until it has at least 4 components.

    Inputs that already have 4 or more components are returned unchanged
    (as a tuple).
    """
    padded: List[AnyNumber] = list(color)
    while len(padded) < 4:
        padded.append(1.0)
    return tuple(padded)
def sanitize_float(inFloat: float) -> str:
    """Render ``inFloat`` with exactly 8 decimal places, never in e-notation."""
    return f"{inFloat:.8f}"
@deprecated
def is_vector_normal(normal: FloatIterable) -> bool:
    """Deprecated: use ``Vector.is_normal`` instead.

    Returns True when the vector's length is approximately 1.0.
    """
    length = Vector.get_length(normal)
    return 0.9999 < length < 1.0001
@deprecated
def calc_vector_length(vector: FloatIterable):
    """Deprecated: use ``Vector.get_length`` directly instead."""
    return Vector.get_length(vector)
class Vector(object):
    """Static helpers for elementwise vector math on plain Python iterables."""

    @staticmethod
    def get_length(vector_array: AnyNumberIterable) -> float:
        """Return the Euclidean length of ``vector_array``."""
        return math.sqrt(sum(component * component for component in vector_array))

    @staticmethod
    def is_normal(normal: FloatIterable) -> bool:
        """True when the vector's length is approximately 1.0."""
        return 0.9999 < Vector.get_length(normal) < 1.0001

    @staticmethod
    def get_normal(vector_array: AnyNumberIterable) -> List[float]:
        """Return ``vector_array`` scaled to unit length."""
        return Vector.divide_scalar(vector_array, Vector.get_length(vector_array))

    @staticmethod
    def add_scalar(vecA: AnyNumberIterable, scalar: AnyNumber) -> List[float]:
        """Elementwise ``vecA[i] + scalar``."""
        return [element + scalar for element in vecA]

    @staticmethod
    def subtract_scalar(vecA: AnyNumberIterable, scalar: AnyNumber) -> List[float]:
        """Elementwise ``vecA[i] - scalar``."""
        return [element - scalar for element in vecA]

    @staticmethod
    def multiply_scalar(vecA: AnyNumberIterable, scalar: AnyNumber) -> List[float]:
        """Elementwise ``vecA[i] * scalar``."""
        return [element * scalar for element in vecA]

    @staticmethod
    def divide_scalar(vecA: AnyNumberIterable, scalar: AnyNumber) -> List[float]:
        """Elementwise ``vecA[i] / scalar``."""
        return [element / scalar for element in vecA]

    @staticmethod
    def add_vector(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> List[float]:
        """Elementwise sum; ``vecB`` must be at least as long as ``vecA``."""
        return [vecA[i] + vecB[i] for i in range(len(vecA))]

    @staticmethod
    def subtract_vector(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> List[float]:
        """Elementwise difference; ``vecB`` must be at least as long as ``vecA``."""
        return [vecA[i] - vecB[i] for i in range(len(vecA))]

    @staticmethod
    def multiply_vector(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> List[float]:
        """Elementwise product; ``vecB`` must be at least as long as ``vecA``."""
        return [vecA[i] * vecB[i] for i in range(len(vecA))]

    @staticmethod
    def divide_vector(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> List[float]:
        """Elementwise quotient; ``vecB`` must be at least as long as ``vecA``."""
        return [vecA[i] / vecB[i] for i in range(len(vecA))]

    @staticmethod
    def dot(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> float:
        """Dot product of the two vectors.

        Both inputs are normalized first to ensure consistent results, so
        the returned value lies in [-1.0, 1.0].
        """
        products = Vector.multiply_vector(Vector.get_normal(vecA),
                                          Vector.get_normal(vecB))
        return sum(products, 0.0)

    @staticmethod
    def get_angle(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> float:
        """Return the angle between the two vectors, expressed in radians."""
        return math.acos(Vector.dot(vecA, vecB))

    @staticmethod
    def cross(vecA: AnyNumberIterable, vecB: AnyNumberIterable) -> List[float]:
        """Cross product; both inputs must have exactly 3 components."""
        return [
            vecA[1] * vecB[2] - vecA[2] * vecB[1],
            vecA[2] * vecB[0] - vecA[0] * vecB[2],
            vecA[0] * vecB[1] - vecA[1] * vecB[0],
        ]
| [
"math.sqrt",
"math.acos"
] | [((5714, 5735), 'math.sqrt', 'math.sqrt', (['squaredSum'], {}), '(squaredSum)\n', (5723, 5735), False, 'import math\n'), ((9465, 9486), 'math.acos', 'math.acos', (['dotproduct'], {}), '(dotproduct)\n', (9474, 9486), False, 'import math\n')] |
import os
import json
import glob
import datetime
from flask_restful import Api, Resource, reqparse
from flask_jwt_extended import (
JWTManager, jwt_required, create_access_token,
get_jwt_identity
)
from .decorators import local_only
import utils
'''
Check authentication
'''
# Absolute path of the directory containing this module; per-workspace data
# lives under <current_path>/storages/.
current_path = os.path.dirname(os.path.realpath(__file__))
class Authentication(Resource):
    """Issue JWT access tokens for workspace credentials.

    POST /auth             -> match credentials against every workspace
    POST /auth/<workspace> -> match credentials against one workspace
    """
    parser = reqparse.RequestParser()
    parser.add_argument('username',
                        type=str,
                        required=True,
                        help="This field cannot be left blank!"
                        )
    parser.add_argument('password',
                        type=str,
                        required=True,
                        help="This field cannot be left blank!"
                        )

    # add another authen level when setting things from remote
    def verify(self, options):
        """Check credentials against the INI config referenced by the options.

        ``options`` is a parsed options.json dict that may reference an INI
        file via CONFIG_PATH. Returns True only when the [Server] section's
        credentials match the options' (case-insensitively).
        """
        # Local import: configparser was previously used here without ever
        # being imported, which raised NameError on this code path.
        from configparser import ConfigParser, ExtendedInterpolation

        config_path = options.get('CONFIG_PATH')
        if config_path:
            # get cred from config file
            config = ConfigParser(interpolation=ExtendedInterpolation())
            config.read(config_path)
            config_username = config['Server']['username']
            config_password = config['Server']['password']
            if config_username.lower() == options.get('USERNAME').lower() and config_password.lower() == options.get('PASSWORD').lower():
                return True
        return False

    # just look for right cred on any workspace
    def get_options(self, username, password):
        """Return True when any workspace's options.json matches the credentials.

        NOTE(review): unlike verify(), this comparison is case-sensitive --
        confirm whether that asymmetry is intended.
        """
        option_files = glob.glob(
            current_path + '/storages/**/options.json', recursive=True)
        # loop though all options available
        for option in option_files:
            json_option = utils.reading_json(option)
            if username == json_option.get('USERNAME'):
                if password == json_option.get('PASSWORD'):
                    return True
        return False

    # @local_only
    def post(self, workspace=None):
        """Authenticate and return {'access_token': <JWT>} or an error dict."""
        data = Authentication.parser.parse_args()
        username = data['username']
        password = data['password']
        # No workspace in the URL: accept a match from any workspace.
        if not workspace:
            if self.get_options(username, password):
                # cause we don't have real db so it's really hard to manage JWT
                # just change the secret if you want to revoke old token
                expires = datetime.timedelta(days=365)
                token = create_access_token(username, expires_delta=expires)
                return {'access_token': token}
            else:
                return {'error': "Credentials Incorrect"}
        elif workspace == 'None':
            # The literal string 'None' falls through and is treated like a
            # regular workspace name below.
            pass

        # Deliberately shadows the module-level current_path (same value).
        current_path = os.path.dirname(os.path.realpath(__file__))
        options_path = current_path + \
            '/storages/{0}/options.json'.format(workspace)
        if not utils.not_empty_file(options_path):
            return {'error': "Workspace not found"}
        options = utils.reading_json(options_path)
        if username == options.get('USERNAME'):
            if password == options.get('PASSWORD'):
                # cause we don't have real db so it's really hard to manage JWT
                # just change the secret if you want to revoke old token
                expires = datetime.timedelta(days=365)
                token = create_access_token(username, expires_delta=expires)
                return {'access_token': token}
        return {'error': "Credentials Incorrect"}
| [
"flask_restful.reqparse.RequestParser",
"flask_jwt_extended.create_access_token",
"utils.not_empty_file",
"os.path.realpath",
"glob.glob",
"datetime.timedelta",
"utils.reading_json"
] | [((320, 346), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (336, 346), False, 'import os\n'), ((395, 419), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (417, 419), False, 'from flask_restful import Api, Resource, reqparse\n'), ((1563, 1632), 'glob.glob', 'glob.glob', (["(current_path + '/storages/**/options.json')"], {'recursive': '(True)'}), "(current_path + '/storages/**/options.json', recursive=True)\n", (1572, 1632), False, 'import glob\n'), ((3018, 3050), 'utils.reading_json', 'utils.reading_json', (['options_path'], {}), '(options_path)\n', (3036, 3050), False, 'import utils\n'), ((1751, 1777), 'utils.reading_json', 'utils.reading_json', (['option'], {}), '(option)\n', (1769, 1777), False, 'import utils\n'), ((2767, 2793), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2783, 2793), False, 'import os\n'), ((2911, 2945), 'utils.not_empty_file', 'utils.not_empty_file', (['options_path'], {}), '(options_path)\n', (2931, 2945), False, 'import utils\n'), ((2447, 2475), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (2465, 2475), False, 'import datetime\n'), ((2500, 2552), 'flask_jwt_extended.create_access_token', 'create_access_token', (['username'], {'expires_delta': 'expires'}), '(username, expires_delta=expires)\n', (2519, 2552), False, 'from flask_jwt_extended import JWTManager, jwt_required, create_access_token, get_jwt_identity\n'), ((3331, 3359), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (3349, 3359), False, 'import datetime\n'), ((3384, 3436), 'flask_jwt_extended.create_access_token', 'create_access_token', (['username'], {'expires_delta': 'expires'}), '(username, expires_delta=expires)\n', (3403, 3436), False, 'from flask_jwt_extended import JWTManager, jwt_required, create_access_token, get_jwt_identity\n')] |
import os
from json import dumps
import logging
from platform import platform
from psutil import cpu_percent, virtual_memory
from serial import Serial
from time import sleep
from sb_serial import Sensor, SbSerial
# Serial device to open; override via the DEV environment variable to suit
# the host machine and OS (examples for Linux and macOS kept below).
#DEV = os.getenv('DEV', '/dev/ttyS0')
#DEV = os.getenv('DEV', '/dev/tty.SLAB_USBtoUART')
DEV = os.getenv('DEV', 'COM11')
# Create serial object and use it to create SbSerial connection
s = Serial(DEV)
sbs = SbSerial(s)
def json_measurement_handler():
    """Serialize the current (hard-coded) sensor readings to a JSON string."""
    # Python's True/False serialize to JSON true/false.
    return dumps({
        "temperature": 123.2,
        "waterLevelHigh": True,
        "waterLevelLow": False,
        "Turbidity": 45,
        "DissolvedOxygen": 78,
        "Debris": True,
        "flowIn": 12.5,
        "flowOut": 11.8,
    })
def json_settings_handler():
    """Serialize the (hard-coded) control settings to a JSON string."""
    # Python's True/False serialize to JSON true/false.
    return dumps({
        "Fog_roller_OFF": 300,
        "Fog_roller_ON": 300,
        "Recirculation_OFF": 90,
        "Recirculation_ON": 10,
        "Service_pump_OFF": 2550,
        "Service_pump_ON": 90,
        "Solenoid_valve_OFF": 2550,
        "Solenoid_valve_ON": 120,
        "cleaningCycles": 3,
        "unitOFF": 2,
        "unitON": 7,
    })
# Create sensor objects with SbSerial connection, callback, type,
# resource path, and optionally unit of measure
sensors = [
    Sensor(sbs, json_measurement_handler, 'json', 'sensors/uplinkMeasured'),
    Sensor(sbs, json_settings_handler, 'json', 'sensors/controlSettings')
]
# Register every sensor. A plain loop is used because only the side effect
# matters (the previous list comprehension discarded its result list).
for sensor in sensors:
    sensor.create_sensor()
# Run forever; Ctrl-C exits cleanly.
while True:
    try:
        sleep(10)
        print("loop")
    except KeyboardInterrupt:
        exit(0)
| [
"sb_serial.SbSerial",
"os.getenv",
"json.dumps",
"time.sleep",
"sb_serial.Sensor",
"serial.Serial"
] | [((389, 414), 'os.getenv', 'os.getenv', (['"""DEV"""', '"""COM11"""'], {}), "('DEV', 'COM11')\n", (398, 414), False, 'import os\n'), ((486, 497), 'serial.Serial', 'Serial', (['DEV'], {}), '(DEV)\n', (492, 497), False, 'from serial import Serial\n'), ((504, 515), 'sb_serial.SbSerial', 'SbSerial', (['s'], {}), '(s)\n', (512, 515), False, 'from sb_serial import Sensor, SbSerial\n'), ((909, 927), 'json.dumps', 'dumps', (['dictPayload'], {}), '(dictPayload)\n', (914, 927), False, 'from json import dumps\n'), ((1419, 1437), 'json.dumps', 'dumps', (['dictPayload'], {}), '(dictPayload)\n', (1424, 1437), False, 'from json import dumps\n'), ((1571, 1642), 'sb_serial.Sensor', 'Sensor', (['sbs', 'json_measurement_handler', '"""json"""', '"""sensors/uplinkMeasured"""'], {}), "(sbs, json_measurement_handler, 'json', 'sensors/uplinkMeasured')\n", (1577, 1642), False, 'from sb_serial import Sensor, SbSerial\n'), ((1648, 1717), 'sb_serial.Sensor', 'Sensor', (['sbs', 'json_settings_handler', '"""json"""', '"""sensors/controlSettings"""'], {}), "(sbs, json_settings_handler, 'json', 'sensors/controlSettings')\n", (1654, 1717), False, 'from sb_serial import Sensor, SbSerial\n'), ((1812, 1821), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (1817, 1821), False, 'from time import sleep\n')] |
# TODO selection percentage pattern works, but run and done methods really slow down
# TODO whole computation when we traverse really big data
# TODO in order to achieve better performance EconomizeFiniteStateMachine class was provided
import math
class SelectionFiniteStateMachine:
    """Select a percentage of the items of a streamed "features" array.

    The machine is fed structural events (see ``run``) while a document is
    traversed.  From the document length and the largest item size seen so
    far it derives how many items should be visited (``to_visit``) and how
    far apart the selected items are (``space``).
    """

    def __init__(self, *args, **kwargs):
        self.empty_passage = False
        # Size bookkeeping for the items inside the "features" array.
        self.obj_temp_size = 0
        self.obj_size = 0
        # Selection counters (-1 means nothing has been seen yet).
        self.visited = -1
        self.visited_total = -1
        self.to_visit = 0
        self.space = 0
        self.intersections = -1
        self.items = 0
        # Configuration: total document length and selection percentage.
        self.doc_len = kwargs.get('len', 0)
        self.percentage = kwargs.get('percentage', 0)
        # Anatomy signatures: "between items" vs "inside an item".
        self.immersion = {'features': [0, 1],
                          'features_obj': [0, 1, 0]}

    def run(self, anatomy=None, blocked=None):
        """Feed one structural event and report whether selection is active.

        :param anatomy: DataAnatomyFiniteStateMachine.stack [int, ...]
        :param blocked: truthy when the current item must not be counted
        :return: bool - True once at least one unreset boundary crossing exists
        """
        if anatomy == self.immersion['features'] and not self.empty_passage:
            # Crossed an item boundary inside the features array.
            self.visited_total += 1
            self.intersections += 1
            self.empty_passage = True
            if blocked:
                self.obj_temp_size = 0
            else:
                self.visited += 1
                self.__adjust_information__()
            if self.intersections >= self.space:
                self.intersections = 0
        elif anatomy[:3] == self.immersion['features_obj']:
            # Descended into an item: keep measuring its size.
            self.empty_passage = False
            self.obj_temp_size += 1
        return self.intersections >= 1

    def done(self):
        """True once enough items were visited (never before an estimate exists)."""
        return self.visited >= self.to_visit and self.to_visit != 0

    def __adjust_information__(self):
        """Refresh the selection estimates when a larger item was measured."""
        if self.obj_temp_size > self.obj_size:
            self.obj_size = self.obj_temp_size
            self.__calculate_percentage__()
            self.__calculate_space__()
        self.obj_temp_size = 0

    def __calculate_percentage__(self):
        """Recompute the item count and how many of them still need a visit."""
        self.items = math.floor(self.doc_len / self.obj_size)
        remaining = math.floor(((self.items * self.percentage) / 100) - self.visited)
        self.to_visit = remaining if remaining >= 0 else 1

    def __calculate_space__(self):
        """Recompute the gap between two selected items."""
        self.space = math.floor(self.items / (self.to_visit + self.visited))
"math.floor"
] | [((2125, 2165), 'math.floor', 'math.floor', (['(self.doc_len / self.obj_size)'], {}), '(self.doc_len / self.obj_size)\n', (2135, 2165), False, 'import math\n'), ((2185, 2246), 'math.floor', 'math.floor', (['(self.items * self.percentage / 100 - self.visited)'], {}), '(self.items * self.percentage / 100 - self.visited)\n', (2195, 2246), False, 'import math\n'), ((2361, 2416), 'math.floor', 'math.floor', (['(self.items / (self.to_visit + self.visited))'], {}), '(self.items / (self.to_visit + self.visited))\n', (2371, 2416), False, 'import math\n')] |
## 1. Introduction to the data ##

import pandas as pd
import numpy as np  # was missing: np.random.permutation is used below

cars = pd.read_csv("auto.csv")
unique_regions = cars['origin'].unique()
print(unique_regions)

## 2. Dummy variables ##

dummy_cylinders = pd.get_dummies(cars["cylinders"], prefix="cyl")
cars = pd.concat([cars, dummy_cylinders], axis=1)
print(cars.head())
dummy_years = pd.get_dummies(cars["year"], prefix="year")
cars = pd.concat([cars, dummy_years], axis=1)
cars = cars.drop("year", axis=1)
cars = cars.drop("cylinders", axis=1)
print(cars.head())

## 3. Multiclass classification ##

# Shuffle the rows, then take a 70/30 train/test split.
shuffled_rows = np.random.permutation(cars.index)
shuffled_cars = cars.iloc[shuffled_rows]
highest_train_row = int(cars.shape[0] * .70)
train = shuffled_cars.iloc[0:highest_train_row]
test = shuffled_cars.iloc[highest_train_row:]

## 4. Training a multiclass logistic regression model ##
from sklearn.linear_model import LogisticRegression
import re

unique_origins = cars["origin"].unique()
unique_origins.sort()

models = {}
# Name the dummy feature columns once so the train and test phases select
# the same columns ("features" was previously undefined in section 5).
features = [c for c in train.columns if c.startswith("cyl") or c.startswith("year")]
X = train[features]
print(X.shape)

# One-vs-all: train one binary classifier per origin.
for origin in unique_origins:
    y = (train["origin"] == origin)
    lr = LogisticRegression()
    lr.fit(X, y)
    models[origin] = lr
print(models)

## 5. Testing the models ##
testing_probs = pd.DataFrame(columns=unique_origins)
test = test[features]
print(test.shape)

# Probability of the positive class for each origin's classifier.
for origin in unique_origins:
    X_test = test[features]
    testing_probs[origin] = models[origin].predict_proba(X_test)[:,1]

## 6. Choose the origin ##

# The predicted origin is the one with the highest probability per row.
predicted_origins = testing_probs.idxmax(axis = 1)
print(predicted_origins) | [
"pandas.DataFrame",
"pandas.read_csv",
"sklearn.linear_model.LogisticRegression",
"pandas.get_dummies",
"pandas.concat"
] | [((62, 85), 'pandas.read_csv', 'pd.read_csv', (['"""auto.csv"""'], {}), "('auto.csv')\n", (73, 85), True, 'import pandas as pd\n'), ((194, 241), 'pandas.get_dummies', 'pd.get_dummies', (["cars['cylinders']"], {'prefix': '"""cyl"""'}), "(cars['cylinders'], prefix='cyl')\n", (208, 241), True, 'import pandas as pd\n'), ((249, 291), 'pandas.concat', 'pd.concat', (['[cars, dummy_cylinders]'], {'axis': '(1)'}), '([cars, dummy_cylinders], axis=1)\n', (258, 291), True, 'import pandas as pd\n'), ((325, 368), 'pandas.get_dummies', 'pd.get_dummies', (["cars['year']"], {'prefix': '"""year"""'}), "(cars['year'], prefix='year')\n", (339, 368), True, 'import pandas as pd\n'), ((376, 414), 'pandas.concat', 'pd.concat', (['[cars, dummy_years]'], {'axis': '(1)'}), '([cars, dummy_years], axis=1)\n', (385, 414), True, 'import pandas as pd\n'), ((1268, 1304), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'unique_origins'}), '(columns=unique_origins)\n', (1280, 1304), True, 'import pandas as pd\n'), ((1145, 1165), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (1163, 1165), False, 'from sklearn.linear_model import LogisticRegression\n')] |
import glob
import os
import subprocess
import time
import matplotlib.pyplot as plt
import numpy
import torch
def viz(
    batch: torch.Tensor,
    episodes=1000,
    video=True,
    folder='output',
) -> None:
    """Visualize the GoodAI Breakout dataset episode by episode.

    Each frame is rendered with matplotlib and saved into ``folder``; when
    ``video`` is True the saved frames are stitched into an mp4 via ffmpeg
    and the intermediate pngs are deleted afterwards.
    NOTE(review): ``batch`` is accessed via .states/.rewards attributes, so
    the torch.Tensor annotation looks inaccurate -- confirm the real type.
    """
    figure = plt.figure(1)
    axes = figure.add_subplot(111)
    axes.set_title("Breakout")
    # Blank 84x84 RGBA frame so the artist exists before the first episode.
    artist = axes.imshow(numpy.zeros((84, 84, 4)))
    figure.show()
    artist.axes.figure.canvas.draw()
    start = time.time()
    rewards = 0
    for episode in range(episodes):
        frame = batch.states[episode].permute(1, 2, 0)
        rewards += batch.rewards[episode].detach().cpu().numpy()
        axes.set_title(f"episode: {episode} | reward: {rewards}")
        artist.set_data(frame)
        artist.axes.figure.canvas.draw()
        axes.figure.savefig(f"{folder}/img{episode:02d}.png")
    if video:
        subprocess.call([
            'ffmpeg', '-framerate', '8', '-i', f'{folder}/img%02d.png', '-r', '30',
            '-pix_fmt', 'yuv420p', f'{folder}/video_name.mp4'
        ])
        for png_path in glob.glob(f"{folder}/*.png"):
            os.remove(png_path)
    print('FPS:', 100 / (time.time() - start))
| [
"matplotlib.pyplot.figure",
"numpy.zeros",
"subprocess.call",
"time.time",
"glob.glob",
"os.remove"
] | [((261, 274), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (271, 274), True, 'import matplotlib.pyplot as plt\n'), ((454, 465), 'time.time', 'time.time', ([], {}), '()\n', (463, 465), False, 'import time\n'), ((348, 372), 'numpy.zeros', 'numpy.zeros', (['(84, 84, 4)'], {}), '((84, 84, 4))\n', (359, 372), False, 'import numpy\n'), ((821, 965), 'subprocess.call', 'subprocess.call', (["['ffmpeg', '-framerate', '8', '-i', f'{folder}/img%02d.png', '-r', '30',\n '-pix_fmt', 'yuv420p', f'{folder}/video_name.mp4']"], {}), "(['ffmpeg', '-framerate', '8', '-i', f'{folder}/img%02d.png',\n '-r', '30', '-pix_fmt', 'yuv420p', f'{folder}/video_name.mp4'])\n", (836, 965), False, 'import subprocess\n'), ((1005, 1033), 'glob.glob', 'glob.glob', (['f"""{folder}/*.png"""'], {}), "(f'{folder}/*.png')\n", (1014, 1033), False, 'import glob\n'), ((1041, 1061), 'os.remove', 'os.remove', (['file_name'], {}), '(file_name)\n', (1050, 1061), False, 'import os\n'), ((1086, 1097), 'time.time', 'time.time', ([], {}), '()\n', (1095, 1097), False, 'import time\n')] |
from Bio import SeqIO
from subprocess import Popen, PIPE
# Read every adapter sequence from the FASTA file supplied as the second
# input. The with-statement closes the handle (the previous version opened
# the file and never closed it).
with open(snakemake.input[1], 'r') as fasta_handle:
    adapters = [str(record.seq) for record in SeqIO.parse(fasta_handle, 'fasta')]
# cutadapt expects one -b option per adapter sequence.
adapters = '-b ' + ' -b '.join(adapters)
cutadapt_cmd = f"cutadapt -o {snakemake.output[0]} {adapters} {snakemake.input[0]}"
p = Popen(cutadapt_cmd, stdout=PIPE, stderr=PIPE, shell=True)
stdout, stderr = p.communicate()
"subprocess.Popen"
] | [((274, 331), 'subprocess.Popen', 'Popen', (['cutadapt_cmd'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'shell': '(True)'}), '(cutadapt_cmd, stdout=PIPE, stderr=PIPE, shell=True)\n', (279, 331), False, 'from subprocess import Popen, PIPE\n')] |
#!/usr/bin/env python3
import sys, json, time, hashlib, base64
from collections import defaultdict
# vat-mint (v5) .getCurrentAmount is a really simple method: it looks up a
# Presence in a WeakMap, and returns the value. The only syscall it makes is
# the resolve. There are four timestamps of interest:
# A: delivery sent from kernel
# B: resolve syscall received by kernel
# C: resolve syscall result sent by kernel
# D: delivery result received by kernel
#
# A-B includes delivery marshalling, early vat processing (including WeakMap
# lookup), syscall.resolve marshalling
# B-C records the kernel time spent updating the promise table and queueing
# notify() events
# C-D includes syscall result marshalling, end-of-crank GC processing

# For a first pass, we record the three deltas and the deliveryNum
deliveryNums = [] # deliveryNum
deliveries = {} # deliveryNum: { AB, BC, CD, computrons }
last_delivery = None
num_syscalls = None
syscall_start = None
syscall_type = None
syscall_times = defaultdict(list)

# Each stdin line is one slogfile JSON record; only vat v5 is of interest.
for line in sys.stdin:
    data = json.loads(line)
    if data.get("vatID", None) != "v5":
        continue
    if data["type"] == "deliver":
        last_delivery = None
        if data["kd"][0] == "message" and data["kd"][2]["method"] == "getCurrentAmount":
            delivery_start = data["time"]
            last_delivery = data["deliveryNum"]
            deliveryNums.append(last_delivery)
            deliveries[last_delivery] = {}
            num_syscalls = 0
    # Ignore everything outside a getCurrentAmount crank.
    if last_delivery is None:
        continue
    if data["type"] == "syscall":
        num_syscalls += 1
        assert num_syscalls == 1
        syscall_type = data["ksc"][0]
        assert syscall_type == "resolve"
        syscall_start = data["time"]
        resolutions = data["ksc"][2]
        count = len(resolutions)
        assert count == 1
        rejected = resolutions[0][1]
        assert not rejected
    if data["type"] == "syscall-result":
        syscall_finish = data["time"]
    if data["type"] == "deliver-result":
        computrons = data["dr"][2]["compute"]
        delivery_finish = data["time"]
        AB = syscall_start - delivery_start
        BC = syscall_finish - syscall_start
        CD = delivery_finish - syscall_finish
        elapsed = delivery_finish - delivery_start
        d = deliveries[last_delivery]
        d["AB"] = AB
        d["BC"] = BC
        d["CD"] = CD
        d["elapsed"] = elapsed
        d["computrons"] = computrons
        # print milliseconds
        print("%6d %8dc %1.2f %1.2f %1.2f = %1.2f" % (
            last_delivery,
            d["computrons"],
            1000*d["AB"], 1000*d["BC"], 1000*d["CD"], 1000*d["elapsed"],
        ))

# Disabled summary pass, kept for reference. Fixed to iterate deliveryNums
# (was "in deliveryNum") and to use valid %-format specifiers (was "$.6f").
if 0:
    for deliveryNum in deliveryNums:
        d = deliveries[deliveryNum]
        print("%6d %8dc %.6f %.6f %.6f = %.6f" % (
            deliveryNum,
            d["computrons"],
            d["AB"], d["BC"], d["CD"], d["elapsed"],
        ))
| [
"json.loads",
"collections.defaultdict"
] | [((1007, 1024), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1018, 1024), False, 'from collections import defaultdict\n'), ((1060, 1076), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1070, 1076), False, 'import sys, json, time, hashlib, base64\n')] |
""" Download datasets from SANDANA samples from Galaxy server.
python scripts/download_sandana_datasets.py --help
"""
import argparse
import logging
from cycif_db.galaxy_download import download_sandana
def _build_parser():
    """Assemble the command line interface for the download script."""
    p = argparse.ArgumentParser()
    p.add_argument('--server', '-s', type=str, dest='server', required=False,
                   help="Galaxy server URL address. Can be set in `config.yml`.")
    p.add_argument('--key', '-k', type=str, dest='api_key', required=False,
                   help="API key to the Galaxy server. Can be set in `config.yml`.")
    p.add_argument('destination', type=str,
                   help="The folder to save the downloaded files.")
    p.add_argument('-v', '--verbose', default=False, action='store_true',
                   help="Show detailed log.")
    return p


parser = _build_parser()
args = parser.parse_args()

# The verbose flag switches on debug-level logging for the whole run.
if args.verbose:
    logging.basicConfig(level=logging.DEBUG)

download_sandana(args.destination, server=args.server, api_key=args.api_key)
| [
"logging.basicConfig",
"cycif_db.galaxy_download.download_sandana",
"argparse.ArgumentParser"
] | [((215, 240), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (238, 240), False, 'import argparse\n'), ((851, 927), 'cycif_db.galaxy_download.download_sandana', 'download_sandana', (['args.destination'], {'server': 'args.server', 'api_key': 'args.api_key'}), '(args.destination, server=args.server, api_key=args.api_key)\n', (867, 927), False, 'from cycif_db.galaxy_download import download_sandana\n'), ((809, 849), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (828, 849), False, 'import logging\n')] |
import os.path
import re
import platform
import subprocess
from copy import deepcopy
from pathlib import Path
from schema import Optional, Schema, SchemaError, Or
from buildz.toolchain.generic import GenericToolchain
from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list
class GccToolchain(GenericToolchain):
_confsch = Schema(merge(GenericToolchain._confsch._schema, {
'gcc_path': str,
'ar_path': str,
'ld_path': str
}))
_envsch = Schema(merge(GenericToolchain._envsch._schema, {
Optional('compile_flags'): [str],
Optional('optimization'): Or(0, 1, 2, 3, 's'),
Optional('includes'): [str],
Optional('defines'): [str],
Optional('build_type'): str,
Optional('debug_level'): Or(0, 1, None, 3),
Optional('link_flags'): [str],
Optional('link_dirs'): [str],
Optional('link'): [str]
}))
__execext = {
'Windows': '.exe',
'Linux': ''
}
__sharedext = {
'Windows': '.dll',
'Linux': '.so'
}
    def __init__(self, toolchain_setup):
        """Initialize the GCC toolchain with the shared toolchain setup object."""
        super().__init__(toolchain_setup)
def default_includes(self):
# gcc -xc -E -v /dev/null
args = [self.conf['gcc_path'], '-xc', '-E', '-v', os.devnull]
args.extend(self.env.get('flags', []))
incl_start_regex = r' *#include <\.\.\.> search starts here: *'
incl_end_regex = r' *End of search list\. *'
proc = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT , text=True)
lines = proc.stdout.splitlines()
start_it = find_re_it_in_list(incl_start_regex, lines)
if start_it == None:
return []
end_it = find_re_it_in_list(incl_end_regex, lines, start_it)
if end_it == None:
return []
# theres no paths between them
if (end_it - start_it) == 1:
return []
return lines[start_it+1 : end_it]
def defines(self, uf_env):
# gcc -dM -E - $flags 0</dev/null
def_regex = r'^ *#define +(.+?) +(.+?) *$'
temp_env = merge(self.env, uf_env)
args = [self.conf['gcc_path'], '-dM', '-E', '-']
args.extend(temp_env.get('flags', []))
matches = get_cmd_matches(args, def_regex, subprocess.DEVNULL)
if not matches:
return []
# matches is tuple of (defname:str, defvalue:str)
return matches
def _unifiedflags_env(self, env):
incls = env.get('includes', [])
defs = env.get('defines', [])
opt = env.get('optimization', 3)
build_type = env.get('build_type', 'release')
debug_level = env.get('debug_level', None)
fl = []
if build_type == 'debug':
fl.append('-g' + debug_level)
fl.append('-D_DEBUG')
if build_type == 'release':
fl.append('-DNDEBUG')
fl.append('-O' + str(opt))
fl.extend(['-D' + d for d in defs])
fl.extend(['-I' + i for i in incls])
return {
'compile_flags': fl
}
    def build_mod(self, config, module, target):
        """Compile and package one module for the given build target.

        Merges the toolchain-, module- and target-level environments, derives
        the compile flags, builds the object files, then links or archives
        them according to ``module.packaging``. Returns True when the final
        link/archive step reported success (exit status 0).
        """
        mod_env = module.envs.get(target.toolchain, {})
        # Substitution parameters available inside env strings and paths.
        name_params = {
            'build_type': config.build.type,
            'module_name': module.name,
            'target_name': target.name,
            'target_toolchain': target.toolchain
        }
        # Relative paths in each env are resolved against different roots:
        # cwd for toolchain/target envs, the module dir for the module env.
        norm_tchenv = self._normalize_env(self.env, os.getcwd(), name_params)
        norm_modenv = self._normalize_env(mod_env, module.absdir, name_params)
        norm_trgenv = self._normalize_env(target.env, os.getcwd(), name_params)
        env = merge_envs(norm_tchenv, norm_modenv, norm_trgenv, self._envsch)
        uf_env = self._unifiedflags_env(env)
        env = merge(env, uf_env)
        out_absdir = Path(self.setup.output_dir.format(**name_params)).resolve()
        obj_absdir = (out_absdir / 'obj')
        objects = self._build_objects(env, obj_absdir, module)
        out_name = self.setup.output_pattern.format(**name_params)
        # NOTE(review): `result` stays unbound when module.packaging is not
        # one of executable/shared/static -- confirm upstream validation.
        if module.packaging == 'executable':
            result = self._link(env, objects, False, out_absdir, out_name)
        if module.packaging == 'shared':
            result = self._link(env, objects, True, out_absdir, out_name)
        if module.packaging == 'static':
            result = self._ar_objects(env, objects, out_absdir, out_name)
        return (result == 0)
def _normalize_env(self, env, path, name_params):
tenv = deepcopy(env)
for key, val in tenv.items():
if isinstance(val, list):
temp = []
for elem in val:
if isinstance(elem, str):
temp.append(elem.format(**name_params))
else:
temp.append(elem)
tenv[key] = temp
if isinstance(val, str):
tenv[key] = val.format(**name_params)
tenv['includes'] = resolve_rel_paths_list(env.get('includes', []), path)
tenv['link_dirs'] = resolve_rel_paths_list(env.get('link_dirs', []), path)
return tenv
def _build_objects(self, uf_env, objs_absdir, module):
gcc = self.conf['gcc_path']
flags = uf_env.get('compile_flags', [])
objs_absdir = Path(objs_absdir)
os.makedirs(str(objs_absdir), exist_ok=True)
obj_abspaths = []
for fp_str in module.files:
fp = Path(fp_str)
if fp.is_absolute():
obj_abspath = Path(objs_absdir) / (fp.stem + '.o')
fp_abspath = fp
else:
obj_abspath = Path(objs_absdir) / fp.with_suffix('.o')
fp_abspath = module.absdir / fp
os.makedirs(obj_abspath.parent, exist_ok=True)
# if obj is newer than source compilation is nonsense
if obj_abspath.exists():
obj_mtime = obj_abspath.stat().st_mtime
src_mtime = fp_abspath.stat().st_mtime
if src_mtime < obj_mtime:
continue
args = [gcc, '-c', '-o', str(obj_abspath)]
args.extend(flags)
args.append(fp_abspath)
proc = subprocess.run(args, check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
if proc.returncode == 0:
print("[GCC] Compiled {} without errors.".format(fp_str))
else:
print('[GCC] Returned with errors for file {}:\n'.format(fp_str), proc.stdout)
break
obj_abspaths.append(obj_abspath)
return obj_abspaths
def _link(self, env, obj_abspaths, shared, exe_absdir, exe_name):
if shared:
ext = self.__sharedext[platform.system()]
exe_name = ('lib' + exe_name)
else:
ext = self.__execext[platform.system()]
exe_absdir = Path(exe_absdir)
if shared:
exe_abspath = exe_absdir / (exe_name + ext)
else:
exe_abspath = exe_absdir / (exe_name + ext)
ld = self.conf['ld_path']
link_dirs = ['-L ' + l for l in self.env.get('link_dirs', [])]
link = ['-l ' + l for l in self.env.get('link', [])]
link_flags = self.env.get('link_flags', [])
args = [ld]
if shared:
if platform.system() == 'Windows':
args.append('--dll --output-def {}'.format(exe_absdir / (exe_name + '.def')))
if platform.system() == 'Linux':
args.append('-shared -soname={}'.format(exe_name + ext))
args.extend(link_dirs)
args.extend(link_flags)
args.extend(link)
args.extend(['-o', str(exe_abspath)])
args.extend(obj_abspaths)
proc = subprocess.run(args, check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
if proc.returncode == 0:
print("[LINKER] Linked {} without errors.".format(exe_name+ext))
else:
print('[LINKER] Returned errors for file {}:\n'.format(exe_name+ext), proc.stdout, '\n')
return proc.returncode
def _ar_objects(self, env, obj_abspaths, a_absdir, a_name):
a_name = ('lib' + a_name + '.a')
a_abspath = Path(a_absdir) / a_name
ar = self.conf['ar_path']
args = [ar]
args.extend(['-r', str(a_abspath)])
args.extend(obj_abspaths)
proc = subprocess.run(args, check=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
if proc.returncode == 0:
print("[AR] Archived without errors.")
else:
print('[AR] Failed for file {}:\n'.format(a_name), proc.stdout, '\n')
return proc.returncode
# VSCode support
def gen_tasks(self, target):
return [
(target.name)
]
def gen_config(self, target):
return {} | [
"buildz.utils.get_cmd_matches",
"copy.deepcopy",
"schema.Optional",
"pathlib.Path",
"subprocess.run",
"buildz.utils.merge",
"platform.system",
"schema.Or",
"buildz.utils.find_re_it_in_list",
"buildz.utils.merge_envs"
] | [((385, 480), 'buildz.utils.merge', 'merge', (['GenericToolchain._confsch._schema', "{'gcc_path': str, 'ar_path': str, 'ld_path': str}"], {}), "(GenericToolchain._confsch._schema, {'gcc_path': str, 'ar_path': str,\n 'ld_path': str})\n", (390, 480), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list\n'), ((1513, 1599), 'subprocess.run', 'subprocess.run', (['args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'text': '(True)'}), '(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text\n =True)\n', (1527, 1599), False, 'import subprocess\n'), ((1661, 1704), 'buildz.utils.find_re_it_in_list', 'find_re_it_in_list', (['incl_start_regex', 'lines'], {}), '(incl_start_regex, lines)\n', (1679, 1704), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list\n'), ((1774, 1825), 'buildz.utils.find_re_it_in_list', 'find_re_it_in_list', (['incl_end_regex', 'lines', 'start_it'], {}), '(incl_end_regex, lines, start_it)\n', (1792, 1825), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list\n'), ((2162, 2185), 'buildz.utils.merge', 'merge', (['self.env', 'uf_env'], {}), '(self.env, uf_env)\n', (2167, 2185), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list\n'), ((2309, 2361), 'buildz.utils.get_cmd_matches', 'get_cmd_matches', (['args', 'def_regex', 'subprocess.DEVNULL'], {}), '(args, def_regex, subprocess.DEVNULL)\n', (2324, 2361), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list\n'), ((3705, 3768), 'buildz.utils.merge_envs', 'merge_envs', (['norm_tchenv', 'norm_modenv', 'norm_trgenv', 'self._envsch'], {}), '(norm_tchenv, norm_modenv, norm_trgenv, self._envsch)\n', (3715, 3768), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, 
merge_envs, resolve_rel_paths_list\n'), ((3828, 3846), 'buildz.utils.merge', 'merge', (['env', 'uf_env'], {}), '(env, uf_env)\n', (3833, 3846), False, 'from buildz.utils import find_re_it_in_list, get_cmd_matches, merge, merge_envs, resolve_rel_paths_list\n'), ((4571, 4584), 'copy.deepcopy', 'deepcopy', (['env'], {}), '(env)\n', (4579, 4584), False, 'from copy import deepcopy\n'), ((5385, 5402), 'pathlib.Path', 'Path', (['objs_absdir'], {}), '(objs_absdir)\n', (5389, 5402), False, 'from pathlib import Path\n'), ((7003, 7019), 'pathlib.Path', 'Path', (['exe_absdir'], {}), '(exe_absdir)\n', (7007, 7019), False, 'from pathlib import Path\n'), ((7879, 7978), 'subprocess.run', 'subprocess.run', (['args'], {'check': '(False)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'text': '(True)'}), '(args, check=False, stdout=subprocess.PIPE, stderr=subprocess\n .STDOUT, text=True)\n', (7893, 7978), False, 'import subprocess\n'), ((8532, 8631), 'subprocess.run', 'subprocess.run', (['args'], {'check': '(False)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'text': '(True)'}), '(args, check=False, stdout=subprocess.PIPE, stderr=subprocess\n .STDOUT, text=True)\n', (8546, 8631), False, 'import subprocess\n'), ((5536, 5548), 'pathlib.Path', 'Path', (['fp_str'], {}), '(fp_str)\n', (5540, 5548), False, 'from pathlib import Path\n'), ((6309, 6408), 'subprocess.run', 'subprocess.run', (['args'], {'check': '(False)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'text': '(True)'}), '(args, check=False, stdout=subprocess.PIPE, stderr=subprocess\n .STDOUT, text=True)\n', (6323, 6408), False, 'import subprocess\n'), ((8358, 8372), 'pathlib.Path', 'Path', (['a_absdir'], {}), '(a_absdir)\n', (8362, 8372), False, 'from pathlib import Path\n'), ((580, 605), 'schema.Optional', 'Optional', (['"""compile_flags"""'], {}), "('compile_flags')\n", (588, 605), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((622, 646), 'schema.Optional', 
'Optional', (['"""optimization"""'], {}), "('optimization')\n", (630, 646), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((677, 697), 'schema.Optional', 'Optional', (['"""includes"""'], {}), "('includes')\n", (685, 697), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((714, 733), 'schema.Optional', 'Optional', (['"""defines"""'], {}), "('defines')\n", (722, 733), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((751, 773), 'schema.Optional', 'Optional', (['"""build_type"""'], {}), "('build_type')\n", (759, 773), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((788, 811), 'schema.Optional', 'Optional', (['"""debug_level"""'], {}), "('debug_level')\n", (796, 811), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((841, 863), 'schema.Optional', 'Optional', (['"""link_flags"""'], {}), "('link_flags')\n", (849, 863), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((880, 901), 'schema.Optional', 'Optional', (['"""link_dirs"""'], {}), "('link_dirs')\n", (888, 901), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((918, 934), 'schema.Optional', 'Optional', (['"""link"""'], {}), "('link')\n", (926, 934), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((648, 667), 'schema.Or', 'Or', (['(0)', '(1)', '(2)', '(3)', '"""s"""'], {}), "(0, 1, 2, 3, 's')\n", (650, 667), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((813, 830), 'schema.Or', 'Or', (['(0)', '(1)', 'None', '(3)'], {}), '(0, 1, None, 3)\n', (815, 830), False, 'from schema import Optional, Schema, SchemaError, Or\n'), ((6850, 6867), 'platform.system', 'platform.system', ([], {}), '()\n', (6865, 6867), False, 'import platform\n'), ((6958, 6975), 'platform.system', 'platform.system', ([], {}), '()\n', (6973, 6975), False, 'import platform\n'), ((7449, 7466), 'platform.system', 'platform.system', ([], {}), '()\n', (7464, 7466), False, 'import platform\n'), 
((7590, 7607), 'platform.system', 'platform.system', ([], {}), '()\n', (7605, 7607), False, 'import platform\n'), ((5613, 5630), 'pathlib.Path', 'Path', (['objs_absdir'], {}), '(objs_absdir)\n', (5617, 5630), False, 'from pathlib import Path\n'), ((5730, 5747), 'pathlib.Path', 'Path', (['objs_absdir'], {}), '(objs_absdir)\n', (5734, 5747), False, 'from pathlib import Path\n')] |
from .. import db
from .base import Base, BaseSchema
from typing import Dict, Union
from datetime import datetime
class Calculation(Base):
__table_name__ = 'calculation'
calc_id = db.Column(db.Integer, primary_key=True)
num1 = db.Column(db.Integer, nullable=False)
num2 = db.Column(db.Integer, nullable=False)
symbol = db.Column(db.String(20), nullable=False)
result = db.Column(db.String(1000), nullable=False)
def select_all(self):
return Calculation.query.all()
def select_id(self, calc_id: int):
calc = Calculation.query.filter_by(calc_id=calc_id)
return calc
def create(self, data: Dict[str, Union[str, int]]):
result = eval("%s%s%s" % (data['num1'], data['symbol'], data['num2']))
new_calc = Calculation(
num1=data['num1'],
num2=data['num2'],
symbol=data['symbol'],
result=result
)
db.session.add(new_calc)
db.session.commit()
return new_calc
def update(self, calc, data: Dict[str, Union[str, int]]):
result = eval("%s%s%s" % (data['num1'], data['symbol'], data['num2']))
now_time = datetime.strptime(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), '%Y-%m-%d %H:%M:%S')
update_data = dict(
num1=data['num1'],
num2=data['num2'],
symbol=data['symbol'],
result=result,
update_date=now_time
)
calc.update(update_data, synchronize_session=False)
db.session.commit()
return calc
def delete(self, calc):
now_time = datetime.strptime(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), '%Y-%m-%d %H:%M:%S')
delete_data = dict(
delete_date=now_time
)
calc.update(delete_data, synchronize_session=False)
db.session.commit()
return calc
class CalculationSchema(BaseSchema):
class Meta:
model = Calculation
fields = (
'calc_id',
'num1',
'num2',
'symbol',
'result',
'update_date',
'delete_date',
'create_date'
)
ordered = True | [
"datetime.datetime.utcnow"
] | [((1193, 1210), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1208, 1210), False, 'from datetime import datetime\n'), ((1634, 1651), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1649, 1651), False, 'from datetime import datetime\n')] |
# coding=utf-8
import random
import sys
import pygame
from pygame.color import THECOLORS
pygame.init()
screen = pygame.display.set_mode([640, 480])
screen.fill([255, 255, 255])
for i in range(0, 100):
width = random.randint(0, 250)
height = random.randint(0, 100)
top = random.randint(0, 400)
left = random.randint(0, 500)
color_name = random.choice(THECOLORS.keys())
color = THECOLORS[color_name]
line_width = random.randint(1, 3)
pygame.draw.rect(screen, color, [left, top, width, height], line_width)
pygame.display.flip()
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
pygame.quit()
| [
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.color.THECOLORS.keys",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.draw.rect",
"random.randint"
] | [((94, 107), 'pygame.init', 'pygame.init', ([], {}), '()\n', (105, 107), False, 'import pygame\n'), ((117, 152), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[640, 480]'], {}), '([640, 480])\n', (140, 152), False, 'import pygame\n'), ((542, 563), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (561, 563), False, 'import pygame\n'), ((699, 712), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (710, 712), False, 'import pygame\n'), ((218, 240), 'random.randint', 'random.randint', (['(0)', '(250)'], {}), '(0, 250)\n', (232, 240), False, 'import random\n'), ((254, 276), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (268, 276), False, 'import random\n'), ((287, 309), 'random.randint', 'random.randint', (['(0)', '(400)'], {}), '(0, 400)\n', (301, 309), False, 'import random\n'), ((321, 343), 'random.randint', 'random.randint', (['(0)', '(500)'], {}), '(0, 500)\n', (335, 343), False, 'import random\n'), ((444, 464), 'random.randint', 'random.randint', (['(1)', '(3)'], {}), '(1, 3)\n', (458, 464), False, 'import random\n'), ((469, 540), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'color', '[left, top, width, height]', 'line_width'], {}), '(screen, color, [left, top, width, height], line_width)\n', (485, 540), False, 'import pygame\n'), ((612, 630), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (628, 630), False, 'import pygame\n'), ((375, 391), 'pygame.color.THECOLORS.keys', 'THECOLORS.keys', ([], {}), '()\n', (389, 391), False, 'from pygame.color import THECOLORS\n')] |
"""
Tool for 'leave-one-out' testing features in dataset.
Adds use_column parameter for lightgbm CLI, which works
like an opposite one to ignore_columns.
Example usage
--------------
>>> python lgbm_tool.py --use_column=column1,column2,column3 \
>>> config=path_to_config data=path_to_data valid=path_to_valid
"""
import argparse
import subprocess
from typing import List, TextIO
def _get_all_features(data_file: TextIO) -> List[str]:
features = data_file.readline().strip().split(',')
return features
def _generate_ignore_string(features: List[str],
features_left: List[str]) -> str:
for feature in features_left:
features.remove(feature)
ignore_string = 'name:' + ','.join([f"{feature}" for feature in features])
return ignore_string
def _parse_lgbm_config(config_file: str) -> dict:
config = {}
f = open(config_file, 'r')
for line in f:
line = line.strip()
if line.startswith('#'):
continue
parameter_name, parameter_value = line.split('=')
config[parameter_name] = parameter_value
f.close()
return config
def _get_label_column(lightgbm_cli_args: dict):
"""Checks whether label column is either in CLI arguments or lgbm config
file and gets it. If not, raises an exception.
"""
if 'label_column' in lightgbm_cli_args:
label_column = lightgbm_cli_args['label_column'].replace('name:', '')
return label_column
config = lightgbm_cli_args.get('config')
if not config:
raise ValueError('No label column provided')
lightgbm_config_args = _parse_lgbm_config(config)
if 'label_column' not in lightgbm_config_args:
raise ValueError('No label column provided')
label_column = lightgbm_config_args['label_column'].replace('name:', '')
return label_column
def run_lgbm(args: dict) -> None:
"""Asynchronously runs lightgbm"""
lightgbm_args = {
key: value for key, value in
map(lambda x: x.split('='), args['lightgbm_args'])
}
label_column = _get_label_column(lightgbm_args)
with open(lightgbm_args['data'], 'r') as f:
features = _get_all_features(f)
use_columns = args['use_column'].split(',')
use_columns.append(label_column)
ignore_string = _generate_ignore_string(features, use_columns)
subprocess.call(
[
'lightgbm',
f'ignore_column={ignore_string}',
*args['lightgbm_args']
],
)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--use_column', type=str, required=False,
help='Features to use for training'
)
parser.add_argument(
'lightgbm_args', nargs='+',
help='Any arguments for lightgbm cli'
)
args = parser.parse_args()
run_lgbm(vars(args))
if __name__ == '__main__':
main()
| [
"subprocess.call",
"argparse.ArgumentParser"
] | [((2375, 2467), 'subprocess.call', 'subprocess.call', (["['lightgbm', f'ignore_column={ignore_string}', *args['lightgbm_args']]"], {}), "(['lightgbm', f'ignore_column={ignore_string}', *args[\n 'lightgbm_args']])\n", (2390, 2467), False, 'import subprocess\n'), ((2551, 2576), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2574, 2576), False, 'import argparse\n')] |
import setuptools
setuptools.setup(
name="image-quality-assessment",
version="0.0.1",
author="gdp",
author_email="<EMAIL>",
description="TBD",
long_description_content_type="text/markdown",
url="https://github.com/getyourguide/image-quality-assessment",
packages=setuptools.find_packages(),
package_data={
# If any package contains *.yml, include the file in the package:
"": ["*.yml", "*.json", "*.hdf5"],
},
python_requires=">=3.5",
) | [
"setuptools.find_packages"
] | [((297, 323), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (321, 323), False, 'import setuptools\n')] |
import torch.nn.functional as F
from torch import nn, zeros, cat, bmm
from data import MAX_LENGTH
class EncoderRNN(nn.Module):
def __init__(self, input_size, hidden_size):
super(EncoderRNN, self).__init__()
self.hidden_size = hidden_size
self.embedding = nn.Embedding(input_size, hidden_size)
self.gru = nn.GRU(hidden_size, hidden_size)
def forward(self, input, hidden):
embedded = self.embedding(input).view(1, 1, -1)
output, hidden = self.gru(embedded, hidden)
return output, hidden
def init_hidden(self):
return zeros(1, 1, self.hidden_size)
class AttnDecoderRNN(nn.Module):
def __init__(self, hidden_size, output_size, dropout_p=0.1, max_length=MAX_LENGTH):
super(AttnDecoderRNN, self).__init__()
self.hidden_size = hidden_size
self.output_size = output_size
self.dropout_p = dropout_p
self.max_length = max_length
self.embedding = nn.Embedding(self.output_size, self.hidden_size)
self.attn = nn.Linear(self.hidden_size * 2, self.max_length)
self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size)
self.dropout = nn.Dropout(self.dropout_p)
self.gru = nn.GRU(self.hidden_size, self.hidden_size)
self.out = nn.Linear(self.hidden_size, self.output_size)
def forward(self, input, hidden, encoder_outputs):
embedded = self.embedding(input).view(1, 1, -1)
embedded = self.dropout(embedded)
attn = cat((embedded[0], hidden[0]), 1)
attn = self.attn(attn)
attn_weights = F.softmax(attn, dim=1)
attn_weights = attn_weights.unsqueeze(0)
encoder_outputs = encoder_outputs.unsqueeze(0)
# 两个tensor的维度必须为3
attn_applied = bmm(attn_weights, encoder_outputs)
output = cat((embedded[0], attn_applied[0]), 1)
output = self.attn_combine(output)
output = output.unsqueeze(0)
output = F.relu(output)
output, hidden = self.gru(output, hidden)
output = self.out(output[0])
output = F.log_softmax(output, dim=1)
return output, hidden, attn_weights
def init_hidden(self):
return zeros(1, 1, self.hidden_size) | [
"torch.nn.functional.softmax",
"torch.nn.Dropout",
"torch.cat",
"torch.nn.Linear",
"torch.nn.functional.relu",
"torch.bmm",
"torch.nn.functional.log_softmax",
"torch.zeros",
"torch.nn.Embedding",
"torch.nn.GRU"
] | [((286, 323), 'torch.nn.Embedding', 'nn.Embedding', (['input_size', 'hidden_size'], {}), '(input_size, hidden_size)\n', (298, 323), False, 'from torch import nn, zeros, cat, bmm\n'), ((343, 375), 'torch.nn.GRU', 'nn.GRU', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (349, 375), False, 'from torch import nn, zeros, cat, bmm\n'), ((596, 625), 'torch.zeros', 'zeros', (['(1)', '(1)', 'self.hidden_size'], {}), '(1, 1, self.hidden_size)\n', (601, 625), False, 'from torch import nn, zeros, cat, bmm\n'), ((972, 1020), 'torch.nn.Embedding', 'nn.Embedding', (['self.output_size', 'self.hidden_size'], {}), '(self.output_size, self.hidden_size)\n', (984, 1020), False, 'from torch import nn, zeros, cat, bmm\n'), ((1041, 1089), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_size * 2)', 'self.max_length'], {}), '(self.hidden_size * 2, self.max_length)\n', (1050, 1089), False, 'from torch import nn, zeros, cat, bmm\n'), ((1118, 1167), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_size * 2)', 'self.hidden_size'], {}), '(self.hidden_size * 2, self.hidden_size)\n', (1127, 1167), False, 'from torch import nn, zeros, cat, bmm\n'), ((1191, 1217), 'torch.nn.Dropout', 'nn.Dropout', (['self.dropout_p'], {}), '(self.dropout_p)\n', (1201, 1217), False, 'from torch import nn, zeros, cat, bmm\n'), ((1237, 1279), 'torch.nn.GRU', 'nn.GRU', (['self.hidden_size', 'self.hidden_size'], {}), '(self.hidden_size, self.hidden_size)\n', (1243, 1279), False, 'from torch import nn, zeros, cat, bmm\n'), ((1299, 1344), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_size', 'self.output_size'], {}), '(self.hidden_size, self.output_size)\n', (1308, 1344), False, 'from torch import nn, zeros, cat, bmm\n'), ((1515, 1547), 'torch.cat', 'cat', (['(embedded[0], hidden[0])', '(1)'], {}), '((embedded[0], hidden[0]), 1)\n', (1518, 1547), False, 'from torch import nn, zeros, cat, bmm\n'), ((1603, 1625), 'torch.nn.functional.softmax', 'F.softmax', (['attn'], {'dim': '(1)'}), '(attn, 
dim=1)\n', (1612, 1625), True, 'import torch.nn.functional as F\n'), ((1779, 1813), 'torch.bmm', 'bmm', (['attn_weights', 'encoder_outputs'], {}), '(attn_weights, encoder_outputs)\n', (1782, 1813), False, 'from torch import nn, zeros, cat, bmm\n'), ((1832, 1870), 'torch.cat', 'cat', (['(embedded[0], attn_applied[0])', '(1)'], {}), '((embedded[0], attn_applied[0]), 1)\n', (1835, 1870), False, 'from torch import nn, zeros, cat, bmm\n'), ((1969, 1983), 'torch.nn.functional.relu', 'F.relu', (['output'], {}), '(output)\n', (1975, 1983), True, 'import torch.nn.functional as F\n'), ((2089, 2117), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['output'], {'dim': '(1)'}), '(output, dim=1)\n', (2102, 2117), True, 'import torch.nn.functional as F\n'), ((2205, 2234), 'torch.zeros', 'zeros', (['(1)', '(1)', 'self.hidden_size'], {}), '(1, 1, self.hidden_size)\n', (2210, 2234), False, 'from torch import nn, zeros, cat, bmm\n')] |
import torch
import numpy as np
import cv2
def tonumpyimg(img):
"""
Convert a normalized tensor image to unnormalized uint8 numpy image
For single channel image, no unnormalization is done.
:param img: torch, normalized, (3, H, W), (H, W)
:return: numpy: (H, W, 3), (H, W). uint8
"""
return touint8(tonumpy(unnormalize_torch(img)))
def tonumpy(img):
"""
Convert torch image map to numpy image map
Note the range is not change
:param img: tensor, shape (C, H, W), (H, W)
:return: numpy, shape (H, W, C), (H, W)
"""
if len(img.size()) == 2:
return img.cpu().detach().numpy()
return img.permute(1, 2, 0).cpu().detach().numpy()
def touint8(img):
"""
Convert float numpy image to uint8 image
:param img: numpy image, float, (0, 1)
:return: uint8 image
"""
img = img * 255
return img.astype(np.uint8)
def normalize_torch(img, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]):
"""
Normalize a torch image.
:param img: (3, H, W), in range (0, 1)
"""
img = img.clone()
img -= torch.tensor(mean).view(3, 1, 1)
img /= torch.tensor(std).view(3, 1, 1)
return img
def unnormalize_torch(img, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]):
"""
Convert a normalized Tensor image to unnormalized form
For single channel image, no normalization is done.
:param img: (C, H, W), (H, W)
"""
if img.size()[0] == 3:
img = img.clone()
img *= torch.Tensor(std).view(3, 1, 1)
img += torch.Tensor(mean).view(3, 1, 1)
return img
def gray2RGB(img_raw):
"""
Convert a gray image to RGB
:param img_raw: (H, W, 3) or (H, W), uint8, numpy
:return: (H, W, 3)
"""
if len(img_raw.shape) == 2:
img_raw = np.repeat(img_raw[:, :, None], 3, axis=2)
if img_raw.shape[2] > 3:
img_raw = img_raw[:, :, :3]
return img_raw
def color_scale(attention):
"""
Visualize a attention map
:param scale_map: (C, H, W), attention map, softmaxed
:return: (3, H, W), colored version
"""
colors = torch.Tensor([
[1, 0, 0], # red
[0, 1, 0], # green
[0, 0, 1], # blue
[0, 0, 0], # black
]).float()
# (H, W)
attention = torch.argmax(attention, dim=0)
# (H, W, C)
color_map = colors[attention]
color_map = color_map.permute(2, 0, 1)
return color_map
def warp_torch(map, H):
"""
Warp a torch image.
:param map: either (C, H, W) or (H, W)
:param H: (3, 3)
:return: warped iamge, (C, H, W) or (H, W)
"""
map = tonumpy(map)
h, w = map.shape[-2:]
map = cv2.warpPerspective(map, H, dsize=(w, h))
return totensor(map)
def torange(array, low, high):
"""
Render an array to value range (low, high)
:param array: any array
:param low, high: the range
:return: new array
"""
min, max = array.min(), array.max()
# normalized to [0, 1]
array = array - min
array = array / (max - min)
# to (low, high)
array = array * (high - low) + low
return array
def tofloat(img):
"""
Convert a uint8 image to float image
:param img: numpy image, uint8
:return: float image
"""
return img.astype(np.float) / 255
def tonumpy_batch(imgs):
"""
Convert a batch of torch images to numpy image map
:param imgs: (B, C, H, W)
:return: (B, H, W, C)
"""
return imgs.permute(0, 2, 3, 1).cpu().detach().numpy()
def totensor(img, device=torch.device('cpu')):
"""
Do the reverse of tonumpy
"""
if len(img.shape) == 2:
return torch.from_numpy(img).to(device).float()
return torch.from_numpy(img).permute(2, 0, 1).to(device).float()
def totensor_batch(imgs, device=torch.device('cpu')):
"""
Do the reverse of tonumpy_batch
"""
return torch.from_numpy(imgs).permute(0, 3, 1, 2).to(device).float()
def RGB2BGR(*imgs):
return [cv2.cvtColor(x, cv2.COLOR_RGB2BGR) for x in imgs]
def unnormalize(img, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]):
"""
Convert a normalized tensor image to unnormalized form
:param img: (B, C, H, W)
"""
img = img.detach().cpu()
img *= torch.tensor(std).view(3, 1, 1)
img += torch.tensor(mean).view(3, 1, 1)
return img
def toUint8RGB(img):
return (tonumpy(unnormalize(img)) * 255.).astype(np.uint8)
| [
"numpy.repeat",
"torch.Tensor",
"torch.argmax",
"torch.from_numpy",
"torch.tensor",
"cv2.warpPerspective",
"cv2.cvtColor",
"torch.device"
] | [((2321, 2351), 'torch.argmax', 'torch.argmax', (['attention'], {'dim': '(0)'}), '(attention, dim=0)\n', (2333, 2351), False, 'import torch\n'), ((2707, 2748), 'cv2.warpPerspective', 'cv2.warpPerspective', (['map', 'H'], {'dsize': '(w, h)'}), '(map, H, dsize=(w, h))\n', (2726, 2748), False, 'import cv2\n'), ((3586, 3605), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3598, 3605), False, 'import torch\n'), ((3841, 3860), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3853, 3860), False, 'import torch\n'), ((1829, 1870), 'numpy.repeat', 'np.repeat', (['img_raw[:, :, None]', '(3)'], {'axis': '(2)'}), '(img_raw[:, :, None], 3, axis=2)\n', (1838, 1870), True, 'import numpy as np\n'), ((4022, 4056), 'cv2.cvtColor', 'cv2.cvtColor', (['x', 'cv2.COLOR_RGB2BGR'], {}), '(x, cv2.COLOR_RGB2BGR)\n', (4034, 4056), False, 'import cv2\n'), ((1117, 1135), 'torch.tensor', 'torch.tensor', (['mean'], {}), '(mean)\n', (1129, 1135), False, 'import torch\n'), ((1161, 1178), 'torch.tensor', 'torch.tensor', (['std'], {}), '(std)\n', (1173, 1178), False, 'import torch\n'), ((2148, 2206), 'torch.Tensor', 'torch.Tensor', (['[[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 0]]'], {}), '([[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 0]])\n', (2160, 2206), False, 'import torch\n'), ((4295, 4312), 'torch.tensor', 'torch.tensor', (['std'], {}), '(std)\n', (4307, 4312), False, 'import torch\n'), ((4338, 4356), 'torch.tensor', 'torch.tensor', (['mean'], {}), '(mean)\n', (4350, 4356), False, 'import torch\n'), ((1530, 1547), 'torch.Tensor', 'torch.Tensor', (['std'], {}), '(std)\n', (1542, 1547), False, 'import torch\n'), ((1577, 1595), 'torch.Tensor', 'torch.Tensor', (['mean'], {}), '(mean)\n', (1589, 1595), False, 'import torch\n'), ((3697, 3718), 'torch.from_numpy', 'torch.from_numpy', (['img'], {}), '(img)\n', (3713, 3718), False, 'import torch\n'), ((3749, 3770), 'torch.from_numpy', 'torch.from_numpy', (['img'], {}), '(img)\n', (3765, 3770), False, 'import 
torch\n'), ((3926, 3948), 'torch.from_numpy', 'torch.from_numpy', (['imgs'], {}), '(imgs)\n', (3942, 3948), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
# *****************************************************************************
# NICOS, the Networked Instrument Control System of the MLZ
# Copyright (c) 2009-2021 by the NICOS contributors (see AUTHORS)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# *****************************************************************************
"""NICOS axis classes."""
import TACOStates # pylint: disable=import-error
from Motor import Motor as TACOMotor # pylint: disable=import-error
from nicos import session
from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, \
anytype, oneof, requires, status, tupleof, usermethod
from nicos.devices.abstract import Axis as AbstractAxis, CanReference
from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, \
SequencerMixin
from nicos.devices.taco.core import TacoDevice
class Axis(CanReference, TacoDevice, AbstractAxis):
    """Interface for TACO Axis server devices.

    Translates between user coordinates (hardware value minus ``offset``)
    and the hardware coordinates of the TACO server, and maps TACO device
    states onto NICOS status values.
    """
    # TACO client class wrapped by the TacoDevice base class.
    taco_class = TACOMotor
    # Extend the generic TACO state mapping: the Axis server reports
    # INIT/RESETTING while a reference drive is in progress and ALARM
    # when the last movement did not reach the requested position.
    _TACO_STATUS_MAPPING = dict(TacoDevice._TACO_STATUS_MAPPING)
    _TACO_STATUS_MAPPING[TACOStates.INIT] = (status.BUSY, 'referencing')
    _TACO_STATUS_MAPPING[TACOStates.RESETTING] = (status.BUSY, 'referencing')
    _TACO_STATUS_MAPPING[TACOStates.ALARM] = (status.NOTREACHED, 'position not reached')
    parameters = {
        # speed and accel are read by doTime() to estimate travel times
        'speed': Param('Motor speed', unit='main/s', settable=True),
        'accel': Param('Motor acceleration', unit='main/s^2',
                       settable=True),
        'refspeed': Param('Speed driving to reference switch', unit='main/s',
                          settable=True),
        'refswitch': Param('Switch to use as reference', type=str,
                           settable=True),
        # refpos is re-applied after a reference drive, see doReference()
        'refpos': Param('Position of the reference switch', unit='main',
                        settable=True),
        # do not call deviceReset by default as it does a reference drive
        'resetcall': Param('What TACO method to call on reset (deviceInit or '
                           'deviceReset)', settable=True, default='deviceInit',
                           type=oneof('deviceInit', 'deviceReset')),
    }
def doStart(self, target):
self._taco_guard(self._dev.start, target + self.offset)
def doRead(self, maxage=0):
return self._taco_guard(self._dev.read) - self.offset
def doTime(self, old_value, target):
s, v, a = abs(old_value - target), self.speed, self.accel
if v <= 0 or a <= 0:
return 0
if s > v**2 / a: # do we reach nominal speed?
return s / v + v / a
return 2 * (s / a)**0.5
def doReset(self):
self._taco_reset(self._dev, self.resetcall)
@usermethod
@requires(level=ADMIN, helpmsg='use adjust() to set a new offset')
def setPosition(self, pos):
"""Sets the current position of the axis to the target.
This operation is forbidden in slave mode, and does the right thing
virtually in simulation mode.
"""
if self._mode == SLAVE:
raise ModeError(self, 'setting new position not possible in '
'slave mode')
elif self._sim_intercept:
self._sim_setValue(pos)
return
self._taco_guard(self._dev.setpos, pos)
# update current value in cache
self.read(0)
def doStop(self):
self._taco_guard(self._dev.stop)
def doReference(self):
"""Do a reference drive of the axis (do not use with encoded axes)."""
self.log.info('referencing the axis, please wait...')
self._taco_guard(self._dev.deviceReset)
while self._taco_guard(self._dev.deviceState) \
in (TACOStates.INIT, TACOStates.RESETTING):
session.delay(0.3)
if self._taco_guard(self._dev.isDeviceOff):
self._taco_guard(self._dev.deviceOn)
if self.read() != self.refpos:
self._taco_guard(self._dev.setpos, self.refpos)
def doReadSpeed(self):
return self._taco_guard(self._dev.speed)
def doWriteSpeed(self, value):
self._taco_guard(self._dev.setSpeed, value)
def doReadDragerror(self):
return float(self._taco_guard(
self._dev.deviceQueryResource, 'dragerror'))
def doWriteDragerror(self, value):
self._taco_update_resource('dragerror', str(value))
def doReadPrecision(self):
return float(self._taco_guard(
self._dev.deviceQueryResource, 'precision'))
def doWritePrecision(self, value):
self._taco_update_resource('precision', str(value))
def doReadMaxtries(self):
return int(self._taco_guard(
self._dev.deviceQueryResource, 'maxtries'))
def doWriteMaxtries(self, value):
self._taco_update_resource('maxtries', str(value))
def doReadLoopdelay(self):
return float(self._taco_guard(
self._dev.deviceQueryResource, 'loopdelay'))
def doWriteLoopdelay(self, value):
self._taco_update_resource('loopdelay', str(value))
def doReadBacklash(self):
return float(self._taco_guard(
self._dev.deviceQueryResource, 'backlash'))
def doWriteBacklash(self, value):
self._taco_update_resource('backlash', str(value))
# resources that need to be set on the motor, not the axis device
def _readMotorParam(self, resource, conv=float):
motorname = self._taco_guard(self._dev.deviceQueryResource, 'motor')
client = TACOMotor(motorname)
return conv(client.deviceQueryResource(resource))
def _writeMotorParam(self, resource, value):
motorname = self._taco_guard(self._dev.deviceQueryResource, 'motor')
client = TACOMotor(motorname)
client.deviceOff()
try:
client.deviceUpdateResource(resource, str(value))
finally:
client.deviceOn()
def doReadAccel(self):
return self._readMotorParam('accel')
def doWriteAccel(self, value):
self._writeMotorParam('accel', value)
def doReadRefspeed(self):
return self._readMotorParam('refspeed')
def doWriteRefspeed(self, value):
self._writeMotorParam('refspeed', value)
def doReadRefswitch(self):
return self._readMotorParam('refswitch', str)
def doWriteRefswitch(self, value):
self._writeMotorParam('refswitch', value)
def doReadRefpos(self):
return self._readMotorParam('refpos')
def doWriteRefpos(self, value):
self._writeMotorParam('refpos', value)
class HoveringAxis(SequencerMixin, Axis):
    """An axis that also controls air for airpads.

    Movement is wrapped in a sequence: switch air on, wait
    ``startdelay``, move, wait ``stopdelay``, switch air off.
    """
    attached_devices = {
        'switch': Attach('The device used for switching air on and off', Moveable),
    }
    parameters = {
        'startdelay': Param('Delay after switching on air', type=float,
                            mandatory=True, unit='s'),
        'stopdelay': Param('Delay before switching off air', type=float,
                           mandatory=True, unit='s'),
        'switchvalues': Param('(off, on) values to write to switch device',
                              type=tupleof(anytype, anytype), default=(0, 1)),
    }
    hardware_access = True
    def _generateSequence(self, target):
        """Build the air-on / move / air-off sequence for one movement."""
        return [
            SeqDev(self._attached_switch, self.switchvalues[1]),
            SeqSleep(self.startdelay),
            SeqCall(Axis.doStart, self, target),
            SeqCall(self._hw_wait),
            SeqSleep(self.stopdelay),
            SeqDev(self._attached_switch, self.switchvalues[0]),
        ]
    def _hw_wait(self):
        # overridden: query Axis status, not HoveringAxis status
        while Axis.doStatus(self, 0)[0] == status.BUSY:
            session.delay(self._base_loop_delay)
    def doStart(self, target):
        """Start a sequenced move; restart cleanly if one is running."""
        if self._seq_is_running():
            self.stop()
            self.log.info('waiting for axis to stop...')
            self.wait()
        # already close enough: skip the whole air on/off cycle
        if abs(target - self.read()) < self.precision:
            return
        self._startSequence(self._generateSequence(target))
    def doStop(self):
        # stop only the axis, but the sequence has to run through
        Axis.doStop(self)
    def doTime(self, old_value, target):
        """Axis travel time plus the air switching delays."""
        return Axis.doTime(
            self, old_value, target) + self.startdelay + self.stopdelay
| [
"nicos.session.delay",
"nicos.core.Param",
"nicos.devices.generic.sequence.SeqDev",
"nicos.core.ModeError",
"nicos.core.Attach",
"nicos.core.tupleof",
"nicos.devices.generic.sequence.SeqSleep",
"Motor.Motor",
"nicos.core.requires",
"nicos.core.oneof",
"nicos.devices.generic.sequence.SeqCall"
] | [((3458, 3523), 'nicos.core.requires', 'requires', ([], {'level': 'ADMIN', 'helpmsg': '"""use adjust() to set a new offset"""'}), "(level=ADMIN, helpmsg='use adjust() to set a new offset')\n", (3466, 3523), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((2071, 2121), 'nicos.core.Param', 'Param', (['"""Motor speed"""'], {'unit': '"""main/s"""', 'settable': '(True)'}), "('Motor speed', unit='main/s', settable=True)\n", (2076, 2121), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((2144, 2203), 'nicos.core.Param', 'Param', (['"""Motor acceleration"""'], {'unit': '"""main/s^2"""', 'settable': '(True)'}), "('Motor acceleration', unit='main/s^2', settable=True)\n", (2149, 2203), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((2253, 2325), 'nicos.core.Param', 'Param', (['"""Speed driving to reference switch"""'], {'unit': '"""main/s"""', 'settable': '(True)'}), "('Speed driving to reference switch', unit='main/s', settable=True)\n", (2258, 2325), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((2375, 2435), 'nicos.core.Param', 'Param', (['"""Switch to use as reference"""'], {'type': 'str', 'settable': '(True)'}), "('Switch to use as reference', type=str, settable=True)\n", (2380, 2435), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((2485, 2554), 'nicos.core.Param', 'Param', (['"""Position of the reference switch"""'], {'unit': '"""main"""', 'settable': '(True)'}), "('Position of the reference switch', unit='main', settable=True)\n", (2490, 2554), False, 'from nicos.core import ADMIN, SLAVE, Attach, 
ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((6253, 6273), 'Motor.Motor', 'TACOMotor', (['motorname'], {}), '(motorname)\n', (6262, 6273), True, 'from Motor import Motor as TACOMotor\n'), ((6476, 6496), 'Motor.Motor', 'TACOMotor', (['motorname'], {}), '(motorname)\n', (6485, 6496), True, 'from Motor import Motor as TACOMotor\n'), ((7445, 7509), 'nicos.core.Attach', 'Attach', (['"""The device used for switching air on and off"""', 'Moveable'], {}), "('The device used for switching air on and off', Moveable)\n", (7451, 7509), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((7561, 7636), 'nicos.core.Param', 'Param', (['"""Delay after switching on air"""'], {'type': 'float', 'mandatory': '(True)', 'unit': '"""s"""'}), "('Delay after switching on air', type=float, mandatory=True, unit='s')\n", (7566, 7636), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((7692, 7769), 'nicos.core.Param', 'Param', (['"""Delay before switching off air"""'], {'type': 'float', 'mandatory': '(True)', 'unit': '"""s"""'}), "('Delay before switching off air', type=float, mandatory=True, unit='s')\n", (7697, 7769), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((3797, 3863), 'nicos.core.ModeError', 'ModeError', (['self', '"""setting new position not possible in slave mode"""'], {}), "(self, 'setting new position not possible in slave mode')\n", (3806, 3863), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((4520, 4538), 'nicos.session.delay', 'session.delay', (['(0.3)'], {}), '(0.3)\n', (4533, 4538), False, 'from nicos import session\n'), ((8061, 8112), 
'nicos.devices.generic.sequence.SeqDev', 'SeqDev', (['self._attached_switch', 'self.switchvalues[1]'], {}), '(self._attached_switch, self.switchvalues[1])\n', (8067, 8112), False, 'from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, SequencerMixin\n'), ((8126, 8151), 'nicos.devices.generic.sequence.SeqSleep', 'SeqSleep', (['self.startdelay'], {}), '(self.startdelay)\n', (8134, 8151), False, 'from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, SequencerMixin\n'), ((8165, 8200), 'nicos.devices.generic.sequence.SeqCall', 'SeqCall', (['Axis.doStart', 'self', 'target'], {}), '(Axis.doStart, self, target)\n', (8172, 8200), False, 'from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, SequencerMixin\n'), ((8214, 8236), 'nicos.devices.generic.sequence.SeqCall', 'SeqCall', (['self._hw_wait'], {}), '(self._hw_wait)\n', (8221, 8236), False, 'from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, SequencerMixin\n'), ((8250, 8274), 'nicos.devices.generic.sequence.SeqSleep', 'SeqSleep', (['self.stopdelay'], {}), '(self.stopdelay)\n', (8258, 8274), False, 'from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, SequencerMixin\n'), ((8288, 8339), 'nicos.devices.generic.sequence.SeqDev', 'SeqDev', (['self._attached_switch', 'self.switchvalues[0]'], {}), '(self._attached_switch, self.switchvalues[0])\n', (8294, 8339), False, 'from nicos.devices.generic.sequence import SeqCall, SeqDev, SeqSleep, SequencerMixin\n'), ((8509, 8545), 'nicos.session.delay', 'session.delay', (['self._base_loop_delay'], {}), '(self._base_loop_delay)\n', (8522, 8545), False, 'from nicos import session\n'), ((2848, 2882), 'nicos.core.oneof', 'oneof', (['"""deviceInit"""', '"""deviceReset"""'], {}), "('deviceInit', 'deviceReset')\n", (2853, 2882), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n'), ((7912, 7937), 'nicos.core.tupleof', 
'tupleof', (['anytype', 'anytype'], {}), '(anytype, anytype)\n', (7919, 7937), False, 'from nicos.core import ADMIN, SLAVE, Attach, ModeError, Moveable, Param, anytype, oneof, requires, status, tupleof, usermethod\n')] |
import subprocess
from os import system, remove, chdir
from tabulate import tabulate
def edges(n):
    """Return the edge list of the n-vertex ring graph.

    The result is ``[[0, n-1], [0, 1], [1, 2], ..., [n-2, n-1]]``:
    the closing edge first, then the consecutive edges along the ring.
    """
    # The original built this with a counter variable and also shadowed
    # the function name with a local list; a comprehension is clearer.
    return [[0, n - 1]] + [[i, i + 1] for i in range(n - 1)]
def cut(state, edges):
    """Score *state* against *edges*: +1 for every edge whose two
    endpoints carry the same character, -1 otherwise."""
    return sum(1 if state[a] == state[b] else -1 for a, b in edges)
# label is used to unambiguously identify a group
def label(state):
    """Cyclic run-length encoding of the bit string *state*.

    Returns the run lengths as a list of strings; the trailing run that
    wraps around from the end of the string back to its start is merged
    into the first entry, e.g. label("0110") == ['2', '2'] and
    label("0000") == ['4'].
    """
    label = []
    value = state[0]
    repetition = 0
    # One extra iteration (i == len(state)) folds the wrap-around run
    # into the leading entry, making labels rotation-friendly.
    for i in range(len(state)+1):
        if state[i%len(state)] == value:
            if i == len(state):
                # uniform string: no run was ever closed, start at '0'
                if len(label) == 0:
                    label.append('0')
                label[0] = str( int(label[0]) + repetition)
            repetition += 1
        else:
            # run ended: record its length and start counting the new one
            label.append(str(repetition))
            repetition = 1
            value = state[i%len(state)]
    return label
def groupsOfStates(n):
    """Group all n-bit strings into symmetry classes.

    Only ``2**(n-1)`` states are enumerated because every state's
    bitwise NOT is put into the same class automatically.  A state joins
    an existing class if some rotation (or reversed rotation) of its
    cyclic run-length label matches that class's key.

    Returns a dict mapping the space-joined label of a representative to
    the list of member states.
    """
    states = {}
    for num in range(2**(n-1)): #n-1 because we automaticaly add also NOT the generated state
        s = bin(num)[2:].zfill(n)
        # bitwise complement of s
        mynot = ''.join('0' if ch == '1' else '1' for ch in s)
        k = label(s)
        used = False
        # NOTE: the original also computed cut(s, edges(n)) here, but the
        # result was never used, so the dead computation was removed.
        for shift in range(n):  # was `i`, which shadowed the outer loop var
            temp = rotate(k, shift)
            k1 = " ".join(temp)
            temp.reverse()
            k2 = " ".join(temp)
            if k1 in states:
                states[k1].append(s)
                states[k1].append(mynot)
                used = True
                break
            elif k2 in states:
                states[k2].append(s)
                states[k2].append(mynot)
                used = True
                break
        if not used:
            # no symmetric class found: open a new one keyed by this label
            k1 = " ".join(k)
            states[k1] = [s, mynot]
    return states
def compare(string1, string2):
    """Return the Hamming distance between the two strings (positions
    are compared only up to the length of the shorter one)."""
    return sum(a != b for a, b in zip(string1, string2))
def rotate(array, x):
    """Cyclically rotate *array* to the right by *x* positions."""
    pivot = -x
    return array[pivot:] + array[:pivot]
#THE MOST IMPORTANT
def amplitudes(n, states):
    """Build the Mathematica amplitude expression for each state group.

    For every group key the amplitude is a sum over all groups/states of
    terms Exp[-c I y] * m * (I Sin[x])^k Cos[x]^(n-k), where c is the
    group's cut value, k the Hamming distance to the representative, and
    m the multiplicity of that (c, k) combination.

    Returns a dict mapping group key -> Mathematica expression string.
    """
    amplitudes = {}
    cuts = {}
    labels = states.keys()
    # precompute the cut value of each group's representative
    for l in labels:
        cuts[l] = cut(states[l][0], edges(n))
    #MAIN FOR LOOP
    for l in labels:
        constants = {} #cut -> (number of sinuses -> how many times repeated) - all we need to construct the amplitude
        #it's enough to compute the amplitude for the representant of the group
        representant = states[l][0]
        #fill the constants dictionary - gather all information needed for amplitude creation
        for group in labels:
            if cuts[group] not in constants.keys():
                constants[ cuts[group] ] = {}
            for state in states[group]:
                numOfSinuses = compare(representant, state)
                if numOfSinuses in constants[ cuts[group] ].keys():
                    constants[ cuts[group] ][ numOfSinuses ] += 1
                else:
                    constants[ cuts[group] ][ numOfSinuses ] = 1
        #assemble the Mathematica expression for l
        amplitude = ""
        for c in constants.keys():
            amplitude += "Exp[" + str(-c) + " I y" + "] ("
            for numOfSin in constants[c].keys():
                m = str( constants[ c ][ numOfSin ] )
                # drop the Cos (or Sin) factor when its exponent is zero
                if numOfSin == n:
                    amplitude += ( m + " (I Sin[x])^" + str(n) )
                elif numOfSin == 0:
                    amplitude += ( m + " Cos[x]^" + str(n) )
                else:
                    amplitude += ( m + " (I Sin[x])^" + str(numOfSin) + " Cos[x]^" + str(n - numOfSin) )
                amplitude += " + "
            amplitude = amplitude[:-3] + ") + "
        amplitudes[l] = amplitude[:-3] #[:-3] to remove the trailing ' + '
    return amplitudes
# Range of qubit counts to generate notebooks for.
start = 3
end = 16
with open('result.txt', 'w') as f:#truncate any previous content
    pass
with open('amplitudes.txt', 'w') as f:#make sure the file exists
    pass
# For each qubit count: build the symmetry groups and their amplitudes,
# write a Mathematica notebook per group, run it (retrying on timeout or
# failure), and keep a "useful" copy of the notebook for later playing.
for numberOfQubits in range(start, end+1):
    states = groupsOfStates(numberOfQubits)
    for l, amplitude in amplitudes(numberOfQubits, states).items():
        print(l)
        #create the result mathematica file so that we can run it
        with open(str(numberOfQubits) + "v" + str(l) + ".nb", "w") as file, open("templates/results.nb", "r") as template:
            file.write ( "$Conjugate[x_] := x /. Complex[a_, b_] :> a - I b;\n" +
                         "function[x_, y_] := $Conjugate[" + amplitude + "]*\n(" + amplitude + ")\n\n" +
                         "amplitude[x_,y_] := " + amplitude + "\n" +
                         "amount = " + str(numberOfQubits) + ";\n" +
                         "name = \"" + str(numberOfQubits) + "v" + l + "\";\n" +
                         "states = " + str(len(states[l])) + ";\n\n"
                       )
            for line in template:
                file.write(line)
            file.write("\nExport[\"images/plots/" + str(numberOfQubits) + "v" + l + ".jpg\", Plot3D[f, {c, 0, n/2},{d, 0, n}, PlotRange -> All]];\n")
            file.write("\nExport[\"images/contour-plots/" + str(numberOfQubits) + "v" + l + " c.jpg\", ContourPlot[function[x, y]/2^amount, {x, 0, n/2}, {y, 0, n/2}, PlotLegends -> Automatic, Contours -> 30, FrameLabel -> {\\[Beta],\\[Gamma]}, FrameTicks ->{Range[0, Pi/2, Pi/8],Range[0, Pi/2, Pi/8]}]];\n")
        #run the mathematica file (retry until it prints "ok"), then remove it
        while True:
            try:
                cpi = subprocess.run(
                    ["math", "-script", str(numberOfQubits) + "v" + l + ".nb"],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=10,
                    check=True
                )
                if "ok" in cpi.stdout.decode().split('\n'):
                    break
                else:
                    print("retrying")
            except (subprocess.TimeoutExpired, subprocess.CalledProcessError) as e:
                print(e)
                print("retrying")
        remove(str(numberOfQubits) + "v" + l + ".nb")
        #regenerate amplitudes.pdf sources (only for small qubit counts)
        if numberOfQubits < 7:
            with open("templates/amplitudes.tex", "r") as template, open("amplitudes/amplitudes.tex", "w") as file, open("amplitudes.txt", "r") as ampl:
                for line in template:
                    file.write(line)
                x = ampl.readlines()
                for line in x:
                    # strip quotes and unescape backslashes for LaTeX
                    line = line.replace("\"","").replace("\\\\","\\").replace("\\\n", "")
                    file.write(line)
                file.write("\\end{document}")
        #create a useful mathematica file - something to play with later
        with open("mathematica-files/" + str(numberOfQubits) + "v" + l + ".nb", "w") as f, open("templates/useful.nb", "r") as template:
            f.write ( "$Conjugate[x_] := x /. Complex[a_, b_] :> a - I b;\n" +
                      "function[x_, y_] := $Conjugate[" + amplitude + "]*\n(" + amplitude + ")\n\n" +
                      "amplitude[x_,y_] := " + amplitude + "\n\n" +
                      "amount = " + str(numberOfQubits) + ";\n" +
                      "name = \"" + str(numberOfQubits) + "v" + l + "\";\n" +
                      "states = " + str(len(states[l])) + ";\n\n"
                    )
            for line in template:
                f.write(line)
            f.write("\nPlot3D[f,{c,0,n},{d,0,n}, PlotRange -> All]\n")
            f.write("\nContourPlot[function[x, y], {x, 0, n}, {y, 0, n}, PlotLegends -> Automatic, Contours -> 30]\n")
#create result.txt: a type -> Pmax table
# Each raw line is split on its last three spaces into
# (name, max probability, beta, gamma) and re-written with tabulate.
with open('result.txt', 'r') as f:
    x = f.readlines()
remove("result.txt")
results = []
for line in x:
    line = line.replace("\"","").strip()
    g = line.rfind(" ")
    b = line[:g].rfind(" ")
    p = line[:b].rfind(" ")
    results.append([line[:p], line[p+1:b], line[b+1:g], line[g+1:]])
with open('result.txt', 'w') as f:
    f.write(tabulate(results, headers=['Name', 'Max Probability', "beta (X)", "gamma (ZZ)"]))
#build amplitudes.pdf via the Makefile in ./amplitudes
chdir('./amplitudes')
system('make')
system('make cleanaux')
remove('../amplitudes.txt') | [
"os.chdir",
"os.system",
"tabulate.tabulate",
"os.remove"
] | [((6435, 6455), 'os.remove', 'remove', (['"""result.txt"""'], {}), "('result.txt')\n", (6441, 6455), False, 'from os import system, remove, chdir\n'), ((6833, 6854), 'os.chdir', 'chdir', (['"""./amplitudes"""'], {}), "('./amplitudes')\n", (6838, 6854), False, 'from os import system, remove, chdir\n'), ((6855, 6869), 'os.system', 'system', (['"""make"""'], {}), "('make')\n", (6861, 6869), False, 'from os import system, remove, chdir\n'), ((6870, 6893), 'os.system', 'system', (['"""make cleanaux"""'], {}), "('make cleanaux')\n", (6876, 6893), False, 'from os import system, remove, chdir\n'), ((6894, 6921), 'os.remove', 'remove', (['"""../amplitudes.txt"""'], {}), "('../amplitudes.txt')\n", (6900, 6921), False, 'from os import system, remove, chdir\n'), ((6704, 6789), 'tabulate.tabulate', 'tabulate', (['results'], {'headers': "['Name', 'Max Probability', 'beta (X)', 'gamma (ZZ)']"}), "(results, headers=['Name', 'Max Probability', 'beta (X)', 'gamma (ZZ)']\n )\n", (6712, 6789), False, 'from tabulate import tabulate\n')] |
from pvapy import Channel, CA, PvTimeStamp, PvAlarm
# Exercise puts of alarm/timeStamp sub-structures on the 'DBRdouble'
# record: first over the default (pvAccess) provider, then over CA.
print('DBRdouble')
channel = Channel('DBRdouble')
timestamp = PvTimeStamp(10, 100)
alarm = PvAlarm(1,1,"mess")
print(channel.get('value'))
print('here 1')
# process=false: write the alarm field without processing the record
channel.put(alarm,'record[process=false]field(alarm)')
print('here 2')
print(channel.get('value'))
channel.put(timestamp,'record[process=false]field(timeStamp)')
print(channel.get('value'))
print('here 3')
print('DBRdouble CA')
# Repeat the same sequence using the Channel Access provider.
channel = Channel('DBRdouble',CA)
print(channel.get('value'))
print('here 4')
channel.put(alarm,'record[process=false]field(alarm)')
print('here 5')
print(channel.get('value'))
channel.put(timestamp,'record[process=false]field(timeStamp)')
print(channel.get('value'))
print('here 6')
| [
"pvapy.Channel",
"pvapy.PvAlarm",
"pvapy.PvTimeStamp"
] | [((81, 101), 'pvapy.Channel', 'Channel', (['"""DBRdouble"""'], {}), "('DBRdouble')\n", (88, 101), False, 'from pvapy import Channel, CA, PvTimeStamp, PvAlarm\n'), ((114, 134), 'pvapy.PvTimeStamp', 'PvTimeStamp', (['(10)', '(100)'], {}), '(10, 100)\n', (125, 134), False, 'from pvapy import Channel, CA, PvTimeStamp, PvAlarm\n'), ((143, 164), 'pvapy.PvAlarm', 'PvAlarm', (['(1)', '(1)', '"""mess"""'], {}), "(1, 1, 'mess')\n", (150, 164), False, 'from pvapy import Channel, CA, PvTimeStamp, PvAlarm\n'), ((446, 470), 'pvapy.Channel', 'Channel', (['"""DBRdouble"""', 'CA'], {}), "('DBRdouble', CA)\n", (453, 470), False, 'from pvapy import Channel, CA, PvTimeStamp, PvAlarm\n')] |
from pwn import *
import hashlib
# Connect to the CTF challenge service.
r=remote("172.16.17.32",10122)
## pow
# Proof-of-work: brute-force i so that sha256(prefix + str(i)) starts
# with five hex zeros.
temp=r.recvuntil("sha256( ")
prefix=r.recvline().split()[0]
i=0
while True:
    data=prefix+str(i)
    # NOTE(review): hashlib.sha256() requires bytes on Python 3; this
    # bytes+str concatenation implies the script targets Python 2.
    Hash=hashlib.sha256(data)
    if Hash.hexdigest()[:5]=="0"*5:
        r.sendline(str(i))
        break
    i+=1
## get flag
# Scripted answers to the remote menu; presumably these select
# nonsecret.pyc and end with running `cat flag` -- cannot be confirmed
# without the service.
r.sendline("0")
r.sendline("system")
r.sendline("1")
r.sendline("nonsecret.pyc")
r.sendline("b013")
r.sendline("92")
r.sendline("3")
r.sendline("cat flag")
r.interactive()
| [
"hashlib.sha256"
] | [((177, 197), 'hashlib.sha256', 'hashlib.sha256', (['data'], {}), '(data)\n', (191, 197), False, 'import hashlib\n')] |
#
# Author: <EMAIL>
# Date: 01/25/2019
#
""" Utils for training and optimization
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import utils
# Module-level logger shared by the helpers in this file.
logger=utils.get_logger()
import numpy as np
import torch
from bert.optimization import BertAdam
def zero_grad(model, optimizer_param):
  """Clear all gradients: zero the model's own grads and drop the grad
  tensors held by the (name, parameter) pairs in *optimizer_param*."""
  model.zero_grad()
  for _name, param in optimizer_param:
    param.grad = None
def dump_parameter_names(model, path):
  """Write the names of all model parameters to *path*, one per line."""
  names = [name for name, _param in model.named_parameters()]
  with open(path, 'w', encoding='utf8') as out:
    out.write('\n'.join(names) + '\n')
def copy_optimizer_params_to_model(named_params_model, named_params_optimizer):
  """Utility for optimize_on_cpu / 16-bit training.

  Copy the parameter values optimized on CPU/RAM back into the model's
  parameters on GPU.  Both arguments are sequences of (name, parameter)
  pairs in matching order; a name mismatch raises ValueError.
  """
  pairs = zip(named_params_optimizer, named_params_model)
  for (opti_name, opti_param), (model_name, model_param) in pairs:
    if opti_name != model_name:
      logger.error("name_opti != name_model: {} {}".format(opti_name, model_name))
      raise ValueError
    model_param.data.copy_(opti_param.data)
def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False):
  """Utility for optimize_on_cpu / 16-bit training.

  Copy the gradients of the GPU model parameters into the CPU/RAM
  parameter copies used by the optimizer.  When *test_nan* is set,
  report (via the return value) whether any gradient norm was NaN/Inf.
  A name mismatch between the two sequences raises ValueError.
  """
  is_nan = False
  pairs = zip(named_params_optimizer, named_params_model)
  for (opti_name, opti_param), (model_name, model_param) in pairs:
    if opti_name != model_name:
      logger.error("name_opti != name_model: {} {}".format(opti_name, model_name))
      raise ValueError
    if model_param.grad is None:
      # no gradient on the model side: clear the optimizer copy too
      opti_param.grad = None
      continue
    norm = model_param.grad.norm()
    if test_nan and (torch.isnan(norm) or torch.isinf(norm)):
      is_nan = True
    if opti_param.grad is None:
      # lazily allocate a grad tensor of the right size on first use
      opti_param.grad = torch.nn.Parameter(
          opti_param.data.new().resize_(*opti_param.data.size()))
    opti_param.grad.data.copy_(model_param.grad.data)
  return is_nan
return is_nan
def create_optimizer(model, args, num_train_steps=None, init_spec=None, no_decay=['bias', 'LayerNorm.weight']):
# Prepare optimizer
if args.fp16:
dcnt = torch.cuda.device_count()
if args.no_even_grad:
param_optimizer = [(n, param.detach().clone().type(torch.cuda.FloatTensor).\
requires_grad_()) for i,(n,param) in enumerate(model.named_parameters())]
else:
total_size = sum(np.prod(p.size()) for p in model.parameters())
quota={i:0 for i in range(dcnt)}
quota[0]=total_size//(dcnt*2)
param_optimizer = []
for i,(n, param) in enumerate(model.named_parameters()):
ps = np.prod(param.size())
index = list(sorted(quota.items(), key=lambda x: x[1]))[0][0]
quota[index]+=ps
cp = param.clone().type(torch.cuda.FloatTensor).detach().to('cuda:{}'.format(index)).requires_grad_()
param_optimizer += [(n, cp)]
elif args.optimize_on_cpu:
param_optimizer = [(n, param.clone().detach().to('cpu').requires_grad_()) \
for n, param in model.named_parameters()]
else:
param_optimizer = [(n,p) for n,p in model.named_parameters()]
group0=dict(params=[],
weight_decay_rate=args.weight_decay,
names=[])
group1=dict(params=[],
weight_decay_rate=0.00,
names=[])
for (n,p) in param_optimizer:
if not any(nd in n for nd in no_decay):
group0['params'].append(p)
group0['names'].append(n)
else:
group1['params'].append(p)
group1['names'].append(n)
optimizer_grouped_parameters = [group0, group1]
t_total = num_train_steps
optimizer=None
if t_total:
if args.local_rank != -1:
t_total = t_total // torch.distributed.get_world_size()
optimizer = BertAdam(optimizer_grouped_parameters,
lr=args.learning_rate,
b1=args.adam_beta1,
b2=args.adam_beta2,
v1=args.qhadam_v1,
v2=args.qhadam_v2,
lr_ends=args.lr_schedule_ends,
e=args.epsilon,
warmup=args.warmup_proportion if args.warmup_proportion<1 else args.warmup_proportion/t_total,
t_total=t_total,
schedule=args.lr_schedule,
max_grad_norm=args.max_grad_norm,
global_grad_norm=args.global_grad_norm,
init_spec = init_spec,
weight_decay_rate = args.weight_decay)
return optimizer, param_optimizer, t_total
| [
"utils.get_logger",
"torch.cuda.device_count",
"bert.optimization.BertAdam",
"torch.isinf",
"torch.isnan",
"torch.distributed.get_world_size"
] | [((216, 234), 'utils.get_logger', 'utils.get_logger', ([], {}), '()\n', (232, 234), False, 'import utils\n'), ((2168, 2193), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (2191, 2193), False, 'import torch\n'), ((3734, 4223), 'bert.optimization.BertAdam', 'BertAdam', (['optimizer_grouped_parameters'], {'lr': 'args.learning_rate', 'b1': 'args.adam_beta1', 'b2': 'args.adam_beta2', 'v1': 'args.qhadam_v1', 'v2': 'args.qhadam_v2', 'lr_ends': 'args.lr_schedule_ends', 'e': 'args.epsilon', 'warmup': '(args.warmup_proportion if args.warmup_proportion < 1 else args.\n warmup_proportion / t_total)', 't_total': 't_total', 'schedule': 'args.lr_schedule', 'max_grad_norm': 'args.max_grad_norm', 'global_grad_norm': 'args.global_grad_norm', 'init_spec': 'init_spec', 'weight_decay_rate': 'args.weight_decay'}), '(optimizer_grouped_parameters, lr=args.learning_rate, b1=args.\n adam_beta1, b2=args.adam_beta2, v1=args.qhadam_v1, v2=args.qhadam_v2,\n lr_ends=args.lr_schedule_ends, e=args.epsilon, warmup=args.\n warmup_proportion if args.warmup_proportion < 1 else args.\n warmup_proportion / t_total, t_total=t_total, schedule=args.lr_schedule,\n max_grad_norm=args.max_grad_norm, global_grad_norm=args.\n global_grad_norm, init_spec=init_spec, weight_decay_rate=args.weight_decay)\n', (3742, 4223), False, 'from bert.optimization import BertAdam\n'), ((3683, 3717), 'torch.distributed.get_world_size', 'torch.distributed.get_world_size', ([], {}), '()\n', (3715, 3717), False, 'import torch\n'), ((1697, 1714), 'torch.isnan', 'torch.isnan', (['norm'], {}), '(norm)\n', (1708, 1714), False, 'import torch\n'), ((1718, 1735), 'torch.isinf', 'torch.isinf', (['norm'], {}), '(norm)\n', (1729, 1735), False, 'import torch\n')] |
# Fall back to distutils when setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
import mineid
import pathlib
# Directory containing this setup.py; used to locate README.md.
HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
# NOTE(review): no `packages=`/`py_modules=` argument is given --
# confirm the built distribution actually ships the package code.
setup(
    name=mineid.__name__,
    version=mineid.__version__,
    description="A small Python library for getting Minecraft UUIDs.",
    long_description=README,
    long_description_content_type="text/markdown",
    package_dir={"": "mineid"},
    url="https://github.com/SomeHybrid/mineid",
    classifiers=[
        "Framework :: AsyncIO",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Topic :: Internet :: WWW/HTTP"
    ]
)
| [
"pathlib.Path",
"distutils.core.setup"
] | [((205, 938), 'distutils.core.setup', 'setup', ([], {'name': 'mineid.__name__', 'version': 'mineid.__version__', 'description': '"""A small Python library for getting Minecraft UUIDs."""', 'long_description': 'README', 'long_description_content_type': '"""text/markdown"""', 'package_dir': "{'': 'mineid'}", 'url': '"""https://github.com/SomeHybrid/mineid"""', 'classifiers': "['Framework :: AsyncIO', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License', 'Natural Language :: English',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: 3.11', 'Topic :: Internet :: WWW/HTTP']"}), "(name=mineid.__name__, version=mineid.__version__, description=\n 'A small Python library for getting Minecraft UUIDs.', long_description\n =README, long_description_content_type='text/markdown', package_dir={'':\n 'mineid'}, url='https://github.com/SomeHybrid/mineid', classifiers=[\n 'Framework :: AsyncIO', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License', 'Natural Language :: English',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: 3.10',\n 'Programming Language :: Python :: 3.11', 'Topic :: Internet :: WWW/HTTP'])\n", (210, 938), False, 'from distutils.core import setup\n'), ((132, 154), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (144, 154), False, 'import pathlib\n')] |
from django.db import models
# Weibo (microblog) post model
class vvebo(models.Model):
    """A crawled Weibo post matched by a search keyword."""
    id = models.IntegerField(primary_key=True)
    keyword = models.TextField(max_length=1000, default="")    # search keyword that matched this post
    user_id = models.TextField(max_length=1000, default="")
    user_name = models.TextField(max_length=1000, default="")
    time = models.CharField(max_length=1000, default="")       # post time (stored as text)
    comment = models.TextField(max_length=1000, default="")    # post content
    # Counters; identifier meanings: shoucang=favorites, zhuanfa=reposts,
    # pinglun=comments, dianzan=likes.
    shoucang = models.IntegerField(default=0)
    zhuanfa = models.IntegerField(default=0)
    pinglun = models.IntegerField(default=0)
    dianzan = models.IntegerField(default=0)
    device = models.TextField(max_length=1000, default="")     # posting client/device string
    url = models.TextField(max_length=1000, default="")        # post URL
# Zhihu (Q&A site) answer model
class zhihu(models.Model):
    """A crawled Zhihu answer matched by a search keyword."""
    keyword = models.TextField(max_length=100, default="")         # search keyword that matched
    question_id = models.CharField(max_length=20, default="")
    question_name = models.TextField(max_length=100, default="")   # question title
    answer_id = models.CharField(max_length=20, default="")
    comment = models.TextField(max_length=30000, default="")       # answer body
    time = models.CharField(max_length=20, default="")             # answer time (stored as text)
    voteup_count = models.IntegerField(default=0)                  # number of upvotes
    user_name = models.CharField(max_length=20, default="")
# WeChat (official account article) model
class wechat(models.Model):
    """A crawled WeChat official-account article matched by a keyword."""
    id = models.IntegerField(primary_key=True)
    keyword = models.TextField(max_length=1000, default="")          # search keyword that matched
    article_title = models.TextField(max_length=1000, default="")
    article_url = models.TextField(max_length=1000, default="")
    article_imgs = models.TextField(max_length=1000, default="")    # article image URLs
    comment = models.TextField(max_length=1000, default="")         # article summary/content
    time = models.TextField(max_length=1000, default="")            # publish time (stored as text)
    # gzh = "gongzhonghao" (official account) metadata
    gzh_profile_url = models.TextField(max_length=1000, default="")
    gzh_headimage = models.TextField(max_length=1000, default="")
    user_name = models.TextField(max_length=1000, default="")
    gzh_isv = models.IntegerField(default=0)                        # verified-account flag
| [
"django.db.models.CharField",
"django.db.models.TextField",
"django.db.models.IntegerField"
] | [((76, 113), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (95, 113), False, 'from django.db import models\n'), ((128, 173), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (144, 173), False, 'from django.db import models\n'), ((188, 233), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (204, 233), False, 'from django.db import models\n'), ((250, 295), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (266, 295), False, 'from django.db import models\n'), ((307, 352), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (323, 352), False, 'from django.db import models\n'), ((367, 412), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (383, 412), False, 'from django.db import models\n'), ((428, 458), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (447, 458), False, 'from django.db import models\n'), ((473, 503), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (492, 503), False, 'from django.db import models\n'), ((518, 548), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (537, 548), False, 'from django.db import models\n'), ((563, 593), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (582, 593), False, 'from django.db import models\n'), ((607, 652), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), 
"(max_length=1000, default='')\n", (623, 652), False, 'from django.db import models\n'), ((663, 708), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (679, 708), False, 'from django.db import models\n'), ((764, 808), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (780, 808), False, 'from django.db import models\n'), ((827, 870), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'default': '""""""'}), "(max_length=20, default='')\n", (843, 870), False, 'from django.db import models\n'), ((891, 935), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(100)', 'default': '""""""'}), "(max_length=100, default='')\n", (907, 935), False, 'from django.db import models\n'), ((952, 995), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'default': '""""""'}), "(max_length=20, default='')\n", (968, 995), False, 'from django.db import models\n'), ((1010, 1056), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(30000)', 'default': '""""""'}), "(max_length=30000, default='')\n", (1026, 1056), False, 'from django.db import models\n'), ((1068, 1111), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'default': '""""""'}), "(max_length=20, default='')\n", (1084, 1111), False, 'from django.db import models\n'), ((1131, 1161), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1150, 1161), False, 'from django.db import models\n'), ((1178, 1221), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'default': '""""""'}), "(max_length=20, default='')\n", (1194, 1221), False, 'from django.db import models\n'), ((1272, 1309), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)'}), 
'(primary_key=True)\n', (1291, 1309), False, 'from django.db import models\n'), ((1324, 1369), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1340, 1369), False, 'from django.db import models\n'), ((1390, 1435), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1406, 1435), False, 'from django.db import models\n'), ((1454, 1499), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1470, 1499), False, 'from django.db import models\n'), ((1519, 1564), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1535, 1564), False, 'from django.db import models\n'), ((1579, 1624), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1595, 1624), False, 'from django.db import models\n'), ((1636, 1681), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1652, 1681), False, 'from django.db import models\n'), ((1705, 1750), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1721, 1750), False, 'from django.db import models\n'), ((1771, 1816), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1787, 1816), False, 'from django.db import models\n'), ((1833, 1878), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(1000)', 'default': '""""""'}), "(max_length=1000, default='')\n", (1849, 1878), False, 'from django.db import models\n'), ((1893, 1923), 
'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1912, 1923), False, 'from django.db import models\n')] |
import pandas as pd
import matplotlib.pyplot as plt
from data import games
plays = games[games['type']=='play']
plays.columns= ['type','inning','team', 'player', 'count','pitches','event', 'game_id', 'year']
#print (plays)
hits = plays.loc[plays['event'].str.contains('^(?:S(?!B)|D|T|HR)'), ['inning','event']]
#print(hits)
#plays.columns = ['inning', 'event']
#attendance.loc[:, 'attendance']= pd.to_numeric(attendance.loc[:, 'attendance'])
hits.loc[:, 'inning']= pd.to_numeric(hits.loc[:, 'inning'])
print (hits)
replacements= {r'^S(.*)': 'single', r'^D(.*)': 'double', r'^T(.*)': 'triple', r'^HR(.*)': 'hr'}
#this is just an array, with now converted 'event' called hit_type
hit_type= hits['event'].replace(replacements, regex=True)
#print(hit_type)
#add hit_type into hits matrix,
#now we have ['inning', 'event','hit_type']
hits= hits.assign(hit_type=hit_type)
#print (hits)
'''
In one line of code, group the hits DataFrame by inning and hit_type,
call size() to count the number of hits per inning,
and then reset the index of the resulting DataFrame.
'''
hits = hits.groupby(['inning','hit_type']).size().reset_index(name= 'count')
#how does it know the reset_index is the size()?
#hits = hits.reset_index(name= 'count')
#print (hits)
hits['hit_type']= pd.Categorical(hits['hit_type'], ['single', 'double', 'triple', 'hr'])
#sort_values need parameter 'by=[column1, column2, ...]', 'by=' is optional
hits= hits.sort_values(['inning','hit_type'])
#print (hits)
hits= hits.pivot(index='inning', columns='hit_type',values='count')
#print (hits)
hits.plot.bar(stacked= True)
plt.show()
| [
"pandas.to_numeric",
"matplotlib.pyplot.show",
"pandas.Categorical"
] | [((470, 506), 'pandas.to_numeric', 'pd.to_numeric', (["hits.loc[:, 'inning']"], {}), "(hits.loc[:, 'inning'])\n", (483, 506), True, 'import pandas as pd\n'), ((1277, 1347), 'pandas.Categorical', 'pd.Categorical', (["hits['hit_type']", "['single', 'double', 'triple', 'hr']"], {}), "(hits['hit_type'], ['single', 'double', 'triple', 'hr'])\n", (1291, 1347), True, 'import pandas as pd\n'), ((1599, 1609), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1607, 1609), True, 'import matplotlib.pyplot as plt\n')] |
# import necessary libraries
from flask import Flask, render_template, redirect
from flask_pymongo import PyMongo
import scrape_marsdata
from pymongo import MongoClient
# create instance of Flask app
app = Flask(__name__)
# Use flask_pymongo to set up mongo connection
# conn = "mongodb://localhost:27017"
# client = pymongo.MongoClient(conn)
# db = client.mars
app.config['MONGO_URI']= "mongodb://localhost:27017/mars_app"
mongo=PyMongo(app)
# create route that renders index.html template and finds documents from mongo
@app.route("/")
def index():
# Find data
mars_info = mongo.db.mars.find_one()
# return template and data
return render_template("index.html", mars_info=mars_info)
# Route that will trigger scrape functions
@app.route("/scrape")
def scrape():
mars_data= scrape_marsdata.scrape()
mars_info={
"title":mars_data["news_title"],
"paragraph":mars_data["news_p"],
"feature_img_url":mars_data["feature_img_url"],
"weather":mars_data["mars_weather"],
"facts":mars_data["mars_facts"],
"hemisphere_imgs":mars_data["hemisphere_imgs"],
}
# Insert forecast into database
mongo.db.mars.insert_one(mars_info)
#redirect back to home page
return index()
if __name__ == "__main__":
app.run(debug=True)
# app.jinja_env.auto_reload = True
# app.config['TEMPLATES_AUTO_RELOAD'] = True
| [
"flask.render_template",
"flask_pymongo.PyMongo",
"scrape_marsdata.scrape",
"flask.Flask"
] | [((207, 222), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (212, 222), False, 'from flask import Flask, render_template, redirect\n'), ((433, 445), 'flask_pymongo.PyMongo', 'PyMongo', (['app'], {}), '(app)\n', (440, 445), False, 'from flask_pymongo import PyMongo\n'), ((658, 708), 'flask.render_template', 'render_template', (['"""index.html"""'], {'mars_info': 'mars_info'}), "('index.html', mars_info=mars_info)\n", (673, 708), False, 'from flask import Flask, render_template, redirect\n'), ((806, 830), 'scrape_marsdata.scrape', 'scrape_marsdata.scrape', ([], {}), '()\n', (828, 830), False, 'import scrape_marsdata\n')] |
#appModules/msimn.py - Outlook Express appModule
#A part of NonVisual Desktop Access (NVDA)
#Copyright (C) 2006-2012 NVDA Contributors
#This file is covered by the GNU General Public License.
#See the file COPYING for more details.
import winUser
import controlTypes
import displayModel
import textInfos
import api
import appModuleHandler
from keyboardHandler import KeyboardInputGesture
from NVDAObjects.window import Window
from NVDAObjects.IAccessible import IAccessible, sysListView32
import watchdog
from NVDAObjects.behaviors import _FakeTableCell
messageListImageLabels={
# Translators: This Outlook Express message has an attachment
8:_("has attachment"),
# Translators: this Outlook Express message is flagged
34:_("flagged"),
}
#Labels for the header fields of an email, by control ID
envelopeNames={
# Translators: This is presented in outlook or live mail to indicate email attachments.
1000:_("Attachments"),
# Translators: This is presented in outlook or live mail when creating a new email 'to:' or 'recipient:'
1001:_("To:"),
# Translators: This is presented in outlook or live mail when sending an email to a newsgroup
1002:_("Newsgroup:"),
# Translators: This is presented in outlook or live mail, email carbon copy
1003:_("CC:"),
# Translators: This is presented in outlook or live mail, email subject
1004:_("Subject:"),
# Translators: This is presented in outlook or live mail, email sender
1005:_("From:"),
# Translators: This is presented in outlook or live mail, date of email
1016:_("Date:"),
# Translators: This is presented in outlook or live mail
1018:_("Forward to:"),
# Translators: This is presented in outlook or live mail
1019:_("Answer to:"),
# Translators: This is presented in outlook or live mail
1020:_("Organisation:"),
# Translators: This is presented in outlook or live mail
1021:_("Distribution:"),
# Translators: This is presented in outlook or live mail
1022:_("Key words:"),
# Translators: This is presented in outlook or live mail, email blind carbon copy
1026:_("BCC:"),
# Translators: This is presented in outlook or live mail, email sender
1037:_("From:"),
}
class AppModule(appModuleHandler.AppModule):
def event_NVDAObject_init(self,obj):
if not isinstance(obj,Window): return
controlID=obj.windowControlID
windowHandle=obj.windowHandle
parentWindow=winUser.getAncestor(windowHandle,winUser.GA_PARENT)
parentClassName=winUser.getClassName(parentWindow)
#If this object is an email header field, and we have a custom label for it,
#Then set the object's name to the label
if parentClassName=="OE_Envelope" and isinstance(obj,IAccessible) and obj.IAccessibleChildID==0 and controlID in envelopeNames:
obj.name=envelopeNames[controlID]
obj.useITextDocumentSupport=True
obj.editValueUnit=textInfos.UNIT_STORY
def chooseNVDAObjectOverlayClasses(self,obj,clsList):
if obj.windowClassName=="SysListView32" and obj.windowControlID in (128,129,130) and obj.role==controlTypes.ROLE_LISTITEM:
clsList.insert(0,MessageRuleListItem)
elif "SysListView32" in obj.windowClassName and obj.role==controlTypes.ROLE_LISTITEM and obj.parent.name=="Outlook Express Message List":
clsList.insert(0,MessageListItem)
def event_gainFocus(self,obj,nextHandler):
nextHandler()
#Force focus to move to something sane when landing on an outlook express message window
if obj.windowClassName=="ATH_Note" and obj.event_objectID==winUser.OBJID_CLIENT and obj.IAccessibleChildID==0:
api.processPendingEvents()
if obj==api.getFocusObject() and controlTypes.STATE_FOCUSED in obj.states:
return KeyboardInputGesture.fromName("shift+tab").send()
class MessageRuleListItem(sysListView32.ListItem):
"""Used for the checkbox list items used to select message rule types in in message filters"""
role=controlTypes.ROLE_CHECKBOX
def _get_states(self):
states=super(MessageRuleListItem,self).states
if (watchdog.cancellableSendMessage(self.windowHandle,sysListView32.LVM_GETITEMSTATE,self.IAccessibleChildID-1,sysListView32.LVIS_STATEIMAGEMASK)>>12)==8:
states.add(controlTypes.STATE_CHECKED)
return states
class MessageListItem(sysListView32.ListItem):
def _getColumnContent(self,column):
content=super(MessageListItem,self)._getColumnContent(column)
if not content:
imageID=self._getColumnImageID(column)
if imageID>0:
content=messageListImageLabels.get(imageID,"")
return content
def _get_isUnread(self):
info=displayModel.DisplayModelTextInfo(self,textInfos.POSITION_FIRST)
info.expand(textInfos.UNIT_CHARACTER)
fields=info.getTextWithFields()
try:
isUnread=fields[0].field['bold']
except:
isUnread=False
return isUnread
def _get_name(self):
nameList=[]
imageState=watchdog.cancellableSendMessage(self.windowHandle,sysListView32.LVM_GETITEMSTATE,self.IAccessibleChildID-1,sysListView32.LVIS_STATEIMAGEMASK)>>12
if imageState==5:
nameList.append(controlTypes.stateLabels[controlTypes.STATE_COLLAPSED])
elif imageState==6:
nameList.append(controlTypes.stateLabels[controlTypes.STATE_EXPANDED])
if self.isUnread:
# Translators: Displayed in outlook or live mail to indicate an email is unread
nameList.append(_("unread"))
name=super(MessageListItem,self).name
if name:
nameList.append(name)
return " ".join(nameList)
| [
"displayModel.DisplayModelTextInfo",
"api.getFocusObject",
"keyboardHandler.KeyboardInputGesture.fromName",
"api.processPendingEvents",
"winUser.getClassName",
"watchdog.cancellableSendMessage",
"winUser.getAncestor"
] | [((2347, 2399), 'winUser.getAncestor', 'winUser.getAncestor', (['windowHandle', 'winUser.GA_PARENT'], {}), '(windowHandle, winUser.GA_PARENT)\n', (2366, 2399), False, 'import winUser\n'), ((2417, 2451), 'winUser.getClassName', 'winUser.getClassName', (['parentWindow'], {}), '(parentWindow)\n', (2437, 2451), False, 'import winUser\n'), ((4452, 4517), 'displayModel.DisplayModelTextInfo', 'displayModel.DisplayModelTextInfo', (['self', 'textInfos.POSITION_FIRST'], {}), '(self, textInfos.POSITION_FIRST)\n', (4485, 4517), False, 'import displayModel\n'), ((3487, 3513), 'api.processPendingEvents', 'api.processPendingEvents', ([], {}), '()\n', (3511, 3513), False, 'import api\n'), ((4730, 4886), 'watchdog.cancellableSendMessage', 'watchdog.cancellableSendMessage', (['self.windowHandle', 'sysListView32.LVM_GETITEMSTATE', '(self.IAccessibleChildID - 1)', 'sysListView32.LVIS_STATEIMAGEMASK'], {}), '(self.windowHandle, sysListView32.\n LVM_GETITEMSTATE, self.IAccessibleChildID - 1, sysListView32.\n LVIS_STATEIMAGEMASK)\n', (4761, 4886), False, 'import watchdog\n'), ((3914, 4070), 'watchdog.cancellableSendMessage', 'watchdog.cancellableSendMessage', (['self.windowHandle', 'sysListView32.LVM_GETITEMSTATE', '(self.IAccessibleChildID - 1)', 'sysListView32.LVIS_STATEIMAGEMASK'], {}), '(self.windowHandle, sysListView32.\n LVM_GETITEMSTATE, self.IAccessibleChildID - 1, sysListView32.\n LVIS_STATEIMAGEMASK)\n', (3945, 4070), False, 'import watchdog\n'), ((3525, 3545), 'api.getFocusObject', 'api.getFocusObject', ([], {}), '()\n', (3543, 3545), False, 'import api\n'), ((3603, 3645), 'keyboardHandler.KeyboardInputGesture.fromName', 'KeyboardInputGesture.fromName', (['"""shift+tab"""'], {}), "('shift+tab')\n", (3632, 3645), False, 'from keyboardHandler import KeyboardInputGesture\n')] |
""" 2019 Day 2 Solver"""
import os
import sys
from typing import Tuple, List
from multiprocessing import Manager, Process
from multiprocessing.managers import ValueProxy
INPUT_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "input.txt")
def solve():
""" Solve https://adventofcode.com/2019/day/2"""
print(f'Solving AoC Day 2: https://adventofcode.com/2019/day/2')
print("Generating map")
value_map = [(x, y) for x in range(0, 100) for y in range(0, 100)]
data = [int(module) for module in open(INPUT_PATH, 'r').read().split(',')]
finish = Manager().Value(bool, False)
procs = []
for tile in value_map:
procs.append(Process(target=run_intcode, args=(data, tile, finish)))
print(f'Starting {len(procs)} subprocesses')
for proc in procs:
if not finish.get():
proc.start()
proc.join()
sys.exit()
def run_intcode(data: List[int], tile: Tuple[int, int], finish: ValueProxy):
""" This runs intcode base on a data list a tile from the value map
:param data
:returns void
"""
initial_noun, initial_verb = tile
data[1] = initial_noun
data[2] = initial_verb
for i in range(0, len(data) - 1, 4):
if finish.get():
sys.exit()
opcode, noun, verb, out = data[i:i + 4]
if opcode == 1:
data[out] = data[noun] + data[verb]
elif opcode == 2:
data[out] = data[noun] * data[verb]
elif opcode == 99:
if data[0] == 19690720:
print(f'Result: {100 * initial_noun + initial_verb}')
finish.set(True)
sys.exit()
| [
"os.path.realpath",
"multiprocessing.Process",
"multiprocessing.Manager",
"sys.exit"
] | [((889, 899), 'sys.exit', 'sys.exit', ([], {}), '()\n', (897, 899), False, 'import sys\n'), ((213, 239), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (229, 239), False, 'import os\n'), ((585, 594), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (592, 594), False, 'from multiprocessing import Manager, Process\n'), ((677, 731), 'multiprocessing.Process', 'Process', ([], {'target': 'run_intcode', 'args': '(data, tile, finish)'}), '(target=run_intcode, args=(data, tile, finish))\n', (684, 731), False, 'from multiprocessing import Manager, Process\n'), ((1273, 1283), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1281, 1283), False, 'import sys\n'), ((1661, 1671), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1669, 1671), False, 'import sys\n')] |
import itertools
import pathlib
from pathlib import Path
from typing import Union
import gdspy
from gdsfactory.component import Component
from gdsfactory.import_gds import import_gds
COUNTER = itertools.count()
def xor_polygons(A: Component, B: Component, hash_geometry: bool = True):
"""Given two devices A and B, performs a layer-by-layer XOR diff between
A and B, and returns polygons representing the differences between A and B.
Adapted from lytest/kdb_xor.py
"""
# first do a geometry hash to vastly speed up if they are equal
if hash_geometry and (A.hash_geometry() == B.hash_geometry()):
return Component()
D = Component()
A_polys = A.get_polygons(by_spec=True)
B_polys = B.get_polygons(by_spec=True)
A_layers = A_polys.keys()
B_layers = B_polys.keys()
all_layers = set()
all_layers.update(A_layers)
all_layers.update(B_layers)
for layer in all_layers:
if (layer in A_layers) and (layer in B_layers):
p = gdspy.fast_boolean(
A_polys[layer],
B_polys[layer],
operation="xor",
precision=0.001,
max_points=4000,
layer=layer[0],
datatype=layer[1],
)
elif layer in A_layers:
p = A_polys[layer]
elif layer in B_layers:
p = B_polys[layer]
if p is not None:
D.add_polygon(p, layer=layer)
return D
def gdsdiff(
component1: Union[Path, Component, str],
component2: Union[Path, Component, str],
name: str = "TOP",
xor: bool = True,
) -> Component:
"""Compare two Components.
Args:
component1: Component or path to gds file
component2: Component or path to gds file
name: name of the top cell
xor: makes boolean operation
Returns:
Component with both cells (xor, common and diffs)
"""
if isinstance(component1, pathlib.Path):
component1 = str(component1)
if isinstance(component2, pathlib.Path):
component2 = str(component2)
if isinstance(component1, str):
component1 = import_gds(component1, flatten=True)
if isinstance(component2, str):
component2 = import_gds(component2, flatten=True)
top = Component(name=f"{name}_diffs")
if component1.name.startswith("Unnamed"):
component1.name = f"{name}_old"
if component2.name.startswith("Unnamed"):
component2.name = f"{name}_new"
ref1 = top << component1
ref2 = top << component2
ref1.xmin = 0
ref1.ymin = 0
ref2.xmin = 0
ref2.ymin = 0
if xor:
diff = xor_polygons(ref1, ref2, hash_geometry=False)
diff.name = f"{name}_xor"
top.add_ref(diff)
return top
if __name__ == "__main__":
import sys
if len(sys.argv) != 3:
print("Usage: gdsdiff <mask_v1.gds> <mask_v2.gds>")
print("Note that you need to have KLayout opened with klive running")
sys.exit()
c = gdsdiff(sys.argv[1], sys.argv[2])
c.show()
| [
"gdspy.fast_boolean",
"itertools.count",
"gdsfactory.component.Component",
"sys.exit",
"gdsfactory.import_gds.import_gds"
] | [((196, 213), 'itertools.count', 'itertools.count', ([], {}), '()\n', (211, 213), False, 'import itertools\n'), ((662, 673), 'gdsfactory.component.Component', 'Component', ([], {}), '()\n', (671, 673), False, 'from gdsfactory.component import Component\n'), ((2302, 2333), 'gdsfactory.component.Component', 'Component', ([], {'name': 'f"""{name}_diffs"""'}), "(name=f'{name}_diffs')\n", (2311, 2333), False, 'from gdsfactory.component import Component\n'), ((641, 652), 'gdsfactory.component.Component', 'Component', ([], {}), '()\n', (650, 652), False, 'from gdsfactory.component import Component\n'), ((2160, 2196), 'gdsfactory.import_gds.import_gds', 'import_gds', (['component1'], {'flatten': '(True)'}), '(component1, flatten=True)\n', (2170, 2196), False, 'from gdsfactory.import_gds import import_gds\n'), ((2254, 2290), 'gdsfactory.import_gds.import_gds', 'import_gds', (['component2'], {'flatten': '(True)'}), '(component2, flatten=True)\n', (2264, 2290), False, 'from gdsfactory.import_gds import import_gds\n'), ((3007, 3017), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3015, 3017), False, 'import sys\n'), ((1008, 1148), 'gdspy.fast_boolean', 'gdspy.fast_boolean', (['A_polys[layer]', 'B_polys[layer]'], {'operation': '"""xor"""', 'precision': '(0.001)', 'max_points': '(4000)', 'layer': 'layer[0]', 'datatype': 'layer[1]'}), "(A_polys[layer], B_polys[layer], operation='xor',\n precision=0.001, max_points=4000, layer=layer[0], datatype=layer[1])\n", (1026, 1148), False, 'import gdspy\n')] |
"""A set of Python Classes for connecting to and interacting with a VOSpace
service.
Connections to VOSpace are made using a SSL X509 certificat which is
stored in a .pem file.
"""
#from contextlib import nested
import copy
import errno
import fnmatch
import hashlib
import requests
from requests.exceptions import HTTPError
import html2text
import logging
import mimetypes
import os
import re
import stat
import string
import sys
import time
#import urllib
#import urlparse
from xml.etree import ElementTree
from copy import deepcopy
from NodeCache import NodeCache
from __version__ import version
import netrc
try:
    _unicode = unicode  # Python 2: native unicode type
except NameError:
    try:
        _unicode = str  # Python 3: str is the unicode text type
    except NameError:
        # If Python is built without Unicode support, the unicode type
        # will not exist. Fake one.
        class Unicode(object):
            pass
        # Bug fix: assign the fake class; 'unicode' is undefined in this
        # branch, so the old "_unicode = unicode" would re-raise NameError.
        _unicode = Unicode
try:
    from contextlib import nested  # Python 2
except ImportError:
    from contextlib import ExitStack, contextmanager

    @contextmanager
    def nested(*contexts):
        """Reimplementation of Python 2's contextlib.nested for Python 3.

        Enters every supplied context manager left-to-right and yields the
        tuple of their __enter__ results (matching contextlib.nested), so
        ``with nested(a, b) as (x, y):`` binds the entered values, not the
        manager objects themselves.  Exits are run in reverse order by
        ExitStack.
        """
        with ExitStack() as stack:
            # tuple() forces eager entry of all managers before the yield.
            yield tuple(stack.enter_context(ctx) for ctx in contexts)
try:
import ConfigParser # Python 2
from urllib import splittag, splitquery, urlencode
from urlparse import parse_qs, urlparse
from cStringIO import StringIO
import httplib as http_client
except ImportError:
import configparser as ConfigParser # Python 3
from urllib.parse import splittag, splitquery, urlencode
from urllib.parse import parse_qs, urlparse
from io import StringIO
import http.client as http_client
# Module-level logging setup: quiet by default; applications may raise the level.
http_client.HTTPConnection.debuglevel = 0  # set to 1 to dump HTTP wire traffic
logger = logging.getLogger('vos')
logger.setLevel(logging.ERROR)
# NOTE(review): checks only the *minor* version number (written for Python 2.x,
# where NullHandler appeared in 2.7) -- confirm intent when running on Python 3.
if sys.version_info[1] > 6:
    logger.addHandler(logging.NullHandler())
BUFSIZE = 8388608 # Size of read/write buffer
MAX_RETRY_DELAY = 128 # maximum delay (seconds) between retries
DEFAULT_RETRY_DELAY = 30 # start delay between retries when Try_After not sent by server.
MAX_RETRY_TIME = 900 # maximum time (seconds) for retries before giving up...
CONNECTION_TIMEOUT = 30 # seconds before HTTP connection should drop, should be less than DAEMON timeout in vofs
# Archive name for VOSpace storage; overridable via the environment.
VOSPACE_ARCHIVE = os.getenv("VOSPACE_ARCHIVE", "vospace")
# HTTP header names used when talking to the VOSpace service.
#HEADER_DELEG_TOKEN = 'X-CADC-DelegationToken'
HEADER_DELEG_TOKEN = 'X-DL-AuthToken'
HEADER_CONTENT_LENGTH = 'X-CADC-Content-Length'
HEADER_PARTIAL_READ = 'X-CADC-Partial-Read'
CONNECTION_COUNTER = 0  # module-global counter (presumably incremented elsewhere -- not visible here)
CADC_GMS_PREFIX = ''
# Silence noisy urllib3 insecure-request warnings and requests' own logging.
requests.packages.urllib3.disable_warnings()
logging.getLogger("requests").setLevel(logging.WARNING)
def convert_vospace_time_to_seconds(str_date):
    """Convert a VOSpace date string into seconds since the epoch.

    Anything past the seconds field (fractional seconds, timezone suffix)
    is discarded before parsing.  The VOSpace date is UT, so the current
    local/UTC offset is applied to the parsed value.

    :param str_date: date string of the form YYYY-mm-ddTHH:MM:SS[.fff...]
    :type str_date: str
    :return: seconds since the epoch
    :rtype: float
    """
    cutoff = str_date.rfind(":") + 3
    parsed = time.mktime(time.strptime(str_date[:cutoff], '%Y-%m-%dT%H:%M:%S'))
    # Shift the UT timestamp onto the local epoch.
    return parsed + (time.mktime(time.localtime()) - time.mktime(time.gmtime()))
def compute_md5(filename, block_size=BUFSIZE):
    """
    Given a file compute the MD5 of that file.

    :param filename: name of file to open and compute MD5 for.
    :type filename: str
    :param block_size: size of read blocks to stream through MD5 calculator.
    :type block_size: int
    :return: md5 as hex
    :rtype: hex
    """
    md5 = hashlib.md5()
    # Open in *binary* mode: text mode ('r') corrupts the digest for binary
    # files on Windows (newline translation) and raises TypeError on
    # Python 3, since md5.update requires bytes.
    with open(filename, 'rb') as r:
        while True:
            buf = r.read(block_size)
            if not buf:
                break
            md5.update(buf)
    return md5.hexdigest()
class URLParser(object):
    """Decompose a VOSpace-style URL into scheme, netloc, server, service,
    path and args components.

    Rolls its own parsing because urlparse differs between Python 2.5 and
    2.7; the netloc here is the VOSpace "server!service" (or
    "server~service") pair.
    """

    # scheme://server[!~]service/path?args -- named groups pull the pieces apart
    _URL_RE = ("(^(?P<scheme>[a-zA-Z]*):)?(//(?P<netloc>(?P<server>[^!~]*)[!~](?P<service>[^/]*)))?"
               "(?P<path>/?[^?]*)?(?P<args>\?.*)?")

    def __init__(self, url):
        match = re.match(self._URL_RE, url)
        self.scheme = match.group('scheme')
        self.netloc = match.group('netloc')
        self.server = match.group('server')
        self.service = match.group('service')
        # absent groups come back as None; normalize those to ''
        self.path = match.group('path') or ''
        self.args = match.group('args') or ''

    def __str__(self):
        return "[scheme: %s, netloc: %s, path: %s]" % (self.scheme,
                                                       self.netloc, self.path)
class Connection(object):
    """Holds the authentication state (X509 certificate, delegation token,
    or .netrc credentials) and the shared requests.Session used for all
    VOSpace calls."""
    def __init__(self, vospace_certfile=None, vospace_token=None, http_debug=False):
        """Set up the credentials and build the shared requests session.
        vospace_certfile -- path to a PEM certificate file; if the file is
                         not accessible the connection reverts to anonymous
        vospace_token -- token string (alternative to vospace_certfile)
        http_debug -- set True to generate debug statements
        The user must supply a valid certificate or connection will be 'anonymous'.
        """
        self.http_debug = http_debug
        # tokens trump certs. We should only ever have token or certfile
        # set in order to avoid confusion.
        self.vospace_certfile = None
        self.vospace_token = vospace_token
        if self.vospace_token is None:
            # allow anonymous access if no certfile specified
            if vospace_certfile is not None and not os.access(vospace_certfile, os.F_OK):
                logger.warning(
                    "Could not access certificate at {0}. Reverting to anonymous.".format(vospace_certfile))
                vospace_certfile = None
            self.vospace_certfile = vospace_certfile
        # create a requests session object that all requests will be made via.
        session = requests.Session()
        if self.vospace_certfile is not None:
            # requests takes a (cert, key) pair; here one PEM file holds both.
            session.cert = (self.vospace_certfile, self.vospace_certfile)
        if self.vospace_certfile is None: # MJG look at this in operation
            # No certificate: best-effort fall back to ~/.netrc basic-auth
            # credentials for the VOSpace web service, if an entry exists.
            try:
                auth = netrc.netrc().authenticators(EndPoints.VOSPACE_WEBSERVICE)
                if auth is not None:
                    session.auth = (auth[0], auth[2])
            except:
                # a missing or unparsable .netrc simply means anonymous access
                pass
        if self.vospace_token is not None:
            # token authentication is carried as a request header
            session.headers.update({HEADER_DELEG_TOKEN: self.vospace_token})
        user_agent = 'vos ' + version
        if "vofs" in sys.argv[0]:
            user_agent = 'vofs ' + version
        session.headers.update({"User-Agent": user_agent})
        assert isinstance(session, requests.Session)
        self.session = session
    def get_connection(self, url=None):
        """Return the shared requests session.
        :param url: deprecated -- must be None; connections are no longer
            created per URL and a non-None value raises OSError(ENOSYS).
        """
        if url is not None:
            raise OSError(errno.ENOSYS, "Connections are no longer set per URL.")
        return self.session
class Node(object):
"""A VOSpace node"""
IVOAURL = "ivo://ivoa.net/vospace/core"
VOSNS = "http://www.ivoa.net/xml/VOSpace/v2.0"
XSINS = "http://www.w3.org/2001/XMLSchema-instance"
TYPE = '{%s}type' % XSINS
NODES = '{%s}nodes' % VOSNS
NODE = '{%s}node' % VOSNS
PROTOCOL = '{%s}protocol' % VOSNS
PROPERTIES = '{%s}properties' % VOSNS
PROPERTY = '{%s}property' % VOSNS
ACCEPTS = '{%s}accepts' % VOSNS
PROVIDES = '{%s}provides' % VOSNS
ENDPOINT = '{%s}endpoint' % VOSNS
TARGET = '{%s}target' % VOSNS
DATA_NODE = "vos:DataNode"
LINK_NODE = "vos:LinkNode"
CONTAINER_NODE = "vos:ContainerNode"
    def __init__(self, node, node_type=None, properties=None, subnodes=None):
        """Create a Node from an XML DOM element, or build one from scratch.

        :param node: a DOM element for the node, or a string URI -- in
            which case a new DOM of node_type with the given properties
            and subnodes is built via self.create
        :param node_type: one of Node.DATA_NODE / Node.LINK_NODE /
            Node.CONTAINER_NODE; defaults to Node.DATA_NODE
        :param properties: dict of initial node properties
        :param subnodes: child nodes (for container nodes)
        :raises LookupError: if no DOM node was supplied or created
        """
        self.uri = None
        self.name = None
        self.target = None
        self.groupread = None
        self.groupwrite = None
        self.is_public = None
        self.type = None
        self.props = {}
        self.attr = {}
        self.xattr = {}
        self._node_list = None
        self._endpoints = None
        if not subnodes:
            subnodes = []
        if not properties:
            properties = {}
        if node_type is None:
            node_type = Node.DATA_NODE
        # A string argument is a URI: build the DOM for it.
        # (NOTE: the 'unicode' check is Python 2 only -- undefined on Python 3.)
        if type(node) == unicode or type(node) == str:
            node = self.create(node, node_type, properties, subnodes=subnodes)
        if node is None:
            raise LookupError("no node found or created?")
        self.node = node
        self.node.set('xmlns:vos', self.VOSNS)
        # Populate the convenience attributes (uri, name, props, ...) from the DOM.
        self.update()
def __eq__(self, node):
if not isinstance(node, Node):
return False
return self.props == node.props
@property
def endpoints(self):
if not self._endpoints:
self._endpoints = EndPoints(self.uri)
return self._endpoints
    def update(self):
        """Refresh the convenience attributes (type, target, uri, name,
        props, permissions, file attributes) from the underlying XML node."""
        self.type = self.node.get(Node.TYPE)
        if self.type is None:
            # logger.debug("Node type unknown, no node created")
            return None
        if self.type == "vos:LinkNode":
            # Link nodes carry their destination URI in a child element.
            self.target = self.node.findtext(Node.TARGET)
        self.uri = self.node.get('uri')
        self.name = os.path.basename(self.uri)
        # Gather key/value properties from every PROPERTIES container.
        for propertiesNode in self.node.findall(Node.PROPERTIES):
            self.set_props(propertiesNode)
        # 'ispublic' property drives the is_public flag.
        self.is_public = False
        if self.props.get('ispublic', 'false') == 'true':
            self.is_public = True
        logger.debug("{0} {1} -> {2}".format(self.uri, self.endpoints.islocked, self.props))
        self.groupwrite = self.props.get('groupwrite', '')
        self.groupread = self.props.get('groupread', '')
        logger.debug("Setting file attributes via setattr")
        self.setattr()
        logger.debug("Setting file x-attributes via setxattr")
        self.setxattr()
def set_property(self, key, value):
"""Create a key/value pair Node.PROPERTY element.
:param key: the property key
:param value: the property value
"""
properties = self.node.find(Node.PROPERTIES)
uri = "%s#%s" % (Node.IVOAURL, key)
ElementTree.SubElement(properties, Node.PROPERTY,
attrib={'uri': uri, 'readOnly': 'false'}).text = value
def __str__(self):
"""Convert the Node to a string representation of the Node"""
class Dummy(object):
pass
data = []
file_handle = Dummy()
file_handle.write = data.append
ElementTree.ElementTree(self.node).write(file_handle) # MJG , encoding="UTF-8")
return "".join(data)
def setattr(self, attr=None):
"""return / augment a dictionary of attributes associated with the Node
These attributes are determined from the node on VOSpace.
:param attr: the dictionary that holds the attributes
"""
if not attr:
attr = {}
# Get the flags for file mode settings.
self.attr = {}
# Only one date provided by VOSpace, so use this as all possible dates.
access_time = time.time()
if not self.props.get('date', None):
modified_time = access_time
else:
# mktime is expecting a localtime but we're sending a UT date, so
# some correction will be needed
modified_time = convert_vospace_time_to_seconds(self.props.get('date'))
self.attr['st_ctime'] = attr.get('st_ctime', modified_time)
self.attr['st_mtime'] = attr.get('st_mtime', modified_time)
self.attr['st_atime'] = access_time
# set the MODE by or'ing together all flags from stat
st_mode = 0
st_nlink = 1
if self.type == 'vos:ContainerNode':
st_mode |= stat.S_IFDIR
st_nlink = max(2, len(self.get_info_list()) + 2)
# if getInfoList length is < 0 we have a problem elsewhere, so above hack solves that problem.
elif self.type == 'vos:LinkNode':
st_mode |= stat.S_IFLNK
else:
st_mode |= stat.S_IFREG
self.attr['st_nlink'] = st_nlink
# Set the OWNER permissions: all vospace Nodes have read/write/execute by owner
st_mode |= stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
# Set the GROUP permissions
# MJG if self.props.get('groupwrite', "NONE") != "NONE":
if 'groupwrite' in self.props and self.props.get('groupwrite') is not None:
st_mode |= stat.S_IWGRP
# if self.props.get('groupread', "NONE") != "NONE":
if 'groupread' in self.props and self.props.get('groupread') is not None:
st_mode |= stat.S_IRGRP
st_mode |= stat.S_IXGRP
# Set the OTHER permissions
if self.props.get('ispublic', 'false') == 'true':
# If you can read the file then you can execute too.
# Public does NOT mean writeable. EVER
st_mode |= stat.S_IROTH | stat.S_IXOTH
self.attr['st_mode'] = attr.get('st_mode', st_mode)
# We set the owner and group bits to be those of the currently running process.
# This is a hack since we don't have an easy way to figure these out.
# TODO Come up with a better approach to uid setting
self.attr['st_uid'] = attr.get('st_uid', os.getuid())
self.attr['st_gid'] = attr.get('st_uid', os.getgid())
st_size = int(self.props.get('length', 0))
self.attr['st_size'] = st_size > 0 and st_size or 0
self.attr['st_blocks'] = self.attr['st_size'] / 512
def setxattr(self, attrs=None):
"""Initialize the extended attributes using the Node properties that are not part of the core set.
:param attrs: An input list of attributes being sent from an external source, not supported.
"""
if attrs is not None:
raise OSError(errno.ENOSYS, "No externally set extended Attributes for vofs yet.")
for key in self.props:
if key in Client.vosProperties:
continue
self.xattr[key] = self.props[key]
return
def chwgrp(self, group):
"""Set the groupwrite value to group for this node
:param group: the uri of he group to give write access to.
:type group: str
"""
logger.debug("Setting groups to: {0}".format(group))
if group is not None and len(group.split()) > 3:
raise AttributeError("Exceeded max of 4 write groups: {0}<-".format(group.split()))
self.groupwrite = group
return self.change_prop('groupwrite', group)
def chrgrp(self, group):
"""Set the groupread value to group for this node
:param group: the uri of the group to give read access to.
:type group: str
"""
if group is not None and len(group.split()) > 3:
raise AttributeError("Exceeded max of 4 read groups: {0}<-".format(group))
self.groupread = group
return self.change_prop('groupread', group)
    def set_public(self, value):
        """
        Set the 'ispublic' property of this node.

        :param value: should the is_public flag be set? (true/false)
        :type value: str
        :return: True/False from change_prop -- whether the property changed.
        """
        return self.change_prop('ispublic', value)
@staticmethod
def fix_prop(prop):
"""Check if prop is a well formed uri and if not then make into one
:param prop: the property to expand into a IVOA uri value for a property.
:rtype str
"""
(url, tag) = urllib.splittag(prop)
if tag is None and url in ['title',
'creator',
'subject',
'description',
'publisher',
'contributer',
'date',
'type',
'format',
'identifier',
'source',
'language',
'relation',
'coverage',
'rights',
'availableSpace',
'groupread',
'groupwrite',
'publicread',
'quota',
'length',
'MD5',
'mtime',
'ctime',
'ispublic']:
tag = url
url = Node.IVOAURL
prop = url + "#" + tag
parts = URLParser(url)
if parts.path is None or tag is None:
raise ValueError("Invalid VOSpace property uri: {0}".format(prop))
return prop
    @staticmethod
    def set_prop():
        """Build the XML for a given node.

        Placeholder -- direct property XML construction is not implemented.
        """
        raise NotImplementedError('No set prop.')
def change_prop(self, key, value):
"""Change the node property 'key' to 'value'.
:param key: The property key to update
:type key: str
:param value: The value to give that property.
:type value: str,None
:return True/False depending on if the property value was updated.
"""
# TODO split into 'set' and 'delete'
uri = self.fix_prop(key)
changed = False
found = False
properties = self.node.findall(Node.PROPERTIES)
for props in properties:
for prop in props.findall(Node.PROPERTY):
if uri != prop.attrib.get('uri', None):
continue
found = True
if prop.attrib.get('text', None) == value:
break
changed = True
if value is None:
# this is actually a delete property
prop.attrib['xsi:nil'] = 'true'
prop.attrib["xmlns:xsi"] = Node.XSINS
prop.text = ""
self.props[self.get_prop_name(uri)] = None
else:
prop.text = value
if found:
return changed
# must not have had this kind of property already, so set value
property_node = ElementTree.SubElement(properties[0], Node.PROPERTY)
property_node.attrib['readOnly'] = "false"
property_node.attrib['uri'] = uri
property_node.text = value
self.props[self.get_prop_name(uri)] = value
return changed
def chmod(self, mode):
"""Set the MODE of this Node...
translates unix style MODE to voSpace and updates the properties...
This function is quite limited. We can make a file publicly
readable and we can turn on/off group read/write permissions,
that's all.
:param mode: a stat MODE bit
"""
changed = 0
if mode & stat.S_IROTH:
changed += self.set_public('true')
else:
changed += self.set_public('false')
if mode & stat.S_IRGRP:
changed += self.chrgrp(self.groupread)
else:
changed += self.chrgrp('')
if mode & stat.S_IWGRP:
changed += self.chwgrp(self.groupwrite)
else:
changed += self.chwgrp('')
# logger.debug("%d -> %s" % (changed, changed>0))
return changed > 0
    def create(self, uri, node_type="vos:DataNode", properties=None, subnodes=None):
        """Build the XML needed to represent a VOSpace node returns an ElementTree representation of the XML
        :param uri: The URI for this node.
        :type uri: str
        :param node_type: the type of VOSpace node, likely one of vos:DataNode, vos:ContainerNode, vos:LinkNode
        :type node_type: str
        :param properties: a dictionary of the node properties, keys should be single words from the IVOA list
        :type properties: dict
        :param subnodes: Any children to attach to this node, only valid for vos:ContainerNode
        :type subnodes: [Node]
        """
        if not subnodes:
            subnodes = []
        elif node_type != 'vos:ContainerNode':
            raise ValueError("Only Container Nodes can have subnodes")
        if not properties:
            properties = {}
        endpoints = EndPoints(uri)
        # Build the root node called 'node'
        node = ElementTree.Element("node")
        node.attrib["xmlns"] = Node.VOSNS
        node.attrib["xmlns:vos"] = Node.VOSNS
        node.attrib[Node.TYPE] = node_type
        node.attrib["uri"] = uri
        # create a properties section
        if 'type' not in properties:
            # Default the mime type from the file extension of the uri.
            properties['type'] = mimetypes.guess_type(uri)[0]
        properties_node = ElementTree.SubElement(node, Node.PROPERTIES)
        for prop in properties.keys():
            property_node = ElementTree.SubElement(properties_node, Node.PROPERTY)
            property_node.attrib['readOnly'] = "false"
            property_node.attrib["uri"] = self.fix_prop(prop)
            if properties[prop] is None:
                # Setting the property value to None indicates that this is actually a delete
                property_node.attrib['xsi:nil'] = 'true'
                property_node.attrib["xmlns:xsi"] = Node.XSINS
                property_node.text = ""
            elif len(str(properties[prop])) > 0:
                property_node.text = properties[prop]
        # That's it for link nodes...
        if node_type == "vos:LinkNode":
            return node
        # create accepts
        accepts = ElementTree.SubElement(node, Node.ACCEPTS)
        ElementTree.SubElement(accepts, "view").attrib['uri'] = \
            "%s#%s" % (Node.IVOAURL, "defaultview")
        provides = ElementTree.SubElement(node, Node.PROVIDES)
        ElementTree.SubElement(provides, "view").attrib['uri'] = \
            "%s#%s" % (Node.IVOAURL, 'defaultview')
        ElementTree.SubElement(provides, "view").attrib['uri'] = \
            "%s#%s" % (endpoints.core, 'rssview')
        # Only DataNode can have a dataview...
        if node_type == "vos:DataNode":
            ElementTree.SubElement(provides, "view").attrib['uri'] = \
                "%s#%s" % (endpoints.core, 'dataview')
        # if this is a container node then add directory contents
        if node_type == "vos:ContainerNode":
            node_list = ElementTree.SubElement(node, Node.NODES)
            for sub_node in subnodes:
                node_list.append(sub_node.node)
        return node
def isdir(self):
"""Check if target is a container Node"""
# logger.debug(self.type)
if self.type == "vos:ContainerNode":
return True
return False
def islink(self):
"""Check if target is a link Node"""
# logger.debug(self.type)
if self.type == "vos:LinkNode":
return True
return False
    @property
    def is_locked(self):
        """Boolean view of the islocked property lookup."""
        return self.islocked()
    @is_locked.setter
    def is_locked(self, lock):
        # Avoid a server round-trip when the lock state is unchanged.
        if lock == self.is_locked:
            return
        self.change_prop(self.endpoints.islocked, lock and "true" or "false")
    def islocked(self):
        """Check if target state is locked for update/delete."""
        return self.props.get(self.endpoints.islocked, "false") == "true"
def get_info(self):
"""Organize some information about a node and return as dictionary"""
date = convert_vospace_time_to_seconds(self.props['date'])
creator = string.lower(re.search('CN=([^,]*)',
self.props.get('creator', 'CN=unknown_000,'))
.groups()[0].replace(' ', '_'))
perm = []
for i in range(10):
perm.append('-')
perm[1] = 'r'
perm[2] = 'w'
if self.type == "vos:ContainerNode":
perm[0] = 'd'
if self.type == "vos:LinkNode":
perm[0] = 'l'
if self.props.get('ispublic', "false") == "true":
perm[-3] = 'r'
perm[-2] = '-'
# write_group = self.props.get('groupwrite', 'NONE') # MJG
write_group = self.props.get('groupwrite', '') # MJG
if write_group != '':
perm[5] = 'w'
# read_group = self.props.get('groupread', 'NONE')
read_group = self.props.get('groupread', '')
if read_group != '':
perm[4] = 'r'
is_locked = self.props.get(self.endpoints.islocked, "false")
return {"permissions": string.join(perm, ''),
"creator": creator,
"readGroup": read_group,
"writeGroup": write_group,
"isLocked": is_locked,
"size": float(self.props.get('length', 0)),
"date": date,
"target": self.target}
@property
def node_list(self):
"""Get a list of all the nodes held to by a ContainerNode return a
list of Node objects"""
if self._node_list is None:
self._node_list = []
for nodesNode in self.node.findall(Node.NODES):
for nodeNode in nodesNode.findall(Node.NODE):
self.add_child(nodeNode)
return self._node_list
def add_child(self, child_element_tree):
"""
Add a child node to a node list.
:param child_element_tree: a node to add as a child.
:type child_element_tree: ElementTree
:return: Node
"""
child_node = Node(child_element_tree)
self.node_list.append(child_node)
return child_node
def clear_properties(self):
logger.debug("clearing properties")
properties_node_list = self.node.findall(Node.PROPERTIES)
for properties_node in properties_node_list:
for property_node in properties_node.findall(Node.PROPERTY):
key = self.get_prop_name(property_node.get('uri'))
if key in self.props:
del self.props[key]
properties_node.remove(property_node)
logger.debug("done clearing properties")
return
def get_info_list(self):
"""
:rtype [(Node, dict)]
:return a list of tuples containing the (NodeName, Info) about the node and its childern
"""
info = {}
for node in self.node_list:
info[node.name] = node.get_info()
if self.type == "vos:DataNode":
info[self.name] = self.get_info()
return info.items()
def set_props(self, props):
"""Set the SubElement Node PROPERTY values of the given xmlx ELEMENT provided using the Nodes props dictionary.
:param props: the xmlx element to set the Node PROPERTY of.
"""
for property_node in props.findall(Node.PROPERTY):
self.props[self.get_prop_name(property_node.get('uri'))] = self.get_prop_value(property_node)
return
@staticmethod
def get_prop_name(prop):
"""parse the property uri and get the name of the property (strips off the url and just returns the tag)
if this is an IVOA property, otherwise sends back the entry uri.
:param prop: the uri of the property to get the name of.
"""
(url, prop_name) = urllib.splittag(prop)
if url == Node.IVOAURL:
return prop_name
return prop
    @staticmethod
    def get_prop_value(prop):
        """Pull out the value part of PROPERTY Element.

        :param prop: an XML Element that represents a Node PROPERTY.
        :return: the element's text content (the property value).
        """
        return prop.text
class VOFile(object):
    """
    A class for managing http connections

    Attributes:
    maxRetries - maximum number of retries when transient errors encountered.
                 When set too high (as the default value is) the number of
                 retries are time limited (max 15min)
    maxRetryTime - maximum time to retry for when transient errors are
                 encountered
    """

    # Map HTTP status codes onto the errno values raised to callers.
    errnos = {404: errno.ENOENT,
              401: errno.EACCES,
              409: errno.EEXIST,
              423: errno.EPERM,
              408: errno.EAGAIN}
    # ## if we get one of these codes, retry the command... ;-(
    retryCodes = (503, 408, 504, 412)

    def __init__(self, url_list, connector, method, size=None,
                 follow_redirect=True, byte_range=None, possible_partial_read=False):
        # MJG: Fix URLs for non-GET calls
        if method != 'GET' and '?' in url_list:
            url_list = url_list[: url_list.rindex('?')]
        self.closed = True
        assert isinstance(connector, Connection)
        self.connector = connector
        self.httpCon = None
        self.timeout = -1
        self.size = size
        self.md5sum = None
        self.totalFileSize = None
        self.maxRetries = 10000
        self.maxRetryTime = MAX_RETRY_TIME
        self.url = None
        self.method = None
        # TODO
        # Make all the calls to open send a list of URLs
        # this should be redone during a cleanup. Basically, a GET might
        # result in multiple URLs (list of URLs) but VOFile is also used to
        # retrieve schema files and other info.
        # All the calls should pass a list of URLs. Make sure that we
        # make a deep copy of the input list so that we don't
        # accidentally modify the caller's copy.
        if isinstance(url_list, list):
            self.URLs = deepcopy(url_list)
        else:
            self.URLs = [url_list]
        self.urlIndex = 0
        self.followRedirect = follow_redirect
        self._fpos = 0
        # initial values for retry parameters
        self.currentRetryDelay = DEFAULT_RETRY_DELAY
        self.totalRetryDelay = 0
        self.retries = 0
        self.fileSize = None
        self.request = None
        self.resp = None
        self.trans_encode = None
        # open the connection
        self._fobj = None
        self.open(self.URLs[self.urlIndex], method, byte_range=byte_range, possible_partial_read=possible_partial_read)

    def tell(self):
        """Return the current logical file position."""
        return self._fpos

    def seek(self, offset, loc=os.SEEK_SET):
        """Move the logical file position.

        :param offset: byte offset to seek by.
        :param loc: os.SEEK_SET, os.SEEK_CUR or os.SEEK_END.
        """
        if loc == os.SEEK_CUR:
            self._fpos += offset
        elif loc == os.SEEK_SET:
            self._fpos = offset
        elif loc == os.SEEK_END:
            self._fpos = int(self.size) - offset
        return

    @staticmethod
    def flush():
        """
        Flush is a NO OP in VOFile: only really flush on close.
        @return:
        """
        return

    def close(self):
        """close the connection."""
        if not self.closed:
            try:
                if self.trans_encode is not None:
                    # terminate a chunked transfer before reading the response
                    self.httpCon.send('0\r\n\r\n')
                    logger.debug("End of document sent.")
                logger.debug("getting response.")
                self.resp = self.connector.session.send(self.request)
                logger.debug("checking response status.")
                self.checkstatus()
            finally:
                self.closed = True
        return self.closed

    def checkstatus(self, codes=(200, 201, 202, 206, 302, 303, 503, 416,
                                 402, 408, 412, 504)):
        """check the response status. If the status code doesn't match a value from the codes list then
        raise an Exception.

        :param codes: a list of http status_codes that are NOT failures but require some additional action.
        """
        # (the previous default listed 416 twice; the duplicate is removed,
        # membership semantics are unchanged)
        msgs = {404: "Node Not Found",
                401: "Not Authorized",
                409: "Conflict",
                423: "Locked",
                408: "Connection Timeout"}
        logger.debug("status %d for URL %s" % (self.resp.status_code, self.url))
        if self.resp.status_code not in codes:
            logger.debug("Got status code: %s for %s" %
                         (self.resp.status_code, self.url))
            msg = self.resp.content
            if msg is not None:
                msg = html2text.html2text(msg, self.url).strip().replace('\n', ' ')
            logger.debug("Error message: {0}".format(msg))
            if self.resp.status_code in VOFile.errnos.keys() or (msg is not None and "Node is busy" in msg):
                # BUG FIX: parenthesized so an empty/missing body only falls
                # back to a canned message when one exists -- the previous
                # precedence could raise KeyError for an unknown status code
                # with no response body.
                if (msg is None or len(msg) == 0) and self.resp.status_code in msgs:
                    msg = msgs[self.resp.status_code]
                if (self.resp.status_code == 401 and
                        self.connector.vospace_certfile is None and
                        self.connector.session.auth is None and self.connector.vospace_token is None):
                    msg += " using anonymous access "
                exception = OSError(VOFile.errnos.get(self.resp.status_code, self.resp.status_code), msg)
                if self.resp.status_code == 500 and "read-only" in msg:
                    exception = OSError(errno.EPERM, "VOSpace in read-only mode.")
                raise exception
        # Get the file size. We use this HEADER-CONTENT-LENGTH as a
        # fallback to work around a server-side Java bug that limits
        # 'Content-Length' to a signed 32-bit integer (~2 gig files)
        try:
            self.size = int(self.resp.headers.get("Content-Length", self.resp.headers.get(HEADER_CONTENT_LENGTH, 0)))
        except ValueError:
            self.size = 0
        if self.resp.status_code == 200:
            self.md5sum = self.resp.headers.get("Content-MD5", None)
            self.totalFileSize = self.size
        return True

    def open(self, url, method="GET", byte_range=None, possible_partial_read=False):
        """Open a connection to the given URL
        :param url: The URL to be openned
        :type url: str
        :param method: HTTP Method to use on open (PUT/GET/POST)
        :type method: str
        :param byte_range: The range of byte_range to read, This is in open so we can set the header parameter.
        :type byte_range: str
        :param possible_partial_read: Sometimes we kill during read, this tells the server that isn't an error.
        :type possible_partial_read: bool
        """
        logger.debug("Opening %s (%s)" % (url, method))
        self.url = url
        self.method = method
        request = requests.Request(self.method, url)
        self.trans_encode = None
        # Try to send a content length hint if this is a PUT.
        # otherwise send as a chunked PUT
        if method in ["PUT"]:
            try:
                self.size = int(self.size)
                request.headers.update({"Content-Length": self.size,
                                        HEADER_CONTENT_LENGTH: self.size})
            except TypeError:
                self.size = None
                self.trans_encode = "chunked"
        elif method in ["POST", "DELETE"]:
            self.size = None
            self.trans_encode = "chunked"
        if method in ["PUT", "POST", "DELETE"]:
            content_type = "text/xml"
            # Workaround for UWS library issues MJG
            if 'sync' in url or 'transfer' in url:
                content_type = 'application/x-www-form-urlencoded'
            if method == "PUT":
                ext = os.path.splitext(urllib.splitquery(url)[0])[1]
                if ext in ['.fz', '.fits', 'fit']:
                    content_type = 'application/fits'
                else:
                    content_type = mimetypes.guess_type(url)[0]
                    if content_type is None:
                        content_type = "text/xml"  # MJG
            if content_type is not None:
                request.headers.update({"Content-type": content_type})
        if byte_range is not None and method == "GET":
            request.headers.update({"Range": byte_range})
        request.headers.update({"Accept": "*/*",
                                "Expect": "100-continue"})
        # set header if a partial read is possible
        if possible_partial_read and method == "GET":
            request.headers.update({HEADER_PARTIAL_READ: "true"})
        try:
            self.request = self.connector.session.prepare_request(request)
        except Exception as ex:
            logger.error(str(ex))

    def get_file_info(self):
        """Return information harvested from the HTTP header"""
        return self.totalFileSize, self.md5sum

    def read(self, size=None, return_response=False):
        """return size bytes from the connection response
        :param size: number of bytes to read from the file.
        :param return_response: return the raw response object instead of data.
        """
        if self.resp is None:
            # lazily send the prepared request on first read
            try:
                logger.debug("Initializing read by sending request: {0}".format(self.request))
                self.resp = self.connector.session.send(self.request, stream=True)
                self.checkstatus()
            except Exception as ex:
                logger.debug("Error on read: {0}".format(ex))
                raise ex
        if self.resp is None:
            raise OSError(errno.EFAULT, "No response from VOServer")
        # (a dead 'read_error' variable, never assigned and hence never
        # raised, was removed from this method)
        if self.resp.status_code == 416:
            return ""
        # check the most likely response first
        if self.resp.status_code == 200 or self.resp.status_code == 206:
            if return_response:
                return self.resp
            else:
                buff = self.resp.raw.read(size)
                size = size is not None and size < len(buff) and size or len(buff)
                return buff[:size]
        elif self.resp.status_code == 303 or self.resp.status_code == 302:
            url = self.resp.headers.get('Location', None)
            logger.debug("Got redirect URL: {0}".format(url))
            self.url = url
            if not url:
                raise OSError(errno.ENOENT,
                              "Got 303 on {0} but no Location value in header? [{1}]".format(self.url,
                                                                                             self.resp.content),
                              self.url)
            if self.followRedirect:
                # We open this new URL without the byte range and partial read as we are following a service
                # redirect and that service redirect is to the object that satisfies the original request.
                # TODO seperate out making the transfer reqest and reading the response content.
                self.open(url, "GET")
                return self.read(size)
            else:
                return self.url
        elif self.resp.status_code in VOFile.retryCodes:
            # Note: 404 (File Not Found) might be returned when:
            # 1. file deleted or replaced
            # 2. file migrated from cache
            # 3. hardware failure on storage node
            # For 3. it is necessary to try the other URLs in the list
            # otherwise this the failed URL might show up even after the
            # caller tries to re-negotiate the transfer.
            # For 1. and 2., calls to the other URLs in the list might or
            # might not succeed.
            if self.urlIndex < len(self.URLs) - 1:
                # go to the next URL
                self.urlIndex += 1
                self.open(self.URLs[self.urlIndex], "GET")
                return self.read(size)
        else:
            self.URLs.pop(self.urlIndex)  # remove url from list
            if len(self.URLs) == 0:
                # no more URLs to try...
                if self.resp.status_code == 404:
                    raise OSError(errno.ENOENT, self.url)
                else:
                    raise OSError(errno.EIO,
                                  "unexpected server response %s (%d)" %
                                  (self.resp.reason, self.resp.status_code), self.url)
            if self.urlIndex < len(self.URLs):
                self.open(self.URLs[self.urlIndex], "GET")
                return self.read(size)
        # start from top of URLs with a delay
        self.urlIndex = 0
        logger.error("Servers busy {0} for {1}".format(self.resp.status_code, self.URLs))
        msg = self.resp.content
        if msg is not None:
            msg = html2text.html2text(msg, self.url).strip()
        else:
            msg = "No Message Sent"
        logger.error("Message from VOSpace {0}: {1}".format(self.url, msg))
        try:
            # see if there is a Retry-After in the head...
            ras = int(self.resp.headers.get("Retry-After", 5))
        except ValueError:
            ras = self.currentRetryDelay
        # exponential backoff, capped at MAX_RETRY_DELAY
        if (self.currentRetryDelay * 2) < MAX_RETRY_DELAY:
            self.currentRetryDelay *= 2
        else:
            self.currentRetryDelay = MAX_RETRY_DELAY
        if ((self.retries < self.maxRetries) and
                (self.totalRetryDelay < self.maxRetryTime)):
            logger.error("Retrying in {0} seconds".format(ras))
            self.totalRetryDelay += ras
            self.retries += 1
            time.sleep(int(ras))
            self.open(self.URLs[self.urlIndex], "GET")
            return self.read(size)
        else:
            raise OSError(self.resp.status_code,
                          "failed to connect to server after multiple attempts {0} {1}".format(self.resp.reason,
                                                                                               self.resp.status_code),
                          self.url)

    @staticmethod
    def write(buf):
        """write buffer to the connection
        :param buf: string to write to the file.
        """
        raise OSError(errno.ENOSYS, "Direct write to a VOSpaceFile is not supported, use copy instead.")
class EndPoints(object):
    """Resolve a VOSpace URI into the server/service endpoints serving it."""
    # Known VOSpace server network locations.
    CADC_SERVER = 'www.canfar.phys.uvic.ca'
    # NOAO_TEST_SERVER = "dldemo.datalab.noirlab.edu:8080/vospace-2.0"
    NOAO_TEST_SERVER = "dldb1.datalab.noirlab.edu:8080/vospace-2.0"
    LOCAL_TEST_SERVER = 'localhost:8080/vospace-2.0'
    DEFAULT_VOSPACE_URI = 'datalab.noao.edu!vospace'
    # DEFAULT_VOSPACE_URI = 'nvo.caltech!vospace'
    # Environment override for the VOSpace web service host.
    VOSPACE_WEBSERVICE = os.getenv('VOSPACE_WEBSERVICE', None)
    # URI netloc -> server network location.
    VOServers = {'cadc.nrc.ca!vospace': CADC_SERVER,
                 'cadc.nrc.ca~vospace': CADC_SERVER,
                 'datalab.noao.edu!vospace': NOAO_TEST_SERVER,
                 'datalab.noao.edu~vospace': NOAO_TEST_SERVER,
                 'nvo.caltech!vospace': LOCAL_TEST_SERVER,
                 'nvo.caltech~vospace': LOCAL_TEST_SERVER
                 }
    # URI netloc -> ivo:// data view identifier.
    VODataView = {'cadc.nrc.ca!vospace': 'ivo://cadc.nrc.ca/vospace',
                  'cadc.nrc.ca~vospace': 'ivo://cadc.nrc.ca/vospace',
                  'datalab.noao.edu!vospace': 'ivo://datalab.noao.edu/vospace',
                  'datalab.noao.edu~vospace': 'ivo://datalab.noao.edu/vospace',
                  'nvo.caltech!vospace': 'ivo://nvo.caltech/vospace',
                  'nvo.caltech~vospace': 'ivo://nvo.caltech/vospace'}
    # VONodes = "vospace/nodes"
    # VOProperties = {NOAO_TEST_SERVER: "/vospace",
    #                 CADC_SERVER: "/vospace/nodeprops",
    #                 LOCAL_TEST_SERVER: "/vospace"}
    # VOTransfer = {NOAO_TEST_SERVER: '/vospace/sync',
    #               CADC_SERVER: '/vospace/synctrans',
    #               LOCAL_TEST_SERVER: '/vospace/sync'}
    # Per-server endpoint path fragments.
    VONodes = "nodes"
    VOProperties = {NOAO_TEST_SERVER: "",
                    CADC_SERVER: "nodeprops",
                    LOCAL_TEST_SERVER: ""}
    VOTransfer = {NOAO_TEST_SERVER: 'sync',
                  CADC_SERVER: 'synctrans',
                  LOCAL_TEST_SERVER: 'sync'}
    def __init__(self, uri, basic_auth=False):
        """
        Based on the URI return the various sever endpoints that will be
        associated with this uri.
        :param uri:
        """
        # 'vospace/auth' when basic auth is requested, plain 'vospace' otherwise.
        self.service = basic_auth and 'vospace/auth' or 'vospace'
        self.uri_parts = URLParser(uri)
    @property
    def netloc(self):
        """Network location portion of the parsed URI."""
        return self.uri_parts.netloc
    @property
    def properties(self):
        """URL of the node-properties endpoint for this service."""
        return "{0}/{1}/{2}".format(self.server, self.service, EndPoints.VOProperties.get(self.server))
    @property
    def uri(self):
        """Canonical ivo:// form of this service's URI."""
        return "ivo://{0}".format(self.netloc).replace("!", "/").replace("~", "/")
    @property
    def view(self):
        """URI of the view service."""
        return "{0}/view".format(self.uri)
    @property
    def cutout(self):
        """URI of the cutout view service."""
        return "ivo://{0}/{1}#{2}".format(self.uri_parts.server, 'view', 'cutout')
    @property
    def core(self):
        """URI of the core service."""
        return "{0}/core".format(self.uri)
    @property
    def islocked(self):
        """Property URI used to flag a node as locked."""
        return "{0}#islocked".format(self.core)
    @property
    def server(self):
        """
        :return: The network location of the VOSpace server.
        """
        # Environment override wins; otherwise look up by URI netloc.
        return (EndPoints.VOSPACE_WEBSERVICE is not None and EndPoints.VOSPACE_WEBSERVICE or
                EndPoints.VOServers.get(self.netloc, None))
    @property
    def transfer(self):
        """
        The transfer service endpoint.
        :return: service location of the transfer service.
        :rtype: str
        """
        if self.server in EndPoints.VOTransfer:
            end_point = EndPoints.VOTransfer[self.server]
        else:
            end_point = "/vospace/auth/synctrans"
        return "{0}/{1}/{2}".format(self.server, self.service, end_point)
    @property
    def nodes(self):
        """
        :return: The Node service endpoint.
        """
        return "{0}/{1}/{2}".format(self.server, self.service, EndPoints.VONodes)
class Client(object):
    """The Client object does the work of talking to the VOSpace service."""
    # well-known IVOA transfer protocol identifiers
    VO_HTTPGET_PROTOCOL = 'ivo://ivoa.net/vospace/core#httpget'
    VO_HTTPPUT_PROTOCOL = 'ivo://ivoa.net/vospace/core#httpput'
    VO_HTTPSGET_PROTOCOL = 'ivo://ivoa.net/vospace/core#httpsget'
    VO_HTTPSPUT_PROTOCOL = 'ivo://ivoa.net/vospace/core#httpsput'
    DWS = '/data/pub/'
    # reserved vospace properties, not to be used for extended property setting
    vosProperties = ["description", "type", "encoding", "MD5", "length",
                     "creator", "date", "groupread", "groupwrite", "ispublic"]
    # Default proxy certificate: $VOSPACE_CERTFILE, else the first readable of
    # ~/.ssl/cadcproxy.pem or ~/.ssl/vospaceproxy.pem.
    VOSPACE_CERTFILE = os.getenv("VOSPACE_CERTFILE", None)
    if VOSPACE_CERTFILE is None:
        for certfile in ['cadcproxy.pem', 'vospaceproxy.pem']:
            certpath = os.path.join(os.getenv("HOME", "."), '.ssl')
            certfilepath = os.path.join(certpath, certfile)
            if os.access(certfilepath, os.R_OK):
                VOSPACE_CERTFILE = certfilepath
                break
def __init__(self, vospace_certfile=None, root_node=None, conn=None,
transfer_shortcut=False, http_debug=False,
secure_get=False, vospace_token=None):
"""This could/should be expanded to set various defaults
:param vospace_certfile: x509 proxy certificate file location. Overrides certfile in conn.
:type vospace_certfile: str
:param vospace_token: token string (alternative to vospace_certfile)
:type vospace_token: str
:param root_node: the base of the VOSpace for uri references.
:type root_node: str
:param conn: a connection pool object for this Client
:type conn: Session
:param transfer_shortcut: if True then just assumed data web service urls
:type transfer_shortcut: bool
:param http_debug: turn on http debugging.
:type http_debug: bool
:param secure_get: Use HTTPS: ie. transfer contents of files using SSL encryption.
:type secure_get: bool
"""
if not isinstance(conn, Connection):
vospace_certfile = vospace_certfile is None and Client.VOSPACE_CERTFILE or vospace_certfile
conn = Connection(vospace_certfile=vospace_certfile,
vospace_token=vospace_token,
http_debug=http_debug)
if conn.vospace_certfile:
logger.debug("Using certificate file: {0}".format(vospace_certfile))
if conn.vospace_token:
logger.debug("Using vospace token: " + conn.vospace_token)
vospace_certfile = conn.vospace_certfile
# Set the protocol
if vospace_certfile is None:
self.protocol = "http"
else:
self.protocol = "https"
self.conn = conn
self.rootNode = root_node
self.nodeCache = NodeCache()
self.transfer_shortcut = transfer_shortcut
self.secure_get = secure_get
return
    def glob(self, pathname):
        """Return a list of paths matching a pathname pattern.
        The pattern may contain simple shell-style wildcards a la
        fnmatch. However, unlike fnmatch, file names starting with a
        dot are special cases that are not matched by '*' and '?'
        patterns.
        :param pathname: path to glob.
        :return: list of matching paths (materialized from iglob).
        """
        return list(self.iglob(pathname))
    def iglob(self, pathname):
        """Return an iterator which yields the paths matching a pathname pattern.
        The pattern may contain simple shell-style wildcards a la fnmatch. However, unlike fnmatch, filenames
        starting with a dot are special cases that are not matched by '*' and '?' patterns.
        :param pathname: path to run glob against.
        :type pathname: str
        """
        dirname, basename = os.path.split(pathname)
        if not self.has_magic(pathname):
            if basename:
                # No wildcards at all: just confirm the node exists.
                self.get_node(pathname)
                yield pathname
            else:
                # Patterns ending with a slash should match only directories
                if self.iglob(dirname):
                    yield pathname
            return
        if not dirname:
            # Relative pattern: expand against the configured root node.
            for name in self.glob1(self.rootNode, basename):
                yield name
            return
        # `os.path.split()` returns the argument itself as a dirname if it is a
        # drive or UNC path. Prevent an infinite recursion if a drive or UNC path
        # contains magic characters (i.e. r'\\?\C:').
        if dirname != pathname and self.has_magic(dirname):
            dirs = self.iglob(dirname)
        else:
            dirs = [dirname]
        if self.has_magic(basename):
            glob_in_dir = self.glob1
        else:
            glob_in_dir = self.glob0
        for dirname in dirs:
            for name in glob_in_dir(dirname, basename):
                yield os.path.join(dirname, name)
    # These 2 helper functions non-recursively glob inside a literal directory.
    # They return a list of basenames. `glob1` accepts a pattern while `glob0`
    # takes a literal basename (so it only has to check for its existence).
    def glob1(self, dirname, pattern):
        """
        :param dirname: name of the directory to look for matches in.
        :type dirname: str
        :param pattern: pattern to match directory contents names against
        :type pattern: str
        :return:
        """
        if not dirname:
            dirname = self.rootNode
        if isinstance(pattern, _unicode) and not isinstance(dirname, unicode):
            # Python 2 only: promote dirname to unicode to match the pattern.
            dirname = unicode(dirname, sys.getfilesystemencoding() or sys.getdefaultencoding())
        try:
            names = self.listdir(dirname, force=True)
        except os.error:
            return []
        if not pattern.startswith('.'):
            # Hidden entries are only matched by patterns starting with '.'.
            names = filter(lambda x: not x.startswith('.'), names)
        return fnmatch.filter(names, pattern)
def glob0(self, dirname, basename):
if basename == '':
# `os.path.split()` returns an empty basename for paths ending with a
# directory separator. 'q*x/' should match only directories.
if self.isdir(dirname):
return [basename]
else:
if self.access(os.path.join(dirname, basename)):
return [basename]
else:
raise OSError(errno.EACCES, "Permission denied: {0}".format(os.path.join(dirname, basename)))
return []
    # Pattern matching any shell glob metacharacter (*, ? or [).
    magic_check = re.compile('[*?[]')
    @classmethod
    def has_magic(cls, s):
        # True when the path contains a glob wildcard and needs expansion.
        return cls.magic_check.search(s) is not None
# @logExceptions()
def copy(self, source, destination, send_md5=False):
    """copy from source to destination.

    One of source or destination must be a vospace location and the other
    must be a local location.

    :param source: The source file to send to VOSpace or the VOSpace node to retrieve
    :type source: str
    :param destination: The VOSpace location to put the file to or the local destination.
    :type destination: str
    :param send_md5: Should copy send back the md5 of the destination file or just the size?
    :type send_md5: bool
    :raises OSError: EFAULT when every candidate URL fails.
    """
    # TODO: handle vospace to vospace copies.
    success = False
    destination_size = None
    destination_md5 = None
    source_md5 = None
    get_node_url_retried = False
    if source[0:4] == "vos:":
        # --- download: VOSpace -> local file ---
        check_md5 = False
        # A trailing [...] section on the source is a cutout specifier.
        match = re.search("([^\[\]]*)(\[.*\])$", source)
        if match is not None:
            view = 'cutout'
            source = match.group(1)
            cutout = match.group(2)
        else:
            view = 'data'
            cutout = None
            check_md5 = True
            # default is the md5 of the empty string
            source_md5 = self.get_node(source).props.get('MD5', 'd41d8cd98f00b204e9800998ecf8427e')
        get_urls = self.get_node_url(source, method='GET', cutout=cutout, view=view)
        while not success:
            # If there are no urls available, drop through to full
            # negotiation if that wasn't already tried.
            if len(get_urls) == 0:
                if self.transfer_shortcut and not get_node_url_retried:
                    get_urls = self.get_node_url(source, method='GET', cutout=cutout, view=view,
                                                 full_negotiation=True)
                    # remove the first one as we already tried that one.
                    get_urls.pop(0)
                    get_node_url_retried = True
                else:
                    break
            get_url = get_urls.pop(0)
            try:
                response = self.conn.session.get(get_url, timeout=(2, 5), stream=True)
                source_md5 = response.headers.get('Content-MD5', source_md5)
                response.raise_for_status()
                # BUG FIX: open in binary mode ('wb', was 'w').  Text mode
                # translates line endings on Windows, corrupting binary
                # payloads and breaking the MD5 verification below.
                with open(destination, 'wb') as fout:
                    for chunk in response.iter_content(chunk_size=512 * 1024):
                        if chunk:
                            fout.write(chunk)
                            fout.flush()
                destination_size = os.stat(destination).st_size
                if check_md5:
                    destination_md5 = compute_md5(destination)
                    logger.debug("{0} {1}".format(source_md5, destination_md5))
                    assert destination_md5 == source_md5
                success = True
            except Exception as ex:
                # best effort: log the failure and fall through to the
                # next candidate URL.
                logging.debug("Failed to GET {0}".format(get_url))
                logging.debug("Got error {0}".format(ex))
                continue
    else:
        # --- upload: local file -> VOSpace ---
        source_md5 = compute_md5(source)
        put_urls = self.get_node_url(destination, 'PUT')
        while not success:
            if len(put_urls) == 0:
                if self.transfer_shortcut and not get_node_url_retried:
                    put_urls = self.get_node_url(destination, method='PUT', full_negotiation=True)
                    # remove the first one as we already tried that one.
                    put_urls.pop(0)
                    get_node_url_retried = True
                else:
                    break
            put_url = put_urls.pop(0)
            try:
                # BUG FIX: read in binary mode ('rb', was 'r') so the bytes
                # sent match the local MD5 computed above on all platforms.
                with open(source, 'rb') as fin:
                    self.conn.session.put(put_url, data=fin)
                node = self.get_node(destination, limit=0, force=True)
                destination_md5 = node.props.get('MD5', 'd41d8cd98f00b204e9800998ecf8427e')
                assert destination_md5 == source_md5
            except Exception as ex:
                logging.debug("FAILED to PUT to {0}".format(put_url))
                logging.debug("Got error: {0}".format(ex))
                continue
            success = True
            break
    if not success:
        raise OSError(errno.EFAULT, "Failed copying {0} -> {1}".format(source, destination))
    # NOTE: relies on destination_md5 being a truthy hex digest when set.
    return send_md5 and destination_md5 or destination_size
def fix_uri(self, uri):
    """given a uri check if the authority part is there and if it isn't
    then add the vospace authority

    :param uri: The string that should be parsed into a proper URI, if possible.
    :return: the normalized "vos://host/path" form of *uri* (non-vos URIs
             are returned unchanged).
    :raises OSError: EINVAL when the basename contains illegal characters.
    """
    parts = URLParser(uri)
    # TODO
    # implement support for local files (parts.scheme=None
    # and self.rootNode=None
    if parts.scheme is None:
        if self.rootNode is not None:
            uri = self.rootNode + uri
        else:
            return uri
    parts = URLParser(uri)
    if parts.scheme != "vos":
        # Just past this back, I don't know how to fix...
        return uri
    # Check that path name compiles with the standard
    logger.debug("Got value of args: {0}".format(parts.args))
    if parts.args is not None and parts.args != "":
        # A 'link' query argument redirects to another URI; recurse on it.
        uri = parse_qs(urlparse(parts.args).query).get('link', None)[0]
        logger.debug("Got uri: {0}".format(uri))
        if uri is not None:
            return self.fix_uri(uri)
    # Check for 'cutout' syntax values appended to the filename.
    path = re.match("(?P<filename>[^\[]*)(?P<ext>(\[\d*:?\d*\])?"
             "(\[\d*:?\d*,?\d*:?\d*\])?)", parts.path)
    filename = os.path.basename(path.group('filename'))
    if not re.match("^[_\-\(\)=\+!,;:@&\*\$\.\w~]*$", filename):
        raise OSError(errno.EINVAL, "Illegal vospace container name",
                      filename)
    path = path.group('filename')
    # insert the default VOSpace server if none given
    host = parts.netloc
    if not host or host == '':
        host = EndPoints.DEFAULT_VOSPACE_URI
    path = os.path.normpath(path).strip('/')
    uri = "{0}://{1}/{2}{3}".format(parts.scheme, host, path, parts.args)
    logger.debug("Returning URI: {0}".format(uri))
    return uri
def get_node(self, uri, limit=0, force=False):
    """connect to VOSpace and download the definition of VOSpace node

    :param uri: -- a voSpace node in the format vos:/VOSpaceName/nodeName
    :type uri: str
    :param limit: -- load children nodes in batches of limit
    :type limit: int, None
    :param force: force getting the node from the service, rather than returning a cached version.
    :return: The VOSpace Node
    :rtype: Node
    """
    logger.debug("Getting node {0}".format(uri))
    uri = self.fix_uri(uri)
    node = None
    if not force and uri in self.nodeCache:
        node = self.nodeCache[uri]
    if node is None:
        logger.debug("Getting node {0} from ws".format(uri))
        with self.nodeCache.watch(uri) as watch:
            # If this is vospace URI then we can request the node info
            # using the uri directly, but if this a URL then the metadata
            # comes from the HTTP header.
            if uri.startswith('vos:'):
                vo_fobj = self.open(uri, os.O_RDONLY, limit=limit)
                vo_xml_string = vo_fobj.read()
                xml_file = StringIO(vo_xml_string)
                xml_file.seek(0)
                dom = ElementTree.parse(xml_file)
                node = Node(dom.getroot())
            elif uri.startswith('http'):
                # Synthesize a DataNode from the HTTP response headers.
                header = self.open(None, url=uri, mode=os.O_RDONLY, head=True)
                header.read()
                logger.debug("Got http headers: {0}".format(header.resp.headers))
                properties = {'type': header.resp.headers.get('Content-type', 'txt'),
                              'date': time.strftime(
                                  '%Y-%m-%dT%H:%M:%S GMT',
                                  time.strptime(header.resp.headers.get('Date', None),
                                                '%a, %d %b %Y %H:%M:%S GMT')),
                              'groupwrite': None,
                              'groupread': None,
                              'ispublic': URLParser(uri).scheme == 'https' and 'true' or 'false',
                              'length': header.resp.headers.get('Content-Length', 0)}
                node = Node(node=uri, node_type=Node.DATA_NODE, properties=properties)
                logger.debug(str(node))
            else:
                raise OSError(2, "Bad URI {0}".format(uri))
            watch.insert(node)
            # IF THE CALLER KNOWS THEY DON'T NEED THE CHILDREN THEY
            # CAN SET LIMIT=0 IN THE CALL Also, if the number of nodes
            # on the first call was less than 500, we likely got them
            # all during the init
            if limit != 0 and node.isdir() and len(node.node_list) > 500:
                next_uri = None
                # Page through the container until the last child repeats.
                while next_uri != node.node_list[-1].uri:
                    next_uri = node.node_list[-1].uri
                    xml_file = StringIO(self.open(uri, os.O_RDONLY, next_uri=next_uri, limit=limit).read())
                    xml_file.seek(0)
                    next_page = Node(ElementTree.parse(xml_file).getroot())
                    # Each page starts with the node we paged from; drop it.
                    if len(next_page.node_list) > 0 and next_uri == next_page.node_list[0].uri:
                        next_page.node_list.pop(0)
                    node.node_list.extend(next_page.node_list)
    # Refresh the cache entries for all children we now know about.
    for childNode in node.node_list:
        with self.nodeCache.watch(childNode.uri) as childWatch:
            childWatch.insert(childNode)
    return node
def get_node_url(self, uri, method='GET', view=None, limit=0, next_uri=None, cutout=None, full_negotiation=None):
    """Split apart the node string into parts and return the correct URL for this node.

    :param uri: The VOSpace uri to get an associated url for.
    :type uri: str
    :param method: What will this URL be used to do: 'GET' the node, 'PUT' or 'POST' to the node or 'DELETE' it
    :type method: str
    :param view: If this is a 'GET' which view of the node should the URL provide.
    :type view: str
    :param limit: If this is a container how many of the children should be returned? (None - Unlimited)
    :type limit: int, None
    :param next_uri: When getting a container we make repeated calls until all 'limit' children returned. next_uri
    tells the service what was the last child uri retrieved in the previous call.
    :type next_uri: str
    :param cutout: The cutout pattern to apply to the file at the service end: applies to view='cutout' only.
    :type cutout: str
    :param full_negotiation: Should we use the transfer UWS or do a GET and follow the redirect.
    :type full_negotiation: bool
    """
    uri = self.fix_uri(uri)
    if view in ['data', 'cutout'] and method == 'GET':
        # Resolve link nodes before requesting data.
        node = self.get_node(uri, limit=0)
        if node.islink():
            target = node.node.findtext(Node.TARGET)
            logger.debug("%s is a link to %s" % (node.uri, target))
            if target is None:
                raise OSError(errno.ENOENT, "No target for link")
            parts = URLParser(target)
            if parts.scheme != "vos":
                # This is not a link to another VOSpace node so lets just return the target as the url
                url = target
                if cutout is not None:
                    url = "{0}?cutout={1}".format(target, cutout)
                    logger.debug("Line 3.1.2")
                logger.debug("Returning URL: {0}".format(url))
                return [url]
            logger.debug("Getting URLs for: {0}".format(target))
            return self.get_node_url(target, method=method, view=view, limit=limit, next_uri=next_uri,
                                     cutout=cutout,
                                     full_negotiation=full_negotiation)
    logger.debug("Getting URL for: " + str(uri))
    parts = URLParser(uri)
    if parts.scheme.startswith('http'):
        return [uri]
    endpoints = EndPoints(uri, basic_auth=self.conn.session.auth is not None)
    # see if we have a VOSpace server that goes with this URI in our look up list
    if endpoints.server is None:
        # Since we don't know how to get URLs for this server we should just return the uri.
        return uri
    # full_negotiation is an override, so it can be used to force either shortcut (false) or full negotiation (true)
    if full_negotiation is not None:
        do_shortcut = not full_negotiation
    else:
        do_shortcut = self.transfer_shortcut
    # NOTE(review): the line below unconditionally disables the shortcut
    # path, overriding the logic above — presumably a deliberate local
    # modification (MJG); confirm before removing.
    do_shortcut = False  # MJG
    if not do_shortcut and method == 'GET' and view in ['data', 'cutout']:
        return self._get(uri, view=view, cutout=cutout)
    if not do_shortcut and method == 'PUT':
        return self._put(uri)
    if (view == "cutout" and cutout is None) or (cutout is not None and view != "cutout"):
        raise ValueError("For cutout, must specify a view=cutout and for view=cutout must specify cutout")
    if method == 'GET' and view not in ['data', 'cutout']:
        # This is a request for the URL of the Node, which returns an XML document that describes the node.
        fields = {}
        # MJG: No limit keyword on URLs
        # if limit is not None:
        #     fields['limit'] = limit
        if view is not None:
            fields['view'] = view
        if next_uri is not None:
            fields['uri'] = next_uri
        data = ""
        if len(fields) > 0:
            data = "?" + urllib.urlencode(fields)
        url = "%s://%s/%s%s" % (self.protocol,
                                endpoints.nodes,
                                parts.path.strip('/'),
                                data)
        logger.debug("URL: %s (%s)" % (url, method))
        return url
    # This is the shortcut. We do a GET request on the service with the parameters sent as arguments.
    direction = {'GET': 'pullFromVoSpace', 'PUT': 'pushToVoSpace'}
    # On GET override the protocol to be http (faster) unless a secure_get is requested.
    protocol = {
        'GET': {'https': (self.secure_get and Client.VO_HTTPSGET_PROTOCOL) or Client.VO_HTTPGET_PROTOCOL,
                'http': Client.VO_HTTPGET_PROTOCOL},
        'PUT': {'https': Client.VO_HTTPSPUT_PROTOCOL,
                'http': Client.VO_HTTPPUT_PROTOCOL}}
    # build the url for that will request the url that provides access to the node.
    url = "%s://%s" % (self.protocol, endpoints.transfer)
    logger.debug("URL: %s" % url)
    args = {
        'TARGET': uri,
        'DIRECTION': direction[method],
        'PROTOCOL': protocol[method][self.protocol],
        'view': view}
    if cutout is not None:
        args['cutout'] = cutout
    params = urllib.urlencode(args)
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    response = self.conn.session.get(url, params=params, headers=headers, allow_redirects=False)
    assert isinstance(response, requests.Response)
    logging.debug("Transfer Server said: {0}".format(response.content))
    if response.status_code == 303:
        # Normal case is a redirect
        url = response.headers.get('Location', None)
    elif response.status_code == 404:
        # The file doesn't exist
        raise OSError(errno.ENOENT, response.content, url)
    elif response.status_code == 409:
        raise OSError(errno.EREMOTE, response.content, url)
    elif response.status_code == 413:
        raise OSError(errno.E2BIG, response.content, url)
    else:
        logger.debug("Reverting to full negotiation")
        return self.get_node_url(uri,
                                 method=method,
                                 view=view,
                                 full_negotiation=True,
                                 limit=limit,
                                 next_uri=next_uri,
                                 cutout=cutout)
    logger.debug("Sending short cut url: {0}".format(url))
    return [url]
def link(self, src_uri, link_uri):
    """Make link_uri point to src_uri.

    :param src_uri: the existing resource, either a vospace uri or a http url
    :type src_uri: str
    :param link_uri: the vospace node to create that will be a link to src_uri
    :type link_uri: str
    """
    link_uri = self.fix_uri(link_uri)
    src_uri = self.fix_uri(src_uri)
    # if the link_uri points at an existing directory then we try and make a link into that directory
    if self.isdir(link_uri):
        link_uri = os.path.join(link_uri, os.path.basename(src_uri))
    with nested(self.nodeCache.volatile(src_uri), self.nodeCache.volatile(link_uri)):
        link_node = Node(link_uri, node_type="vos:LinkNode")
        ElementTree.SubElement(link_node.node, "target").text = src_uri
        data = str(link_node)
        size = len(data)
        # MJG
        print(data)
        url = self.get_node_url(link_uri)
        logger.debug("Got linkNode URL: {0}".format(url))
        self.conn.session.put(url, data=data, headers={'size': size, 'Content-type': 'text/xml'})
def move(self, src_uri, destination_uri):
    """Move src_uri to destination_uri.

    If destination_uri is a containerNode then src_uri is moved into it.

    :param src_uri: the VOSpace node to be moved.
    :type src_uri: str
    :param destination_uri: the VOSpace location to move to.
    :type destination_uri: str
    :return: did the move succeed?
    :rtype: bool
    """
    source = self.fix_uri(src_uri)
    target = self.fix_uri(destination_uri)
    # Invalidate both cache entries while the service-side move runs.
    with nested(self.nodeCache.volatile(source), self.nodeCache.volatile(target)):
        return self.transfer(source, target, view='move')
def _get(self, uri, view="defaultview", cutout=None):
with self.nodeCache.volatile(uri):
return self.transfer(uri, "pullFromVoSpace", view, cutout)
def _put(self, uri):
with self.nodeCache.volatile(uri):
return self.transfer(uri, "pushToVoSpace", view="defaultview")
def transfer(self, uri, direction, view=None, cutout=None):
    """Build the transfer XML document

    :param direction: is this a pushToVoSpace or a pullFromVoSpace ?
    :param uri: the uri to transfer from or to VOSpace.
    :param view: which view of the node (data/default/cutout/etc.) is being transferred
    :param cutout: a special parameter added to the 'cutout' view request. e.g. '[0][1:10,1:10]'
    :return: for view='move' a bool (success); otherwise the list of
             endpoint URLs negotiated with the service.
    :raises OSError: when the transfer service does not redirect.
    """
    endpoints = EndPoints(uri, basic_auth=self.conn.session.auth is not None)
    protocol = {"pullFromVoSpace": "{0}get".format(self.protocol),
                "pushToVoSpace": "{0}put".format(self.protocol)}
    transfer_xml = ElementTree.Element("vos:transfer")
    transfer_xml.attrib['xmlns:vos'] = Node.VOSNS
    ElementTree.SubElement(transfer_xml, "vos:target").text = uri
    ElementTree.SubElement(transfer_xml, "vos:direction").text = direction
    if view == 'move':
        ElementTree.SubElement(transfer_xml, "vos:keepBytes").text = "false"
    else:
        if view == 'defaultview' or view == 'data':  # MJG - data view not supported
            ElementTree.SubElement(transfer_xml, "vos:view").attrib['uri'] = "ivo://ivoa.net/vospace/core#defaultview"
        elif view is not None:
            vos_view = ElementTree.SubElement(transfer_xml, "vos:view")
            vos_view.attrib['uri'] = endpoints.view + "#{0}".format(view)
            if cutout is not None and view == 'cutout':
                param = ElementTree.SubElement(vos_view, "vos:param")
                param.attrib['uri'] = endpoints.cutout
                param.text = cutout
        protocol_element = ElementTree.SubElement(transfer_xml, "vos:protocol")
        protocol_element.attrib['uri'] = "{0}#{1}".format(Node.IVOAURL, protocol[direction])
    logging.debug(ElementTree.tostring(transfer_xml))
    url = "{0}://{1}".format(self.protocol,
                             endpoints.transfer)
    logging.debug("Sending to : {}".format(url))
    data = ElementTree.tostring(transfer_xml)
    resp = self.conn.session.post(url,
                                  data=data,
                                  allow_redirects=False,
                                  headers={'Content-type': 'application/x-www-form-urlencoded'})  # 'text/xml'})  # MJG
    logging.debug("{0}".format(resp))
    logging.debug("{0}".format(resp.content))
    if resp.status_code != 303 and resp.status_code != 302:  # MJG
        raise OSError(resp.status_code, "Failed to get transfer service response.")
    transfer_url = resp.headers.get('Location', None)
    # NOTE(review): inserting '/auth/' appears to route authenticated
    # sessions to the auth endpoint — confirm against the service layout.
    if self.conn.session.auth is not None and "auth" not in transfer_url:
        transfer_url = transfer_url.replace('/vospace/', '/vospace/auth/')
    logging.debug("Got back from transfer URL: %s" % transfer_url)
    # For a move this is the end of the transaction.
    if view == 'move':
        return not self.get_transfer_error(transfer_url, uri)
    # for get or put we need the protocol value
    xfer_resp = self.conn.session.get(transfer_url, allow_redirects=False)
    xfer_url = xfer_resp.headers.get('Location', transfer_url)  # MJG
    if self.conn.session.auth is not None and "auth" not in xfer_url:
        xfer_url = xfer_url.replace('/vospace/', '/vospace/auth/')
    xml_string = self.conn.session.get(xfer_url).content
    logging.debug("Transfer Document: %s" % xml_string)
    transfer_document = ElementTree.fromstring(xml_string)
    logging.debug("XML version: {0}".format(ElementTree.tostring(transfer_document)))
    all_protocols = transfer_document.findall(Node.PROTOCOL)
    if all_protocols is None or not len(all_protocols) > 0:
        return self.get_transfer_error(transfer_url, uri)
    result = []
    for protocol in all_protocols:
        for node in protocol.findall(Node.ENDPOINT):
            result.append(node.text)
    # if this is a connection to the 'rc' server then we reverse the
    # urllist to test the fail-over process
    if endpoints.server.startswith('rc'):
        result.reverse()
    return result
def get_transfer_error(self, url, uri):
    """Follow a transfer URL to the Error message

    Polls the UWS job phase until it leaves an active state, then maps
    the service error message to an errno-based OSError.

    :param url: The URL of the transfer request that had the error.
    :param uri: The uri that we were trying to transfer (get or put).
    :return: False when the job COMPLETED; a link target string when the
             error was an unsupported link; otherwise raises OSError.
    """
    # Map of service-side error strings to errno values.
    error_codes = {'NodeNotFound': errno.ENOENT,
                   'RequestEntityTooLarge': errno.E2BIG,
                   'PermissionDenied': errno.EACCES,
                   'OperationNotSupported': errno.EOPNOTSUPP,
                   'InternalFault': errno.EFAULT,
                   'ProtocolNotSupported': errno.EPFNOSUPPORT,
                   'ViewNotSupported': errno.ENOSYS,
                   'InvalidArgument': errno.EINVAL,
                   'InvalidURI': errno.EFAULT,
                   'TransferFailed': errno.EIO,
                   'DuplicateNode.': errno.EEXIST,
                   'NodeLocked': errno.EPERM}
    job_url = str.replace(url, "/results/transferDetails", "")
    try:
        phase_url = job_url + "/phase"
        sleep_time = 1
        # spinner characters shown while the job is still running
        roller = ('\\', '-', '/', '|', '\\', '-', '/', '|')
        phase = VOFile(phase_url, self.conn, method="GET",
                       follow_redirect=False).read()
        # do not remove the line below. It is used for testing
        logging.debug("Job URL: " + job_url + "/phase")
        while phase in ['PENDING', 'QUEUED', 'EXECUTING', 'UNKNOWN']:
            # poll the job. Sleeping time in between polls is doubling
            # each time until it gets to 32sec
            total_time_slept = 0
            if sleep_time <= 32:
                sleep_time *= 2
            slept = 0
            if logger.getEffectiveLevel() == logging.INFO:
                # at INFO level show an animated spinner while waiting
                while slept < sleep_time:
                    sys.stdout.write("\r%s %s" % (phase,
                                                  roller[total_time_slept % len(roller)]))
                    sys.stdout.flush()
                    slept += 1
                    total_time_slept += 1
                    time.sleep(1)
                sys.stdout.write("\r \n")
            else:
                time.sleep(sleep_time)
            phase = self.conn.session.get(phase_url, allow_redirects=False).content
            logging.debug("Async transfer Phase for url %s: %s " % (url, phase))
    except KeyboardInterrupt:
        # abort the job when receiving a Ctrl-C/Interrupt from the client
        logging.error("Received keyboard interrupt")
        self.conn.session.post(job_url + "/phase",
                               allow_redirects=False,
                               data="PHASE=ABORT",
                               headers={"Content-type": 'application/x-www-form-urlencoded'})  # MJG
        raise KeyboardInterrupt
    status = VOFile(phase_url, self.conn, method="GET",
                    follow_redirect=False).read()
    logger.debug("Phase: {0}".format(status))
    if status in ['COMPLETED']:
        # no error: the job finished successfully
        return False
    if status in ['HELD', 'SUSPENDED', 'ABORTED']:
        # re-queue the job and continue to monitor for completion.
        raise OSError("UWS status: {0}".format(status), errno.EFAULT)
    error_url = job_url + "/error"
    error_message = self.conn.session.get(error_url).content
    logger.debug("Got transfer error {0} on URI {1}".format(error_message, uri))
    # Check if the error was that the link type is unsupported and try and follow that link.
    target = re.search("Unsupported link target:(?P<target> .*)$", error_message)
    if target is not None:
        return target.group('target').strip()
    raise OSError(error_codes.get(error_message, errno.EFAULT),
                  "{0}: {1}".format(uri, error_message))
def open(self, uri, mode=os.O_RDONLY, view=None, head=False, url=None,
         limit=None, next_uri=None, size=None, cutout=None, byte_range=None,
         full_negotiation=False, possible_partial_read=False):
    """Create a VOFile connection to the specified uri or url.

    :rtype : VOFile
    :param uri: The uri of the VOSpace resource to create a connection to, override by specifying url
    :type uri: str, None
    :param mode: The mode os.O_RDONLY or os.O_WRONLY to open the connection with.
    :type mode: bit
    :param view: The view of the VOSpace resource, one of: default, data, cutout
    :type view: str, None
    :param head: Just return the http header of this request.
    :type head: bool
    :param url: Ignore the uri (ie don't look up the url using get_node_url) and just connect to this url
    :type url: str, None
    :param limit: limit response from vospace to this many child nodes. relevant for containerNode type
    :type limit: int, None
    :param next_uri: The uri of the last child node returned by a previous request on a containerNode
    :type next_uri: str, None
    :param size: The size of file to expect or be put to VOSpace
    :type size: int, None
    :param cutout: The cutout pattern to use during a get
    :type cutout: str, None
    :param byte_range: The range of bytes to request, rather than getting the entire file.
    :type byte_range: str, None
    :param full_negotiation: force this interaction to use the full UWS interaction to get the url for the resource
    :type full_negotiation: bool
    :param possible_partial_read:
    """
    # sometimes this is called with mode from ['w', 'r']
    # really that's an error, but I thought I'd just accept those are
    # os.O_RDONLY
    if type(mode) == str:
        mode = os.O_RDONLY
    # the url of the connection depends if we are 'getting', 'putting' or
    # 'posting' data
    method = None
    if mode == os.O_RDONLY:
        method = "GET"
    elif mode & (os.O_WRONLY | os.O_CREAT):
        method = "PUT"
    elif mode & os.O_APPEND:
        method = "POST"
    elif mode & os.O_TRUNC:
        method = "DELETE"
    if head:
        # a HEAD request overrides whatever the mode selected
        method = "HEAD"
    if not method:
        raise OSError(errno.EOPNOTSUPP, "Invalid access mode", mode)
    if uri is not None and view in ['data', 'cutout']:
        # Check if this is a target node.
        try:
            node = self.get_node(uri)
            if node.type == "vos:LinkNode":
                target = node.node.findtext(Node.TARGET)
                logger.debug("%s is a link to %s" % (node.uri, target))
                if target is None:
                    raise OSError(errno.ENOENT, "No target for link")
                else:
                    parts = URLParser(target)
                    if parts.scheme == 'vos':
                        # This is a link to another VOSpace node so lets open that instead.
                        return self.open(target, mode, view, head, url, limit,
                                         next_uri, size, cutout, byte_range)
                    else:
                        # A target external link
                        # TODO Need a way of passing along authentication.
                        if cutout is not None:
                            target = "{0}?cutout={1}".format(target, cutout)
                        return VOFile([target],
                                      self.conn,
                                      method=method,
                                      size=size,
                                      byte_range=byte_range,
                                      possible_partial_read=possible_partial_read)
        except OSError as e:
            # Node-not-found is tolerated here; the open proceeds below.
            if e.errno in [2, 404]:
                pass
            else:
                raise e
    if url is None:
        url = self.get_node_url(uri, method=method, view=view,
                                limit=limit, next_uri=next_uri, cutout=cutout,
                                full_negotiation=full_negotiation)
        if url is None:
            raise OSError(errno.EREMOTE)
    return VOFile(url, self.conn, method=method, size=size, byte_range=byte_range,
                  possible_partial_read=possible_partial_read)
def add_props(self, node):
    """Given a node structure do a POST of the XML to the VOSpace to
    update the node properties.

    Makes a copy of the current local state, fetches the server's copy,
    and only sends the properties that actually differ.

    :param node: the Node object to add some properties to.
    """
    pending = copy.deepcopy(node.props)
    current = self.get_node(node.uri, force=True).props
    # Drop properties the server already holds with the same non-None value.
    for name in current:
        unchanged = (name in pending and
                     current[name] == pending[name] and
                     current[name] is not None)
        if unchanged:
            del pending[name]
    node.node = node.create(node.uri, node_type=node.type, properties=pending)
    # Now write these new properties to the node location.
    payload = str(node)
    url = self.get_node_url(node.uri, method='GET')
    self.conn.session.post(url,
                           headers={'size': len(payload), 'Content-type': 'text/xml'},
                           data=payload)  # MJG
def create(self, node):
    """
    Create a (Container/Link/Data) Node on the VOSpace server.

    :param node: the Node that we are going to create on the server.
    :type node: Node
    :return: True on success
    :rtype: bool
    """
    payload = str(node)
    headers = {'size': len(payload), 'Content-type': 'text/xml'}
    target_url = self.get_node_url(node.uri, method='PUT')
    self.conn.session.put(target_url, data=payload, headers=headers)
    return True
def update(self, node, recursive=False):
    """Updates the node properties on the server. For non-recursive
    updates, node's properties are updated on the server. For
    recursive updates, node should only contain the properties to
    be changed in the node itself as well as all its children.

    :param node: the node to update.
    :param recursive: should this update be applied to all children? (True/False)
    :return: 0 on completion
    :raises OSError: EFAULT when the service could not be reached.
    """
    # Let's do this update using the async transfer method
    url = self.get_node_url(node.uri)
    endpoints = node.endpoints
    if recursive:
        property_url = "{0}://{1}".format(self.protocol, endpoints.properties)
        logger.debug("prop URL: {0}".format(property_url))
        try:
            resp = self.conn.session.post(property_url,
                                          allow_redirects=False,
                                          data=str(node),
                                          headers={'Content-type': 'text/xml'})
        except Exception as ex:
            logger.error(str(ex))
            raise ex
        if resp is None:
            raise OSError(errno.EFAULT, "Failed to connect VOSpace")
        logger.debug("Got prop-update response: {0}".format(resp.content))
        transfer_url = resp.headers.get('Location', None)
        logger.debug("Got job status redirect: {0}".format(transfer_url))
        # logger.debug("Got back %s from $Client.VOPropertiesEndPoint " % (con))
        # Start the job
        self.conn.session.post(transfer_url + "/phase",
                               allow_redirects=False,
                               data="PHASE=RUN",
                               headers={'Content-type': "application/x-www-form-urlencoded"})  # MJG
        # Block until the job finishes (or surface its error).
        self.get_transfer_error(transfer_url, node.uri)
    else:
        # Simple synchronous POST of the node document.
        resp = self.conn.session.post(url,
                                      data=str(node),
                                      allow_redirects=False,
                                      headers={'Content-type': 'text/xml'})  # MJG
        logger.debug("update response: {0}".format(resp.content))
    return 0
def mkdir(self, uri):
    """
    Create a ContainerNode on the service.

    :param uri: The URI of the ContainerNode to create on the service.
    :type uri: str
    :raises OSError: EEXIST when the container already exists.
    """
    uri = self.fix_uri(uri)
    container = Node(uri, node_type="vos:ContainerNode")
    url = self.get_node_url(uri)
    try:
        # MJG: strip any query string from the node URL
        if '?' in url:
            url = url[:url.rindex('?')]
        # MJG: the service requires an XML content type on the PUT
        self.conn.session.headers['Content-type'] = 'text/xml'
        response = self.conn.session.put(url, data=str(container))
        response.raise_for_status()
    except HTTPError as http_error:
        if http_error.response.status_code == 409:
            raise OSError(errno.EEXIST, 'ContainerNode {0} already exists'.format(uri))
        raise http_error
def delete(self, uri):
    """Delete the node

    :param uri: The (Container/Link/Data)Node to delete from the service.
    :raises requests.HTTPError: when the service rejects the delete.
    """
    uri = self.fix_uri(uri)
    logger.debug("delete {0}".format(uri))
    # Invalidate the cache entry while the node is being removed.
    with self.nodeCache.volatile(uri):
        target_url = self.get_node_url(uri, method='GET')
        self.conn.session.delete(target_url).raise_for_status()
def get_info_list(self, uri):
    """Retrieve a list of tuples of (NodeName, Info dict)

    Link nodes are chased to their destination (best effort) before the
    listing is produced.

    :param uri: the Node to get info about.
    """
    uri = self.fix_uri(uri)
    logger.debug(str(uri))
    node = self.get_node(uri, limit=None)
    logger.debug(str(node))
    # Follow links until we land on a non-link node; give up on error.
    while node.type == "vos:LinkNode":
        uri = node.target
        try:
            node = self.get_node(uri, limit=None)
        except Exception as e:
            logger.error(str(e))
            break
    info = {child.name: child.get_info() for child in node.node_list}
    if node.type in ["vos:DataNode", "vos:LinkNode"]:
        info[node.name] = node.get_info()
    return info.items()
def listdir(self, uri, force=False):
    """
    Walk through the directory structure a la os.walk.

    Follows LinkNodes to their destination location. Setting force=True
    makes sure no cached results are used.

    :param force: don't use cached values, retrieve from service.
    :param uri: The ContainerNode to get a listing of.
    :rtype: [str]
    """
    logger.debug(str(uri))
    node = self.get_node(uri, limit=None, force=force)
    # Resolve links before listing children.
    while node.type == "vos:LinkNode":
        uri = node.target
        node = self.get_node(uri, limit=None, force=force)
    return [child.name for child in node.node_list]
def _node_type(self, uri):
"""
Recursively follow links until the base Node is found.
:param uri: the VOSpace uri to recursively get the type of.
:return: the type of Node
:rtype: str
"""
node = self.get_node(uri, limit=0)
while node.type == "vos:LinkNode":
uri = node.target
if uri[0:4] == "vos:":
node = self.get_node(uri, limit=0)
else:
return "vos:DataNode"
return node.type
def isdir(self, uri):
    """Check to see if the given uri is or is a link to a containerNode.

    :param uri: a VOSpace Node URI to test.
    :rtype: bool
    """
    try:
        return self._node_type(uri) == "vos:ContainerNode"
    except OSError as ex:
        # A missing node is simply "not a directory".
        if ex.errno != errno.ENOENT:
            raise ex
        return False
def isfile(self, uri):
    """
    Check if the given uri is or is a link to a DataNode

    :param uri: the VOSpace Node URI to test.
    :rtype: bool
    """
    try:
        return self._node_type(uri) == "vos:DataNode"
    except OSError as ex:
        # A missing node is simply "not a file".
        if ex.errno != errno.ENOENT:
            raise ex
        return False
def access(self, uri, mode=os.O_RDONLY):
    """Test if the given VOSpace uri can be accessed in the way requested.

    :param uri: a VOSpace location.
    :param mode: os.O_RDONLY
    :rtype: bool
    """
    handle = self.open(uri, mode=mode)
    return isinstance(handle, VOFile)
def status(self, uri, code=None):
    """
    Check to see if this given uri points at a containerNode.

    This is done by checking the view=data header and seeing if you
    get an error.

    :param uri: the VOSpace (Container/Link/Data)Node to check access status on.
    :param code: NOT SUPPORTED.
    :raises OSError: ENOSYS when a legacy 'code' value is supplied.
    """
    # BUG FIX: the guard was inverted ("if not code"), which raised
    # ENOSYS on every normal call (code defaults to None). Only callers
    # that still pass the unsupported 'code' option should get the error.
    if code:
        raise OSError(errno.ENOSYS, "Use of 'code' option values no longer supported.")
    self.get_node(uri)
    return True
def get_job_status(self, url):
    """ Returns the status of a job

    :param url: the URL of the UWS job to get status of.
    :rtype: str
    """
    job = VOFile(url, self.conn, method="GET", follow_redirect=False)
    return job.read()
| [
"logging.getLogger",
"string.join",
"requests.Session",
"logging.debug",
"re.compile",
"sys.getdefaultencoding",
"os.getuid",
"netrc.netrc",
"time.sleep",
"copy.deepcopy",
"mimetypes.guess_type",
"logging.error",
"re.search",
"sys.getfilesystemencoding",
"xml.etree.ElementTree.parse",
... | [((1847, 1871), 'logging.getLogger', 'logging.getLogger', (['"""vos"""'], {}), "('vos')\n", (1864, 1871), False, 'import logging\n'), ((2373, 2412), 'os.getenv', 'os.getenv', (['"""VOSPACE_ARCHIVE"""', '"""vospace"""'], {}), "('VOSPACE_ARCHIVE', 'vospace')\n", (2382, 2412), False, 'import os\n'), ((2636, 2680), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (2678, 2680), False, 'import requests\n'), ((3616, 3629), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (3627, 3629), False, 'import hashlib\n'), ((43084, 43121), 'os.getenv', 'os.getenv', (['"""VOSPACE_WEBSERVICE"""', 'None'], {}), "('VOSPACE_WEBSERVICE', None)\n", (43093, 43121), False, 'import os\n'), ((47092, 47127), 'os.getenv', 'os.getenv', (['"""VOSPACE_CERTFILE"""', 'None'], {}), "('VOSPACE_CERTFILE', None)\n", (47101, 47127), False, 'import os\n'), ((52969, 52988), 're.compile', 're.compile', (['"""[*?[]"""'], {}), "('[*?[]')\n", (52979, 52988), False, 'import re\n'), ((1954, 1975), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (1973, 1975), False, 'import logging\n'), ((2681, 2710), 'logging.getLogger', 'logging.getLogger', (['"""requests"""'], {}), "('requests')\n", (2698, 2710), False, 'import logging\n'), ((3130, 3183), 'time.strptime', 'time.strptime', (['str_date[0:right]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(str_date[0:right], '%Y-%m-%dT%H:%M:%S')\n", (3143, 3183), False, 'import time\n'), ((4174, 4318), 're.match', 're.match', (['"""(^(?P<scheme>[a-zA-Z]*):)?(//(?P<netloc>(?P<server>[^!~]*)[!~](?P<service>[^/]*)))?(?P<path>/?[^?]*)?(?P<args>\\\\?.*)?"""', 'url'], {}), "(\n '(^(?P<scheme>[a-zA-Z]*):)?(//(?P<netloc>(?P<server>[^!~]*)[!~](?P<service>[^/]*)))?(?P<path>/?[^?]*)?(?P<args>\\\\?.*)?'\n , url)\n", (4182, 4318), False, 'import re\n'), ((6159, 6177), 'requests.Session', 'requests.Session', ([], {}), '()\n', (6175, 6177), False, 'import requests\n'), ((9767, 9793), 'os.path.basename', 'os.path.basename', 
(['self.uri'], {}), '(self.uri)\n', (9783, 9793), False, 'import os\n'), ((11667, 11678), 'time.time', 'time.time', ([], {}), '()\n', (11676, 11678), False, 'import time\n'), ((18972, 19024), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['properties[0]', 'Node.PROPERTY'], {}), '(properties[0], Node.PROPERTY)\n', (18994, 19024), False, 'from xml.etree import ElementTree\n'), ((21112, 21139), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""node"""'], {}), "('node')\n", (21131, 21139), False, 'from xml.etree import ElementTree\n'), ((21468, 21513), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['node', 'Node.PROPERTIES'], {}), '(node, Node.PROPERTIES)\n', (21490, 21513), False, 'from xml.etree import ElementTree\n'), ((22298, 22340), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['node', 'Node.ACCEPTS'], {}), '(node, Node.ACCEPTS)\n', (22320, 22340), False, 'from xml.etree import ElementTree\n'), ((22480, 22523), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['node', 'Node.PROVIDES'], {}), '(node, Node.PROVIDES)\n', (22502, 22523), False, 'from xml.etree import ElementTree\n'), ((34959, 34993), 'requests.Request', 'requests.Request', (['self.method', 'url'], {}), '(self.method, url)\n', (34975, 34993), False, 'import requests\n'), ((49322, 49333), 'NodeCache.NodeCache', 'NodeCache', ([], {}), '()\n', (49331, 49333), False, 'from NodeCache import NodeCache\n'), ((50283, 50306), 'os.path.split', 'os.path.split', (['pathname'], {}), '(pathname)\n', (50296, 50306), False, 'import os\n'), ((52370, 52400), 'fnmatch.filter', 'fnmatch.filter', (['names', 'pattern'], {}), '(names, pattern)\n', (52384, 52400), False, 'import fnmatch\n'), ((58845, 58959), 're.match', 're.match', (['"""(?P<filename>[^\\\\[]*)(?P<ext>(\\\\[\\\\d*:?\\\\d*\\\\])?(\\\\[\\\\d*:?\\\\d*,?\\\\d*:?\\\\d*\\\\])?)"""', 'parts.path'], {}), "(\n 
'(?P<filename>[^\\\\[]*)(?P<ext>(\\\\[\\\\d*:?\\\\d*\\\\])?(\\\\[\\\\d*:?\\\\d*,?\\\\d*:?\\\\d*\\\\])?)'\n , parts.path)\n", (58853, 58959), False, 'import re\n'), ((73059, 73094), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""vos:transfer"""'], {}), "('vos:transfer')\n", (73078, 73094), False, 'from xml.etree import ElementTree\n'), ((74472, 74506), 'xml.etree.ElementTree.tostring', 'ElementTree.tostring', (['transfer_xml'], {}), '(transfer_xml)\n', (74492, 74506), False, 'from xml.etree import ElementTree\n'), ((75265, 75327), 'logging.debug', 'logging.debug', (["('Got back from transfer URL: %s' % transfer_url)"], {}), "('Got back from transfer URL: %s' % transfer_url)\n", (75278, 75327), False, 'import logging\n'), ((75907, 75958), 'logging.debug', 'logging.debug', (["('Transfer Document: %s' % xml_string)"], {}), "('Transfer Document: %s' % xml_string)\n", (75920, 75958), False, 'import logging\n'), ((75987, 76021), 'xml.etree.ElementTree.fromstring', 'ElementTree.fromstring', (['xml_string'], {}), '(xml_string)\n', (76009, 76021), False, 'from xml.etree import ElementTree\n'), ((80350, 80418), 're.search', 're.search', (['"""Unsupported link target:(?P<target> .*)$"""', 'error_message'], {}), "('Unsupported link target:(?P<target> .*)$', error_message)\n", (80359, 80418), False, 'import re\n'), ((85629, 85654), 'copy.deepcopy', 'copy.deepcopy', (['node.props'], {}), '(node.props)\n', (85642, 85654), False, 'import copy\n'), ((3245, 3261), 'time.localtime', 'time.localtime', ([], {}), '()\n', (3259, 3261), False, 'import time\n'), ((10708, 10803), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['properties', 'Node.PROPERTY'], {'attrib': "{'uri': uri, 'readOnly': 'false'}"}), "(properties, Node.PROPERTY, attrib={'uri': uri,\n 'readOnly': 'false'})\n", (10730, 10803), False, 'from xml.etree import ElementTree\n'), ((13871, 13882), 'os.getuid', 'os.getuid', ([], {}), '()\n', (13880, 13882), False, 'import os\n'), ((13933, 13944), 
'os.getgid', 'os.getgid', ([], {}), '()\n', (13942, 13944), False, 'import os\n'), ((21581, 21635), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['properties_node', 'Node.PROPERTY'], {}), '(properties_node, Node.PROPERTY)\n', (21603, 21635), False, 'from xml.etree import ElementTree\n'), ((23110, 23150), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['node', 'Node.NODES'], {}), '(node, Node.NODES)\n', (23132, 23150), False, 'from xml.etree import ElementTree\n'), ((25254, 25275), 'string.join', 'string.join', (['perm', '""""""'], {}), "(perm, '')\n", (25265, 25275), False, 'import string\n'), ((30194, 30212), 'copy.deepcopy', 'deepcopy', (['url_list'], {}), '(url_list)\n', (30202, 30212), False, 'from copy import deepcopy\n'), ((47319, 47351), 'os.path.join', 'os.path.join', (['certpath', 'certfile'], {}), '(certpath, certfile)\n', (47331, 47351), False, 'import os\n'), ((47367, 47399), 'os.access', 'os.access', (['certfilepath', 'os.R_OK'], {}), '(certfilepath, os.R_OK)\n', (47376, 47399), False, 'import os\n'), ((53984, 54028), 're.search', 're.search', (['"""([^\\\\[\\\\]]*)(\\\\[.*\\\\])$"""', 'source'], {}), "('([^\\\\[\\\\]]*)(\\\\[.*\\\\])$', source)\n", (53993, 54028), False, 'import re\n'), ((59041, 59101), 're.match', 're.match', (['"""^[_\\\\-\\\\(\\\\)=\\\\+!,;:@&\\\\*\\\\$\\\\.\\\\w~]*$"""', 'filename'], {}), "('^[_\\\\-\\\\(\\\\)=\\\\+!,;:@&\\\\*\\\\$\\\\.\\\\w~]*$', filename)\n", (59049, 59101), False, 'import re\n'), ((73157, 73207), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['transfer_xml', '"""vos:target"""'], {}), "(transfer_xml, 'vos:target')\n", (73179, 73207), False, 'from xml.etree import ElementTree\n'), ((73227, 73280), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['transfer_xml', '"""vos:direction"""'], {}), "(transfer_xml, 'vos:direction')\n", (73249, 73280), False, 'from xml.etree import ElementTree\n'), ((74085, 74137), 'xml.etree.ElementTree.SubElement', 
'ElementTree.SubElement', (['transfer_xml', '"""vos:protocol"""'], {}), "(transfer_xml, 'vos:protocol')\n", (74107, 74137), False, 'from xml.etree import ElementTree\n'), ((74258, 74292), 'xml.etree.ElementTree.tostring', 'ElementTree.tostring', (['transfer_xml'], {}), '(transfer_xml)\n', (74278, 74292), False, 'from xml.etree import ElementTree\n'), ((78029, 78076), 'logging.debug', 'logging.debug', (["('Job URL: ' + job_url + '/phase')"], {}), "('Job URL: ' + job_url + '/phase')\n", (78042, 78076), False, 'import logging\n'), ((1164, 1175), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (1173, 1175), False, 'from contextlib import ExitStack, contextmanager\n'), ((3216, 3229), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (3227, 3229), False, 'import time\n'), ((11082, 11116), 'xml.etree.ElementTree.ElementTree', 'ElementTree.ElementTree', (['self.node'], {}), '(self.node)\n', (11105, 11116), False, 'from xml.etree import ElementTree\n'), ((21413, 21438), 'mimetypes.guess_type', 'mimetypes.guess_type', (['uri'], {}), '(uri)\n', (21433, 21438), False, 'import mimetypes\n'), ((22350, 22389), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['accepts', '"""view"""'], {}), "(accepts, 'view')\n", (22372, 22389), False, 'from xml.etree import ElementTree\n'), ((22532, 22572), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['provides', '"""view"""'], {}), "(provides, 'view')\n", (22554, 22572), False, 'from xml.etree import ElementTree\n'), ((22651, 22691), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['provides', '"""view"""'], {}), "(provides, 'view')\n", (22673, 22691), False, 'from xml.etree import ElementTree\n'), ((47260, 47282), 'os.getenv', 'os.getenv', (['"""HOME"""', '"""."""'], {}), "('HOME', '.')\n", (47269, 47282), False, 'import os\n'), ((52736, 52767), 'os.path.join', 'os.path.join', (['dirname', 'basename'], {}), '(dirname, basename)\n', (52748, 52767), False, 'import os\n'), ((59428, 59450), 
'os.path.normpath', 'os.path.normpath', (['path'], {}), '(path)\n', (59444, 59450), False, 'import os\n'), ((70808, 70833), 'os.path.basename', 'os.path.basename', (['src_uri'], {}), '(src_uri)\n', (70824, 70833), False, 'import os\n'), ((71003, 71051), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['link_node.node', '"""target"""'], {}), "(link_node.node, 'target')\n", (71025, 71051), False, 'from xml.etree import ElementTree\n'), ((73338, 73391), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['transfer_xml', '"""vos:keepBytes"""'], {}), "(transfer_xml, 'vos:keepBytes')\n", (73360, 73391), False, 'from xml.etree import ElementTree\n'), ((76070, 76109), 'xml.etree.ElementTree.tostring', 'ElementTree.tostring', (['transfer_document'], {}), '(transfer_document)\n', (76090, 76109), False, 'from xml.etree import ElementTree\n'), ((79074, 79142), 'logging.debug', 'logging.debug', (["('Async transfer Phase for url %s: %s ' % (url, phase))"], {}), "('Async transfer Phase for url %s: %s ' % (url, phase))\n", (79087, 79142), False, 'import logging\n'), ((79267, 79311), 'logging.error', 'logging.error', (['"""Received keyboard interrupt"""'], {}), "('Received keyboard interrupt')\n", (79280, 79311), False, 'import logging\n'), ((5788, 5824), 'os.access', 'os.access', (['vospace_certfile', 'os.F_OK'], {}), '(vospace_certfile, os.F_OK)\n', (5797, 5824), False, 'import os\n'), ((22860, 22900), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['provides', '"""view"""'], {}), "(provides, 'view')\n", (22882, 22900), False, 'from xml.etree import ElementTree\n'), ((41194, 41228), 'html2text.html2text', 'html2text.html2text', (['msg', 'self.url'], {}), '(msg, self.url)\n', (41213, 41228), False, 'import html2text\n'), ((51355, 51382), 'os.path.join', 'os.path.join', (['dirname', 'name'], {}), '(dirname, name)\n', (51367, 51382), False, 'import os\n'), ((52077, 52104), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), 
'()\n', (52102, 52104), False, 'import sys\n'), ((52108, 52132), 'sys.getdefaultencoding', 'sys.getdefaultencoding', ([], {}), '()\n', (52130, 52132), False, 'import sys\n'), ((60842, 60865), 'io.StringIO', 'StringIO', (['vo_xml_string'], {}), '(vo_xml_string)\n', (60850, 60865), False, 'from io import StringIO\n'), ((60929, 60956), 'xml.etree.ElementTree.parse', 'ElementTree.parse', (['xml_file'], {}), '(xml_file)\n', (60946, 60956), False, 'from xml.etree import ElementTree\n'), ((73694, 73742), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['transfer_xml', '"""vos:view"""'], {}), "(transfer_xml, 'vos:view')\n", (73716, 73742), False, 'from xml.etree import ElementTree\n'), ((78860, 78904), 'sys.stdout.write', 'sys.stdout.write', (["'\\r \\n'"], {}), "('\\r \\n')\n", (78876, 78904), False, 'import sys\n'), ((78947, 78969), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (78957, 78969), False, 'import time\n'), ((6412, 6425), 'netrc.netrc', 'netrc.netrc', ([], {}), '()\n', (6423, 6425), False, 'import netrc\n'), ((36111, 36136), 'mimetypes.guess_type', 'mimetypes.guess_type', (['url'], {}), '(url)\n', (36131, 36136), False, 'import mimetypes\n'), ((52898, 52929), 'os.path.join', 'os.path.join', (['dirname', 'basename'], {}), '(dirname, basename)\n', (52910, 52929), False, 'import os\n'), ((55730, 55750), 'os.stat', 'os.stat', (['destination'], {}), '(destination)\n', (55737, 55750), False, 'import os\n'), ((73525, 73573), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['transfer_xml', '"""vos:view"""'], {}), "(transfer_xml, 'vos:view')\n", (73547, 73573), False, 'from xml.etree import ElementTree\n'), ((73909, 73954), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['vos_view', '"""vos:param"""'], {}), "(vos_view, 'vos:param')\n", (73931, 73954), False, 'from xml.etree import ElementTree\n'), ((78702, 78720), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (78718, 78720), False, 'import 
sys\n'), ((78826, 78839), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (78836, 78839), False, 'import time\n'), ((32746, 32780), 'html2text.html2text', 'html2text.html2text', (['msg', 'self.url'], {}), '(msg, self.url)\n', (32765, 32780), False, 'import html2text\n'), ((58611, 58631), 'urllib.parse.urlparse', 'urlparse', (['parts.args'], {}), '(parts.args)\n', (58619, 58631), False, 'from urllib.parse import parse_qs, urlparse\n'), ((62877, 62904), 'xml.etree.ElementTree.parse', 'ElementTree.parse', (['xml_file'], {}), '(xml_file)\n', (62894, 62904), False, 'from xml.etree import ElementTree\n')] |
import sys
from logs.logger import log
from utils import check_internet , get_public_ip
import bot
if __name__ == "__main__":
    # Only start the bot when an internet connection is available.
    # NOTE: `check_internet() is True` was replaced with a plain truthiness
    # test -- identity comparison against True is fragile and non-idiomatic
    # (assumes check_internet returns a bool; its name suggests it does).
    if check_internet():
        try:
            log.info(f'Internet connection found : {get_public_ip()}')
            bot.run()
        except KeyboardInterrupt:
            # Graceful shutdown on Ctrl+C.
            sys.exit()
    else:
        log.info('Please check your internet connection')
        sys.exit()
| [
"logs.logger.log.info",
"bot.run",
"utils.get_public_ip",
"sys.exit",
"utils.check_internet"
] | [((134, 150), 'utils.check_internet', 'check_internet', ([], {}), '()\n', (148, 150), False, 'from utils import check_internet, get_public_ip\n'), ((256, 265), 'bot.run', 'bot.run', ([], {}), '()\n', (263, 265), False, 'import bot\n'), ((368, 417), 'logs.logger.log.info', 'log.info', (['"""Please check your internet connection"""'], {}), "('Please check your internet connection')\n", (376, 417), False, 'from logs.logger import log\n'), ((430, 440), 'sys.exit', 'sys.exit', ([], {}), '()\n', (438, 440), False, 'import sys\n'), ((331, 341), 'sys.exit', 'sys.exit', ([], {}), '()\n', (339, 341), False, 'import sys\n'), ((225, 240), 'utils.get_public_ip', 'get_public_ip', ([], {}), '()\n', (238, 240), False, 'from utils import check_internet, get_public_ip\n')] |
from robot.api.parsing import ModelTransformer, Token
try:
from robot.api.parsing import InlineIfHeader
except ImportError:
InlineIfHeader = None
from robotidy.disablers import skip_section_if_disabled
from robotidy.utils import ROBOT_VERSION
EOL = Token(Token.EOL)
CONTINUATION = Token(Token.CONTINUATION)
class SplitTooLongLine(ModelTransformer):
    """
    Split too long lines.
    If any line in keyword call exceeds given length limit (120 by default) it will be
    split:
        Keyword With Longer Name    ${arg1}    ${arg2}    ${arg3}  # let's assume that arg2 is at 120 char
    To:
        # let's assume that arg2 is at 120 char
        Keyword With Longer Name
        ...    ${arg1}
        ...    ${arg2}
        ...    ${arg3}
    Allowed line length is configurable using global parameter ``--line-length``:
        robotidy --line-length 140 src.robot
    Or using dedicated for this transformer parameter ``line_length``:
        robotidy --configure SplitTooLongLine:line_length:140 src.robot
    Using ``split_on_every_arg`` flag (``True`` by default), you can force the formatter to fill arguments in one line
    until character limit:
        Keyword With Longer Name    ${arg1}
        ...    ${arg2}    ${arg3}
    Supports global formatting params: ``spacecount``, ``separator``, ``--startline`` and ``--endline``.
    See https://robotidy.readthedocs.io/en/latest/transformers/SplitTooLongLine.html for more examples.
    """

    def __init__(self, line_length: int = None, split_on_every_arg: bool = True):
        super().__init__()
        self._line_length = line_length
        self.split_on_every_arg = split_on_every_arg

    @property
    def line_length(self):
        # Fall back to the globally configured --line-length unless this
        # transformer was configured with its own limit.
        return self.formatting_config.line_length if self._line_length is None else self._line_length

    @skip_section_if_disabled
    def visit_Section(self, node):  # noqa
        return self.generic_visit(node)

    def visit_If(self, node):  # noqa
        # Inline IF (Robot Framework 5+) cannot carry continuation markers,
        # so it is returned unchanged.
        if self.is_inline(node):
            return node
        if node.orelse:
            self.generic_visit(node.orelse)
        return self.generic_visit(node)

    @staticmethod
    def is_inline(node):
        """Return True if ``node`` is an inline IF header (RF >= 5 only)."""
        return ROBOT_VERSION.major > 4 and isinstance(node.header, InlineIfHeader)

    def visit_KeywordCall(self, node):  # noqa
        # Skip calls that already fit within the limit or are disabled in place.
        if all(line[-1].end_col_offset < self.line_length for line in node.lines):
            return node
        if self.disablers.is_node_disabled(node, full_match=False):
            return node
        return self.split_keyword_call(node)

    @staticmethod
    def join_on_separator(tokens, separator):
        """Yield each token followed by ``separator``."""
        for token in tokens:
            yield token
            yield separator

    @staticmethod
    def split_to_multiple_lines(tokens, indent, separator):
        """Yield ``tokens`` each on its own line, continuation-marked after the first."""
        first = True
        for token in tokens:
            yield indent
            if not first:
                yield CONTINUATION
                yield separator
            yield token
            yield EOL
            first = False

    def split_keyword_call(self, node):
        """Rewrite a too-long keyword call using ``...`` continuation lines.

        Comments are hoisted above the call; arguments are placed one per
        line (or packed up to the limit when ``split_on_every_arg`` is off).
        """
        separator = Token(Token.SEPARATOR, self.formatting_config.separator)
        indent = node.tokens[0]
        split_every_arg = self.split_on_every_arg
        keyword = node.get_token(Token.KEYWORD)
        line = [indent, *self.join_on_separator(node.get_tokens(Token.ASSIGN), separator), keyword]
        if not self.col_fit_in_line(line):
            # Assignments plus the keyword name alone exceed the limit: put
            # every assignment on its own continuation line and force one
            # argument per line. (BUGFIX: this assignment used to be a bare
            # no-op expression, so the forced split never took effect.)
            split_every_arg = True
            head = [
                *self.split_to_multiple_lines(node.get_tokens(Token.ASSIGN), indent=indent, separator=separator),
                indent,
                CONTINUATION,
                separator,
                keyword,
            ]
            line = []
        else:
            head = []
        comments = []
        # Comments with separators inside them are split into
        # [COMMENT, SEPARATOR, COMMENT] tokens in the AST, so in order to preserve the
        # original comment, we need a lookback on the separator tokens.
        last_separator = None
        rest = node.tokens[node.tokens.index(keyword) + 1 :]
        for token in rest:
            if token.type == Token.SEPARATOR:
                last_separator = token
            elif token.type in {Token.EOL, Token.CONTINUATION}:
                continue
            elif token.type == Token.COMMENT:
                # AST splits comments with separators, e.g.
                #
                # "# Comment rest" -> ["# Comment", "   ", "rest"].
                #
                # Notice the third value not starting with a hash - that's what this
                # condition is about:
                if not str(token).startswith("#"):
                    # -2 because -1 is the EOL
                    comments[-2].value += last_separator.value + token.value
                else:
                    comments += [indent, token, EOL]
            elif token.type == Token.ARGUMENT:
                if token.value == "":
                    token.value = "${EMPTY}"
                # Use the (possibly forced) local flag so the "keyword did not
                # fit" branch above also triggers one-argument-per-line mode.
                if split_every_arg or not self.col_fit_in_line(line + [separator, token]):
                    line.append(EOL)
                    head += line
                    line = [indent, CONTINUATION, separator, token]
                else:
                    line += [separator, token]
        # last line
        line.append(EOL)
        head += line
        node.tokens = comments + head
        return node

    def col_fit_in_line(self, tokens):
        """Return True if ``tokens`` rendered on one line fit within the limit."""
        return self.len_token_text(tokens) < self.line_length

    @staticmethod
    def len_token_text(tokens):
        return sum(len(token.value) for token in tokens)
| [
"robot.api.parsing.Token"
] | [((259, 275), 'robot.api.parsing.Token', 'Token', (['Token.EOL'], {}), '(Token.EOL)\n', (264, 275), False, 'from robot.api.parsing import ModelTransformer, Token\n'), ((291, 316), 'robot.api.parsing.Token', 'Token', (['Token.CONTINUATION'], {}), '(Token.CONTINUATION)\n', (296, 316), False, 'from robot.api.parsing import ModelTransformer, Token\n'), ((3090, 3146), 'robot.api.parsing.Token', 'Token', (['Token.SEPARATOR', 'self.formatting_config.separator'], {}), '(Token.SEPARATOR, self.formatting_config.separator)\n', (3095, 3146), False, 'from robot.api.parsing import ModelTransformer, Token\n')] |
"""
Utility for creating a Python repl.
::
from prompt_toolkit.contrib.repl import embed
embed(globals(), locals(), vi_mode=False)
"""
# Warning: don't import `print_function` from __future__, otherwise we will
# also get the print_function inside `eval` on Python 2.7.
from __future__ import unicode_literals
from pygments import highlight
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.lexers import PythonTracebackLexer
from prompt_toolkit import AbortAction, Exit
from prompt_toolkit.contrib.python_input import PythonCommandLineInterface, PythonStyle, AutoCompletionStyle
from six import exec_
import sys
import os
import traceback
__all__ = ('PythonRepl', 'embed')
class PythonRepl(PythonCommandLineInterface):
    """
    Read-Eval-Print Loop built on top of ``PythonCommandLineInterface``.

    Reads statements from the prompt, evaluates them against the
    globals/locals supplied by the interface, prints results in an
    ``Out[n]:`` style, and renders tracebacks with Pygments highlighting.
    """

    def start_repl(self, startup_paths=None):
        """
        Start the Read-Eval-Print Loop.

        :param startup_paths: Array of paths to Python files.
        """
        self._execute_startup(startup_paths)

        # Run REPL loop until Exit.
        try:
            while True:
                # Read
                document = self.read_input(
                    on_abort=AbortAction.RETRY,
                    on_exit=AbortAction.RAISE_EXCEPTION)
                line = document.text

                # Skip blank / whitespace-only input without bumping the counter.
                if line and not line.isspace():
                    try:
                        # Eval and print.
                        self._execute(line)
                    except KeyboardInterrupt as e:  # KeyboardInterrupt doesn't inherit from Exception.
                        self._handle_keyboard_interrupt(e)
                    except Exception as e:
                        self._handle_exception(e)
                    self.current_statement_index += 1
        except Exit:
            pass

    def _execute_startup(self, startup_paths):
        """
        Load and execute startup file.
        """
        if startup_paths:
            for path in startup_paths:
                with open(path, 'r') as f:
                    # Compile with the file's path so tracebacks name the file.
                    code = compile(f.read(), path, 'exec')
                    exec_(code, self.get_globals(), self.get_locals())

    def _execute(self, line):
        """
        Evaluate the line and print the result.

        Lines starting with ``!`` are run as shell commands; everything else
        is tried as an expression first (so its value can be displayed) and
        falls back to statement execution on SyntaxError.
        """
        if line[0:1] == '!':
            # Run as shell command
            os.system(line[1:])
        else:
            # Try eval first
            try:
                result = eval(line, self.get_globals(), self.get_locals())
                # IPython-style result variables: `_` and `_<n>`.
                locals = self.get_locals()
                locals['_'] = locals['_%i' % self.current_statement_index] = result
                if result is not None:
                    try:
                        self.stdout.write('Out[%i]: %r\n' % (self.current_statement_index, result))
                    except UnicodeDecodeError:
                        # In Python 2: `__repr__` should return a bytestring,
                        # so to put it in a unicode context could raise an
                        # exception that the 'ascii' codec can't decode certain
                        # characters. Decode as utf-8 in that case.
                        self.stdout.write('Out[%i]: %s\n' % (self.current_statement_index, repr(result).decode('utf-8')))
            # If not a valid `eval` expression, run using `exec` instead.
            except SyntaxError:
                exec_(line, self.get_globals(), self.get_locals())
            self.stdout.write('\n')
        self.stdout.flush()

    def _handle_exception(self, e):
        """Print a syntax-highlighted traceback for *e*, hiding REPL frames."""
        # Instead of just calling ``traceback.format_exc``, we take the
        # traceback and skip the bottom calls of this framework.
        t, v, tb = sys.exc_info()
        tblist = traceback.extract_tb(tb)[3:]
        l = traceback.format_list(tblist)
        if l:
            l.insert(0, "Traceback (most recent call last):\n")
        l.extend(traceback.format_exception_only(t, v))
        tb = ''.join(l)

        # Format exception and write to output.
        self.stdout.write(highlight(tb, PythonTracebackLexer(), Terminal256Formatter()))
        self.stdout.write('%s\n\n' % e)
        self.stdout.flush()

    def _handle_keyboard_interrupt(self, e):
        """Report Ctrl+C during evaluation without leaving the REPL."""
        self.stdout.write('\rKeyboardInterrupt\n\n')
        self.stdout.flush()
def embed(globals=None, locals=None, vi_mode=False, history_filename=None, no_colors=False,
          autocompletion_style=AutoCompletionStyle.POPUP_MENU, startup_paths=None, always_multiline=False):
    """
    Embed a Python shell at the current point in your program.

    Works like `IPython.embed` / `bpython.embed`. ::

        from prompt_toolkit.contrib.repl import embed
        embed(globals(), locals(), vi_mode=False)

    :param vi_mode: Boolean. Use Vi instead of Emacs key bindings.
    """
    globals = globals or {}
    locals = locals or globals
    style = None if no_colors else PythonStyle

    # The interface takes callables so the namespaces are re-read on access.
    repl = PythonRepl(lambda: globals, lambda: locals,
                      vi_mode=vi_mode,
                      history_filename=history_filename,
                      style=style,
                      autocompletion_style=autocompletion_style,
                      always_multiline=always_multiline)
    repl.start_repl(startup_paths=startup_paths)
| [
"traceback.format_exception_only",
"traceback.format_list",
"pygments.formatters.terminal256.Terminal256Formatter",
"pygments.lexers.PythonTracebackLexer",
"sys.exc_info",
"os.system",
"traceback.extract_tb"
] | [((3670, 3684), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3682, 3684), False, 'import sys\n'), ((3743, 3772), 'traceback.format_list', 'traceback.format_list', (['tblist'], {}), '(tblist)\n', (3764, 3772), False, 'import traceback\n'), ((2317, 2336), 'os.system', 'os.system', (['line[1:]'], {}), '(line[1:])\n', (2326, 2336), False, 'import os\n'), ((3702, 3726), 'traceback.extract_tb', 'traceback.extract_tb', (['tb'], {}), '(tb)\n', (3722, 3726), False, 'import traceback\n'), ((3868, 3905), 'traceback.format_exception_only', 'traceback.format_exception_only', (['t', 'v'], {}), '(t, v)\n', (3899, 3905), False, 'import traceback\n'), ((4020, 4042), 'pygments.lexers.PythonTracebackLexer', 'PythonTracebackLexer', ([], {}), '()\n', (4040, 4042), False, 'from pygments.lexers import PythonTracebackLexer\n'), ((4044, 4066), 'pygments.formatters.terminal256.Terminal256Formatter', 'Terminal256Formatter', ([], {}), '()\n', (4064, 4066), False, 'from pygments.formatters.terminal256 import Terminal256Formatter\n')] |
from base import FeatureBase
from difflib import SequenceMatcher
class FeatureEdits(FeatureBase):
    name = 'edits'
    desc = 'counts of word-based edit operations'

    def run(self, trg, src):
        """Count word-level insert/delete/replace ops turning *src* into *trg*."""
        opcodes = SequenceMatcher(None, src.split(), trg.split()).get_opcodes()
        tally = {'insert': 0, 'delete': 0, 'replace': 0}
        for tag, _, _, _, _ in opcodes:
            if tag in tally:
                tally[tag] += 1
        return "EditIns= {} EditDel= {} EditSub= {}" \
            .format(tally['insert'], tally['delete'], tally['replace'])
class FeatureChars(FeatureBase):
    name = 'charedits'
    desc = 'counts of character-based edit operations'

    def run(self, trg, src):
        """Count character-level insert/delete/replace ops turning *src* into *trg*."""
        tally = {}
        for tag, _, _, _, _ in SequenceMatcher(None, src, trg).get_opcodes():
            tally[tag] = tally.get(tag, 0) + 1
        return "CharIns= {} CharDel= {} CharSub= {}" \
            .format(tally.get('insert', 0),
                    tally.get('delete', 0),
                    tally.get('replace', 0))
| [
"difflib.SequenceMatcher"
] | [((670, 701), 'difflib.SequenceMatcher', 'SequenceMatcher', (['None', 'src', 'trg'], {}), '(None, src, trg)\n', (685, 701), False, 'from difflib import SequenceMatcher\n')] |
# MIT License
# Copyright 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ==============================================================================
import os
import numpy as np
import pytest
from astropy.io import fits
import morpheus_core.helpers.fits_helper as fh
import morpheus_core.tests.helpers as helper
@pytest.mark.unit
def test_open_file():
    """Tests morpheus_core.helpers.fits_helper.open_file"""
    helper.setup()
    sample_path = helper.make_sample_file()
    expected = np.arange(100).reshape(10, 10)

    hdul, data = fh.open_file(sample_path)
    np.testing.assert_array_equal(expected, data)

    helper.tear_down()
@pytest.mark.unit
def test_open_files():
    """Tests morpheus_core.helpers.fits_helper.open_files"""
    helper.setup()
    paths = [helper.make_sample_file(), helper.make_sample_file2()]
    expected = np.arange(100).reshape(10, 10)

    _, arrays = fh.open_files(paths)
    np.testing.assert_array_equal(expected, arrays[0])
    np.testing.assert_array_equal(expected, arrays[1])

    helper.tear_down()
@pytest.mark.unit
def test_dtype_to_bytes_per_value():
    """Tests morpheus_core.helpers.fits_helper.dtype_to_bytes_per_value"""
    cases = [
        (np.uint8, 1),
        (np.int16, 2),
        (np.int32, 4),
        (np.float32, 4),
        (np.float64, 8),
    ]
    for dtype, expected in cases:
        assert fh.dtype_to_bytes_per_value(dtype) == expected
@pytest.mark.unit
def test_dtype_to_bytes_per_value_fails():
    """Tests morpheus_core.helpers.fits_helper.dtype_to_bytes_per_value

    Verifies that an unsupported dtype raises ValueError.
    """
    # np.bool was a deprecated alias removed in NumPy 1.24; with it the test
    # died with AttributeError instead of exercising the ValueError path.
    # np.bool_ is the supported scalar type and is equally unsupported here.
    with pytest.raises(ValueError):
        fh.dtype_to_bytes_per_value(np.bool_)
@pytest.mark.unit
@pytest.mark.filterwarnings("ignore::UserWarning")  # Ignore astropy warning
def test_create_file():
    """Tests morpheus_core.helpers.fits_helper.create_file"""
    helper.setup()
    expected_shape = (100, 100)
    out_path = os.path.join(helper.TMP_DIR, "test.fits")

    fh.create_file(out_path, expected_shape, np.float32)
    assert fits.getdata(out_path).shape == expected_shape

    helper.tear_down()
| [
"morpheus_core.helpers.fits_helper.open_file",
"pytest.mark.filterwarnings",
"numpy.arange",
"morpheus_core.tests.helpers.make_sample_file2",
"morpheus_core.helpers.fits_helper.dtype_to_bytes_per_value",
"os.path.join",
"morpheus_core.tests.helpers.make_sample_file",
"astropy.io.fits.getdata",
"pyte... | [((2846, 2895), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore::UserWarning"""'], {}), "('ignore::UserWarning')\n", (2872, 2895), False, 'import pytest\n'), ((1447, 1461), 'morpheus_core.tests.helpers.setup', 'helper.setup', ([], {}), '()\n', (1459, 1461), True, 'import morpheus_core.tests.helpers as helper\n'), ((1484, 1509), 'morpheus_core.tests.helpers.make_sample_file', 'helper.make_sample_file', ([], {}), '()\n', (1507, 1509), True, 'import morpheus_core.tests.helpers as helper\n'), ((1591, 1620), 'morpheus_core.helpers.fits_helper.open_file', 'fh.open_file', (['sample_location'], {}), '(sample_location)\n', (1603, 1620), True, 'import morpheus_core.helpers.fits_helper as fh\n'), ((1626, 1685), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['expected_array', 'actual_array'], {}), '(expected_array, actual_array)\n', (1655, 1685), True, 'import numpy as np\n'), ((1691, 1709), 'morpheus_core.tests.helpers.tear_down', 'helper.tear_down', ([], {}), '()\n', (1707, 1709), True, 'import morpheus_core.tests.helpers as helper\n'), ((1817, 1831), 'morpheus_core.tests.helpers.setup', 'helper.setup', ([], {}), '()\n', (1829, 1831), True, 'import morpheus_core.tests.helpers as helper\n'), ((1854, 1879), 'morpheus_core.tests.helpers.make_sample_file', 'helper.make_sample_file', ([], {}), '()\n', (1877, 1879), True, 'import morpheus_core.tests.helpers as helper\n'), ((1903, 1929), 'morpheus_core.tests.helpers.make_sample_file2', 'helper.make_sample_file2', ([], {}), '()\n', (1927, 1929), True, 'import morpheus_core.tests.helpers as helper\n'), ((2009, 2059), 'morpheus_core.helpers.fits_helper.open_files', 'fh.open_files', (['[sample_location, sample2_location]'], {}), '([sample_location, sample2_location])\n', (2022, 2059), True, 'import morpheus_core.helpers.fits_helper as fh\n'), ((2065, 2128), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['expected_array', 'actual_arrays[0]'], {}), 
'(expected_array, actual_arrays[0])\n', (2094, 2128), True, 'import numpy as np\n'), ((2133, 2196), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['expected_array', 'actual_arrays[1]'], {}), '(expected_array, actual_arrays[1])\n', (2162, 2196), True, 'import numpy as np\n'), ((2202, 2220), 'morpheus_core.tests.helpers.tear_down', 'helper.tear_down', ([], {}), '()\n', (2218, 2220), True, 'import morpheus_core.tests.helpers as helper\n'), ((3012, 3026), 'morpheus_core.tests.helpers.setup', 'helper.setup', ([], {}), '()\n', (3024, 3026), True, 'import morpheus_core.tests.helpers as helper\n'), ((3066, 3107), 'os.path.join', 'os.path.join', (['helper.TMP_DIR', '"""test.fits"""'], {}), "(helper.TMP_DIR, 'test.fits')\n", (3078, 3107), False, 'import os\n'), ((3112, 3154), 'morpheus_core.helpers.fits_helper.create_file', 'fh.create_file', (['tmp_out', 'shape', 'np.float32'], {}), '(tmp_out, shape, np.float32)\n', (3126, 3154), True, 'import morpheus_core.helpers.fits_helper as fh\n'), ((3169, 3190), 'astropy.io.fits.getdata', 'fits.getdata', (['tmp_out'], {}), '(tmp_out)\n', (3181, 3190), False, 'from astropy.io import fits\n'), ((3229, 3247), 'morpheus_core.tests.helpers.tear_down', 'helper.tear_down', ([], {}), '()\n', (3245, 3247), True, 'import morpheus_core.tests.helpers as helper\n'), ((2753, 2778), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2766, 2778), False, 'import pytest\n'), ((2788, 2824), 'morpheus_core.helpers.fits_helper.dtype_to_bytes_per_value', 'fh.dtype_to_bytes_per_value', (['np.bool'], {}), '(np.bool)\n', (2815, 2824), True, 'import morpheus_core.helpers.fits_helper as fh\n'), ((1532, 1546), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (1541, 1546), True, 'import numpy as np\n'), ((1952, 1966), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (1961, 1966), True, 'import numpy as np\n')] |
#
# Copyright (c) 2017 NORDUnet A/S
# Copyright (c) 2018 SUNET
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# 3. Neither the name of the NORDUnet nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from flask import request
from eduid_common.authn import fido_tokens
from eduid_common.session import session
from eduid_webapp.actions.action_abc import ActionPlugin
from eduid_webapp.actions.app import ActionsApp
from eduid_webapp.actions.app import current_actions_app as current_app
from eduid_webapp.actions.helpers import ActionsMsg
__author__ = 'ft'
class Plugin(ActionPlugin):
    """Actions-app plugin performing a multi-factor authentication step.

    The step can be satisfied in one of three ways, checked in order by
    :meth:`perform_step`:

      * a completed external MFA login recorded in ``session.mfa_action``,
      * a CTAP1/U2F token response POSTed by the frontend,
      * a CTAP2/Webauthn assertion POSTed by the frontend.

    When ``mfa_testing`` is enabled the step succeeds without any token.
    """

    PLUGIN_NAME = 'mfa'
    PACKAGE_NAME = 'eduid_webapp.actions.actions.mfa'
    steps = 1

    @classmethod
    def includeme(cls, app: ActionsApp):
        """Check plugin configuration at application start-up.

        Missing settings are only logged as errors; the app is not stopped
        here. MFA test mode is always forced off at start-up.
        """
        if not app.conf.eidas_url:
            app.logger.error('The configuration option eidas_url is required with plugin MFA')
        if not app.conf.mfa_authn_idp:
            app.logger.error('The configuration option mfa_authn_idp is required with plugin MFA')
        app.conf.mfa_testing = False

    def get_config_for_bundle(self, action):
        """Return the configuration the frontend bundle needs for this action.

        Starts a fido token verification for the action's user and augments
        the resulting config with test-mode and external-MFA settings.

        :param action: the pending MFA action (carries the user's eppn)
        :raise ActionError: if the user cannot be found in the central db
        """
        eppn = action.eppn
        user = current_app.central_userdb.get_user_by_eppn(eppn, raise_on_missing=False)
        current_app.logger.debug('Loaded User {} from db'.format(user))
        if not user:
            raise self.ActionError(ActionsMsg.user_not_found)

        config = fido_tokens.start_token_verification(user, self.PACKAGE_NAME, current_app.conf.fido2_rp_id)

        # Explicit check for boolean True
        if current_app.conf.mfa_testing is True:
            current_app.logger.info('MFA test mode is enabled')
            config['testing'] = True
        else:
            config['testing'] = False

        # Add config for external mfa auth
        config['eidas_url'] = current_app.conf.eidas_url
        config['mfa_authn_idp'] = current_app.conf.mfa_authn_idp

        return config

    def perform_step(self, action):
        """Verify the user's MFA response for ``action``.

        :param action: the pending MFA action
        :return: a result dict with at least a ``success`` key
        :raise ActionError: on missing/unrecognised data or failed verification
        """
        current_app.logger.debug('Performing MFA step')
        if current_app.conf.mfa_testing:
            current_app.logger.debug('Test mode is on, faking authentication')
            return {
                'success': True,
                'testing': True,
            }

        eppn = action.eppn
        user = current_app.central_userdb.get_user_by_eppn(eppn, raise_on_missing=False)
        current_app.logger.debug('Loaded User {} from db (in perform_action)'.format(user))

        # Third party service MFA
        if session.mfa_action.success is True:  # Explicit check that success is the boolean True
            issuer = session.mfa_action.issuer
            authn_instant = session.mfa_action.authn_instant
            authn_context = session.mfa_action.authn_context
            current_app.logger.info('User {} logged in using external mfa service {}'.format(user, issuer))
            action.result = {
                'success': True,
                'issuer': issuer,
                'authn_instant': authn_instant,
                'authn_context': authn_context,
            }
            current_app.actions_db.update_action(action)
            # Clear mfa_action from session
            del session.mfa_action
            return action.result

        req_json = request.get_json()
        if not req_json:
            current_app.logger.error('No data in request to authn {}'.format(user))
            raise self.ActionError(ActionsMsg.no_data)

        # Process POSTed data
        if 'tokenResponse' in req_json:
            # CTAP1/U2F
            # Reuse the payload parsed above instead of re-parsing the request.
            token_response = req_json.get('tokenResponse', '')
            current_app.logger.debug('U2F token response: {}'.format(token_response))

            challenge = session.get(self.PACKAGE_NAME + '.u2f.challenge')
            current_app.logger.debug('Challenge: {!r}'.format(challenge))

            result = fido_tokens.verify_u2f(user, challenge, token_response, current_app.conf.u2f_valid_facets)

            if result is not None:
                action.result = result
                current_app.actions_db.update_action(action)
                return action.result

        elif 'authenticatorData' in req_json:
            # CTAP2/Webauthn
            try:
                result = fido_tokens.verify_webauthn(user, req_json, self.PACKAGE_NAME, current_app.conf.fido2_rp_id)
            except fido_tokens.VerificationProblem as exc:
                raise self.ActionError(exc.msg)

            action.result = result
            current_app.actions_db.update_action(action)
            return action.result

        else:
            current_app.logger.error('Neither U2F nor Webauthn data in request to authn {}'.format(user))
            current_app.logger.debug('Request: {}'.format(req_json))
            raise self.ActionError(ActionsMsg.no_response)

        raise self.ActionError(ActionsMsg.unknown_token)
| [
"eduid_common.authn.fido_tokens.verify_u2f",
"eduid_webapp.actions.app.current_actions_app.central_userdb.get_user_by_eppn",
"eduid_common.authn.fido_tokens.verify_webauthn",
"eduid_webapp.actions.app.current_actions_app.logger.debug",
"eduid_webapp.actions.app.current_actions_app.logger.info",
"eduid_com... | [((2557, 2630), 'eduid_webapp.actions.app.current_actions_app.central_userdb.get_user_by_eppn', 'current_app.central_userdb.get_user_by_eppn', (['eppn'], {'raise_on_missing': '(False)'}), '(eppn, raise_on_missing=False)\n', (2600, 2630), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((2804, 2900), 'eduid_common.authn.fido_tokens.start_token_verification', 'fido_tokens.start_token_verification', (['user', 'self.PACKAGE_NAME', 'current_app.conf.fido2_rp_id'], {}), '(user, self.PACKAGE_NAME, current_app.\n conf.fido2_rp_id)\n', (2840, 2900), False, 'from eduid_common.authn import fido_tokens\n'), ((3375, 3422), 'eduid_webapp.actions.app.current_actions_app.logger.debug', 'current_app.logger.debug', (['"""Performing MFA step"""'], {}), "('Performing MFA step')\n", (3399, 3422), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((3687, 3760), 'eduid_webapp.actions.app.current_actions_app.central_userdb.get_user_by_eppn', 'current_app.central_userdb.get_user_by_eppn', (['eppn'], {'raise_on_missing': '(False)'}), '(eppn, raise_on_missing=False)\n', (3730, 3760), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((4659, 4677), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (4675, 4677), False, 'from flask import request\n'), ((3000, 3051), 'eduid_webapp.actions.app.current_actions_app.logger.info', 'current_app.logger.info', (['"""MFA test mode is enabled"""'], {}), "('MFA test mode is enabled')\n", (3023, 3051), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((3476, 3542), 'eduid_webapp.actions.app.current_actions_app.logger.debug', 'current_app.logger.debug', (['"""Test mode is on, faking authentication"""'], {}), "('Test mode is on, faking authentication')\n", (3500, 3542), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((4482, 4526), 
'eduid_webapp.actions.app.current_actions_app.actions_db.update_action', 'current_app.actions_db.update_action', (['action'], {}), '(action)\n', (4518, 4526), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((5121, 5170), 'eduid_common.session.session.get', 'session.get', (["(self.PACKAGE_NAME + '.u2f.challenge')"], {}), "(self.PACKAGE_NAME + '.u2f.challenge')\n", (5132, 5170), False, 'from eduid_common.session import session\n'), ((5267, 5362), 'eduid_common.authn.fido_tokens.verify_u2f', 'fido_tokens.verify_u2f', (['user', 'challenge', 'token_response', 'current_app.conf.u2f_valid_facets'], {}), '(user, challenge, token_response, current_app.conf.\n u2f_valid_facets)\n', (5289, 5362), False, 'from eduid_common.authn import fido_tokens\n'), ((5449, 5493), 'eduid_webapp.actions.app.current_actions_app.actions_db.update_action', 'current_app.actions_db.update_action', (['action'], {}), '(action)\n', (5485, 5493), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((5897, 5941), 'eduid_webapp.actions.app.current_actions_app.actions_db.update_action', 'current_app.actions_db.update_action', (['action'], {}), '(action)\n', (5933, 5941), True, 'from eduid_webapp.actions.app import current_actions_app as current_app\n'), ((4966, 4984), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (4982, 4984), False, 'from flask import request\n'), ((5649, 5746), 'eduid_common.authn.fido_tokens.verify_webauthn', 'fido_tokens.verify_webauthn', (['user', 'req_json', 'self.PACKAGE_NAME', 'current_app.conf.fido2_rp_id'], {}), '(user, req_json, self.PACKAGE_NAME, current_app.\n conf.fido2_rp_id)\n', (5676, 5746), False, 'from eduid_common.authn import fido_tokens\n')] |
import networkx as nx
import markdown as md
def parse_concepts(filename):
    """Parse a structured markdown file into concepts and their relations.

    Expected structure:

        # [Title]
        ## [Concept]
        [Some text, possibly LaTeX math]
        ### [Any subtitle]
        ### Utiliza:
        - [Related concept 1]
        - [Related concept 2]
        ## [Concept]
        ...

    Returns a tuple ``(concepts, title)`` where ``concepts`` is a list of
    dicts, one per concept, with keys:

        id      -> position of the concept in the list (used for the network)
        name    -> the concept heading ([Concept] above)
        uses    -> ids of the concepts listed under "### Utiliza:"
        content -> "##<name>" plus the plain text between the heading and
                   the "### Utiliza:" section
    """
    with open(filename, "r") as file:
        text = file.read()

    # The first chunk is the title; each remaining chunk is one concept.
    sections = text.split("\n## ")
    title = sections[0].strip("# ").strip("\n")

    concepts = []
    for index, section in enumerate(sections[1:]):
        lines = [line for line in section.split("\n") if line != ""]
        concept = {"id": index, "name": lines[0]}
        try:
            end_index = lines.index("### Utiliza:")
            # Everything after the marker is a "- Concept" bullet.
            concept["uses"] = [line.strip("- ") for line in lines[end_index + 1:]]
        except ValueError:
            # list.index raises ValueError: no "### Utiliza:" section,
            # so this concept declares no dependencies.
            concept["uses"] = []
            end_index = len(lines)
        concept["content"] = "##" + concept["name"] + "\n" + "\n".join(lines[1:end_index])
        concepts.append(concept)

    # Replace the names in "uses" with the ids of the matching concepts.
    # Names that match no concept are silently dropped.
    for concept in concepts:
        concept["uses"] = [other["id"] for other in concepts if other["name"] in concept["uses"]]

    return concepts, title
def build_concept_network(filename):
    """Build an undirected NetworkX graph from the concepts in *filename*.

    Each concept becomes a node (indexed 0..N-1) and every id in a
    concept's "uses" list becomes an edge.  Only the connection is kept,
    not its direction.

    Returns a tuple of:
        - the NetworkX graph object
        - the number of nodes
        - the list of concept dicts parsed from the file
        - the title of the network
    """
    concepts, title = parse_concepts(filename)
    node_count = len(concepts)
    graph = nx.Graph()
    graph.add_nodes_from(list(range(0, node_count)))
    # Connect every concept to each concept it relies on.
    edges = [(concept["id"], dep) for concept in concepts for dep in concept["uses"]]
    graph.add_edges_from(edges)
    return (graph, node_count, concepts, title)
def get_graph_data(filename):
    """ Wrapper for the build_concept_network and parse_concepts functions.
    Returns all necessary data to draw the graph, as a tuple of:
    (graph, node count, concept dicts, title, concept names,
     per-concept HTML content, neighbour lists, neighbour counts).
    """
    G, N, concept_data, Title = build_concept_network(filename)
    titles = [concept["name"] for concept in concept_data]
    # Render each concept's markdown body to HTML for display.
    html_content = [md.markdown(concept["content"]) for concept in concept_data]
    # Get conection info about the nodes
    node_conections = [list(nx.neighbors(G, n)) for n in range(N)]
    # Degree of each node, i.e. how many concepts it is connected to.
    node_conectivity = [len(con) for con in node_conections]
    return (G, N, concept_data, Title, titles, html_content, node_conections, node_conectivity) | [
"markdown.markdown",
"networkx.neighbors",
"networkx.Graph"
] | [((2534, 2544), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (2542, 2544), True, 'import networkx as nx\n'), ((3069, 3100), 'markdown.markdown', 'md.markdown', (["concept['content']"], {}), "(concept['content'])\n", (3080, 3100), True, 'import markdown as md\n'), ((3200, 3218), 'networkx.neighbors', 'nx.neighbors', (['G', 'n'], {}), '(G, n)\n', (3212, 3218), True, 'import networkx as nx\n')] |
# -*- coding: UTF-8 -*-
from flask import Blueprint, Flask, jsonify, request, make_response
import pdfkit
from utils.cm.utils import is_exist
from utils.cm.files import delete_dir
from utils.pdf.pdfkits import *
app = Blueprint('pdfapi', __name__)
# curl -v -H "Content-type: application/json" -X POST http://192.168.10.126:8084/pdf
# curl -XPOST -F file=@index.html -F file=@index.css -F file=@style.css http://192.168.10.126:8084/pdf > test.pdf
@app.route('/pdf', methods=[ 'GET', 'POST' ])
def pdf():
    """Render a PDF from the posted payload and return it as an attachment.

    Accepts either a JSON body or a multipart form (see the curl examples
    above); the payload is handed to get_pdf, which is expected to return
    a dict with 'filename' and 'path' on success, or a dict with a 'msg'
    key (or None) on failure. On failure a JSON error is returned instead.
    """
    obj = {}
    if request.method == 'POST':
        if request.json is not None:
            if is_json(request.json):
                obj = request.json
        else:
            obj = get_forms(request)
    # options = {}
    # options['orientation'] = 'Portrait'
    # obj['options'] = options
    result = get_pdf(obj)
    if result is not None and is_exist(result, 'msg') == False:
        response = make_response()
        filename = result['filename']
        fullpath = result['path'] + '/' + filename
        # Read via a context manager so the file handle is closed before
        # its temporary directory is deleted below (the original leaked it).
        with open(fullpath, 'rb') as pdf_file:
            response.data = pdf_file.read()
        response.headers['Content-Disposition'] = "attachment; filename=" + filename
        response.mimetype = 'application/pdf'
        delete_dir(result['path'])
        return response
    else:
        if result is None:
            result = { 'msg': 'Json Data is error !!!' }
        return jsonify(result), 200
| [
"utils.cm.files.delete_dir",
"utils.cm.utils.is_exist",
"flask.make_response",
"flask.Blueprint",
"flask.jsonify"
] | [((219, 248), 'flask.Blueprint', 'Blueprint', (['"""pdfapi"""', '__name__'], {}), "('pdfapi', __name__)\n", (228, 248), False, 'from flask import Blueprint, Flask, jsonify, request, make_response\n'), ((915, 930), 'flask.make_response', 'make_response', ([], {}), '()\n', (928, 930), False, 'from flask import Blueprint, Flask, jsonify, request, make_response\n'), ((1212, 1238), 'utils.cm.files.delete_dir', 'delete_dir', (["result['path']"], {}), "(result['path'])\n", (1222, 1238), False, 'from utils.cm.files import delete_dir\n'), ((862, 885), 'utils.cm.utils.is_exist', 'is_exist', (['result', '"""msg"""'], {}), "(result, 'msg')\n", (870, 885), False, 'from utils.cm.utils import is_exist\n'), ((1372, 1387), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (1379, 1387), False, 'from flask import Blueprint, Flask, jsonify, request, make_response\n')] |
import frappe
def after_migrate():
	"""Frappe after_migrate hook: ensure the default OTP templates are configured."""
	set_default_otp_template()
def set_default_otp_template():
	"""Point System Settings at the fixture OTP templates where unset.

	For each of the email and SMS OTP template fields: if System Settings
	has no value yet and the corresponding fixture template exists, set it.
	Existing values are never overwritten.
	"""
	defaults = (
		("email_otp_template", "Email Template", "Default Email OTP Template"),
		("sms_otp_template", "SMS Template", "Default SMS OTP Template"),
	)
	for field, doctype, template in defaults:
		if frappe.db.get_value("System Settings", None, field):
			continue
		# Template should exist via fixtures
		if frappe.db.exists(doctype, template):
			frappe.db.set_value("System Settings", None, field, template)
| [
"frappe.db.exists",
"frappe.db.get_value",
"frappe.db.set_value"
] | [((107, 173), 'frappe.db.get_value', 'frappe.db.get_value', (['"""System Settings"""', 'None', '"""email_otp_template"""'], {}), "('System Settings', None, 'email_otp_template')\n", (126, 173), False, 'import frappe\n'), ((182, 246), 'frappe.db.exists', 'frappe.db.exists', (['"""Email Template"""', '"""Default Email OTP Template"""'], {}), "('Email Template', 'Default Email OTP Template')\n", (198, 246), False, 'import frappe\n'), ((396, 460), 'frappe.db.get_value', 'frappe.db.get_value', (['"""System Settings"""', 'None', '"""sms_otp_template"""'], {}), "('System Settings', None, 'sms_otp_template')\n", (415, 460), False, 'import frappe\n'), ((469, 529), 'frappe.db.exists', 'frappe.db.exists', (['"""SMS Template"""', '"""Default SMS OTP Template"""'], {}), "('SMS Template', 'Default SMS OTP Template')\n", (485, 529), False, 'import frappe\n'), ((289, 389), 'frappe.db.set_value', 'frappe.db.set_value', (['"""System Settings"""', 'None', '"""email_otp_template"""', '"""Default Email OTP Template"""'], {}), "('System Settings', None, 'email_otp_template',\n 'Default Email OTP Template')\n", (308, 389), False, 'import frappe\n'), ((572, 668), 'frappe.db.set_value', 'frappe.db.set_value', (['"""System Settings"""', 'None', '"""sms_otp_template"""', '"""Default SMS OTP Template"""'], {}), "('System Settings', None, 'sms_otp_template',\n 'Default SMS OTP Template')\n", (591, 668), False, 'import frappe\n')] |