code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import random, os, string, subprocess, shutil, requests
from discord import Webhook, RequestsWebhookAdapter, Embed
from dotenv import dotenv_values
import argparse, colorama
from colorama import Fore
class Settings:
    """Expose every key/value pair of the `.settings` dotenv file as attributes."""

    def __init__(self):
        settings = dotenv_values(".settings")
        for key, value in settings.items():
            setattr(self, key, value)
class App():
    """Packs recording files with 7-Zip, stores generated passwords in a
    `passwords` ledger and announces finished archives on Discord.

    The settings object (see `Settings`) must provide at least:
    WEBHOOK, RECORDS_PATH, EXTENSION, _7ZIP and NAME.
    """
    def __init__(self, config):
        self.config = config
        # Webhook client used by send_2_discord() to post announcements.
        self.webhook = Webhook.from_url(self.config.WEBHOOK, adapter=RequestsWebhookAdapter())
        # Windows-style paths: archives go to \output\, originals to \processed\.
        self.output_path = self.config.RECORDS_PATH + '\\output\\'
        self.processed_path = self.config.RECORDS_PATH + '\\processed\\'
    def gen_pass(self, lenght):
        """Return a random password of `lenght` characters (letters, digits, symbols)."""
        chars = string.ascii_letters + string.digits + "!#$%&()*+<=>?@[]^_|~"
        password = ''.join(random.choices(chars, k=lenght))
        return password
    def _check_7zip(self):
        """Exit the program if the configured 7-Zip executable does not exist."""
        if not os.path.isfile(self.config._7ZIP):
            exit(f'{Fore.RED}WRONG path to 7ZIP executable. Program Exited.')
    def _generate_dirs(self):
        """Create the processed/ and output/ directories if they are missing."""
        if not os.path.isdir(self.processed_path):
            os.mkdir(self.processed_path)
            print(f'{Fore.YELLOW}Path for proccsed records not found. Created one for you.')
        if not os.path.isdir(self.output_path):
            os.mkdir(self.output_path)
            print(f'{Fore.YELLOW}Output path not found. Created one for you.')
    def process_files(self):
        """Archive every matching recording into a password-protected 7z file.

        For each file in RECORDS_PATH ending with EXTENSION: generate a file
        password and a link password, pack the file with 7-Zip, move the
        original into processed/, and append a row to the `passwords` ledger
        (the share-link column `L:` is left empty to be filled in manually).
        """
        with open('passwords', 'a+', encoding="utf-8") as f:
            for fn in os.listdir(self.config.RECORDS_PATH):
                if fn.endswith(self.config.EXTENSION):
                    file_password, link_password = self.gen_pass(16), self.gen_pass(16)
                    # 7z flags: add, max compression, encrypt headers, assume yes, recurse.
                    command = [self.config._7ZIP, 'a -mx9 -mhe=on -y -r', f'-p"{file_password}"',
                               '--', f'"{self.output_path + fn[:-len(self.config.EXTENSION)]}.7z"', f'"{self.config.RECORDS_PATH}\\{fn}"']
                    # NOTE(review): joined into a single command string, so this relies on
                    # Windows command-line parsing; a POSIX port would need an argument list.
                    subprocess.run(" ".join(command))
                    shutil.move(self.config.RECORDS_PATH + '\\' + fn, self.processed_path + fn)
                    f.write(f'F: {fn} | FP: {file_password} | LP: {link_password} | L: \n')
    def send_2_discord(self):
        """Post a Discord embed for every ledger row that has a share link.

        Reads the `passwords` ledger (rows of the form
        `F: <file> | FP: <pw> | LP: <pw> | L: <link>`), skips rows without a
        link or already marked sent with a leading `*`, announces the rest
        and rewrites the file marking newly sent rows with `*`.
        """
        data = None
        with open('passwords', 'r', encoding="utf-8") as f:
            data = [line.strip('\n').split(' | ') for line in f.readlines()]
        with open('passwords', 'w+', encoding="utf-8") as f:
            for line in data:
                # Strip the 'F: ', 'FP: ', 'LP: ' and 'L: ' column prefixes.
                fn = line[0][2::].strip(' ')
                file_password = line[1][3::].strip(' ')
                link_password = line[2][3::].strip(' ')
                link = line[3][2::].strip(' ')
                if link == '':
                    # No share link yet: keep the row unchanged for a later run.
                    print(f'{Fore.YELLOW}{fn} SKIPPED - No SHARE LINK specified.')
                    f.write(' | '.join(line) + '\n')
                    continue
                if line[0][0] == '*':
                    # Leading '*' means this row was already announced.
                    f.write(' | '.join(line) + '\n')
                    continue
                else:
                    # Mark the row as sent before announcing it.
                    f.write('*' + ' | '.join(line) + '\n')
                msg = {
                    'title': f'{fn}',
                    'description': 'W razie wątpliwości pytać na <#809980920249319465>;',
                    'fields': [
                        {'name': 'Link do nagrania:', 'value': f'[Kliknij, aby się przenieść.]({link})', 'inline': False},
                        {'name': 'Hasło dostępu:', 'value': f'```{link_password}```', 'inline': True},
                        {'name': 'Hasło do pliku:', 'value': f'```{file_password}```', 'inline': True}
                    ],
                    'footer': {
                        'text': f'~{self.config.NAME}', 'inline': True
                    }
                }
                self.webhook.send('Nowe nagranie zostało udostępnione.', username='Student.', embed=Embed().from_dict(msg),
                                  avatar_url="https://cdn4.iconfinder.com/data/icons/science-131/64/265-512.png")
    def run(self):
        """Run the whole pipeline: validate 7-Zip, prepare dirs, pack, announce."""
        self._check_7zip()
        self._generate_dirs()
        self.process_files()
        self.send_2_discord()
if __name__ == "__main__":
    # Enable ANSI colours on Windows; autoreset clears styling after each print.
    colorama.init(autoreset=True)
    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", help="Display errors in console.", action="store_true", default=False)
    args = parser.parse_args()
    # Load the `.settings` dotenv file and run the full pipeline.
    CONFIG = Settings()
    app = App(CONFIG)
    try:
        app.run()
    except Exception as e:
        # Errors are hidden by default; pass -v to print the exception first.
        if args.verbose:
            print(e)
        exit(f'{Fore.RED}An Error occured program will exit.')
| [
"os.listdir",
"discord.RequestsWebhookAdapter",
"argparse.ArgumentParser",
"shutil.move",
"os.path.isfile",
"random.choices",
"os.path.isdir",
"os.mkdir",
"dotenv.dotenv_values",
"discord.Embed",
"colorama.init"
] | [((4178, 4207), 'colorama.init', 'colorama.init', ([], {'autoreset': '(True)'}), '(autoreset=True)\n', (4191, 4207), False, 'import argparse, colorama\n'), ((4221, 4246), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4244, 4246), False, 'import argparse, colorama\n'), ((781, 812), 'random.choices', 'random.choices', (['chars'], {'k': 'lenght'}), '(chars, k=lenght)\n', (795, 812), False, 'import random, os, string, subprocess, shutil, requests\n'), ((881, 914), 'os.path.isfile', 'os.path.isfile', (['self.config._7ZIP'], {}), '(self.config._7ZIP)\n', (895, 914), False, 'import random, os, string, subprocess, shutil, requests\n'), ((1040, 1074), 'os.path.isdir', 'os.path.isdir', (['self.processed_path'], {}), '(self.processed_path)\n', (1053, 1074), False, 'import random, os, string, subprocess, shutil, requests\n'), ((1088, 1117), 'os.mkdir', 'os.mkdir', (['self.processed_path'], {}), '(self.processed_path)\n', (1096, 1117), False, 'import random, os, string, subprocess, shutil, requests\n'), ((1227, 1258), 'os.path.isdir', 'os.path.isdir', (['self.output_path'], {}), '(self.output_path)\n', (1240, 1258), False, 'import random, os, string, subprocess, shutil, requests\n'), ((1272, 1298), 'os.mkdir', 'os.mkdir', (['self.output_path'], {}), '(self.output_path)\n', (1280, 1298), False, 'import random, os, string, subprocess, shutil, requests\n'), ((1492, 1528), 'os.listdir', 'os.listdir', (['self.config.RECORDS_PATH'], {}), '(self.config.RECORDS_PATH)\n', (1502, 1528), False, 'import random, os, string, subprocess, shutil, requests\n'), ((264, 290), 'dotenv.dotenv_values', 'dotenv_values', (['""".settings"""'], {}), "('.settings')\n", (277, 290), False, 'from dotenv import dotenv_values\n'), ((477, 501), 'discord.RequestsWebhookAdapter', 'RequestsWebhookAdapter', ([], {}), '()\n', (499, 501), False, 'from discord import Webhook, RequestsWebhookAdapter, Embed\n'), ((1986, 2061), 'shutil.move', 'shutil.move', (["(self.config.RECORDS_PATH + 
'\\\\' + fn)", '(self.processed_path + fn)'], {}), "(self.config.RECORDS_PATH + '\\\\' + fn, self.processed_path + fn)\n", (1997, 2061), False, 'import random, os, string, subprocess, shutil, requests\n'), ((3874, 3881), 'discord.Embed', 'Embed', ([], {}), '()\n', (3879, 3881), False, 'from discord import Webhook, RequestsWebhookAdapter, Embed\n')] |
"""Bin Testing"""
# standard library
from importlib.machinery import SourceFileLoader
from importlib.util import module_from_spec, spec_from_loader
from typing import List
# third-party
from typer.testing import CliRunner
# dynamically load bin/tcex file
spec = spec_from_loader('app', SourceFileLoader('app', 'bin/tcex'))
tcex_cli = module_from_spec(spec)
# Executing the module runs the bin/tcex script as if it were imported.
spec.loader.exec_module(tcex_cli)
# get app from bin/tcex CLI script
app = tcex_cli.app
# get instance of typer CliRunner for test case
runner = CliRunner()
class TestTcexCliList:
    """Tests for the `tcex list` CLI command."""

    def setup_method(self):
        """Configure setup before each test."""

    def teardown_method(self):
        """Configure teardown after each test."""

    @staticmethod
    def _run_command(args: List[str]) -> str:
        """Invoke the tcex CLI app with *args* and return the click result."""
        return runner.invoke(app, args)

    def test_tcex_list(self) -> None:
        """The bare `list` command exits cleanly and shows the template groups."""
        result = self._run_command(['list'])
        assert result.exit_code == 0, result.stdout
        # spot check a few lines of outputs
        for heading in ('Organization Templates', 'Playbook Templates'):
            assert heading in result.stdout
        # TODO: [med] update this once template is done
        # assert 'API Service Templates' in result.stdout
        # assert 'Trigger Service Templates' in result.stdout
        # assert 'Webhook Trigger Service Templates' in result.stdout

    # TODO: [med] update this once template is done
    # def test_tcex_list_external_api_service(self) -> None:
    #     """Test Case"""
    #     result = self._run_command(['list', '--type', 'api_service'])
    #     assert result.exit_code == 0, result.stdout
    #     # spot check a few lines of outputs
    #     assert 'basic' in result.stdout

    # TODO: [med] update this once template is done
    # def test_tcex_list_external_basic(self) -> None:
    #     """Test Case"""
    #     result = self._run_command(['list', '--type', 'external'])
    #     assert result.exit_code == 0, result.stdout
    #     # spot check a few lines of outputs
    #     assert 'basic' in result.stdout

    def test_tcex_list_organization_basic(self) -> None:
        """`list --type organization` offers the basic template."""
        result = self._run_command(['list', '--type', 'organization'])
        assert result.exit_code == 0, result.stdout
        # spot check a few lines of outputs
        assert 'basic' in result.stdout

    def test_tcex_list_playbook_basic(self) -> None:
        """`list --type playbook` offers the basic template."""
        result = self._run_command(['list', '--type', 'playbook'])
        assert result.exit_code == 0, result.stdout
        # spot check a few lines of outputs
        assert 'basic' in result.stdout

    # TODO: [med] update this once template is done
    # def test_tcex_list_trigger_basic(self) -> None:
    #     """Test Case"""
    #     result = self._run_command(['list', '--type', 'trigger_service'])
    #     assert result.exit_code == 0, result.stdout
    #     # spot check a few lines of outputs
    #     assert 'basic' in result.stdout

    # TODO: [med] update this once template is done
    # def test_tcex_list_webhook_trigger_basic(self) -> None:
    #     """Test Case"""
    #     result = self._run_command(['list', '--type', 'webhook_trigger_service'])
    #     assert result.exit_code == 0, result.stdout
    #     # spot check a few lines of outputs
    #     assert 'basic' in result.stdout
| [
"typer.testing.CliRunner",
"importlib.machinery.SourceFileLoader",
"importlib.util.module_from_spec"
] | [((336, 358), 'importlib.util.module_from_spec', 'module_from_spec', (['spec'], {}), '(spec)\n', (352, 358), False, 'from importlib.util import module_from_spec, spec_from_loader\n'), ((506, 517), 'typer.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (515, 517), False, 'from typer.testing import CliRunner\n'), ((288, 323), 'importlib.machinery.SourceFileLoader', 'SourceFileLoader', (['"""app"""', '"""bin/tcex"""'], {}), "('app', 'bin/tcex')\n", (304, 323), False, 'from importlib.machinery import SourceFileLoader\n')] |
# Copyright 2019 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import pytest
import torch
from mt.mvae import utils
from mt.mvae.distributions.von_mises_fisher import VonMisesFisher
# Dimensions and concentration scales exercised by every parametrized test below.
dims = [2, 3, 4]
scales = [1e9, 1e5, 1e1, 1e0, 1e-5, 1e-15]
def vmf_distribution(dim: int, scale: float) -> VonMisesFisher:
    """Build a seeded von Mises-Fisher distribution with a canonical mean direction."""
    utils.set_seeds(42)
    dtype = torch.get_default_dtype()
    mean_direction = torch.tensor([[1.] + [0.] * (dim - 1)], dtype=dtype)
    concentration = torch.tensor([[scale]], dtype=dtype)
    return VonMisesFisher(mean_direction, concentration)
@pytest.mark.parametrize("dim", dims)
@pytest.mark.parametrize("scale", scales)
def test_vmf_sampling_nans(dim: int, scale: float) -> None:
    """Samples and log-probs stay finite; samples lie on the unit sphere."""
    vmf = vmf_distribution(dim, scale)
    sample_shape = torch.Size([10])
    for _ in range(100):
        draws = vmf.sample(sample_shape)
        assert torch.isfinite(draws).all()
        norms = torch.norm(draws, p=2, dim=-1)
        assert norms.allclose(torch.ones(draws.shape[:-1]))
        log_prob = vmf.log_prob(draws)
        assert torch.isfinite(log_prob).all()
        # A log-density may legitimately be positive, so only bound its magnitude.
        assert (log_prob < 1e20).all()
        assert (log_prob > -1e20).all()
# This does not depend on the mean (loc), just it's dimensionality.
@pytest.mark.parametrize("scale", scales)
def test_sampling_w3(scale: float) -> None:
    """The dedicated 3-D sampler must return `w` coordinates inside [-1, 1]."""
    vmf = vmf_distribution(3, scale)
    draws = vmf._sample_w3(shape=torch.Size([100]))
    assert (draws.abs() <= 1).all()
# This does not depend on the mean (loc), just it's dimensionality.
@pytest.mark.parametrize("dim", dims)
@pytest.mark.parametrize("scale", scales)
def test_sampling_w_rej(dim: int, scale: float) -> None:
    """The rejection sampler must return `w` coordinates inside [-1, 1]."""
    vmf = vmf_distribution(dim, scale)
    draws = vmf._sample_w_rej(shape=torch.Size([100]))
    assert (draws.abs() <= 1).all()
| [
"torch.get_default_dtype",
"mt.mvae.distributions.von_mises_fisher.VonMisesFisher",
"torch.isfinite",
"pytest.mark.parametrize",
"torch.norm",
"mt.mvae.utils.set_seeds",
"torch.Size",
"torch.ones"
] | [((1130, 1166), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dim"""', 'dims'], {}), "('dim', dims)\n", (1153, 1166), False, 'import pytest\n'), ((1168, 1208), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""scale"""', 'scales'], {}), "('scale', scales)\n", (1191, 1208), False, 'import pytest\n'), ((1880, 1920), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""scale"""', 'scales'], {}), "('scale', scales)\n", (1903, 1920), False, 'import pytest\n'), ((2153, 2189), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dim"""', 'dims'], {}), "('dim', dims)\n", (2176, 2189), False, 'import pytest\n'), ((2191, 2231), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""scale"""', 'scales'], {}), "('scale', scales)\n", (2214, 2231), False, 'import pytest\n'), ((903, 922), 'mt.mvae.utils.set_seeds', 'utils.set_seeds', (['(42)'], {}), '(42)\n', (918, 922), False, 'from mt.mvae import utils\n'), ((1085, 1111), 'mt.mvae.distributions.von_mises_fisher.VonMisesFisher', 'VonMisesFisher', (['loc', 'scale'], {}), '(loc, scale)\n', (1099, 1111), False, 'from mt.mvae.distributions.von_mises_fisher import VonMisesFisher\n'), ((1320, 1336), 'torch.Size', 'torch.Size', (['[10]'], {}), '([10])\n', (1330, 1336), False, 'import torch\n'), ((979, 1004), 'torch.get_default_dtype', 'torch.get_default_dtype', ([], {}), '()\n', (1002, 1004), False, 'import torch\n'), ((1048, 1073), 'torch.get_default_dtype', 'torch.get_default_dtype', ([], {}), '()\n', (1071, 1073), False, 'import torch\n'), ((1500, 1530), 'torch.ones', 'torch.ones', (['samples.shape[:-1]'], {}), '(samples.shape[:-1])\n', (1510, 1530), False, 'import torch\n'), ((2031, 2048), 'torch.Size', 'torch.Size', (['[100]'], {}), '([100])\n', (2041, 2048), False, 'import torch\n'), ((2360, 2377), 'torch.Size', 'torch.Size', (['[100]'], {}), '([100])\n', (2370, 2377), False, 'import torch\n'), ((1413, 1436), 'torch.isfinite', 'torch.isfinite', (['samples'], {}), '(samples)\n', 
(1427, 1436), False, 'import torch\n'), ((1458, 1490), 'torch.norm', 'torch.norm', (['samples'], {'p': '(2)', 'dim': '(-1)'}), '(samples, p=2, dim=-1)\n', (1468, 1490), False, 'import torch\n'), ((1588, 1612), 'torch.isfinite', 'torch.isfinite', (['log_prob'], {}), '(log_prob)\n', (1602, 1612), False, 'import torch\n')] |
from abc import ABCMeta, abstractmethod
import torch
import torch.nn.functional as F
from addict import Dict
from mmtrack.models import TRACKERS
@TRACKERS.register_module()
class BaseTracker(metaclass=ABCMeta):
    """Base tracker model.
    Args:
        momentums (dict[str:float], optional): Momentums to update the buffers.
            The `str` indicates the name of the buffer while the `float`
            indicates the momentum. Default to None.
        num_frames_retain (int, optional). If a track is disappeared more than
            `num_frames_retain` frames, it will be deleted in the memo.
    """
    def __init__(self, momentums=None, num_frames_retain=10):
        super().__init__()
        if momentums is not None:
            assert isinstance(momentums, dict), 'momentums must be a dict'
        self.momentums = momentums
        self.num_frames_retain = num_frames_retain
        self.reset()
    def reset(self):
        """Reset the buffer of the tracker."""
        self.num_tracks = 0
        self.tracks = dict()
    @property
    def empty(self):
        """Whether the buffer is empty or not."""
        return False if self.tracks else True
    @property
    def ids(self):
        """All ids in the tracker."""
        return list(self.tracks.keys())
    @property
    def with_reid(self):
        """bool: whether the framework has a reid model"""
        return hasattr(self, 'reid') and self.reid is not None
    def update(self, **kwargs):
        """Update the tracker.
        Args:
            kwargs (dict[str: Tensor | int]): The `str` indicates the
                name of the input variable. `ids` and `frame_ids` are
                obligatory in the keys.
        """
        # Drop keywords whose value is None; the surviving keys define the
        # per-track buffers ("memo items") on the first call.
        memo_items = [k for k, v in kwargs.items() if v is not None]
        rm_items = [k for k in kwargs.keys() if k not in memo_items]
        for item in rm_items:
            kwargs.pop(item)
        if not hasattr(self, 'memo_items'):
            self.memo_items = memo_items
        else:
            # Subsequent calls must supply exactly the same set of buffers.
            assert memo_items == self.memo_items
        assert 'ids' in memo_items
        num_objs = len(kwargs['ids'])
        id_indice = memo_items.index('ids')
        assert 'frame_ids' in memo_items
        # NOTE(review): assumes frame_ids is scalar-like (int or 0-dim tensor);
        # int() would raise on a multi-element tensor — confirm with callers.
        frame_id = int(kwargs['frame_ids'])
        if isinstance(kwargs['frame_ids'], int):
            # Broadcast a scalar frame id so every object carries its own copy.
            kwargs['frame_ids'] = torch.tensor([kwargs['frame_ids']] *
                                               num_objs)
        # cur_frame_id = int(kwargs['frame_ids'][0])
        for k, v in kwargs.items():
            if len(v) != num_objs:
                raise ValueError()
        # Update existing tracks in place; start a new track for unseen ids.
        for obj in zip(*kwargs.values()):
            id = int(obj[id_indice])
            if id in self.tracks:
                self.update_track(id, obj)
            else:
                self.init_track(id, obj)
        self.pop_invalid_tracks(frame_id)
    def pop_invalid_tracks(self, frame_id):
        """Pop out invalid tracks."""
        invalid_ids = []
        for k, v in self.tracks.items():
            # A track unseen for at least `num_frames_retain` frames is dropped.
            if frame_id - v['frame_ids'][-1] >= self.num_frames_retain:
                invalid_ids.append(k)
        for invalid_id in invalid_ids:
            self.tracks.pop(invalid_id)
    def update_track(self, id, obj):
        """Update a track."""
        for k, v in zip(self.memo_items, obj):
            v = v[None]
            if self.momentums is not None and k in self.momentums:
                # Exponential moving average for buffers with a momentum.
                m = self.momentums[k]
                self.tracks[id][k] = (1 - m) * self.tracks[id][k] + m * v
            else:
                # Otherwise keep the full per-frame history as a list.
                self.tracks[id][k].append(v)
    def init_track(self, id, obj):
        """Initialize a track."""
        self.tracks[id] = Dict()
        for k, v in zip(self.memo_items, obj):
            v = v[None]
            if self.momentums is not None and k in self.momentums:
                self.tracks[id][k] = v
            else:
                self.tracks[id][k] = [v]
    @property
    def memo(self):
        """Return all buffers in the tracker."""
        outs = Dict()
        for k in self.memo_items:
            outs[k] = []
        for id, objs in self.tracks.items():
            for k, v in objs.items():
                if k not in outs:
                    continue
                if self.momentums is not None and k in self.momentums:
                    v = v
                else:
                    # List-valued buffers contribute only their latest entry.
                    v = v[-1]
                outs[k].append(v)
        for k, v in outs.items():
            outs[k] = torch.cat(v, dim=0)
        return outs
    def get(self, item, ids=None, num_samples=None, behavior=None):
        """Get the buffer of a specific item.
        Args:
            item (str): The demanded item.
            ids (list[int]): The demaned ids.
            num_samples (int, optional): Number of samples to calculate the
                results. Defaults to None.
            behavior (str, optional): Behavior to calculate the results.
                Options are `mean` | None. Defaults to None.
        Returns:
            Tensor: The results of the demanded item.
        """
        if ids is None:
            ids = self.ids
        outs = []
        for id in ids:
            out = self.tracks[id][item]
            if isinstance(out, list):
                if num_samples is not None:
                    # Restrict to the most recent `num_samples` entries.
                    out = out[-num_samples:]
                out = torch.cat(out, dim=0)
                if behavior == 'mean':
                    out = out.mean(dim=0, keepdim=True)
                elif behavior is None:
                    out = out[None]
                else:
                    raise NotImplementedError()
            else:
                out = out[-1]
            outs.append(out)
        return torch.cat(outs, dim=0)
    @abstractmethod
    def track(self, *args, **kwargs):
        """Tracking forward function."""
        pass
    def crop_imgs(self, img, img_metas, bboxes, rescale=False):
        """Crop the images according to some bounding boxes. Typically for re-
        identification sub-module.
        Args:
            img (Tensor): of shape (N, C, H, W) encoding input images.
                Typically these should be mean centered and std scaled.
            img_metas (list[dict]): list of image info dict where each dict
                has: 'img_shape', 'scale_factor', 'flip', and may also contain
                'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
            bboxes (Tensor): of shape (N, 4) or (N, 5).
            rescale (bool, optional): If True, the bounding boxes should be
                rescaled to fit the scale of the image. Defaults to False.
        Returns:
            Tensor: Image tensor of shape (N, C, H, W).
        """
        h, w, _ = img_metas[0]['img_shape']
        # Keep only the valid region; img may be padded beyond img_shape.
        img = img[:, :, :h, :w]
        if rescale:
            # Only the first four columns are coordinates (the fifth, if any,
            # is a score and must not be rescaled).
            bboxes[:, :4] *= torch.tensor(img_metas[0]['scale_factor']).to(
                bboxes.device)
        # Clip boxes to the image bounds.
        bboxes[:, 0::2] = torch.clamp(bboxes[:, 0::2], min=0, max=w)
        bboxes[:, 1::2] = torch.clamp(bboxes[:, 1::2], min=0, max=h)
        crop_imgs = []
        for bbox in bboxes:
            x1, y1, x2, y2 = map(int, bbox)
            # Guarantee at least a one-pixel extent so the crop is non-empty.
            if x2 == x1:
                x2 = x1 + 1
            if y2 == y1:
                y2 = y1 + 1
            crop_img = img[:, :, y1:y2, x1:x2]
            if self.reid.get('img_scale', False):
                crop_img = F.interpolate(
                    crop_img,
                    size=self.reid['img_scale'],
                    mode='bilinear',
                    align_corners=False)
            crop_imgs.append(crop_img)
        if len(crop_imgs) > 0:
            return torch.cat(crop_imgs, dim=0)
        else:
            return img.new_zeros((0, ))
| [
"addict.Dict",
"mmtrack.models.TRACKERS.register_module",
"torch.tensor",
"torch.cat",
"torch.nn.functional.interpolate",
"torch.clamp"
] | [((150, 176), 'mmtrack.models.TRACKERS.register_module', 'TRACKERS.register_module', ([], {}), '()\n', (174, 176), False, 'from mmtrack.models import TRACKERS\n'), ((3686, 3692), 'addict.Dict', 'Dict', ([], {}), '()\n', (3690, 3692), False, 'from addict import Dict\n'), ((4028, 4034), 'addict.Dict', 'Dict', ([], {}), '()\n', (4032, 4034), False, 'from addict import Dict\n'), ((5749, 5771), 'torch.cat', 'torch.cat', (['outs'], {'dim': '(0)'}), '(outs, dim=0)\n', (5758, 5771), False, 'import torch\n'), ((6973, 7015), 'torch.clamp', 'torch.clamp', (['bboxes[:, 0::2]'], {'min': '(0)', 'max': 'w'}), '(bboxes[:, 0::2], min=0, max=w)\n', (6984, 7015), False, 'import torch\n'), ((7042, 7084), 'torch.clamp', 'torch.clamp', (['bboxes[:, 1::2]'], {'min': '(0)', 'max': 'h'}), '(bboxes[:, 1::2], min=0, max=h)\n', (7053, 7084), False, 'import torch\n'), ((2359, 2405), 'torch.tensor', 'torch.tensor', (["([kwargs['frame_ids']] * num_objs)"], {}), "([kwargs['frame_ids']] * num_objs)\n", (2371, 2405), False, 'import torch\n'), ((4481, 4500), 'torch.cat', 'torch.cat', (['v'], {'dim': '(0)'}), '(v, dim=0)\n', (4490, 4500), False, 'import torch\n'), ((7673, 7700), 'torch.cat', 'torch.cat', (['crop_imgs'], {'dim': '(0)'}), '(crop_imgs, dim=0)\n', (7682, 7700), False, 'import torch\n'), ((7411, 7505), 'torch.nn.functional.interpolate', 'F.interpolate', (['crop_img'], {'size': "self.reid['img_scale']", 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(crop_img, size=self.reid['img_scale'], mode='bilinear',\n align_corners=False)\n", (7424, 7505), True, 'import torch.nn.functional as F\n'), ((5363, 5384), 'torch.cat', 'torch.cat', (['out'], {'dim': '(0)'}), '(out, dim=0)\n', (5372, 5384), False, 'import torch\n'), ((6869, 6911), 'torch.tensor', 'torch.tensor', (["img_metas[0]['scale_factor']"], {}), "(img_metas[0]['scale_factor'])\n", (6881, 6911), False, 'import torch\n')] |
# Copyright 2019-2022 The University of Manchester, UK
# Copyright 2020-2022 Vlaams Instituut voor Biotechnologie (VIB), BE
# Copyright 2020-2022 Barcelona Supercomputing Center (BSC), ES
# Copyright 2020-2022 Center for Advanced Studies, Research and Development in Sardinia (CRS4), IT
# Copyright 2022 École Polytechnique Fédérale de Lausanne, CH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import pathlib
import shutil
import pytest
from rocrate.utils import get_norm_value
# Location of this conftest; test data lives next to it under TEST_DATA_NAME.
THIS_DIR = pathlib.Path(__file__).absolute().parent
TEST_DATA_NAME = 'test-data'
# RO-Crate specification profile identifiers.
BASE_URL = 'https://w3id.org/ro/crate'
VERSION = '1.1'
LEGACY_VERSION = '1.0'
class Helpers:
PROFILE = f"{BASE_URL}/{VERSION}"
LEGACY_PROFILE = f"{BASE_URL}/{LEGACY_VERSION}"
WORKFLOW_PROFILE = "https://w3id.org/workflowhub/workflow-ro-crate/1.0"
METADATA_FILE_NAME = 'ro-crate-metadata.json'
LEGACY_METADATA_FILE_NAME = 'ro-crate-metadata.jsonld'
WORKFLOW_TYPES = {"File", "SoftwareSourceCode", "ComputationalWorkflow"}
WORKFLOW_DESC_TYPES = {"File", "SoftwareSourceCode", "HowTo"}
LEGACY_WORKFLOW_TYPES = {"File", "SoftwareSourceCode", "Workflow"}
PREVIEW_FILE_NAME = "ro-crate-preview.html"
@classmethod
def read_json_entities(cls, crate_base_path):
metadata_path = pathlib.Path(crate_base_path) / cls.METADATA_FILE_NAME
with open(metadata_path, "rt") as f:
json_data = json.load(f)
return {_["@id"]: _ for _ in json_data["@graph"]}
@classmethod
def check_crate(cls, json_entities, root_id="./", data_entity_ids=None):
assert root_id in json_entities
root = json_entities[root_id]
assert root["@type"] == "Dataset"
assert cls.METADATA_FILE_NAME in json_entities
metadata = json_entities[cls.METADATA_FILE_NAME]
assert metadata["@type"] == "CreativeWork"
assert cls.PROFILE in get_norm_value(metadata, "conformsTo")
assert metadata["about"] == {"@id": root_id}
if data_entity_ids:
data_entity_ids = set(data_entity_ids)
assert data_entity_ids.issubset(json_entities)
assert "hasPart" in root
assert data_entity_ids.issubset([_["@id"] for _ in root["hasPart"]])
@classmethod
def check_wf_crate(cls, json_entities, wf_file_name, root_id="./"):
cls.check_crate(json_entities, root_id=root_id)
assert json_entities[root_id]["mainEntity"]["@id"] == wf_file_name
assert wf_file_name in json_entities
wf_entity = json_entities[wf_file_name]
assert isinstance(wf_entity["@type"], list)
assert cls.WORKFLOW_TYPES.issubset(wf_entity["@type"])
assert "programmingLanguage" in wf_entity
metadata = json_entities[cls.METADATA_FILE_NAME]
assert cls.WORKFLOW_PROFILE in get_norm_value(metadata, "conformsTo")
@pytest.fixture
def helpers():
    """Expose the Helpers class to tests as a fixture."""
    return Helpers
# pytest's default tmpdir returns a py.path object
@pytest.fixture
def tmpdir(tmpdir):
    """Override pytest's `tmpdir` to yield a `pathlib.Path` instead of py.path."""
    return pathlib.Path(tmpdir)
@pytest.fixture
def test_data_dir(tmpdir):
    """Copy the shared test-data tree into this test's tmpdir and return it."""
    destination = tmpdir / TEST_DATA_NAME
    shutil.copytree(THIS_DIR / TEST_DATA_NAME, destination)
    return destination
| [
"shutil.copytree",
"json.load",
"rocrate.utils.get_norm_value",
"pathlib.Path"
] | [((3514, 3534), 'pathlib.Path', 'pathlib.Path', (['tmpdir'], {}), '(tmpdir)\n', (3526, 3534), False, 'import pathlib\n'), ((3616, 3661), 'shutil.copytree', 'shutil.copytree', (['(THIS_DIR / TEST_DATA_NAME)', 'd'], {}), '(THIS_DIR / TEST_DATA_NAME, d)\n', (3631, 3661), False, 'import shutil\n'), ((1002, 1024), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1014, 1024), False, 'import pathlib\n'), ((1797, 1826), 'pathlib.Path', 'pathlib.Path', (['crate_base_path'], {}), '(crate_base_path)\n', (1809, 1826), False, 'import pathlib\n'), ((1921, 1933), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1930, 1933), False, 'import json\n'), ((2400, 2438), 'rocrate.utils.get_norm_value', 'get_norm_value', (['metadata', '"""conformsTo"""'], {}), "(metadata, 'conformsTo')\n", (2414, 2438), False, 'from rocrate.utils import get_norm_value\n'), ((3323, 3361), 'rocrate.utils.get_norm_value', 'get_norm_value', (['metadata', '"""conformsTo"""'], {}), "(metadata, 'conformsTo')\n", (3337, 3361), False, 'from rocrate.utils import get_norm_value\n')] |
#!/usr/bin/python3
import binascii
import random
import cosim
class LoopbackTester(cosim.CosimBase):
    """Provides methods to test the loopback simulations."""

    def test_list(self):
        """The simulation must expose at least one cosim interface."""
        ifaces = self.cosim.list().wait().ifaces
        assert len(ifaces) > 0

    def test_open_close(self):
        """The first listed interface can be opened and then closed cleanly."""
        ifaces = self.cosim.list().wait().ifaces
        openResp = self.cosim.open(ifaces[0]).wait()
        assert openResp.iface is not None
        ep = openResp.iface
        ep.close().wait()

    def test_i32(self, num_msgs):
        """Round-trip `num_msgs` random 32-bit integers through the loopback.

        Args:
            num_msgs: number of messages to send and read back; each value
                must come back unchanged.
        """
        ep = self.openEP(sendType=self.schema.I32, recvType=self.schema.I32)
        for _ in range(num_msgs):
            # randrange(2**32) draws from [0, 2**32). The previous
            # randint(0, 2**32) had an inclusive upper bound and could yield
            # 2**32 itself — a 33-bit value that cannot round-trip through a
            # 32-bit field.
            data = random.randrange(2**32)
            print(f"Sending {data}")
            ep.send(self.schema.I32.new_message(i=data))
            result = self.readMsg(ep, self.schema.I32)
            print(f"Got {result}")
            assert (result.i == data)

    def write_3bytes(self, ep):
        """Send 3 random bytes over `ep` and return the payload that was sent."""
        r = random.randrange(0, 2**24)
        data = r.to_bytes(3, 'big')
        print(f'Sending: {binascii.hexlify(data)}')
        ep.send(self.schema.UntypedData.new_message(data=data)).wait()
        return data

    def read_3bytes(self, ep):
        """Read one UntypedData message from `ep` and return its payload."""
        dataMsg = self.readMsg(ep, self.schema.UntypedData)
        data = dataMsg.data
        print(binascii.hexlify(data))
        return data

    def test_3bytes(self, num_msgs=50):
        """Write `num_msgs` 3-byte messages, then read them all back in order."""
        ep = self.openEP()
        print("Testing writes")
        dataSent = list()
        for _ in range(num_msgs):
            dataSent.append(self.write_3bytes(ep))
        print()
        print("Testing reads")
        dataRecv = list()
        for _ in range(num_msgs):
            dataRecv.append(self.read_3bytes(ep))
        ep.close().wait()
        assert dataSent == dataRecv
| [
"binascii.hexlify",
"random.randint",
"random.randrange"
] | [((872, 900), 'random.randrange', 'random.randrange', (['(0)', '(2 ** 24)'], {}), '(0, 2 ** 24)\n', (888, 900), False, 'import random\n'), ((616, 642), 'random.randint', 'random.randint', (['(0)', '(2 ** 32)'], {}), '(0, 2 ** 32)\n', (630, 642), False, 'import random\n'), ((1182, 1204), 'binascii.hexlify', 'binascii.hexlify', (['data'], {}), '(data)\n', (1198, 1204), False, 'import binascii\n'), ((953, 975), 'binascii.hexlify', 'binascii.hexlify', (['data'], {}), '(data)\n', (969, 975), False, 'import binascii\n')] |
from typing import List, Optional, Tuple
from collections import defaultdict
from mp_api.core.client import BaseRester, MPRestError
import warnings
class DielectricRester(BaseRester):
    """REST client for the Materials Project dielectric endpoint."""
    suffix = "dielectric"
    def get_dielectric_from_material_id(self, material_id: str):
        """
        Get dielectric data for a given Materials Project ID.
        Arguments:
            material_id (str): Materials project ID
        Returns:
            results (Dict): Dictionary containing dielectric data.
        Raises:
            MPRestError: If no dielectric document is found for the ID.
        """
        result = self._make_request("{}/?all_fields=true".format(material_id))
        if len(result.get("data", [])) > 0:
            return result
        else:
            raise MPRestError("No document found")
    def search_dielectric_docs(
        self,
        e_total: Optional[Tuple[float, float]] = None,
        e_ionic: Optional[Tuple[float, float]] = None,
        e_static: Optional[Tuple[float, float]] = None,
        n: Optional[Tuple[float, float]] = None,
        num_chunks: Optional[int] = None,
        chunk_size: int = 100,
        fields: Optional[List[str]] = None,
    ):
        """
        Query dielectric docs using a variety of search criteria.
        Arguments:
            e_total (Tuple[float,float]): Minimum and maximum total dielectric constant to consider.
            e_ionic (Tuple[float,float]): Minimum and maximum ionic dielectric constant to consider.
            e_static (Tuple[float,float]): Minimum and maximum electronic dielectric constant to consider.
            n (Tuple[float,float]): Minimum and maximum refractive index to consider.
            num_chunks (int): Maximum number of chunks of data to yield. None will yield all possible.
            chunk_size (int): Number of data entries per chunk.
            fields (List[str]): List of fields in the dielectric doc to return data for.
                Default is material_id only.
        Yields:
            ([dict]) List of dictionaries containing data for entries defined in 'fields'.
                Defaults to Materials Project IDs only.
        """
        query_params = defaultdict(dict)  # type: dict
        # Page size is capped at 100 entries per request.
        if chunk_size <= 0 or chunk_size > 100:
            warnings.warn("Improper chunk size given. Setting value to 100.")
            chunk_size = 100
        if e_total:
            query_params.update({"e_total_min": e_total[0], "e_total_max": e_total[1]})
        if e_ionic:
            query_params.update({"e_ionic_min": e_ionic[0], "e_ionic_max": e_ionic[1]})
        if e_static:
            query_params.update(
                {"e_static_min": e_static[0], "e_static_max": e_static[1]}
            )
        if n:
            query_params.update({"n_min": n[0], "n_max": n[1]})
        if fields:
            query_params.update({"fields": ",".join(fields)})
        # Drop any parameter that ended up as None.
        query_params = {
            entry: query_params[entry]
            for entry in query_params
            if query_params[entry] is not None
        }
        query_params.update({"limit": chunk_size, "skip": 0})
        count = 0
        # Page through results until the API is exhausted or num_chunks is reached.
        while True:
            query_params["skip"] = count * chunk_size
            results = self.query(query_params).get("data", [])
            if not any(results) or (num_chunks is not None and count == num_chunks):
                break
            count += 1
            yield results
| [
"warnings.warn",
"mp_api.core.client.MPRestError",
"collections.defaultdict"
] | [((2116, 2133), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (2127, 2133), False, 'from collections import defaultdict\n'), ((707, 739), 'mp_api.core.client.MPRestError', 'MPRestError', (['"""No document found"""'], {}), "('No document found')\n", (718, 739), False, 'from mp_api.core.client import BaseRester, MPRestError\n'), ((2209, 2274), 'warnings.warn', 'warnings.warn', (['"""Improper chunk size given. Setting value to 100."""'], {}), "('Improper chunk size given. Setting value to 100.')\n", (2222, 2274), False, 'import warnings\n')] |
from wtforms import TextField, IntegerField, PasswordField
from wtforms.ext.sqlalchemy.fields import (
QuerySelectField, QuerySelectMultipleField)
from wtforms.validators import Required
from pynuts.view import BaseForm
import database
from application import nuts
class EmployeeView(nuts.ModelView):
    """Pynuts CRUD view for ``database.Employee`` records."""
    model = database.Employee
    # Column rendered in list views.
    list_column = 'fullname'
    table_columns = ('fullname', )
    # Columns exposed per CRUD operation.
    create_columns = ('login', 'password', 'name', 'firstname', 'company')
    read_columns = ('person_id', 'name', 'firstname', 'fullname', 'company')
    update_columns = ('name', 'firstname')

    class Form(BaseForm):
        """WTForms form backing employee create/update."""
        person_id = IntegerField('ID')
        login = TextField(u'Login', validators=[Required()])
        password = PasswordField(u'Password', validators=[Required()])
        name = TextField(u'Surname', validators=[Required()])
        firstname = TextField(u'Firstname', validators=[Required()])
        fullname = TextField(u'Employee name')
        # Company dropdown; the lambda defers the query until the form renders.
        company = QuerySelectField(
            u'Company', get_label='name',
            query_factory=lambda: database.Company.query, allow_blank=True)
class CompanyView(nuts.ModelView):
    """Pynuts CRUD view for ``database.Company`` records."""
    model = database.Company
    # Column rendered in list views.
    list_column = 'name'
    create_columns = ('name', 'employees')
    read_columns = ('name', 'employees')

    class Form(BaseForm):
        """WTForms form backing company create/update."""
        company_id = IntegerField('Company')
        name = TextField('Company name')
        # Multi-select of employees; only offers employees with no company yet.
        employees = QuerySelectMultipleField(
            u'Employees', get_label='fullname', query_factory=
            lambda: database.Employee.query.filter_by(company_id=None))
| [
"wtforms.IntegerField",
"wtforms.validators.Required",
"wtforms.ext.sqlalchemy.fields.QuerySelectField",
"wtforms.TextField",
"database.Employee.query.filter_by"
] | [((645, 663), 'wtforms.IntegerField', 'IntegerField', (['"""ID"""'], {}), "('ID')\n", (657, 663), False, 'from wtforms import TextField, IntegerField, PasswordField\n'), ((946, 973), 'wtforms.TextField', 'TextField', (['u"""Employee name"""'], {}), "(u'Employee name')\n", (955, 973), False, 'from wtforms import TextField, IntegerField, PasswordField\n'), ((992, 1107), 'wtforms.ext.sqlalchemy.fields.QuerySelectField', 'QuerySelectField', (['u"""Company"""'], {'get_label': '"""name"""', 'query_factory': '(lambda : database.Company.query)', 'allow_blank': '(True)'}), "(u'Company', get_label='name', query_factory=lambda :\n database.Company.query, allow_blank=True)\n", (1008, 1107), False, 'from wtforms.ext.sqlalchemy.fields import QuerySelectField, QuerySelectMultipleField\n'), ((1352, 1375), 'wtforms.IntegerField', 'IntegerField', (['"""Company"""'], {}), "('Company')\n", (1364, 1375), False, 'from wtforms import TextField, IntegerField, PasswordField\n'), ((1391, 1416), 'wtforms.TextField', 'TextField', (['"""Company name"""'], {}), "('Company name')\n", (1400, 1416), False, 'from wtforms import TextField, IntegerField, PasswordField\n'), ((712, 722), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (720, 722), False, 'from wtforms.validators import Required\n'), ((783, 793), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (791, 793), False, 'from wtforms.validators import Required\n'), ((845, 855), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (853, 855), False, 'from wtforms.validators import Required\n'), ((914, 924), 'wtforms.validators.Required', 'Required', ([], {}), '()\n', (922, 924), False, 'from wtforms.validators import Required\n'), ((1546, 1596), 'database.Employee.query.filter_by', 'database.Employee.query.filter_by', ([], {'company_id': 'None'}), '(company_id=None)\n', (1579, 1596), False, 'import database\n')] |
"""AirTouch 4 component to control of AirTouch 4 Climate Devices."""
from __future__ import annotations
import logging
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
FAN_AUTO,
FAN_DIFFUSE,
FAN_FOCUS,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE

# AirTouch-reported AC mode strings -> Home Assistant HVAC modes.
AT_TO_HA_STATE = {
    "Heat": HVAC_MODE_HEAT,
    "Cool": HVAC_MODE_COOL,
    "AutoHeat": HVAC_MODE_AUTO,  # airtouch reports either autoheat or autocool
    "AutoCool": HVAC_MODE_AUTO,
    "Auto": HVAC_MODE_AUTO,
    "Dry": HVAC_MODE_DRY,
    "Fan": HVAC_MODE_FAN_ONLY,
}

# Inverse direction: HA HVAC modes -> strings the AirTouch API expects.
HA_STATE_TO_AT = {
    HVAC_MODE_HEAT: "Heat",
    HVAC_MODE_COOL: "Cool",
    HVAC_MODE_AUTO: "Auto",
    HVAC_MODE_DRY: "Dry",
    HVAC_MODE_FAN_ONLY: "Fan",
    HVAC_MODE_OFF: "Off",
}

# AirTouch fan-speed names -> HA fan modes.
AT_TO_HA_FAN_SPEED = {
    "Quiet": FAN_DIFFUSE,
    "Low": FAN_LOW,
    "Medium": FAN_MEDIUM,
    "High": FAN_HIGH,
    "Powerful": FAN_FOCUS,
    "Auto": FAN_AUTO,
    "Turbo": "turbo",
}

# Zone groups only support being off or circulating air.
AT_GROUP_MODES = [HVAC_MODE_OFF, HVAC_MODE_FAN_ONLY]

# Reverse fan-speed lookup, derived from the forward table above.
HA_FAN_SPEED_TO_AT = {value: key for key, value in AT_TO_HA_FAN_SPEED.items()}

_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Airtouch 4."""
    coordinator = hass.data[DOMAIN][config_entry.entry_id]
    info = coordinator.data
    # Build one climate entity per zone group, then one per AC unit.
    entities: list[ClimateEntity] = []
    for group in info["groups"]:
        entities.append(AirtouchGroup(coordinator, group["group_number"], info))
    for ac in info["acs"]:
        entities.append(AirtouchAC(coordinator, ac["ac_number"], info))
    _LOGGER.debug(" Found entities %s", entities)
    async_add_entities(entities)
class AirtouchAC(CoordinatorEntity, ClimateEntity):
    """Representation of an AirTouch 4 ac."""

    # This entity supports target temperature and fan-speed control.
    _attr_supported_features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
    _attr_temperature_unit = TEMP_CELSIUS

    def __init__(self, coordinator, ac_number, info):
        """Initialize the climate device."""
        super().__init__(coordinator)
        self._ac_number = ac_number
        self._airtouch = coordinator.airtouch
        self._info = info
        # Cached state object for this AC unit; refreshed on coordinator updates.
        self._unit = self._airtouch.GetAcs()[self._ac_number]

    @callback
    def _handle_coordinator_update(self):
        """Re-read this unit's state before the base class writes it out."""
        self._unit = self._airtouch.GetAcs()[self._ac_number]
        return super()._handle_coordinator_update()

    @property
    def device_info(self) -> DeviceInfo:
        """Return device info for this device."""
        return DeviceInfo(
            identifiers={(DOMAIN, self.unique_id)},
            name=self.name,
            manufacturer="Airtouch",
            model="Airtouch 4",
        )

    @property
    def unique_id(self):
        """Return unique ID for this device."""
        return f"ac_{self._ac_number}"

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._unit.Temperature

    @property
    def name(self):
        """Return the name of the climate device."""
        return f"AC {self._ac_number}"

    @property
    def fan_mode(self):
        """Return fan mode of the AC this group belongs to."""
        return AT_TO_HA_FAN_SPEED[self._airtouch.acs[self._ac_number].AcFanSpeed]

    @property
    def fan_modes(self):
        """Return the list of available fan modes."""
        airtouch_fan_speeds = self._airtouch.GetSupportedFanSpeedsForAc(self._ac_number)
        return [AT_TO_HA_FAN_SPEED[speed] for speed in airtouch_fan_speeds]

    @property
    def hvac_mode(self):
        """Return hvac target hvac state."""
        is_off = self._unit.PowerState == "Off"
        if is_off:
            return HVAC_MODE_OFF
        return AT_TO_HA_STATE[self._airtouch.acs[self._ac_number].AcMode]

    @property
    def hvac_modes(self):
        """Return the list of available operation modes."""
        airtouch_modes = self._airtouch.GetSupportedCoolingModesForAc(self._ac_number)
        modes = [AT_TO_HA_STATE[mode] for mode in airtouch_modes]
        # "Off" is always selectable even though the unit does not report it.
        modes.append(HVAC_MODE_OFF)
        return modes

    async def async_set_hvac_mode(self, hvac_mode):
        """Set new operation mode."""
        if hvac_mode not in HA_STATE_TO_AT:
            raise ValueError(f"Unsupported HVAC mode: {hvac_mode}")
        if hvac_mode == HVAC_MODE_OFF:
            return await self.async_turn_off()
        await self._airtouch.SetCoolingModeForAc(
            self._ac_number, HA_STATE_TO_AT[hvac_mode]
        )
        # in case it isn't already, unless the HVAC mode was off, then the ac should be on
        await self.async_turn_on()
        self._unit = self._airtouch.GetAcs()[self._ac_number]
        _LOGGER.debug("Setting operation mode of %s to %s", self._ac_number, hvac_mode)
        self.async_write_ha_state()

    async def async_set_fan_mode(self, fan_mode):
        """Set new fan mode."""
        if fan_mode not in self.fan_modes:
            raise ValueError(f"Unsupported fan mode: {fan_mode}")
        _LOGGER.debug("Setting fan mode of %s to %s", self._ac_number, fan_mode)
        await self._airtouch.SetFanSpeedForAc(
            self._ac_number, HA_FAN_SPEED_TO_AT[fan_mode]
        )
        self._unit = self._airtouch.GetAcs()[self._ac_number]
        self.async_write_ha_state()

    async def async_turn_on(self):
        """Turn on."""
        _LOGGER.debug("Turning %s on", self.unique_id)
        # in case ac is not on. Airtouch turns itself off if no groups are turned on
        # (even if groups turned back on)
        await self._airtouch.TurnAcOn(self._ac_number)

    async def async_turn_off(self):
        """Turn off."""
        _LOGGER.debug("Turning %s off", self.unique_id)
        await self._airtouch.TurnAcOff(self._ac_number)
        self.async_write_ha_state()
class AirtouchGroup(CoordinatorEntity, ClimateEntity):
    """Representation of an AirTouch 4 group."""

    # Groups only expose a temperature setpoint; fan speed follows the parent AC.
    _attr_supported_features = SUPPORT_TARGET_TEMPERATURE
    _attr_temperature_unit = TEMP_CELSIUS
    _attr_hvac_modes = AT_GROUP_MODES

    def __init__(self, coordinator, group_number, info):
        """Initialize the climate device."""
        super().__init__(coordinator)
        self._group_number = group_number
        self._airtouch = coordinator.airtouch
        self._info = info
        # Cached state object for this group; refreshed on coordinator updates.
        self._unit = self._airtouch.GetGroupByGroupNumber(self._group_number)

    @callback
    def _handle_coordinator_update(self):
        """Re-read this group's state before the base class writes it out."""
        self._unit = self._airtouch.GetGroupByGroupNumber(self._group_number)
        return super()._handle_coordinator_update()

    @property
    def device_info(self) -> DeviceInfo:
        """Return device info for this device."""
        return DeviceInfo(
            identifiers={(DOMAIN, self.unique_id)},
            manufacturer="Airtouch",
            model="Airtouch 4",
            name=self.name,
        )

    @property
    def unique_id(self):
        """Return unique ID for this device."""
        return self._group_number

    @property
    def min_temp(self):
        """Return Minimum Temperature for AC of this group."""
        return self._airtouch.acs[self._unit.BelongsToAc].MinSetpoint

    @property
    def max_temp(self):
        """Return Max Temperature for AC of this group."""
        return self._airtouch.acs[self._unit.BelongsToAc].MaxSetpoint

    @property
    def name(self):
        """Return the name of the climate device."""
        return self._unit.GroupName

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._unit.Temperature

    @property
    def target_temperature(self):
        """Return the temperature we are trying to reach."""
        return self._unit.TargetSetpoint

    @property
    def hvac_mode(self):
        """Return hvac target hvac state."""
        # there are other power states that aren't 'on' but still count as on (eg. 'Turbo')
        is_off = self._unit.PowerState == "Off"
        if is_off:
            return HVAC_MODE_OFF
        return HVAC_MODE_FAN_ONLY

    async def async_set_hvac_mode(self, hvac_mode):
        """Set new operation mode."""
        if hvac_mode not in HA_STATE_TO_AT:
            raise ValueError(f"Unsupported HVAC mode: {hvac_mode}")
        if hvac_mode == HVAC_MODE_OFF:
            return await self.async_turn_off()
        if self.hvac_mode == HVAC_MODE_OFF:
            await self.async_turn_on()
        self._unit = self._airtouch.GetGroups()[self._group_number]
        _LOGGER.debug(
            "Setting operation mode of %s to %s", self._group_number, hvac_mode
        )
        self.async_write_ha_state()

    @property
    def fan_mode(self):
        """Return fan mode of the AC this group belongs to."""
        return AT_TO_HA_FAN_SPEED[self._airtouch.acs[self._unit.BelongsToAc].AcFanSpeed]

    @property
    def fan_modes(self):
        """Return the list of available fan modes."""
        airtouch_fan_speeds = self._airtouch.GetSupportedFanSpeedsByGroup(
            self._group_number
        )
        return [AT_TO_HA_FAN_SPEED[speed] for speed in airtouch_fan_speeds]

    async def async_set_temperature(self, **kwargs):
        """Set new target temperatures."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        _LOGGER.debug("Setting temp of %s to %s", self._group_number, str(temp))
        self._unit = await self._airtouch.SetGroupToTemperature(
            self._group_number, int(temp)
        )
        self.async_write_ha_state()

    async def async_set_fan_mode(self, fan_mode):
        """Set new fan mode."""
        if fan_mode not in self.fan_modes:
            raise ValueError(f"Unsupported fan mode: {fan_mode}")
        _LOGGER.debug("Setting fan mode of %s to %s", self._group_number, fan_mode)
        self._unit = await self._airtouch.SetFanSpeedByGroup(
            self._group_number, HA_FAN_SPEED_TO_AT[fan_mode]
        )
        self.async_write_ha_state()

    async def async_turn_on(self):
        """Turn on."""
        _LOGGER.debug("Turning %s on", self.unique_id)
        await self._airtouch.TurnGroupOn(self._group_number)
        # in case ac is not on. Airtouch turns itself off if no groups are turned on
        # (even if groups turned back on)
        await self._airtouch.TurnAcOn(
            self._airtouch.GetGroupByGroupNumber(self._group_number).BelongsToAc
        )
        # this might cause the ac object to be wrong, so force the shared data
        # store to update
        await self.coordinator.async_request_refresh()
        self.async_write_ha_state()

    async def async_turn_off(self):
        """Turn off."""
        _LOGGER.debug("Turning %s off", self.unique_id)
        await self._airtouch.TurnGroupOff(self._group_number)
        # this will cause the ac object to be wrong
        # (ac turns off automatically if no groups are running)
        # so force the shared data store to update
        await self.coordinator.async_request_refresh()
        self.async_write_ha_state()
| [
"logging.getLogger",
"homeassistant.helpers.entity.DeviceInfo"
] | [((1753, 1780), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1770, 1780), False, 'import logging\n'), ((3197, 3312), 'homeassistant.helpers.entity.DeviceInfo', 'DeviceInfo', ([], {'identifiers': '{(DOMAIN, self.unique_id)}', 'name': 'self.name', 'manufacturer': '"""Airtouch"""', 'model': '"""Airtouch 4"""'}), "(identifiers={(DOMAIN, self.unique_id)}, name=self.name,\n manufacturer='Airtouch', model='Airtouch 4')\n", (3207, 3312), False, 'from homeassistant.helpers.entity import DeviceInfo\n'), ((7365, 7480), 'homeassistant.helpers.entity.DeviceInfo', 'DeviceInfo', ([], {'identifiers': '{(DOMAIN, self.unique_id)}', 'manufacturer': '"""Airtouch"""', 'model': '"""Airtouch 4"""', 'name': 'self.name'}), "(identifiers={(DOMAIN, self.unique_id)}, manufacturer='Airtouch',\n model='Airtouch 4', name=self.name)\n", (7375, 7480), False, 'from homeassistant.helpers.entity import DeviceInfo\n')] |
"""
Error classes, when needed for exceptions.
"""
from _ast import AST
from dataclasses import dataclass, field
from typing import Optional, Union
from src.compiler.Util import Util
@dataclass(frozen=True)
class ObjectAlreadyDefinedError(NameError):
    """Raised when an object that was already type-hinted is hinted again.

    Under this compilation scheme an object is defined exactly once, with a
    type hint; a second hint for the same name is an error. Object types are
    therefore immutable and objects cannot be freed.
    """

    # Name of the object that was defined a second time.
    object_name: str

    def __str__(self) -> str:
        """Build the human-readable error message."""
        message = f"You cannot redefine object '{self.object_name}' as it is already initialized."
        return message
@dataclass(frozen=True)
class ObjectNotDefinedError(NameError):
    """Raised when an object is referenced before it has been defined.

    An object's first use must carry an explicit type hint (its
    "definition"/"initialization"); referencing it earlier triggers
    this error.
    """

    # Name of the object that was referenced before initialization.
    object_name: str

    def __str__(self) -> str:
        """Build the human-readable error message."""
        message = f"Object '{self.object_name}' was not initialized yet."
        return message
@dataclass(frozen=True)
class UnsupportedFeatureException(SyntaxError):
    """Raised when code uses a Python feature the compiler does not implement.

    Examples (currently) include classes. The offending feature may be given
    either as an AST node (its name is resolved via ``Util``) or directly as
    a string.
    """

    # The unsupported construct: an AST node or its name as a string.
    feature: Union[AST, str]

    def __str__(self) -> str:
        """Build the human-readable error message."""
        if isinstance(self.feature, AST):
            # Resolve a readable name for the AST node.
            feature_name = Util.get_name(self.feature)
        else:
            feature_name = self.feature
        return "Python feature '" + feature_name + "' is not supported by the compiler."
@dataclass(frozen=True)
class InvalidArgumentError(ValueError):
    """Raised for an invalid command-line argument.

    Intended for CLI argument handling only — not for syntax arguments or
    code currently being compiled. ``argument`` may be ``None`` when the
    failure happened inside the argument-handling machinery itself.
    """

    # The offending argument, or None for an internal handling failure.
    argument: Optional[str] = field(default=None)

    def __str__(self) -> str:
        """Build the human-readable error message."""
        if self.argument is None:
            return "Internal argument handling error encountered."
        return f"Argument '{self.argument}' is not valid."
@dataclass(frozen=True)
class SyntaxSubsetError(SyntaxError):
    """Raised when code falls outside the supported syntax subset."""

    # The construct that violated the syntax subset.
    warning: str = field()

    def __str__(self) -> str:
        """Build the human-readable error message."""
        message = (
            f"Invalid usage of '{self.warning}' caused a syntax error "
            "(the code must comply to the syntax subset)."
        )
        return message
@dataclass(frozen=True)
class InvalidTypeError(TypeError):
    """Raised when user code supplies an invalid or conflicting type.

    Both fields default to ``None``; when ``given_type`` is missing the
    message falls back to a generic type-conflict description.
    """

    # Type actually supplied by the user, if known.
    given_type: Optional[str] = field(default=None)
    # Type that was expected at that position, if known.
    expected_type: Optional[str] = field(default=None)

    def __str__(self) -> str:
        """Build the human-readable error message."""
        if self.given_type is None:
            return "Invalid types (or value of conflicting type) found in code."
        return f"Could not use type '{self.given_type}' when type '{self.expected_type}' was expected."
| [
"src.compiler.Util.Util.get_name",
"dataclasses.dataclass",
"dataclasses.field"
] | [((187, 209), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (196, 209), False, 'from dataclasses import dataclass, field\n'), ((712, 734), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (721, 734), False, 'from dataclasses import dataclass, field\n'), ((1204, 1226), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1213, 1226), False, 'from dataclasses import dataclass, field\n'), ((1793, 1815), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1802, 1815), False, 'from dataclasses import dataclass, field\n'), ((2348, 2370), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2357, 2370), False, 'from dataclasses import dataclass, field\n'), ((2723, 2745), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2732, 2745), False, 'from dataclasses import dataclass, field\n'), ((2096, 2115), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2101, 2115), False, 'from dataclasses import dataclass, field\n'), ((2541, 2548), 'dataclasses.field', 'field', ([], {}), '()\n', (2546, 2548), False, 'from dataclasses import dataclass, field\n'), ((2952, 2971), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (2957, 2971), False, 'from dataclasses import dataclass, field\n'), ((3007, 3026), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (3012, 3026), False, 'from dataclasses import dataclass, field\n'), ((1652, 1679), 'src.compiler.Util.Util.get_name', 'Util.get_name', (['self.feature'], {}), '(self.feature)\n', (1665, 1679), False, 'from src.compiler.Util import Util\n')] |
# -*- coding: utf-8 -*-
import argparse
from github.accounts.github_account import GithubAccount
from github.domain.github import GithubUser
from github.recorders.github.common import get_result
from zvdata.api import get_entities
from zvdata.domain import get_db_session
from zvdata.recorder import TimeSeriesDataRecorder
from zvdata.utils.time_utils import day_offset_today, now_pd_timestamp
class GithubUserInfoRecorder(TimeSeriesDataRecorder):
    """Records GitHub user profile data into the ``GithubUser`` schema."""

    entity_provider = 'github'
    entity_schema = GithubUser

    provider = 'github'
    data_schema = GithubUser

    # REST endpoint template; filled with the user's login (the entity code).
    url = 'https://api.github.com/users/{}'

    def __init__(self,
                 codes=None,
                 batch_size=50,
                 force_update=True,
                 sleeping_time=5,
                 default_size=2000,
                 one_shot=True,
                 fix_duplicate_way='ignore',
                 start_timestamp=None,
                 end_timestamp=None) -> None:
        super().__init__('github_user', ['github'], None, codes, batch_size, force_update, sleeping_time,
                         default_size, one_shot, fix_duplicate_way, start_timestamp, end_timestamp)
        # Incremented per request and passed to GithubAccount.get_token,
        # presumably to rotate through API tokens — see record().
        self.seed = 0

    def init_entities(self):
        """Load the GithubUser entities that still need refreshing."""
        # Reuse the data session when entity and data schemas share a provider.
        if self.entity_provider == self.provider and self.entity_schema == self.data_schema:
            self.entity_session = self.session
        else:
            self.entity_session = get_db_session(provider=self.entity_provider, data_schema=self.entity_schema)

        # init the entity list
        self.entities = get_entities(session=self.entity_session,
                                     entity_type=self.entity_type,
                                     entity_ids=self.entity_ids,
                                     codes=self.codes,
                                     return_type='domain',
                                     provider=self.entity_provider,
                                     # skip entities already updated within the last 7 days
                                     filters=[(GithubUser.updated_timestamp < day_offset_today(
                                         -7)) | (GithubUser.updated_timestamp.is_(None))],
                                     start_timestamp=self.start_timestamp,
                                     end_timestamp=self.end_timestamp)

    def record(self, entity_item, start, end, size, timestamps):
        """Fetch one user's profile from the GitHub API.

        Returns a single-element list with the profile dict (stamped with
        ``updated_timestamp``), or an empty list when the request yielded
        nothing.
        """
        self.seed += 1

        the_url = self.url.format(entity_item.code)

        user_info = get_result(url=the_url, token=GithubAccount.get_token(seed=self.seed))

        if user_info:
            user_info['updated_timestamp'] = now_pd_timestamp()
            return [user_info]

        return []

    def get_data_map(self):
        """Map GitHub API response fields to GithubUser schema columns."""
        return {
            'site_admin': 'site_admin',
            'name': 'name',
            'avatar_url': 'avatar_url',
            'gravatar_id': 'gravatar_id',
            'company': 'company',
            'blog': 'blog',
            'location': 'location',
            'email': 'email',
            'hireable': 'hireable',
            'bio': 'bio',
            'public_repos': 'public_repos',
            'public_gists': 'public_gists',
            'followers': 'followers',
            'following': 'following',
            'updated_timestamp': 'updated_timestamp'
        }

    def generate_domain_id(self, security_item, original_data):
        """Use the entity's own id as the record id (one row per user)."""
        return security_item.id

    def evaluate_start_end_size_timestamps(self, entity):
        """Decide how much data to fetch; skip entities refreshed < 7 days ago."""
        latest_record = self.get_latest_saved_record(entity=entity)

        if latest_record:
            latest_timestamp = latest_record.updated_timestamp
            if latest_timestamp is not None:
                if (now_pd_timestamp() - latest_timestamp).days < 7:
                    self.logger.info('entity_item:{},updated_timestamp:{},ignored'.format(entity.id, latest_timestamp))
                    # size 0 means: nothing to fetch for this entity.
                    return None, None, 0, None

        return None, None, self.default_size, None
if __name__ == '__main__':
    # CLI entry point: record GitHub user info over a timestamp window.
    parser = argparse.ArgumentParser()
    parser.add_argument('--start', help='start_timestamp', default='2015-01-01')
    parser.add_argument('--end', help='end_timestamp', default='2015-12-31')

    args = parser.parse_args()

    start = args.start
    end = args.end

    recorder = GithubUserInfoRecorder(start_timestamp=start, end_timestamp=end)
    recorder.run()
| [
"argparse.ArgumentParser",
"github.accounts.github_account.GithubAccount.get_token",
"zvdata.utils.time_utils.now_pd_timestamp",
"github.domain.github.GithubUser.updated_timestamp.is_",
"zvdata.domain.get_db_session",
"zvdata.utils.time_utils.day_offset_today"
] | [((3955, 3980), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3978, 3980), False, 'import argparse\n'), ((1412, 1489), 'zvdata.domain.get_db_session', 'get_db_session', ([], {'provider': 'self.entity_provider', 'data_schema': 'self.entity_schema'}), '(provider=self.entity_provider, data_schema=self.entity_schema)\n', (1426, 1489), False, 'from zvdata.domain import get_db_session\n'), ((2585, 2603), 'zvdata.utils.time_utils.now_pd_timestamp', 'now_pd_timestamp', ([], {}), '()\n', (2601, 2603), False, 'from zvdata.utils.time_utils import day_offset_today, now_pd_timestamp\n'), ((2477, 2516), 'github.accounts.github_account.GithubAccount.get_token', 'GithubAccount.get_token', ([], {'seed': 'self.seed'}), '(seed=self.seed)\n', (2500, 2516), False, 'from github.accounts.github_account import GithubAccount\n'), ((2097, 2135), 'github.domain.github.GithubUser.updated_timestamp.is_', 'GithubUser.updated_timestamp.is_', (['None'], {}), '(None)\n', (2129, 2135), False, 'from github.domain.github import GithubUser\n'), ((2030, 2050), 'zvdata.utils.time_utils.day_offset_today', 'day_offset_today', (['(-7)'], {}), '(-7)\n', (2046, 2050), False, 'from zvdata.utils.time_utils import day_offset_today, now_pd_timestamp\n'), ((3645, 3663), 'zvdata.utils.time_utils.now_pd_timestamp', 'now_pd_timestamp', ([], {}), '()\n', (3661, 3663), False, 'from zvdata.utils.time_utils import day_offset_today, now_pd_timestamp\n')] |
# Importing needed libraries
import uuid
from decouple import config
from dotenv import load_dotenv
from flask import Flask, render_template, request, jsonify
from sklearn.externals import joblib
import traceback
import pandas as pd
import numpy as np
from flask_sqlalchemy import SQLAlchemy
# Flask-SQLAlchemy extension instance; bound to the app inside create_app().
DB = SQLAlchemy()
# Load key/value pairs from the local .env file into the environment.
load_dotenv()
# Running function to create the app
def create_app():
    """Application factory: build and configure the Flask app.

    Configures SQLAlchemy from the DATABASE_URL environment variable,
    registers the /predict endpoint, and returns the app.

    Returns:
        Flask: the configured application instance.
    """
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = config('DATABASE_URL')
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    DB.init_app(app)

    @app.route('/predict', methods=['POST'])
    def predict():
        """Score the POSTed JSON payload with the globally loaded model."""
        if lr:
            try:
                json_ = request.json
                print(json_)
                # One-hot encode the payload and align it to the training columns.
                query = pd.get_dummies(pd.DataFrame(json_))
                query = query.reindex(columns=model_columns, fill_value=0)
                prediction = list(lr.predict(query))
                return jsonify({'prediction': str(prediction)})
            # BUG FIX: bare 'except:' also swallowed SystemExit/KeyboardInterrupt.
            except Exception:
                return jsonify({'trace': traceback.format_exc()})
        else:
            print('Train the model first')
            return ('No model here to use')

    # BUG FIX: the factory previously built the app but never returned it,
    # so callers (including the __main__ block below) always got None.
    return app


if __name__ == '__main__':
    import sys  # BUG FIX: 'sys' was used below but never imported

    try:
        port = int(sys.argv[1])  # This is for a command-line input
    except (IndexError, ValueError):
        port = 12345  # If you don't provide any port the port will be set to 12345

    lr = joblib.load("model.pkl")  # Load "model.pkl"
    print('Model loaded')
    model_columns = joblib.load("model_columns.pkl")  # Load "model_columns.pkl"
    print('Model columns loaded')

    # BUG FIX: 'app' was never defined at module level; build it via the factory.
    app = create_app()
    app.run(port=port, debug=True)
| [
"traceback.format_exc",
"flask.Flask",
"sklearn.externals.joblib.load",
"decouple.config",
"dotenv.load_dotenv",
"pandas.DataFrame",
"flask_sqlalchemy.SQLAlchemy"
] | [((314, 326), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (324, 326), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((361, 374), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (372, 374), False, 'from dotenv import load_dotenv\n'), ((526, 541), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (531, 541), False, 'from flask import Flask, render_template, request, jsonify\n'), ((586, 608), 'decouple.config', 'config', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (592, 608), False, 'from decouple import config\n'), ((1528, 1552), 'sklearn.externals.joblib.load', 'joblib.load', (['"""model.pkl"""'], {}), "('model.pkl')\n", (1539, 1552), False, 'from sklearn.externals import joblib\n'), ((1627, 1659), 'sklearn.externals.joblib.load', 'joblib.load', (['"""model_columns.pkl"""'], {}), "('model_columns.pkl')\n", (1638, 1659), False, 'from sklearn.externals import joblib\n'), ((890, 909), 'pandas.DataFrame', 'pd.DataFrame', (['json_'], {}), '(json_)\n', (902, 909), True, 'import pandas as pd\n'), ((1168, 1190), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1188, 1190), False, 'import traceback\n')] |
import pandas as pd
import os
from sklearn.svm import SVC
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.model_selection import train_test_split
import pickle
# Train a linear SVM on the per-class CSV files under ./dataset.
# Each file holds the samples of one class; the class label is the
# file's position in the directory listing.
BASE_PATH = os.path.join(os.getcwd(), "dataset")

frames = []
for i, file_name in enumerate(os.listdir(BASE_PATH)):
    file_path = os.path.join(BASE_PATH, file_name)
    print(file_path)
    data_frame = pd.read_csv(file_path, header=None)
    data_frame.pop(178)  # drop the label column shipped inside the file
    data_frame.pop(0)    # drop the sample-id column
    # BUG FIX: the label column must contain one entry per ROW. The original
    # built it from range(data_frame.shape[1]) (the COLUMN count), so any row
    # past index shape[1]-1 got a NaN label after the join.
    data_frame['result'] = i
    frames.append(data_frame)
# pd.concat replaces the deprecated incremental DataFrame.append loop.
df = pd.concat(frames, ignore_index=True)

# Standardize features, split, and fit a linear-kernel SVM.
scaler = StandardScaler()
y = df.pop("result")
scalled_data = scaler.fit_transform(df)
X_train, X_test, y_train, y_test = train_test_split(scalled_data, y, test_size=0.20)

svclassifier = SVC(kernel='linear')
svclassifier.fit(X_train, y_train)

y_pred = svclassifier.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))

# Persist the model artefacts; 'with' guarantees the handles are closed
# (the original leaked the file objects returned by open()).
with open("classifier.pkl", 'wb') as model_file:
    pickle.dump(svclassifier, model_file)
with open("scaler.pkl", 'wb') as scaler_file:
    pickle.dump(scaler, scaler_file)
pickle.dump(scaler , open("scaler.pkl" , 'wb')) | [
"os.listdir",
"sklearn.metrics.confusion_matrix",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.classification_report",
"os.path.join",
"os.getcwd",
"sklearn.preprocessing.StandardScaler",
"sklearn.svm.SVC"
] | [((326, 347), 'os.listdir', 'os.listdir', (['BASE_PATH'], {}), '(BASE_PATH)\n', (336, 347), False, 'import os\n'), ((772, 788), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (786, 788), False, 'from sklearn.preprocessing import StandardScaler\n'), ((886, 934), 'sklearn.model_selection.train_test_split', 'train_test_split', (['scalled_data', 'y'], {'test_size': '(0.2)'}), '(scalled_data, y, test_size=0.2)\n', (902, 934), False, 'from sklearn.model_selection import train_test_split\n'), ((955, 975), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (958, 975), False, 'from sklearn.svm import SVC\n'), ((268, 279), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (277, 279), False, 'import os\n'), ((365, 399), 'os.path.join', 'os.path.join', (['BASE_PATH', 'file_name'], {}), '(BASE_PATH, file_name)\n', (377, 399), False, 'import os\n'), ((439, 474), 'pandas.read_csv', 'pd.read_csv', (['file_path'], {'header': 'None'}), '(file_path, header=None)\n', (450, 474), True, 'import pandas as pd\n'), ((1055, 1087), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1071, 1087), False, 'from sklearn.metrics import classification_report, confusion_matrix\n'), ((1094, 1131), 'sklearn.metrics.classification_report', 'classification_report', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1115, 1131), False, 'from sklearn.metrics import classification_report, confusion_matrix\n')] |
from flask import Flask
# import pyodbc
app = Flask(__name__)


@app.route("/")
def hello():
    """Root endpoint: return a static greeting.

    The disabled block below used to query a SQL Server instance over
    pyodbc and append the rows to the response; kept for reference.
    """
    # server = 'localhost\sqlexpress' # for a named instance
    # server = 'myserver,port' # to specify an alternate port
    # server = 'tcp:mytest.centralus.cloudapp.azure.com'
    # database = 'test'
    # username = 'ndb'
    # password = '<PASSWORD>###'
    # cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};SERVER='+server+';DATABASE='+database+';UID='+username+';PWD='+ password)
    # cursor = cnxn.cursor()
    # cursor.execute('SELECT * FROM dbo.Users')
    # s = ' '
    # for row in cursor:
    #     s += ''.join(row)
    #     print(row)
    suffix = '!! Azure'
    return "hello" + suffix
| [
"flask.Flask"
] | [((47, 62), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (52, 62), False, 'from flask import Flask\n')] |
from sweeps.sweepFunctions import *
import numpy as np
def SMTBFSweep(SMTBFSweepInput: dict, ourInput: dict) -> None:
    """Expand a System-MTBF sweep specification into concrete experiments.

    The SMTBF values come from one of: an explicit "range", a "sticky-range"
    (one value per already-existing experiment), or a min/max/step triple —
    each optionally transformed by a "formula" string evaluated with ``eval``
    (the loop variable ``i`` is each raw value).  ``ourInput`` is mutated in
    place: if empty, one experiment per SMTBF value is created; otherwise
    existing experiments are updated and, for non-sticky sweeps, the
    cross-product with the remaining SMTBF values is appended.

    NOTE(review): "formula" is passed to eval(), so the sweep file must be
    trusted input.
    """
    # Pull the optional configuration keys; each is False when absent.
    myRange = SMTBFSweepInput["range"] if dictHasKey(SMTBFSweepInput,"range") else False
    myStickyRange=SMTBFSweepInput["sticky-range"] if dictHasKey(SMTBFSweepInput,"sticky-range") else False
    # sticky is True exactly when a sticky-range was provided.
    sticky=False if type(myStickyRange) == bool else True
    myFormula = SMTBFSweepInput["formula"] if dictHasKey(SMTBFSweepInput,"formula") else False
    fixedToNode = SMTBFSweepInput["compute-SMTBF-from-NMTBF"] if dictHasKey(SMTBFSweepInput,"compute-SMTBF-from-NMTBF") else False
    if type(myRange) == bool and type(myStickyRange) == bool:
        #ok so we are going to have a min,max,step
        minimum = float(SMTBFSweepInput["min"])
        maximum = float(SMTBFSweepInput["max"])
        step = float(SMTBFSweepInput["step"])
        if myFormula:
            #ok so we have a formula
            # maximum+step so the range is inclusive of 'max'.
            formula_range = list(np.arange(minimum,maximum+step,step))
            SMTBFRange = [eval(myFormula) for i in formula_range]
        else:
            SMTBFRange = list(np.arange(minimum,maximum+step,step))
    elif myFormula:
        # A formula over an explicit (sticky-)range.
        if sticky:
            formula_range = myStickyRange
        else:
            formula_range = myRange
        SMTBFRange = [eval(myFormula) for i in formula_range]
    else:
        # Raw values, no formula.
        if sticky:
            SMTBFRange = myStickyRange
        else:
            SMTBFRange = myRange
    currentExperiments = len(ourInput.keys())
    # A sticky range must supply exactly one value per existing experiment.
    if sticky and not(len(SMTBFRange) == currentExperiments):
        print("chose sticky-range for SMTBF but length of sticky-range does not match length of currentExperiments\n"+"SMTBFRange: "+str(len(SMTBFRange))
              +" currentExperiments: "+ str(currentExperiments))
        raise ValueError("chose sticky-range for SMTBF but length of sticky-range does not match length of currentExperiments\n"+"SMTBFRange: "+str(len(SMTBFRange))
                         +" currentExperiments: "+ str(currentExperiments))
    #if there were no sweeps before. Notice compute-SMTBF-from-NMTBF doesn't make sense if this is the case since there will be no nodes
    if currentExperiments == 0:
        count = 1
        for i in SMTBFRange:
            ourInput["experiment_{count}".format(count=count)]={"SMTBF":i}
            count+=1
    #there were sweeps before
    else:
        # Snapshot the pre-existing experiments before mutating ourInput,
        # so the cross-product below is built from the originals only.
        tmpInput = ourInput.copy()
        count = 1
        # update the current experiments first, if sticky ONLY update the current experiments
        for i in ourInput.keys():
            data = ourInput[i]
            if fixedToNode == True:
                # SMTBF is derived from the node-level MTBF: SMTBF = NMTBF / nodes.
                nodes = data["nodes"] if dictHasKey(data,"nodes") else False
                if type(nodes) == bool:
                    print("compute-SMTBF-from-NMTBF set but no nodes set")
                    # NOTE(review): 'sys' is not imported in this module unless
                    # sweepFunctions' star-import re-exports it — verify.
                    sys.exit(1)
                if sticky:
                    # Sticky: each experiment gets its own value, in order.
                    data["SMTBF"] = SMTBFRange[count-1]/nodes
                else:
                    data["SMTBF"] = SMTBFRange[0]/nodes
            else:
                data["SMTBF"] = SMTBFRange[0]
            ourInput[i] = data
            count+=1
        if not sticky:
            # Cross-product: clone every original experiment for each
            # remaining SMTBF value (the first was applied in-place above).
            for i in SMTBFRange:
                if not i == SMTBFRange[0]: #skip the first, we already did it
                    for j in tmpInput.keys():
                        data = tmpInput[j].copy()
                        if fixedToNode == True:
                            nodes = data["nodes"] if dictHasKey(data,"nodes") else False
                            if type(nodes) == bool:
                                print("compute-SMTBF-from-NMTBF set but no nodes set")
                                sys.exit(1)
                            data["SMTBF"] = i/nodes
                        else:
                            data["SMTBF"] = i
                        ourInput["experiment_{count}".format(count=count)] = data
                        count+=1
| [
"numpy.arange"
] | [((926, 966), 'numpy.arange', 'np.arange', (['minimum', '(maximum + step)', 'step'], {}), '(minimum, maximum + step, step)\n', (935, 966), True, 'import numpy as np\n'), ((1074, 1114), 'numpy.arange', 'np.arange', (['minimum', '(maximum + step)', 'step'], {}), '(minimum, maximum + step, step)\n', (1083, 1114), True, 'import numpy as np\n')] |
from __future__ import unicode_literals
import pytest # noqa
import sys
# Collect module-level marks in ONE list: pytest only reads the final
# ``pytestmark`` module attribute, so the previous second assignment at the
# bottom of this header silently discarded the Python-2 skip marker.
pytestmark = [
    pytest.mark.skipif(sys.version_info[0] < 3,
                       reason="pyecore is not Python 2 compatible"),
]  # noqa
pyecore = pytest.importorskip("pyecore")  # noqa
import textx
from textx.metamodel import metamodel_from_str


@pytest.fixture(scope="module")
def enable_pyecore_support():
    """Enable textX pyecore support for this module's tests, then restore it."""
    textx.enable_pyecore_support()
    yield
    textx.enable_pyecore_support(enable=False)


# Apply the fixture to every test in this module (appended, not reassigned,
# so the skipif marker above is preserved).
pytestmark.append(pytest.mark.usefixtures("enable_pyecore_support"))
def test_issue_34_resolving():
    """An issue in resolving a list of objects of different types.

    In the grammar below, attribute `values` in `FormulaExp` collect STRING
    instances which leads textX to deduce the type of this attribute to be list
    of STRING objects. Thus, object reference resolving does not consider the
    `values` list.

    In the new version textX will deduce type OBJECT if different types are
    used in multiple assignments.
    """
    grammar = """
    Expression:
     atts+=Attribute[','] 'formula' form=Formula
    ;

    Formula:
     value=FormulaExp
    ;

    FormulaExp:
     values=Cond
     | ( values='(' values=Formula values=')' )
    ;

    Cond:
     attribute = [Attribute|attr_id] '<' values=STRING
    ;

    attr_id:
     /attr_[a-f0-9]+/
    ;

    Attribute:
     name = attr_id
    ;
    """

    meta_model = metamodel_from_str(grammar)
    model = meta_model.model_from_str(
        "attr_123, attr_444 formula attr_123 < 'aa'")

    # The reference inside the formula must resolve to the Attribute object
    # declared before the 'formula' keyword, not stay a plain string.
    assert type(model.form.value.values[0].attribute).__name__ == 'Attribute'
    assert model.form.value.values[0].attribute.name == 'attr_123'
| [
"textx.enable_pyecore_support",
"pytest.importorskip",
"pytest.mark.usefixtures",
"pytest.mark.skipif",
"pytest.fixture",
"textx.metamodel.metamodel_from_str"
] | [((86, 179), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(sys.version_info[0] < 3)'], {'reason': '"""pyecore is not Python 2 compatible"""'}), "(sys.version_info[0] < 3, reason=\n 'pyecore is not Python 2 compatible')\n", (104, 179), False, 'import pytest\n'), ((225, 255), 'pytest.importorskip', 'pytest.importorskip', (['"""pyecore"""'], {}), "('pyecore')\n", (244, 255), False, 'import pytest\n'), ((327, 357), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (341, 357), False, 'import pytest\n'), ((495, 544), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""enable_pyecore_support"""'], {}), "('enable_pyecore_support')\n", (518, 544), False, 'import pytest\n'), ((392, 422), 'textx.enable_pyecore_support', 'textx.enable_pyecore_support', ([], {}), '()\n', (420, 422), False, 'import textx\n'), ((437, 479), 'textx.enable_pyecore_support', 'textx.enable_pyecore_support', ([], {'enable': '(False)'}), '(enable=False)\n', (465, 479), False, 'import textx\n'), ((1442, 1469), 'textx.metamodel.metamodel_from_str', 'metamodel_from_str', (['grammar'], {}), '(grammar)\n', (1460, 1469), False, 'from textx.metamodel import metamodel_from_str\n')] |
# -*- coding: utf-8 -*-
import boto3
from botocore.exceptions import ClientError
import attr
from attrs_mate import AttrsClass
import weakref
@attr.s
class S3Object(AttrsClass):
    """An object on S3, identified by bucket + key under a named AWS profile.

    boto3 clients are cached per profile in a class-level
    ``WeakValueDictionary`` so instances sharing a profile reuse one client
    while still allowing it to be garbage-collected once unused.
    """
    aws_profile = attr.ib()
    bucket = attr.ib()  # type: str
    key = attr.ib()  # type: str

    # profile name -> boto3 S3 client (weakly referenced, shared by all
    # instances of this class).
    _s3_client_cache = weakref.WeakValueDictionary()

    def s3_client(self):
        """Return the (cached) boto3 S3 client for this object's profile."""
        if self.aws_profile not in self._s3_client_cache:
            client = boto3.session.Session(profile_name=self.aws_profile).client("s3")
            self._s3_client_cache[self.aws_profile] = client
        return self._s3_client_cache[self.aws_profile]

    def exists_on_s3(self):
        """Return ``True`` if this bucket/key currently exists on S3.

        Existence is probed with a HEAD request.  Any ``ClientError`` is
        treated as "does not exist"; all other exceptions propagate.
        """
        # Fix: dropped the trailing ``except Exception as e: raise e`` clause,
        # which was a no-op re-raise -- unexpected errors propagate unchanged.
        try:
            self.s3_client().head_object(Bucket=self.bucket, Key=self.key)
            return True
        except ClientError:
            # NOTE(review): this treats *any* client error (403 included) as
            # absence -- confirm that is the intended contract.
            return False
| [
"boto3.session.Session",
"weakref.WeakValueDictionary",
"attr.ib"
] | [((199, 208), 'attr.ib', 'attr.ib', ([], {}), '()\n', (206, 208), False, 'import attr\n'), ((222, 231), 'attr.ib', 'attr.ib', ([], {}), '()\n', (229, 231), False, 'import attr\n'), ((254, 263), 'attr.ib', 'attr.ib', ([], {}), '()\n', (261, 263), False, 'import attr\n'), ((300, 329), 'weakref.WeakValueDictionary', 'weakref.WeakValueDictionary', ([], {}), '()\n', (327, 329), False, 'import weakref\n'), ((435, 487), 'boto3.session.Session', 'boto3.session.Session', ([], {'profile_name': 'self.aws_profile'}), '(profile_name=self.aws_profile)\n', (456, 487), False, 'import boto3\n')] |
from android.runnable import run_on_ui_thread
from jnius import autoclass, cast
# Resolve the Java classes needed for showing Android toasts via pyjnius.
mActivity = autoclass("org.kivy.android.PythonActivity").mActivity  # the running Kivy activity
Toast = autoclass("android.widget.Toast")
CharSequence = autoclass("java.lang.CharSequence")
String = autoclass("java.lang.String")
@run_on_ui_thread
def android_toast(text, long=False):
    """Show an Android toast message on the UI thread.

    Args:
        text: message to display (converted to a Java CharSequence).
        long: if True show the toast for ``Toast.LENGTH_LONG``, otherwise
            for ``Toast.LENGTH_SHORT``.
    """
    # Bug fix: the selection was inverted -- ``long=True`` previously picked
    # LENGTH_SHORT and the default showed the long duration.
    duration = Toast.LENGTH_LONG if long else Toast.LENGTH_SHORT
    text = cast(CharSequence, String(text))
    Toast.makeText(
        mActivity.getApplicationContext(), text, duration
    ).show()
| [
"jnius.autoclass"
] | [((157, 190), 'jnius.autoclass', 'autoclass', (['"""android.widget.Toast"""'], {}), "('android.widget.Toast')\n", (166, 190), False, 'from jnius import autoclass, cast\n'), ((206, 241), 'jnius.autoclass', 'autoclass', (['"""java.lang.CharSequence"""'], {}), "('java.lang.CharSequence')\n", (215, 241), False, 'from jnius import autoclass, cast\n'), ((251, 280), 'jnius.autoclass', 'autoclass', (['"""java.lang.String"""'], {}), "('java.lang.String')\n", (260, 280), False, 'from jnius import autoclass, cast\n'), ((94, 138), 'jnius.autoclass', 'autoclass', (['"""org.kivy.android.PythonActivity"""'], {}), "('org.kivy.android.PythonActivity')\n", (103, 138), False, 'from jnius import autoclass, cast\n')] |
#!/usr/bin/env python3
# coding: utf-8
# author: <NAME> <<EMAIL>>
import pandas as pd
import numpy as np
from itertools import islice
from sklearn.utils.validation import check_X_y
class KTopScoringPair:
    """ K-Top Scoring Pair classifier.

    This classifier evaluate maximum-likelihood estimation for P(X_i < X_j | Y),
    with X_i < X_j a pair of feature given a class Y. K determine how many pair
    evaluate. Then pairs are ranked by the primary score:

    s = P(X_i < X_j | 0) - P(X_i < X_j | 1)

    Further detail can be found in [1].

    For its nature this is a binary classifier but it will not provide any error
    if found multiple label, score will be computed between first and second
    class. Multi-class classification can be achieved by using sklearn multiclass
    wrappers.

    Parameters
    ----------
    pairs : list of tuples with index of the feature to be considered.
        The feature will be tested in order, that is (X_i, X_j) will be counted
        for X_i < X_j.
    K : int. How many pairs will contribute to classification.
        It should be chosen as an odd int, to allow majority voting.
    t : int, optional (default=0)
        It can be used to adjust accuracy/specificity. By default it means that
        score_{ij} = (P(X_i < X_j | 0) - P(X_i < X_j | 1)) > t

    Attributes
    ----------
    estimated_proba_ : 2d array of float
        Estimated probability computed from training.
    rules_ : array of shape = [n_classes]
        Human-readable K rules found with training.
    ----------
    .. [1] AFSARI, Bahman, et al. Rank discriminants for predicting phenotypes
    from RNA expression. The Annals of Applied Statistics, 2014, 8.3: 1469-1491.
    """

    def __init__(self, pairs, K, t=0):
        self.pairs = pairs
        self.K = K
        self.t = t
        self._estimator_type = "classifier"
        # Defined after fitting
        self.estimated_proba_ = None
        self.rules_ = []
        self.classes_ = []

    def fit(self, X, y):
        """ Train the classifier.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape = [n_samples, n_features]
        y : array-like of shape = [n_samples]

        Returns
        -------
        self : returns an instance of self.
        """
        X, y = check_X_y(X, y)  # Assert input is safe
        # Determine class and convert y accordingly
        self.classes_, y = np.unique(y, return_inverse=True)
        # Main statistics gathering
        Frequencies, Sizes = self._fit(X, y, self.pairs)
        # Compute likelihood probabilities
        self._compute_proba(Frequencies, Sizes)
        return self

    def _fit(self, X, y, pairs):
        # Instantiate dictionary as counter for (X_i, X_j) = |{X_i < X_j | Y}|
        pairs_dict = {l: dict() for l in range(len(self.classes_))}
        class_size = {l: 0 for l in range(len(self.classes_))}
        # Class loop
        for label in pairs_dict.keys():
            X_given_y = X[y==label]
            class_size[label] = X_given_y.shape[0]
            class_pairs = pairs_dict[label]
            # Pairs loop
            for X_i, X_j in pairs:
                class_pairs[(X_i, X_j)] = sum(X_given_y[:, X_i] < X_given_y[:, X_j])
        # Return statistics in a convenient format
        Freq, Size = pd.DataFrame(pairs_dict), pd.Series(class_size)
        return Freq, Size

    def predict(self, X, K=None, t=None):
        """ Predict the provided X.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape = [n_samples, n_features]
        K : int, optional.
            Once estimated_proba_ were computed there is no problem to vary K and
            use K-rules different from __init__ time K
        t : int, optional
            Same as above

        Returns
        -------
        y : array-like of shape = [n_samples]
        """
        # Fix: forward t as well -- it was previously accepted but ignored.
        P = self.predict_proba(X, K, t)
        # Translate most probable class with its label
        return self.classes_[np.argmax(P, axis=1)]

    def predict_proba(self, X, K=None, t=None):
        """ Predict the provided X with probabilities.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape = [n_samples, n_features]
        K : int, optional.
            Once estimated_proba_ were computed there is no problem to vary K and
            use K-rules different from __init__ time K
        t : int, optional
            Same as above

        Returns
        -------
        P : array of shape = [n_samples, n_class]
        """
        def vote_for(x):
            # One vote per rule: the class favoured by the observed ordering.
            return [r['i<j'] if x[r['i']] < x[r['j']] else r['j<i'] for r in self.rules_]

        # Rebuild rules if K or t is different from __init__ time K
        if (K is not None and K != self.K) or (t is not None and t != self.t):
            P = self.estimated_proba_
            self.K = self.K if K is None else K
            self.t = self.t if t is None else t
            self.rules_ = self._scorer(P, self.K, self.t, P.columns[0], P.columns[1])

        # Gather votes for every sample -> V = (n, k)
        V = [vote_for(x) for _, x in X.iterrows()]
        # Group votes by class -> P (n, c)
        P = [{k: v for k, v in zip(*np.unique(v, return_counts=True))} for v in V]
        P = pd.DataFrame(P, columns=self.classes_).fillna(0)
        # Normalized it to emit probabilities.
        # Fix: ``.values`` replaces ``.as_matrix()``, which was removed from
        # pandas (deprecated in 0.23, gone in 1.0).
        return (P / self.K).values

    def partial_fit(self, X_batch, y_batch, classes):
        """ Train the classifier by chunk. This can take advantage of multiprocessing
        computation. Choose chunk dimension it is your discretion.

        Parameters
        ----------
        X_batch : iterator for an {array-like, sparse matrix} of
            shape = [n_samples, n_features]
        y_batch : iterator for an array-like of shape = [n_samples]
        classes : array-like, shape (n_classes,)
            Can't be inferred, then classes need to be passed as argument.

        Returns
        -------
        self : returns an instance of self.
        """
        from multiprocessing import Pool
        self.classes_ = np.array(sorted(classes))
        pool = Pool()
        # Process mapping (zip is needed because map can handle only one argument)
        Freq_chunks, Size_chunks = zip(*pool.map(self._chunk_worker, zip(X_batch, y_batch)))
        # Concatenate resultant dictionary for missing pairs, then group-by and
        # aggregate totals with a sum
        F, S = pd.concat(Freq_chunks), pd.concat(Size_chunks)
        Frequencies, Sizes = F.groupby(level=[0, 1]).sum(), S.groupby(S.index).sum()
        # Now statistics are complete, compute as normal fit
        self._compute_proba(Frequencies, Sizes)
        return self

    def _chunk_worker(self, X_y):
        # Assert input safely
        X, y = X_y
        X, y = check_X_y(X, y)
        # Translate y as label
        d = {k:v for k,v in zip(self.classes_, range(len(self.classes_)))}
        y = np.array(list(map(lambda x: d[x], y)))
        # Count frequencies-sizes for this chunk
        return self._fit(X, y, self.pairs)

    def _scorer(self, P, K, t, minus, plus):
        # Not efficient friendly, but produce human-readable rules.
        def formatted_rule(i, j, isPositive, score):
            if isPositive:
                return {"i":i, "j":j, "i<j":minus, "j<i":plus, "score":score}
            else:
                return {"i":i, "j":j, "i<j":plus, "j<i":minus, "score":score}

        # +/- scores depends on what is subtracted from what
        scores = P[minus] - P[plus]
        ranked = scores.abs().sort_values(ascending=False)
        # Compute rules, ranked by descending score
        rules = [formatted_rule(k[0], k[1], scores[k] > t, scores[k])
                 for k in islice(iter(ranked.keys()), K)]
        return rules

    def _compute_proba(self, Frequencies, Sizes):
        # Mainly for debugging purposes
        self.frequencies_, self.sizes_ = Frequencies, Sizes
        # Compute P = |{X_i < X_j | Y}| / |Y|
        P = Frequencies / Sizes
        self.estimated_proba_ = P
        # Build rules
        self.rules_ = self._scorer(P, self.K, self.t, P.columns[0], P.columns[1])

    def get_params(self, deep=True):
        return {"pairs": self.pairs, "K": self.K, "t": self.t}

    def set_params(self, **parameters):
        for parameter, value in parameters.items():
            # Fix: was ``self.setattr(parameter, value)`` which raised
            # AttributeError -- plain objects have no ``setattr`` method.
            setattr(self, parameter, value)
        return self

    def human_rules(self, features):
        """ Allow rules convertion for human reading.

        Parameters
        ----------
        features : list of feature name corresponding to i,j indexing

        Returns
        -------
        hr_rules : list of rules, with label converted according to input
        """
        import copy as cp
        hr_rules = cp.deepcopy(self.rules_)
        for d in hr_rules:
            d['i'], d['j'] = features[d['i']], features[d['j']]
            d['i<j'], d['j<i'] = self.classes_[d['i<j']], self.classes_[d['j<i']]
        return hr_rules
| [
"pandas.Series",
"numpy.unique",
"numpy.argmax",
"multiprocessing.Pool",
"copy.deepcopy",
"pandas.DataFrame",
"pandas.concat",
"sklearn.utils.validation.check_X_y"
] | [((2475, 2490), 'sklearn.utils.validation.check_X_y', 'check_X_y', (['X', 'y'], {}), '(X, y)\n', (2484, 2490), False, 'from sklearn.utils.validation import check_X_y\n'), ((2594, 2627), 'numpy.unique', 'np.unique', (['y'], {'return_inverse': '(True)'}), '(y, return_inverse=True)\n', (2603, 2627), True, 'import numpy as np\n'), ((6463, 6469), 'multiprocessing.Pool', 'Pool', ([], {}), '()\n', (6467, 6469), False, 'from multiprocessing import Pool\n'), ((7139, 7154), 'sklearn.utils.validation.check_X_y', 'check_X_y', (['X', 'y'], {}), '(X, y)\n', (7148, 7154), False, 'from sklearn.utils.validation import check_X_y\n'), ((9142, 9166), 'copy.deepcopy', 'cp.deepcopy', (['self.rules_'], {}), '(self.rules_)\n', (9153, 9166), True, 'import copy as cp\n'), ((3485, 3509), 'pandas.DataFrame', 'pd.DataFrame', (['pairs_dict'], {}), '(pairs_dict)\n', (3497, 3509), True, 'import pandas as pd\n'), ((3511, 3532), 'pandas.Series', 'pd.Series', (['class_size'], {}), '(class_size)\n', (3520, 3532), True, 'import pandas as pd\n'), ((4222, 4242), 'numpy.argmax', 'np.argmax', (['P'], {'axis': '(1)'}), '(P, axis=1)\n', (4231, 4242), True, 'import numpy as np\n'), ((6779, 6801), 'pandas.concat', 'pd.concat', (['Freq_chunks'], {}), '(Freq_chunks)\n', (6788, 6801), True, 'import pandas as pd\n'), ((6803, 6825), 'pandas.concat', 'pd.concat', (['Size_chunks'], {}), '(Size_chunks)\n', (6812, 6825), True, 'import pandas as pd\n'), ((5542, 5580), 'pandas.DataFrame', 'pd.DataFrame', (['P'], {'columns': 'self.classes_'}), '(P, columns=self.classes_)\n', (5554, 5580), True, 'import pandas as pd\n'), ((5483, 5515), 'numpy.unique', 'np.unique', (['v'], {'return_counts': '(True)'}), '(v, return_counts=True)\n', (5492, 5515), True, 'import numpy as np\n')] |
'''
@brief Base class for system data classes.
This class defines the interface for cata classes which are intended to hold
a specific data item (packet, channel, event). This data item includes the time
of the data as well as data such as channel value or argument value.
@date Created July 2, 2018
@author <NAME> (<EMAIL>)
@bug No known bugs
'''
from fprime.common.models.serialize import time_type
from fprime_gds.common.templates import data_template
import fprime_gds.common.utils.jsonable
class SysData(object):
    '''
    The SysData class defines the interface for system data classes which are
    for specific data readings/events
    '''

    def __init__(self):
        '''
        Constructor.

        Each subclass will define new constructors with necessary arguments.
        The necessary fields are time, id, and template.

        Returns:
            An initialized SysData object
        '''
        # NOTE(review): these reads assume the subclass constructor has already
        # set id/template/time before calling super().__init__() -- otherwise
        # the attribute access itself raises AttributeError; verify callers.
        if not self.id:
            self.id = 0
        if not self.template:
            self.template = data_template.DataTemplate()
        if not self.time:
            self.time = time_type.TimeType()

    def get_id(self):
        '''
        Returns the id of the channel

        Returns:
            The id of the channel
        '''
        return self.id

    def get_time(self):
        '''
        Returns the time of the channel data reading

        Returns:
            Time of the reading as a TimeType
        '''
        return self.time

    def get_template(self):
        '''
        Returns the template class instance for the data stored

        Returns:
            An instance of a template class for this instance's data
        '''
        return self.template

    def to_jsonable(self):
        '''
        Converts to a JSONable object (primatives, anon-objects, lists)
        '''
        return fprime_gds.common.utils.jsonable.fprime_to_jsonable(self)

    @staticmethod
    def compare(x, y):
        '''
        Compares two data items.

        Returns:
            Negative, 0, or positive for t1<t2, t1==t2, t1>t2 respectively
        '''
        # Compare by time first
        time_comp = time_type.TimeType.compare(x.time, y.time)
        if (time_comp != 0):
            return time_comp

        # Compare by id second (just let multiple events at the same time with
        # the same id be counted as equal).
        # Fix: Python 3 removed the builtin cmp(); use the standard
        # sign-difference idiom instead (the old ``cmp(x.id, y.id)`` raised
        # NameError on Python 3).
        return (x.id > y.id) - (x.id < y.id)


if __name__ == '__main__':
    pass
| [
"fprime.common.models.serialize.time_type.TimeType",
"fprime.common.models.serialize.time_type.TimeType.compare",
"fprime_gds.common.templates.data_template.DataTemplate"
] | [((2155, 2197), 'fprime.common.models.serialize.time_type.TimeType.compare', 'time_type.TimeType.compare', (['x.time', 'y.time'], {}), '(x.time, y.time)\n', (2181, 2197), False, 'from fprime.common.models.serialize import time_type\n'), ((1027, 1055), 'fprime_gds.common.templates.data_template.DataTemplate', 'data_template.DataTemplate', ([], {}), '()\n', (1053, 1055), False, 'from fprime_gds.common.templates import data_template\n'), ((1106, 1126), 'fprime.common.models.serialize.time_type.TimeType', 'time_type.TimeType', ([], {}), '()\n', (1124, 1126), False, 'from fprime.common.models.serialize import time_type\n')] |
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twext.enterprise.dal.syntax import SQLFragment
from twisted.trial.unittest import TestCase
from twistedcaldav import carddavxml
from txdav.carddav.datastore.query.filter import Filter, FilterBase
from txdav.common.datastore.sql_tables import schema
from txdav.carddav.datastore.query.builder import buildExpression
from txdav.common.datastore.query.generator import SQLQueryGenerator
from txdav.carddav.datastore.index_file import sqladdressbookquery
class TestQueryFilter(TestCase):
    """Tests for translating CardDAV query filters into SQL."""

    _objectSchema = schema.ADDRESSBOOK_OBJECT

    _queryFields = {
        "UID": _objectSchema.UID
    }

    def test_query(self):
        """
        Basic query test - single term.
        Only UID can be queried via sql.
        """
        # Local renamed from ``filter`` so the builtin is not shadowed.
        query_filter = carddavxml.Filter(
            *[carddavxml.PropertyFilter(
                carddavxml.TextMatch.fromString("Example"),
                **{"name": "UID"}
            )]
        )
        query_filter = Filter(query_filter)

        expression = buildExpression(query_filter, self._queryFields)
        sql = SQLQueryGenerator(expression, self, 1234)
        select, args = sql.generate()

        self.assertEqual(select.toSQL(), SQLFragment("select distinct RESOURCE_NAME, VCARD_UID from ADDRESSBOOK_OBJECT where ADDRESSBOOK_HOME_RESOURCE_ID = ? and VCARD_UID like (? || (? || ?))", [1234, "%", "Example", "%"]))
        self.assertEqual(args, {})

    def test_sqllite_query(self):
        """
        Basic query test - single term.
        Only UID can be queried via sql.
        """
        query_filter = carddavxml.Filter(
            *[carddavxml.PropertyFilter(
                carddavxml.TextMatch.fromString("Example"),
                **{"name": "UID"}
            )]
        )
        query_filter = Filter(query_filter)
        sql, args = sqladdressbookquery(query_filter, 1234)

        self.assertEqual(sql, " from RESOURCE where RESOURCE.UID GLOB :1")
        self.assertEqual(args, ["*Example*"])
class TestQueryFilterSerialize(TestCase):
    """Tests for (de)serializing CardDAV query filters."""

    def test_query(self):
        """
        Basic query test - no time range
        """
        # Local renamed from ``filter`` so the builtin is not shadowed.
        query_filter = carddavxml.Filter(
            *[carddavxml.PropertyFilter(
                carddavxml.TextMatch.fromString("Example"),
                **{"name": "UID"}
            )]
        )
        query_filter = Filter(query_filter)
        j = query_filter.serialize()
        self.assertEqual(j["type"], "Filter")

        f = FilterBase.deserialize(j)
        self.assertTrue(isinstance(f, Filter))
| [
"txdav.carddav.datastore.index_file.sqladdressbookquery",
"txdav.carddav.datastore.query.filter.FilterBase.deserialize",
"twistedcaldav.carddavxml.TextMatch.fromString",
"txdav.common.datastore.query.generator.SQLQueryGenerator",
"txdav.carddav.datastore.query.filter.Filter",
"twext.enterprise.dal.syntax.... | [((1553, 1567), 'txdav.carddav.datastore.query.filter.Filter', 'Filter', (['filter'], {}), '(filter)\n', (1559, 1567), False, 'from txdav.carddav.datastore.query.filter import Filter, FilterBase\n'), ((1590, 1632), 'txdav.carddav.datastore.query.builder.buildExpression', 'buildExpression', (['filter', 'self._queryFields'], {}), '(filter, self._queryFields)\n', (1605, 1632), False, 'from txdav.carddav.datastore.query.builder import buildExpression\n'), ((1647, 1688), 'txdav.common.datastore.query.generator.SQLQueryGenerator', 'SQLQueryGenerator', (['expression', 'self', '(1234)'], {}), '(expression, self, 1234)\n', (1664, 1688), False, 'from txdav.common.datastore.query.generator import SQLQueryGenerator\n'), ((2343, 2357), 'txdav.carddav.datastore.query.filter.Filter', 'Filter', (['filter'], {}), '(filter)\n', (2349, 2357), False, 'from txdav.carddav.datastore.query.filter import Filter, FilterBase\n'), ((2378, 2411), 'txdav.carddav.datastore.index_file.sqladdressbookquery', 'sqladdressbookquery', (['filter', '(1234)'], {}), '(filter, 1234)\n', (2397, 2411), False, 'from txdav.carddav.datastore.index_file import sqladdressbookquery\n'), ((2885, 2899), 'txdav.carddav.datastore.query.filter.Filter', 'Filter', (['filter'], {}), '(filter)\n', (2891, 2899), False, 'from txdav.carddav.datastore.query.filter import Filter, FilterBase\n'), ((2990, 3015), 'txdav.carddav.datastore.query.filter.FilterBase.deserialize', 'FilterBase.deserialize', (['j'], {}), '(j)\n', (3012, 3015), False, 'from txdav.carddav.datastore.query.filter import Filter, FilterBase\n'), ((1769, 1961), 'twext.enterprise.dal.syntax.SQLFragment', 'SQLFragment', (['"""select distinct RESOURCE_NAME, VCARD_UID from ADDRESSBOOK_OBJECT where ADDRESSBOOK_HOME_RESOURCE_ID = ? and VCARD_UID like (? || (? || ?))"""', "[1234, '%', 'Example', '%']"], {}), "(\n 'select distinct RESOURCE_NAME, VCARD_UID from ADDRESSBOOK_OBJECT where ADDRESSBOOK_HOME_RESOURCE_ID = ? 
and VCARD_UID like (? || (? || ?))'\n , [1234, '%', 'Example', '%'])\n", (1780, 1961), False, 'from twext.enterprise.dal.syntax import SQLFragment\n'), ((1433, 1475), 'twistedcaldav.carddavxml.TextMatch.fromString', 'carddavxml.TextMatch.fromString', (['"""Example"""'], {}), "('Example')\n", (1464, 1475), False, 'from twistedcaldav import carddavxml\n'), ((2223, 2265), 'twistedcaldav.carddavxml.TextMatch.fromString', 'carddavxml.TextMatch.fromString', (['"""Example"""'], {}), "('Example')\n", (2254, 2265), False, 'from twistedcaldav import carddavxml\n'), ((2765, 2807), 'twistedcaldav.carddavxml.TextMatch.fromString', 'carddavxml.TextMatch.fromString', (['"""Example"""'], {}), "('Example')\n", (2796, 2807), False, 'from twistedcaldav import carddavxml\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
WSGI script
Setup Application, Authentication, ...
"""
import os
from eve import Eve
from evedom import loader
# from your_app.authentication.token import TokenBasedAuth
__author__ = "nam4dev"
__created__ = '08/11/2017'
# Directory containing this wsgi script.
ROOT_PATH = os.path.dirname(
    os.path.abspath(__file__)
)
# Eve settings module shipped next to this script.
EVE_SETTINGS = os.path.join(ROOT_PATH, 'settings.py')
def runner(*_, **options):
    """Build the Eve application and run it.

    Args:
        *_: ignored positional arguments.
        **options: keyword overrides for the default run options
            (``debug=1``, ``port=5000``).

    Returns:
        The result of ``application.run(**options)``.
    """
    run_options = {'debug': 1, 'port': 5000}
    run_options.update(options)

    # Make the settings path visible to Eve via the environment as well.
    os.environ.setdefault('EVE_SETTINGS', EVE_SETTINGS)

    app = Eve(
        settings=EVE_SETTINGS,
        # auth=TokenBasedAuth,
    )
    app.root_path = ROOT_PATH

    with app.app_context():
        loader.init()

    return app.run(**run_options)


if __name__ == "__main__":
    exit(runner())
| [
"os.path.abspath",
"eve.Eve",
"evedom.loader.init",
"os.path.join"
] | [((362, 400), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""settings.py"""'], {}), "(ROOT_PATH, 'settings.py')\n", (374, 400), False, 'import os\n'), ((315, 340), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (330, 340), False, 'import os\n'), ((749, 775), 'eve.Eve', 'Eve', ([], {'settings': 'EVE_SETTINGS'}), '(settings=EVE_SETTINGS)\n', (752, 775), False, 'from eve import Eve\n'), ((905, 918), 'evedom.loader.init', 'loader.init', ([], {}), '()\n', (916, 918), False, 'from evedom import loader\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2019-01-18 17:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 1.11) migration: adds two link tables mapping
    # TolaUsers to Countries and Programs, each carrying a role level.

    dependencies = [
        ('workflow', '0026_auto_20190116_1357'),
    ]

    operations = [
        # Per-country roles: user / basic_admin / super_admin.
        migrations.CreateModel(
            name='TolaUserCountryRoles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('role', models.CharField(choices=[('user', 'User'), ('basic_admin', 'Basic Admin'), ('super_admin', 'Super Admin')], max_length=100)),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_roles', to='workflow.Country')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='country_roles', to='workflow.TolaUser')),
            ],
        ),
        # Per-program roles: low / medium / high access levels.
        migrations.CreateModel(
            name='TolaUserProgramRoles',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('role', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High')], max_length=100)),
                ('program', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_roles', to='workflow.Program')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='program_roles', to='workflow.TolaUser')),
            ],
        ),
    ]
| [
"django.db.models.AutoField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((443, 536), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (459, 536), False, 'from django.db import migrations, models\n'), ((560, 688), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('user', 'User'), ('basic_admin', 'Basic Admin'), ('super_admin',\n 'Super Admin')]", 'max_length': '(100)'}), "(choices=[('user', 'User'), ('basic_admin', 'Basic Admin'),\n ('super_admin', 'Super Admin')], max_length=100)\n", (576, 688), False, 'from django.db import migrations, models\n'), ((715, 832), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""user_roles"""', 'to': '"""workflow.Country"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='user_roles', to='workflow.Country')\n", (732, 832), False, 'from django.db import migrations, models\n'), ((855, 976), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""country_roles"""', 'to': '"""workflow.TolaUser"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='country_roles', to='workflow.TolaUser')\n", (872, 976), False, 'from django.db import migrations, models\n'), ((1117, 1210), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1133, 1210), False, 'from django.db import migrations, models\n'), ((1234, 1336), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('low', 'Low'), ('medium', 'Medium'), ('high', 'High')]", 'max_length': '(100)'}), "(choices=[('low', 'Low'), ('medium', 'Medium'), ('high',\n 'High')], 
max_length=100)\n", (1250, 1336), False, 'from django.db import migrations, models\n'), ((1363, 1480), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""user_roles"""', 'to': '"""workflow.Program"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='user_roles', to='workflow.Program')\n", (1380, 1480), False, 'from django.db import migrations, models\n'), ((1503, 1624), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""program_roles"""', 'to': '"""workflow.TolaUser"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='program_roles', to='workflow.TolaUser')\n", (1520, 1624), False, 'from django.db import migrations, models\n')] |
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import random  # NOTE(review): imported but unused in this script
from util.util import pad, detect_aes_ecb, generate_key, ammend_plaintext, encrypt_random

# Chosen plaintext: a long run of identical bytes, so that an ECB-mode cipher
# produces repeated ciphertext blocks that detect_aes_ecb can spot.
plaintext = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"

# Generate a random key, amend and pad the plaintext to the 16-byte block
# size, then encrypt under a randomly chosen mode (ECB or CBC).
key = generate_key()
plaintext = pad(ammend_plaintext(plaintext), 16)
ciphertext = encrypt_random(key, plaintext)

# Detect AES in ECB mode
detect = detect_aes_ecb(ciphertext)

# Print answer
print("Plaintext: " + str(plaintext, 'latin-1'))
print("Ciphertext: " + str(ciphertext, 'latin-1'))
if (detect[1] == 6):
    print("Guess: ECB without CBC mode")
elif (detect[1] == 4):
    print("Guess: ECB with CBC mode")
else:
    # Previously a bare `raise Exception`; include the observed count so a
    # failure is diagnosable.
    raise Exception("Unexpected repeated-block count from detect_aes_ecb: %r" % (detect[1],))
"util.util.encrypt_random",
"util.util.generate_key",
"util.util.detect_aes_ecb",
"os.path.abspath",
"util.util.ammend_plaintext"
] | [((319, 333), 'util.util.generate_key', 'generate_key', ([], {}), '()\n', (331, 333), False, 'from util.util import pad, detect_aes_ecb, generate_key, ammend_plaintext, encrypt_random\n'), ((396, 426), 'util.util.encrypt_random', 'encrypt_random', (['key', 'plaintext'], {}), '(key, plaintext)\n', (410, 426), False, 'from util.util import pad, detect_aes_ecb, generate_key, ammend_plaintext, encrypt_random\n'), ((462, 488), 'util.util.detect_aes_ecb', 'detect_aes_ecb', (['ciphertext'], {}), '(ciphertext)\n', (476, 488), False, 'from util.util import pad, detect_aes_ecb, generate_key, ammend_plaintext, encrypt_random\n'), ((350, 377), 'util.util.ammend_plaintext', 'ammend_plaintext', (['plaintext'], {}), '(plaintext)\n', (366, 377), False, 'from util.util import pad, detect_aes_ecb, generate_key, ammend_plaintext, encrypt_random\n'), ((63, 88), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import sys, os\n')] |
# Copyright (c) 2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Initializes an UberRidesClient with OAuth 2.0 Credentials.
This example demonstrates how to get an access token through the
OAuth 2.0 Authorization Code Grant and use credentials to create
an UberRidesClient.
To run this example:
(1) Set your app credentials in config.driver.yaml
(2) Run `python authorize_driver.py`
(3) A success message will print, 'Hello {YOUR_NAME}'
(4) User OAuth 2.0 credentials are recorded in
'oauth_driver_session_store.yaml'
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import input
from yaml import safe_dump
from example import utils # NOQA
from example.utils import fail_print
from example.utils import response_print
from example.utils import success_print
from example.utils import import_app_credentials
from uber_rides.auth import AuthorizationCodeGrant
from uber_rides.client import UberRidesClient
from uber_rides.errors import ClientError
from uber_rides.errors import ServerError
from uber_rides.errors import UberIllegalState
def authorization_code_grant_flow(credentials, storage_filename):
    """Get an access token through Authorization Code Grant.
    Parameters
        credentials (dict)
            All your app credentials and information
            imported from the configuration file.
        storage_filename (str)
            Filename to store OAuth 2.0 Credentials.
    Returns
        (UberRidesClient)
            An UberRidesClient with OAuth 2.0 Credentials.
    """
    # Build the OAuth 2.0 Authorization Code flow from the app credentials.
    auth_flow = AuthorizationCodeGrant(
        credentials.get('client_id'),
        credentials.get('scopes'),
        credentials.get('client_secret'),
        credentials.get('redirect_url'),
    )
    # Ask the user to authorize in a browser, then paste the redirect URL
    # (which carries the authorization code) back into this terminal.
    auth_url = auth_flow.get_authorization_url()
    login_message = 'Login as a driver and grant access by going to:\n\n{}\n'
    login_message = login_message.format(auth_url)
    response_print(login_message)
    redirect_url = 'Copy the URL you are redirected to and paste here:\n\n'
    result = input(redirect_url).strip()
    try:
        session = auth_flow.get_session(result)
    except (ClientError, UberIllegalState) as error:
        # Invalid/expired code or misconfigured flow: report and bail out
        # (implicitly returns None).
        fail_print(error)
        return
    # Persist the granted credentials to YAML so the session can be
    # restored later without re-running the interactive flow.
    credential = session.oauth2credential
    credential_data = {
        'client_id': credential.client_id,
        'redirect_url': credential.redirect_url,
        'access_token': credential.access_token,
        'expires_in_seconds': credential.expires_in_seconds,
        'scopes': list(credential.scopes),
        'grant_type': credential.grant_type,
        'client_secret': credential.client_secret,
        'refresh_token': credential.refresh_token,
    }
    with open(storage_filename, 'w') as yaml_file:
        yaml_file.write(safe_dump(credential_data, default_flow_style=False))
    # NOTE(review): sandbox_mode=True — presumably keeps requests off the
    # production environment; confirm before real use.
    return UberRidesClient(session, sandbox_mode=True)
def hello_user(api_client):
    """Fetch and print the authorized driver's profile, trips and payments.

    Parameters
        api_client (UberRidesClient)
            An UberRidesClient with OAuth 2.0 credentials.
    """
    try:
        profile_response = api_client.get_driver_profile()
    except (ClientError, ServerError) as error:
        fail_print(error)
        return
    # Greet the driver using fields from the profile payload.
    profile = profile_response.json
    greeting = 'Hello, {} {}. Successfully granted access token to {}.'.format(
        profile.get('first_name'),
        profile.get('last_name'),
        profile.get('email'),
    )
    success_print(greeting)
    success_print(profile)
    success_print('---')
    # Recent trips.
    trips = api_client.get_driver_trips().json
    success_print(trips)
    success_print('---')
    # Payment history.
    payments = api_client.get_driver_payments().json
    success_print(payments)
if __name__ == '__main__':
    """Run the example.
    Get an access token through the OAuth 2.0 Authorization Code Grant
    and use credentials to create an UberRidesClient.
    """
    # Load app credentials, run the interactive OAuth flow (persisting the
    # session to YAML), then greet the authorized driver.
    credentials = import_app_credentials('config.driver.yaml')
    api_client = authorization_code_grant_flow(
        credentials,
        'oauth_driver_session_store.yaml',
    )
    hello_user(api_client)
| [
"yaml.safe_dump",
"builtins.input",
"example.utils.fail_print",
"example.utils.success_print",
"uber_rides.client.UberRidesClient",
"example.utils.response_print",
"example.utils.import_app_credentials"
] | [((3077, 3106), 'example.utils.response_print', 'response_print', (['login_message'], {}), '(login_message)\n', (3091, 3106), False, 'from example.utils import response_print\n'), ((3986, 4029), 'uber_rides.client.UberRidesClient', 'UberRidesClient', (['session'], {'sandbox_mode': '(True)'}), '(session, sandbox_mode=True)\n', (4001, 4029), False, 'from uber_rides.client import UberRidesClient\n'), ((5265, 5309), 'example.utils.import_app_credentials', 'import_app_credentials', (['"""config.driver.yaml"""'], {}), "('config.driver.yaml')\n", (5287, 5309), False, 'from example.utils import import_app_credentials\n'), ((4721, 4743), 'example.utils.success_print', 'success_print', (['message'], {}), '(message)\n', (4734, 4743), False, 'from example.utils import success_print\n'), ((4752, 4774), 'example.utils.success_print', 'success_print', (['profile'], {}), '(profile)\n', (4765, 4774), False, 'from example.utils import success_print\n'), ((4784, 4804), 'example.utils.success_print', 'success_print', (['"""---"""'], {}), "('---')\n", (4797, 4804), False, 'from example.utils import success_print\n'), ((4892, 4912), 'example.utils.success_print', 'success_print', (['trips'], {}), '(trips)\n', (4905, 4912), False, 'from example.utils import success_print\n'), ((4922, 4942), 'example.utils.success_print', 'success_print', (['"""---"""'], {}), "('---')\n", (4935, 4942), False, 'from example.utils import success_print\n'), ((5036, 5059), 'example.utils.success_print', 'success_print', (['payments'], {}), '(payments)\n', (5049, 5059), False, 'from example.utils import success_print\n'), ((3197, 3216), 'builtins.input', 'input', (['redirect_url'], {}), '(redirect_url)\n', (3202, 3216), False, 'from builtins import input\n'), ((3345, 3362), 'example.utils.fail_print', 'fail_print', (['error'], {}), '(error)\n', (3355, 3362), False, 'from example.utils import fail_print\n'), ((3920, 3972), 'yaml.safe_dump', 'safe_dump', (['credential_data'], {'default_flow_style': 
'(False)'}), '(credential_data, default_flow_style=False)\n', (3929, 3972), False, 'from yaml import safe_dump\n'), ((4370, 4387), 'example.utils.fail_print', 'fail_print', (['error'], {}), '(error)\n', (4380, 4387), False, 'from example.utils import fail_print\n')] |
# -*- coding: utf-8 -*-
""" A data clustering widget for the Orange3.
This is a data clustering widget for Orange3, that implements the OPTICS algorithm.
OPTICS stands for "Ordering Points To Identify the Clustering Structure".
This is a very useful algorithm for clustering data when the dataset is unlabeled with
Non-flat geometry or when it has uneven cluster sizes or variable cluster density.
The package used is called "sklearn". Source: https://scikit-learn.org/stable/index.html
To run the addon, just install it using 'pip install -e .' from its package folder.
Don't forget to first activate the orange environment.
__author__ = <NAME>
__date__ = Feb 2020
__version__ = 0.1.0
__type__ = Orange Addon
__platform__ = Windows (Orange enviroment)
__email__ = '<NAME>' <<EMAIL>>
__status__ = Dev
"""
import numpy as np
from AnyQt.QtCore import Qt
from AnyQt.QtGui import QColor
from Orange.widgets import widget, gui
from Orange.widgets import settings
from Orange.widgets.widget import Msg
from Orange.widgets.utils.signals import Input, Output
from Orange.widgets.utils.widgetpreview import WidgetPreview
from Orange.widgets.utils.slidergraph import SliderGraph
from Orange.data import Table, Domain, DiscreteVariable
from pyqtgraph import mkPen
from pyqtgraph.functions import intColor
from sklearn.cluster import OPTICS
from sklearn.neighbors import VALID_METRICS
""" OPTICS Parameters
class sklearn.cluster.OPTICS(
* min_samples=5, {default=5 or int > 1}, title: Min samples
max_eps=inf, {default=np.inf}, not changed
* metric='minkowski', {default='minkowski' or [1]}, title: Metric
p=2, {default=2}, not changed
cluster_method='xi', {default='xi'}, not changed
eps=None, {default=None}, not changed
* xi=0.05, {default=0.05 or float, between 0 and 1}, title: Minimum steepness
predecessor_correction=True, {default=True}, not changed
min_cluster_size=None, {default=None}, not changed
* algorithm='auto', {default=auto or ball_tree, kd_tree, brute, auto}, title: Algorithm for nearest neighbors:
leaf_size=30, {default=30}, not changed
n_jobs=None, {default=None}, not changed
)
[1] Valid values for metric are:
from scikit-learn: [‘cityblock’, ‘cosine’, ‘euclidean’, ‘l1’, ‘l2’, ‘manhattan’]
from scipy.spatial.distance: [‘braycurtis’, ‘canberra’, ‘chebyshev’, ‘correlation’, ‘dice’, ‘hamming’, ‘jaccard’,
‘kulsinski’, ‘mahalanobis’, ‘minkowski’, ‘rogerstanimoto’, ‘russellrao’, ‘seuclidean’, ‘sokalmichener’, ‘sokalsneath’, ‘sqeuclidean’, ‘yule’]
See the documentation for scipy.spatial.distance for details on these metrics.
"""
# Distance metrics offered in the widget's combo box.
# Each entry is (display label, sklearn metric name); the index into this
# list is stored in the `metric_methode` setting.
OPTICS_METRICS = [
    ("cityblock", "cityblock"),
    ("cosine", "cosine"),
    ("euclidean", "euclidean"),
    ("l1", "l1"),
    ("l2", "l2"),
    ("manhattan", "manhattan"),
    ("braycurtis", "braycurtis"),
    ("canberra", "canberra"),
    ("chebyshev", "chebyshev"),
    ("correlation", "correlation"),
    ("hamming", "hamming"),
    ("minkowski", "minkowski"),
    ("sqeuclidean", "sqeuclidean"),
]
# Nearest-neighbor search algorithms: (display label, sklearn name).
# The index into this list is stored in the `algorithm_base` setting.
OPTICS_ALGORITHM = [
    ("Auto","auto"),
    ("Ball Tree","ball_tree"),
    ("kd Tree","kd_tree"),
    ("Brute","brute"),
]
class OPTICS_w(widget.OWWidget):
    """Orange3 widget that clusters the input table with sklearn's OPTICS
    algorithm and plots the resulting reachability diagram."""
    name = "OPTICS"
    description = "dynamicaly clustering unlabeled data by density"
    icon = "icons/OPTICS.svg"
    priority = 20
    class Inputs:
        # The data table to cluster.
        data = Input("Data", Table)
    class Outputs:
        # The input table annotated with a "Cluster" meta column.
        annotated_data = Output("Data", Table)
    class Error(widget.OWWidget.Error):
        not_enough_instances = Msg("Not enough unique data instances. "
                                   "At least two are required.")
    # Persisted widget settings (OPTICS hyper-parameters and commit mode).
    minimum_samples = settings.Setting(5)
    metric_methode = settings.Setting(11)
    xi_value = settings.Setting(0.05)
    algorithm_base = settings.Setting(0)
    auto_commit = settings.Setting(False)
    # Initial cut point; rebound to the slider value in _on_changed.
    cut_point = xi_value
    want_main_area = True
    def __init__(self):
        """Build the control-area GUI (info labels + OPTICS options) and the
        reachability slider graph in the main area."""
        super().__init__()
        self.data = None
        self.dataset = None
        self.annotated_data = None
        # GUI
        infobox = gui.widgetBox(self.controlArea, "Info")
        self.infoa = gui.widgetLabel(infobox, "No data on input yet, waiting to get something.")
        self.infob = gui.widgetLabel(infobox, "")
        self.infoc = gui.widgetLabel(infobox, "")
        self.infod = gui.widgetLabel(infobox, "")
        self.optionsBox = gui.widgetBox(self.controlArea, "OPTICS Options")
        gui.spin(
            self.optionsBox,
            self,
            "minimum_samples",
            minv=1,
            maxv=100,
            step=1,
            label="Core point neighbors ",
            callback=self._min_samples_changed
        )
        gui.comboBox(
            self.optionsBox,
            self,
            "metric_methode",
            orientation=Qt.Horizontal,
            label="Distance metric: ",
            items=[d[0] for d in OPTICS_METRICS],
            callback=self._metric_changed
        )
        gui.doubleSpin(
            self.optionsBox,
            self,
            "xi_value",
            minv=(0.000),
            maxv=(0.999),
            step=(0.001),
            label="Minimum steepness: ",
            callback=self._xi_changed
        )
        gui.comboBox(
            self.optionsBox,
            self,
            "algorithm_base",
            orientation=Qt.Horizontal,
            label="neighborhood algorithm: ",
            items=[d[0] for d in OPTICS_ALGORITHM],
            callback=self._algorithm_changed
        )
        # Options stay disabled until valid data arrives (see set_data).
        self.optionsBox.setDisabled(True)
        gui.auto_apply(self.controlArea, self, "auto_commit")
        gui.rubber(self.controlArea)
        self.controlArea.layout().addStretch()
        self.plot = SliderGraph(
            x_axis_label="Ordering of the points as processed by OPTICS",
            y_axis_label="Reachability distance (epsilon distance)",
            callback=self._on_changed
        )
        self.mainArea.layout().addWidget(self.plot)
    def check_data_size(self, data):
        """Return True if `data` has at least two instances; for non-None
        undersized data also raise the widget error."""
        if data is None:
            return False
        if len(data) < 2:
            self.Error.not_enough_instances()
            return False
        return True
    def normalizing(self,model):
        """Attach the fitted model's labels to the input table as a new
        "Cluster" meta column (noise label -1 becomes NaN); return the table."""
        clusters = [c if c >= 0 else np.nan for c in model.labels_]
        k = len(set(clusters) - {np.nan})
        clusters = np.array(clusters).reshape(len(self.data), 1)
        clust_var = DiscreteVariable("Cluster", values=["C%d" % (x + 1) for x in range(k)])
        domain = self.data.domain
        attributes, classes = domain.attributes, domain.class_vars
        meta_attrs = domain.metas
        x, y, metas = self.data.X, self.data.Y, self.data.metas
        meta_attrs += (clust_var, )
        metas = np.hstack((metas, clusters))
        domain = Domain(attributes, classes, meta_attrs)
        new_table = Table(domain, x, y, metas, self.data.W)
        # self.Outputs.annotated_data.send(new_table)
        return new_table
    def commit(self):
        """Re-run clustering; invoked by gui.auto_apply and option callbacks."""
        self.cluster()
        return
    def cluster(self):
        """Fit sklearn OPTICS with the current settings, redraw the plot and
        send the annotated table downstream."""
        if not self.check_data_size(self.data):
            return
        model = OPTICS(min_samples=self.minimum_samples,
                metric=OPTICS_METRICS[self.metric_methode][1],
                xi=self.xi_value,
                algorithm=OPTICS_ALGORITHM[self.algorithm_base][1],
                )
        model.fit(self.data.X)
        self._plot_graph(model)
        self.result_OPTICS = self.normalizing(model)
        self.send_data()
    def _plot_graph(self,model):
        """Draw the reachability plot: one colored curve per cluster plus a
        black curve for noise points (label -1); update the info labels."""
        reachability = model.reachability_[model.ordering_]
        space = np.arange(len(reachability))
        # Replace infinite reachability with the largest finite value so the
        # curve remains drawable.
        reachability[reachability == np.inf] = np.nanmax(reachability[reachability != np.inf])
        labels = model.labels_[model.ordering_]
        cluster_count = (len(np.unique(labels[labels[:]>=0])))
        self.infoc.setText("%d values in the cluster outcome" % cluster_count)
        noisy_counter = len(space[labels==-1])
        self.infod.setText("%d noisy samples in the leaf cluster" % noisy_counter)
        x_plot = space
        y_plot = reachability
        self.plot.clear_plot()
        # One distinct pyqtgraph color index per cluster.
        colors = np.arange(150, (150+cluster_count))
        for klaster, color in zip(range(0, cluster_count), colors):
            Xk = space[labels == klaster]
            Rk = reachability[labels == klaster]
            self.plot.plot(Xk, Rk, pen=mkPen(intColor(color), width=2), antialias=True)
        self.plot.plot(x_plot[labels==-1], y_plot[labels==-1], pen=mkPen(QColor('black'), width=2), antialias=True)
    @Inputs.data
    def set_data(self, dataset):
        """Input handler: reset the widget when data is absent or too small,
        otherwise store the table, refresh info labels and (re)cluster."""
        self.Error.clear()
        if not self.check_data_size(dataset):
            self.optionsBox.setDisabled(True)
            self.plot.clear_plot()
            self.infoa.setText(
                "No data on input yet, waiting to get something.")
            self.infob.setText('')
            self.infoc.setText('')
            self.infod.setText('')
            self.dataset = None
            self.annotated_data = None
            self.Outputs.annotated_data.send(None)
            return
        self.data = dataset
        self.optionsBox.setDisabled(False)
        self.numberOfInputInstances = len(self.data)
        self.infoa.setText("%d instances in input data set" % self.numberOfInputInstances)
        numOfclasses = len(self.data.domain.class_var.values)
        self.infob.setText("%d values in the categorical outcome" % numOfclasses)
        self.commit()
    def checkCommit(self):
        # NOTE(review): self.commitOnChange is never defined on this widget
        # (the setting is named auto_commit), so calling this would raise
        # AttributeError -- confirm whether this method is dead code.
        if self.commitOnChange:
            self.commit()
    def send_data(self):
        """Send the annotated table built by cluster() to the output channel."""
        self.Outputs.annotated_data.send(self.result_OPTICS)
    def _min_samples_changed(self):
        # Spin-box callback: recluster with the new core-point neighbor count.
        if self.data is None:
            return
        self.commit()
    def _metric_changed(self):
        # Metric combo callback: reset to the "Auto" neighbor algorithm,
        # since the new metric may be invalid for the selected one.
        if self.data is None:
            return
        self.algorithm_base = 0
        self.commit()
    def _xi_changed(self):
        # Minimum-steepness spin callback.
        self.commit()
    def _algorithm_changed(self):
        # Algorithm combo callback: fall back to "Auto" when the current
        # metric is not supported by the chosen neighbor algorithm.
        if self.data is None:
            return
        if self.algorithm_base != 0:
            if OPTICS_METRICS[self.metric_methode][1] not in VALID_METRICS[OPTICS_ALGORITHM[self.algorithm_base][1]]:
                self.algorithm_base = 0
        self.commit()
    def _on_changed(self, value):
        # Slider-graph callback: remember the user-selected cut point.
        self.cut_point = value
if __name__ == "__main__":
    # Standalone preview: run the widget on the bundled iris-imbalanced table.
    WidgetPreview(OPTICS_w).run(Table("iris-imbalanced"))
| [
"Orange.widgets.utils.signals.Input",
"numpy.hstack",
"numpy.array",
"Orange.widgets.utils.widgetpreview.WidgetPreview",
"numpy.arange",
"Orange.widgets.utils.slidergraph.SliderGraph",
"Orange.widgets.utils.signals.Output",
"pyqtgraph.functions.intColor",
"Orange.widgets.settings.Setting",
"Orange... | [((3836, 3855), 'Orange.widgets.settings.Setting', 'settings.Setting', (['(5)'], {}), '(5)\n', (3852, 3855), False, 'from Orange.widgets import settings\n'), ((3877, 3897), 'Orange.widgets.settings.Setting', 'settings.Setting', (['(11)'], {}), '(11)\n', (3893, 3897), False, 'from Orange.widgets import settings\n'), ((3913, 3935), 'Orange.widgets.settings.Setting', 'settings.Setting', (['(0.05)'], {}), '(0.05)\n', (3929, 3935), False, 'from Orange.widgets import settings\n'), ((3957, 3976), 'Orange.widgets.settings.Setting', 'settings.Setting', (['(0)'], {}), '(0)\n', (3973, 3976), False, 'from Orange.widgets import settings\n'), ((3995, 4018), 'Orange.widgets.settings.Setting', 'settings.Setting', (['(False)'], {}), '(False)\n', (4011, 4018), False, 'from Orange.widgets import settings\n'), ((3547, 3567), 'Orange.widgets.utils.signals.Input', 'Input', (['"""Data"""', 'Table'], {}), "('Data', Table)\n", (3552, 3567), False, 'from Orange.widgets.utils.signals import Input, Output\n'), ((3613, 3634), 'Orange.widgets.utils.signals.Output', 'Output', (['"""Data"""', 'Table'], {}), "('Data', Table)\n", (3619, 3634), False, 'from Orange.widgets.utils.signals import Input, Output\n'), ((3707, 3774), 'Orange.widgets.widget.Msg', 'Msg', (['"""Not enough unique data instances. At least two are required."""'], {}), "('Not enough unique data instances. 
At least two are required.')\n", (3710, 3774), False, 'from Orange.widgets.widget import Msg\n'), ((4249, 4288), 'Orange.widgets.gui.widgetBox', 'gui.widgetBox', (['self.controlArea', '"""Info"""'], {}), "(self.controlArea, 'Info')\n", (4262, 4288), False, 'from Orange.widgets import widget, gui\n'), ((4310, 4385), 'Orange.widgets.gui.widgetLabel', 'gui.widgetLabel', (['infobox', '"""No data on input yet, waiting to get something."""'], {}), "(infobox, 'No data on input yet, waiting to get something.')\n", (4325, 4385), False, 'from Orange.widgets import widget, gui\n'), ((4407, 4435), 'Orange.widgets.gui.widgetLabel', 'gui.widgetLabel', (['infobox', '""""""'], {}), "(infobox, '')\n", (4422, 4435), False, 'from Orange.widgets import widget, gui\n'), ((4457, 4485), 'Orange.widgets.gui.widgetLabel', 'gui.widgetLabel', (['infobox', '""""""'], {}), "(infobox, '')\n", (4472, 4485), False, 'from Orange.widgets import widget, gui\n'), ((4507, 4535), 'Orange.widgets.gui.widgetLabel', 'gui.widgetLabel', (['infobox', '""""""'], {}), "(infobox, '')\n", (4522, 4535), False, 'from Orange.widgets import widget, gui\n'), ((4563, 4612), 'Orange.widgets.gui.widgetBox', 'gui.widgetBox', (['self.controlArea', '"""OPTICS Options"""'], {}), "(self.controlArea, 'OPTICS Options')\n", (4576, 4612), False, 'from Orange.widgets import widget, gui\n'), ((4621, 4768), 'Orange.widgets.gui.spin', 'gui.spin', (['self.optionsBox', 'self', '"""minimum_samples"""'], {'minv': '(1)', 'maxv': '(100)', 'step': '(1)', 'label': '"""Core point neighbors """', 'callback': 'self._min_samples_changed'}), "(self.optionsBox, self, 'minimum_samples', minv=1, maxv=100, step=1,\n label='Core point neighbors ', callback=self._min_samples_changed)\n", (4629, 4768), False, 'from Orange.widgets import widget, gui\n'), ((4879, 5064), 'Orange.widgets.gui.comboBox', 'gui.comboBox', (['self.optionsBox', 'self', '"""metric_methode"""'], {'orientation': 'Qt.Horizontal', 'label': '"""Distance metric: """', 'items': '[d[0] 
for d in OPTICS_METRICS]', 'callback': 'self._metric_changed'}), "(self.optionsBox, self, 'metric_methode', orientation=Qt.\n Horizontal, label='Distance metric: ', items=[d[0] for d in\n OPTICS_METRICS], callback=self._metric_changed)\n", (4891, 5064), False, 'from Orange.widgets import widget, gui\n'), ((5158, 5301), 'Orange.widgets.gui.doubleSpin', 'gui.doubleSpin', (['self.optionsBox', 'self', '"""xi_value"""'], {'minv': '(0.0)', 'maxv': '(0.999)', 'step': '(0.001)', 'label': '"""Minimum steepness: """', 'callback': 'self._xi_changed'}), "(self.optionsBox, self, 'xi_value', minv=0.0, maxv=0.999,\n step=0.001, label='Minimum steepness: ', callback=self._xi_changed)\n", (5172, 5301), False, 'from Orange.widgets import widget, gui\n'), ((5420, 5617), 'Orange.widgets.gui.comboBox', 'gui.comboBox', (['self.optionsBox', 'self', '"""algorithm_base"""'], {'orientation': 'Qt.Horizontal', 'label': '"""neighborhood algorithm: """', 'items': '[d[0] for d in OPTICS_ALGORITHM]', 'callback': 'self._algorithm_changed'}), "(self.optionsBox, self, 'algorithm_base', orientation=Qt.\n Horizontal, label='neighborhood algorithm: ', items=[d[0] for d in\n OPTICS_ALGORITHM], callback=self._algorithm_changed)\n", (5432, 5617), False, 'from Orange.widgets import widget, gui\n'), ((5762, 5815), 'Orange.widgets.gui.auto_apply', 'gui.auto_apply', (['self.controlArea', 'self', '"""auto_commit"""'], {}), "(self.controlArea, self, 'auto_commit')\n", (5776, 5815), False, 'from Orange.widgets import widget, gui\n'), ((5824, 5852), 'Orange.widgets.gui.rubber', 'gui.rubber', (['self.controlArea'], {}), '(self.controlArea)\n', (5834, 5852), False, 'from Orange.widgets import widget, gui\n'), ((5922, 6088), 'Orange.widgets.utils.slidergraph.SliderGraph', 'SliderGraph', ([], {'x_axis_label': '"""Ordering of the points as processed by OPTICS"""', 'y_axis_label': '"""Reachability distance (epsilon distance)"""', 'callback': 'self._on_changed'}), "(x_axis_label='Ordering of the points as processed by 
OPTICS',\n y_axis_label='Reachability distance (epsilon distance)', callback=self.\n _on_changed)\n", (5933, 6088), False, 'from Orange.widgets.utils.slidergraph import SliderGraph\n'), ((6939, 6967), 'numpy.hstack', 'np.hstack', (['(metas, clusters)'], {}), '((metas, clusters))\n', (6948, 6967), True, 'import numpy as np\n'), ((6986, 7025), 'Orange.data.Domain', 'Domain', (['attributes', 'classes', 'meta_attrs'], {}), '(attributes, classes, meta_attrs)\n', (6992, 7025), False, 'from Orange.data import Table, Domain, DiscreteVariable\n'), ((7046, 7085), 'Orange.data.Table', 'Table', (['domain', 'x', 'y', 'metas', 'self.data.W'], {}), '(domain, x, y, metas, self.data.W)\n', (7051, 7085), False, 'from Orange.data import Table, Domain, DiscreteVariable\n'), ((7335, 7502), 'sklearn.cluster.OPTICS', 'OPTICS', ([], {'min_samples': 'self.minimum_samples', 'metric': 'OPTICS_METRICS[self.metric_methode][1]', 'xi': 'self.xi_value', 'algorithm': 'OPTICS_ALGORITHM[self.algorithm_base][1]'}), '(min_samples=self.minimum_samples, metric=OPTICS_METRICS[self.\n metric_methode][1], xi=self.xi_value, algorithm=OPTICS_ALGORITHM[self.\n algorithm_base][1])\n', (7341, 7502), False, 'from sklearn.cluster import OPTICS\n'), ((7963, 8010), 'numpy.nanmax', 'np.nanmax', (['reachability[reachability != np.inf]'], {}), '(reachability[reachability != np.inf])\n', (7972, 8010), True, 'import numpy as np\n'), ((8442, 8477), 'numpy.arange', 'np.arange', (['(150)', '(150 + cluster_count)'], {}), '(150, 150 + cluster_count)\n', (8451, 8477), True, 'import numpy as np\n'), ((10691, 10715), 'Orange.data.Table', 'Table', (['"""iris-imbalanced"""'], {}), "('iris-imbalanced')\n", (10696, 10715), False, 'from Orange.data import Table, Domain, DiscreteVariable\n'), ((8088, 8121), 'numpy.unique', 'np.unique', (['labels[labels[:] >= 0]'], {}), '(labels[labels[:] >= 0])\n', (8097, 8121), True, 'import numpy as np\n'), ((10663, 10686), 'Orange.widgets.utils.widgetpreview.WidgetPreview', 'WidgetPreview', 
(['OPTICS_w'], {}), '(OPTICS_w)\n', (10676, 10686), False, 'from Orange.widgets.utils.widgetpreview import WidgetPreview\n'), ((6547, 6565), 'numpy.array', 'np.array', (['clusters'], {}), '(clusters)\n', (6555, 6565), True, 'import numpy as np\n'), ((8798, 8813), 'AnyQt.QtGui.QColor', 'QColor', (['"""black"""'], {}), "('black')\n", (8804, 8813), False, 'from AnyQt.QtGui import QColor\n'), ((8682, 8697), 'pyqtgraph.functions.intColor', 'intColor', (['color'], {}), '(color)\n', (8690, 8697), False, 'from pyqtgraph.functions import intColor\n')] |
#!/usr/bin/python3
## Tommy
from botbase import *
# Matches the "Stand: <day>. <month> <year>, <hour>[:<minute>] Uhr" timestamp
# on the Frankfurt page; group(1) captures the date/time (minutes optional).
_frankfurt_st = re.compile(r"Stand:\s*(\d\d?\. *\w+ 20\d\d, \d\d?(?::\d\d)?) Uhr")
def frankfurt(sheets):
    """Scrape Frankfurt's COVID-19 case table and write c/d/g to `sheets`.

    Returns True on success; raises (via assert/regex) if the page layout
    changed.
    """
    import locale
    # German month names are needed to interpret the page's date strings.
    locale.setlocale(locale.LC_TIME, "de_DE.UTF-8")
    soup = get_soup("https://frankfurt.de/service-und-rathaus/verwaltung/aemter-und-institutionen/gesundheitsamt/informationen-zum-neuartigen-coronavirus-sars-cov-2/aktuelle-lage")
    # Locate the headline above the case table, then read all table cells.
    header = next(x for x in soup.find_all("h4") if "Aktuelle Infektionszahlen in Frankfurt" in x.get_text())
    rows = [[x.text.strip() for x in row.findAll("td")] for row in header.findNext("table").findAll("tr")]
    date_text = rows[0][0]
    #print(date_text)
    date = _frankfurt_st.search(date_text)
    # Append ":00" when the timestamp lacks minutes, so the format is uniform.
    date = date.group(1) + (":00" if not ":" in date.group(1) else "")
    #print(date)
    #if not today().strftime("%d. %B %Y") in date_text: raise NotYetAvailableException("Frankfurt noch alt: " + date_text[:-93])
    date = check_date(date, "Frankfurt", datetime.timedelta(hours=8))
    # Sanity-check row labels before trusting the numbers.
    assert "Gesamtzahl der COVID-19-Fälle in Frankfurt" in rows[1][0]
    assert "Todesfälle" in rows[2][0]
    assert "Genesene" in rows[3][0]
    c = force_int(rows[1][1])  # confirmed cases
    d = force_int(rows[2][1])  # deaths
    g = force_int(rows[3][1])  # recovered
    update(sheets, 6412, c=c, d=d, g=g, sig="Bot", ignore_delta=True)
    return True
# Register the scraper with the bot scheduler for region key 6412 (Frankfurt).
# NOTE(review): the Task arguments (8, 5, 12, 5, 360) come from botbase --
# presumably a daily time window plus a retry interval; confirm against Task.
schedule.append(Task(8, 5, 12, 5, 360, frankfurt, 6412))
if __name__ == '__main__': frankfurt(googlesheets())
| [
"locale.setlocale"
] | [((179, 226), 'locale.setlocale', 'locale.setlocale', (['locale.LC_TIME', '"""de_DE.UTF-8"""'], {}), "(locale.LC_TIME, 'de_DE.UTF-8')\n", (195, 226), False, 'import locale\n')] |
# Copyright 2013-2018 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import signal
import commands
DEFAULT_TIMEOUT = 5.0
class TimeoutException(Exception):
    """Raised when a watched call exceeds its time budget."""
class call_with_timeout:
    """Callable wrapper that aborts *function* with TimeoutException when it
    runs longer than *timeout* seconds.

    Uses SIGALRM via setitimer, so it only works on Unix and only in the
    main thread.
    """
    def __init__(self, function, timeout=DEFAULT_TIMEOUT):
        self.timeout = timeout
        self.function = function
    def handler(self, signum, frame):
        # SIGALRM handler: turn the alarm into a Python exception.
        raise TimeoutException()
    def __call__(self, *args):
        # Install our handler, remembering the previous one for restoration.
        old = signal.signal(signal.SIGALRM, self.handler)
        # Arm a one-shot real-time timer for the allowed duration.
        signal.setitimer(signal.ITIMER_REAL, self.timeout)
        try:
            result = self.function(*args)
        finally:
            # Cancel the timer BEFORE restoring the old handler; the
            # original order could let a late alarm be delivered to the
            # previous (possibly default, process-killing) handler in the
            # window between the two calls.
            signal.setitimer(signal.ITIMER_REAL, 0)
            # restore existing SIGALRM handler
            signal.signal(signal.SIGALRM, old)
        return result
def timeout(timeout):
    """Decorator factory: run the wrapped function with a time limit (seconds)."""
    def _decorate(function):
        return call_with_timeout(function, timeout)
    return _decorate
def default_timeout(function):
    """Decorator: run *function* with the DEFAULT_TIMEOUT time limit."""
    return call_with_timeout(function)
def getstatusoutput(command, timeout=DEFAULT_TIMEOUT):
    """Timeout-wrapped version of commands.getstatusoutput."""
    # NOTE(review): the `commands` module is Python 2 only; a Python 3 port
    # would use subprocess.getstatusoutput instead.
    _gso = call_with_timeout(commands.getstatusoutput, timeout)
    try:
        return _gso(command)
    except TimeoutException:
        # Mimic a shell-style failure: status -1 plus an explanatory message.
        return (-1, "The command '%s' timed-out after %i seconds." % (command, timeout))
| [
"signal.signal",
"signal.setitimer"
] | [((1022, 1065), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'self.handler'], {}), '(signal.SIGALRM, self.handler)\n', (1035, 1065), False, 'import signal\n'), ((1098, 1148), 'signal.setitimer', 'signal.setitimer', (['signal.ITIMER_REAL', 'self.timeout'], {}), '(signal.ITIMER_REAL, self.timeout)\n', (1114, 1148), False, 'import signal\n'), ((1323, 1362), 'signal.setitimer', 'signal.setitimer', (['signal.ITIMER_REAL', '(0)'], {}), '(signal.ITIMER_REAL, 0)\n', (1339, 1362), False, 'import signal\n'), ((1280, 1314), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'old'], {}), '(signal.SIGALRM, old)\n', (1293, 1314), False, 'import signal\n')] |
import lanelines
from compgraph import CompGraph, CompGraphRunner
import numpy as np
import cv2
# Node name -> callable for the lane-detection computational graph.
func_dict = {
    'grayscale': lanelines.grayscale,
    'get_image_shape': lambda im : im.shape,
    'canny': lanelines.canny,
    'define_lanes_region': lanelines.define_lanes_region,
    'apply_region_mask': lanelines.apply_region_mask,
    'gaussian_blur': lanelines.gaussian_blur,
    'hough_lines': lanelines.find_hough_lines,
    'compute_line_tangents': lanelines.compute_line_tangents,
    'extend_lines': lanelines.extend_lane_lines_grouped_by_slopes,
    'average_endpoints_left': lanelines.average_lines_endpoints,
    'average_endpoints_right': lanelines.average_lines_endpoints
}
# Node name -> (input names, output names) wiring for the graph; a bare
# string is a single value, a tuple means several inputs/outputs.
func_io = {
    'grayscale': ('image', 'image_gray'),
    'get_image_shape': ('image_gray', ('n_rows', 'n_cols')),
    'define_lanes_region': (
        ('n_rows', 'n_cols', 'x_from', 'x_to', 'y_lim', 'left_offset', 'right_offset'),
        'region_vertices'
    ),
    'gaussian_blur': (('image_gray', 'blur_kernel'), 'blurred_image'),
    'canny': (('blurred_image', 'canny_lo', 'canny_hi'), 'image_canny'),
    'apply_region_mask': (('image_canny', 'region_vertices'), 'masked_image'),
    'hough_lines': (('masked_image', 'rho', 'theta', 'hough_threshold', 'min_line_length', 'max_line_gap'), 'lines'),
    'compute_line_tangents': ('lines', 'tangents'),
    'extend_lines': (('lines', 'tangents', 'y_lim', 'n_rows', 'abs_slope_threshold'), ('extended_lines_left', 'extended_lines_right')),
    'average_endpoints_left': ('extended_lines_left', 'avg_line_left'),
    'average_endpoints_right': ('extended_lines_right', 'avg_line_right')
}
# Assemble the graph; parameters below are supplied at run time by a
# CompGraphRunner.
computational_graph = CompGraph(func_dict, func_io)
# Free parameters fed into the computational graph.
# NOTE(review): the pixel coordinates below presumably assume this
# project's fixed camera resolution -- confirm against the input images.
parameters = {
    'x_from': 560,
    'x_to': 710,
    'y_lim': 450,
    'left_offset': 50,
    'right_offset': 0,
    'blur_kernel': 11,        # Gaussian blur kernel size
    'canny_lo': 70,           # Canny hysteresis thresholds (low/high)
    'canny_hi': 200,
    'rho': 1,                 # Hough distance resolution (pixels)
    'theta': np.pi/180,       # Hough angular resolution (radians)
    'hough_threshold': 20,
    'min_line_length': 7,
    'max_line_gap': 1,
    'abs_slope_threshold': 0.2   # minimum |slope| for a candidate lane line
}
| [
"compgraph.CompGraph"
] | [((1658, 1687), 'compgraph.CompGraph', 'CompGraph', (['func_dict', 'func_io'], {}), '(func_dict, func_io)\n', (1667, 1687), False, 'from compgraph import CompGraph, CompGraphRunner\n')] |
# LSTM(GRU) 예시 : KODEX200 주가 (2010 ~ 현재)를 예측해 본다.
# KODEX200의 종가와, 10일, 40일 이동평균을 이용하여 향후 10일 동안의 종가를 예측해 본다.
# 과거 20일 (step = 20) 종가, 이동평균 패턴을 학습하여 예측한다.
# 일일 주가에 대해 예측이 가능할까 ??
#
# 2018.11.22, 아마추어퀀트 (조성현)
# --------------------------------------------------------------------------
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from MyUtil import YahooData
nInput = 3    # features per time step: close price + 10-day MA + 40-day MA
nOutput = 3   # values predicted per time step (same three series)
nStep = 20    # window length: learn from the past 20 days
nNeuron = 50  # LSTM hidden units
# 2차원 배열의 시계열 데이터로 학습용 배치 파일을 만든다.
# return : xBatch - RNN 입력
# yBatch - RNN 출력
#
# step = 2, n = 3 이라면,
# xData = [[1,2,3], [4,5,6], [7,8,9], [10,11,12], ...]
# xBatch = [[[1,2,3], [4,5,6]], [[7,8,9], [10,11,12]], ...]
# yBatch = [[[4,5,6], [7,8,9]], [[10,11,12], [13,14,15]], ...]
def createTrainData(xData, step, n=nInput):
    """Build shuffled training batches from a 2-D time series.

    Each input sample is a ``step``-long window of ``xData`` starting at a
    shuffled offset ``i``; the matching target is the window shifted
    forward by one time step (starting at ``i + 1``).

    Returns (xBatch, yBatch), each of shape (num_windows, step, n).
    """
    offsets = np.arange(len(xData) - step)
    np.random.shuffle(offsets)
    # Collect the input windows and their one-step-ahead counterparts.
    inputs = [xData[i:i + step] for i in offsets]
    targets = [xData[i:i + step] for i in offsets + 1]
    xBatch = np.reshape(np.array(inputs), (len(offsets), step, n))
    yBatch = np.reshape(np.array(targets), (len(offsets), step, n))
    return xBatch, yBatch
# Load the stock price data (KODEX-200 daily series from a local CSV).
#df = YahooData.getStockDataYahoo('^KS11', start='2007-01-01')
df = pd.read_csv('StockData/^KS11.csv', index_col=0, parse_dates=True)
df = pd.DataFrame(df['Close'])
# Add 10-day and 40-day moving averages of the close price.
df['ma_10'] = pd.DataFrame(df['Close']).rolling(window=10).mean()
df['ma_40'] = pd.DataFrame(df['Close']).rolling(window=40).mean()
# Drop the leading rows where the 40-day average is undefined, then
# standardize every column (zero mean, unit variance).
df = df.dropna()
df = (df - df.mean()) / df.std()
# Build the training batches (shuffled windows of nStep rows).
data = np.array(df)
xBatch, yBatch = createTrainData(data, nStep)
# Build the RNN graph (Wx, Wh); xBatch is fed into the RNN.
# NOTE: this uses TensorFlow 1.x APIs (placeholders, sessions, tf.contrib).
tf.reset_default_graph()
x = tf.placeholder(tf.float32, [None, nStep, nInput])
rnn = tf.nn.rnn_cell.LSTMCell(nNeuron)
#rnn = tf.nn.rnn_cell.GRUCell(nNeuron)
output, state = tf.nn.dynamic_rnn(rnn, x, dtype=tf.float32)
# Feed-forward head (Wy): maps each RNN step output to nOutput values.
y = tf.placeholder(tf.float32, [None, nStep, nOutput])
inFC = tf.reshape(output, [-1, nNeuron])
fc1 = tf.contrib.layers.fully_connected(inputs=inFC, num_outputs=nNeuron)
predY = tf.contrib.layers.fully_connected(inputs=fc1, num_outputs=nOutput, activation_fn=None)
predY = tf.reshape(predY, [-1, nStep, nOutput])
# Loss: sum of squared errors so that feeding xBatch reproduces yBatch.
loss = tf.reduce_sum(tf.square(predY - y))
optimizer = tf.train.AdamOptimizer(learning_rate=0.001)
minLoss = optimizer.minimize(loss)
# Run the graph and train (updates Wx, Wh, Wy).
sess = tf.Session()
sess.run(tf.global_variables_initializer())
lossHist = []
for i in range(300):
    sess.run(minLoss, feed_dict={x: xBatch, y: yBatch})
    if i % 5 == 0:
        # Record the training loss every 5 epochs for plotting later.
        ploss = sess.run(loss, feed_dict={x: xBatch, y: yBatch})
        lossHist.append(ploss)
        print(i, "\tLoss:", ploss)
# Predict the next 10 periods: predict one step ahead, feed the prediction
# back in as input, and repeat until nFuture steps have been produced.
nFuture = 10
if len(data) > 100:
    lastData = np.copy(data[-100:]) # plot only the last 100 points of the original data
else:
    lastData = np.copy(data)
dx = np.copy(lastData)
estimate = [dx[-1]]
for i in range(nFuture):
    # Use the last nStep rows as the input window for the next prediction.
    px = dx[-nStep:,]
    px = np.reshape(px, (1, nStep, nInput))
    # Predict the next value (last time step of the network output).
    yHat = sess.run(predY, feed_dict={x: px})[0][-1]
    # Keep the prediction for plotting.
    estimate.append(yHat)
    # Append the prediction to the series so it feeds the next iteration.
    dx = np.vstack([dx, yHat])
# Plot the loss history.
plt.figure(figsize=(8, 3))
plt.plot(lossHist, color='red')
plt.title("Loss History")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.show()
# Plot the price chart together with the moving averages.
plt.figure(figsize=(8, 3))
plt.plot(df['Close'], color='red')
plt.plot(df['ma_10'], color='blue')
plt.plot(df['ma_40'], color='green')
plt.title("KODEX-200 stock price")
plt.show()
# Plot the original series followed by the predicted continuation.
CLOSE = 0 # column index of the close price; only the close is plotted
estimate = np.array(estimate)
ax1 = np.arange(1, len(lastData[:, CLOSE]) + 1)
ax2 = np.arange(len(lastData), len(lastData) + len(estimate))
plt.figure(figsize=(8, 3))
plt.plot(ax1, lastData[:, CLOSE], 'b-o', color='blue', markersize=4, label='Stock price', linewidth=1)
plt.plot(ax2, estimate[:, CLOSE], 'b-o', color='red', markersize=4, label='Estimate')
plt.axvline(x=ax1[-1], linestyle='dashed', linewidth=1)
plt.legend()
plt.title("KODEX-200 prediction")
plt.show()
| [
"pandas.read_csv",
"matplotlib.pyplot.ylabel",
"numpy.array",
"numpy.reshape",
"tensorflow.placeholder",
"tensorflow.contrib.layers.fully_connected",
"tensorflow.Session",
"matplotlib.pyplot.plot",
"tensorflow.nn.rnn_cell.LSTMCell",
"tensorflow.nn.dynamic_rnn",
"matplotlib.pyplot.xlabel",
"num... | [((1239, 1304), 'pandas.read_csv', 'pd.read_csv', (['"""StockData/^KS11.csv"""'], {'index_col': '(0)', 'parse_dates': '(True)'}), "('StockData/^KS11.csv', index_col=0, parse_dates=True)\n", (1250, 1304), True, 'import pandas as pd\n'), ((1310, 1335), 'pandas.DataFrame', 'pd.DataFrame', (["df['Close']"], {}), "(df['Close'])\n", (1322, 1335), True, 'import pandas as pd\n'), ((1542, 1554), 'numpy.array', 'np.array', (['df'], {}), '(df)\n', (1550, 1554), True, 'import numpy as np\n'), ((1647, 1671), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (1669, 1671), True, 'import tensorflow as tf\n'), ((1676, 1725), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, nStep, nInput]'], {}), '(tf.float32, [None, nStep, nInput])\n', (1690, 1725), True, 'import tensorflow as tf\n'), ((1734, 1766), 'tensorflow.nn.rnn_cell.LSTMCell', 'tf.nn.rnn_cell.LSTMCell', (['nNeuron'], {}), '(nNeuron)\n', (1757, 1766), True, 'import tensorflow as tf\n'), ((1822, 1865), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', (['rnn', 'x'], {'dtype': 'tf.float32'}), '(rnn, x, dtype=tf.float32)\n', (1839, 1865), True, 'import tensorflow as tf\n'), ((1940, 1990), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, nStep, nOutput]'], {}), '(tf.float32, [None, nStep, nOutput])\n', (1954, 1990), True, 'import tensorflow as tf\n'), ((1998, 2031), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, nNeuron]'], {}), '(output, [-1, nNeuron])\n', (2008, 2031), True, 'import tensorflow as tf\n'), ((2048, 2115), 'tensorflow.contrib.layers.fully_connected', 'tf.contrib.layers.fully_connected', ([], {'inputs': 'inFC', 'num_outputs': 'nNeuron'}), '(inputs=inFC, num_outputs=nNeuron)\n', (2081, 2115), True, 'import tensorflow as tf\n'), ((2124, 2214), 'tensorflow.contrib.layers.fully_connected', 'tf.contrib.layers.fully_connected', ([], {'inputs': 'fc1', 'num_outputs': 'nOutput', 'activation_fn': 'None'}), '(inputs=fc1, num_outputs=nOutput,\n 
activation_fn=None)\n', (2157, 2214), True, 'import tensorflow as tf\n'), ((2223, 2262), 'tensorflow.reshape', 'tf.reshape', (['predY', '[-1, nStep, nOutput]'], {}), '(predY, [-1, nStep, nOutput])\n', (2233, 2262), True, 'import tensorflow as tf\n'), ((2392, 2435), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (2414, 2435), True, 'import tensorflow as tf\n'), ((2527, 2539), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2537, 2539), True, 'import tensorflow as tf\n'), ((3051, 3068), 'numpy.copy', 'np.copy', (['lastData'], {}), '(lastData)\n', (3058, 3068), True, 'import numpy as np\n'), ((3446, 3472), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 3)'}), '(figsize=(8, 3))\n', (3456, 3472), True, 'import matplotlib.pyplot as plt\n'), ((3473, 3504), 'matplotlib.pyplot.plot', 'plt.plot', (['lossHist'], {'color': '"""red"""'}), "(lossHist, color='red')\n", (3481, 3504), True, 'import matplotlib.pyplot as plt\n'), ((3505, 3530), 'matplotlib.pyplot.title', 'plt.title', (['"""Loss History"""'], {}), "('Loss History')\n", (3514, 3530), True, 'import matplotlib.pyplot as plt\n'), ((3531, 3550), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {}), "('epoch')\n", (3541, 3550), True, 'import matplotlib.pyplot as plt\n'), ((3551, 3569), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""loss"""'], {}), "('loss')\n", (3561, 3569), True, 'import matplotlib.pyplot as plt\n'), ((3570, 3580), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3578, 3580), True, 'import matplotlib.pyplot as plt\n'), ((3602, 3628), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 3)'}), '(figsize=(8, 3))\n', (3612, 3628), True, 'import matplotlib.pyplot as plt\n'), ((3629, 3663), 'matplotlib.pyplot.plot', 'plt.plot', (["df['Close']"], {'color': '"""red"""'}), "(df['Close'], color='red')\n", (3637, 3663), True, 'import matplotlib.pyplot as plt\n'), ((3664, 3699), 
'matplotlib.pyplot.plot', 'plt.plot', (["df['ma_10']"], {'color': '"""blue"""'}), "(df['ma_10'], color='blue')\n", (3672, 3699), True, 'import matplotlib.pyplot as plt\n'), ((3700, 3736), 'matplotlib.pyplot.plot', 'plt.plot', (["df['ma_40']"], {'color': '"""green"""'}), "(df['ma_40'], color='green')\n", (3708, 3736), True, 'import matplotlib.pyplot as plt\n'), ((3737, 3771), 'matplotlib.pyplot.title', 'plt.title', (['"""KODEX-200 stock price"""'], {}), "('KODEX-200 stock price')\n", (3746, 3771), True, 'import matplotlib.pyplot as plt\n'), ((3772, 3782), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3780, 3782), True, 'import matplotlib.pyplot as plt\n'), ((3844, 3862), 'numpy.array', 'np.array', (['estimate'], {}), '(estimate)\n', (3852, 3862), True, 'import numpy as np\n'), ((3973, 3999), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 3)'}), '(figsize=(8, 3))\n', (3983, 3999), True, 'import matplotlib.pyplot as plt\n'), ((4000, 4107), 'matplotlib.pyplot.plot', 'plt.plot', (['ax1', 'lastData[:, CLOSE]', '"""b-o"""'], {'color': '"""blue"""', 'markersize': '(4)', 'label': '"""Stock price"""', 'linewidth': '(1)'}), "(ax1, lastData[:, CLOSE], 'b-o', color='blue', markersize=4, label=\n 'Stock price', linewidth=1)\n", (4008, 4107), True, 'import matplotlib.pyplot as plt\n'), ((4103, 4193), 'matplotlib.pyplot.plot', 'plt.plot', (['ax2', 'estimate[:, CLOSE]', '"""b-o"""'], {'color': '"""red"""', 'markersize': '(4)', 'label': '"""Estimate"""'}), "(ax2, estimate[:, CLOSE], 'b-o', color='red', markersize=4, label=\n 'Estimate')\n", (4111, 4193), True, 'import matplotlib.pyplot as plt\n'), ((4189, 4244), 'matplotlib.pyplot.axvline', 'plt.axvline', ([], {'x': 'ax1[-1]', 'linestyle': '"""dashed"""', 'linewidth': '(1)'}), "(x=ax1[-1], linestyle='dashed', linewidth=1)\n", (4200, 4244), True, 'import matplotlib.pyplot as plt\n'), ((4246, 4258), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4256, 4258), True, 'import matplotlib.pyplot as 
plt\n'), ((4259, 4292), 'matplotlib.pyplot.title', 'plt.title', (['"""KODEX-200 prediction"""'], {}), "('KODEX-200 prediction')\n", (4268, 4292), True, 'import matplotlib.pyplot as plt\n'), ((4293, 4303), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4301, 4303), True, 'import matplotlib.pyplot as plt\n'), ((835, 855), 'numpy.random.shuffle', 'np.random.shuffle', (['m'], {}), '(m)\n', (852, 855), True, 'import numpy as np\n'), ((2354, 2374), 'tensorflow.square', 'tf.square', (['(predY - y)'], {}), '(predY - y)\n', (2363, 2374), True, 'import tensorflow as tf\n'), ((2549, 2582), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2580, 2582), True, 'import tensorflow as tf\n'), ((2965, 2985), 'numpy.copy', 'np.copy', (['data[-100:]'], {}), '(data[-100:])\n', (2972, 2985), True, 'import numpy as np\n'), ((3032, 3045), 'numpy.copy', 'np.copy', (['data'], {}), '(data)\n', (3039, 3045), True, 'import numpy as np\n'), ((3181, 3215), 'numpy.reshape', 'np.reshape', (['px', '(1, nStep, nInput)'], {}), '(px, (1, nStep, nInput))\n', (3191, 3215), True, 'import numpy as np\n'), ((3403, 3424), 'numpy.vstack', 'np.vstack', (['[dx, yHat]'], {}), '([dx, yHat])\n', (3412, 3424), True, 'import numpy as np\n'), ((969, 980), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (977, 980), True, 'import numpy as np\n'), ((1098, 1109), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (1106, 1109), True, 'import numpy as np\n'), ((1350, 1375), 'pandas.DataFrame', 'pd.DataFrame', (["df['Close']"], {}), "(df['Close'])\n", (1362, 1375), True, 'import pandas as pd\n'), ((1416, 1441), 'pandas.DataFrame', 'pd.DataFrame', (["df['Close']"], {}), "(df['Close'])\n", (1428, 1441), True, 'import pandas as pd\n')] |
"""media.py: Module for movie_trailer_website, contains Movie class"""
import webbrowser
import urllib
import json
class Movie(object):
    """Store movie-related information for the trailer website.

    The constructor takes the movie title, IMDB id and a YouTube trailer
    URL; all other fields are populated via the OMDB API, and the poster
    image is downloaded locally (IMDB does not allow hotlinking).
    """
    def __init__(self, title, imdb_id, trailer_youtube_url):
        # Initialize instance variables for passed parameters
        self.title = title
        self.imdb_id = imdb_id
        self.trailer_youtube_url = trailer_youtube_url
        # Query OMDB API for json response of movie data.
        # fix: close the HTTP response object (was leaked before).
        response = urllib.urlopen("http://www.omdbapi.com/?i=" + self.imdb_id + "&plot=short&r=json")
        try:
            movie_json = json.loads(response.read())
        finally:
            response.close()
        # Download movie poster locally.
        # fix: use try/finally + `with` so both the poster connection and
        # the local file are released even if the download fails mid-write.
        poster_path = 'posters/' + self.imdb_id + '.jpg'
        poster = urllib.urlopen(movie_json['Poster'])
        try:
            with open(poster_path, 'wb') as f:
                f.write(poster.read())
        finally:
            poster.close()
        # Populate remaining instance variables from json response and
        # the downloaded poster path.
        self.plot = movie_json['Plot']
        self.genre = movie_json['Genre']
        self.year = movie_json['Year']
        self.runtime = movie_json['Runtime']
        self.rating = movie_json['Rated']
        self.imdb_score = movie_json['imdbRating']
        self.poster_image_url = poster_path
    def show_trailer(self):
        """Open the trailer URL in the default web browser."""
        webbrowser.open(self.trailer_youtube_url)
| [
"urllib.urlopen",
"webbrowser.open"
] | [((658, 744), 'urllib.urlopen', 'urllib.urlopen', (["('http://www.omdbapi.com/?i=' + self.imdb_id + '&plot=short&r=json')"], {}), "('http://www.omdbapi.com/?i=' + self.imdb_id +\n '&plot=short&r=json')\n", (672, 744), False, 'import urllib\n'), ((1444, 1485), 'webbrowser.open', 'webbrowser.open', (['self.trailer_youtube_url'], {}), '(self.trailer_youtube_url)\n', (1459, 1485), False, 'import webbrowser\n'), ((958, 994), 'urllib.urlopen', 'urllib.urlopen', (["movie_json['Poster']"], {}), "(movie_json['Poster'])\n", (972, 994), False, 'import urllib\n')] |
import torch
import torchvision
import torchvision.transforms as transforms
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from model_utils import *
class down(nn.Module):
    """Encoder block of the UNet architecture.

    Applies 2x2 average pooling followed by two (convolution + leaky
    ReLU) stages. Used by the UNet classes below.
    """

    def __init__(self, inChannels, outChannels, filterSize):
        """
        Parameters
        ----------
        inChannels : int
            input channels of the first convolution.
        outChannels : int
            output channels of the first convolution; also the input and
            output channels of the second convolution.
        filterSize : int
            N creates an N x N convolution filter.
        """
        super(down, self).__init__()
        # "Same" padding for an odd N x N kernel.
        pad = int((filterSize - 1) / 2)
        self.conv1 = MetaConv2dLayer(in_channels=inChannels, out_channels=outChannels,
                                     kernel_size=filterSize, stride=1, padding=pad)
        self.conv2 = MetaConv2dLayer(in_channels=outChannels, out_channels=outChannels,
                                     kernel_size=filterSize, stride=1, padding=pad)

    def forward(self, x, params=None):
        """Run the block on `x`.

        When `params` is given, per-layer weight dicts are extracted and
        routed to each convolution (meta-learning path); otherwise the
        layers use their own stored weights.
        """
        out = F.avg_pool2d(x, 2)
        if params is None:
            out = F.leaky_relu(self.conv1(out), negative_slope=0.1)
            out = F.leaky_relu(self.conv2(out), negative_slope=0.1)
        else:
            layer_params = extract_top_level_dict(current_dict=params)
            out = F.leaky_relu(self.conv1(out, params=layer_params['conv1']), negative_slope=0.1)
            out = F.leaky_relu(self.conv2(out, params=layer_params['conv2']), negative_slope=0.1)
        return out
class up(nn.Module):
    """Decoder block of the UNet architecture.

    Applies bilinear upsampling (factor 2), one convolution, then a second
    convolution on the concatenation with the skip connection, each
    followed by a leaky ReLU.
    """

    def __init__(self, inChannels, outChannels):
        """
        Parameters
        ----------
        inChannels : int
            input channels of the first convolution.
        outChannels : int
            output channels of both convolutions; the second convolution
            takes 2 * outChannels inputs to accommodate the skip
            connection.
        """
        super(up, self).__init__()
        self.conv1 = MetaConv2dLayer(in_channels=inChannels, out_channels=outChannels,
                                     kernel_size=3, stride=1, padding=1)
        # Doubled input channels: features are concatenated with the skip.
        self.conv2 = MetaConv2dLayer(in_channels=2 * outChannels, out_channels=outChannels,
                                     kernel_size=3, stride=1, padding=1)

    def forward(self, x, skpCn, params=None):
        """Run the block on `x` with skip-connection input `skpCn`.

        When `params` is given, per-layer weight dicts are extracted and
        routed to each convolution (meta-learning path).
        """
        out = F.interpolate(x, scale_factor=2, mode='bilinear')
        if params is None:
            out = F.leaky_relu(self.conv1(out), negative_slope=0.1)
            out = F.leaky_relu(self.conv2(torch.cat((out, skpCn), 1)), negative_slope=0.1)
        else:
            layer_params = extract_top_level_dict(current_dict=params)
            out = F.leaky_relu(self.conv1(out, params=layer_params['conv1']), negative_slope=0.1)
            out = F.leaky_relu(self.conv2(torch.cat((out, skpCn), 1),
                                          params=layer_params['conv2']), negative_slope=0.1)
        return out
class UNet(nn.Module):
    """UNet-like architecture as specified by the Super SloMo paper.

    Two input convolutions, five `down` encoder blocks, five `up` decoder
    blocks wired with skip connections, and a final output convolution.
    """

    def __init__(self, inChannels, outChannels):
        """
        Parameters
        ----------
        inChannels : int
            number of input channels for the UNet.
        outChannels : int
            number of output channels for the UNet.
        """
        super(UNet, self).__init__()
        self.conv1 = nn.Conv2d(inChannels, 32, 7, stride=1, padding=3)
        self.conv2 = nn.Conv2d(32, 32, 7, stride=1, padding=3)
        self.down1 = down(32, 64, 5)
        self.down2 = down(64, 128, 3)
        self.down3 = down(128, 256, 3)
        self.down4 = down(256, 512, 3)
        self.down5 = down(512, 512, 3)
        self.up1 = up(512, 512)
        self.up2 = up(512, 256)
        self.up3 = up(256, 128)
        self.up4 = up(128, 64)
        self.up5 = up(64, 32)
        self.conv3 = nn.Conv2d(32, outChannels, 3, stride=1, padding=1)

    def forward(self, x):
        """Run the full encoder/decoder stack on `x` and return the
        outChannels-deep feature map (leaky-ReLU activated)."""
        act = F.leaky_relu(self.conv1(x), negative_slope=0.1)
        # Each skip tensor is reused on the matching decoder level below.
        skip1 = F.leaky_relu(self.conv2(act), negative_slope=0.1)
        skip2 = self.down1(skip1)
        skip3 = self.down2(skip2)
        skip4 = self.down3(skip3)
        skip5 = self.down4(skip4)
        act = self.down5(skip5)
        act = self.up1(act, skip5)
        act = self.up2(act, skip4)
        act = self.up3(act, skip3)
        act = self.up4(act, skip2)
        act = self.up5(act, skip1)
        return F.leaky_relu(self.conv3(act), negative_slope=0.1)
class backWarp(nn.Module):
    """Backwarping module.

    Given optical flow F_0_1 (frame I0 -> I1) and frame I1, produces
    I0 <-- backwarp(F_0_1, I1) by bilinear sampling of I1 at the
    flow-displaced pixel locations.
    """

    def __init__(self, W, H, device):
        """
        Parameters
        ----------
        W : int
            image width.
        H : int
            image height.
        device : device
            computation device (cpu/cuda) for the sampling grid.
        """
        super(backWarp, self).__init__()
        # Base pixel-coordinate grid, built once per (W, H).
        base_x, base_y = np.meshgrid(np.arange(W), np.arange(H))
        self.W = W
        self.H = H
        self.gridX = torch.tensor(base_x, requires_grad=False, device=device)
        self.gridY = torch.tensor(base_y, requires_grad=False, device=device)

    def forward(self, img, flow):
        """Return I0 = backwarp(img, flow).

        Parameters
        ----------
        img : tensor
            frame I1, shape (N, C, H, W).
        flow : tensor
            optical flow F_0_1, channel 0 horizontal / channel 1 vertical.
        """
        flow_x = flow[:, 0, :, :]
        flow_y = flow[:, 1, :, :]
        # Absolute sampling positions: base grid displaced by the flow.
        abs_x = self.gridX.unsqueeze(0).expand_as(flow_x).float() + flow_x
        abs_y = self.gridY.unsqueeze(0).expand_as(flow_y).float() + flow_y
        # Normalize to the [-1, 1] range expected by grid_sample.
        norm_x = 2 * (abs_x / self.W - 0.5)
        norm_y = 2 * (abs_y / self.H - 0.5)
        sample_grid = torch.stack((norm_x, norm_y), dim=3)
        # Bilinear sampling of the input frame at the warped positions.
        return torch.nn.functional.grid_sample(img, sample_grid)
# Creating an array of `t` values for the 7 intermediate frames between
# reference frames I0 and I1.
# Evenly spaced in (0, 1), excluding the endpoints (0.125 .. 0.875).
t = np.linspace(0.125, 0.875, 7)
def getFlowCoeff(indices, device):
    """Return coefficients for the intermediate optical flows.

    Computes F_t_0 and F_t_1 from F_0_1 and F_1_0 via::

        F_t_0 = C00 x F_0_1 + C01 x F_1_0
        F_t_1 = C10 x F_0_1 + C11 x F_1_0

    where, for the intermediate position t of each sample::

        C00 = C11 = -(1 - t) * t
        C01 = t * t
        C10 = (1 - t) * (1 - t)

    Parameters
    ----------
    indices : tensor
        intermediate-frame position index per batch sample.
    device : device
        computation device (cpu/cuda).

    Returns
    -------
    tensor
        coefficients C00, C01, C10, C11, each shaped (batch, 1, 1, 1).
    """
    frac = t[indices.detach().numpy()]
    coeff00 = coeff11 = -(1 - frac) * frac
    coeff01 = frac * frac
    coeff10 = (1 - frac) * (1 - frac)

    def _broadcastable(c):
        # Reshape (batch,) -> (batch, 1, 1, 1) so the coefficient
        # broadcasts over the flow maps, and move it to `device`.
        return torch.Tensor(c)[None, None, None, :].permute(3, 0, 1, 2).to(device)

    return (_broadcastable(coeff00), _broadcastable(coeff01),
            _broadcastable(coeff10), _broadcastable(coeff11))
def getWarpCoeff(indices, device):
    """Return blending coefficients for the final intermediate frame.

    The generated frame is::

        It_gen = (C0 x V_t_0 x g_I_0_F_t_0 + C1 x V_t_1 x g_I_1_F_t_1)
                 / (C0 x V_t_0 + C1 x V_t_1)

    with C0 = 1 - t, C1 = t, where V_t_0 / V_t_1 are visibility maps and
    g_I_0_F_t_0 / g_I_1_F_t_1 are the backwarped intermediate frames.

    Parameters
    ----------
    indices : tensor
        intermediate-frame position index per batch sample.
    device : device
        computation device (cpu/cuda).

    Returns
    -------
    tensor
        coefficients C0 and C1, each shaped (batch, 1, 1, 1).
    """
    frac = t[indices.detach().numpy()]

    def _broadcastable(c):
        # Reshape (batch,) -> (batch, 1, 1, 1) for broadcasting over images.
        return torch.Tensor(c)[None, None, None, :].permute(3, 0, 1, 2).to(device)

    return _broadcastable(1 - frac), _broadcastable(frac)
class SuperSloMoModel(nn.Module):
    """Full Super SloMo pipeline (non-meta variant).

    Uses one UNet for bidirectional flow estimation and a second UNet for
    arbitrary-time flow interpolation + visibility maps, combining the two
    backwarped frames into the intermediate frame Ft_p.
    """
    def __init__(self, device):
        super(SuperSloMoModel, self).__init__()
        self.device = device
        self.flowComp = UNet(6, 4)             # 2 RGB frames in -> F_0_1 and F_1_0 (2 ch each)
        self.arbTimeFlowIntrp = UNet(20, 5)    # refined flows (4 ch) + visibility map (1 ch)
        self.backwarp = None                   # lazily (re)built per input resolution
    def forward(self, I0, I1, ind):
        # Pad both frames so width/height are multiples of 64 (the UNet
        # downsamples 5 times + pooling); undone by paddingOutput at the end.
        w, h = I0.size(3), I0.size(2)
        s = 6 # bits to shift
        padW, padH = 0, 0
        if w != ((w >> s) << s):
            padW = (((w >> s) + 1) << s) - w
        if h != ((h >> s) << s):
            padH = (((h >> s) + 1) << s) - h
        paddingInput = nn.ReflectionPad2d(padding=[padW // 2, padW - padW // 2, padH // 2, padH - padH // 2])
        paddingOutput = nn.ReflectionPad2d(padding=[0 - padW // 2, padW // 2 - padW, 0 - padH // 2, padH // 2 - padH])
        I0 = paddingInput(I0)
        I1 = paddingInput(I1)
        # Bidirectional flow between the two input frames.
        flowOut = self.flowComp(torch.cat((I0, I1), dim=1))
        F_0_1 = flowOut[:, :2, :, :]
        F_1_0 = flowOut[:, 2:, :, :]
        # Linear combination into intermediate flows at time t(ind).
        fCoeff = getFlowCoeff(ind, self.device)
        F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0
        F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0
        # Rebuild the backwarp grid only when the padded resolution changes.
        if self.backwarp is None or self.backwarp.W != I0.size(3) or self.backwarp.H != I0.size(2):
            self.backwarp = backWarp(I0.size(3), I0.size(2), self.device) # make grid
        g_I0_F_t_0 = self.backwarp(I0, F_t_0)
        g_I1_F_t_1 = self.backwarp(I1, F_t_1)
        # Second UNet refines the intermediate flows and predicts visibility.
        intrpOut = self.arbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1))
        F_t_0_f = intrpOut[:, :2, :, :] + F_t_0
        F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1
        V_t_0 = F.sigmoid(intrpOut[:, 4:5, :, :])
        V_t_1 = 1 - V_t_0
        g_I0_F_t_0_f = self.backwarp(I0, F_t_0_f)
        g_I1_F_t_1_f = self.backwarp(I1, F_t_1_f)
        # Visibility-weighted blend of the two warped frames.
        wCoeff = getWarpCoeff(ind, self.device)
        Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1)
        warped_I0, warped_I1 = self.backwarp(I0, F_1_0), self.backwarp(I1, F_0_1)
        # Crop all outputs back to the original resolution.
        Ft_p = paddingOutput(Ft_p)
        F_0_1, F_1_0 = paddingOutput(F_0_1), paddingOutput(F_1_0)
        g_I0_F_t_0, g_I1_F_t_1 = paddingOutput(g_I0_F_t_0), paddingOutput(g_I1_F_t_1)
        warped_I0, warped_I1 = paddingOutput(warped_I0), paddingOutput(warped_I1)
        #return Ft_p, # output image
        #        (F_0_1, F_1_0), # bidirectional flow maps
        #        (g_I0_F_t_0, g_I1_F_t_1), # warped intermediate images
        #        (self.backwarp(I0, F_1_0), self.backwarp(I1, F_0_1)) # warped input image (0-1, 1-0)
        return Ft_p, \
               (F_0_1, F_1_0), \
               (g_I0_F_t_0, g_I1_F_t_1), \
               (warped_I0, warped_I1)
               # (self.backwarp(I0, F_1_0), self.backwarp(I1, F_0_1))
class MetaUNet(nn.Module):
    """
    A class for creating UNet like architecture as specified by the
    Super SloMo paper. Meta-learning variant: `forward` optionally takes an
    external parameter dict that is routed to every layer, mirroring the
    plain UNet above otherwise.
    ...
    Methods
    -------
    forward(x)
        Returns output tensor after passing input `x` to the neural network
        block.
    """
    def __init__(self, inChannels, outChannels):
        """
        Parameters
        ----------
        inChannels : int
            number of input channels for the UNet.
        outChannels : int
            number of output channels for the UNet.
        """
        super(MetaUNet, self).__init__()
        # Initialize neural network blocks (MetaConv2dLayer accepts
        # externally supplied weights in its forward pass).
        self.conv1 = MetaConv2dLayer(in_channels=inChannels, out_channels=32, kernel_size=7, stride=1, padding=3)
        self.conv2 = MetaConv2dLayer(in_channels=32, out_channels=32, kernel_size=7, stride=1, padding=3)
        self.down1 = down(32, 64, 5)
        self.down2 = down(64, 128, 3)
        self.down3 = down(128, 256, 3)
        self.down4 = down(256, 512, 3)
        self.down5 = down(512, 512, 3)
        self.up1 = up(512, 512)
        self.up2 = up(512, 256)
        self.up3 = up(256, 128)
        self.up4 = up(128, 64)
        self.up5 = up(64, 32)
        self.conv3 = MetaConv2dLayer(in_channels=32, out_channels=outChannels, kernel_size=3, stride=1, padding=1)
    def forward(self, x, params=None):
        """
        Returns output tensor after passing input `x` to the neural network.
        Parameters
        ----------
        x : tensor
            input to the UNet.
        params : dict, optional
            externally supplied (meta-learned) weights; when given, the
            per-layer sub-dicts are routed to each block.
        Returns
        -------
        tensor
            output of the UNet.
        """
        param_dict = dict()
        if params is not None:
            # Meta path: split the flat parameter dict per layer/block and
            # pass each sub-dict down.
            param_dict = extract_top_level_dict(current_dict=params)
            x = F.leaky_relu(self.conv1(x, params=param_dict['conv1']), negative_slope = 0.1)
            s1 = F.leaky_relu(self.conv2(x, params=param_dict['conv2']), negative_slope = 0.1)
            s2 = self.down1(s1, params=param_dict['down1'])
            s3 = self.down2(s2, params=param_dict['down2'])
            s4 = self.down3(s3, params=param_dict['down3'])
            s5 = self.down4(s4, params=param_dict['down4'])
            x = self.down5(s5, params=param_dict['down5'])
            x = self.up1(x, s5, params=param_dict['up1'])
            x = self.up2(x, s4, params=param_dict['up2'])
            x = self.up3(x, s3, params=param_dict['up3'])
            x = self.up4(x, s2, params=param_dict['up4'])
            x = self.up5(x, s1, params=param_dict['up5'])
            x = F.leaky_relu(self.conv3(x, params=param_dict['conv3']), negative_slope = 0.1)
        else:
            # Plain path: every layer uses its own stored weights.
            x = F.leaky_relu(self.conv1(x), negative_slope = 0.1)
            s1 = F.leaky_relu(self.conv2(x), negative_slope = 0.1)
            s2 = self.down1(s1)
            s3 = self.down2(s2)
            s4 = self.down3(s3)
            s5 = self.down4(s4)
            x = self.down5(s5)
            x = self.up1(x, s5)
            x = self.up2(x, s4)
            x = self.up3(x, s3)
            x = self.up4(x, s2)
            x = self.up5(x, s1)
            x = F.leaky_relu(self.conv3(x), negative_slope = 0.1)
        return x
class MetaSuperSloMo(nn.Module):
def __init__(self, device, resume=False):
super(MetaSuperSloMo, self).__init__()
self.device = device
self.flowComp = MetaUNet(6, 4)
self.arbTimeFlowIntrp = MetaUNet(20, 5)
self.backwarp = None
if resume:
print('Loading model: pretrained_models/superslomo_base.pth')
# checkpoint = torch.load('pretrained_models/meta_superslomo.pth')
checkpoint = torch.load('pretrained_models/superslomo_base.pth')
self.flowComp.load_state_dict(checkpoint['state_dictFC'])
self.arbTimeFlowIntrp.load_state_dict(checkpoint['state_dictAT'])
def forward(self, I0, I1, ind=3, params=None, **kwargs):
ind = ind * torch.ones(I0.size(0), dtype=int)
w, h = I0.size(3), I0.size(2)
s = 6 # bits to shift
padW, padH = 0, 0
if w != ((w >> s) << s):
padW = (((w >> s) + 1) << s) - w
if h != ((h >> s) << s):
padH = (((h >> s) + 1) << s) - h
paddingInput = nn.ReflectionPad2d(padding=[padW // 2, padW - padW // 2, padH // 2, padH - padH // 2])
paddingOutput = nn.ReflectionPad2d(padding=[0 - padW // 2, padW // 2 - padW, 0 - padH // 2, padH // 2 - padH])
I0 = paddingInput(I0)
I1 = paddingInput(I1)
param_dict = dict()
if params is not None:
param_dict = extract_top_level_dict(current_dict=params)
flowOut = self.flowComp(torch.cat((I0, I1), dim=1), params=param_dict['flowComp'])
F_0_1 = flowOut[:, :2, :, :]
F_1_0 = flowOut[:, 2:, :, :]
fCoeff = getFlowCoeff(ind, self.device)
F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0
F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0
if self.backwarp is None or self.backwarp.W != I0.size(3) or self.backwarp.H != I0.size(2):
self.backwarp = backWarp(I0.size(3), I0.size(2), self.device) # make grid
g_I0_F_t_0 = self.backwarp(I0, F_t_0)
g_I1_F_t_1 = self.backwarp(I1, F_t_1)
intrpOut = self.arbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1),
params=param_dict['arbTimeFlowIntrp'])
else:
flowOut = self.flowComp(torch.cat((I0, I1), dim=1))
F_0_1 = flowOut[:, :2, :, :]
F_1_0 = flowOut[:, 2:, :, :]
fCoeff = getFlowCoeff(ind, self.device)
F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0
F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0
if self.backwarp is None or self.backwarp.W != I0.size(3) or self.backwarp.H != I0.size(2):
self.backwarp = backWarp(I0.size(3), I0.size(2), self.device) # make grid
g_I0_F_t_0 = self.backwarp(I0, F_t_0)
g_I1_F_t_1 = self.backwarp(I1, F_t_1)
intrpOut = self.arbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1))
F_t_0_f = intrpOut[:, :2, :, :] + F_t_0
F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1
V_t_0 = F.sigmoid(intrpOut[:, 4:5, :, :])
V_t_1 = 1 - V_t_0
g_I0_F_t_0_f = self.backwarp(I0, F_t_0_f)
g_I1_F_t_1_f = self.backwarp(I1, F_t_1_f)
wCoeff = getWarpCoeff(ind, self.device)
Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1)
warped_I0, warped_I1 = self.backwarp(I0, F_1_0), self.backwarp(I1, F_0_1)
Ft_p = paddingOutput(Ft_p)
F_0_1, F_1_0 = paddingOutput(F_0_1), paddingOutput(F_1_0)
g_I0_F_t_0, g_I1_F_t_1 = paddingOutput(g_I0_F_t_0), paddingOutput(g_I1_F_t_1)
warped_I0, warped_I1 = paddingOutput(warped_I0), paddingOutput(warped_I1)
#return Ft_p, # output image
# (F_0_1, F_1_0), # bidirectional flow maps
# (g_I0_F_t_0, g_I1_F_t_1), # warped intermediate images
# (self.backwarp(I0, F_1_0), self.backwarp(I1, F_0_1)) # warped input image (0-1, 1-0)
return Ft_p, {
'bidirectional_flow': (F_0_1, F_1_0),
'warped_intermediate_frames': (g_I0_F_t_0, g_I1_F_t_1),
'warped_input_frames': (warped_I0, warped_I1)}
# (self.backwarp(I0, F_1_0), self.backwarp(I1, F_0_1))
# return Ft_p
def zero_grad(self, params=None):
if params is None:
for param in self.parameters():
if param.requires_grad == True:
if param.grad is not None:
if torch.sum(param.grad) > 0:
print(param.grad)
param.grad.zero_()
else:
for name, param in params.items():
if param.requires_grad == True:
if param.grad is not None:
if torch.sum(param.grad) > 0:
print(param.grad)
param.grad.zero_()
params[name].grad = None
def restore_backup_stats(self):
    """Restore batch statistics from a stored backup.

    Intentional no-op: this network keeps no batch statistics, so there is
    nothing to restore.
    """
    return None
| [
"torch.nn.functional.grid_sample",
"torch.nn.ReflectionPad2d",
"torch.load",
"torch.stack",
"torch.Tensor",
"torch.nn.functional.avg_pool2d",
"torch.nn.Conv2d",
"torch.nn.functional.sigmoid",
"torch.tensor",
"numpy.linspace",
"torch.cat",
"torch.sum",
"torch.nn.functional.interpolate",
"nu... | [((9659, 9687), 'numpy.linspace', 'np.linspace', (['(0.125)', '(0.875)', '(7)'], {}), '(0.125, 0.875, 7)\n', (9670, 9687), True, 'import numpy as np\n'), ((2256, 2274), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['x', '(2)'], {}), '(x, 2)\n', (2268, 2274), True, 'import torch.nn.functional as F\n'), ((4759, 4808), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'scale_factor': '(2)', 'mode': '"""bilinear"""'}), "(x, scale_factor=2, mode='bilinear')\n", (4772, 4808), True, 'import torch.nn.functional as F\n'), ((6207, 6256), 'torch.nn.Conv2d', 'nn.Conv2d', (['inChannels', '(32)', '(7)'], {'stride': '(1)', 'padding': '(3)'}), '(inChannels, 32, 7, stride=1, padding=3)\n', (6216, 6256), True, 'import torch.nn as nn\n'), ((6278, 6319), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(32)', '(7)'], {'stride': '(1)', 'padding': '(3)'}), '(32, 32, 7, stride=1, padding=3)\n', (6287, 6319), True, 'import torch.nn as nn\n'), ((6700, 6750), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', 'outChannels', '(3)'], {'stride': '(1)', 'padding': '(1)'}), '(32, outChannels, 3, stride=1, padding=1)\n', (6709, 6750), True, 'import torch.nn as nn\n'), ((8448, 8503), 'torch.tensor', 'torch.tensor', (['gridX'], {'requires_grad': '(False)', 'device': 'device'}), '(gridX, requires_grad=False, device=device)\n', (8460, 8503), False, 'import torch\n'), ((8525, 8580), 'torch.tensor', 'torch.tensor', (['gridY'], {'requires_grad': '(False)', 'device': 'device'}), '(gridY, requires_grad=False, device=device)\n', (8537, 8580), False, 'import torch\n'), ((9388, 9414), 'torch.stack', 'torch.stack', (['(x, y)'], {'dim': '(3)'}), '((x, y), dim=3)\n', (9399, 9414), False, 'import torch\n'), ((9485, 9527), 'torch.nn.functional.grid_sample', 'torch.nn.functional.grid_sample', (['img', 'grid'], {}), '(img, grid)\n', (9516, 9527), False, 'import torch\n'), ((12444, 12535), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', ([], {'padding': '[padW // 2, padW - padW // 2, padH // 2, padH - 
padH // 2]'}), '(padding=[padW // 2, padW - padW // 2, padH // 2, padH - \n padH // 2])\n', (12462, 12535), True, 'import torch.nn as nn\n'), ((12555, 12653), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', ([], {'padding': '[0 - padW // 2, padW // 2 - padW, 0 - padH // 2, padH // 2 - padH]'}), '(padding=[0 - padW // 2, padW // 2 - padW, 0 - padH // 2,\n padH // 2 - padH])\n', (12573, 12653), True, 'import torch.nn as nn\n'), ((13520, 13553), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['intrpOut[:, 4:5, :, :]'], {}), '(intrpOut[:, 4:5, :, :])\n', (13529, 13553), True, 'import torch.nn.functional as F\n'), ((19138, 19229), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', ([], {'padding': '[padW // 2, padW - padW // 2, padH // 2, padH - padH // 2]'}), '(padding=[padW // 2, padW - padW // 2, padH // 2, padH - \n padH // 2])\n', (19156, 19229), True, 'import torch.nn as nn\n'), ((19249, 19347), 'torch.nn.ReflectionPad2d', 'nn.ReflectionPad2d', ([], {'padding': '[0 - padW // 2, padW // 2 - padW, 0 - padH // 2, padH // 2 - padH]'}), '(padding=[0 - padW // 2, padW // 2 - padW, 0 - padH // 2,\n padH // 2 - padH])\n', (19267, 19347), True, 'import torch.nn as nn\n'), ((21223, 21256), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['intrpOut[:, 4:5, :, :]'], {}), '(intrpOut[:, 4:5, :, :])\n', (21232, 21256), True, 'import torch.nn.functional as F\n'), ((8361, 8373), 'numpy.arange', 'np.arange', (['W'], {}), '(W)\n', (8370, 8373), True, 'import numpy as np\n'), ((8375, 8387), 'numpy.arange', 'np.arange', (['H'], {}), '(H)\n', (8384, 8387), True, 'import numpy as np\n'), ((12745, 12771), 'torch.cat', 'torch.cat', (['(I0, I1)'], {'dim': '(1)'}), '((I0, I1), dim=1)\n', (12754, 12771), False, 'import torch\n'), ((13326, 13404), 'torch.cat', 'torch.cat', (['(I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0)'], {'dim': '(1)'}), '((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)\n', (13335, 13404), False, 'import torch\n'), ((18546, 18597), 
'torch.load', 'torch.load', (['"""pretrained_models/superslomo_base.pth"""'], {}), "('pretrained_models/superslomo_base.pth')\n", (18556, 18597), False, 'import torch\n'), ((19571, 19597), 'torch.cat', 'torch.cat', (['(I0, I1)'], {'dim': '(1)'}), '((I0, I1), dim=1)\n', (19580, 19597), False, 'import torch\n'), ((20223, 20301), 'torch.cat', 'torch.cat', (['(I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0)'], {'dim': '(1)'}), '((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)\n', (20232, 20301), False, 'import torch\n'), ((20408, 20434), 'torch.cat', 'torch.cat', (['(I0, I1)'], {'dim': '(1)'}), '((I0, I1), dim=1)\n', (20417, 20434), False, 'import torch\n'), ((21029, 21107), 'torch.cat', 'torch.cat', (['(I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0)'], {'dim': '(1)'}), '((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)\n', (21038, 21107), False, 'import torch\n'), ((5176, 5200), 'torch.cat', 'torch.cat', (['(x, skpCn)', '(1)'], {}), '((x, skpCn), 1)\n', (5185, 5200), False, 'import torch\n'), ((5469, 5493), 'torch.cat', 'torch.cat', (['(x, skpCn)', '(1)'], {}), '((x, skpCn), 1)\n', (5478, 5493), False, 'import torch\n'), ((10586, 10603), 'torch.Tensor', 'torch.Tensor', (['C00'], {}), '(C00)\n', (10598, 10603), False, 'import torch\n'), ((10657, 10674), 'torch.Tensor', 'torch.Tensor', (['C01'], {}), '(C01)\n', (10669, 10674), False, 'import torch\n'), ((10728, 10745), 'torch.Tensor', 'torch.Tensor', (['C10'], {}), '(C10)\n', (10740, 10745), False, 'import torch\n'), ((10799, 10816), 'torch.Tensor', 'torch.Tensor', (['C11'], {}), '(C11)\n', (10811, 10816), False, 'import torch\n'), ((11735, 11751), 'torch.Tensor', 'torch.Tensor', (['C0'], {}), '(C0)\n', (11747, 11751), False, 'import torch\n'), ((11805, 11821), 'torch.Tensor', 'torch.Tensor', (['C1'], {}), '(C1)\n', (11817, 11821), False, 'import torch\n'), ((22850, 22871), 'torch.sum', 'torch.sum', (['param.grad'], {}), '(param.grad)\n', (22859, 22871), 
False, 'import torch\n'), ((23153, 23174), 'torch.sum', 'torch.sum', (['param.grad'], {}), '(param.grad)\n', (23162, 23174), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
"""
In this file are all the needed functions to calculate an adaptive fractionation treatment plan. The value_eval and the result_calc function are the only ones that should be used
This file requires all sparing factors to be known; therefore, it isn't suited to active treatment planning, but rather to analyzing patient data.
value_eval and result_calc_BEDNT are the most essential codes. The results from value_eval can be used to calculate a treatment plan with result_calc_BEDNT.
The optimal policies for each fraction can be extracted manually(pol4 = first fraction, first index in pol is the last fraction and the last index is the first fraction). but one must know what index represents which sparing factor
Note: This file does not assume all sparing factors to be known at the start, but simulates the treatment planning as if we would get a new sparing factor at each fraction!
This program uses a discrete state space and does not interpolate between states. Therefore, it is less precise than the interpolation programs
"""
import numpy as np
from scipy.stats import truncnorm
import time
from scipy.stats import invgamma
def get_truncated_normal(mean=0, sd=1, low=0, upp=10):
    """Return a normal distribution with the given mean/sd, truncated to [low, upp]."""
    # scipy's truncnorm takes the bounds in standard-normal units.
    a = (low - mean) / sd
    b = (upp - mean) / sd
    return truncnorm(a, b, loc=mean, scale=sd)
def std_calc(measured_data, alpha, beta):
    '''calculates the most likely standard deviation for a list of k sparing factors and an inverse-gamma conjugate prior

    measured_data: list/array with k sparing factors
    alpha: shape of inverse-gamma distribution
    beta: scale of inverse-gamma distribution
    return: most likely std based on the measured data and inverse-gamma prior
    '''
    n = len(measured_data)
    var_values = np.arange(0.00001, 0.25, 0.00001)
    # Posterior density (up to a constant) of each candidate variance v:
    #   v^(-alpha-1-n/2) * exp(-beta/v) * exp(-n*Var(data)/(2v))
    # Vectorized over the whole grid instead of a 25k-iteration Python loop;
    # the data variance is loop-invariant and computed once.
    data_var = np.var(measured_data)
    likelihood_values = (var_values ** (-alpha - 1) / var_values ** (n / 2)
                         * np.exp(-beta / var_values)
                         * np.exp(-data_var * n / (2 * var_values)))
    # MAP estimate: grid point with the highest posterior density.
    std = np.sqrt(var_values[np.argmax(likelihood_values)])
    return std
def distribution_update(sparing_factors, alpha, beta):
    '''produces the updated probability distribution for each fraction based on a variance prior

    sparing_factors: list/array of k sparing factors
    alpha: shape of inverse-gamma distribution
    beta: scale of inverse-gamma distribution
    return: k-1 dimensional mean and std arrays starting from the second sparing factor (index 1)
    '''
    k = len(sparing_factors)
    means = np.zeros(k)
    stds = np.zeros(k)
    # Each fraction i uses only the sparing factors observed up to that point.
    for i in range(k):
        observed = sparing_factors[:i + 1]
        means[i] = np.mean(observed)
        stds[i] = std_calc(observed, alpha, beta)
    # Drop index 0: it is the planning value and is not used in any fraction.
    return [means[1:], stds[1:]]
def updated_distribution_calc(data, sparing_factors):
    '''calculates the updated distribution based on prior data used to set up an inverse-gamma distribution

    data shape: nxk where n is the amount of patients and k the amount of sparing factors per patient
    sparing_factors shape: list/array with k entries, the first being the planning sparing factor (not part of the treatment)
    return: updated means and stds for k-1 fractions.
    '''
    # Fit the inverse-gamma hyperprior to the per-patient variances.
    patient_variances = data.var(axis=1)
    alpha, _loc, beta = invgamma.fit(patient_variances, floc=0)  # beta is the scale parameter
    return distribution_update(sparing_factors, alpha, beta)
def probdistributions(means, stds):
    '''produces the truncated normal distribution for several means and standard deviations

    means: list/array of n means
    stds: list/array of n standard deviations
    return: n probability distributions over the sparing-factor grid [0.00, 1.40]
    '''
    distributions = np.zeros((len(means), 141))
    for i in range(len(means)):
        dist = get_truncated_normal(means[i], stds[i], low=0, upp=1.4)
        # Probability mass of each 0.01-wide bin centred on the grid value.
        for j, value in enumerate(np.arange(0, 1.41, 0.01)):
            distributions[i][j] = dist.cdf(value + 0.004999999999999999999) - dist.cdf(value - 0.005)
    return distributions
def BED_calc0(dose, ab, sparing=1):
    """Biologically effective dose of a single physical dose for a given alpha/beta ratio."""
    effective_dose = sparing * dose
    return effective_dose * (1 + effective_dose / ab)
def BED_calc(sf, ab, actionspace):
    """BED matrix of shape (sparing factors, actions) for a given alpha/beta ratio."""
    dose_grid = np.outer(sf, actionspace)
    return dose_grid * (1 + dose_grid / ab)
def value_eval(sparing_factors,data,abt = 10,abn = 3,bound = 90,riskfactor = 0):
    '''calculates the best policy for a list of k sparing factors with k-1 fractions based on a dynamic programming algorithm. Estimation of the probability distribution is based on prior patient data

    sparing_factors: list/array of k sparing factors. A planning sparing factor is necessary!
    data: nxk dimensional data of n prior patients with k sparing factors.
    abt: alpha beta ratio of tumor
    abn: alpha beta ratio of Organ at risk
    bound: upper limit of BED in OAR
    riskfactor: "risk reducing" factor; 0 is fully adaptive fractionation, 0.1 softly pulls the plan towards 6 Gy per fraction, 1 results in a 6 Gy per fraction plan.
    return:
        Values: a 4 x BEDT x sf matrix with the value of each BEDT/sf state
        Values4: Values of the first fraction
        policy: a 4 x BEDT x sf matrix with the policy of each BEDT/sf state. index 3 = second fraction, index 0 = last fraction
        policy4: policy of the first fraction'''
    # Discrete state/action spaces (no interpolation between grid points).
    sf= np.arange(0,1.41,0.01) #list of all possible sparing factors
    BEDT = np.arange(0,90.3,0.1) #list of all possible Biological effective doses
    Values = np.zeros(len(BEDT)*len(sf)*4).reshape(4,len(BEDT),len(sf)) #value function, indexed [fraction, BED state, sparing factor]
    actionspace = np.arange(0,22.4,0.1) #list of all possible dose actions
    [means,stds] =updated_distribution_calc(data,sparing_factors)
    distributions = probdistributions(means,stds)
    policy = np.zeros((4,len(BEDT),len(sf)))
    upperbound = 90.2
    start = time.time()
    #here we add the calculation of the distance to the standard treatment (6 Gy per fraction)
    useless,calculator = np.meshgrid(np.zeros(len(actionspace)),sf) #calculator is matrix that has the correct sparing factors
    actionspace_expand,useless = np.meshgrid(actionspace,sf)
    risk_penalty = abs(6/calculator-actionspace_expand)
    delivered_doses = np.round(BED_calc(sf,abn,actionspace),1) #OAR BED per (sparing factor, action)
    BEDT_rew = BED_calc(1, abt,actionspace) #this is the reward for the dose deposited inside the tumor.
    BEDT_transformed, meaningless = np.meshgrid(BEDT_rew,np.zeros(len(sf)))
    risk_penalty[0] = risk_penalty[1] #row 0 has sf=0, so 6/sf above is infinite; reuse the sf=0.01 row
    for update_loop in range (0,5):
        # NOTE(review): the whole backward induction is repeated once per updated
        # sparing-factor distribution (one per fraction) — confirm this mirrors
        # receiving a new sparing factor at each fraction.
        prob = distributions[update_loop]
        for state in range(0,5-update_loop): #We have five fractionations with 2 special cases 0 and 4
            print(str(state+1) +' loop done')
            if state == 4: #first state with no prior dose delivered so we dont loop through BEDT
                future_bed = delivered_doses
                # NOTE(review): future_bed aliases delivered_doses here, so this
                # clipping mutates delivered_doses in place.
                future_bed[future_bed > upperbound] = upperbound #any dose surpassing the state-space ceiling is clipped; the ceiling is heavily penalized below so the program avoids it
                future_values_prob = (Values[state-1][(future_bed*10).astype(int)]*prob).sum(axis = 2) #expected future value per state, shape = sparing factors x actionspace
                penalties = np.zeros(future_bed.shape)
                penalties[future_bed > bound] = -(future_bed[future_bed > bound]-bound)*5 #linear penalty for exceeding the OAR bound
                Vs = future_values_prob + BEDT_transformed + penalties - risk_penalty*riskfactor
                policy4 = Vs.argmax(axis=1)
                Values4 = Vs.max(axis=1)
            else:
                future_values_prob_all = (Values[state-1]*prob).sum(axis = 1) #expectation over the sparing-factor distribution
                for bed in range(len(BEDT)): #this loop lets us visit every accumulated-BED state
                    future_bed = delivered_doses + bed/10
                    future_bed[future_bed > upperbound] = upperbound #any dose surpassing the ceiling is clipped.
                    if state == 0: #last state no more further values to add
                        penalties = np.zeros(future_bed.shape)
                        penalties[future_bed > bound] = -(future_bed[future_bed > bound]-bound)*5
                        penalties[future_bed == upperbound] = -10000 #here we produced the penalties for all the values surpassing the limit
                        Vs = BEDT_transformed + penalties# Value of each sparing factor for each action
                    else:
                        penalties = np.zeros(future_bed.shape)
                        penalties[future_bed == upperbound] = -100
                        future_values_prob = (future_values_prob_all[(future_bed*10).astype(int)])#expected future value for each reachable BED state
                        Vs = future_values_prob + BEDT_transformed + penalties - risk_penalty*riskfactor
                    best_action = Vs.argmax(axis=1)
                    valer = Vs.max(axis=1)
                    policy[state][bed] = best_action
                    Values[state][bed] = valer
    end = time.time()
    print('time elapsed = ' +str(end - start))
    return [Values,policy,Values4,policy4]
def result_calc_BEDNT(pol4,pol,sparing_factors,abt = 10,abn = 3): #this function calculates the fractionation plan according to the reinforcement learning
    '''prints the 5-fraction treatment plan for a set of sparing factors, using the
    policies computed by value_eval (the same sparing factors must have been used there)

    pol4: first fraction policy
    pol: second - fifth fraction policy
    sparing_factors: sparing factors that should be used to make a plan. list starting from first fraction'''
    actionspace = np.arange(0,22.4,0.1) #list of all possible dose actions (same grid as in value_eval)
    # Fraction 1: look up the planned action via the sparing-factor grid index.
    total_bedt = BED_calc0(actionspace[pol4[round(sparing_factors[0]*100)]],abt)
    total_bednt = BED_calc0(actionspace[pol4[round(sparing_factors[0]*100)]],abn,sparing_factors[0])
    print('fraction 1 dose delivered: ',actionspace[pol4[round(sparing_factors[0]*100)]])
    print('total accumulated biological effective dose in tumor; fraction 1 = ',round(total_bedt,1))
    print('total accumulated biological effective dose in normal tissue; fraction 1 = ',round(total_bednt,1))
    # Fractions 2-5: `fraction` counts down (3..0) to index pol, `index` counts up.
    for index,fraction in enumerate(range(3,-1,-1)):
        if fraction == 0:
            # Last fraction: deliver exactly the dose that uses up the remaining
            # OAR budget (90 Gy BED); positive root of sf*d*(1 + sf*d/abn) = 90 - total_bednt.
            dose_action = (-sparing_factors[index+1]+np.sqrt(sparing_factors[index+1]**2+4*sparing_factors[index+1]**2*(90-total_bednt)/abn))/(2*sparing_factors[index+1]**2/abn)
        else:
            # Policy lookup by (accumulated OAR BED, sparing factor) grid indices.
            # NOTE(review): .astype on round(...) requires total_bednt to be a numpy scalar — confirm.
            dose_action = actionspace[pol[fraction][(round(total_bednt,1)*10).astype(int)][round(sparing_factors[index+1]*100)].astype(int)]
        dose_delivered = BED_calc0(dose_action,abt)
        total_bedt += dose_delivered
        total_bednt += BED_calc0(dose_action,abn,sparing_factors[index+1])
        print('fraction ', index+2, 'dose delivered: ', round(dose_action,1))
        print('total accumulated dose in tumor; fraction ', index+2, '=', round(total_bedt,1))
        print('total accumulated dose in normal tissue; fraction ', index+2, '=', round(total_bednt,1))
| [
"numpy.mean",
"numpy.sqrt",
"scipy.stats.invgamma.fit",
"numpy.delete",
"numpy.argmax",
"numpy.exp",
"numpy.zeros",
"numpy.outer",
"numpy.var",
"scipy.stats.truncnorm",
"numpy.meshgrid",
"time.time",
"numpy.arange"
] | [((1290, 1357), 'scipy.stats.truncnorm', 'truncnorm', (['((low - mean) / sd)', '((upp - mean) / sd)'], {'loc': 'mean', 'scale': 'sd'}), '((low - mean) / sd, (upp - mean) / sd, loc=mean, scale=sd)\n', (1299, 1357), False, 'from scipy.stats import truncnorm\n'), ((1803, 1832), 'numpy.arange', 'np.arange', (['(1e-05)', '(0.25)', '(1e-05)'], {}), '(1e-05, 0.25, 1e-05)\n', (1812, 1832), True, 'import numpy as np\n'), ((2814, 2833), 'numpy.delete', 'np.delete', (['means', '(0)'], {}), '(means, 0)\n', (2823, 2833), True, 'import numpy as np\n'), ((2845, 2863), 'numpy.delete', 'np.delete', (['stds', '(0)'], {}), '(stds, 0)\n', (2854, 2863), True, 'import numpy as np\n'), ((3548, 3579), 'scipy.stats.invgamma.fit', 'invgamma.fit', (['variances'], {'floc': '(0)'}), '(variances, floc=0)\n', (3560, 3579), False, 'from scipy.stats import invgamma\n'), ((5833, 5857), 'numpy.arange', 'np.arange', (['(0)', '(1.41)', '(0.01)'], {}), '(0, 1.41, 0.01)\n', (5842, 5857), True, 'import numpy as np\n'), ((5906, 5929), 'numpy.arange', 'np.arange', (['(0)', '(90.3)', '(0.1)'], {}), '(0, 90.3, 0.1)\n', (5915, 5929), True, 'import numpy as np\n'), ((6141, 6164), 'numpy.arange', 'np.arange', (['(0)', '(22.4)', '(0.1)'], {}), '(0, 22.4, 0.1)\n', (6150, 6164), True, 'import numpy as np\n'), ((6399, 6410), 'time.time', 'time.time', ([], {}), '()\n', (6408, 6410), False, 'import time\n'), ((6655, 6683), 'numpy.meshgrid', 'np.meshgrid', (['actionspace', 'sf'], {}), '(actionspace, sf)\n', (6666, 6683), True, 'import numpy as np\n'), ((9843, 9854), 'time.time', 'time.time', ([], {}), '()\n', (9852, 9854), False, 'import time\n'), ((10554, 10577), 'numpy.arange', 'np.arange', (['(0)', '(22.4)', '(0.1)'], {}), '(0, 22.4, 0.1)\n', (10563, 10577), True, 'import numpy as np\n'), ((2704, 2736), 'numpy.mean', 'np.mean', (['sparing_factors[:i + 1]'], {}), '(sparing_factors[:i + 1])\n', (2711, 2736), True, 'import numpy as np\n'), ((4495, 4520), 'numpy.outer', 'np.outer', (['sf', 'actionspace'], {}), 
'(sf, actionspace)\n', (4503, 4520), True, 'import numpy as np\n'), ((2094, 2122), 'numpy.argmax', 'np.argmax', (['likelihood_values'], {}), '(likelihood_values)\n', (2103, 2122), True, 'import numpy as np\n'), ((4197, 4221), 'numpy.arange', 'np.arange', (['(0)', '(1.41)', '(0.01)'], {}), '(0, 1.41, 0.01)\n', (4206, 4221), True, 'import numpy as np\n'), ((2000, 2021), 'numpy.exp', 'np.exp', (['(-beta / value)'], {}), '(-beta / value)\n', (2006, 2021), True, 'import numpy as np\n'), ((4523, 4548), 'numpy.outer', 'np.outer', (['sf', 'actionspace'], {}), '(sf, actionspace)\n', (4531, 4548), True, 'import numpy as np\n'), ((7912, 7938), 'numpy.zeros', 'np.zeros', (['future_bed.shape'], {}), '(future_bed.shape)\n', (7920, 7938), True, 'import numpy as np\n'), ((11236, 11345), 'numpy.sqrt', 'np.sqrt', (['(sparing_factors[index + 1] ** 2 + 4 * sparing_factors[index + 1] ** 2 * (\n 90 - total_bednt) / abn)'], {}), '(sparing_factors[index + 1] ** 2 + 4 * sparing_factors[index + 1] **\n 2 * (90 - total_bednt) / abn)\n', (11243, 11345), True, 'import numpy as np\n'), ((8757, 8783), 'numpy.zeros', 'np.zeros', (['future_bed.shape'], {}), '(future_bed.shape)\n', (8765, 8783), True, 'import numpy as np\n'), ((9195, 9221), 'numpy.zeros', 'np.zeros', (['future_bed.shape'], {}), '(future_bed.shape)\n', (9203, 9221), True, 'import numpy as np\n'), ((2028, 2049), 'numpy.var', 'np.var', (['measured_data'], {}), '(measured_data)\n', (2034, 2049), True, 'import numpy as np\n')] |
import sys
from PySide2.QtWidgets import QApplication
from PySide2.QtGui import QColor
from pivy import quarter, coin, graphics, utils
class ConnectionMarker(graphics.Marker):
    """A draggable marker used as a shared vertex by lines and polygons."""

    def __init__(self, points):
        super(ConnectionMarker, self).__init__(points, True)
class ConnectionPolygon(graphics.Polygon):
    """A polygon whose vertices follow a set of draggable markers."""

    std_col = "green"

    def __init__(self, markers):
        flat_points = [pt for marker in markers for pt in marker.points]
        super(ConnectionPolygon, self).__init__(flat_points, True)
        self.markers = markers
        # Re-sync the polygon whenever any of its markers is dragged.
        for marker in self.markers:
            marker.on_drag.append(self.update_polygon)

    def update_polygon(self):
        self.points = [pt for marker in self.markers for pt in marker.points]

    @property
    def drag_objects(self):
        return self.markers

    def check_dependency(self):
        # Delete the polygon as soon as any of its markers is gone.
        if any(marker._delete for marker in self.markers):
            self.delete()
class ConnectionLine(graphics.Line):
    """A line whose endpoints follow a set of draggable markers."""

    def __init__(self, markers):
        flat_points = [pt for marker in markers for pt in marker.points]
        super(ConnectionLine, self).__init__(flat_points, True)
        self.markers = markers
        # Re-sync the line whenever any of its markers is dragged.
        for marker in self.markers:
            marker.on_drag.append(self.update_line)

    def update_line(self):
        self.points = [pt for marker in self.markers for pt in marker.points]

    @property
    def drag_objects(self):
        return self.markers

    def check_dependency(self):
        # Delete the line as soon as any of its markers is gone.
        if any(marker._delete for marker in self.markers):
            self.delete()
def main():
    """Show a draggable wireframe cube in a Quarter viewer."""
    app = QApplication(sys.argv)
    utils.addMarkerFromSvg("test.svg", "CUSTOM_MARKER", 40)
    viewer = quarter.QuarterWidget()
    root = graphics.InteractionSeparator(viewer.sorendermanager)
    root.pick_radius = 40

    # Cube corners: bottom face (z = -1) first, then top face (z = 1).
    corners = [
        [-1, -1, -1], [-1, 1, -1], [1, 1, -1], [1, -1, -1],
        [-1, -1, 1], [-1, 1, 1], [1, 1, 1], [1, -1, 1],
    ]
    points = [ConnectionMarker([corner]) for corner in corners]

    edge_indices = [
        (0, 1), (1, 2), (2, 3), (3, 0),   # bottom face
        (4, 5), (5, 6), (6, 7), (7, 4),   # top face
        (0, 4), (1, 5), (2, 6), (3, 7),   # vertical edges
    ]
    lines = [ConnectionLine([points[i], points[j]]) for i, j in edge_indices]

    face_indices = [
        (0, 1, 2, 3), (7, 6, 5, 4), (4, 5, 1, 0),
        (5, 6, 2, 1), (6, 7, 3, 2), (7, 4, 0, 3),
    ]
    polygons = [ConnectionPolygon([points[i] for i in quad]) for quad in face_indices]

    root += points + lines + polygons
    root.register()

    viewer.setSceneGraph(root)
    viewer.setBackgroundColor(QColor(255, 255, 255))
    viewer.setWindowTitle("minimal")
    viewer.show()
    sys.exit(app.exec_())
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| [
"PySide2.QtGui.QColor",
"pivy.graphics.InteractionSeparator",
"PySide2.QtWidgets.QApplication",
"pivy.quarter.QuarterWidget",
"pivy.utils.addMarkerFromSvg"
] | [((1444, 1466), 'PySide2.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1456, 1466), False, 'from PySide2.QtWidgets import QApplication\n'), ((1471, 1526), 'pivy.utils.addMarkerFromSvg', 'utils.addMarkerFromSvg', (['"""test.svg"""', '"""CUSTOM_MARKER"""', '(40)'], {}), "('test.svg', 'CUSTOM_MARKER', 40)\n", (1493, 1526), False, 'from pivy import quarter, coin, graphics, utils\n'), ((1541, 1564), 'pivy.quarter.QuarterWidget', 'quarter.QuarterWidget', ([], {}), '()\n', (1562, 1564), False, 'from pivy import quarter, coin, graphics, utils\n'), ((1576, 1629), 'pivy.graphics.InteractionSeparator', 'graphics.InteractionSeparator', (['viewer.sorendermanager'], {}), '(viewer.sorendermanager)\n', (1605, 1629), False, 'from pivy import quarter, coin, graphics, utils\n'), ((2970, 2991), 'PySide2.QtGui.QColor', 'QColor', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (2976, 2991), False, 'from PySide2.QtGui import QColor\n')] |
"""
Functions used in pre-processing of data for the machine learning pipelines.
"""
import pandas as pd
from pandas.api.types import is_scalar
from pathlib import Path
from sklearn.model_selection import GroupShuffleSplit
def concat_annotated(datadir):
    """
    Concatenate all parsed annotation pickles found in `datadir`.

    Core-team pickles must end with "dedup.pkl" (deduplicated by
    `parse_annotations.py`). The ze pickles need not be deduplicated: only
    their notes that do not already appear in the core-team annotations
    are kept.

    Parameters
    ----------
    datadir: Path
        path to directory with data

    Returns
    -------
    DataFrame
        df of concatenated parsed annotations
    """
    # Core-team annotations (already deduplicated during processing).
    core_frames = [pd.read_pickle(path) for path in datadir.glob('*_dedup.pkl')]
    annot = pd.concat(core_frames, ignore_index=True)
    # ze annotations: drop notes already covered by the core team (IAA files).
    ze_frames = [pd.read_pickle(path) for path in datadir.glob('annotated_df_ze_*.pkl')]
    ze_all = pd.concat(ze_frames, ignore_index=True)
    ze = ze_all.query("~NotitieID.isin(@annot.NotitieID)", engine='python')
    return pd.concat([annot, ze], ignore_index=True)
def drop_disregard(df):
    """
    Drop every note containing at least one token marked 'disregard'.

    Parameters
    ----------
    df: DataFrame
        parsed token-level annotations df (created by `parse_annotations.py`)

    Returns
    -------
    DataFrame
        df without 'disregard' notes
    """
    # Propagate the token-level flag to every row of the same note.
    df['disregard_note'] = df.groupby('NotitieID').disregard.transform('any')
    kept = df.query("not disregard_note")
    return kept.drop(columns=['disregard', 'disregard_note'])
def fix_week_14(df):
    """
    Clear the MBW domain for annotations from week 14:
    - MBW values become `False`
    - MBW-lvl values become NaN

    The MBW guidelines changed after week 14, so that week's labels are
    dropped for this domain.

    Parameters
    ----------
    df: DataFrame
        parsed token-level annotations df (created by `parse_annotations.py`)

    Returns
    -------
    DataFrame
        df without MBW and MBW_lvl labels for week 14
    """
    in_week_14 = df.batch == 'week_14'
    df['MBW'] = df.MBW.mask(in_week_14, other=False)
    df['MBW_lvl'] = df.MBW_lvl.mask(in_week_14)
    return df
def pad_sen_id(id):
    """
    Zero-pad the sentence number of a '<note_id>_<sen_no>' id to 4 digits.
    """
    note_id, sen_no = id.split('_')
    return f"{note_id}_{sen_no:0>4}"
def anonymize(txt, nlp):
    """
    Replace entities of type PERSON and GPE with 'PERSON', 'GPE'.
    Return the anonymized text and the document length (in tokens).
    """
    doc = nlp(txt)
    anonym = str(doc)
    # Map each entity's surface form to its label, PERSON/GPE only.
    targets = {str(ent): ent.label_ for ent in doc.ents
               if ent.label_ in ['PERSON', 'GPE']}
    for surface, label in targets.items():
        anonym = anonym.replace(surface, label)
    return anonym, len(doc)
def data_split_groups(
    df,
    X_col,
    y_col,
    group_col,
    train_size,
):
    """
    Split data into train / dev / test while keeping each group intact.

    Parameters
    ----------
    df: DataFrame
        df with the data to split
    X_col: str
        name of the column with the data (text)
    y_col: str
        name of the column with the gold labels
    group_col: str
        name of the column with the groups to take into account when splitting
    train_size: float
        proportion of data that should go to the training set

    Returns
    -------
    train, dev, test: DataFrame's
        df with train data, df with dev data, df with test data
    """
    # First split: carve out the training set.
    splitter = GroupShuffleSplit(n_splits=1, test_size=1 - train_size, random_state=19)
    train_idx, rest_idx = next(splitter.split(df[X_col], df[y_col], groups=df[group_col]))
    train = df.iloc[train_idx]
    rest = df.iloc[rest_idx]
    # Second split: divide the remainder 50/50 into dev and test.
    splitter = GroupShuffleSplit(n_splits=1, test_size=0.5, random_state=19)
    dev_idx, test_idx = next(splitter.split(rest[X_col], rest[y_col], groups=rest[group_col]))
    return train, rest.iloc[dev_idx], rest.iloc[test_idx]
def flatten_preds_if_necessary(df):
    """
    Flatten prediction columns whose values are a list in a list.
    This is necessary because of an issue with the predict.py script prior to the update performed on 15-09-2021.
    """
    pred_cols = [c for c in df.columns if 'pred' in c]
    for col in pred_cols:
        first = df[col].iloc[0]
        # Already flat (first element is a scalar) -> leave the column alone.
        if not is_scalar(first[0]):
            df[col] = df[col].str[0]
    return df
"sklearn.model_selection.GroupShuffleSplit",
"pandas.api.types.is_scalar",
"pandas.concat",
"pandas.read_pickle"
] | [((1185, 1226), 'pandas.concat', 'pd.concat', (['[annot, ze]'], {'ignore_index': '(True)'}), '([annot, ze], ignore_index=True)\n', (1194, 1226), True, 'import pandas as pd\n'), ((3682, 3754), 'sklearn.model_selection.GroupShuffleSplit', 'GroupShuffleSplit', ([], {'n_splits': '(1)', 'test_size': '(1 - train_size)', 'random_state': '(19)'}), '(n_splits=1, test_size=1 - train_size, random_state=19)\n', (3699, 3754), False, 'from sklearn.model_selection import GroupShuffleSplit\n'), ((3987, 4048), 'sklearn.model_selection.GroupShuffleSplit', 'GroupShuffleSplit', ([], {'n_splits': '(1)', 'test_size': '(0.5)', 'random_state': '(19)'}), '(n_splits=1, test_size=0.5, random_state=19)\n', (4004, 4048), False, 'from sklearn.model_selection import GroupShuffleSplit\n'), ((4586, 4604), 'pandas.api.types.is_scalar', 'is_scalar', (['test[0]'], {}), '(test[0])\n', (4595, 4604), False, 'from pandas.api.types import is_scalar\n'), ((865, 883), 'pandas.read_pickle', 'pd.read_pickle', (['fp'], {}), '(fp)\n', (879, 883), True, 'import pandas as pd\n'), ((1020, 1038), 'pandas.read_pickle', 'pd.read_pickle', (['fp'], {}), '(fp)\n', (1034, 1038), True, 'import pandas as pd\n')] |
import numpy as np
# NOTE: every call below is deliberately ill-typed. The trailing "# E: ..."
# comments are expected-error markers consumed by a static-type-checking test
# harness and must match the reported error text — do not "fix" these calls.
np.deprecate(1)  # E: No overload variant

np.deprecate_with_doc(1)  # E: incompatible type

np.byte_bounds(1)  # E: incompatible type

np.who(1)  # E: incompatible type

np.lookfor(None)  # E: incompatible type

np.safe_eval(None)  # E: incompatible type
| [
"numpy.deprecate_with_doc",
"numpy.deprecate",
"numpy.lookfor",
"numpy.who",
"numpy.byte_bounds",
"numpy.safe_eval"
] | [((20, 35), 'numpy.deprecate', 'np.deprecate', (['(1)'], {}), '(1)\n', (32, 35), True, 'import numpy as np\n'), ((63, 87), 'numpy.deprecate_with_doc', 'np.deprecate_with_doc', (['(1)'], {}), '(1)\n', (84, 87), True, 'import numpy as np\n'), ((113, 130), 'numpy.byte_bounds', 'np.byte_bounds', (['(1)'], {}), '(1)\n', (127, 130), True, 'import numpy as np\n'), ((156, 165), 'numpy.who', 'np.who', (['(1)'], {}), '(1)\n', (162, 165), True, 'import numpy as np\n'), ((191, 207), 'numpy.lookfor', 'np.lookfor', (['None'], {}), '(None)\n', (201, 207), True, 'import numpy as np\n'), ((233, 251), 'numpy.safe_eval', 'np.safe_eval', (['None'], {}), '(None)\n', (245, 251), True, 'import numpy as np\n')] |
# Generated by Django 3.1.7 on 2021-05-06 23:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: replaces Activity.enrollment with a
    # nullable foreign key to Mentorship.

    dependencies = [
        ('mentorships', '0001_initial'),
        ('activities', '0007_activity_enrollment'),
    ]

    operations = [
        # Drop the old enrollment link from Activity.
        migrations.RemoveField(
            model_name='activity',
            name='enrollment',
        ),
        # Add the new mentorship link; CASCADE deletes an activity together
        # with its mentorship, and activities are reachable via
        # `mentorship.activities`.
        migrations.AddField(
            model_name='activity',
            name='mentorship',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activities', to='mentorships.mentorship', verbose_name='Mentorship'),
        ),
    ]
| [
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] | [((313, 377), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""activity"""', 'name': '"""enrollment"""'}), "(model_name='activity', name='enrollment')\n", (335, 377), False, 'from django.db import migrations, models\n'), ((527, 692), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""activities"""', 'to': '"""mentorships.mentorship"""', 'verbose_name': '"""Mentorship"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='activities', to='mentorships.mentorship', verbose_name=\n 'Mentorship')\n", (544, 692), False, 'from django.db import migrations, models\n')] |
import numpy as np
import pandas as pd
from scipy import signal,stats
from flask import Flask,request,jsonify
import json
import re
import os
import data_utils as utils
import sklearn.preprocessing as pre
# Configuration file lives next to this module.
configpath = os.path.join(os.path.dirname(__file__), 'config.txt')
try:
    # Host/port of the signal-processing service, read from .settings-style config.
    config = utils.py_configs(configpath)
    Signal_SERVER = config["Signal_SERVER"]
    Signal_PORT = config["Signal_PORT"]
except Exception as e:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit still
    # propagate; chain the original cause for easier debugging.
    raise Exception("Configuration error") from e
app = Flask(__name__)
@app.route('/Preprocessing/encoding_feature', methods=['POST'])
def encoding_feature():
    """One-hot encode a posted CSV of categorical features.

    Request: multipart form with a required 'file' part (CSV) and an optional
    'index' form field (parsed for compatibility; not used by the encoding).
    Response: JSON {'OneHot Result': <stringified array>} on success,
    {"code": 2, "KeyError": ...} for a missing file part, or
    {"code": 1, "error": <exception class>} on any other failure.
    """
    try:
        file_keys = list(request.files.to_dict().keys())
        print('k: ', file_keys)
        # 'file' is the uploaded data file; reject the request up front with
        # error code 2 if it is missing.
        for key in ['file']:
            if key not in file_keys:
                return json.dumps({"code": 2, "KeyError": str(key)})
        file = request.files.get('file')
        # 'index' used to be a hard requirement even though it is unused by
        # the encoding; accept it but default to 0 so its absence no longer
        # surfaces as a generic code-1 failure.
        index = int(request.form.get('index', 0))
        df = pd.read_csv(file)
        data = df.values
        cols = df.columns
        # Per-column value -> ordinal mapping. sorted() makes the encoding
        # deterministic across runs (plain set iteration order is
        # hash-randomized for strings); key=str copes with mixed cell types.
        onehot_dict = {}
        for j in range(data.shape[1]):
            onehot_dict[cols[j]] = {
                attr: ordinal
                for ordinal, attr in enumerate(sorted(set(data[:, j]), key=str))
            }
        # Translate every cell into its ordinal before one-hot encoding.
        data_to_bi = [
            [onehot_dict[cols[j]][data[i][j]] for j in range(data.shape[1])]
            for i in range(data.shape[0])
        ]
        enc = pre.OneHotEncoder()
        print(onehot_dict)
        print(data_to_bi)
        enc.fit(data_to_bi)
        a = enc.transform(data_to_bi).toarray()
        return jsonify({'OneHot Result': str(a)})
    except Exception as e:
        print('Exception: ', e)
        # code 1: unexpected failure; report only the exception class name,
        # extracted from "<class 'SomeError'>". Raw string avoids invalid
        # escape warnings.
        result = jsonify({
            "code": 1,
            "error": re.findall(r"'([\w\d _]+)'", str(type(e)))[0],
        })
        return result
if __name__=="__main__":
app.run(host=Signal_SERVER, port=int(Signal_PORT)) | [
"pandas.read_csv",
"flask.Flask",
"flask.request.files.to_dict",
"sklearn.preprocessing.OneHotEncoder",
"json.dumps",
"os.path.dirname",
"data_utils.py_configs",
"flask.request.files.get",
"flask.jsonify"
] | [((481, 496), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (486, 496), False, 'from flask import Flask, request, jsonify\n'), ((244, 269), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (259, 269), False, 'import os\n'), ((304, 332), 'data_utils.py_configs', 'utils.py_configs', (['configpath'], {}), '(configpath)\n', (320, 332), True, 'import data_utils as utils\n'), ((1095, 1120), 'flask.request.files.get', 'request.files.get', (['"""file"""'], {}), "('file')\n", (1112, 1120), False, 'from flask import Flask, request, jsonify\n'), ((1175, 1192), 'pandas.read_csv', 'pd.read_csv', (['file'], {}), '(file)\n', (1186, 1192), True, 'import pandas as pd\n'), ((1802, 1821), 'sklearn.preprocessing.OneHotEncoder', 'pre.OneHotEncoder', ([], {}), '()\n', (1819, 1821), True, 'import sklearn.preprocessing as pre\n'), ((2010, 2025), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (2017, 2025), False, 'from flask import Flask, request, jsonify\n'), ((2206, 2221), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (2213, 2221), False, 'from flask import Flask, request, jsonify\n'), ((1031, 1049), 'json.dumps', 'json.dumps', (['output'], {}), '(output)\n', (1041, 1049), False, 'import json\n'), ((637, 660), 'flask.request.files.to_dict', 'request.files.to_dict', ([], {}), '()\n', (658, 660), False, 'from flask import Flask, request, jsonify\n')] |
import torch
import matplotlib
matplotlib.use("Agg")  # non-interactive backend: figures are only saved, never shown
import matplotlib.pyplot as plt
from matplotlib.legend_handler import HandlerTuple
from matplotlib.ticker import FormatStrFormatter
#from tqdm import tqdm
# Embed fonts as TrueType (type 42) so saved PDF/PS files stay editable.
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
plt.rc('xtick', labelsize=22) # fontsize of the tick labels
plt.rc('ytick', labelsize=22)
plt.rc('legend', fontsize=25)
plt.rc('axes', labelsize=25)
plt.rcParams["figure.figsize"] = (7.5, 6)
# One color per plotted series; shared by all plotting helpers below.
colors = ['lightcoral', 'mediumseagreen', 'darkorange']
def epoch_time(start_time, end_time):
    """Split the elapsed time between two timestamps into (minutes, seconds)."""
    elapsed = end_time - start_time
    minutes, seconds = divmod(elapsed, 60)
    return minutes, seconds
def train(model, dataloader, optimizer, criterion, initial, prev_m, device, depth=4):
    """
    Run one training epoch of `model` over `dataloader`.

    Each batch yields a pair (bm, cn) of input paths; the matching rows of
    `prev_m` (previous mean-field iterate, kept on its own device) and
    `initial` are selected by batch position.  `depth` is unused here (kept
    for signature compatibility with callers).

    Returns:
        The batch-averaged training loss for the epoch.
    """
    epoch_loss = 0
    model.train() # set model to train mode
    i = 0
    for batch in dataloader:
        optimizer.zero_grad()
        bm, cn = batch
        # Slice the shared tensors down to the rows belonging to batch i.
        X = model(bm.to(device),
                  cn.to(device),
                  prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  initial[i*dataloader.batch_size:(i+1)*dataloader.batch_size].to(device))
        strategy = model.strategy
        loss = criterion(X, prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size], strategy)
        # retain_graph=True: presumably parts of the graph are reused across
        # batches/iterations -- TODO confirm this is still required.
        loss.backward(retain_graph=True)
        optimizer.step()
        epoch_loss += loss.item()
        i+=1
    return epoch_loss/len(dataloader)
def evaluate(model, dataloader, criterion, initial, prev_m, device, depth=4):
    """
    Compute the batch-averaged validation loss of `model` over `dataloader`.

    Mirrors `train` but performs no backward pass or optimizer step.
    NOTE(review): the loop is not wrapped in torch.no_grad(), so autograd
    graphs are still built here -- confirm whether that is intentional.
    `depth` is unused (kept for signature compatibility).
    """
    epoch_loss = 0
    model.eval() # set model to evaluation mode
    i = 0
    for batch in dataloader:
        bm, cn = batch
        # Slice the shared tensors down to the rows belonging to batch i.
        X = model(bm.to(device),
                  cn.to(device),
                  prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  initial[i*dataloader.batch_size:(i+1)*dataloader.batch_size].to(device))
        strategy = model.strategy
        loss = criterion(X, prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size], strategy)
        epoch_loss += loss.item()
        i+=1
    return epoch_loss/len(dataloader)
def train1(model, dataloader, optimizer, criterion, initial, prev_m, device, depth=4):
    """
    Run one training epoch, like `train`, for models that additionally take a
    per-sample type vector.

    Batches yield (bm, cn, typeVec); the matching rows of `prev_m` and
    `initial` are selected by batch position.  `depth` is unused here.

    Returns:
        The batch-averaged training loss for the epoch.
    """
    epoch_loss = 0
    model.train() # set model to train mode
    i = 0
    for batch in dataloader:
        optimizer.zero_grad()
        bm, cn, typeVec = batch
        # Slice the shared tensors down to the rows belonging to batch i.
        X = model(bm.to(device),
                  cn.to(device),
                  typeVec.to(device),
                  prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  initial[i*dataloader.batch_size:(i+1)*dataloader.batch_size].to(device))
        strategy = model.strategy
        loss = criterion(X, prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size], strategy)
        # retain_graph=True: presumably the graph is reused -- TODO confirm.
        loss.backward(retain_graph=True)
        optimizer.step()
        epoch_loss += loss.item()
        i+=1
    return epoch_loss/len(dataloader)
def evaluate1(model, dataloader, criterion, initial, prev_m, device, depth=4):
    """
    Validation counterpart of `train1` (models taking a type vector).

    Computes the batch-averaged loss without backward pass or optimizer
    step.  NOTE(review): not wrapped in torch.no_grad() -- confirm intent.
    """
    epoch_loss = 0
    model.eval() # set model to evaluation mode
    i = 0
    for batch in dataloader:
        bm, cn, typeVec = batch
        # Slice the shared tensors down to the rows belonging to batch i.
        X = model(bm.to(device),
                  cn.to(device),
                  typeVec.to(device),
                  prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  initial[i*dataloader.batch_size:(i+1)*dataloader.batch_size].to(device))
        strategy = model.strategy
        loss = criterion(X, prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size], strategy)
        epoch_loss += loss.item()
        i+=1
    return epoch_loss/len(dataloader)
def train2(model, dataloader, optimizer, criterion, initial, prev_m, prev_c, device, depth=4):
    """
    Run one training epoch, like `train1`, for models that additionally
    condition on a previous auxiliary iterate `prev_c` (passed both to the
    model and to the loss).

    Batches yield (bm, cn, typeVec).  `depth` is unused here.

    Returns:
        The batch-averaged training loss for the epoch.
    """
    epoch_loss = 0
    model.train() # set model to train mode
    i = 0
    for batch in dataloader:
        optimizer.zero_grad()
        bm, cn, typeVec = batch
        # Slice the shared tensors down to the rows belonging to batch i.
        X = model(bm.to(device),
                  cn.to(device),
                  typeVec.to(device),
                  prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  prev_c[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  initial[i*dataloader.batch_size:(i+1)*dataloader.batch_size].to(device))
        strategy = model.strategy
        loss = criterion(X, prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                         strategy, prev_c[i*dataloader.batch_size:(i+1)*dataloader.batch_size])
        # retain_graph=True: presumably the graph is reused -- TODO confirm.
        loss.backward(retain_graph=True)
        optimizer.step()
        epoch_loss += loss.item()
        i+=1
    return epoch_loss/len(dataloader)
def evaluate2(model, dataloader, criterion, initial, prev_m, prev_c, device, depth=4):
    """
    Validation counterpart of `train2` (models conditioned on `prev_c`).

    Computes the batch-averaged loss without backward pass or optimizer
    step.  NOTE(review): not wrapped in torch.no_grad() -- confirm intent.
    """
    epoch_loss = 0
    model.eval() # set model to evaluation mode
    i = 0
    for batch in dataloader:
        bm, cn, typeVec = batch
        # Slice the shared tensors down to the rows belonging to batch i.
        X = model(bm.to(device),
                  cn.to(device),
                  typeVec.to(device),
                  prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  prev_c[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                  initial[i*dataloader.batch_size:(i+1)*dataloader.batch_size].to(device))
        strategy = model.strategy
        loss = criterion(X, prev_m[i*dataloader.batch_size:(i+1)*dataloader.batch_size],
                         strategy, prev_c[i*dataloader.batch_size:(i+1)*dataloader.batch_size])
        epoch_loss += loss.item()
        i+=1
    return epoch_loss/len(dataloader)
def plotErrors(error, target_addr, title, filename):
    """Plot per-round fictitious-play errors and save the figure as PDF.

    Args:
        error: sequence of error values, one per FP round.
        target_addr: output directory.
        title: plot title.
        filename: output file name without extension.
    """
    fig = plt.figure()
    plt.title(title)
    plt.xlabel("FP rounds")
    plt.ylabel("Errors")
    plt.plot(error, color='blue')
    fig.savefig(target_addr + '/' + filename + '.pdf')
    # Close the figure: matplotlib keeps every figure alive until closed, so
    # repeated calls would otherwise accumulate memory (and trigger the
    # "more than 20 figures" warning).
    plt.close(fig)
def plotUtil(util, ylim, ytrue, target_addr, title, filename, ins_loc=None, ins_ylim=None, cost=False):
    """Plot validation cost/utility per FP iteration against the true value.

    Saves the figure to `<target_addr>/<filename>.pdf`.

    Args:
        util: per-iteration validation values.
        ylim: optional y-axis limits.
        ytrue: true (benchmark) value, drawn as a horizontal dashed line.
        target_addr: output directory.
        title: optional plot title.
        filename: output file name without extension.
        ins_loc: optional location of an inset zooming on the last 50 points.
        ins_ylim: optional y-limits of the inset.
        cost: if True, label axis/legend as "cost"; otherwise "utility".
    """
    # The two original branches differed only in their labels; the plotting
    # logic itself is identical, so it is shared here.
    kind = "cost" if cost else "utility"
    n = len(util)
    fig, ax = plt.subplots(figsize=(7.5, 6))
    if title:
        plt.title(title)
    if ylim:
        ax.set_ylim(ylim)
    ax.set_xlabel(r"FP iterations $n$")
    ax.set_ylabel("validation " + kind)
    l1 = ax.axhline(ytrue, color="indianred", ls="--")
    l2, = ax.plot(util, color='darkcyan', ls="-")
    if ins_loc:
        # Inset axes zooming in on the final 50 iterations.
        axins = ax.inset_axes(ins_loc)
        if ins_ylim:
            axins.set_ylim(ins_ylim)
        axins.plot(range(n-50, n), util[-50:], color='darkcyan', ls="-")
        axins.axhline(ytrue, color="indianred", ls="--")
        ax.indicate_inset_zoom(axins)
    ax.legend((l1, l2), ("true " + kind, "validation " + kind), loc="upper center")
    plt.tight_layout()
    fig.savefig(target_addr+'/'+filename+'.pdf')
    plt.close(fig)  # avoid accumulating open figures across repeated calls
def plotMeanDiff_bencmarkvspredicted(data, target_addr, title, filename, ylim=None, label1=None, label2=None, ylabel=None, legendloc=None, round_=False):
    """Plot benchmark mean paths against predicted ones and save as PDF.

    Args:
        data: tuple (x, next_m, m) -- x-grid, benchmark series, predicted
            series (lists indexed alike).
        target_addr: output directory.
        title: optional plot title.
        filename: output file name without extension.
        ylim: optional y-axis limits.
        label1, label2: legend labels for the benchmark/predicted groups.
        ylabel: optional y-axis label.
        legendloc: optional legend location (defaults to "upper left").
        round_: if True, format y-axis ticks with two decimals.
    """
    fig = plt.figure()
    if title:
        plt.title(title)
    x, next_m, m = data
    if ylim:
        plt.ylim(ylim)
    bench_lines = []
    pred_lines = []
    for i in range(len(next_m)):
        line, = plt.plot(x, next_m[i], color=colors[i])
        bench_lines.append(line)
    for i in range(len(next_m)):
        line, = plt.plot(x, m[i], color=colors[i], ls='--', marker='.')
        pred_lines.append(line)
    plt.xlabel(r"time $t$")
    if ylabel:
        plt.ylabel(ylabel)
    # Single two-column legend pairing benchmark and predicted line handles
    # (the original duplicated this call for the default location).
    plt.legend([tuple(bench_lines), tuple(pred_lines)], [label1, label2],
               loc=legendloc or "upper left", ncol=2,
               handler_map={tuple: HandlerTuple(ndivide=None)})
    if round_:
        plt.gca().yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
    plt.tight_layout()
    fig.savefig(target_addr+'/'+filename+'.pdf')
    plt.close(fig)  # avoid accumulating open figures across repeated calls
def L2distance(x, y):
    """Root-mean (over batch and time) squared L2 gap between two path batches.

    Both tensors are expected to be 3-D -- presumably (batch, steps, dim);
    note the last dimension is NOT averaged over, matching the original.
    """
    batch, steps, _ = x.size()
    squared_gap = torch.pow(x - y, 2)
    return ((squared_gap.sum() / steps / batch) ** 0.5).item()
def plotSDE(benchmark, predicted, target_addr, title, filename, ylim=None, label1=None, label2=None, legendloc=None):
    """Plot benchmark vs. predicted state paths on 101 grid points in [0, 1].

    Args:
        benchmark: list of benchmark paths (one per series).
        predicted: list of predicted paths, indexed like `benchmark`.
        target_addr: output directory.
        title: optional plot title.
        filename: output file name without extension.
        ylim: optional y-axis limits.
        label1, label2: legend labels for the benchmark/predicted groups.
        legendloc: optional legend location (defaults to "upper left").
    """
    fig = plt.figure()
    if title:
        plt.title(title)
    if ylim:
        plt.ylim(ylim)
    t = [i/100 for i in range(101)]
    bench_lines = []
    pred_lines = []
    for i in range(len(benchmark)):
        line, = plt.plot(t, benchmark[i], color=colors[i], ls='-')
        bench_lines.append(line)
    for i in range(len(benchmark)):
        line, = plt.plot(t, predicted[i], color=colors[i], ls='--', marker='.')
        pred_lines.append(line)
    # Single two-column legend pairing benchmark and predicted line handles
    # (the original duplicated this call for the default location).
    plt.legend([tuple(bench_lines), tuple(pred_lines)], [label1, label2],
               loc=legendloc or "upper left", ncol=2,
               handler_map={tuple: HandlerTuple(ndivide=None)})
    plt.xlabel(r"time $t$")
    plt.ylabel(r"$X_t$ and $\widehat{X}_t$")
    plt.tight_layout()
    fig.savefig(target_addr+'/'+filename+'.pdf')
    plt.close(fig)  # avoid accumulating open figures across repeated calls
def plotC(benchmark, predicted, target_addr, title, filename, label1=None, label2=None, ylabel=None):
    """Plot benchmark vs. predicted paths on 100 grid points in [0, 1).

    Args:
        benchmark: list of benchmark paths (one per series).
        predicted: list of predicted paths, indexed like `benchmark`.
        target_addr: output directory.
        title: optional plot title.
        filename: output file name without extension.
        label1, label2: legend labels for the benchmark/predicted groups.
        ylabel: optional y-axis label.
    """
    t = [i/100 for i in range(100)]
    fig = plt.figure()
    if title:
        plt.title(title)
    bench_lines = []
    pred_lines = []
    for i in range(len(benchmark)):
        line, = plt.plot(t, benchmark[i], color=colors[i], ls='-')
        bench_lines.append(line)
    for i in range(len(benchmark)):
        line, = plt.plot(t, predicted[i], color=colors[i], ls='--', marker='.')
        pred_lines.append(line)
    # Two-column legend pairing benchmark and predicted line handles.
    plt.legend([tuple(bench_lines), tuple(pred_lines)], [label1, label2],
               loc="upper left", ncol=2,
               handler_map={tuple: HandlerTuple(ndivide=None)})
    plt.xlabel(r"time $t$")
    if ylabel:
        plt.ylabel(ylabel)
    plt.tight_layout()
    fig.savefig(target_addr+'/'+filename+'.pdf')
    plt.close(fig)  # avoid accumulating open figures across repeated calls
def plotpi(benchmark, predicted, target_addr, title, filename, ylim=None, label1=None, label2=None, ylabel=None, legendloc=None):
    """Plot benchmark vs. predicted control paths on 100 grid points in [0, 1).

    Args:
        benchmark: list of benchmark paths (one per series).
        predicted: list of predicted paths, indexed like `benchmark`.
        target_addr: output directory.
        title: optional plot title.
        filename: output file name without extension.
        ylim: optional y-axis limits.
        label1, label2: legend labels for the benchmark/predicted groups.
        ylabel: optional y-axis label.
        legendloc: optional legend location (defaults to "upper left").
    """
    fig = plt.figure()
    if title:
        plt.title(title)
    if ylim:
        plt.ylim(ylim)
    t = [i/100 for i in range(100)]
    bench_lines = []
    pred_lines = []
    for i in range(len(benchmark)):
        line, = plt.plot(t, benchmark[i], color=colors[i], ls='-')
        bench_lines.append(line)
    for i in range(len(benchmark)):
        line, = plt.plot(t, predicted[i], color=colors[i], ls='--', marker='.')
        pred_lines.append(line)
    # Single two-column legend pairing benchmark and predicted line handles
    # (the original duplicated this call for the default location).
    plt.legend([tuple(bench_lines), tuple(pred_lines)], [label1, label2],
               loc=legendloc or "upper left", ncol=2,
               handler_map={tuple: HandlerTuple(ndivide=None)})
    plt.xlabel(r"time $t$")
    if ylabel:
        plt.ylabel(ylabel)
    plt.tight_layout()
    fig.savefig(target_addr+'/'+filename+'.pdf')
    plt.close(fig)  # avoid accumulating open figures across repeated calls
def plotmC(benchmark, predicted, target_addr, title, filename, label1=None, label2=None, ylabel=None):
    """Plot one benchmark path against several predicted paths and save as PDF.

    Args:
        benchmark: a single benchmark path (drawn as a thick grey line).
        predicted: 2-D array-like of predicted paths, shape (series, steps).
        target_addr: output directory.
        title: optional plot title.
        filename: output file name without extension.
        label1, label2: legend labels for the benchmark/predicted groups.
        ylabel: optional y-axis label.
    """
    N = predicted.shape[1]
    t = [1/N*i for i in range(N)]
    fig = plt.figure()
    if title:
        plt.title(title)
    bench_lines = []
    pred_lines = []
    # Single benchmark drawn as one thick grey reference line.
    line, = plt.plot(t, benchmark, color='darkgrey', ls='-', linewidth=5)
    bench_lines.append(line)
    for i in range(len(predicted)):
        line, = plt.plot(t, predicted[i], color=colors[i], ls='--', marker='.')
        pred_lines.append(line)
    plt.legend([tuple(bench_lines), tuple(pred_lines)], [label1, label2],
               loc="upper left", ncol=2,
               handler_map={tuple: HandlerTuple(ndivide=None)})
    plt.xlabel(r"time $t$")
    if ylabel:
        plt.ylabel(ylabel)
    plt.tight_layout()
    fig.savefig(target_addr+'/'+filename+'.pdf')
    plt.close(fig)  # avoid accumulating open figures across repeated calls
| [
"matplotlib.legend_handler.HandlerTuple",
"matplotlib.pyplot.ylabel",
"matplotlib.use",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"torch.pow",
"matplotlib.pyplot.figure",
"matplotlib.ticker.FormatStrFormatter",
"matplotlib.pyplot.tight_layout",
"matplotlib.py... | [((33, 54), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (47, 54), False, 'import matplotlib\n'), ((294, 323), 'matplotlib.pyplot.rc', 'plt.rc', (['"""xtick"""'], {'labelsize': '(22)'}), "('xtick', labelsize=22)\n", (300, 323), True, 'import matplotlib.pyplot as plt\n'), ((357, 386), 'matplotlib.pyplot.rc', 'plt.rc', (['"""ytick"""'], {'labelsize': '(22)'}), "('ytick', labelsize=22)\n", (363, 386), True, 'import matplotlib.pyplot as plt\n'), ((387, 416), 'matplotlib.pyplot.rc', 'plt.rc', (['"""legend"""'], {'fontsize': '(25)'}), "('legend', fontsize=25)\n", (393, 416), True, 'import matplotlib.pyplot as plt\n'), ((418, 446), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'labelsize': '(25)'}), "('axes', labelsize=25)\n", (424, 446), True, 'import matplotlib.pyplot as plt\n'), ((6333, 6345), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6343, 6345), True, 'import matplotlib.pyplot as plt\n'), ((6350, 6366), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (6359, 6366), True, 'import matplotlib.pyplot as plt\n'), ((6371, 6394), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""FP rounds"""'], {}), "('FP rounds')\n", (6381, 6394), True, 'import matplotlib.pyplot as plt\n'), ((6399, 6419), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Errors"""'], {}), "('Errors')\n", (6409, 6419), True, 'import matplotlib.pyplot as plt\n'), ((6424, 6453), 'matplotlib.pyplot.plot', 'plt.plot', (['error'], {'color': '"""blue"""'}), "(error, color='blue')\n", (6432, 6453), True, 'import matplotlib.pyplot as plt\n'), ((8462, 8474), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8472, 8474), True, 'import matplotlib.pyplot as plt\n'), ((8895, 8917), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time $t$"""'], {}), "('time $t$')\n", (8905, 8917), True, 'import matplotlib.pyplot as plt\n'), ((9403, 9421), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (9419, 9421), 
True, 'import matplotlib.pyplot as plt\n'), ((9806, 9818), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9816, 9818), True, 'import matplotlib.pyplot as plt\n'), ((10583, 10605), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time $t$"""'], {}), "('time $t$')\n", (10593, 10605), True, 'import matplotlib.pyplot as plt\n'), ((10611, 10651), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$X_t$ and $\\\\widehat{X}_t$"""'], {}), "('$X_t$ and $\\\\widehat{X}_t$')\n", (10621, 10651), True, 'import matplotlib.pyplot as plt\n'), ((10656, 10674), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (10672, 10674), True, 'import matplotlib.pyplot as plt\n'), ((10976, 10988), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (10986, 10988), True, 'import matplotlib.pyplot as plt\n'), ((11489, 11511), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time $t$"""'], {}), "('time $t$')\n", (11499, 11511), True, 'import matplotlib.pyplot as plt\n'), ((11559, 11577), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (11575, 11577), True, 'import matplotlib.pyplot as plt\n'), ((11869, 11881), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (11879, 11881), True, 'import matplotlib.pyplot as plt\n'), ((12646, 12668), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time $t$"""'], {}), "('time $t$')\n", (12656, 12668), True, 'import matplotlib.pyplot as plt\n'), ((12716, 12734), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (12732, 12734), True, 'import matplotlib.pyplot as plt\n'), ((13061, 13073), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (13071, 13073), True, 'import matplotlib.pyplot as plt\n'), ((13185, 13246), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'benchmark'], {'color': '"""darkgrey"""', 'ls': '"""-"""', 'linewidth': '(5)'}), "(t, benchmark, color='darkgrey', ls='-', linewidth=5)\n", (13193, 13246), True, 'import matplotlib.pyplot as plt\n'), ((13565, 
13587), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time $t$"""'], {}), "('time $t$')\n", (13575, 13587), True, 'import matplotlib.pyplot as plt\n'), ((13635, 13653), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (13651, 13653), True, 'import matplotlib.pyplot as plt\n'), ((6670, 6700), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(7.5, 6)'}), '(figsize=(7.5, 6))\n', (6682, 6700), True, 'import matplotlib.pyplot as plt\n'), ((7381, 7399), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (7397, 7399), True, 'import matplotlib.pyplot as plt\n'), ((7503, 7533), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(7.5, 6)'}), '(figsize=(7.5, 6))\n', (7515, 7533), True, 'import matplotlib.pyplot as plt\n'), ((8223, 8241), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (8239, 8241), True, 'import matplotlib.pyplot as plt\n'), ((8497, 8513), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (8506, 8513), True, 'import matplotlib.pyplot as plt\n'), ((8559, 8573), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ylim'], {}), '(ylim)\n', (8567, 8573), True, 'import matplotlib.pyplot as plt\n'), ((8665, 8704), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'next_m[i]'], {'color': 'colors[i]'}), '(x, next_m[i], color=colors[i])\n', (8673, 8704), True, 'import matplotlib.pyplot as plt\n'), ((8765, 8820), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'm[i]'], {'color': 'colors[i]', 'ls': '"""--"""', 'marker': '"""."""'}), "(x, m[i], color=colors[i], ls='--', marker='.')\n", (8773, 8820), True, 'import matplotlib.pyplot as plt\n'), ((8942, 8960), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (8952, 8960), True, 'import matplotlib.pyplot as plt\n'), ((9841, 9857), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (9850, 9857), True, 'import matplotlib.pyplot as plt\n'), ((9879, 9893), 'matplotlib.pyplot.ylim', 
'plt.ylim', (['ylim'], {}), '(ylim)\n', (9887, 9893), True, 'import matplotlib.pyplot as plt\n'), ((10024, 10074), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'benchmark[i]'], {'color': 'colors[i]', 'ls': '"""-"""'}), "(t, benchmark[i], color=colors[i], ls='-')\n", (10032, 10074), True, 'import matplotlib.pyplot as plt\n'), ((10135, 10198), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'predicted[i]'], {'color': 'colors[i]', 'ls': '"""--"""', 'marker': '"""."""'}), "(t, predicted[i], color=colors[i], ls='--', marker='.')\n", (10143, 10198), True, 'import matplotlib.pyplot as plt\n'), ((11011, 11027), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (11020, 11027), True, 'import matplotlib.pyplot as plt\n'), ((11122, 11172), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'benchmark[i]'], {'color': 'colors[i]', 'ls': '"""-"""'}), "(t, benchmark[i], color=colors[i], ls='-')\n", (11130, 11172), True, 'import matplotlib.pyplot as plt\n'), ((11233, 11296), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'predicted[i]'], {'color': 'colors[i]', 'ls': '"""--"""', 'marker': '"""."""'}), "(t, predicted[i], color=colors[i], ls='--', marker='.')\n", (11241, 11296), True, 'import matplotlib.pyplot as plt\n'), ((11536, 11554), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (11546, 11554), True, 'import matplotlib.pyplot as plt\n'), ((11904, 11920), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (11913, 11920), True, 'import matplotlib.pyplot as plt\n'), ((11942, 11956), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ylim'], {}), '(ylim)\n', (11950, 11956), True, 'import matplotlib.pyplot as plt\n'), ((12087, 12137), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'benchmark[i]'], {'color': 'colors[i]', 'ls': '"""-"""'}), "(t, benchmark[i], color=colors[i], ls='-')\n", (12095, 12137), True, 'import matplotlib.pyplot as plt\n'), ((12198, 12261), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'predicted[i]'], {'color': 
'colors[i]', 'ls': '"""--"""', 'marker': '"""."""'}), "(t, predicted[i], color=colors[i], ls='--', marker='.')\n", (12206, 12261), True, 'import matplotlib.pyplot as plt\n'), ((12693, 12711), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (12703, 12711), True, 'import matplotlib.pyplot as plt\n'), ((13096, 13112), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (13105, 13112), True, 'import matplotlib.pyplot as plt\n'), ((13303, 13366), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'predicted[i]'], {'color': 'colors[i]', 'ls': '"""--"""', 'marker': '"""."""'}), "(t, predicted[i], color=colors[i], ls='--', marker='.')\n", (13311, 13366), True, 'import matplotlib.pyplot as plt\n'), ((13612, 13630), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (13622, 13630), True, 'import matplotlib.pyplot as plt\n'), ((6731, 6747), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (6740, 6747), True, 'import matplotlib.pyplot as plt\n'), ((7564, 7580), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (7573, 7580), True, 'import matplotlib.pyplot as plt\n'), ((9371, 9397), 'matplotlib.ticker.FormatStrFormatter', 'FormatStrFormatter', (['"""%.2f"""'], {}), "('%.2f')\n", (9389, 9397), False, 'from matplotlib.ticker import FormatStrFormatter\n'), ((11456, 11482), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (11468, 11482), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((13527, 13553), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (13539, 13553), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((9110, 9136), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (9122, 9136), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((9283, 9309), 
'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (9295, 9309), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((9335, 9344), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (9342, 9344), True, 'import matplotlib.pyplot as plt\n'), ((10377, 10403), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (10389, 10403), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((10550, 10576), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (10562, 10576), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((12440, 12466), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (12452, 12466), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((12613, 12639), 'matplotlib.legend_handler.HandlerTuple', 'HandlerTuple', ([], {'ndivide': 'None'}), '(ndivide=None)\n', (12625, 12639), False, 'from matplotlib.legend_handler import HandlerTuple\n'), ((9545, 9564), 'torch.pow', 'torch.pow', (['(x - y)', '(2)'], {}), '(x - y, 2)\n', (9554, 9564), False, 'import torch\n')] |
#!/usr/bin/env python
"""Tests the client file finder action."""
import collections
import glob
import hashlib
import os
import platform
import shutil
import subprocess
import unittest
import mock
import psutil
import unittest
from grr.client import comms
from grr.client.client_actions import file_finder as client_file_finder
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import utils
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import crypto as rdf_crypto
from grr.lib.rdfvalues import file_finder as rdf_file_finder
from grr.lib.rdfvalues import standard as rdf_standard
from grr.test_lib import client_test_lib
from grr.test_lib import test_lib
def MyStat(path):
  """Wraps the original stat target, faking timestamps for auth.log files.

  For any path ending in "auth.log", the three trailing time fields of the
  stat result are pinned to a fixed moment in late 2022; other paths pass
  through unchanged.  `MyStat.old_target` must be set to the real stat
  callable before use (done by the patching test setup).
  """
  stat_obj = MyStat.old_target(path)
  if not path.endswith("auth.log"):
    return stat_obj
  fields = list(stat_obj)
  # The tail of a stat_result tuple holds the three timestamps; overwrite
  # all of them with the same fixed epoch value (in 2022).
  fields[-3:] = [1672466423] * 3
  return os.stat_result(fields)
class FileFinderTest(client_test_lib.EmptyActionTest):
  def setUp(self):
    """Creates the default stat() file-finder action shared by the tests."""
    super(FileFinderTest, self).setUp()
    self.stat_action = rdf_file_finder.FileFinderAction.Stat()
def _GetRelativeResults(self, raw_results, base_path=None):
base_path = base_path or self.base_path
return [
result.stat_entry.pathspec.path[len(base_path) + 1:]
for result in raw_results
]
  def _RunFileFinder(self,
                     paths,
                     action,
                     conditions=None,
                     follow_links=True,
                     **kw):
    """Runs the OS file finder client action with the given arguments.

    Args:
      paths: list of glob expressions to search.
      action: the FileFinderAction to apply to matches.
      conditions: optional list of FileFinderConditions.
      follow_links: whether symlinks are traversed while globbing.
      **kw: extra FileFinderArgs fields.

    Returns:
      Whatever RunAction returns for the FileFinderOS action.
    """
    return self.RunAction(
        client_file_finder.FileFinderOS,
        arg=rdf_file_finder.FileFinderArgs(
            paths=paths,
            action=action,
            conditions=conditions,
            process_non_regular_files=True,
            follow_links=follow_links,
            **kw))
def testFileFinder(self):
paths = [self.base_path + "/*"]
results = self._RunFileFinder(paths, self.stat_action)
self.assertEqual(
self._GetRelativeResults(results), os.listdir(self.base_path))
profiles_path = os.path.join(self.base_path, "profiles/v1.0")
paths = [os.path.join(self.base_path, "profiles/v1.0") + "/*"]
results = self._RunFileFinder(paths, self.stat_action)
self.assertEqual(
self._GetRelativeResults(results, base_path=profiles_path),
os.listdir(profiles_path))
def testRecursiveGlob(self):
paths = [self.base_path + "/**3"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
self.assertIn("a/b", relative_results)
self.assertIn("a/b/c", relative_results)
self.assertIn("a/b/d", relative_results)
self.assertNotIn("a/b/c/helloc.txt", relative_results)
self.assertNotIn("a/b/d/hellod.txt", relative_results)
paths = [self.base_path + "/**4"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
self.assertIn("a/b", relative_results)
self.assertIn("a/b/c", relative_results)
self.assertIn("a/b/d", relative_results)
self.assertIn("a/b/c/helloc.txt", relative_results)
self.assertIn("a/b/d/hellod.txt", relative_results)
def testRegexGlob(self):
paths = [self.base_path + "/rekall*.gz"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
for glob_result in glob.glob(self.base_path + "/rekall*gz"):
self.assertIn(os.path.basename(glob_result), relative_results)
def testRecursiveRegexGlob(self):
paths = [self.base_path + "/**3/*.gz"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
self.assertIn("profiles/v1.0/nt/index.gz", relative_results)
self.assertIn("bigquery/ExportedFile.json.gz", relative_results)
for r in relative_results:
self.assertEqual(os.path.splitext(r)[1], ".gz")
paths = [self.base_path + "/**2/*.gz"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
self.assertNotIn("profiles/v1.0/nt/index.gz", relative_results)
self.assertIn("bigquery/ExportedFile.json.gz", relative_results)
for r in relative_results:
self.assertEqual(os.path.splitext(r)[1], ".gz")
def testDoubleRecursionFails(self):
paths = [self.base_path + "/**/**/test.exe"]
with self.assertRaises(ValueError):
self._RunFileFinder(paths, self.stat_action)
def testInvalidInput(self):
paths = [self.base_path + "/r**z"]
with self.assertRaises(ValueError):
self._RunFileFinder(paths, self.stat_action)
paths = [self.base_path + "/**.exe"]
with self.assertRaises(ValueError):
self._RunFileFinder(paths, self.stat_action)
paths = [self.base_path + "/test**"]
with self.assertRaises(ValueError):
self._RunFileFinder(paths, self.stat_action)
def testGroupings(self):
paths = [self.base_path + "/a/b/{c,d}/hello*"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
self.assertIn("a/b/c/helloc.txt", relative_results)
self.assertIn("a/b/d/hellod.txt", relative_results)
paths = [self.base_path + "/a/b/*/hello{c,d}.txt"]
results = self._RunFileFinder(paths, self.stat_action)
relative_results = self._GetRelativeResults(results)
self.assertIn("a/b/c/helloc.txt", relative_results)
self.assertIn("a/b/d/hellod.txt", relative_results)
  def testFollowLinks(self):
    """Symlink targets are reported iff follow_links is enabled."""
    try:
      # This sets up a structure as follows:
      # tmp_dir/lnk_test/contains_lnk
      # tmp_dir/lnk_test/contains_lnk/lnk
      # tmp_dir/lnk_test/lnk_target
      # tmp_dir/lnk_test/lnk_target/target
      # lnk is a symbolic link to lnk_target. A recursive find in
      # contains_lnk will find the target iff follow_links is allowed.
      test_dir = os.path.join(self.temp_dir, "lnk_test")
      contains_lnk = os.path.join(test_dir, "contains_lnk")
      lnk = os.path.join(contains_lnk, "lnk")
      lnk_target = os.path.join(test_dir, "lnk_target")
      lnk_target_contents = os.path.join(lnk_target, "target")
      os.mkdir(test_dir)
      os.mkdir(contains_lnk)
      os.mkdir(lnk_target)
      os.symlink(lnk_target, lnk)
      # NOTE(review): writing a str to a "wb" file is Python-2 only; under
      # Python 3 this would need b"sometext".
      with open(lnk_target_contents, "wb") as fd:
        fd.write("sometext")
      paths = [contains_lnk + "/**"]
      # With follow_links (the default) the link's target is traversed.
      results = self._RunFileFinder(paths, self.stat_action)
      relative_results = self._GetRelativeResults(results, base_path=test_dir)
      self.assertIn("contains_lnk/lnk", relative_results)
      self.assertIn("contains_lnk/lnk/target", relative_results)
      # Without follow_links the link itself is listed but not traversed.
      results = self._RunFileFinder(paths, self.stat_action, follow_links=False)
      relative_results = self._GetRelativeResults(results, base_path=test_dir)
      self.assertIn("contains_lnk/lnk", relative_results)
      self.assertNotIn("contains_lnk/lnk/target", relative_results)
    finally:
      try:
        # Best-effort cleanup of the symlink fixture.
        shutil.rmtree(test_dir)
      except OSError:
        pass
def _PrepareTimestampedFiles(self):
  """Copies the timestamped log fixtures into a fresh scratch directory.

  Returns:
    Path of the newly created directory holding the copied fixtures.
  """
  searching_path = os.path.join(self.base_path, "searching")
  test_dir = os.path.join(self.temp_dir, "times_test")
  os.mkdir(test_dir)
  for name in ("dpkg.log", "dpkg_false.log", "auth.log"):
    shutil.copy(
        os.path.join(searching_path, name), os.path.join(test_dir, name))
  return test_dir
def RunAndCheck(self,
                paths,
                action=None,
                conditions=None,
                expected=None,
                unexpected=None,
                base_path=None,
                **kw):
  """Runs the file finder and verifies presence/absence of results.

  Args:
    paths: Glob expressions passed to the file finder.
    action: File finder action; defaults to `self.stat_action`.
    conditions: Optional list of file finder conditions.
    expected: Relative paths that must appear in the results.
    unexpected: Relative paths that must not appear in the results.
    base_path: Base directory used to relativize the raw results.
    **kw: Extra keyword arguments forwarded to `_RunFileFinder`.
  """
  action = action or self.stat_action
  raw_results = self._RunFileFinder(
      paths, action, conditions=conditions, **kw)
  relative_results = self._GetRelativeResults(
      raw_results, base_path=base_path)
  # The original iterated `expected`/`unexpected` unconditionally, raising
  # TypeError whenever a caller relied on the declared None defaults.
  for f in unexpected or []:
    self.assertNotIn(f, relative_results)
  for f in expected or []:
    self.assertIn(f, relative_results)
def testLiteralMatchCondition(self):
  """Checks literal content matching (default first-hit mode) with context."""
  searching_path = os.path.join(self.base_path, "searching")
  paths = [searching_path + "/{dpkg.log,dpkg_false.log,auth.log}"]
  literal = "pam_unix(ssh:session)"
  clmc = rdf_file_finder.FileFinderContentsLiteralMatchCondition
  bytes_before = 10
  bytes_after = 20
  condition = rdf_file_finder.FileFinderCondition(
      condition_type="CONTENTS_LITERAL_MATCH",
      contents_literal_match=clmc(
          literal=literal, bytes_before=bytes_before,
          bytes_after=bytes_after))
  raw_results = self._RunFileFinder(
      paths, self.stat_action, conditions=[condition])
  relative_results = self._GetRelativeResults(
      raw_results, base_path=searching_path)
  # Only auth.log contains the literal, and only one match is reported.
  self.assertEqual(len(relative_results), 1)
  self.assertIn("auth.log", relative_results)
  self.assertEqual(len(raw_results[0].matches), 1)
  buffer_ref = raw_results[0].matches[0]
  orig_data = open(os.path.join(searching_path, "auth.log")).read()
  # The buffer covers the literal plus the requested context bytes and
  # agrees with the raw file content at the reported offset.
  self.assertEqual(
      len(buffer_ref.data), bytes_before + len(literal) + bytes_after)
  self.assertEqual(
      orig_data[buffer_ref.offset:buffer_ref.offset + buffer_ref.length],
      buffer_ref.data)
def testLiteralMatchConditionAllHits(self):
  """Checks that ALL_HITS mode reports every literal occurrence."""
  searching_path = os.path.join(self.base_path, "searching")
  paths = [searching_path + "/{dpkg.log,dpkg_false.log,auth.log}"]
  clmc = rdf_file_finder.FileFinderContentsLiteralMatchCondition
  bytes_before = 10
  bytes_after = 20
  literal = "mydomain.com"
  condition = rdf_file_finder.FileFinderCondition(
      condition_type="CONTENTS_LITERAL_MATCH",
      contents_literal_match=clmc(
          literal=literal,
          mode="ALL_HITS",
          bytes_before=bytes_before,
          bytes_after=bytes_after))
  raw_results = self._RunFileFinder(
      paths, self.stat_action, conditions=[condition])
  # One matching file with six occurrences of the literal.
  self.assertEqual(len(raw_results), 1)
  self.assertEqual(len(raw_results[0].matches), 6)
  for buffer_ref in raw_results[0].matches:
    # The matched literal sits right after the leading context bytes.
    self.assertEqual(
        buffer_ref.data[bytes_before:bytes_before + len(literal)], literal)
def testLiteralMatchConditionLargeFile(self):
  """Checks literal matching against a file larger than one read buffer."""
  paths = [os.path.join(self.base_path, "new_places.sqlite")]
  literal = "RecentlyBookmarked"
  clmc = rdf_file_finder.FileFinderContentsLiteralMatchCondition
  bytes_before = 10
  bytes_after = 20
  condition = rdf_file_finder.FileFinderCondition(
      condition_type="CONTENTS_LITERAL_MATCH",
      contents_literal_match=clmc(
          literal=literal,
          mode="ALL_HITS",
          bytes_before=bytes_before,
          bytes_after=bytes_after))
  raw_results = self._RunFileFinder(
      paths, self.stat_action, conditions=[condition])
  self.assertEqual(len(raw_results), 1)
  self.assertEqual(len(raw_results[0].matches), 1)
  buffer_ref = raw_results[0].matches[0]
  # Cross-check the returned buffer against the bytes actually on disk at
  # the reported offset.
  with open(paths[0], "rb") as fd:
    fd.seek(buffer_ref.offset)
    self.assertEqual(buffer_ref.data, fd.read(buffer_ref.length))
  self.assertEqual(
      buffer_ref.data[bytes_before:bytes_before + len(literal)], literal)
def testRegexMatchCondition(self):
  """Checks regex content matching (default first-hit mode) with context."""
  searching_path = os.path.join(self.base_path, "searching")
  paths = [searching_path + "/{dpkg.log,dpkg_false.log,auth.log}"]
  regex = r"pa[nm]_o?unix\(s{2}h"
  bytes_before = 10
  bytes_after = 20
  crmc = rdf_file_finder.FileFinderContentsRegexMatchCondition
  condition = rdf_file_finder.FileFinderCondition(
      condition_type="CONTENTS_REGEX_MATCH",
      contents_regex_match=crmc(
          regex=regex,
          bytes_before=bytes_before,
          bytes_after=bytes_after,
      ))
  raw_results = self._RunFileFinder(
      paths, self.stat_action, conditions=[condition])
  relative_results = self._GetRelativeResults(
      raw_results, base_path=searching_path)
  # Only auth.log matches the pattern, with a single reported hit.
  self.assertEqual(len(relative_results), 1)
  self.assertIn("auth.log", relative_results)
  self.assertEqual(len(raw_results[0].matches), 1)
  buffer_ref = raw_results[0].matches[0]
  orig_data = open(os.path.join(searching_path, "auth.log")).read()
  # Match buffer must agree with the raw file content at the given offset.
  self.assertEqual(
      orig_data[buffer_ref.offset:buffer_ref.offset + buffer_ref.length],
      buffer_ref.data)
def testRegexMatchConditionAllHits(self):
  """Checks that ALL_HITS regex matching reports every occurrence."""
  searching_path = os.path.join(self.base_path, "searching")
  paths = [searching_path + "/{dpkg.log,dpkg_false.log,auth.log}"]
  context_before = 10
  context_after = 20
  crmc = rdf_file_finder.FileFinderContentsRegexMatchCondition
  condition = rdf_file_finder.FileFinderCondition(
      condition_type="CONTENTS_REGEX_MATCH",
      contents_regex_match=crmc(
          regex=r"mydo....\.com",
          mode="ALL_HITS",
          bytes_before=context_before,
          bytes_after=context_after,
      ))
  raw_results = self._RunFileFinder(
      paths, self.stat_action, conditions=[condition])
  self.assertEqual(len(raw_results), 1)
  matches = raw_results[0].matches
  self.assertEqual(len(matches), 6)
  needle = "mydomain.com"
  for match in matches:
    # The matched text sits right after the leading context bytes.
    self.assertEqual(
        match.data[context_before:context_before + len(needle)], needle)
def testHashAction(self):
  """Checks the HASH action digests and both oversized-file policies."""
  paths = [os.path.join(self.base_path, "hello.exe")]
  # Read the fixture once, via a context manager (the original leaked the
  # file handle from a bare open(...).read()).
  with open(paths[0], "rb") as fd:
    data = fd.read()
  hash_action = rdf_file_finder.FileFinderAction(
      action_type=rdf_file_finder.FileFinderAction.Action.HASH)
  results = self._RunFileFinder(paths, hash_action)
  self.assertEqual(len(results), 1)
  self._AssertHashEntryMatches(results[0].hash_entry, data)
  # SKIP policy: files over max_size get no hash at all.
  hash_action = rdf_file_finder.FileFinderAction(
      action_type=rdf_file_finder.FileFinderAction.Action.HASH,
      hash=rdf_file_finder.FileFinderHashActionOptions(
          max_size=100, oversized_file_policy="SKIP"))
  results = self._RunFileFinder(paths, hash_action)
  self.assertEqual(len(results), 1)
  self.assertFalse(results[0].HasField("hash"))
  # HASH_TRUNCATED policy: only the first max_size bytes are hashed.
  hash_action = rdf_file_finder.FileFinderAction(
      action_type=rdf_file_finder.FileFinderAction.Action.HASH,
      hash=rdf_file_finder.FileFinderHashActionOptions(
          max_size=100, oversized_file_policy="HASH_TRUNCATED"))
  results = self._RunFileFinder(paths, hash_action)
  self.assertEqual(len(results), 1)
  self._AssertHashEntryMatches(results[0].hash_entry, data[:100])

def _AssertHashEntryMatches(self, hash_entry, data):
  """Asserts that `hash_entry` holds the md5/sha1/sha256 digests of `data`."""
  self.assertEqual(hash_entry.num_bytes, len(data))
  self.assertEqual(hash_entry.md5.HexDigest(), hashlib.md5(data).hexdigest())
  self.assertEqual(hash_entry.sha1.HexDigest(),
                   hashlib.sha1(data).hexdigest())
  self.assertEqual(hash_entry.sha256.HexDigest(),
                   hashlib.sha256(data).hexdigest())
def _RunFileFinderDownloadHello(self, upload, opts=None):
  """Runs a DOWNLOAD action against `hello.exe` with a stubbed uploader.

  Args:
    upload: The mocked `UploadFile`; its return value is stubbed here.
    opts: Optional `FileFinderDownloadActionOptions` for the action.

  Returns:
    The list of file finder results.
  """
  upload.return_value = rdf_client.UploadedFile(
      bytes_uploaded=42, file_id="foo", hash=rdf_crypto.Hash())
  action = rdf_file_finder.FileFinderAction.Download()
  action.download = opts
  target = os.path.join(self.base_path, "hello.exe")
  return self._RunFileFinder([target], action)
@mock.patch.object(comms.GRRClientWorker, "UploadFile")
def testDownloadActionDefault(self, upload):
  """Checks that DOWNLOAD without options uploads the file unbounded."""
  results = self._RunFileFinderDownloadHello(upload)
  # `assertEquals` is a deprecated alias of `assertEqual`.
  self.assertEqual(len(results), 1)
  # BUG FIX: `assertTrue(upload.called_with(max_bytes=None))` always passed,
  # because mocks auto-create a `called_with` attribute whose return value is
  # itself a truthy Mock. Assert the call actually happened instead.
  # TODO(review): reconstruct the full expected call signature so this can be
  # tightened to `upload.assert_called_with(..., max_bytes=None)`.
  self.assertTrue(upload.called)
  self.assertTrue(results[0].HasField("uploaded_file"))
  self.assertEqual(results[0].uploaded_file, upload.return_value)
@mock.patch.object(comms.GRRClientWorker, "UploadFile")
def testDownloadActionSkip(self, upload):
  """Checks SKIP policy: oversized files are not uploaded at all."""
  opts = rdf_file_finder.FileFinderDownloadActionOptions(
      max_size=0, oversized_file_policy="SKIP")
  results = self._RunFileFinderDownloadHello(upload, opts=opts)
  # `assertEquals` is a deprecated alias of `assertEqual`.
  self.assertEqual(len(results), 1)
  self.assertFalse(upload.called)
  self.assertFalse(results[0].HasField("uploaded_file"))
@mock.patch.object(comms.GRRClientWorker, "UploadFile")
def testDownloadActionTruncate(self, upload):
  """Checks DOWNLOAD_TRUNCATED policy: upload capped at max_size bytes."""
  opts = rdf_file_finder.FileFinderDownloadActionOptions(
      max_size=42, oversized_file_policy="DOWNLOAD_TRUNCATED")
  results = self._RunFileFinderDownloadHello(upload, opts=opts)
  # `assertEquals` is a deprecated alias of `assertEqual`.
  self.assertEqual(len(results), 1)
  # BUG FIX: `assertTrue(upload.called_with(max_bytes=42))` always passed —
  # mocks auto-create `called_with` and its return value is truthy. Assert
  # the call happened; TODO(review): tighten to `assert_called_with` once the
  # full expected call signature is known.
  self.assertTrue(upload.called)
  self.assertTrue(results[0].HasField("uploaded_file"))
  self.assertEqual(results[0].uploaded_file, upload.return_value)
# Ext2/ext4 inode flag bits as toggled by chattr(1): `c` (compressed) and
# `i` (immutable). Values mirror include/uapi/linux/fs.h.
EXT2_COMPR_FL = 0x00000004
EXT2_IMMUTABLE_FL = 0x00000010
# TODO(hanuszczak): Maybe it would make sense to refactor this to a helper
# constructor of the `rdf_file_finder.FileFinderAction`.
@staticmethod
def _StatAction(**kwargs):
  """Builds a STAT file finder action with the given stat options."""
  opts = rdf_file_finder.FileFinderStatActionOptions(**kwargs)
  return rdf_file_finder.FileFinderAction(
      action_type=rdf_file_finder.FileFinderAction.Action.STAT, stat=opts)
@unittest.skipIf(platform.system() != "Linux", "requires Linux")
def testStatExtFlags(self):
  """Checks that STAT reports ext2 inode flags set via chattr(1)."""
  with test_lib.AutoTempFilePath() as temp_filepath:
    if subprocess.call(["which", "chattr"]) != 0:
      raise unittest.SkipTest("`chattr` command is not available")
    # `+c` marks the file compressed; not every filesystem supports it.
    if subprocess.call(["chattr", "+c", temp_filepath]) != 0:
      reason = "extended attributes not supported by filesystem"
      raise unittest.SkipTest(reason)
    action = self._StatAction()
    results = self._RunFileFinder([temp_filepath], action)
    self.assertEqual(len(results), 1)
    stat_entry = results[0].stat_entry
    # The compressed bit must be reported; immutable was never set.
    self.assertTrue(stat_entry.st_flags_linux & self.EXT2_COMPR_FL)
    self.assertFalse(stat_entry.st_flags_linux & self.EXT2_IMMUTABLE_FL)
def testStatExtAttrs(self):
  """Checks extended-attribute collection and the ext_attrs=False opt-out."""
  with test_lib.AutoTempFilePath() as temp_filepath:
    self._SetExtAttr(temp_filepath, "user.foo", "bar")
    self._SetExtAttr(temp_filepath, "user.quux", "norf")
    # Default stat action: extended attributes are collected.
    action = self._StatAction()
    results = self._RunFileFinder([temp_filepath], action)
    self.assertEqual(len(results), 1)
    ext_attrs = results[0].stat_entry.ext_attrs
    # NOTE(review): assumes a deterministic (sorted) attribute order in the
    # result — confirm against the stat implementation.
    self.assertEqual(ext_attrs[0].name, "user.foo")
    self.assertEqual(ext_attrs[0].value, "bar")
    self.assertEqual(ext_attrs[1].name, "user.quux")
    self.assertEqual(ext_attrs[1].value, "norf")
    # With ext_attrs=False the attribute list must be empty.
    action = self._StatAction(ext_attrs=False)
    results = self._RunFileFinder([temp_filepath], action)
    self.assertEqual(len(results), 1)
    ext_attrs = results[0].stat_entry.ext_attrs
    self.assertFalse(ext_attrs)
@classmethod
def _SetExtAttr(cls, filepath, name, value):
  """Sets an extended attribute, dispatching on the host platform."""
  system = platform.system()
  if system == "Linux":
    cls._SetExtAttrLinux(filepath, name, value)
  elif system == "Darwin":
    cls._SetExtAttrOsx(filepath, name, value)
  else:
    raise unittest.SkipTest("unsupported system")
@classmethod
def _SetExtAttrLinux(cls, filepath, name, value):
  """Sets an extended attribute via setfattr(1), skipping if unsupported."""
  if subprocess.call(["which", "setfattr"]) != 0:
    raise unittest.SkipTest("`setfattr` command is not available")
  command = ["setfattr", filepath, "-n", name, "-v", value]
  if subprocess.call(command) != 0:
    raise unittest.SkipTest("extended attributes not supported by filesystem")
@classmethod
def _SetExtAttrOsx(cls, filepath, name, value):
  """Sets an extended attribute via xattr(1), skipping if unsupported."""
  command = ["xattr", "-w", name, value, filepath]
  if subprocess.call(command) != 0:
    raise unittest.SkipTest("extended attributes not supported")
def testLinkStat(self):
  """Tests resolving symlinks when getting stat entries."""
  test_dir = os.path.join(self.temp_dir, "lnk_stat_test")
  lnk = os.path.join(test_dir, "lnk")
  lnk_target = os.path.join(test_dir, "lnk_target")
  os.mkdir(test_dir)
  with open(lnk_target, "wb") as fd:
    fd.write("sometext")
  os.symlink(lnk_target, lnk)
  paths = [lnk]
  # lstat sizes the link itself; stat follows it to the target file.
  link_size = os.lstat(lnk).st_size
  target_size = os.stat(lnk).st_size
  for expected_size, resolve_links in [(link_size, False), (target_size,
                                                            True)]:
    stat_action = rdf_file_finder.FileFinderAction.Stat(
        resolve_links=resolve_links)
    results = self._RunFileFinder(paths, stat_action)
    self.assertEqual(len(results), 1)
    res = results[0]
    self.assertEqual(res.stat_entry.st_size, expected_size)
def testModificationTimeCondition(self):
  """Checks min/max modification-time conditions with stubbed lstat times."""
  with utils.Stubber(os, "lstat", MyStat):
    test_dir = self._PrepareTimestampedFiles()
    # We have one "old" file, auth.log, and two "new" ones, dpkg*.
    paths = [test_dir + "/{dpkg.log,dpkg_false.log,auth.log}"]
    change_time = rdfvalue.RDFDatetime.FromHumanReadable("2020-01-01")
    # max bound: only files modified before the cutoff pass.
    modification_time_condition = rdf_file_finder.FileFinderCondition(
        condition_type="MODIFICATION_TIME",
        modification_time=rdf_file_finder.FileFinderModificationTimeCondition(
            max_last_modified_time=change_time))
    self.RunAndCheck(
        paths,
        conditions=[modification_time_condition],
        expected=["dpkg.log", "dpkg_false.log"],
        unexpected=["auth.log"],
        base_path=test_dir)
    # Now just the file from 2022 (min bound: modified after the cutoff).
    modification_time_condition = rdf_file_finder.FileFinderCondition(
        condition_type="MODIFICATION_TIME",
        modification_time=rdf_file_finder.FileFinderModificationTimeCondition(
            min_last_modified_time=change_time))
    self.RunAndCheck(
        paths,
        conditions=[modification_time_condition],
        expected=["auth.log"],
        unexpected=["dpkg.log", "dpkg_false.log"],
        base_path=test_dir)
def testAccessTimeCondition(self):
  """Checks min/max access-time conditions with stubbed lstat times."""
  with utils.Stubber(os, "lstat", MyStat):
    test_dir = self._PrepareTimestampedFiles()
    paths = [test_dir + "/{dpkg.log,dpkg_false.log,auth.log}"]
    change_time = rdfvalue.RDFDatetime.FromHumanReadable("2020-01-01")
    # Check we can get the normal files (accessed before the cutoff).
    access_time_condition = rdf_file_finder.FileFinderCondition(
        condition_type="ACCESS_TIME",
        access_time=rdf_file_finder.FileFinderAccessTimeCondition(
            max_last_access_time=change_time))
    self.RunAndCheck(
        paths,
        conditions=[access_time_condition],
        expected=["dpkg.log", "dpkg_false.log"],
        unexpected=["auth.log"],
        base_path=test_dir)
    # Now just the file from 2022 (accessed after the cutoff).
    access_time_condition = rdf_file_finder.FileFinderCondition(
        condition_type="ACCESS_TIME",
        access_time=rdf_file_finder.FileFinderAccessTimeCondition(
            min_last_access_time=change_time))
    self.RunAndCheck(
        paths,
        conditions=[access_time_condition],
        expected=["auth.log"],
        unexpected=["dpkg.log", "dpkg_false.log"],
        base_path=test_dir)
def testInodeChangeTimeCondition(self):
  """Checks min/max inode-change-time conditions with stubbed lstat times."""
  with utils.Stubber(os, "lstat", MyStat):
    test_dir = self._PrepareTimestampedFiles()
    # We have one "old" file, auth.log, and two "new" ones, dpkg*.
    paths = [test_dir + "/{dpkg.log,dpkg_false.log,auth.log}"]
    # Check we can get the auth log only (huge ctime).
    change_time = rdfvalue.RDFDatetime.FromHumanReadable("2020-01-01")
    ichange_time_condition = rdf_file_finder.FileFinderCondition(
        condition_type="INODE_CHANGE_TIME",
        inode_change_time=rdf_file_finder.FileFinderInodeChangeTimeCondition(
            min_last_inode_change_time=change_time))
    self.RunAndCheck(
        paths,
        conditions=[ichange_time_condition],
        expected=["auth.log"],
        unexpected=["dpkg.log", "dpkg_false.log"],
        base_path=test_dir)
    # Now just the others (ctime before the cutoff).
    ichange_time_condition = rdf_file_finder.FileFinderCondition(
        condition_type="INODE_CHANGE_TIME",
        inode_change_time=rdf_file_finder.FileFinderInodeChangeTimeCondition(
            max_last_inode_change_time=change_time))
    self.RunAndCheck(
        paths,
        conditions=[ichange_time_condition],
        expected=["dpkg.log", "dpkg_false.log"],
        unexpected=["auth.log"],
        base_path=test_dir)
def testSizeCondition(self):
  """Checks min/max file-size conditions against fixture log files."""
  test_dir = self._PrepareTimestampedFiles()
  # We have one "old" file, auth.log, and two "new" ones, dpkg*.
  paths = [test_dir + "/{dpkg.log,dpkg_false.log,auth.log}"]
  # Auth.log is 770 bytes, the other two ~620 each.
  size_condition = rdf_file_finder.FileFinderCondition(
      condition_type="SIZE",
      size=rdf_file_finder.FileFinderSizeCondition(min_file_size=700))
  self.RunAndCheck(
      paths,
      conditions=[size_condition],
      expected=["auth.log"],
      unexpected=["dpkg.log", "dpkg_false.log"],
      base_path=test_dir)
  # Flip the bound: now only the smaller files pass.
  size_condition = rdf_file_finder.FileFinderCondition(
      condition_type="SIZE",
      size=rdf_file_finder.FileFinderSizeCondition(max_file_size=700))
  self.RunAndCheck(
      paths,
      conditions=[size_condition],
      expected=["dpkg.log", "dpkg_false.log"],
      unexpected=["auth.log"],
      base_path=test_dir)
def testXDEV(self):
  """Checks the xdev (cross-device) recursion policy with stubbed mounts."""
  test_dir = os.path.join(self.temp_dir, "xdev_test")
  local_dev_dir = os.path.join(test_dir, "local_dev")
  net_dev_dir = os.path.join(test_dir, "net_dev")
  os.mkdir(test_dir)
  os.mkdir(local_dev_dir)
  os.mkdir(net_dev_dir)
  local_file = os.path.join(local_dev_dir, "local_file")
  net_file = os.path.join(net_dev_dir, "net_file")
  with open(local_file, "wb") as fd:
    fd.write("local_data")
  with open(net_file, "wb") as fd:
    fd.write("net_data")
  # Pretend each subdirectory is its own mount point; only local_dev is
  # backed by a local device.
  all_mountpoints = [local_dev_dir, net_dev_dir, "/some/other/dir"]
  local_mountpoints = [local_dev_dir]
  def MyDiskPartitions(all=False):  # pylint: disable=redefined-builtin
    # psutil.disk_partitions(all=True) lists every mount; all=False only
    # the physical/local ones.
    mp = collections.namedtuple("MountPoint", ["mountpoint"])
    if all:
      return [mp(mountpoint=m) for m in all_mountpoints]
    else:
      return [mp(mountpoint=m) for m in local_mountpoints]
  with utils.Stubber(psutil, "disk_partitions", MyDiskPartitions):
    paths = [test_dir + "/**5"]
    # ALWAYS: recursion crosses every device boundary.
    self.RunAndCheck(
        paths,
        expected=[
            "local_dev", "local_dev/local_file", "net_dev", "net_dev/net_file"
        ],
        unexpected=[],
        base_path=test_dir,
        xdev="ALWAYS")
    # LOCAL: descend only into locally-mounted devices.
    self.RunAndCheck(
        paths,
        expected=["local_dev", "local_dev/local_file", "net_dev"],
        unexpected=["net_dev/net_file"],
        base_path=test_dir,
        xdev="LOCAL")
    # NEVER: never cross a device boundary at all.
    self.RunAndCheck(
        paths,
        expected=["local_dev", "net_dev"],
        unexpected=["local_dev/local_file", "net_dev/net_file"],
        base_path=test_dir,
        xdev="NEVER")
class RegexMatcherTest(unittest.TestCase):
  """Tests for `client_file_finder.RegexMatcher`."""

  @staticmethod
  def _RegexMatcher(pattern):
    """Builds a RegexMatcher from a plain pattern string."""
    regex = rdf_standard.RegularExpression(pattern)
    return client_file_finder.RegexMatcher(regex)

  def _AssertSpanAt(self, span, begin, end):
    """Asserts that a match span is truthy and covers [begin, end)."""
    self.assertTrue(span)
    self.assertEqual(span.begin, begin)
    self.assertEqual(span.end, end)

  def testMatchLiteral(self):
    matcher = self._RegexMatcher("foo")
    self._AssertSpanAt(matcher.Match("foobar", 0), 0, 3)
    self._AssertSpanAt(matcher.Match("foobarfoobar", 2), 6, 9)

  def testNoMatchLiteral(self):
    matcher = self._RegexMatcher("baz")
    self.assertFalse(matcher.Match("foobar", 0))
    self.assertFalse(matcher.Match("foobazbar", 5))

  def testMatchWildcard(self):
    matcher = self._RegexMatcher("foo.*bar")
    self._AssertSpanAt(matcher.Match("foobar", 0), 0, 6)
    self._AssertSpanAt(matcher.Match("quuxfoobazbarnorf", 2), 4, 13)

  def testMatchRepeated(self):
    matcher = self._RegexMatcher("qu+x")
    self._AssertSpanAt(matcher.Match("quuuux", 0), 0, 6)
    self.assertFalse(matcher.Match("qx", 0))
    self.assertFalse(matcher.Match("qvvvvx", 0))
class LiteralMatcherTest(unittest.TestCase):
  """Tests for `client_file_finder.LiteralMatcher`."""

  def _AssertSpanAt(self, span, begin, end):
    """Asserts that a match span is truthy and covers [begin, end)."""
    self.assertTrue(span)
    self.assertEqual(span.begin, begin)
    self.assertEqual(span.end, end)

  def testMatchLiteral(self):
    matcher = client_file_finder.LiteralMatcher("bar")
    self._AssertSpanAt(matcher.Match("foobarbaz", 0), 3, 6)
    self._AssertSpanAt(matcher.Match("barbarbar", 0), 0, 3)
    self._AssertSpanAt(matcher.Match("barbarbar", 4), 6, 9)

  def testNoMatchLiteral(self):
    matcher = client_file_finder.LiteralMatcher("norf")
    for data, offset in [("quux", 0), ("norf", 2), ("quuxnorf", 5)]:
      self.assertFalse(matcher.Match(data, offset))
class ConditionTestMixin(object):
  """Mixin providing a per-test scratch file for content-condition tests."""

  def setUp(self):
    super(ConditionTestMixin, self).setUp()
    # Each test writes its fixture data into this fresh temporary file.
    self.temp_filepath = test_lib.TempFilePath()

  def tearDown(self):
    super(ConditionTestMixin, self).tearDown()
    os.remove(self.temp_filepath)
@unittest.skipIf(platform.system() == "Windows", "requires Unix-like system")
class MetadataConditionTestMixin(ConditionTestMixin):
  """Adds stat and touch(1) helpers for metadata-based condition tests."""

  def Stat(self):
    """Stats the scratch file without following symlinks."""
    return utils.Stat(self.temp_filepath, follow_symlink=False)

  def Touch(self, mode, date):
    """Sets the scratch file's mtime (`-m`) or atime (`-a`) via touch(1)."""
    self.assertIn(mode, ["-m", "-a"])
    result = subprocess.call(["touch", mode, "-t", date, self.temp_filepath])
    # Sanity check in case something is wrong with the test.
    self.assertEqual(result, 0)
class ModificationTimeConditionTest(MetadataConditionTestMixin,
                                    unittest.TestCase):
  """Tests for `client_file_finder.ModificationTimeCondition`."""

  def testDefault(self):
    # Without bounds, any modification time passes the condition.
    params = rdf_file_finder.FileFinderCondition()
    condition = client_file_finder.ModificationTimeCondition(params)
    self.Touch("-m", "198309121200")  # 1983-09-12 12:00
    self.assertTrue(condition.Check(self.Stat()))
    self.Touch("-m", "201710020815")  # 2017-10-02 8:15
    self.assertTrue(condition.Check(self.Stat()))

  def testMinTime(self):
    time = rdfvalue.RDFDatetime.FromHumanReadable("2017-12-24 19:00:00")
    params = rdf_file_finder.FileFinderCondition()
    params.modification_time.min_last_modified_time = time
    condition = client_file_finder.ModificationTimeCondition(params)
    # Modified before the minimum: rejected.
    self.Touch("-m", "201712240100")  # 2017-12-24 1:30
    self.assertFalse(condition.Check(self.Stat()))
    self.Touch("-m", "201806141700")  # 2018-06-14 17:00
    self.assertTrue(condition.Check(self.Stat()))

  def testMaxTime(self):
    time = rdfvalue.RDFDatetime.FromHumanReadable("2125-12-28 18:45")
    params = rdf_file_finder.FileFinderCondition()
    params.modification_time.max_last_modified_time = time
    condition = client_file_finder.ModificationTimeCondition(params)
    self.Touch("-m", "211811111200")  # 2118-11-11 12:00
    self.assertTrue(condition.Check(self.Stat()))
    # Modified after the maximum: rejected.
    self.Touch("-m", "222510201500")  # 2225-10-20 15:00
    self.assertFalse(condition.Check(self.Stat()))
class AccessTimeConditionTest(MetadataConditionTestMixin, unittest.TestCase):
  """Tests for `client_file_finder.AccessTimeCondition`."""

  def testDefault(self):
    # Without bounds, any access time passes the condition.
    params = rdf_file_finder.FileFinderCondition()
    condition = client_file_finder.AccessTimeCondition(params)
    self.Touch("-a", "241007151200")  # 2410-07-15 12:00
    self.assertTrue(condition.Check(self.Stat()))
    self.Touch("-a", "201005160745")  # 2010-05-16 7:45
    self.assertTrue(condition.Check(self.Stat()))

  def testRange(self):
    min_time = rdfvalue.RDFDatetime.FromHumanReadable("2156-01-27")
    max_time = rdfvalue.RDFDatetime.FromHumanReadable("2191-12-05")
    params = rdf_file_finder.FileFinderCondition()
    params.access_time.min_last_access_time = min_time
    params.access_time.max_last_access_time = max_time
    condition = client_file_finder.AccessTimeCondition(params)
    # Before the range: rejected.
    self.Touch("-a", "215007280000")  # 2150-07-28 0:00
    self.assertFalse(condition.Check(self.Stat()))
    # Inside the range: accepted.
    self.Touch("-a", "219101010000")  # 2191-01-01 0:00
    self.assertTrue(condition.Check(self.Stat()))
    # After the range: rejected.
    self.Touch("-a", "221003010000")  # 2210-03-01 0:00
    self.assertFalse(condition.Check(self.Stat()))
class SizeConditionTest(MetadataConditionTestMixin, unittest.TestCase):
  """Tests for `client_file_finder.SizeCondition`."""

  def testDefault(self):
    # Without bounds, any size (including empty) passes the condition.
    params = rdf_file_finder.FileFinderCondition()
    condition = client_file_finder.SizeCondition(params)
    with open(self.temp_filepath, "wb") as fd:
      fd.write("1234567")
    self.assertTrue(condition.Check(self.Stat()))
    with open(self.temp_filepath, "wb") as fd:
      fd.write("")
    self.assertTrue(condition.Check(self.Stat()))

  def testRange(self):
    # Accept only files whose size is within [2, 6] bytes inclusive.
    params = rdf_file_finder.FileFinderCondition()
    params.size.min_file_size = 2
    params.size.max_file_size = 6
    condition = client_file_finder.SizeCondition(params)
    with open(self.temp_filepath, "wb") as fd:
      fd.write("1")
    self.assertFalse(condition.Check(self.Stat()))
    with open(self.temp_filepath, "wb") as fd:
      fd.write("12")
    self.assertTrue(condition.Check(self.Stat()))
    with open(self.temp_filepath, "wb") as fd:
      fd.write("1234")
    self.assertTrue(condition.Check(self.Stat()))
    with open(self.temp_filepath, "wb") as fd:
      fd.write("123456")
    self.assertTrue(condition.Check(self.Stat()))
    with open(self.temp_filepath, "wb") as fd:
      fd.write("1234567")
    self.assertFalse(condition.Check(self.Stat()))
class ExtFlagsConditionTest(MetadataConditionTestMixin, unittest.TestCase):
  """Tests for the extended-flags condition.

  Exercises macOS BSD flags (set via chflags(1)) and Linux inode flags
  (set via chattr(1)); each test skips on the platform it does not target.
  """

  # https://github.com/apple/darwin-xnu/blob/master/bsd/sys/stat.h
  UF_NODUMP = 0x00000001
  UF_IMMUTABLE = 0x00000002
  UF_HIDDEN = 0x00008000

  # https://github.com/torvalds/linux/blob/master/include/uapi/linux/fs.h
  FS_COMPR_FL = 0x00000004
  FS_IMMUTABLE_FL = 0x00000010
  FS_NODUMP_FL = 0x00000040

  def testDefault(self):
    # An empty condition matches any file.
    params = rdf_file_finder.FileFinderCondition()
    condition = client_file_finder.ExtFlagsCondition(params)
    self.assertTrue(condition.Check(self.Stat()))

  def testNoMatchOsxBitsSet(self):
    # Only one of the two required bits is set, so the check must fail.
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.osx_bits_set = self.UF_IMMUTABLE | self.UF_NODUMP
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chflags(["nodump"])
    self.assertFalse(condition.Check(self.Stat()))

  def testNoMatchOsxBitsUnset(self):
    # A bit required to be unset is actually set, so the check must fail.
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.osx_bits_unset = self.UF_NODUMP | self.UF_HIDDEN
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chflags(["hidden"])
    self.assertFalse(condition.Check(self.Stat()))

  def testNoMatchLinuxBitsSet(self):
    # The immutable bit is never set on the scratch file.
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.linux_bits_set = self.FS_IMMUTABLE_FL
    condition = client_file_finder.ExtFlagsCondition(params)
    self.assertFalse(condition.Check(self.Stat()))

  def testNoMatchLinuxBitsUnset(self):
    # The compressed bit is set by chattr but required to be unset.
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.linux_bits_unset = self.FS_COMPR_FL
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chattr(["+c", "+d"])
    self.assertFalse(condition.Check(self.Stat()))

  def testMatchOsxBitsSet(self):
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.osx_bits_set = self.UF_NODUMP | self.UF_HIDDEN
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chflags(["nodump", "hidden", "uappend"])
    try:
      self.assertTrue(condition.Check(self.Stat()))
    finally:
      # Make the test file deletable.
      self._Chflags(["nouappend"])

  def testMatchLinuxBitsSet(self):
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.linux_bits_set = self.FS_COMPR_FL | self.FS_NODUMP_FL
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chattr(["+c", "+d"])
    self.assertTrue(condition.Check(self.Stat()))

  def testMatchOsxBitsUnset(self):
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.osx_bits_unset = self.UF_NODUMP | self.UF_IMMUTABLE
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chflags(["hidden", "uappend"])
    try:
      self.assertTrue(condition.Check(self.Stat()))
    finally:
      # Make the test file deletable.
      self._Chflags(["nouappend"])

  def testMatchLinuxBitsMixedOsxNoop(self):
    pass

  def testMatchLinuxBitsUnset(self):
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.linux_bits_unset = self.FS_IMMUTABLE_FL
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chattr(["+c", "+d"])
    self.assertTrue(condition.Check(self.Stat()))

  def testMatchOsxBitsMixed(self):
    # Set- and unset-requirements for both platforms at once.
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.osx_bits_set = self.UF_NODUMP
    params.ext_flags.osx_bits_unset = self.UF_HIDDEN
    params.ext_flags.linux_bits_unset = self.FS_NODUMP_FL
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chflags(["nodump", "uappend"])
    try:
      self.assertTrue(condition.Check(self.Stat()))
    finally:
      # Make the test file deletable.
      self._Chflags(["nouappend"])

  def testMatchLinuxBitsMixed(self):
    params = rdf_file_finder.FileFinderCondition()
    params.ext_flags.linux_bits_set = self.FS_NODUMP_FL
    params.ext_flags.linux_bits_unset = self.FS_COMPR_FL
    params.ext_flags.osx_bits_unset = self.UF_IMMUTABLE
    condition = client_file_finder.ExtFlagsCondition(params)
    self._Chattr(["+d"])
    self.assertTrue(condition.Check(self.Stat()))

  def _Chattr(self, args):
    """Applies chattr(1) flags to the scratch file; skips if unavailable."""
    if platform.system() != "Linux":
      raise unittest.SkipTest("requires Linux")
    if subprocess.call(["which", "chattr"]) != 0:
      raise unittest.SkipTest("the `chattr` command is not available")
    if subprocess.call(["chattr"] + args + [self.temp_filepath]) != 0:
      reason = "extended attributes are not supported by filesystem"
      raise unittest.SkipTest(reason)

  def _Chflags(self, args):
    """Applies chflags(1) flags to the scratch file; skips off macOS."""
    if platform.system() != "Darwin":
      raise unittest.SkipTest("requires macOS")
    subprocess.check_call(["chflags", ",".join(args), self.temp_filepath])
# TODO(hanuszczak): Write tests for the metadata change condition.
class LiteralMatchConditionTest(ConditionTestMixin, unittest.TestCase):
  """Tests for `client_file_finder.LiteralMatchCondition`."""

  def testNoHits(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("foo bar quux")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_literal_match.literal = "baz"
    params.contents_literal_match.mode = "ALL_HITS"
    condition = client_file_finder.LiteralMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    self.assertFalse(results)

  def testSomeHits(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("foo bar foo")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_literal_match.literal = "foo"
    params.contents_literal_match.mode = "ALL_HITS"
    condition = client_file_finder.LiteralMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # Both occurrences are reported with exact offsets and lengths.
    self.assertEqual(len(results), 2)
    self.assertEqual(results[0].data, "foo")
    self.assertEqual(results[0].offset, 0)
    self.assertEqual(results[0].length, 3)
    self.assertEqual(results[1].data, "foo")
    self.assertEqual(results[1].offset, 8)
    self.assertEqual(results[1].length, 3)

  def testFirstHit(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("bar foo baz foo")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_literal_match.literal = "foo"
    params.contents_literal_match.mode = "FIRST_HIT"
    condition = client_file_finder.LiteralMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # FIRST_HIT stops after the first occurrence.
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].data, "foo")
    self.assertEqual(results[0].offset, 4)
    self.assertEqual(results[0].length, 3)

  def testContext(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("foo foo foo")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_literal_match.literal = "foo"
    params.contents_literal_match.mode = "ALL_HITS"
    params.contents_literal_match.bytes_before = 3
    params.contents_literal_match.bytes_after = 2
    condition = client_file_finder.LiteralMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # Context bytes are clamped at the file boundaries, so the first and
    # last buffers are shorter than the middle one.
    self.assertEqual(len(results), 3)
    self.assertEqual(results[0].data, "foo f")
    self.assertEqual(results[0].offset, 0)
    self.assertEqual(results[0].length, 5)
    self.assertEqual(results[1].data, "oo foo f")
    self.assertEqual(results[1].offset, 1)
    self.assertEqual(results[1].length, 8)
    self.assertEqual(results[2].data, "oo foo")
    self.assertEqual(results[2].offset, 5)
    self.assertEqual(results[2].length, 6)

  def testStartOffset(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("oooooooo")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_literal_match.literal = "ooo"
    params.contents_literal_match.mode = "ALL_HITS"
    params.contents_literal_match.start_offset = 2
    condition = client_file_finder.LiteralMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # Matching only begins at the start offset; hits are non-overlapping.
    self.assertEqual(len(results), 2)
    self.assertEqual(results[0].data, "ooo")
    self.assertEqual(results[0].offset, 2)
    self.assertEqual(results[0].length, 3)
    self.assertEqual(results[1].data, "ooo")
    self.assertEqual(results[1].offset, 5)
    self.assertEqual(results[1].length, 3)
# NOTE(review): class name is missing the conventional "Test" suffix used by
# its siblings (e.g. LiteralMatchConditionTest); unittest still discovers it
# as a TestCase subclass, so it is left unchanged here.
class RegexMatchCondition(ConditionTestMixin, unittest.TestCase):
  """Tests for `client_file_finder.RegexMatchCondition`."""

  def testNoHits(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("foo bar quux")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_regex_match.regex = "\\d+"
    params.contents_regex_match.mode = "FIRST_HIT"
    condition = client_file_finder.RegexMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    self.assertFalse(results)

  def testSomeHits(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("foo 7 bar 49 baz343")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_regex_match.regex = "\\d+"
    params.contents_regex_match.mode = "ALL_HITS"
    condition = client_file_finder.RegexMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # Every number run is reported with its exact offset and length.
    self.assertEqual(len(results), 3)
    self.assertEqual(results[0].data, "7")
    self.assertEqual(results[0].offset, 4)
    self.assertEqual(results[0].length, 1)
    self.assertEqual(results[1].data, "49")
    self.assertEqual(results[1].offset, 10)
    self.assertEqual(results[1].length, 2)
    self.assertEqual(results[2].data, "343")
    self.assertEqual(results[2].offset, 16)
    self.assertEqual(results[2].length, 3)

  def testFirstHit(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("4 8 15 16 23 42 foo 108 bar")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_regex_match.regex = "[a-z]+"
    params.contents_regex_match.mode = "FIRST_HIT"
    condition = client_file_finder.RegexMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # FIRST_HIT stops after the first match.
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].data, "foo")
    self.assertEqual(results[0].offset, 16)
    self.assertEqual(results[0].length, 3)

  def testContext(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("foobarbazbaaarquux")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_regex_match.regex = "ba+r"
    params.contents_regex_match.mode = "ALL_HITS"
    params.contents_regex_match.bytes_before = 3
    params.contents_regex_match.bytes_after = 4
    condition = client_file_finder.RegexMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # Buffers include up to 3 bytes before and 4 after each match, clamped
    # at the file boundaries.
    self.assertEqual(len(results), 2)
    self.assertEqual(results[0].data, "foobarbazb")
    self.assertEqual(results[0].offset, 0)
    self.assertEqual(results[0].length, 10)
    self.assertEqual(results[1].data, "bazbaaarquux")
    self.assertEqual(results[1].offset, 6)
    self.assertEqual(results[1].length, 12)

  def testStartOffset(self):
    with open(self.temp_filepath, "wb") as fd:
      fd.write("ooooooo")
    params = rdf_file_finder.FileFinderCondition()
    params.contents_regex_match.regex = "o+"
    params.contents_regex_match.mode = "FIRST_HIT"
    params.contents_regex_match.start_offset = 3
    condition = client_file_finder.RegexMatchCondition(params)
    results = list(condition.Search(self.temp_filepath))
    # Matching starts only at the given offset into the file.
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].data, "oooo")
    self.assertEqual(results[0].offset, 3)
    self.assertEqual(results[0].length, 4)
def main(argv):
  """Test-module entry point; delegates to the GRR test runner."""
  test_lib.main(argv)
if __name__ == "__main__":
  # Run the tests through GRR's flag-aware startup wrapper.
  flags.StartMain(main)
| [
"grr.client.client_actions.file_finder.RegexMatcher",
"grr.test_lib.test_lib.TempFilePath",
"grr.lib.rdfvalues.file_finder.FileFinderAccessTimeCondition",
"grr.lib.rdfvalues.file_finder.FileFinderDownloadActionOptions",
"hashlib.sha1",
"os.remove",
"os.listdir",
"grr.client.client_actions.file_finder.... | [((15661, 15715), 'mock.patch.object', 'mock.patch.object', (['comms.GRRClientWorker', '"""UploadFile"""'], {}), "(comms.GRRClientWorker, 'UploadFile')\n", (15678, 15715), False, 'import mock\n'), ((16044, 16098), 'mock.patch.object', 'mock.patch.object', (['comms.GRRClientWorker', '"""UploadFile"""'], {}), "(comms.GRRClientWorker, 'UploadFile')\n", (16061, 16098), False, 'import mock\n'), ((16458, 16512), 'mock.patch.object', 'mock.patch.object', (['comms.GRRClientWorker', '"""UploadFile"""'], {}), "(comms.GRRClientWorker, 'UploadFile')\n", (16475, 16512), False, 'import mock\n'), ((45455, 45474), 'grr.test_lib.test_lib.main', 'test_lib.main', (['argv'], {}), '(argv)\n', (45468, 45474), False, 'from grr.test_lib import test_lib\n'), ((45506, 45527), 'grr.lib.flags.StartMain', 'flags.StartMain', (['main'], {}), '(main)\n', (45521, 45527), False, 'from grr.lib import flags\n'), ((962, 981), 'os.stat_result', 'os.stat_result', (['res'], {}), '(res)\n', (976, 981), False, 'import os\n'), ((1140, 1179), 'grr.lib.rdfvalues.file_finder.FileFinderAction.Stat', 'rdf_file_finder.FileFinderAction.Stat', ([], {}), '()\n', (1177, 1179), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((2131, 2176), 'os.path.join', 'os.path.join', (['self.base_path', '"""profiles/v1.0"""'], {}), "(self.base_path, 'profiles/v1.0')\n", (2143, 2176), False, 'import os\n'), ((3477, 3517), 'glob.glob', 'glob.glob', (["(self.base_path + '/rekall*gz')"], {}), "(self.base_path + '/rekall*gz')\n", (3486, 3517), False, 'import glob\n'), ((7183, 7224), 'os.path.join', 'os.path.join', (['self.base_path', '"""searching"""'], {}), "(self.base_path, 'searching')\n", (7195, 7224), False, 'import os\n'), ((7240, 7281), 'os.path.join', 'os.path.join', (['self.temp_dir', '"""times_test"""'], {}), "(self.temp_dir, 'times_test')\n", (7252, 7281), False, 'import os\n'), ((7286, 7304), 'os.mkdir', 'os.mkdir', (['test_dir'], {}), '(test_dir)\n', 
(7294, 7304), False, 'import os\n'), ((8154, 8195), 'os.path.join', 'os.path.join', (['self.base_path', '"""searching"""'], {}), "(self.base_path, 'searching')\n", (8166, 8195), False, 'import os\n'), ((9387, 9428), 'os.path.join', 'os.path.join', (['self.base_path', '"""searching"""'], {}), "(self.base_path, 'searching')\n", (9399, 9428), False, 'import os\n'), ((11321, 11362), 'os.path.join', 'os.path.join', (['self.base_path', '"""searching"""'], {}), "(self.base_path, 'searching')\n", (11333, 11362), False, 'import os\n'), ((12466, 12507), 'os.path.join', 'os.path.join', (['self.base_path', '"""searching"""'], {}), "(self.base_path, 'searching')\n", (12478, 12507), False, 'import os\n'), ((13475, 13570), 'grr.lib.rdfvalues.file_finder.FileFinderAction', 'rdf_file_finder.FileFinderAction', ([], {'action_type': 'rdf_file_finder.FileFinderAction.Action.HASH'}), '(action_type=rdf_file_finder.\n FileFinderAction.Action.HASH)\n', (13507, 13570), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((15355, 15398), 'grr.lib.rdfvalues.file_finder.FileFinderAction.Download', 'rdf_file_finder.FileFinderAction.Download', ([], {}), '()\n', (15396, 15398), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((15562, 15603), 'os.path.join', 'os.path.join', (['self.base_path', '"""hello.exe"""'], {}), "(self.base_path, 'hello.exe')\n", (15574, 15603), False, 'import os\n'), ((16154, 16247), 'grr.lib.rdfvalues.file_finder.FileFinderDownloadActionOptions', 'rdf_file_finder.FileFinderDownloadActionOptions', ([], {'max_size': '(0)', 'oversized_file_policy': '"""SKIP"""'}), "(max_size=0,\n oversized_file_policy='SKIP')\n", (16201, 16247), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((16572, 16680), 'grr.lib.rdfvalues.file_finder.FileFinderDownloadActionOptions', 'rdf_file_finder.FileFinderDownloadActionOptions', ([], {'max_size': '(42)', 'oversized_file_policy': '"""DOWNLOAD_TRUNCATED"""'}), "(max_size=42,\n 
oversized_file_policy='DOWNLOAD_TRUNCATED')\n", (16619, 16680), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((17292, 17345), 'grr.lib.rdfvalues.file_finder.FileFinderStatActionOptions', 'rdf_file_finder.FileFinderStatActionOptions', ([], {}), '(**kwargs)\n', (17335, 17345), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((17357, 17425), 'grr.lib.rdfvalues.file_finder.FileFinderAction', 'rdf_file_finder.FileFinderAction', ([], {'action_type': 'action_type', 'stat': 'opts'}), '(action_type=action_type, stat=opts)\n', (17389, 17425), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((19983, 20027), 'os.path.join', 'os.path.join', (['self.temp_dir', '"""lnk_stat_test"""'], {}), "(self.temp_dir, 'lnk_stat_test')\n", (19995, 20027), False, 'import os\n'), ((20038, 20067), 'os.path.join', 'os.path.join', (['test_dir', '"""lnk"""'], {}), "(test_dir, 'lnk')\n", (20050, 20067), False, 'import os\n'), ((20085, 20121), 'os.path.join', 'os.path.join', (['test_dir', '"""lnk_target"""'], {}), "(test_dir, 'lnk_target')\n", (20097, 20121), False, 'import os\n'), ((20127, 20145), 'os.mkdir', 'os.mkdir', (['test_dir'], {}), '(test_dir)\n', (20135, 20145), False, 'import os\n'), ((20216, 20243), 'os.symlink', 'os.symlink', (['lnk_target', 'lnk'], {}), '(lnk_target, lnk)\n', (20226, 20243), False, 'import os\n'), ((25624, 25664), 'os.path.join', 'os.path.join', (['self.temp_dir', '"""xdev_test"""'], {}), "(self.temp_dir, 'xdev_test')\n", (25636, 25664), False, 'import os\n'), ((25685, 25720), 'os.path.join', 'os.path.join', (['test_dir', '"""local_dev"""'], {}), "(test_dir, 'local_dev')\n", (25697, 25720), False, 'import os\n'), ((25739, 25772), 'os.path.join', 'os.path.join', (['test_dir', '"""net_dev"""'], {}), "(test_dir, 'net_dev')\n", (25751, 25772), False, 'import os\n'), ((25778, 25796), 'os.mkdir', 'os.mkdir', (['test_dir'], {}), '(test_dir)\n', (25786, 25796), False, 'import os\n'), ((25801, 25824), 
'os.mkdir', 'os.mkdir', (['local_dev_dir'], {}), '(local_dev_dir)\n', (25809, 25824), False, 'import os\n'), ((25829, 25850), 'os.mkdir', 'os.mkdir', (['net_dev_dir'], {}), '(net_dev_dir)\n', (25837, 25850), False, 'import os\n'), ((25869, 25910), 'os.path.join', 'os.path.join', (['local_dev_dir', '"""local_file"""'], {}), "(local_dev_dir, 'local_file')\n", (25881, 25910), False, 'import os\n'), ((25926, 25963), 'os.path.join', 'os.path.join', (['net_dev_dir', '"""net_file"""'], {}), "(net_dev_dir, 'net_file')\n", (25938, 25963), False, 'import os\n'), ((27351, 27389), 'grr.lib.rdfvalues.standard.RegularExpression', 'rdf_standard.RegularExpression', (['string'], {}), '(string)\n', (27381, 27389), True, 'from grr.lib.rdfvalues import standard as rdf_standard\n'), ((27401, 27439), 'grr.client.client_actions.file_finder.RegexMatcher', 'client_file_finder.RegexMatcher', (['regex'], {}), '(regex)\n', (27432, 27439), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((28782, 28822), 'grr.client.client_actions.file_finder.LiteralMatcher', 'client_file_finder.LiteralMatcher', (['"""bar"""'], {}), "('bar')\n", (28815, 28822), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((29284, 29325), 'grr.client.client_actions.file_finder.LiteralMatcher', 'client_file_finder.LiteralMatcher', (['"""norf"""'], {}), "('norf')\n", (29317, 29325), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((29647, 29670), 'grr.test_lib.test_lib.TempFilePath', 'test_lib.TempFilePath', ([], {}), '()\n', (29668, 29670), False, 'from grr.test_lib import test_lib\n'), ((29745, 29774), 'os.remove', 'os.remove', (['self.temp_filepath'], {}), '(self.temp_filepath)\n', (29754, 29774), False, 'import os\n'), ((29939, 29991), 'grr.lib.utils.Stat', 'utils.Stat', (['self.temp_filepath'], {'follow_symlink': '(False)'}), '(self.temp_filepath, follow_symlink=False)\n', (29949, 29991), False, 'from grr.lib import 
utils\n'), ((30075, 30139), 'subprocess.call', 'subprocess.call', (["['touch', mode, '-t', date, self.temp_filepath]"], {}), "(['touch', mode, '-t', date, self.temp_filepath])\n", (30090, 30139), False, 'import subprocess\n'), ((29794, 29811), 'platform.system', 'platform.system', ([], {}), '()\n', (29809, 29811), False, 'import platform\n'), ((30394, 30431), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (30429, 30431), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((30448, 30500), 'grr.client.client_actions.file_finder.ModificationTimeCondition', 'client_file_finder.ModificationTimeCondition', (['params'], {}), '(params)\n', (30492, 30500), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((30753, 30814), 'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2017-12-24 19:00:00"""'], {}), "('2017-12-24 19:00:00')\n", (30791, 30814), False, 'from grr.lib import rdfvalue\n'), ((30829, 30866), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (30864, 30866), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((30942, 30994), 'grr.client.client_actions.file_finder.ModificationTimeCondition', 'client_file_finder.ModificationTimeCondition', (['params'], {}), '(params)\n', (30986, 30994), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((31248, 31306), 'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2125-12-28 18:45"""'], {}), "('2125-12-28 18:45')\n", (31286, 31306), False, 'from grr.lib import rdfvalue\n'), ((31321, 31358), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (31356, 31358), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((31434, 31486), 
'grr.client.client_actions.file_finder.ModificationTimeCondition', 'client_file_finder.ModificationTimeCondition', (['params'], {}), '(params)\n', (31478, 31486), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((31823, 31860), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (31858, 31860), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((31877, 31923), 'grr.client.client_actions.file_finder.AccessTimeCondition', 'client_file_finder.AccessTimeCondition', (['params'], {}), '(params)\n', (31915, 31923), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((32178, 32230), 'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2156-01-27"""'], {}), "('2156-01-27')\n", (32216, 32230), False, 'from grr.lib import rdfvalue\n'), ((32246, 32298), 'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2191-12-05"""'], {}), "('2191-12-05')\n", (32284, 32298), False, 'from grr.lib import rdfvalue\n'), ((32313, 32350), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (32348, 32350), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((32477, 32523), 'grr.client.client_actions.file_finder.AccessTimeCondition', 'client_file_finder.AccessTimeCondition', (['params'], {}), '(params)\n', (32515, 32523), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((32960, 32997), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (32995, 32997), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((33014, 33054), 'grr.client.client_actions.file_finder.SizeCondition', 'client_file_finder.SizeCondition', (['params'], {}), '(params)\n', (33046, 33054), True, 'from 
grr.client.client_actions import file_finder as client_file_finder\n'), ((33333, 33370), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (33368, 33370), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((33455, 33495), 'grr.client.client_actions.file_finder.SizeCondition', 'client_file_finder.SizeCondition', (['params'], {}), '(params)\n', (33487, 33495), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((34527, 34564), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (34562, 34564), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((34581, 34625), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (34617, 34625), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((34726, 34763), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (34761, 34763), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((34851, 34895), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (34887, 34895), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((35030, 35067), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (35065, 35067), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((35154, 35198), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (35190, 35198), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((35333, 35370), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 
'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (35368, 35370), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((35446, 35490), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (35482, 35490), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((35596, 35633), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (35631, 35633), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((35707, 35751), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (35743, 35751), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((35883, 35920), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (35918, 35920), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((36005, 36049), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (36041, 36049), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((36299, 36336), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (36334, 36336), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((36428, 36472), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (36464, 36472), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((36605, 36642), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (36640, 36642), True, 'from grr.lib.rdfvalues import file_finder as 
rdf_file_finder\n'), ((36732, 36776), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (36768, 36776), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((37018, 37055), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (37053, 37055), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((37133, 37177), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (37169, 37177), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((37310, 37347), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (37345, 37347), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((37526, 37570), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (37562, 37570), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((37812, 37849), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (37847, 37849), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((38035, 38079), 'grr.client.client_actions.file_finder.ExtFlagsCondition', 'client_file_finder.ExtFlagsCondition', (['params'], {}), '(params)\n', (38071, 38079), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((39019, 39056), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (39054, 39056), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((39175, 39223), 'grr.client.client_actions.file_finder.LiteralMatchCondition', 
'client_file_finder.LiteralMatchCondition', (['params'], {}), '(params)\n', (39215, 39223), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((39430, 39467), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (39465, 39467), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((39586, 39634), 'grr.client.client_actions.file_finder.LiteralMatchCondition', 'client_file_finder.LiteralMatchCondition', (['params'], {}), '(params)\n', (39626, 39634), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((40115, 40152), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (40150, 40152), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((40272, 40320), 'grr.client.client_actions.file_finder.LiteralMatchCondition', 'client_file_finder.LiteralMatchCondition', (['params'], {}), '(params)\n', (40312, 40320), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((40665, 40702), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (40700, 40702), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((40922, 40970), 'grr.client.client_actions.file_finder.LiteralMatchCondition', 'client_file_finder.LiteralMatchCondition', (['params'], {}), '(params)\n', (40962, 40970), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((41588, 41625), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (41623, 41625), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((41795, 41843), 'grr.client.client_actions.file_finder.LiteralMatchCondition', 'client_file_finder.LiteralMatchCondition', (['params'], {}), '(params)\n', (41835, 41843), True, 'from 
grr.client.client_actions import file_finder as client_file_finder\n'), ((42387, 42424), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (42422, 42424), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((42539, 42585), 'grr.client.client_actions.file_finder.RegexMatchCondition', 'client_file_finder.RegexMatchCondition', (['params'], {}), '(params)\n', (42577, 42585), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((42800, 42837), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (42835, 42837), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((42951, 42997), 'grr.client.client_actions.file_finder.RegexMatchCondition', 'client_file_finder.RegexMatchCondition', (['params'], {}), '(params)\n', (42989, 42997), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((43620, 43657), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (43655, 43657), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((43774, 43820), 'grr.client.client_actions.file_finder.RegexMatchCondition', 'client_file_finder.RegexMatchCondition', (['params'], {}), '(params)\n', (43812, 43820), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((44173, 44210), 'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (44208, 44210), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((44421, 44467), 'grr.client.client_actions.file_finder.RegexMatchCondition', 'client_file_finder.RegexMatchCondition', (['params'], {}), '(params)\n', (44459, 44467), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((44961, 44998), 
'grr.lib.rdfvalues.file_finder.FileFinderCondition', 'rdf_file_finder.FileFinderCondition', ([], {}), '()\n', (44996, 44998), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((45160, 45206), 'grr.client.client_actions.file_finder.RegexMatchCondition', 'client_file_finder.RegexMatchCondition', (['params'], {}), '(params)\n', (45198, 45206), True, 'from grr.client.client_actions import file_finder as client_file_finder\n'), ((2082, 2108), 'os.listdir', 'os.listdir', (['self.base_path'], {}), '(self.base_path)\n', (2092, 2108), False, 'import os\n'), ((2401, 2426), 'os.listdir', 'os.listdir', (['profiles_path'], {}), '(profiles_path)\n', (2411, 2426), False, 'import os\n'), ((5981, 6020), 'os.path.join', 'os.path.join', (['self.temp_dir', '"""lnk_test"""'], {}), "(self.temp_dir, 'lnk_test')\n", (5993, 6020), False, 'import os\n'), ((6042, 6080), 'os.path.join', 'os.path.join', (['test_dir', '"""contains_lnk"""'], {}), "(test_dir, 'contains_lnk')\n", (6054, 6080), False, 'import os\n'), ((6093, 6126), 'os.path.join', 'os.path.join', (['contains_lnk', '"""lnk"""'], {}), "(contains_lnk, 'lnk')\n", (6105, 6126), False, 'import os\n'), ((6146, 6182), 'os.path.join', 'os.path.join', (['test_dir', '"""lnk_target"""'], {}), "(test_dir, 'lnk_target')\n", (6158, 6182), False, 'import os\n'), ((6211, 6245), 'os.path.join', 'os.path.join', (['lnk_target', '"""target"""'], {}), "(lnk_target, 'target')\n", (6223, 6245), False, 'import os\n'), ((6253, 6271), 'os.mkdir', 'os.mkdir', (['test_dir'], {}), '(test_dir)\n', (6261, 6271), False, 'import os\n'), ((6278, 6300), 'os.mkdir', 'os.mkdir', (['contains_lnk'], {}), '(contains_lnk)\n', (6286, 6300), False, 'import os\n'), ((6307, 6327), 'os.mkdir', 'os.mkdir', (['lnk_target'], {}), '(lnk_target)\n', (6315, 6327), False, 'import os\n'), ((6334, 6361), 'os.symlink', 'os.symlink', (['lnk_target', 'lnk'], {}), '(lnk_target, lnk)\n', (6344, 6361), False, 'import os\n'), ((7374, 7405), 'os.path.join', 
'os.path.join', (['searching_path', 'f'], {}), '(searching_path, f)\n', (7386, 7405), False, 'import os\n'), ((7418, 7443), 'os.path.join', 'os.path.join', (['test_dir', 'f'], {}), '(test_dir, f)\n', (7430, 7443), False, 'import os\n'), ((7450, 7471), 'shutil.copy', 'shutil.copy', (['src', 'dst'], {}), '(src, dst)\n', (7461, 7471), False, 'import shutil\n'), ((10315, 10364), 'os.path.join', 'os.path.join', (['self.base_path', '"""new_places.sqlite"""'], {}), "(self.base_path, 'new_places.sqlite')\n", (10327, 10364), False, 'import os\n'), ((13413, 13454), 'os.path.join', 'os.path.join', (['self.base_path', '"""hello.exe"""'], {}), "(self.base_path, 'hello.exe')\n", (13425, 13454), False, 'import os\n'), ((17533, 17560), 'grr.test_lib.test_lib.AutoTempFilePath', 'test_lib.AutoTempFilePath', ([], {}), '()\n', (17558, 17560), False, 'from grr.test_lib import test_lib\n'), ((17446, 17463), 'platform.system', 'platform.system', ([], {}), '()\n', (17461, 17463), False, 'import platform\n'), ((18234, 18261), 'grr.test_lib.test_lib.AutoTempFilePath', 'test_lib.AutoTempFilePath', ([], {}), '()\n', (18259, 18261), False, 'from grr.test_lib import test_lib\n'), ((19099, 19116), 'platform.system', 'platform.system', ([], {}), '()\n', (19114, 19116), False, 'import platform\n'), ((19404, 19442), 'subprocess.call', 'subprocess.call', (["['which', 'setfattr']"], {}), "(['which', 'setfattr'])\n", (19419, 19442), False, 'import subprocess\n'), ((19461, 19517), 'unittest.SkipTest', 'unittest.SkipTest', (['"""`setfattr` command is not available"""'], {}), "('`setfattr` command is not available')\n", (19478, 19517), False, 'import unittest\n'), ((19525, 19589), 'subprocess.call', 'subprocess.call', (["['setfattr', filepath, '-n', name, '-v', value]"], {}), "(['setfattr', filepath, '-n', name, '-v', value])\n", (19540, 19589), False, 'import subprocess\n'), ((19608, 19676), 'unittest.SkipTest', 'unittest.SkipTest', (['"""extended attributes not supported by filesystem"""'], {}), 
"('extended attributes not supported by filesystem')\n", (19625, 19676), False, 'import unittest\n'), ((19750, 19805), 'subprocess.call', 'subprocess.call', (["['xattr', '-w', name, value, filepath]"], {}), "(['xattr', '-w', name, value, filepath])\n", (19765, 19805), False, 'import subprocess\n'), ((19824, 19878), 'unittest.SkipTest', 'unittest.SkipTest', (['"""extended attributes not supported"""'], {}), "('extended attributes not supported')\n", (19841, 19878), False, 'import unittest\n'), ((20279, 20292), 'os.lstat', 'os.lstat', (['lnk'], {}), '(lnk)\n', (20287, 20292), False, 'import os\n'), ((20319, 20331), 'os.stat', 'os.stat', (['lnk'], {}), '(lnk)\n', (20326, 20331), False, 'import os\n'), ((20505, 20571), 'grr.lib.rdfvalues.file_finder.FileFinderAction.Stat', 'rdf_file_finder.FileFinderAction.Stat', ([], {'resolve_links': 'resolve_links'}), '(resolve_links=resolve_links)\n', (20542, 20571), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((20817, 20851), 'grr.lib.utils.Stubber', 'utils.Stubber', (['os', '"""lstat"""', 'MyStat'], {}), "(os, 'lstat', MyStat)\n", (20830, 20851), False, 'from grr.lib import utils\n'), ((21058, 21110), 'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2020-01-01"""'], {}), "('2020-01-01')\n", (21096, 21110), False, 'from grr.lib import rdfvalue\n'), ((22119, 22153), 'grr.lib.utils.Stubber', 'utils.Stubber', (['os', '"""lstat"""', 'MyStat'], {}), "(os, 'lstat', MyStat)\n", (22132, 22153), False, 'from grr.lib import utils\n'), ((22291, 22343), 'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2020-01-01"""'], {}), "('2020-01-01')\n", (22329, 22343), False, 'from grr.lib import rdfvalue\n'), ((23336, 23370), 'grr.lib.utils.Stubber', 'utils.Stubber', (['os', '"""lstat"""', 'MyStat'], {}), "(os, 'lstat', MyStat)\n", (23349, 23370), False, 'from grr.lib import utils\n'), ((23634, 23686), 
'grr.lib.rdfvalue.RDFDatetime.FromHumanReadable', 'rdfvalue.RDFDatetime.FromHumanReadable', (['"""2020-01-01"""'], {}), "('2020-01-01')\n", (23672, 23686), False, 'from grr.lib import rdfvalue\n'), ((26293, 26345), 'collections.namedtuple', 'collections.namedtuple', (['"""MountPoint"""', "['mountpoint']"], {}), "('MountPoint', ['mountpoint'])\n", (26315, 26345), False, 'import collections\n'), ((26502, 26560), 'grr.lib.utils.Stubber', 'utils.Stubber', (['psutil', '"""disk_partitions"""', 'MyDiskPartitions'], {}), "(psutil, 'disk_partitions', MyDiskPartitions)\n", (26515, 26560), False, 'from grr.lib import utils\n'), ((38192, 38209), 'platform.system', 'platform.system', ([], {}), '()\n', (38207, 38209), False, 'import platform\n'), ((38234, 38269), 'unittest.SkipTest', 'unittest.SkipTest', (['"""requires Linux"""'], {}), "('requires Linux')\n", (38251, 38269), False, 'import unittest\n'), ((38277, 38313), 'subprocess.call', 'subprocess.call', (["['which', 'chattr']"], {}), "(['which', 'chattr'])\n", (38292, 38313), False, 'import subprocess\n'), ((38332, 38390), 'unittest.SkipTest', 'unittest.SkipTest', (['"""the `chattr` command is not available"""'], {}), "('the `chattr` command is not available')\n", (38349, 38390), False, 'import unittest\n'), ((38398, 38455), 'subprocess.call', 'subprocess.call', (["(['chattr'] + args + [self.temp_filepath])"], {}), "(['chattr'] + args + [self.temp_filepath])\n", (38413, 38455), False, 'import subprocess\n'), ((38543, 38568), 'unittest.SkipTest', 'unittest.SkipTest', (['reason'], {}), '(reason)\n', (38560, 38568), False, 'import unittest\n'), ((38605, 38622), 'platform.system', 'platform.system', ([], {}), '()\n', (38620, 38622), False, 'import platform\n'), ((38648, 38683), 'unittest.SkipTest', 'unittest.SkipTest', (['"""requires macOS"""'], {}), "('requires macOS')\n", (38665, 38683), False, 'import unittest\n'), ((1672, 1828), 'grr.lib.rdfvalues.file_finder.FileFinderArgs', 'rdf_file_finder.FileFinderArgs', ([], {'paths': 
'paths', 'action': 'action', 'conditions': 'conditions', 'process_non_regular_files': '(True)', 'follow_links': 'follow_links'}), '(paths=paths, action=action, conditions=\n conditions, process_non_regular_files=True, follow_links=follow_links, **kw\n )\n', (1702, 1828), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((2190, 2235), 'os.path.join', 'os.path.join', (['self.base_path', '"""profiles/v1.0"""'], {}), "(self.base_path, 'profiles/v1.0')\n", (2202, 2235), False, 'import os\n'), ((3539, 3568), 'os.path.basename', 'os.path.basename', (['glob_result'], {}), '(glob_result)\n', (3555, 3568), False, 'import os\n'), ((7064, 7087), 'shutil.rmtree', 'shutil.rmtree', (['test_dir'], {}), '(test_dir)\n', (7077, 7087), False, 'import shutil\n'), ((14240, 14331), 'grr.lib.rdfvalues.file_finder.FileFinderHashActionOptions', 'rdf_file_finder.FileFinderHashActionOptions', ([], {'max_size': '(100)', 'oversized_file_policy': '"""SKIP"""'}), "(max_size=100,\n oversized_file_policy='SKIP')\n", (14283, 14331), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((14630, 14731), 'grr.lib.rdfvalues.file_finder.FileFinderHashActionOptions', 'rdf_file_finder.FileFinderHashActionOptions', ([], {'max_size': '(100)', 'oversized_file_policy': '"""HASH_TRUNCATED"""'}), "(max_size=100,\n oversized_file_policy='HASH_TRUNCATED')\n", (14673, 14731), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((15525, 15542), 'grr.lib.rdfvalues.crypto.Hash', 'rdf_crypto.Hash', ([], {}), '()\n', (15540, 15542), True, 'from grr.lib.rdfvalues import crypto as rdf_crypto\n'), ((17588, 17624), 'subprocess.call', 'subprocess.call', (["['which', 'chattr']"], {}), "(['which', 'chattr'])\n", (17603, 17624), False, 'import subprocess\n'), ((17645, 17699), 'unittest.SkipTest', 'unittest.SkipTest', (['"""`chattr` command is not available"""'], {}), "('`chattr` command is not available')\n", (17662, 17699), False, 'import unittest\n'), ((17709, 
17757), 'subprocess.call', 'subprocess.call', (["['chattr', '+c', temp_filepath]"], {}), "(['chattr', '+c', temp_filepath])\n", (17724, 17757), False, 'import subprocess\n'), ((17845, 17870), 'unittest.SkipTest', 'unittest.SkipTest', (['reason'], {}), '(reason)\n', (17862, 17870), False, 'import unittest\n'), ((19188, 19205), 'platform.system', 'platform.system', ([], {}), '()\n', (19203, 19205), False, 'import platform\n'), ((19289, 19328), 'unittest.SkipTest', 'unittest.SkipTest', (['"""unsupported system"""'], {}), "('unsupported system')\n", (19306, 19328), False, 'import unittest\n'), ((24993, 25051), 'grr.lib.rdfvalues.file_finder.FileFinderSizeCondition', 'rdf_file_finder.FileFinderSizeCondition', ([], {'min_file_size': '(700)'}), '(min_file_size=700)\n', (25032, 25051), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((25341, 25399), 'grr.lib.rdfvalues.file_finder.FileFinderSizeCondition', 'rdf_file_finder.FileFinderSizeCondition', ([], {'max_file_size': '(700)'}), '(max_file_size=700)\n', (25380, 25399), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((3972, 3991), 'os.path.splitext', 'os.path.splitext', (['r'], {}), '(r)\n', (3988, 3991), False, 'import os\n'), ((4354, 4373), 'os.path.splitext', 'os.path.splitext', (['r'], {}), '(r)\n', (4370, 4373), False, 'import os\n'), ((9051, 9091), 'os.path.join', 'os.path.join', (['searching_path', '"""auth.log"""'], {}), "(searching_path, 'auth.log')\n", (9063, 9091), False, 'import os\n'), ((12228, 12268), 'os.path.join', 'os.path.join', (['searching_path', '"""auth.log"""'], {}), "(searching_path, 'auth.log')\n", (12240, 12268), False, 'import os\n'), ((13859, 13876), 'hashlib.md5', 'hashlib.md5', (['data'], {}), '(data)\n', (13870, 13876), False, 'import hashlib\n'), ((13965, 13983), 'hashlib.sha1', 'hashlib.sha1', (['data'], {}), '(data)\n', (13977, 13983), False, 'import hashlib\n'), ((14074, 14094), 'hashlib.sha256', 'hashlib.sha256', (['data'], {}), 
'(data)\n', (14088, 14094), False, 'import hashlib\n'), ((15032, 15049), 'hashlib.md5', 'hashlib.md5', (['data'], {}), '(data)\n', (15043, 15049), False, 'import hashlib\n'), ((15138, 15156), 'hashlib.sha1', 'hashlib.sha1', (['data'], {}), '(data)\n', (15150, 15156), False, 'import hashlib\n'), ((15247, 15267), 'hashlib.sha256', 'hashlib.sha256', (['data'], {}), '(data)\n', (15261, 15267), False, 'import hashlib\n'), ((21259, 21351), 'grr.lib.rdfvalues.file_finder.FileFinderModificationTimeCondition', 'rdf_file_finder.FileFinderModificationTimeCondition', ([], {'max_last_modified_time': 'change_time'}), '(max_last_modified_time=\n change_time)\n', (21310, 21351), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((21758, 21850), 'grr.lib.rdfvalues.file_finder.FileFinderModificationTimeCondition', 'rdf_file_finder.FileFinderModificationTimeCondition', ([], {'min_last_modified_time': 'change_time'}), '(min_last_modified_time=\n change_time)\n', (21809, 21850), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((22517, 22596), 'grr.lib.rdfvalues.file_finder.FileFinderAccessTimeCondition', 'rdf_file_finder.FileFinderAccessTimeCondition', ([], {'max_last_access_time': 'change_time'}), '(max_last_access_time=change_time)\n', (22562, 22596), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((22984, 23063), 'grr.lib.rdfvalues.file_finder.FileFinderAccessTimeCondition', 'rdf_file_finder.FileFinderAccessTimeCondition', ([], {'min_last_access_time': 'change_time'}), '(min_last_access_time=change_time)\n', (23029, 23063), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n'), ((23830, 23925), 'grr.lib.rdfvalues.file_finder.FileFinderInodeChangeTimeCondition', 'rdf_file_finder.FileFinderInodeChangeTimeCondition', ([], {'min_last_inode_change_time': 'change_time'}), '(min_last_inode_change_time\n =change_time)\n', (23880, 23925), True, 'from grr.lib.rdfvalues import file_finder as 
rdf_file_finder\n'), ((24314, 24409), 'grr.lib.rdfvalues.file_finder.FileFinderInodeChangeTimeCondition', 'rdf_file_finder.FileFinderInodeChangeTimeCondition', ([], {'max_last_inode_change_time': 'change_time'}), '(max_last_inode_change_time\n =change_time)\n', (24364, 24409), True, 'from grr.lib.rdfvalues import file_finder as rdf_file_finder\n')] |
import socket
host = 'localhost'
# we need to define encode function for converting string to bytes string
# this will be use for sending/receiving data via socket
# PEP 8 (E731): use `def` instead of assigning a lambda to a name — the
# resulting functions get a proper __name__ and clearer tracebacks.
def encode(text):
    """Convert a text string to bytes for sending over the socket."""
    return text.encode()
def decode(byte_text):
    """Convert bytes received from the socket back into a text string."""
    return byte_text.decode()
def echo_client(port, message="Hello"):
    """Connect to the local echo server on `port`, send `message`, and
    print the echoed data until the whole message has been read back."""
    # Default socket() arguments give a TCP/IPv4 stream socket.
    sock = socket.socket()
    print("Connecting to server ")
    sock.connect((host, port))
    try:
        print("Sending data: {}".format(message))
        # sendall() requires bytes, so the message is encoded first.
        sock.sendall(encode(message))
        # Read the echo back in small chunks until all bytes arrive.
        remaining = len(message)
        while remaining > 0:
            chunk = sock.recv(16)
            remaining -= len(chunk)
            print("Recieved from server: {}".format(decode(chunk)))
    except socket.error as e:
        print("socket error: {}".format(e))
    except Exception as e:
        print("other exception: {}".format(e))
    finally:
        print("Closing connection to server")
        sock.close()
if __name__ == '__main__':
    # Parse command-line options and run the echo client.
    import argparse
    arg_parser = argparse.ArgumentParser(description='Simple TCP echo client')
    arg_parser.add_argument("--port", action="store",
                            dest="port", type=int, required=True)
    arg_parser.add_argument("--message", action="store",
                            dest="message", required=False)
    cli_args = arg_parser.parse_args()
    # Fall back to echo_client's default message when --message is omitted.
    if cli_args.message:
        echo_client(cli_args.port, cli_args.message)
    else:
        echo_client(cli_args.port)
| [
"argparse.ArgumentParser",
"socket.socket"
] | [((472, 487), 'socket.socket', 'socket.socket', ([], {}), '()\n', (485, 487), False, 'import socket\n'), ((1439, 1500), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Simple TCP echo client"""'}), "(description='Simple TCP echo client')\n", (1462, 1500), False, 'import argparse\n')] |
from typing import Optional
from my_collection.paxos.common import NodeId, Router, ProposalId, Value, PrepareRequest, is_majority, PrepareResponse, \
Proposal, ProposeRequest, ProposeResponse, CODE_OK
class Proposer:
    """Paxos proposer: drives the two-phase (Prepare / Propose) protocol
    against a fixed set of acceptors through an async message router."""
    node_id: NodeId
    acceptor_id_list: list[NodeId]
    router: Router
    current_proposal_id: ProposalId  # init {0, node_id}
    def __init__(self, node_id: NodeId, acceptor_id_list: list[NodeId], router: Router):
        self.node_id = node_id
        self.acceptor_id_list = acceptor_id_list
        self.router = router
        # Proposal numbers start at 0 and are tie-broken by node_id.
        self.current_proposal_id = ProposalId(id=0, node_id=node_id)
    async def propose_once(self, value: Value) -> Optional[Value]:
        """Run one round of Paxos proposing `value`.

        Returns the value of the accepted proposal on success (which may
        differ from `value` if a previously accepted proposal was found in
        phase 1), or None if a majority was not reached in either phase.
        """
        proposal_id = self.current_proposal_id
        # NOTE(review): proposal_id aliases self.current_proposal_id — no copy
        # is made — so if ProposalId is mutable the increment below also bumps
        # proposal_id.id before it is sent. Confirm this aliasing is intended.
        self.current_proposal_id.id += 1
        # Phase 1: send Prepare to every acceptor.
        request = PrepareRequest(proposal_id=proposal_id)
        response_list: list[PrepareResponse] = [
            await self.router(acceptor_id, request, PrepareResponse.parse_obj)
            for acceptor_id in self.acceptor_id_list
        ]
        # Keep only successful (non-None, CODE_OK) replies.
        response_list: list[PrepareResponse] = [
            response
            for response in response_list
            if response is not None and response.code == CODE_OK
        ]
        if not is_majority(len(self.acceptor_id_list), len(response_list)):
            return None
        # If any acceptor already accepted a proposal, Paxos requires us to
        # re-propose the value of the highest-numbered one instead of ours.
        accepted_proposal_list = [
            response.proposal
            for response in response_list
            if response.proposal is not None
        ]
        if len(accepted_proposal_list) > 0:
            proposal = max(accepted_proposal_list, key=lambda x: x.id)
        else:
            proposal = Proposal(id=proposal_id, value=value)
        # Phase 2: ask all acceptors to accept the chosen proposal.
        request = ProposeRequest(proposal=proposal)
        response_list: list[ProposeResponse] = [
            await self.router(acceptor_id, request, ProposeResponse.parse_obj)
            for acceptor_id in self.acceptor_id_list
        ]
        response_list: list[ProposeResponse] = [
            response
            for response in response_list
            if response is not None and response.code == CODE_OK
        ]
        if not is_majority(len(self.acceptor_id_list), len(response_list)):
            return None
        return response_list[0].proposal.value
| [
"my_collection.paxos.common.Proposal",
"my_collection.paxos.common.ProposeRequest",
"my_collection.paxos.common.ProposalId",
"my_collection.paxos.common.PrepareRequest"
] | [((589, 622), 'my_collection.paxos.common.ProposalId', 'ProposalId', ([], {'id': '(0)', 'node_id': 'node_id'}), '(id=0, node_id=node_id)\n', (599, 622), False, 'from my_collection.paxos.common import NodeId, Router, ProposalId, Value, PrepareRequest, is_majority, PrepareResponse, Proposal, ProposeRequest, ProposeResponse, CODE_OK\n'), ((798, 837), 'my_collection.paxos.common.PrepareRequest', 'PrepareRequest', ([], {'proposal_id': 'proposal_id'}), '(proposal_id=proposal_id)\n', (812, 837), False, 'from my_collection.paxos.common import NodeId, Router, ProposalId, Value, PrepareRequest, is_majority, PrepareResponse, Proposal, ProposeRequest, ProposeResponse, CODE_OK\n'), ((1687, 1720), 'my_collection.paxos.common.ProposeRequest', 'ProposeRequest', ([], {'proposal': 'proposal'}), '(proposal=proposal)\n', (1701, 1720), False, 'from my_collection.paxos.common import NodeId, Router, ProposalId, Value, PrepareRequest, is_majority, PrepareResponse, Proposal, ProposeRequest, ProposeResponse, CODE_OK\n'), ((1630, 1667), 'my_collection.paxos.common.Proposal', 'Proposal', ([], {'id': 'proposal_id', 'value': 'value'}), '(id=proposal_id, value=value)\n', (1638, 1667), False, 'from my_collection.paxos.common import NodeId, Router, ProposalId, Value, PrepareRequest, is_majority, PrepareResponse, Proposal, ProposeRequest, ProposeResponse, CODE_OK\n')] |
import math
class Point (object):
    """A 2-D point with Euclidean distance, display, and tolerant equality."""
    def __init__ (self, x = 0, y = 0):
        self.x = x
        self.y = y
    def dist (self, other):
        """Return the Euclidean distance between this point and `other`."""
        dx = self.x - other.x
        dy = self.y - other.y
        return math.hypot (dx, dy)
    def __str__ (self):
        """Render the point as '(x, y)'."""
        return '(' + str(self.x) + ', ' + str(self.y) + ')'
    def __eq__ (self, other):
        """Coordinate-wise comparison within a tiny absolute tolerance."""
        tol = 1.0e-16
        # Short-circuit on x first, mirroring a lazy `and` of both tests.
        if abs(self.x - other.x) >= tol:
            return False
        return abs(self.y - other.y) < tol
def getPoints(filename="points.txt"):
    """Read tab-separated integer coordinate pairs from `filename` and
    return them as a list of Point objects.

    Fixes over the original: the file is closed via a context manager
    (it was previously left open), each line is split only once, and the
    leftover debug statements (`z = x * y; print(z)`) were removed.
    The new `filename` parameter defaults to the old hard-coded path,
    so existing callers are unaffected.
    """
    points = []
    with open(filename, "r") as myFile:
        for line in myFile:
            fields = line.strip().split("\t")
            x = int(fields[0])
            y = int(fields[1])
            points.append(Point(x, y))
    return points
def getShortestDistance(points):
    """Return the smallest pairwise distance among `points`.

    Each unordered pair is compared exactly once. Returns -1 when the
    list has fewer than two points (no pair exists).

    Bug fix: the original called `points[i].dist(points)` — passing the
    whole list instead of `points[j]` — which raised AttributeError for
    any input with two or more points.
    """
    shortestDistance = -1
    for i in range(len(points) - 1):
        for j in range(i + 1, len(points)):
            d = points[i].dist(points[j])
            # -1 is the "unset" sentinel; otherwise keep the minimum.
            if shortestDistance == -1 or d < shortestDistance:
                shortestDistance = d
            print(shortestDistance)
    return shortestDistance
def main():
    """Load points from points.txt and compute their shortest pairwise distance."""
    # create an empty list of Point objects
    points = getPoints()
    shortestDistance = getShortestDistance(points)
main()
| [
"math.hypot"
] | [((201, 247), 'math.hypot', 'math.hypot', (['(self.x - other.x)', '(self.y - other.y)'], {}), '(self.x - other.x, self.y - other.y)\n', (211, 247), False, 'import math\n')] |
import pandas as pd
exa = pd.read_csv('en_dup.csv')
# Map the textual labels to integer codes: F -> 0, T -> 1, U -> 2.
for code, letter in enumerate(('F', 'T', 'U')):
    exa.loc[exa['label'] == letter, 'label'] = code
# Keep only the 0/1 classes (label 2 is discarded), 0-rows followed by 1-rows.
kept = [exa.loc[exa['label'] == code] for code in (0, 1)]
exa = pd.concat(kept)
exa.to_csv('train.csv', index=0)
| [
"pandas.concat",
"pandas.read_csv"
] | [((28, 53), 'pandas.read_csv', 'pd.read_csv', (['"""en_dup.csv"""'], {}), "('en_dup.csv')\n", (39, 53), True, 'import pandas as pd\n'), ((292, 306), 'pandas.concat', 'pd.concat', (['exa'], {}), '(exa)\n', (301, 306), True, 'import pandas as pd\n')] |
from restapi.connectors import Connector
from restapi.env import Env
from restapi.services.authentication import BaseAuthentication, Role
from restapi.tests import API_URI, BaseTests, FlaskClient
from restapi.utilities.logs import log
class TestApp(BaseTests):
    """End-to-end tests for the authentication decorators on the /tests/*
    endpoints: no-auth, required auth, optional auth, query-string tokens,
    role restrictions, and preload (auth callback) endpoints."""
    # Endpoint without authentication must always answer, with or without tokens.
    def test_no_auth(self, client: FlaskClient) -> None:
        r = client.get(f"{API_URI}/tests/noauth")
        assert r.status_code == 200
        assert self.get_content(r) == "OK"
        if Env.get_bool("AUTH_ENABLE"):
            headers, _ = self.do_login(client, None, None)
            # Tokens are ignored
            r = client.get(f"{API_URI}/tests/noauth", headers=headers)
            assert r.status_code == 200
            assert self.get_content(r) == "OK"
            # Tokens are ignored even if invalid
            r = client.get(
                f"{API_URI}/tests/noauth", headers={"Authorization": "Bearer invalid"}
            )
            assert r.status_code == 200
            assert self.get_content(r) == "OK"
    # Required authentication: 401 without/with bad token, 200 with valid token.
    def test_auth(self, client: FlaskClient) -> None:
        if not Env.get_bool("AUTH_ENABLE"):
            log.warning("Skipping authentication tests")
            return
        r = client.get(f"{API_URI}/tests/authentication")
        assert r.status_code == 401
        r = client.get(
            f"{API_URI}/tests/authentication",
            headers={"Authorization": "Bearer invalid"},
        )
        assert r.status_code == 401
        headers, token = self.do_login(client, None, None)
        r = client.get(f"{API_URI}/tests/authentication", headers=headers)
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        if not Env.get_bool("ALLOW_ACCESS_TOKEN_PARAMETER"):
            # access token parameter is not allowed by default
            r = client.get(
                f"{API_URI}/tests/authentication", query_string={"access_token": token}
            )
            assert r.status_code == 401
    # Optional authentication: missing token -> 204, valid -> 200, invalid -> 401.
    def test_optional_auth(self, client: FlaskClient) -> None:
        if not Env.get_bool("AUTH_ENABLE"):
            log.warning("Skipping authentication tests")
            return
        # Optional authentication can accept missing tokens
        r = client.get(f"{API_URI}/tests/optionalauthentication")
        assert r.status_code == 204
        headers, token = self.do_login(client, None, None)
        # Or valid tokens
        r = client.get(f"{API_URI}/tests/optionalauthentication", headers=headers)
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        # But not invalid tokens, i.e. if presented the tokens is always validated
        r = client.get(
            f"{API_URI}/tests/authentication",
            headers={"Authorization": "Bearer invalid"},
        )
        assert r.status_code == 401
        if not Env.get_bool("ALLOW_ACCESS_TOKEN_PARAMETER"):
            # access token parameter is not allowed by default
            r = client.get(
                f"{API_URI}/tests/optionalauthentication",
                query_string={"access_token": token},
            )
            # query token is ignored but the endpoint accepts missing tokens
            assert r.status_code == 204
            r = client.get(
                f"{API_URI}/tests/optionalauthentication",
                query_string={"access_token": "invalid"},
            )
            # invalid tokens should be rejected, but query token is ignored
            assert r.status_code == 204
    # Endpoint that also accepts the token via the access_token query parameter.
    def test_access_token_parameter(self, client: FlaskClient) -> None:
        if not Env.get_bool("AUTH_ENABLE"):
            log.warning("Skipping authentication tests")
            return
        r = client.get(f"{API_URI}/tests/queryauthentication")
        assert r.status_code == 401
        r = client.get(
            f"{API_URI}/tests/queryauthentication",
            headers={"Authorization": "Bearer invalid"},
        )
        assert r.status_code == 401
        headers, token = self.do_login(client, None, None)
        r = client.get(f"{API_URI}/tests/queryauthentication", headers=headers)
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        r = client.get(
            f"{API_URI}/tests/queryauthentication", query_string={"access_token": token}
        )
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        r = client.get(
            f"{API_URI}/tests/queryauthentication",
            query_string={"access_token": "invalid"},
        )
        assert r.status_code == 401
    # Optional query-parameter auth: missing -> 204, valid -> 200, invalid -> 401.
    def test_optional_access_token_parameter(self, client: FlaskClient) -> None:
        if not Env.get_bool("AUTH_ENABLE"):
            log.warning("Skipping authentication tests")
            return
        # Optional authentication can accept missing tokens
        r = client.get(f"{API_URI}/tests/optionalqueryauthentication")
        assert r.status_code == 204
        headers, token = self.do_login(client, None, None)
        # Or valid tokens
        r = client.get(f"{API_URI}/tests/optionalqueryauthentication", headers=headers)
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        # But not invalid tokens, i.e. if presented the tokens is always validated
        r = client.get(
            f"{API_URI}/tests/optionalqueryauthentication",
            headers={"Authorization": "Bearer invalid"},
        )
        assert r.status_code == 401
        r = client.get(
            f"{API_URI}/tests/optionalqueryauthentication",
            query_string={"access_token": token},
        )
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        r = client.get(
            f"{API_URI}/tests/optionalqueryauthentication",
            query_string={"access_token": "invalid"},
        )
        # invalid tokens should be rejected, but query token is ignored
        assert r.status_code == 401
    # Role-restricted endpoints: admin vs. plain USER access.
    def test_authentication_with_multiple_roles(self, client: FlaskClient) -> None:
        if not Env.get_bool("AUTH_ENABLE"):
            log.warning("Skipping authentication tests")
            return
        r = client.get(f"{API_URI}/tests/manyrolesauthentication")
        assert r.status_code == 401
        r = client.get(f"{API_URI}/tests/unknownroleauthentication")
        assert r.status_code == 401
        admin_headers, _ = self.do_login(client, None, None)
        r = client.get(
            f"{API_URI}/tests/manyrolesauthentication", headers=admin_headers
        )
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == BaseAuthentication.default_user
        r = client.get(
            f"{API_URI}/tests/unknownroleauthentication", headers=admin_headers
        )
        assert r.status_code == 401
        if Env.get_bool("MAIN_LOGIN_ENABLE"):
            # Repeat the checks with a freshly created non-admin user.
            uuid, data = self.create_user(client, roles=[Role.USER])
            user_header, _ = self.do_login(
                client, data.get("email"), data.get("password")
            )
            r = client.get(
                f"{API_URI}/tests/manyrolesauthentication", headers=user_header
            )
            assert r.status_code == 200
            content = self.get_content(r)
            assert isinstance(content, dict)
            assert len(content) == 1
            assert "email" in content
            assert content["email"] == data.get("email")
            r = client.get(
                f"{API_URI}/tests/unknownroleauthentication", headers=user_header
            )
            assert r.status_code == 401
            self.delete_user(client, uuid)
    # Preload/auth-callback endpoint: valid uuid accepted, unknown uuid -> 401.
    def test_authentication_with_auth_callback(self, client: FlaskClient) -> None:
        if not Env.get_bool("AUTH_ENABLE"):
            log.warning("Skipping authentication tests")
            return
        auth = Connector.get_authentication_instance()
        user = auth.get_user(username=BaseAuthentication.default_user)
        assert user is not None
        VALID = f"/tests/preloadcallback/{user.uuid}"
        INVALID = "/tests/preloadcallback/12345678-90ab-cdef-1234-567890abcdef"
        admin_headers, _ = self.do_login(client, None, None)
        # Verify both endpoint ...
        r = client.get(
            f"{API_URI}{VALID}", query_string={"test": True}, headers=admin_headers
        )
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, dict)
        assert len(content) == 1
        assert "email" in content
        assert content["email"] == user.email
        r = client.get(
            f"{API_URI}{INVALID}", query_string={"test": True}, headers=admin_headers
        )
        assert r.status_code == 401
        # and get_schema!
        r = client.get(
            f"{API_URI}{VALID}",
            query_string={"get_schema": True},
            headers=admin_headers,
        )
        assert r.status_code == 200
        content = self.get_content(r)
        assert isinstance(content, list)
        assert len(content) == 1
        assert content[0]["key"] == "test"
        assert content[0]["type"] == "boolean"
        r = client.get(
            f"{API_URI}{INVALID}",
            query_string={"get_schema": True},
            headers=admin_headers,
        )
        assert r.status_code == 401
| [
"restapi.connectors.Connector.get_authentication_instance",
"restapi.env.Env.get_bool",
"restapi.utilities.logs.log.warning"
] | [((462, 489), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (474, 489), False, 'from restapi.env import Env\n'), ((7942, 7975), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""MAIN_LOGIN_ENABLE"""'], {}), "('MAIN_LOGIN_ENABLE')\n", (7954, 7975), False, 'from restapi.env import Env\n'), ((8980, 9019), 'restapi.connectors.Connector.get_authentication_instance', 'Connector.get_authentication_instance', ([], {}), '()\n', (9017, 9019), False, 'from restapi.connectors import Connector\n'), ((1055, 1082), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (1067, 1082), False, 'from restapi.env import Env\n'), ((1096, 1140), 'restapi.utilities.logs.log.warning', 'log.warning', (['"""Skipping authentication tests"""'], {}), "('Skipping authentication tests')\n", (1107, 1140), False, 'from restapi.utilities.logs import log\n'), ((1831, 1875), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""ALLOW_ACCESS_TOKEN_PARAMETER"""'], {}), "('ALLOW_ACCESS_TOKEN_PARAMETER')\n", (1843, 1875), False, 'from restapi.env import Env\n'), ((2190, 2217), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (2202, 2217), False, 'from restapi.env import Env\n'), ((2231, 2275), 'restapi.utilities.logs.log.warning', 'log.warning', (['"""Skipping authentication tests"""'], {}), "('Skipping authentication tests')\n", (2242, 2275), False, 'from restapi.utilities.logs import log\n'), ((3151, 3195), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""ALLOW_ACCESS_TOKEN_PARAMETER"""'], {}), "('ALLOW_ACCESS_TOKEN_PARAMETER')\n", (3163, 3195), False, 'from restapi.env import Env\n'), ((3897, 3924), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (3909, 3924), False, 'from restapi.env import Env\n'), ((3938, 3982), 'restapi.utilities.logs.log.warning', 'log.warning', (['"""Skipping authentication tests"""'], {}), "('Skipping 
authentication tests')\n", (3949, 3982), False, 'from restapi.utilities.logs import log\n'), ((5320, 5347), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (5332, 5347), False, 'from restapi.env import Env\n'), ((5361, 5405), 'restapi.utilities.logs.log.warning', 'log.warning', (['"""Skipping authentication tests"""'], {}), "('Skipping authentication tests')\n", (5372, 5405), False, 'from restapi.utilities.logs import log\n'), ((7040, 7067), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (7052, 7067), False, 'from restapi.env import Env\n'), ((7081, 7125), 'restapi.utilities.logs.log.warning', 'log.warning', (['"""Skipping authentication tests"""'], {}), "('Skipping authentication tests')\n", (7092, 7125), False, 'from restapi.utilities.logs import log\n'), ((8859, 8886), 'restapi.env.Env.get_bool', 'Env.get_bool', (['"""AUTH_ENABLE"""'], {}), "('AUTH_ENABLE')\n", (8871, 8886), False, 'from restapi.env import Env\n'), ((8900, 8944), 'restapi.utilities.logs.log.warning', 'log.warning', (['"""Skipping authentication tests"""'], {}), "('Skipping authentication tests')\n", (8911, 8944), False, 'from restapi.utilities.logs import log\n')] |
# -*- coding:utf-8 -*-
from django.contrib import admin
from .models import UserProfile
# Register your models here.
class UserProfileModelAdmin(admin.ModelAdmin):
    """
    Admin configuration for managing UserProfile records.
    """
    # Columns shown in the changelist view.
    list_display = ('id', 'username', 'nike_name', 'mobile',
                    'email', 'is_active')
    # Sidebar filter on the active flag.
    list_filter = ('is_active',)
    # Columns that link to the detail/edit page.
    list_display_links = ('id', 'username')
    # Fields searched by the admin search box.
    search_fields = ('username', 'email', 'mobile', 'nike_name')
admin.site.register(UserProfile, UserProfileModelAdmin)
| [
"django.contrib.admin.site.register"
] | [((443, 498), 'django.contrib.admin.site.register', 'admin.site.register', (['UserProfile', 'UserProfileModelAdmin'], {}), '(UserProfile, UserProfileModelAdmin)\n', (462, 498), False, 'from django.contrib import admin\n')] |
'''
axicli.py - Command line interface (CLI) for AxiDraw.
For quick help:
python axicli.py --help
Full user guide:
https://axidraw.com/doc/cli_api/
This script is a stand-alone version of AxiDraw Control, accepting
various options and providing a facility for setting default values.
'''
from axicli.axidraw_cli import axidraw_CLI
if __name__ == '__main__':
    # Invoke the AxiDraw command-line interface entry point.
    axidraw_CLI()
| [
"axicli.axidraw_cli.axidraw_CLI"
] | [((382, 395), 'axicli.axidraw_cli.axidraw_CLI', 'axidraw_CLI', ([], {}), '()\n', (393, 395), False, 'from axicli.axidraw_cli import axidraw_CLI\n')] |
# MENTOL
# At:Sun Nov 24 15:04:31 2019
# Abort early when no marshalled bytecode was supplied.
# NOTE(review): `bytecode` is expected to be defined before this point —
# its definition is not visible in this file; confirm where it comes from.
if len(bytecode) == 0:
    print('\x1b[1;93mbyte code kosong\nharap masukkan bytecodenya\x1b[0m')
    exit()
import marshal, sys, os, random, string, time
# Install uncompyle6 on the fly if it is not already available.
try:
    from uncompyle6.main import decompile
except:
    os.system('pip install uncompyle6')
    from uncompyle6.main import decompile
def echo(text):
    """Expand the '+<letter>' colour tags in `text` into ANSI escape
    sequences, append a reset, and print the result."""
    codes = 'mhkbucp'
    # '+m' -> ESC[91;1m, '+h' -> ESC[92;1m, ... in order of `codes`.
    for idx, tag in enumerate(codes):
        text = text.replace('+' + tag, '\x1b[%d;1m' % (91 + idx))
    # Trailing reset, then expand any explicit '+0' reset tags.
    text = text + '\x1b[0m'
    text = text.replace('+0', '\x1b[0m')
    print(text)
def run(text):
    """Like echo(): expand '+<letter>' colour tags to ANSI escapes, but
    print one character at a time with a typewriter effect."""
    try:
        w = 'mhkbucp'
        for z in w:
            text = text.replace('+%s' % z, '\x1b[%d;1m' % (91 + w.index(z)))
        text += '\x1b[0m'
        text = text.replace('+0', '\x1b[0m')
        # Emit char-by-char with a 10 ms delay for the typing animation.
        for i in text + '\n':
            sys.stdout.write(i)
            sys.stdout.flush()
            time.sleep(0.01)
    except (KeyboardInterrupt, EOFError):
        # Ctrl-C / EOF aborts the whole program, not just the animation.
        exit('')
# Random 4-letter base name for the decompiled output file.
n = ''.join((random.choice(string.ascii_lowercase) for _ in range(4)))
fl = n + '-dec.py'
# ASCII-art banner; '+<letter>' tags are colour markers expanded by echo().
logo = '\n+m 888 +h,8,"88b,\n+m e88 888 ,e e, e88\'888 e88 88e 888 888 8e +p888 88e Y8b Y888P +h " ,88P\'\n+md888 888 d88 88b d888 \'8 d888 888b 888 888 88b +p888 888b Y8b Y8P +h C8K\n+mY888 888 888 , Y888 , Y888 888P 888 888 888 +p888 888P Y8b Y +h e `88b,\n+m "88 888 "YeeP" "88,e8\' "88 88" 888 888 888 +p888 88" 888 +h"8",88P\'\n +p888 888\n +p888 888+p\n\t\t+ccoded by: +pZhu <NAME> AKA AnonyMass\n\t\t+cteam : +pBlack Coder Crush'
def decom():
    """Decompile the marshalled `bytecode` blob with uncompyle6 and write
    the result to `fl`; prints the banner and a success/failure message."""
    try:
        os.system('clear')
        echo(logo)
        # decompile(python_version, code_object, output_stream)
        x = decompile(3.7, marshal.loads(bytecode), open(fl, 'w'))
        run('\t\t\t+hdecompile sukses :)+p')
        run('\t\t\t+hfile disimpan: +p' + fl)
        exit()
    except Exception as e:
        try:
            os.system('clear')
            echo(logo)
            echo('+mdecompile gagal+p')
            exit()
        finally:
            # Drop the caught exception reference (decompiler-emitted cleanup).
            e = None
            del e
decom()
| [
"random.choice",
"time.sleep",
"marshal.loads",
"os.system",
"sys.stdout.flush",
"sys.stdout.write"
] | [((256, 291), 'os.system', 'os.system', (['"""pip install uncompyle6"""'], {}), "('pip install uncompyle6')\n", (265, 291), False, 'import marshal, sys, os, random, string, time\n'), ((952, 989), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (965, 989), False, 'import marshal, sys, os, random, string, time\n'), ((1703, 1721), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1712, 1721), False, 'import marshal, sys, os, random, string, time\n'), ((797, 816), 'sys.stdout.write', 'sys.stdout.write', (['i'], {}), '(i)\n', (813, 816), False, 'import marshal, sys, os, random, string, time\n'), ((829, 847), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (845, 847), False, 'import marshal, sys, os, random, string, time\n'), ((860, 876), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (870, 876), False, 'import marshal, sys, os, random, string, time\n'), ((1768, 1791), 'marshal.loads', 'marshal.loads', (['bytecode'], {}), '(bytecode)\n', (1781, 1791), False, 'import marshal, sys, os, random, string, time\n'), ((1966, 1984), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1975, 1984), False, 'import marshal, sys, os, random, string, time\n')] |
from builtins import str
from builtins import range
from builtins import object
import os
import fixtures
import testtools
from vn_test import VNFixture
from vm_test import VMFixture
from common.connections import ContrailConnections
from policy_test import PolicyFixture
from policy.config import AttachPolicyFixture
from time import sleep
from tcutils.commands import ssh, execute_cmd, execute_cmd_out
class ConfigPerformance(object):
    """Mixin of helpers for performance test setup: spawning VMs, pinning CPU
    governors, capturing traffic with tcpdump, and switching vrouter encap."""
    def config_vm(self, vn_fix, vm_name, node_name=None, image_name='ubuntu-netperf', flavor='contrail_flavor_large'):
        """Create (and register for cleanup) a VM fixture on the given virtual network."""
        vm_fixture = self.useFixture(VMFixture(
            project_name=self.inputs.project_name, connections=self.connections,
            vn_obj=vn_fix.obj, vm_name=vm_name, node_name=node_name, image_name=image_name, flavor=flavor))
        return vm_fixture
    def set_cpu_performance(self, hosts):
        """Set the CPU frequency governor to 'performance' on the hosts.

        NOTE(review): only the first two entries of `hosts` are touched
        (range(0, 2)) and `sessions` is never used — confirm both are intended.
        """
        sessions = {}
        cmd = 'for f in /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor ; do echo performance > $f; cat $f; done'
        for i in range(0, 2):
            session = ssh(hosts[i]['host_ip'], hosts[i]['username'], hosts[i]['password'])
            execute_cmd(session, cmd, self.logger)
        return
    def start_tcp_dump(self, vm_fixture):
        """Start tcpdump (UDP only) on the VM's tap interface on its compute node.

        Returns [ssh_session, pcap_path] for later use by stop_tcp_dump().
        """
        sessions =[]
        vm_name = vm_fixture.vm_name
        host = self.inputs.host_data[vm_fixture.vm_node_ip]
        inspect_h = self.agent_inspect[vm_fixture.vm_node_ip]
        # Resolve the vrouter tap interface backing the VM's IP.
        tapintf = inspect_h.get_vna_tap_interface_by_ip(vm_fixture.vm_ip)[0]['name']
        pcap = '/tmp/%s.pcap' % tapintf
        cmd = "sudo tcpdump -ni %s udp -w %s" % (tapintf, pcap)
        session = ssh(host['host_ip'], host['username'], host['password'])
        self.logger.info("Staring tcpdump to capture the packets.")
        execute_cmd(session, cmd, self.logger)
        sessions.extend((session, pcap))
        return sessions
    def stop_tcp_dump(self, sessions):
        """Stop the tcpdump started by start_tcp_dump() and return the number
        of captured packets; `sessions` is the [session, pcap] pair it returned."""
        self.logger.info("Waiting for the tcpdump write to complete.")
        sleep(30)
        cmd = 'sudo kill $(pidof tcpdump)'
        execute_cmd(sessions[0], cmd, self.logger)
        execute_cmd(sessions[0], 'sync', self.logger)
        # Count captured packets by replaying the pcap through tcpdump.
        cmd = 'sudo tcpdump -r %s | wc -l' % sessions[1]
        out, err = execute_cmd_out(sessions[0], cmd, self.logger)
        count = int(out.strip('\n'))
        #cmd = 'rm -f %s' % sessions[1]
        #execute_cmd(sessions[0], cmd, self.logger)
        return count
    def changeEncap_setting(self, encap1='MPLSoUDP', encap2='MPLSoGRE', encap3='VXLAN'):
        """Replace the vrouter encapsulation priority order and verify it stuck.

        Registers cleanups to restore/delete the previous config; asserts if the
        configured order read back does not match the requested order.
        """
        self.logger.info('Deleting any Encap before continuing')
        out=self.connections.delete_vrouter_encap()
        if ( out!='No config id found'):
            # Restore the previous encap priority on teardown.
            self.addCleanup(self.connections.set_vrouter_config_encap,out[0],out[1],out[2])
        self.logger.info('Setting new Encap before continuing')
        config_id=self.connections.set_vrouter_config_encap(encap1, encap2, encap3)
        self.logger.info('Created.UUID is %s'%(config_id))
        self.addCleanup(self.connections.delete_vrouter_encap)
        encap_list_to_be_configured = [str(encap1),str(encap2),str(encap3)]
        encap_list_configured=self.connections.read_vrouter_config_encap()
        if encap_list_to_be_configured != encap_list_configured:
            self.logger.error( "Configured Encap Priority order is NOT matching with expected order. Configured: %s,\
                Expected: %s" %(encap_list_configured, encap_list_to_be_configured))
            assert False
        else:
            self.logger.info( "Configured Encap Priority order is matching with expected order. Configured: %s,\
                Expected: %s" %(encap_list_configured,encap_list_to_be_configured))
        return
| [
"vm_test.VMFixture",
"tcutils.commands.ssh",
"tcutils.commands.execute_cmd_out",
"time.sleep",
"builtins.str",
"builtins.range",
"tcutils.commands.execute_cmd"
] | [((1042, 1053), 'builtins.range', 'range', (['(0)', '(2)'], {}), '(0, 2)\n', (1047, 1053), False, 'from builtins import range\n'), ((1642, 1698), 'tcutils.commands.ssh', 'ssh', (["host['host_ip']", "host['username']", "host['password']"], {}), "(host['host_ip'], host['username'], host['password'])\n", (1645, 1698), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((1775, 1813), 'tcutils.commands.execute_cmd', 'execute_cmd', (['session', 'cmd', 'self.logger'], {}), '(session, cmd, self.logger)\n', (1786, 1813), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((1998, 2007), 'time.sleep', 'sleep', (['(30)'], {}), '(30)\n', (2003, 2007), False, 'from time import sleep\n'), ((2059, 2101), 'tcutils.commands.execute_cmd', 'execute_cmd', (['sessions[0]', 'cmd', 'self.logger'], {}), '(sessions[0], cmd, self.logger)\n', (2070, 2101), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((2110, 2155), 'tcutils.commands.execute_cmd', 'execute_cmd', (['sessions[0]', '"""sync"""', 'self.logger'], {}), "(sessions[0], 'sync', self.logger)\n", (2121, 2155), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((2232, 2278), 'tcutils.commands.execute_cmd_out', 'execute_cmd_out', (['sessions[0]', 'cmd', 'self.logger'], {}), '(sessions[0], cmd, self.logger)\n', (2247, 2278), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((596, 778), 'vm_test.VMFixture', 'VMFixture', ([], {'project_name': 'self.inputs.project_name', 'connections': 'self.connections', 'vn_obj': 'vn_fix.obj', 'vm_name': 'vm_name', 'node_name': 'node_name', 'image_name': 'image_name', 'flavor': 'flavor'}), '(project_name=self.inputs.project_name, connections=self.\n connections, vn_obj=vn_fix.obj, vm_name=vm_name, node_name=node_name,\n image_name=image_name, flavor=flavor)\n', (605, 778), False, 'from vm_test import VMFixture\n'), ((1077, 1145), 'tcutils.commands.ssh', 'ssh', 
(["hosts[i]['host_ip']", "hosts[i]['username']", "hosts[i]['password']"], {}), "(hosts[i]['host_ip'], hosts[i]['username'], hosts[i]['password'])\n", (1080, 1145), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((1158, 1196), 'tcutils.commands.execute_cmd', 'execute_cmd', (['session', 'cmd', 'self.logger'], {}), '(session, cmd, self.logger)\n', (1169, 1196), False, 'from tcutils.commands import ssh, execute_cmd, execute_cmd_out\n'), ((3100, 3111), 'builtins.str', 'str', (['encap1'], {}), '(encap1)\n', (3103, 3111), False, 'from builtins import str\n'), ((3112, 3123), 'builtins.str', 'str', (['encap2'], {}), '(encap2)\n', (3115, 3123), False, 'from builtins import str\n'), ((3124, 3135), 'builtins.str', 'str', (['encap3'], {}), '(encap3)\n', (3127, 3135), False, 'from builtins import str\n')] |
from data import NumericalField, CategoricalField, Iterator
from data import Dataset
from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM
from random import choice
import multiprocessing
import pandas as pd
import numpy as np
import torch
import argparse
import json
import os
# Hyper-parameter search space for random search: parameter_search() draws
# one value per key with random.choice.  Layer counts and per-layer dims are
# sampled independently and then clamped to a monotone sequence.
parameters_space = {
    "batch_size":[64, 128, 256],
    "z_dim":[100, 200, 300],
    "gen_num_layers":[1,2,3],
    "gen_hidden_dim":[100, 200, 300, 400],
    "gen_feature_dim":[100, 200, 300, 400, 500],
    "gen_lstm_dim":[100,200,300,400,500],
    "dis_hidden_dim":[100, 200, 300],
    "dis_num_layers":[1,2,3],
    "lr":[0.0001,0.0002,0.0005],
    "cp":[0.01],
    "dis_train_num" :[1, 2, 5]
}
def parameter_search(gen_model, space=None):
    """Randomly sample one hyper-parameter configuration.

    Args:
        gen_model: "MLP" or "LSTM" — selects which observed-generator
            hyper-parameters are sampled.
        space: optional mapping with the same keys as the module-level
            ``parameters_space``; defaults to ``parameters_space``.  Added
            (backward-compatibly) so callers/tests can inject a custom or
            deterministic search space.

    Returns:
        dict mapping hyper-parameter names to sampled values.
    """
    if space is None:
        space = parameters_space
    param = {}
    param["batch_size"] = choice(space["batch_size"])
    param["z_dim"] = choice(space["z_dim"])

    def _sample_layer_dims(num_key, dim_key, non_decreasing):
        # Sample a layer count, then one dim per layer; clamp each dim to the
        # previous one so the sequence is monotone (non-decreasing for
        # generators, non-increasing for discriminators).  The choice() call
        # order is identical to the original copy-pasted loops.
        dims = []
        for layer in range(choice(space[num_key])):
            dim = choice(space[dim_key])
            if layer > 0:
                prev = dims[layer - 1]
                if (non_decreasing and prev > dim) or \
                   (not non_decreasing and prev < dim):
                    dim = prev
            dims.append(dim)
        return dims

    param["mask_gen_hidden_dims"] = _sample_layer_dims(
        "gen_num_layers", "gen_hidden_dim", non_decreasing=True)
    if gen_model == "MLP":
        param["obs_gen_hidden_dims"] = _sample_layer_dims(
            "gen_num_layers", "gen_hidden_dim", non_decreasing=True)
    elif gen_model == "LSTM":
        param["obs_gen_feature_dim"] = choice(space["gen_feature_dim"])
        param["obs_gen_lstm_dim"] = choice(space["gen_lstm_dim"])
    param["obs_dis_hidden_dims"] = _sample_layer_dims(
        "dis_num_layers", "dis_hidden_dim", non_decreasing=False)
    param["mask_dis_hidden_dims"] = _sample_layer_dims(
        "dis_num_layers", "dis_hidden_dim", non_decreasing=False)
    param["lr"] = choice(space["lr"])
    param["cp"] = choice(space["cp"])
    param["dis_train_num"] = choice(space["dis_train_num"])
    return param
def thread_run(path, search, config, source_dst, target_dst, GPU):
    """Run one experiment: sample (or load) hyper-parameters, build the
    generators/discriminators selected by the config, and train or translate.

    Args:
        path: experiment output directory (with trailing slash).
        search: index of this run within the random search.
        config: experiment config dict (keys: rand_search, gen_model, labels,
            source_mask, target_mask, Gm, Gx, param, ...).
        source_dst / target_dst: source- and target-domain Dataset objects.
        GPU: whether to train on GPU.
    """
    # Either draw a fresh random configuration or use the fixed one.
    if config["rand_search"] == "yes":
        param = parameter_search(gen_model=config["gen_model"])
    else:
        param = config["param"]
    # Append this run's hyper-parameters to the experiment log (one JSON per line).
    with open(path+"exp_params.json", "a") as f:
        json.dump(param, f)
        f.write("\n")
    source_it = Iterator(dataset=source_dst, batch_size=param["batch_size"], shuffle=False, labels=config["labels"], mask=config["source_mask"])
    target_it = Iterator(dataset=target_dst, batch_size=param["batch_size"], shuffle=False, labels=config["labels"], mask=config["target_mask"])
    # Dimensions derived from the data: feature width, column layout, mask width.
    x_dim = source_it.data.shape[1]
    col_ind = source_dst.col_ind
    col_dim = source_dst.col_dim
    col_type = source_dst.col_type
    mask_dim = target_it.masks.shape[1]
    # Mask generator/discriminator pair, only when enabled via "Gm".
    if config["Gm"] == "yes":
        mask_gen = MaskGenerator_MLP(param["z_dim"], x_dim, param["mask_gen_hidden_dims"], mask_dim)
        mask_dis = Discriminator(mask_dim, param["mask_dis_hidden_dims"], c_dim=x_dim, condition=True)
    else:
        mask_gen = None
        mask_dis = None
    # Observed-value generator, only when enabled via "Gx".
    # NOTE(review): if Gx == "yes" but gen_model is neither "LSTM" nor "MLP",
    # obs_gen is left unbound and the print below raises NameError — confirm
    # that configs always use one of those two values.
    if config["Gx"] == "yes":
        if config["gen_model"] == "LSTM":
            obs_gen = ObservedGenerator_LSTM(param["z_dim"], param["obs_gen_feature_dim"], param["obs_gen_lstm_dim"], col_dim, col_type, col_ind, x_dim, mask_dim)
        elif config["gen_model"] == "MLP":
            obs_gen = ObservedGenerator_MLP(param["z_dim"], param["obs_gen_hidden_dims"], x_dim, mask_dim, col_type, col_ind)
    else:
        obs_gen = None
    obs_dis = Discriminator(x_dim, param["obs_dis_hidden_dims"])
    print(mask_gen)
    print(mask_dis)
    print(obs_gen)
    print(obs_dis)
    handler = Handler(source_it, target_it, source_dst, path)
    # With no generators at all, only run translation; otherwise train.
    # NOTE(review): translate() hard-codes GPU=True, ignoring the GPU argument
    # used by train() below — verify this is intentional.
    if mask_gen is None and obs_gen is None:
        handler.translate(mask_gen, obs_gen, param["z_dim"], path+"translate_{}".format(search), GPU=True, repeat=1)
    else:
        mask_gen, obs_gen, mask_dis, obs_dis = handler.train(mask_gen, obs_gen, mask_dis, obs_dis, param, config, search, GPU=GPU)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('configs', help='a json config file')
parser.add_argument('gpu', default=0)
args = parser.parse_args()
gpu = int(args.gpu)
if gpu >= 0:
GPU = True
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu)
else:
GPU = False
with open(args.configs) as f:
configs = json.load(f)
try:
os.mkdir("expdir")
except:
pass
for config in configs:
path = "expdir/"+config["name"]+"/"
try:
os.mkdir("expdir/"+config["name"])
except:
pass
source = pd.read_csv(config["source"])
target = pd.read_csv(config["target"])
fields = []
col_type = []
if "label" in config.keys():
cond = config["label"]
for i, col in enumerate(list(source)):
if "label" in config.keys() and col in cond:
fields.append((col, CategoricalField("one-hot", noise=0)))
col_type.append("condition")
elif i in config["normalize_cols"]:
fields.append((col,NumericalField("normalize")))
col_type.append("normalize")
elif i in config["gmm_cols"]:
fields.append((col, NumericalField("gmm", n=5)))
col_type.append("gmm")
elif i in config["one-hot_cols"]:
fields.append((col, CategoricalField("one-hot", noise=0)))
col_type.append("one-hot")
elif i in config["ordinal_cols"]:
fields.append((col, CategoricalField("dict")))
col_type.append("ordinal")
source_dst, target_dst = Dataset.split(
fields = fields,
path = ".",
col_type = col_type,
train = config["source"],
validation = config["target"],
format = "csv",
)
source_dst.learn_convert()
target_dst.learn_convert()
print("source row : {}".format(len(source_dst)))
print("target row: {}".format(len(target_dst)))
n_search = config["n_search"]
jobs = [multiprocessing.Process(target=thread_run, args=(path, search, config, source_dst, target_dst, GPU)) for search in range(n_search)]
for j in jobs:
j.start()
for j in jobs:
j.join()
| [
"synthesizer.ObservedGenerator_MLP",
"random.choice",
"argparse.ArgumentParser",
"synthesizer.Discriminator",
"pandas.read_csv",
"data.Dataset.split",
"multiprocessing.Process",
"synthesizer.ObservedGenerator_LSTM",
"synthesizer.MaskGenerator_MLP",
"os.mkdir",
"json.load",
"synthesizer.Handler... | [((773, 811), 'random.choice', 'choice', (["parameters_space['batch_size']"], {}), "(parameters_space['batch_size'])\n", (779, 811), False, 'from random import choice\n'), ((830, 863), 'random.choice', 'choice', (["parameters_space['z_dim']"], {}), "(parameters_space['z_dim'])\n", (836, 863), False, 'from random import choice\n'), ((919, 961), 'random.choice', 'choice', (["parameters_space['gen_num_layers']"], {}), "(parameters_space['gen_num_layers'])\n", (925, 961), False, 'from random import choice\n'), ((1775, 1817), 'random.choice', 'choice', (["parameters_space['dis_num_layers']"], {}), "(parameters_space['dis_num_layers'])\n", (1781, 1817), False, 'from random import choice\n'), ((2099, 2141), 'random.choice', 'choice', (["parameters_space['dis_num_layers']"], {}), "(parameters_space['dis_num_layers'])\n", (2105, 2141), False, 'from random import choice\n'), ((2387, 2417), 'random.choice', 'choice', (["parameters_space['lr']"], {}), "(parameters_space['lr'])\n", (2393, 2417), False, 'from random import choice\n'), ((2433, 2463), 'random.choice', 'choice', (["parameters_space['cp']"], {}), "(parameters_space['cp'])\n", (2439, 2463), False, 'from random import choice\n'), ((2490, 2531), 'random.choice', 'choice', (["parameters_space['dis_train_num']"], {}), "(parameters_space['dis_train_num'])\n", (2496, 2531), False, 'from random import choice\n'), ((2842, 2974), 'data.Iterator', 'Iterator', ([], {'dataset': 'source_dst', 'batch_size': "param['batch_size']", 'shuffle': '(False)', 'labels': "config['labels']", 'mask': "config['source_mask']"}), "(dataset=source_dst, batch_size=param['batch_size'], shuffle=False,\n labels=config['labels'], mask=config['source_mask'])\n", (2850, 2974), False, 'from data import NumericalField, CategoricalField, Iterator\n'), ((2984, 3116), 'data.Iterator', 'Iterator', ([], {'dataset': 'target_dst', 'batch_size': "param['batch_size']", 'shuffle': '(False)', 'labels': "config['labels']", 'mask': 
"config['target_mask']"}), "(dataset=target_dst, batch_size=param['batch_size'], shuffle=False,\n labels=config['labels'], mask=config['target_mask'])\n", (2992, 3116), False, 'from data import NumericalField, CategoricalField, Iterator\n'), ((3945, 3995), 'synthesizer.Discriminator', 'Discriminator', (['x_dim', "param['obs_dis_hidden_dims']"], {}), "(x_dim, param['obs_dis_hidden_dims'])\n", (3958, 3995), False, 'from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM\n'), ((4076, 4123), 'synthesizer.Handler', 'Handler', (['source_it', 'target_it', 'source_dst', 'path'], {}), '(source_it, target_it, source_dst, path)\n', (4083, 4123), False, 'from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM\n'), ((4449, 4474), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4472, 4474), False, 'import argparse\n'), ((1003, 1045), 'random.choice', 'choice', (["parameters_space['gen_hidden_dim']"], {}), "(parameters_space['gen_hidden_dim'])\n", (1009, 1045), False, 'from random import choice\n'), ((1271, 1313), 'random.choice', 'choice', (["parameters_space['gen_num_layers']"], {}), "(parameters_space['gen_num_layers'])\n", (1277, 1313), False, 'from random import choice\n'), ((1859, 1901), 'random.choice', 'choice', (["parameters_space['dis_hidden_dim']"], {}), "(parameters_space['dis_hidden_dim'])\n", (1865, 1901), False, 'from random import choice\n'), ((2183, 2225), 'random.choice', 'choice', (["parameters_space['dis_hidden_dim']"], {}), "(parameters_space['dis_hidden_dim'])\n", (2189, 2225), False, 'from random import choice\n'), ((2792, 2811), 'json.dump', 'json.dump', (['param', 'f'], {}), '(param, f)\n', (2801, 2811), False, 'import json\n'), ((3316, 3401), 'synthesizer.MaskGenerator_MLP', 'MaskGenerator_MLP', (["param['z_dim']", 'x_dim', "param['mask_gen_hidden_dims']", 'mask_dim'], {}), "(param['z_dim'], x_dim, 
param['mask_gen_hidden_dims'],\n mask_dim)\n", (3333, 3401), False, 'from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM\n'), ((3411, 3498), 'synthesizer.Discriminator', 'Discriminator', (['mask_dim', "param['mask_dis_hidden_dims']"], {'c_dim': 'x_dim', 'condition': '(True)'}), "(mask_dim, param['mask_dis_hidden_dims'], c_dim=x_dim,\n condition=True)\n", (3424, 3498), False, 'from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM\n'), ((4762, 4774), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4771, 4774), False, 'import json\n'), ((4783, 4801), 'os.mkdir', 'os.mkdir', (['"""expdir"""'], {}), "('expdir')\n", (4791, 4801), False, 'import os\n'), ((4957, 4986), 'pandas.read_csv', 'pd.read_csv', (["config['source']"], {}), "(config['source'])\n", (4968, 4986), True, 'import pandas as pd\n'), ((4998, 5027), 'pandas.read_csv', 'pd.read_csv', (["config['target']"], {}), "(config['target'])\n", (5009, 5027), True, 'import pandas as pd\n'), ((5819, 5948), 'data.Dataset.split', 'Dataset.split', ([], {'fields': 'fields', 'path': '"""."""', 'col_type': 'col_type', 'train': "config['source']", 'validation': "config['target']", 'format': '"""csv"""'}), "(fields=fields, path='.', col_type=col_type, train=config[\n 'source'], validation=config['target'], format='csv')\n", (5832, 5948), False, 'from data import Dataset\n'), ((1357, 1399), 'random.choice', 'choice', (["parameters_space['gen_hidden_dim']"], {}), "(parameters_space['gen_hidden_dim'])\n", (1363, 1399), False, 'from random import choice\n'), ((1606, 1649), 'random.choice', 'choice', (["parameters_space['gen_feature_dim']"], {}), "(parameters_space['gen_feature_dim'])\n", (1612, 1649), False, 'from random import choice\n'), ((1680, 1720), 'random.choice', 'choice', (["parameters_space['gen_lstm_dim']"], {}), "(parameters_space['gen_lstm_dim'])\n", (1686, 1720), False, 'from random import choice\n'), 
((3614, 3759), 'synthesizer.ObservedGenerator_LSTM', 'ObservedGenerator_LSTM', (["param['z_dim']", "param['obs_gen_feature_dim']", "param['obs_gen_lstm_dim']", 'col_dim', 'col_type', 'col_ind', 'x_dim', 'mask_dim'], {}), "(param['z_dim'], param['obs_gen_feature_dim'], param[\n 'obs_gen_lstm_dim'], col_dim, col_type, col_ind, x_dim, mask_dim)\n", (3636, 3759), False, 'from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM\n'), ((4893, 4929), 'os.mkdir', 'os.mkdir', (["('expdir/' + config['name'])"], {}), "('expdir/' + config['name'])\n", (4901, 4929), False, 'import os\n'), ((6182, 6286), 'multiprocessing.Process', 'multiprocessing.Process', ([], {'target': 'thread_run', 'args': '(path, search, config, source_dst, target_dst, GPU)'}), '(target=thread_run, args=(path, search, config,\n source_dst, target_dst, GPU))\n', (6205, 6286), False, 'import multiprocessing\n'), ((3805, 3912), 'synthesizer.ObservedGenerator_MLP', 'ObservedGenerator_MLP', (["param['z_dim']", "param['obs_gen_hidden_dims']", 'x_dim', 'mask_dim', 'col_type', 'col_ind'], {}), "(param['z_dim'], param['obs_gen_hidden_dims'], x_dim,\n mask_dim, col_type, col_ind)\n", (3826, 3912), False, 'from synthesizer import MaskGenerator_MLP, ObservedGenerator_MLP, Discriminator, Handler, ObservedGenerator_LSTM\n'), ((5231, 5267), 'data.CategoricalField', 'CategoricalField', (['"""one-hot"""'], {'noise': '(0)'}), "('one-hot', noise=0)\n", (5247, 5267), False, 'from data import NumericalField, CategoricalField, Iterator\n'), ((5365, 5392), 'data.NumericalField', 'NumericalField', (['"""normalize"""'], {}), "('normalize')\n", (5379, 5392), False, 'from data import NumericalField, CategoricalField, Iterator\n'), ((5485, 5511), 'data.NumericalField', 'NumericalField', (['"""gmm"""'], {'n': '(5)'}), "('gmm', n=5)\n", (5499, 5511), False, 'from data import NumericalField, CategoricalField, Iterator\n'), ((5602, 5638), 'data.CategoricalField', 'CategoricalField', 
(['"""one-hot"""'], {'noise': '(0)'}), "('one-hot', noise=0)\n", (5618, 5638), False, 'from data import NumericalField, CategoricalField, Iterator\n'), ((5733, 5757), 'data.CategoricalField', 'CategoricalField', (['"""dict"""'], {}), "('dict')\n", (5749, 5757), False, 'from data import NumericalField, CategoricalField, Iterator\n')] |
# Test the Unicode versions of normal file functions
# open, os.open, os.stat. os.listdir, os.rename, os.remove, os.mkdir, os.chdir, os.rmdir
import sys, os, unittest
from test import support
# Only run where the platform natively supports unicode file names.
if not os.path.supports_unicode_filenames:
    raise unittest.SkipTest("test works only on NT+")
# Sample file names covering several scripts: ASCII, Latin-1 (German),
# Greek, Cyrillic, Japanese, Hebrew and CJK, plus a mixed-script name.
filenames = [
    'abc',
    'ascii',
    'Gr\xfc\xdf-Gott',
    '\u0393\u03b5\u03b9\u03ac-\u03c3\u03b1\u03c2',
    '\u0417\u0434\u0440\u0430\u0432\u0441\u0442\u0432\u0443\u0439\u0442\u0435',
    '\u306b\u307d\u3093',
    '\u05d4\u05e9\u05e7\u05e6\u05e5\u05e1',
    '\u66e8\u66e9\u66eb',
    '\u66e8\u05e9\u3093\u0434\u0393\xdf',
    ]
# Destroy directory dirname and all files under it, to one level.
def deltree(dirname):
    """Remove every file directly under *dirname* (one level only), then
    remove the directory itself.  A missing directory is a no-op.

    Deletion errors are deliberately NOT suppressed: if any of these
    entries cannot be removed, that is itself a test failure.
    """
    if not os.path.exists(dirname):
        return
    # Pass unicode to os.listdir() so the results come back as unicode too.
    for entry in os.listdir(str(dirname)):
        os.unlink(os.path.join(dirname, entry))
    os.rmdir(dirname)
class UnicodeFileTests(unittest.TestCase):
    """Exercise file-system APIs (open, stat, listdir, rename, remove,
    mkdir, chdir, rmdir) with a set of non-ASCII file names."""

    # Every test file lives inside the scratch directory support.TESTFN.
    files = [os.path.join(support.TESTFN, f) for f in filenames]

    def setUp(self):
        """Create the scratch directory and one UTF-8 file per name."""
        try:
            os.mkdir(support.TESTFN)
        except OSError:
            pass  # directory left over from a previous run
        for name in self.files:
            # 'with' replaces the unclosed open/write/close triple.
            with open(name, 'wb') as f:
                f.write((name+'\n').encode("utf-8"))
            os.stat(name)  # file must be reachable under its unicode name

    def tearDown(self):
        deltree(support.TESTFN)

    def _apply_failure(self, fn, filename, expected_exception,
                       check_fn_in_exception = True):
        """Call fn(filename) and require expected_exception to be raised,
        optionally checking the exception's filename attribute."""
        try:
            fn(filename)
            raise support.TestFailed("Expected to fail calling '%s(%r)'"
                    % (fn.__name__, filename))
        except expected_exception as details:
            if check_fn_in_exception and details.filename != filename:
                raise support.TestFailed("Function '%s(%r) failed with "
                        "bad filename in the exception: %r"
                        % (fn.__name__, filename,
                            details.filename))

    def test_failures(self):
        # Pass non-existing Unicode filenames all over the place.
        for name in self.files:
            name = "not_" + name
            self._apply_failure(open, name, IOError)
            self._apply_failure(os.stat, name, OSError)
            self._apply_failure(os.chdir, name, OSError)
            self._apply_failure(os.rmdir, name, OSError)
            self._apply_failure(os.remove, name, OSError)
            # listdir may append a wildcard to the filename, so dont check
            self._apply_failure(os.listdir, name, OSError, False)

    def test_open(self):
        # Re-open and overwrite every file under its unicode name.
        for name in self.files:
            with open(name, 'wb') as f:
                f.write((name+'\n').encode("utf-8"))
            os.stat(name)

    def test_listdir(self):
        f1 = os.listdir(support.TESTFN)
        # Listing via the encoded-then-decoded path must yield the same names.
        f2 = os.listdir(str(support.TESTFN.encode("utf-8"),
                            sys.getfilesystemencoding()))
        sf2 = set("\\".join((str(support.TESTFN), f))
                  for f in f2)
        # assertEqual replaces the long-deprecated failUnlessEqual alias
        # (removed entirely in Python 3.12).
        self.assertEqual(len(f1), len(self.files))
        self.assertEqual(sf2, set(self.files))

    def test_rename(self):
        for name in self.files:
            os.rename(name,"tmp")
            os.rename("tmp",name)

    def test_directory(self):
        dirname = os.path.join(support.TESTFN,'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
        filename = '\xdf-\u66e8\u66e9\u66eb'
        oldwd = os.getcwd()
        os.mkdir(dirname)
        os.chdir(dirname)
        try:
            with open(filename, 'wb') as f:
                f.write((filename + '\n').encode("utf-8"))
            os.access(filename,os.R_OK)  # NOTE: result deliberately unchecked
            os.remove(filename)
        finally:
            # Restore the cwd and remove the dir even if the body failed,
            # so tearDown's deltree() can run from a sane directory.
            os.chdir(oldwd)
            os.rmdir(dirname)
def test_main():
    # Run the suite; always clean up the scratch directory, even on failure.
    try:
        support.run_unittest(UnicodeFileTests)
    finally:
        deltree(support.TESTFN)
if __name__ == "__main__":
    test_main()
| [
"os.path.exists",
"os.listdir",
"sys.getfilesystemencoding",
"test.support.TestFailed",
"test.support.run_unittest",
"os.rename",
"os.access",
"os.path.join",
"os.getcwd",
"os.chdir",
"os.rmdir",
"unittest.SkipTest",
"os.mkdir",
"os.stat",
"test.support.TESTFN.encode",
"os.remove"
] | [((245, 288), 'unittest.SkipTest', 'unittest.SkipTest', (['"""test works only on NT+"""'], {}), "('test works only on NT+')\n", (262, 288), False, 'import sys, os, unittest\n'), ((834, 857), 'os.path.exists', 'os.path.exists', (['dirname'], {}), '(dirname)\n', (848, 857), False, 'import sys, os, unittest\n'), ((1042, 1059), 'os.rmdir', 'os.rmdir', (['dirname'], {}), '(dirname)\n', (1050, 1059), False, 'import sys, os, unittest\n'), ((1117, 1148), 'os.path.join', 'os.path.join', (['support.TESTFN', 'f'], {}), '(support.TESTFN, f)\n', (1129, 1148), False, 'import sys, os, unittest\n'), ((2972, 2998), 'os.listdir', 'os.listdir', (['support.TESTFN'], {}), '(support.TESTFN)\n', (2982, 2998), False, 'import sys, os, unittest\n'), ((3489, 3529), 'os.path.join', 'os.path.join', (['support.TESTFN', '"""Grüß-曨曩曫"""'], {}), "(support.TESTFN, 'Grüß-曨曩曫')\n", (3501, 3529), False, 'import sys, os, unittest\n'), ((3611, 3622), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3620, 3622), False, 'import sys, os, unittest\n'), ((3631, 3648), 'os.mkdir', 'os.mkdir', (['dirname'], {}), '(dirname)\n', (3639, 3648), False, 'import sys, os, unittest\n'), ((3657, 3674), 'os.chdir', 'os.chdir', (['dirname'], {}), '(dirname)\n', (3665, 3674), False, 'import sys, os, unittest\n'), ((3785, 3813), 'os.access', 'os.access', (['filename', 'os.R_OK'], {}), '(filename, os.R_OK)\n', (3794, 3813), False, 'import sys, os, unittest\n'), ((3821, 3840), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (3830, 3840), False, 'import sys, os, unittest\n'), ((3849, 3864), 'os.chdir', 'os.chdir', (['oldwd'], {}), '(oldwd)\n', (3857, 3864), False, 'import sys, os, unittest\n'), ((3873, 3890), 'os.rmdir', 'os.rmdir', (['dirname'], {}), '(dirname)\n', (3881, 3890), False, 'import sys, os, unittest\n'), ((3926, 3964), 'test.support.run_unittest', 'support.run_unittest', (['UnicodeFileTests'], {}), '(UnicodeFileTests)\n', (3946, 3964), False, 'from test import support\n'), ((1216, 1240), 'os.mkdir', 
'os.mkdir', (['support.TESTFN'], {}), '(support.TESTFN)\n', (1224, 1240), False, 'import sys, os, unittest\n'), ((1430, 1443), 'os.stat', 'os.stat', (['name'], {}), '(name)\n', (1437, 1443), False, 'import sys, os, unittest\n'), ((1675, 1760), 'test.support.TestFailed', 'support.TestFailed', (['("Expected to fail calling \'%s(%r)\'" % (fn.__name__, filename))'], {}), '("Expected to fail calling \'%s(%r)\'" % (fn.__name__,\n filename))\n', (1693, 1760), False, 'from test import support\n'), ((2916, 2929), 'os.stat', 'os.stat', (['name'], {}), '(name)\n', (2923, 2929), False, 'import sys, os, unittest\n'), ((3384, 3406), 'os.rename', 'os.rename', (['name', '"""tmp"""'], {}), "(name, 'tmp')\n", (3393, 3406), False, 'import sys, os, unittest\n'), ((3418, 3440), 'os.rename', 'os.rename', (['"""tmp"""', 'name'], {}), "('tmp', name)\n", (3427, 3440), False, 'import sys, os, unittest\n'), ((1004, 1032), 'os.path.join', 'os.path.join', (['dirname', 'fname'], {}), '(dirname, fname)\n', (1016, 1032), False, 'import sys, os, unittest\n'), ((3027, 3057), 'test.support.TESTFN.encode', 'support.TESTFN.encode', (['"""utf-8"""'], {}), "('utf-8')\n", (3048, 3057), False, 'from test import support\n'), ((3091, 3118), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (3116, 3118), False, 'import sys, os, unittest\n'), ((1925, 2063), 'test.support.TestFailed', 'support.TestFailed', (['("Function \'%s(%r) failed with bad filename in the exception: %r" % (fn.\n __name__, filename, details.filename))'], {}), '(\n "Function \'%s(%r) failed with bad filename in the exception: %r" % (fn.\n __name__, filename, details.filename))\n', (1943, 2063), False, 'from test import support\n')] |
import argparse
from pathlib import Path
import numpy as np
import yaml
# this script takes in a folder path and then recursively collects all
# results.yaml files in that directory. It averages them and prints
# summary statistics
parser = argparse.ArgumentParser(description="Analyze the results")
parser.add_argument("path", type=str, help="path to the folder containing the results")
args = parser.parse_args()
# Load every results.yaml found under the given root; track the union of
# all metric names seen (individual runs may report different metrics).
results = []
keys = set()
for path in Path(args.path).rglob("results.yaml"):
    with open(path, "r") as file:
        results.append(yaml.safe_load(file))
        keys = keys.union(results[-1].keys())
print(f"Found {len(results)} files with {len(keys)} different metrics\n")
# For each metric, aggregate over only the runs that reported it.
output = {}
for key in keys:
    vals = [result[key] for result in results if key in result]
    n = len(vals)
    mean = float(np.mean(vals))
    std = float(np.std(vals))  # population std (ddof=0)
    output[key] = {
        "N runs": n,
        "mean": round(mean, 3),
        "std": round(std, 3)
    }
print(yaml.dump(output))
"numpy.mean",
"argparse.ArgumentParser",
"pathlib.Path",
"yaml.dump",
"yaml.safe_load",
"numpy.std"
] | [((244, 302), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Analyze the results"""'}), "(description='Analyze the results')\n", (267, 302), False, 'import argparse\n'), ((988, 1005), 'yaml.dump', 'yaml.dump', (['output'], {}), '(output)\n', (997, 1005), False, 'import yaml\n'), ((459, 474), 'pathlib.Path', 'Path', (['args.path'], {}), '(args.path)\n', (463, 474), False, 'from pathlib import Path\n'), ((827, 840), 'numpy.mean', 'np.mean', (['vals'], {}), '(vals)\n', (834, 840), True, 'import numpy as np\n'), ((858, 870), 'numpy.std', 'np.std', (['vals'], {}), '(vals)\n', (864, 870), True, 'import numpy as np\n'), ((555, 575), 'yaml.safe_load', 'yaml.safe_load', (['file'], {}), '(file)\n', (569, 575), False, 'import yaml\n')] |
import random
import argparse
# TODO: Parse word lists from files
# Word pools used to assemble codenames: ADJECTIVE + NOUN, with an optional
# numeric suffix ({} is filled with a random digit by generate_tao_name).
words = {
    "codenames_adjective": [
        "quantum",
        "loud",
        "red",
        "blue",
        "green",
        "yellow",
        "irate",
        "angry",
        "peeved",
        "happy",
        "slimy",
        "sleepy",
        "junior",
        "slicker",
        "united",
        "somber",
        "bizarre",
        "odd",
        "weird",
        "wrong",
        "latent",
        "chilly",
        "strange",
        "loud",
        "silent",
        "hopping",
        "orange",
        "violet",
        "violent",
        "desolate",
        "lone",
        "cold",
        "solemn",
        "raging",
        "intelligent",
        "american",
    ],
    "codenames_noun": [
        "matrix",
        "wolf",
        "solace",
        "whisper",
        "felony",
        "moon",
        "sucker",
        "penguin",
        "waffle",
        "maestro",
        "night",
        "trinity",
        "deity",
        "monkey",
        "ark",
        "squirrel",
        "iron",
        "bounce",
        "farm",
        "chef",
        "trough",
        "net",
        "trawl",
        "glee",
        "water",
        "spork",
        "plow",
        "feed",
        "souffle",
        "route",
        "bagel",
        "montana",
        "analyst",
        "auto",
        "watch",
        "photo",
        "yard",
        "source",
        "monkey",
        "seagull",
        "toll",
        "spawn",
        "gopher",
        "chipmunk",
        "set",
        "calendar",
        "artist",
        "chaser",
        "scan",
        "tote",
        "beam",
        "entourage",
        "genesis",
        "walk",
        "spatula",
        "rage",
        "fire",
        "master"
    ],
    # Each suffix is a str.format template; {} receives a random digit.
    "codenames_suffix": [
        " {}000",
        "-II",
        " {}.0",
        " rev{}",
        "-HX",
        " v{}",
    ]
}
def parse_args():
    """Parse the command line; returns a namespace with ``num``."""
    arg_parser = argparse.ArgumentParser(
        description='Generate NSA TAO project names')
    arg_parser.add_argument(
        "-n", "--num",
        required=False, type=int, default=1,
        help="Number of project names to generate")
    return arg_parser.parse_args()
def pick_random(arr):
    """Return a uniformly random element of *arr*.

    Thin wrapper kept for API compatibility; ``random.choice`` already
    implements exactly this.  (Only visible behavior change: an empty
    sequence now raises IndexError — the stdlib convention — instead of
    randrange's ValueError; no caller in this file passes an empty list.)
    """
    return random.choice(arr)
def generate_tao_name():
    """Build one codename: upper-cased ADJECTIVE+NOUN, with a 1-in-5
    chance of appending a numeric suffix (digit drawn from 1-8)."""
    name = ""
    name += pick_random(words["codenames_adjective"]).upper()
    name += pick_random(words["codenames_noun"]).upper()
    # Hacky way to do 1/5
    if (random.randrange(5) == 4):
        suffix = pick_random(words["codenames_suffix"])
        # randrange(1, 9) yields 1..8 inclusive — 9 is never produced.
        suffix = suffix.format(random.randrange(1, 9))
        name += suffix
    return name
def main(args):
    # Print the requested number of codenames, one per line.
    for _ in range(args.num):
        print("%s" % generate_tao_name())
if __name__ == "__main__":
    main(parse_args())
"argparse.ArgumentParser",
"random.randrange"
] | [((1638, 1707), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate NSA TAO project names"""'}), "(description='Generate NSA TAO project names')\n", (1661, 1707), False, 'import argparse\n'), ((2122, 2141), 'random.randrange', 'random.randrange', (['(5)'], {}), '(5)\n', (2138, 2141), False, 'import random\n'), ((2236, 2258), 'random.randrange', 'random.randrange', (['(1)', '(9)'], {}), '(1, 9)\n', (2252, 2258), False, 'import random\n')] |
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# <NAME> <<EMAIL>>
# Mon 18 Nov 21:38:19 2013
"""Extension building for using this package
"""
import numpy
from pkg_resources import resource_filename
from bob.extension import Extension as BobExtension
# forward the build_ext command from bob.extension
from bob.extension import build_ext, Library as BobLibrary
from distutils.version import LooseVersion
class Extension(BobExtension):
  """Extension building with pkg-config packages and blitz.array.

  See the documentation for :py:class:`distutils.extension.Extension` for more
  details on input parameters.
  """

  def __init__(self, *args, **kwargs):
    """Initialize the extension with parameters.

    This extension adds ``blitz>=0.10`` as a requirement for extensions
    derived from this class.

    See the help for :py:class:`bob.extension.Extension` for more details on
    options.
    """
    # Every blitz-based extension needs these pkg-config packages; extend
    # rather than overwrite so caller-supplied packages are preserved.
    kwargs.setdefault('packages', []).extend(['blitz>=0.10', 'boost'])

    # numpy headers go in as *system* includes (warnings suppressed there);
    # our own shipped headers as regular includes.
    kwargs.setdefault('system_include_dirs', []).append(numpy.get_include())
    kwargs.setdefault('include_dirs', []).append(
        resource_filename(__name__, 'include'))

    extra_macros = [
        ("PY_ARRAY_UNIQUE_SYMBOL", "BOB_NUMPY_C_API"),
        ("NO_IMPORT_ARRAY", "1"),
        ]
    # Opt out of APIs deprecated in numpy >= 1.7.
    if LooseVersion(numpy.__version__) >= LooseVersion('1.7'):
      extra_macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
    kwargs.setdefault('define_macros', []).extend(extra_macros)

    # Run the constructor for the base class
    BobExtension.__init__(self, *args, **kwargs)
class Library (BobLibrary):
  """Pure C++ library building with blitz array.
  See the documentation for :py:class:`bob.extension.Extension` for more
  details on input parameters.
  """
  def __init__(self, *args, **kwargs):
    """Initialize the library with parameters.
    This library adds ``blitz>=0.10`` as a requirement for library derived
    from this class.
    See the help for :py:class:`bob.extension.Library` for more details on
    options.
    """
    # Required pkg-config packages; extended (not overwritten) so that
    # caller-supplied packages are preserved.
    require = ['blitz>=0.10', 'boost']
    kwargs.setdefault('packages', []).extend(require)
    # Our own shipped C++ headers plus the numpy headers (numpy's go in as
    # system includes so warnings inside them are suppressed).
    self_include_dir = resource_filename(__name__, 'include')
    kwargs.setdefault('system_include_dirs', []).append(numpy.get_include())
    kwargs.setdefault('include_dirs', []).append(self_include_dir)
    # TODO: are these macros required for pure C++ builds?
    macros = [
        ("PY_ARRAY_UNIQUE_SYMBOL", "BOB_NUMPY_C_API"),
        ("NO_IMPORT_ARRAY", "1"),
    ]
    # Opt out of APIs deprecated in numpy >= 1.7.
    if LooseVersion(numpy.__version__) >= LooseVersion('1.7'):
      macros.append(("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION"))
    kwargs.setdefault('define_macros', []).extend(macros)
    # Run the constructor for the base class
    BobLibrary.__init__(self, *args, **kwargs)
| [
"bob.extension.Extension.__init__",
"bob.extension.Library.__init__",
"pkg_resources.resource_filename",
"numpy.get_include",
"distutils.version.LooseVersion"
] | [((1037, 1075), 'pkg_resources.resource_filename', 'resource_filename', (['__name__', '"""include"""'], {}), "(__name__, 'include')\n", (1054, 1075), False, 'from pkg_resources import resource_filename\n'), ((1584, 1628), 'bob.extension.Extension.__init__', 'BobExtension.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (1605, 1628), True, 'from bob.extension import Extension as BobExtension\n'), ((2219, 2257), 'pkg_resources.resource_filename', 'resource_filename', (['__name__', '"""include"""'], {}), "(__name__, 'include')\n", (2236, 2257), False, 'from pkg_resources import resource_filename\n'), ((2825, 2867), 'bob.extension.Library.__init__', 'BobLibrary.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (2844, 2867), True, 'from bob.extension import build_ext, Library as BobLibrary\n'), ((1132, 1151), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (1149, 1151), False, 'import numpy\n'), ((1349, 1380), 'distutils.version.LooseVersion', 'LooseVersion', (['numpy.__version__'], {}), '(numpy.__version__)\n', (1361, 1380), False, 'from distutils.version import LooseVersion\n'), ((1384, 1403), 'distutils.version.LooseVersion', 'LooseVersion', (['"""1.7"""'], {}), "('1.7')\n", (1396, 1403), False, 'from distutils.version import LooseVersion\n'), ((2314, 2333), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (2331, 2333), False, 'import numpy\n'), ((2590, 2621), 'distutils.version.LooseVersion', 'LooseVersion', (['numpy.__version__'], {}), '(numpy.__version__)\n', (2602, 2621), False, 'from distutils.version import LooseVersion\n'), ((2625, 2644), 'distutils.version.LooseVersion', 'LooseVersion', (['"""1.7"""'], {}), "('1.7')\n", (2637, 2644), False, 'from distutils.version import LooseVersion\n')] |
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn
class Gazebo(object):
    """Robot Framework test library for the Gazebo simulator
    See also http://gazebosim.org/tutorials/?tut=ros_comm
    == Table of contents ==
    %TOC%
    """
    ROBOT_LIBRARY_SCOPE = 'SUITE'

    def __init__(self):
        # All keywords delegate to the ROS helper library registered with Robot Framework.
        self.ros_lib = BuiltIn().get_library_instance('RosGazeboLibrary.ROS')

    # Create and destroy models in simulation
    # http://gazebosim.org/tutorials/?tut=ros_comm#Services:Createanddestroymodelsinsimulation
    def _spawn_model(self, file_path, format_flag, position, model_name):
        # Shared implementation for URDF/SDF spawning via the gazebo_ros spawn_model script.
        arguments = [
            '-file', file_path,
            format_flag,
            '-model', model_name,
            '-x', position[0],
            '-y', position[1],
            '-z', position[2],
        ]
        return self.ros_lib.rosrun('gazebo_ros', 'spawn_model', *arguments)

    @keyword
    def spawn_urdf_model(self, urdf_path: str, position: tuple, model_name: str):
        ''' TODO: Refactor to use service call '''
        return self._spawn_model(urdf_path, '-urdf', position, model_name)

    @keyword
    def spawn_sdf_model(self, sdf_path: str, position: tuple, model_name: str):
        ''' TODO: Refactor to use service call '''
        return self._spawn_model(sdf_path, '-sdf', position, model_name)

    @keyword
    def delete_model(self, model_name: str):
        ''' Delete a model from simulation
        http://gazebosim.org/tutorials/?tut=ros_comm#DeleteModel
        '''
        payload = {'model_name': model_name}
        return self.ros_lib.rosservice_call(
            'gazebo/delete_model', 'gazebo_msgs/DeleteModel', payload)

    # State and property setters
    # http://gazebosim.org/tutorials/?tut=ros_comm#Services:Stateandpropertysetters
    ''' TODO
    def set_link_properties(self, ...):
    def set_physics_properties(self, ...):
    def set_model_state(self, ...):
    def set_model_configuration(self, ...):
    def set_joint_properties(self, ...):
    def set_link_state(self, ...):
    '''

    # State and property getters
    # http://gazebosim.org/tutorials/?tut=ros_comm#Services:Stateandpropertygetters
    @keyword
    def get_model_properties(self, model_name: str):
        '''Query the properties of a spawned model.'''
        payload = {'model_name': model_name}
        return self.ros_lib.rosservice_call(
            'gazebo/get_model_properties', 'gazebo_msgs/GetModelProperties', payload)

    @keyword
    def get_model_state(self, model_name: str):
        '''Query the current state of a spawned model.'''
        payload = {'model_name': model_name}
        return self.ros_lib.rosservice_call(
            'gazebo/get_model_state', 'gazebo_msgs/GetModelState', payload)

    ''' TODO
    def get_world_properties(self, ...):
    def get_joint_properties(self, ...):
    def get_link_properties(self, ...):
    def get_link_state(self, ...):
    def get_physics_properties(self, ...):
    def link_states(self, ...): # investigate
    def model_states(self, ...): # investigate
    '''

    # Force control
    # http://gazebosim.org/tutorials/?tut=ros_comm#Services:Forcecontrol
    ''' TODO
    /gazebo/apply_body_wrench
    /gazebo/apply_joint_effort
    /gazebo/clear_body_wrenches
    /gazebo/clear_joint_forces
    '''

    # Simulation control
    # http://gazebosim.org/tutorials/?tut=ros_comm#Services:Simulationcontrol
    @keyword
    def reset_simulation(self):
        '''Reset the whole simulation, including simulation time.'''
        return self.ros_lib.rosservice_call('/gazebo/reset_simulation')

    @keyword
    def reset_world(self):
        '''Reset model poses without resetting simulation time.'''
        return self.ros_lib.rosservice_call('/gazebo/reset_world')

    @keyword
    def pause_physics(self):
        return self.ros_lib.rosservice_call('/gazebo/pause_physics')

    @keyword
    def unpause_physics(self):
        return self.ros_lib.rosservice_call('/gazebo/unpause_physics')

    # Undocumented services
    # Found via `rosservice list`
    '''
    /gazebo/delete_light
    /gazebo/get_light_properties
    /gazebo/get_loggers
    /gazebo/set_light_properties
    /gazebo/set_logger_level
    /gazebo/set_parameters
    /gazebo_gui/get_loggers
    /gazebo_gui/set_logger_level
    '''

    # Convenience keywords
    @keyword
    def launch_empty_world(self):
        return self.ros_lib.roslaunch('gazebo_ros', 'empty_world.launch')
"robot.libraries.BuiltIn.BuiltIn"
] | [((353, 362), 'robot.libraries.BuiltIn.BuiltIn', 'BuiltIn', ([], {}), '()\n', (360, 362), False, 'from robot.libraries.BuiltIn import BuiltIn\n')] |
import unittest
import zserio
from testutils import getZserioApi
class Bit4RangeCheckTest(unittest.TestCase):
    """Range-check tests for a zserio bit:4 field generated with -withRangeCheckCode."""

    @classmethod
    def setUpClass(cls):
        api = getZserioApi(__file__, "with_range_check_code.zs",
                           extraArgs=["-withRangeCheckCode"])
        cls.api = api.bit4_range_check

    def testBit4LowerBound(self):
        self._checkBit4Value(BIT4_LOWER_BOUND)

    def testBit4UpperBound(self):
        self._checkBit4Value(BIT4_UPPER_BOUND)

    def testBit4BelowLowerBound(self):
        # One below the minimum must be rejected by the generated range check.
        with self.assertRaises(zserio.PythonRuntimeException):
            self._checkBit4Value(BIT4_LOWER_BOUND - 1)

    def testBit4AboveUpperBound(self):
        # One above the maximum must be rejected by the generated range check.
        with self.assertRaises(zserio.PythonRuntimeException):
            self._checkBit4Value(BIT4_UPPER_BOUND + 1)

    def _checkBit4Value(self, value):
        # Round-trip the value through serialization and verify equality.
        compound = self.api.Bit4RangeCheckCompound(value_=value)
        encoded = zserio.serialize(compound)
        decoded = zserio.deserialize(self.api.Bit4RangeCheckCompound, encoded)
        self.assertEqual(compound, decoded)

BIT4_LOWER_BOUND = 0
BIT4_UPPER_BOUND = 15
| [
"zserio.serialize",
"zserio.deserialize",
"testutils.getZserioApi"
] | [((924, 964), 'zserio.serialize', 'zserio.serialize', (['bit4RangeCheckCompound'], {}), '(bit4RangeCheckCompound)\n', (940, 964), False, 'import zserio\n'), ((1002, 1064), 'zserio.deserialize', 'zserio.deserialize', (['self.api.Bit4RangeCheckCompound', 'bitBuffer'], {}), '(self.api.Bit4RangeCheckCompound, bitBuffer)\n', (1020, 1064), False, 'import zserio\n'), ((172, 262), 'testutils.getZserioApi', 'getZserioApi', (['__file__', '"""with_range_check_code.zs"""'], {'extraArgs': "['-withRangeCheckCode']"}), "(__file__, 'with_range_check_code.zs', extraArgs=[\n '-withRangeCheckCode'])\n", (184, 262), False, 'from testutils import getZserioApi\n')] |
import numpy as np
import math
import time
class PulsedProgramming:
    """
    This class contains all the parameters for the Pulsed programming on a memristor model.
    After initializing the parameters values, start the simulation with self.simulate()

    Parameters
    ----------
    max_voltage : float
        The max voltage (V) of a pulse. If 0, no limit is apply.
    pulse_algorithm : string
        The pulse algorithm use. Those are the available choices (Sources in the methods). Default is 'fabien'.
        'fabien' : Use fabien_convergence()
        'log' : Use a log_convergence()
    tolerance : float
        The tolerance_value input is an int that represent the absolute tolerance (Ohm) from the res_states the
        pulsed programming will find. Smaller is more precise, but too small can never converge.
    is_relative_tolerance : bool
        If true, the tolerance_value would be in percentage instead of (Ohm). ex: 10 : if true, 10% : if false, 10 Ohm
    variability_write : iterable[float]
        A gaussian distribution with (mu=0, sigma=variance_write)
    index_variability : int
        Index of the current variability. If over 1000, reset to 0.
    variance_write : float
        Variance of the gaussian distribution on the memristor write. See variability.
    graph_resistance : List[Union[float, int]]
        Contains all resistance of the simulation. It's used in the creation of plots.
    graph_voltages : List[Union[float, int]]
        Contains all voltages of the simulation. It's used in the creation of plots.
    number_of_reading : int
        The number of correct value read before passing to the next state.
    max_pulse : int
        The max number of pulses.
    verbose : bool
        If true, print progress and timing information during the simulation.
    plot_memristor : int
        Index of the memristor whose pulses are recorded into graph_resistance/graph_voltages.
    """
    def __init__(self, memristor_simulation, pulse_algorithm='fabien', max_voltage=0, tolerance=0, is_relative_tolerance=False,
                 variance_write=0, number_of_reading=1, max_pulse=20000, verbose=False, plot_memristor=0):
        self.memristor_simulation = memristor_simulation
        self.pulse_algorithm = pulse_algorithm
        self.tolerance = tolerance
        self.max_voltage = max_voltage
        self.is_relative_tolerance = is_relative_tolerance
        self.variance_write = variance_write
        self.number_of_reading = number_of_reading
        self.max_pulse = max_pulse
        self.verbose = verbose
        self.voltage_output = {}
        self.plot_memristor = plot_memristor

        self.index_variability = 0
        # Pre-drawn pool of 1000 write-variability samples, cycled through by index_variability.
        self.variability_write = np.random.normal(0, variance_write, 1000)
        self.graph_resistance = []
        self.graph_voltages = []

    def print(self):
        """Dump the current configuration and recorded graphs to stdout (debug helper)."""
        print(self.pulse_algorithm)
        print(self.tolerance)
        print(self.max_voltage)
        print(self.voltage_output)
        print(self.is_relative_tolerance)
        print(self.variance_write)
        print(self.number_of_reading)
        print(self.max_pulse)
        print(self.verbose)
        print(np.array(self.graph_resistance))
        print(np.array(self.graph_voltages))

    def write_resistance(self, memristor, voltage, t_pulse):
        """
        This function change the resistance of the memristor by applying a voltage fo t_pulse.

        Parameters
        ----------
        memristor : Memristor
            The memristor wrote.
        voltage : float
            The voltage (V) applied.
        t_pulse : float
            The time of the writing pulse. (s)

        Returns
        ----------
        """
        t = int(t_pulse / memristor.time_series_resolution)
        signal = [voltage] * t
        memristor.simulate(signal)

        # Cycle through the pre-drawn variability pool and perturb the written conductance.
        self.index_variability = self.index_variability + 1 if self.index_variability < len(self.variability_write) - 1 else 0
        memristor.g = 1 / (1 / memristor.g + (1 / memristor.g) * self.variability_write[self.index_variability])

    def find_number_iteration(self):
        """
        This function find the number of iteration needed to create the resistance list depending on the distribution type

        NOTE(review): relies on self.distribution_type and self.circuit, which are not
        set in __init__ — confirm this method is still in use before calling it.

        Returns
        ----------
        number_iteration : int
            number of iteration
        """
        number_iteration = 1
        if self.distribution_type == 'full_spread':
            number_iteration = self.circuit.number_of_memristor
        return number_iteration

    def simulate(self, voltages_target, precision=None):
        """
        This function will set the memristors to the resistance wanted in each voltages_target package.

        Parameters
        ----------
        voltages_target : dict
            dict with keys as voltage and package as list of resistance

        precision : list
            [[macro_tune, is_relative_variability], [fine_tune, is_relative_variability]] for the balance() method.

        Returns
        ----------
        voltage_output : dict
            Maps each achieved output voltage to the list of read resistances.
        """
        if self.pulse_algorithm != 'fabien' and self.pulse_algorithm != 'log':
            raise(Exception(f'Pulse algorithm not supported: {self.pulse_algorithm}'))
        index = 1
        conf_done = 0
        start_time = time.time()
        diff_voltage = {}
        for v in list(voltages_target.keys()):
            if index == 1:
                start_time_ = time.time()
            self.simulate_list_memristor(voltages_target[v], precision)
            self.voltage_output[self.memristor_simulation.circuit.current_v_out()] = [i.read() for i in self.memristor_simulation.circuit.list_memristor]
            # Record |target - achieved| voltage along with ideal vs. achieved equivalent resistance.
            diff_voltage[abs(v - self.memristor_simulation.circuit.current_v_out())] = [round(1 / np.sum([1/res for res in voltages_target[v]]), 4), round(1 / self.memristor_simulation.circuit.current_conductance(), 4)]
            if index == 50 and self.verbose:
                conf_done += index
                print(f'Conf done: {conf_done}\tTook: {round(time.time() - start_time_, 2)} s\tTime left: {round((time.time() - start_time_) * (len(voltages_target.keys()) - conf_done) / 50, 2)} s')
                index = 0
            index += 1
        if self.verbose:
            print(f'Total time: {time.time() - start_time}')
            print()
            for key in diff_voltage.keys():
                print(f'{round(key*1000, 4)} mV\t{diff_voltage.get(key)[0]}\t{diff_voltage.get(key)[1]} (Ohm)')
            print(f'Mean diff: {np.mean(list(diff_voltage.keys()))}')
            print(f'Min diff: {np.min(list(diff_voltage.keys()))}\tMax diff: {np.max(list(diff_voltage.keys()))}')

        return self.voltage_output

    def simulate_list_memristor(self, list_resistance, precision):
        """
        This function will set the memristors to the resistance wanted list_resistance.

        Parameters
        ----------
        list_resistance : list
            list of the wanted resistance for the memristor.

        precision : list
            [[macro_tune, is_relative_variability], [fine_tune, is_relative_variability]] for the balance() method.
        """
        for i in range(self.memristor_simulation.circuit.number_of_memristor):
            plot = True if i == self.plot_memristor else False
            if self.pulse_algorithm == 'fabien':
                self.fabien_convergence(self.memristor_simulation.circuit.list_memristor[i], list_resistance[i], plot=plot)
            elif self.pulse_algorithm == 'log':
                self.log_convergence(self.memristor_simulation.circuit.list_memristor[i], list_resistance[i], plot=plot)
        self.balance(list_resistance, precision)

    def balance(self, list_resistance, precision):
        """
        This function will set the memristors to the resistance wanted list_resistance.

        Parameters
        ----------
        list_resistance : list
            list of the wanted resistance for the memristor.

        precision : list
            [[macro_tune, is_relative_variability], [fine_tune, is_relative_variability]] for the balance() method. If 0,
            won't do it.
        """
        final_g = np.sum([1 / i for i in list_resistance])
        delta_g = final_g - self.memristor_simulation.circuit.current_conductance()
        for i in range(self.memristor_simulation.circuit.number_of_memristor):
            plot = True if -(i+1) == self.plot_memristor else False
            final_res = 1 / (self.memristor_simulation.circuit.list_memristor[-(i+1)].g + delta_g)
            if self.memristor_simulation.circuit.memristor_model.r_on <= final_res <= self.memristor_simulation.circuit.memristor_model.r_off:
                p_tolerance, p_relative = self.tolerance, self.is_relative_tolerance
                # BUGFIX: the None-check must come first (subscripting None raised TypeError),
                # and the tuning steps must be skipped when the requested precision is 0,
                # per the docstring ("If 0, won't do it").
                if precision is not None and precision[0][0] != 0:
                    self.tolerance, self.is_relative_tolerance = precision[0][0], precision[0][1]
                    self.fabien_convergence(self.memristor_simulation.circuit.list_memristor[-(i+1)], final_res, plot)
                if precision is not None and precision[1][0] != 0:
                    self.tolerance, self.is_relative_tolerance = precision[1][0], precision[1][1]
                    self.small_convergence(self.memristor_simulation.circuit.list_memristor[-(i+1)], final_res, plot)
                self.tolerance, self.is_relative_tolerance = p_tolerance, p_relative
                break

    def small_convergence(self, memristor, target_res, plot=False):
        """
        This function run the pulsed programming with a variable voltage to set the target_res for the memristor with a
        really small increment.

        Parameters
        ----------
        memristor : Memristor
            The memristor object

        target_res : float
            The target resistance
        """
        step = 0.001
        positive_voltage = voltage_set = 0.1
        negative_voltage = voltage_reset = -0.1
        if self.is_relative_tolerance:
            res_max = target_res + self.tolerance * target_res / 100
            res_min = target_res - self.tolerance * target_res / 100
        else:
            res_max = target_res + self.tolerance
            res_min = target_res - self.tolerance

        start_len_res = len(self.graph_resistance)
        start_len_v = len(self.graph_voltages)
        counter = 0
        action = 'read'
        flag_finish = False
        counter_read = 0

        while not flag_finish:
            current_res = memristor.read()
            if res_min <= current_res <= res_max:
                counter_read += 1
                if plot:
                    action = 'read'
                    self.graph_voltages.append([0.2, counter + start_len_v, action])
            elif current_res < res_min:
                # Resistance too low: apply an increasingly negative reset pulse.
                if self.max_voltage != 0:
                    negative_voltage = -self.max_voltage if negative_voltage <= -self.max_voltage else negative_voltage
                self.write_resistance(memristor, negative_voltage, 200e-9)
                if plot:
                    action = 'reset'
                    self.graph_voltages.append([negative_voltage, counter + start_len_v, action])
                negative_voltage -= step
                positive_voltage = voltage_set
            elif current_res > res_max:
                # Resistance too high: apply an increasingly positive set pulse.
                if self.max_voltage != 0:
                    positive_voltage = self.max_voltage if positive_voltage >= self.max_voltage else positive_voltage
                self.write_resistance(memristor, positive_voltage, 200e-9)
                if plot:
                    action = 'set'
                    self.graph_voltages.append([positive_voltage, counter + start_len_v, action])
                positive_voltage += step
                negative_voltage = voltage_reset

            if counter_read == self.number_of_reading:
                flag_finish = not flag_finish
            if counter >= self.max_pulse:
                flag_finish = not flag_finish
                print(f'Got max pulse {self.max_pulse}')
            if plot:
                self.graph_resistance.append([current_res, counter + start_len_res, action, flag_finish])
            counter += 1

    def log_convergence(self, memristor, target_res, plot=False):
        """
        This function run the pulsed programming with a variable voltage to set the target_res for the memristor.
        From : https://arxiv.org/abs/2103.09931

        Parameters
        ----------
        memristor : Memristor
            The memristor object

        target_res : float
            The target resistance
        """
        positive_voltage = voltage_set = 0.5
        negative_voltage = voltage_reset = -0.5
        # additional parameters
        min_shift = 0.005
        max_shift = 0.2
        a = 0.1

        if self.is_relative_tolerance:
            res_max = target_res + self.tolerance * target_res / 100
            res_min = target_res - self.tolerance * target_res / 100
        else:
            res_max = target_res + self.tolerance
            res_min = target_res - self.tolerance

        start_len_res = len(self.graph_resistance)
        start_len_v = len(self.graph_voltages)
        counter = 0
        action = 'read'
        flag_finish = False
        counter_read = 0
        r_shift = 1

        current_res = memristor.read()
        while not flag_finish:
            if res_min < current_res < res_max:
                counter_read += 1
                if plot:
                    action = 'read'
                    self.graph_voltages.append([0.2, counter + start_len_v, action])
            elif current_res > res_max:
                # Amplitude grows logarithmically with remaining error when shifts get small.
                if r_shift < min_shift * (memristor.r_off - memristor.r_on):
                    positive_voltage += a * np.log10(abs(target_res - current_res) / r_shift)
                elif r_shift > max_shift * (memristor.r_off - memristor.r_on):
                    positive_voltage = voltage_set
                if self.max_voltage != 0:
                    positive_voltage = self.max_voltage if positive_voltage >= self.max_voltage else positive_voltage
                self.write_resistance(memristor, positive_voltage, 200e-9)
                if plot:
                    action = 'set'
                    self.graph_voltages.append([positive_voltage, counter + start_len_v, action])
            elif current_res < res_min:
                if r_shift < min_shift * (memristor.r_off - memristor.r_on):
                    negative_voltage -= a * np.log10(abs((target_res - current_res) / r_shift))
                elif r_shift > max_shift * (memristor.r_off - memristor.r_on):
                    negative_voltage = voltage_reset
                if self.max_voltage != 0:
                    negative_voltage = -self.max_voltage if negative_voltage <= -self.max_voltage else negative_voltage
                self.write_resistance(memristor, negative_voltage, 200e-9)
                if plot:
                    action = 'reset'
                    self.graph_voltages.append([negative_voltage, counter + start_len_v, action])

            if counter_read == self.number_of_reading:
                flag_finish = not flag_finish
            if counter >= self.max_pulse:
                flag_finish = not flag_finish
                print('Got max pulse')
            if plot:
                self.graph_resistance.append([current_res, counter + start_len_res, action, flag_finish])
            counter += 1
            previous_res = current_res
            current_res = memristor.read()
            # Guard against a zero shift to avoid division by zero in the log term.
            r_shift = abs(current_res - previous_res) if abs(current_res - previous_res) != 0 else 1

    def fabien_convergence(self, memristor, target_res, plot=False):
        """
        This function run the pulsed programming with a variable voltage to set the target_res for the memristor.
        From : https://iopscience.iop.org/article/10.1088/0957-4484/23/7/075201

        Parameters
        ----------
        memristor : Memristor
            The memristor object

        target_res : float
            The target resistance
        """
        step = 0.005
        positive_voltage = voltage_set = 0.5
        negative_voltage = voltage_reset = -0.5

        if self.is_relative_tolerance:
            res_max = target_res + self.tolerance * target_res / 100
            res_min = target_res - self.tolerance * target_res / 100
        else:
            res_max = target_res + self.tolerance
            res_min = target_res - self.tolerance

        start_len_res = len(self.graph_resistance)
        start_len_v = len(self.graph_voltages)
        counter = 0
        action = 'read'
        flag_finish = False
        counter_read = 0

        while not flag_finish:
            current_res = memristor.read()
            if res_min <= current_res <= res_max:
                counter_read += 1
                if plot:
                    action = 'read'
                    self.graph_voltages.append([0.2, counter + start_len_v, action])
            elif current_res < res_min:
                if self.max_voltage != 0:
                    negative_voltage = -self.max_voltage if negative_voltage <= -self.max_voltage else negative_voltage
                self.write_resistance(memristor, negative_voltage, 200e-9)
                if plot:
                    action = 'reset'
                    self.graph_voltages.append([negative_voltage, counter + start_len_v, action])
                negative_voltage -= step
                positive_voltage = voltage_set
            elif current_res > res_max:
                if self.max_voltage != 0:
                    positive_voltage = self.max_voltage if positive_voltage >= self.max_voltage else positive_voltage
                self.write_resistance(memristor, positive_voltage, 200e-9)
                if plot:
                    action = 'set'
                    self.graph_voltages.append([positive_voltage, counter + start_len_v, action])
                positive_voltage += step
                negative_voltage = voltage_reset

            if counter_read == self.number_of_reading:
                flag_finish = not flag_finish
            if counter >= self.max_pulse:
                flag_finish = not flag_finish
                print('Got max pulse')
            if plot:
                self.graph_resistance.append([current_res, counter + start_len_res, action, flag_finish])
            counter += 1
| [
"numpy.random.normal",
"numpy.sum",
"numpy.array",
"time.time"
] | [((2523, 2564), 'numpy.random.normal', 'np.random.normal', (['(0)', 'variance_write', '(1000)'], {}), '(0, variance_write, 1000)\n', (2539, 2564), True, 'import numpy as np\n'), ((5167, 5178), 'time.time', 'time.time', ([], {}), '()\n', (5176, 5178), False, 'import time\n'), ((8053, 8095), 'numpy.sum', 'np.sum', (['[(1 / i) for i in list_resistance]'], {}), '([(1 / i) for i in list_resistance])\n', (8059, 8095), True, 'import numpy as np\n'), ((2976, 3007), 'numpy.array', 'np.array', (['self.graph_resistance'], {}), '(self.graph_resistance)\n', (2984, 3007), True, 'import numpy as np\n'), ((3023, 3052), 'numpy.array', 'np.array', (['self.graph_voltages'], {}), '(self.graph_voltages)\n', (3031, 3052), True, 'import numpy as np\n'), ((5309, 5320), 'time.time', 'time.time', ([], {}), '()\n', (5318, 5320), False, 'import time\n'), ((5645, 5694), 'numpy.sum', 'np.sum', (['[(1 / res) for res in voltages_target[v]]'], {}), '([(1 / res) for res in voltages_target[v]])\n', (5651, 5694), True, 'import numpy as np\n'), ((6154, 6165), 'time.time', 'time.time', ([], {}), '()\n', (6163, 6165), False, 'import time\n'), ((5908, 5919), 'time.time', 'time.time', ([], {}), '()\n', (5917, 5919), False, 'import time\n'), ((5961, 5972), 'time.time', 'time.time', ([], {}), '()\n', (5970, 5972), False, 'import time\n')] |
import cv2
import numpy as np
from scipy.interpolate import UnivariateSpline
class Cool(object):
    """cool_filter ---
    Applies a cool (sky-blue) effect: the first colour channel is
    brightened, the third is darkened, and saturation is reduced.
    """

    def __init__(self):
        # Build the two tone curves once: one brightens a channel, one darkens it.
        anchors = [0, 64, 128, 192, 256]
        self.increaseChannel = self.LUT_8UC1(anchors, [0, 70, 140, 210, 256])
        self.decreaseChannel = self.LUT_8UC1(anchors, [0, 30, 80, 120, 192])

    def resize(self, image, window_height=500):
        # Scale the image to the requested height while keeping its aspect ratio.
        aspect_ratio = float(image.shape[1]) / float(image.shape[0])
        window_width = window_height / aspect_ratio
        return cv2.resize(image, (int(window_height), int(window_width)))

    def render(self, img_rgb):
        image = cv2.imread(img_rgb)
        image = self.resize(image, 500)
        #cv2.imshow("Original", image)

        # Brighten the first channel, darken the third.
        chan0, chan1, chan2 = cv2.split(image)
        chan0 = cv2.LUT(chan0, self.increaseChannel).astype(np.uint8)
        chan2 = cv2.LUT(chan2, self.decreaseChannel).astype(np.uint8)
        image = cv2.merge((chan0, chan1, chan2))

        # saturation decreased
        h, s, v = cv2.split(cv2.cvtColor(image, cv2.COLOR_RGB2HSV))
        s = cv2.LUT(s, self.decreaseChannel).astype(np.uint8)
        return cv2.cvtColor(cv2.merge((h, s, v)), cv2.COLOR_HSV2RGB)

    def LUT_8UC1(self, x, y):
        """Build a 256-entry look-up table from control points via spline interpolation."""
        curve = UnivariateSpline(x, y)
        return curve(range(256))

    def start(self, img_path):
        worker = Cool()  # make a temporary object
        file_name = img_path  # File_name will come here
        result = worker.render(file_name)
        cv2.imwrite("Cool_version.jpg", result)
        cv2.imshow("Cool version", result)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
        print("Image saved as 'Cool_version.jpg'")
| [
"cv2.imwrite",
"cv2.merge",
"cv2.LUT",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.split",
"scipy.interpolate.UnivariateSpline",
"cv2.cvtColor",
"cv2.imread"
] | [((797, 816), 'cv2.imread', 'cv2.imread', (['img_rgb'], {}), '(img_rgb)\n', (807, 816), False, 'import cv2\n'), ((900, 918), 'cv2.split', 'cv2.split', (['img_rgb'], {}), '(img_rgb)\n', (909, 918), False, 'import cv2\n'), ((1043, 1063), 'cv2.merge', 'cv2.merge', (['(r, g, b)'], {}), '((r, g, b))\n', (1052, 1063), False, 'import cv2\n'), ((1371, 1393), 'scipy.interpolate.UnivariateSpline', 'UnivariateSpline', (['x', 'y'], {}), '(x, y)\n', (1387, 1393), False, 'from scipy.interpolate import UnivariateSpline\n'), ((1583, 1619), 'cv2.imwrite', 'cv2.imwrite', (['"""Cool_version.jpg"""', 'res'], {}), "('Cool_version.jpg', res)\n", (1594, 1619), False, 'import cv2\n'), ((1622, 1653), 'cv2.imshow', 'cv2.imshow', (['"""Cool version"""', 'res'], {}), "('Cool version', res)\n", (1632, 1653), False, 'import cv2\n'), ((1656, 1670), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1667, 1670), False, 'import cv2\n'), ((1673, 1696), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1694, 1696), False, 'import cv2\n'), ((1108, 1148), 'cv2.cvtColor', 'cv2.cvtColor', (['img_rgb', 'cv2.COLOR_RGB2HSV'], {}), '(img_rgb, cv2.COLOR_RGB2HSV)\n', (1120, 1148), False, 'import cv2\n'), ((1230, 1250), 'cv2.merge', 'cv2.merge', (['(h, s, v)'], {}), '((h, s, v))\n', (1239, 1250), False, 'import cv2\n'), ((925, 957), 'cv2.LUT', 'cv2.LUT', (['r', 'self.increaseChannel'], {}), '(r, self.increaseChannel)\n', (932, 957), False, 'import cv2\n'), ((981, 1013), 'cv2.LUT', 'cv2.LUT', (['b', 'self.decreaseChannel'], {}), '(b, self.decreaseChannel)\n', (988, 1013), False, 'import cv2\n'), ((1156, 1188), 'cv2.LUT', 'cv2.LUT', (['s', 'self.decreaseChannel'], {}), '(s, self.decreaseChannel)\n', (1163, 1188), False, 'import cv2\n')] |
import torch
from metrics.swd import sliced_wasserstein_distance
from evaluators.sample_evaluators.base_sample_evaluator import BaseSampleEvaluator
from noise_creator import NoiseCreator
class SWDSampleEvaluator(BaseSampleEvaluator):
    """Scores a sample by its sliced Wasserstein distance to freshly drawn noise."""

    def __init__(self, noise_creator: NoiseCreator):
        self.__noise_creator = noise_creator

    def evaluate(self, sample: torch.Tensor) -> torch.Tensor:
        # Draw a reference batch of the same size/dtype/device as the sample.
        reference = self.__noise_creator.create(sample.size(0)).type_as(sample)
        return sliced_wasserstein_distance(sample, reference, 50)
| [
"metrics.swd.sliced_wasserstein_distance"
] | [((528, 587), 'metrics.swd.sliced_wasserstein_distance', 'sliced_wasserstein_distance', (['sample', 'comparision_sample', '(50)'], {}), '(sample, comparision_sample, 50)\n', (555, 587), False, 'from metrics.swd import sliced_wasserstein_distance\n')] |
# Generated by Django 4.0.3 on 2022-04-02 17:24
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates billing unit-price defaults and the
    # M-Pesa update-status choices in the 'utils' app. Do not hand-edit the
    # field definitions; regenerate with makemigrations if the models change.

    dependencies = [
        ('utils', '0001_initial'),
    ]

    operations = [
        # Electricity billing: unit price default 24.18, 9 digits / 2 decimals.
        migrations.AlterField(
            model_name='electricitybilling',
            name='unit_price',
            field=models.DecimalField(decimal_places=2, default=24.18, max_digits=9),
        ),
        # M-Pesa online payment: restrict update_status to 'recieved'/'updated'.
        migrations.AlterField(
            model_name='mpesaonline',
            name='update_status',
            field=models.CharField(choices=[('recieved', 'Recieved'), ('updated', 'Updated')], default='recieved', max_length=10),
        ),
        # Water billing: unit price default 53.0 with an explicit verbose name.
        migrations.AlterField(
            model_name='waterbilling',
            name='unit_price',
            field=models.DecimalField(decimal_places=2, default=53.0, max_digits=9, verbose_name='Unit Price (KES)'),
        ),
    ]
| [
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((339, 405), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'default': '(24.18)', 'max_digits': '(9)'}), '(decimal_places=2, default=24.18, max_digits=9)\n', (358, 405), False, 'from django.db import migrations, models\n'), ((539, 654), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('recieved', 'Recieved'), ('updated', 'Updated')]", 'default': '"""recieved"""', 'max_length': '(10)'}), "(choices=[('recieved', 'Recieved'), ('updated', 'Updated')],\n default='recieved', max_length=10)\n", (555, 654), False, 'from django.db import migrations, models\n'), ((782, 884), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'default': '(53.0)', 'max_digits': '(9)', 'verbose_name': '"""Unit Price (KES)"""'}), "(decimal_places=2, default=53.0, max_digits=9,\n verbose_name='Unit Price (KES)')\n", (801, 884), False, 'from django.db import migrations, models\n')] |
"""
The roseguarden project
Copyright (C) 2018-2020 <NAME>,
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""
__authors__ = ["<NAME>"]
__contact__ = "<EMAIL>"
__credits__ = []
__license__ = "GPLv3"
import copy
from core.common.objDict import ObjDict
class DataView(object):
    """Build up data-views.

    Collects typed property and metadata descriptors, per-property update
    handlers and at most one key property identifying an entry.
    """
    # When True the view is disabled.
    disable = False
    # Clients must be logged in to request this view.
    requireLogin = True
    requirePermission = None  # a permission is required in the meaning of one of the following
def __init__(self, name=None, uri=None):
self.description = 'UNKNOWN'
if name is None:
self.name = type(self).__name__
else:
self.name = name
if uri is None:
self.uri = self.name
else:
self.uri = uri
self.requireLogin = True
self.properties = []
self.metadata = []
self.dataAction = {}
self.dataUpdateHandler = {}
self.dataUpdates = []
self.dataSyncs = []
self.entrykey = None
self.entrytype = None
self.entryPropList = {}
self.metaDataList = {}
def createMeta(self):
return ObjDict(self.metaDataList.copy())
def addMailMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'email'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addStringMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'string'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addIntegerMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'integer'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addDoubleMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'double'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addBooleanMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'boolean'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addTimeMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'time'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addDateMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'date'}
self.metaDataList[name] = None
self.metadata.append(meta)
def addDatetimeMeta(self, name, label="", group=None, description=""):
meta = {'name': name, 'label': label, 'type': 'datetime'}
self.metaDataList[name] = None
self.metadata.append(meta)
def createEntry(self):
return ObjDict(self.entryPropList.copy())
def getProperties(self):
properties = []
for p in self.properties:
pn = copy.copy(p)
if pn['type'] == 'multiselect' or pn['type'] == 'select':
try:
if callable(p['selection']):
pn['selection'] = p['selection']()
else:
pn['selection'] = p['selection']
except Exception as e:
raise e
properties.append(pn)
return properties
def addMailProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'email'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addStringProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description="",
hide=False):
prop = {'name': name, 'label': label, 'type': 'string'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['hide'] = hide
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addDoubleProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'double'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addIntegerProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description="",
hide=False):
prop = {'name': name, 'label': label, 'type': 'integer'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['hide'] = hide
prop['readOnly'] = readOnly
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addDatetimeProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'datetime'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addTimeProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'time'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addDateProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'date'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addBooleanProperty(self,
name,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description="",
hide=False):
prop = {'name': name, 'label': label, 'type': 'boolean'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['group'] = group
prop['hide'] = hide
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addSelectProperty(self,
name,
selectables,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'select'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['selection'] = selectables
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addMultiSelectProperty(self,
name,
selectables,
label="",
group=None,
updateHandler=None,
isKey=False,
readOnly=True,
description=""):
prop = {'name': name, 'label': label, 'type': 'multiselect'}
if isKey is True:
prop['isKey'] = True
if self.entrykey is not None:
raise KeyError("DataView '{}' already have a key ({}) and cant be overridden with {}".format(
self.name, self.entrykey, name))
self.entrykey = name
prop['readOnly'] = readOnly
prop['selection'] = selectables
prop['group'] = group
prop['description'] = description
self.dataUpdateHandler[str(name)] = updateHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addActionProperty(self,
name,
label,
action,
icon,
actionHandler=None,
isKey=False,
readOnly=True,
color='red',
description=""):
prop = {'name': name, 'label': label, 'type': 'action'}
prop['isKey'] = False
prop['icon'] = icon
prop['color'] = color
prop['action'] = action
prop['description'] = description
self.dataAction[str(action)] = actionHandler
self.properties.append(prop)
self.entryPropList[name] = None
def addRemoveEntryOption(self, name, label):
prop = {'name': name, 'label': label, 'type': 'remove'}
prop['isKey'] = False
prop['icon'] = 'delete'
self.properties.append(prop)
def emitUpdate(self, key, property, value):
self.dataUpdates.append({'key': key, 'property': property, 'value': value, 'view': self.uri})
def emitSyncUpdate(self, key, view=None, workspace=None):
if view is None:
view = self.uri
self.dataSyncs.append({'type': 'update', 'key': key, 'view': view, 'workspace': workspace})
def emitSyncRemove(self, key, view=None, workspace=None):
if view is None:
view = self.uri
self.dataSyncs.append({'type': 'remove', 'key': key, 'view': view, 'workspace': workspace})
def emitSyncCreate(self, key, view=None, workspace=None):
if view is None:
view = self.name
self.dataSyncs.append({'type': 'create', 'key': key, 'view': view, 'workspace': workspace})
# Handler for getting the freshly build view
def getViewHandler(self, user, workspace, query=None):
raise NotImplementedError
# Handler for getting the views meta-data
def getViewMetaHandler(self, user, workspace):
return {}
# Handler for a request to create a new view entry
def createViewEntryHandler(self, user, workspace):
raise NotImplementedError
# Handler for a request to remove a view entry
def removeViewEntryHandler(self, user, workspace, key):
raise NotImplementedError
# Handler for a request to update a single view entry
def updateViewEntryHandler(self, user, workspace, key, entry):
raise NotImplementedError
# Handler for view actions
def executeViewActionHandler(self, user, workspace, action):
try:
return self.dataAction[action.viewAction](user, workspace, action, action.entry[self.entrykey])
except Exception:
return self.dataAction[action.viewAction](user, workspace, action.entry[self.entrykey])
def defineProperties(self):
pass
def defineMetadata(self):
pass
| [
"copy.copy"
] | [((3598, 3610), 'copy.copy', 'copy.copy', (['p'], {}), '(p)\n', (3607, 3610), False, 'import copy\n')] |
# Copyright (c) OpenMMLab. All rights reserved.
import argparse
import cv2
import mmcv
import numpy as np
import torch
from torchvision.transforms import functional as F
from mmdet.apis import init_detector
from mmdet.datasets.pipelines import Compose
try:
import ffmpegcv
except ImportError:
raise ImportError(
'Please install ffmpegcv with:\n\n pip install ffmpegcv')
def parse_args():
    """Build and parse the command line for the GPU-accelerated video demo."""
    ap = argparse.ArgumentParser(
        description='MMDetection video demo with GPU acceleration')
    # Required positionals: input video, model config, model weights.
    ap.add_argument('video', help='Video file')
    ap.add_argument('config', help='Config file')
    ap.add_argument('checkpoint', help='Checkpoint file')
    ap.add_argument(
        '--device', default='cuda:0', help='Device used for inference')
    ap.add_argument(
        '--score-thr', type=float, default=0.3, help='Bbox score threshold')
    # Output options: at least one of --out/--show is required by main().
    ap.add_argument('--out', type=str, help='Output video file')
    ap.add_argument('--show', action='store_true', help='Show video')
    ap.add_argument(
        '--nvdecode', action='store_true', help='Use NVIDIA decoder')
    ap.add_argument(
        '--wait-time',
        type=float,
        default=1,
        help='The interval of show (s), 0 is block')
    return ap.parse_args()
def prefetch_img_metas(cfg, ori_wh):
    """Run the test pipeline once on a blank frame to precompute img_metas.

    All frames of the video share the same geometry, so the meta dict only
    needs to be built a single time.
    """
    width, height = ori_wh
    # The first pipeline stage must accept an in-memory image, not a file path.
    cfg.data.test.pipeline[0].type = 'LoadImageFromWebcam'
    pipeline = Compose(cfg.data.test.pipeline)
    dummy = {'img': np.zeros((height, width, 3), dtype=np.uint8)}
    dummy = pipeline(dummy)
    return dummy['img_metas'][0].data
def process_img(frame_resize, img_metas, device):
    """Turn one resized RGB frame into the normalized NCHW input dict."""
    assert frame_resize.shape == img_metas['pad_shape']
    tensor = torch.from_numpy(frame_resize).to(device).float()
    tensor = tensor.permute(2, 0, 1)  # HWC to CHW
    norm_cfg = img_metas['img_norm_cfg']
    mean = torch.from_numpy(norm_cfg['mean']).to(device)
    std = torch.from_numpy(norm_cfg['std']).to(device)
    tensor = F.normalize(tensor, mean=mean, std=std, inplace=True)
    batch = tensor.unsqueeze(0)  # NCHW
    return {'img': [batch], 'img_metas': [[img_metas]]}
def main():
    """Run detection over a video, showing and/or saving annotated frames."""
    args = parse_args()
    assert args.out or args.show, \
        ('Please specify at least one operation (save/show the '
         'video) with the argument "--out" or "--show"')
    model = init_detector(args.config, args.checkpoint, device=args.device)
    # Select the (optionally NVIDIA-accelerated) ffmpegcv decoder backend.
    if args.nvdecode:
        VideoCapture = ffmpegcv.VideoCaptureNV
    else:
        VideoCapture = ffmpegcv.VideoCapture
    video_origin = VideoCapture(args.video)
    # img_metas are computed once; every frame shares the same geometry.
    img_metas = prefetch_img_metas(model.cfg,
                                   (video_origin.width, video_origin.height))
    resize_wh = img_metas['pad_shape'][1::-1]
    # Second capture stream delivers frames already resized for the model.
    video_resize = VideoCapture(
        args.video,
        resize=resize_wh,
        resize_keepratio=True,
        resize_keepratioalign='topleft',
        pix_fmt='rgb24')
    video_writer = None
    if args.out:
        video_writer = ffmpegcv.VideoWriter(args.out, fps=video_origin.fps)
    with torch.no_grad():
        # Iterate both streams in lockstep: resized frames feed the model,
        # original-resolution frames receive the drawn results.
        for frame_resize, frame_origin in zip(
                mmcv.track_iter_progress(video_resize), video_origin):
            data = process_img(frame_resize, img_metas, args.device)
            result = model(return_loss=False, rescale=True, **data)[0]
            frame_mask = model.show_result(
                frame_origin, result, score_thr=args.score_thr)
            if args.show:
                cv2.namedWindow('video', 0)
                mmcv.imshow(frame_mask, 'video', args.wait_time)
            if args.out:
                video_writer.write(frame_mask)
    # Release all handles; the writer only exists when --out was given.
    if video_writer:
        video_writer.release()
    video_origin.release()
    video_resize.release()
    cv2.destroyAllWindows()
if __name__ == '__main__':
    main()
| [
"mmcv.track_iter_progress",
"argparse.ArgumentParser",
"mmdet.apis.init_detector",
"ffmpegcv.VideoWriter",
"torch.from_numpy",
"mmdet.datasets.pipelines.Compose",
"mmcv.imshow",
"numpy.zeros",
"cv2.destroyAllWindows",
"torch.no_grad",
"torchvision.transforms.functional.normalize",
"cv2.namedWi... | [((425, 513), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""MMDetection video demo with GPU acceleration"""'}), "(description=\n 'MMDetection video demo with GPU acceleration')\n", (448, 513), False, 'import argparse\n'), ((1446, 1477), 'mmdet.datasets.pipelines.Compose', 'Compose', (['cfg.data.test.pipeline'], {}), '(cfg.data.test.pipeline)\n', (1453, 1477), False, 'from mmdet.datasets.pipelines import Compose\n'), ((2025, 2082), 'torchvision.transforms.functional.normalize', 'F.normalize', (['frame_cuda'], {'mean': 'mean', 'std': 'std', 'inplace': '(True)'}), '(frame_cuda, mean=mean, std=std, inplace=True)\n', (2036, 2082), True, 'from torchvision.transforms import functional as F\n'), ((2420, 2483), 'mmdet.apis.init_detector', 'init_detector', (['args.config', 'args.checkpoint'], {'device': 'args.device'}), '(args.config, args.checkpoint, device=args.device)\n', (2433, 2483), False, 'from mmdet.apis import init_detector\n'), ((3828, 3851), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3849, 3851), False, 'import cv2\n'), ((1497, 1532), 'numpy.zeros', 'np.zeros', (['(h, w, 3)'], {'dtype': 'np.uint8'}), '((h, w, 3), dtype=np.uint8)\n', (1505, 1532), True, 'import numpy as np\n'), ((3063, 3115), 'ffmpegcv.VideoWriter', 'ffmpegcv.VideoWriter', (['args.out'], {'fps': 'video_origin.fps'}), '(args.out, fps=video_origin.fps)\n', (3083, 3115), False, 'import ffmpegcv\n'), ((3126, 3141), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3139, 3141), False, 'import torch\n'), ((1873, 1924), 'torch.from_numpy', 'torch.from_numpy', (["img_metas['img_norm_cfg']['mean']"], {}), "(img_metas['img_norm_cfg']['mean'])\n", (1889, 1924), False, 'import torch\n'), ((1946, 1996), 'torch.from_numpy', 'torch.from_numpy', (["img_metas['img_norm_cfg']['std']"], {}), "(img_metas['img_norm_cfg']['std'])\n", (1962, 1996), False, 'import torch\n'), ((3206, 3244), 'mmcv.track_iter_progress', 
'mmcv.track_iter_progress', (['video_resize'], {}), '(video_resize)\n', (3230, 3244), False, 'import mmcv\n'), ((3551, 3578), 'cv2.namedWindow', 'cv2.namedWindow', (['"""video"""', '(0)'], {}), "('video', 0)\n", (3566, 3578), False, 'import cv2\n'), ((3595, 3643), 'mmcv.imshow', 'mmcv.imshow', (['frame_mask', '"""video"""', 'args.wait_time'], {}), "(frame_mask, 'video', args.wait_time)\n", (3606, 3643), False, 'import mmcv\n'), ((1753, 1783), 'torch.from_numpy', 'torch.from_numpy', (['frame_resize'], {}), '(frame_resize)\n', (1769, 1783), False, 'import torch\n')] |
import time
from hashlib import sha1
class InfArray():
    """Integer tape unbounded in both directions, zero-filled on demand.

    Negative indices are stored in ``left`` (index ``-k`` maps to
    ``left[k-1]``); non-negative indices live in ``right``.
    """
    def __init__(self):
        self.left = [0]*16
        self.right = [0]*16
    def getarr(self, ind):
        """Map a signed index to ``(backing_list, offset)``, growing as needed."""
        arr = self.right
        if ind < 0: arr, ind = self.left, -ind-1
        if ind >= len(arr):
            # BUG FIX: the original extended by the undefined name `key`,
            # raising NameError on any out-of-range access; use `ind`.
            # Over-allocate by 10 cells to amortize repeated growth.
            arr.extend([0] * (ind - len(arr) + 10))
        return arr, ind
    def __getitem__(self, key):
        arr, key = self.getarr(key)
        return arr[key]
    def __setitem__(self, key, item):
        arr, key = self.getarr(key)
        arr[key] = item
    def __str__(self):
        # Render the tape left-to-right: most-negative cell first.
        return ' '.join(map(str, reversed(self.left))) + ' ' + ' '.join(map(str, self.right))
def interpreter(prog: str, inp: str):
    """Interpret an esoteric tape language where each opcode is ord(char) % 6.

    Returns ``(stopped_early, tape)``: ``stopped_early`` is True when the
    10-minute wall-clock cap fired before the program pointer walked off the
    end of the program; ``tape`` is the final memory rendered as a string.
    """
    arr = InfArray()  # data tape, unbounded in both directions
    mptr = 0          # memory (tape) pointer
    pptr = 0          # program pointer
    iptr = 0          # input pointer; wraps cyclically over inp
    start = time.time()
    # Run until the program pointer passes the end, or 10 minutes elapse.
    while pptr < len(prog) and time.time() - start <= 10*60:
        c = ord(prog[pptr]) % 6
        if c == 0: mptr -= 1
        elif c == 1: mptr += 1
        elif c == 2: arr[mptr] += 1
        elif c == 3: arr[mptr] -= 1
        elif c == 4:
            # Read one input character (cyclically) into the current cell.
            arr[mptr] = ord(inp[iptr])
            iptr = (iptr + 1) % len(inp)
        # Opcode 5: conditional hash-jump. Zero cell skips ahead by 2 (plus
        # the unconditional +1 below); non-zero cell jumps to a position
        # derived from the SHA-1 of the next (up to) two program characters.
        elif arr[mptr] == 0: pptr = pptr + 2
        else:
            if len(prog) - pptr <= 2: unhashed = prog[pptr+1:]
            else: unhashed = prog[pptr+1:pptr+3]
            # -1 compensates for the unconditional +1 applied below.
            pptr = int(sha1(unhashed.encode('utf8')).hexdigest(), 16) % len(prog) - 1
        pptr = pptr + 1
    return pptr < len(prog), str(arr)
def test_all(progs, inp):
    """Run every program in *progs* against *inp* and print each outcome.

    BUG FIX: the original called an undefined ``printf`` on a plain string
    literal whose leading 'f' was clearly meant as an f-string prefix.
    """
    for prog in progs:
        nohalt, tape = interpreter(prog, inp)
        print(f'{nohalt} {prog} {tape}')
| [
"time.time"
] | [((749, 760), 'time.time', 'time.time', ([], {}), '()\n', (758, 760), False, 'import time\n'), ((792, 803), 'time.time', 'time.time', ([], {}), '()\n', (801, 803), False, 'import time\n')] |
"""Various sample data"""
from binascii import unhexlify
magic = 0xE9BEB4D9
# These keys are from addresses test script
sample_pubsigningkey = unhexlify(
'<KEY>'
'<KEY>')
sample_pubencryptionkey = unhexlify(
'<KEY>'
'e7b9b97792327851a562752e4b79475d1f51f5a71352482b241227f45ed36a9')
sample_privsigningkey = \
b'<KEY>'
sample_privencryptionkey = \
b'<KEY>'
sample_ripe = b'003cd097eb7f35c87b5dc8b4538c22cb55312a9f'
# stream: 1, version: 2
sample_address = 'BM-onkVu1KKL2UaUss5Upg9vXmqd3esTmV79'
sample_factor = 66858749573256452658262553961707680376751171096153613379801854825275240965733
# G * sample_factor
sample_point = (
33567437183004486938355437500683826356288335339807546987348409590129959362313,
94730058721143827257669456336351159718085716196507891067256111928318063085006
)
sample_seed = 'TIGER, tiger, burning bright. In the forests of the night'
# Deterministic addresses with stream 1 and versions 3, 4
sample_deterministic_ripe = b'00cfb69416ae76f68a81c459de4e13460c7d17eb'
sample_deterministic_addr3 = '<KEY>'
sample_deterministic_addr4 = '<KEY>'
sample_daddr3_512 = 18875720106589866286514488037355423395410802084648916523381
sample_daddr4_512 = 25152821841976547050350277460563089811513157529113201589004
sample_statusbar_msg = "new status bar message"
sample_inbox_msg_ids = ['27e644765a3e4b2e973ee7ccf958ea20', '51fc5531-3989-4d69-bbb5-68d64b756f5b',
'2c975c515f8b414db5eea60ba57ba455', 'bc1f2d8a-681c-4cc0-9a12-6067c7e1ac24']
# second address in sample_test_subscription_address is for the announcement broadcast
sample_test_subscription_address = ['BM-2cWQLCBGorT9pUGkYSuGGVr9LzE4mRnQaq', 'BM-GtovgYdgs7qXPkoYaRgrLFuFKz1SFpsw']
sample_subscription_name = 'test sub'
| [
"binascii.unhexlify"
] | [((147, 170), 'binascii.unhexlify', 'unhexlify', (['"""<KEY><KEY>"""'], {}), "('<KEY><KEY>')\n", (156, 170), False, 'from binascii import unhexlify\n'), ((209, 295), 'binascii.unhexlify', 'unhexlify', (['"""<KEY>e7b9b97792327851a562752e4b79475d1f51f5a71352482b241227f45ed36a9"""'], {}), "(\n '<KEY>e7b9b97792327851a562752e4b79475d1f51f5a71352482b241227f45ed36a9')\n", (218, 295), False, 'from binascii import unhexlify\n')] |
from urllib.parse import parse_qs
from anticaptchaofficial.hcaptchaproxyless import hCaptchaProxyless
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from court_scraper.base.selenium_helpers import SeleniumHelpers
from court_scraper.utils import dates_for_range
from .search_results import SearchResultsPage
from ..search_api import SearchApi
class SearchLocators:
    """Selenium locator tuples for the WCCA advanced-search form.

    Each constant is a ``(By-strategy, selector)`` pair; the XPATH entries
    target the inner inputs of react-select dropdown widgets.
    """
    LAST_NAME = (By.NAME, 'lastName')
    FIRST_NAME = (By.NAME, 'firstName')
    MIDDLE_NAME = (By.NAME, 'middleName')
    BIRTH_DATE = (By.NAME, 'dateOfBirth')
    BUSINESS_NAME = (By.NAME, 'businessName')
    COUNTY = (By.XPATH, '//*[@id="react-select-2--value"]/div[2]/input')
    COUNTY_DROPDOWN_ARROW = (By.CSS_SELECTOR, '.Select-arrow-zone')
    CASE_NUMBER = (By.NAME, 'caseNo')
    CASE_NUMBER_RANGE_YEAR = (By.XPATH, '//*[@id="react-select-3--value"]/div[2]/input')
    CASE_NUMBER_RANGE_TYPE = (By.XPATH, '//*[@id="react-select-4--value"]/div[2]/input')
    CASE_NUMBER_RANGE_BEGIN = (By.NAME, 'caseNoRange.start')
    CASE_NUMBER_RANGE_END = (By.NAME, 'caseNoRange.end')
    CASE_RESULTS_TABLE = (By.CSS_SELECTOR, 'table#caseSearchResults')
    DATE_CASE_TYPE = (By.XPATH, '//*[@id="react-select-5--value"]/div[2]/input')
    DATE_CASE_STATUS = (By.XPATH, '//*[@id="react-select-6--value"]/div[2]/input')
    FILING_DATE_RANGE_BEGIN = (By.NAME, 'filingDate.start')
    FILING_DATE_RANGE_END = (By.NAME, 'filingDate.end')
    DISPOSITION_DATE_RANGE_BEGIN = (By.NAME, 'dispositionDate.start')
    DISPOSITION_DATE_RANGE_END = (By.NAME, 'dispositionDate.end')
    STATE_BAR_ID = (By.NAME, 'attyNo')
    CITATION_NUMBER = (By.NAME, 'citnNo')
    DA_CASE_NUMBER = (By.NAME, 'daCaseNo')
    ISSUING_AGENCY = (By.XPATH, '//*[@id="react-select-8--value"]/div[2]/input')
    OFFENSE_DATE_BEGIN = (By.NAME, 'offenseDate.start')
    OFFENSE_DATE_END = (By.NAME, 'offenseDate.end')
    SEARCH_BUTTON = (By.NAME, 'search')
    RESET_BUTTON = (By.XPATH, '//*[@id="home-container"]/main/div/form/div[11]/div/button[2]')
class SearchPage(SeleniumHelpers):
    """Selenium driver for the WCCA advanced-search page.

    Supports searching by case number or by filing-date range, and handles
    the site's hCaptcha via the Anti-Captcha service (``captcha_api_key``).
    """
    locators = SearchLocators
    def __init__(self, driver, captcha_api_key=None):
        # captcha_api_key: Anti-Captcha API key; required for any search
        # that must actually solve the hCaptcha challenge.
        self.url = "https://wcca.wicourts.gov/advanced.html"
        self.captcha_api_key = captcha_api_key
        self.driver = driver
    def search_by_case_number(self, county, case_numbers=[]):
        """Fetch case details for each case number in *county*.

        NOTE(review): mutable default argument; safe only as long as callers
        never mutate the list.
        """
        payload = []
        search_api = SearchApi(county)
        for idx, case_num in enumerate(case_numbers):
            self.go_to()  # advanced search page
            self._execute_case_search(county, case_num)
            # Solve and apply the captcha on the first search.
            # (using it on subsequent case detail API calls causes errors)
            kwargs = {
                'cookies': self.cookies_as_dict(),
            }
            if idx == 0:
                kwargs['captcha_solution'] = self.solve_captcha()
            case_info = search_api.case_details(case_num, **kwargs)
            payload.append(case_info)
        return payload
    def search_by_date(self, county, start_date, end_date, case_types=[]):
        """Collect case data for every day in [start_date, end_date].

        NOTE(review): mutable default argument for ``case_types``; safe only
        while callers never mutate it.
        """
        date_format = "%m-%d-%Y"
        dates = dates_for_range(start_date, end_date, output_format=date_format)
        payload = []
        # One search per day: the site's result-count limits make smaller
        # windows more reliable.
        for idx, day in enumerate(dates):
            self.go_to()  # advanced search page
            self._execute_date_search(county, day, day, case_types)
            if not self.search_has_results(self.driver.current_url):
                continue
            # Solve the captcha on the first search,
            # save the solution for re-use, and apply the solution
            # on the first case of the first day's search results
            # (using it on subsequent case detail API calls causes errors)
            result_kwargs = {
                'use_captcha_solution': False
            }
            if idx == 0:
                captcha_solution = self.solve_captcha()
                result_kwargs['use_captcha_solution'] = True
            # Searches that yield a single result redirect automatically
            # to case detail page rather than search results listing page.
            # For these cases, immediately execute the case detail query
            if 'caseDetail' in self.driver.current_url:
                case_info = self._get_case_details(
                    county,
                    self.driver.current_url,
                    captcha_solution,
                    result_kwargs['use_captcha_solution']
                )
                results = [case_info]
            else:
                results_page = SearchResultsPage(self.driver, county, self.captcha_api_key, captcha_solution)
                results = results_page.results.get(**result_kwargs)
            # TODO: if results_page.results_found():
            #    results_page.display_max_results()
            payload.extend(results)
        return payload
    def _get_case_details(self, county, url, captcha_solution, use_captcha_solution):
        """Fetch one case's details, parsing caseNo/countyNo out of *url*."""
        # caseDetail URLs look like: ...?caseNo=2021SC000082&countyNo=2
        query_str = url.split('?')[-1]
        param_strs = query_str.split('&')
        params = {}
        for param_pair in param_strs:
            key, val = param_pair.split('=')
            params[key] = val
        case_num = params['caseNo']
        search_api = SearchApi(county)
        kwargs = {
            'cookies': self.cookies_as_dict(),
            'county_num': int(params['countyNo'])
        }
        if use_captcha_solution:
            kwargs['captcha_solution'] = captcha_solution
        return search_api.case_details(case_num, **kwargs)
    def _execute_case_search(self, county, case_number):
        """Fill and submit the form for a single case-number search."""
        self.wait_until_visible(self.locators.COUNTY)
        clean_county = self._county_titlecase(county)
        self.fill_form_field(self.locators.COUNTY, clean_county)
        self.fill_form_field(self.locators.CASE_NUMBER, case_number)
        self.click(self.locators.SEARCH_BUTTON)
    def _execute_date_search(self, county, start_date, end_date, case_types=[]):
        """Fill and submit the form for a filing-date range search."""
        # Wait until the county dropdown-menu arrow is clickable before filling the form field,
        # in order to avoid overwriting of the field value by the "Statewide" option default
        county_label_obj = self.driver.find_element_by_xpath("//label[contains(text(), 'County')]")
        self.wait_until_clickable(self.locators.COUNTY_DROPDOWN_ARROW, driver=county_label_obj)
        clean_county = self._county_titlecase(county)
        self.fill_form_field(self.locators.COUNTY, clean_county)
        self.fill_form_field(self.locators.FILING_DATE_RANGE_BEGIN, start_date)
        self.fill_form_field(self.locators.FILING_DATE_RANGE_END, end_date)
        if case_types:
            self._select_case_types(case_types)
        self.click(self.locators.SEARCH_BUTTON)
    def _county_titlecase(self, county):
        # 'la_crosse' -> 'La Crosse', matching the site's dropdown labels.
        return county.replace('_', ' ').title()
    def _select_case_types(self, case_types):
        """Tick each requested case-type option in the react-select menu."""
        # TODO: Refactor to use locators
        for case_type in case_types:
            # Locate the case type menu by name
            case_type_label_obj = self.driver.find_element_by_xpath("//label[contains(text(), 'Case types')]")
            # Expand the Case types menu
            select_arrow = case_type_label_obj.find_element_by_css_selector('.Select-arrow-zone')
            select_arrow.click()
            # Find and click the selection menu option for the case type
            option_divs = (
                case_type_label_obj
                .find_element_by_css_selector('.Select-menu')
                .find_elements_by_tag_name('div')
            )
            # Options are labeled like "Small Claims (SC)"; match on the code suffix.
            option = [opt for opt in option_divs if opt.text.endswith(f'({case_type})')][0]
            option.click()
    def solve_captcha(self):
        """Solve the page's hCaptcha via Anti-Captcha; returns the g-response token."""
        # The challenge iframe carries the site key in its URL fragment.
        iframe = None
        for frame in self.driver.find_elements_by_tag_name('iframe'):
            if 'challenge' in frame.get_attribute('src'):
                iframe = frame
                break
        iframe_url = iframe.get_attribute('src')
        query_str = iframe_url.split('#')[-1]
        site_key = parse_qs(query_str)['sitekey'][0]
        solver = hCaptchaProxyless()
        solver.set_verbose(1)
        solver.set_key(self.captcha_api_key)
        solver.set_website_url(self.driver.current_url)
        solver.set_website_key(site_key)
        g_response = solver.solve_and_return_solution()
        return g_response
    def search_has_results(self, current_url):
        """Return True once the submitted search yields at least one result."""
        # Wait for the post-submit navigation away from the form URL.
        WebDriverWait(self.driver, 10).until(
            EC.url_changes(current_url)
        )
        # Return True if it's a single-result redirect to case detail page
        if 'caseDetail' in self.driver.current_url:
            return True
        WebDriverWait(self.driver, 10).until(
            EC.visibility_of_element_located(
                self.locators.CASE_RESULTS_TABLE
            )
        )
        if 'No records found' in self.driver.page_source:
            return False
        else:
            # Otherwise, assume there are results
            return True
| [
"selenium.webdriver.support.ui.WebDriverWait",
"selenium.webdriver.support.expected_conditions.url_changes",
"urllib.parse.parse_qs",
"court_scraper.utils.dates_for_range",
"anticaptchaofficial.hcaptchaproxyless.hCaptchaProxyless",
"selenium.webdriver.support.expected_conditions.visibility_of_element_loca... | [((3216, 3280), 'court_scraper.utils.dates_for_range', 'dates_for_range', (['start_date', 'end_date'], {'output_format': 'date_format'}), '(start_date, end_date, output_format=date_format)\n', (3231, 3280), False, 'from court_scraper.utils import dates_for_range\n'), ((8218, 8237), 'anticaptchaofficial.hcaptchaproxyless.hCaptchaProxyless', 'hCaptchaProxyless', ([], {}), '()\n', (8235, 8237), False, 'from anticaptchaofficial.hcaptchaproxyless import hCaptchaProxyless\n'), ((8598, 8625), 'selenium.webdriver.support.expected_conditions.url_changes', 'EC.url_changes', (['current_url'], {}), '(current_url)\n', (8612, 8625), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((8845, 8911), 'selenium.webdriver.support.expected_conditions.visibility_of_element_located', 'EC.visibility_of_element_located', (['self.locators.CASE_RESULTS_TABLE'], {}), '(self.locators.CASE_RESULTS_TABLE)\n', (8877, 8911), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((8167, 8186), 'urllib.parse.parse_qs', 'parse_qs', (['query_str'], {}), '(query_str)\n', (8175, 8186), False, 'from urllib.parse import parse_qs\n'), ((8548, 8578), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['self.driver', '(10)'], {}), '(self.driver, 10)\n', (8561, 8578), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((8795, 8825), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['self.driver', '(10)'], {}), '(self.driver, 10)\n', (8808, 8825), False, 'from selenium.webdriver.support.ui import WebDriverWait\n')] |
# 2022 eCTF
# Bootloader Interface Emulator
# <NAME>
#
# (c) 2022 The MITRE Corporation
#
# This source file is part of an example system for MITRE's 2022 Embedded System
# CTF (eCTF). This code is being provided only for educational purposes for the
# 2022 MITRE eCTF competition, and may not meet MITRE standards for quality.
# Use this code at your own risk!
#
# DO NOT CHANGE THIS FILE
import argparse
import os
import logging
import socket
import select
from pathlib import Path
from typing import List, Optional, TypeVar
Message = TypeVar("Message")
LOG_FORMAT = "%(asctime)s:%(name)-s%(levelname)-8s %(message)s"
class Sock:
    """One listening socket (UNIX or TCP) serving at most one client.

    The emulator uses these to bridge the bootloader process and host tools.
    """
    def __init__(
        self,
        sock_path: str,
        q_len=1,
        log_level=logging.INFO,
        mode: int = None,
        network=False,
    ):
        # sock_path: filesystem path for UNIX sockets, or a TCP port
        # (as a string) when network=True.
        self.sock_path = sock_path
        self.network = network
        self.buf = b""
        # set up socket
        if self.network:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.sock.bind(("0.0.0.0", int(sock_path)))
        else:
            # Make sure the socket does not already exist
            try:
                os.unlink(sock_path)
            except OSError:
                # Only swallow the error if the stale file is truly gone.
                if os.path.exists(sock_path):
                    raise
            self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.sock.bind(sock_path)
        self.sock.listen(q_len)
        self.csock = None  # currently connected client socket, if any
        # change permissions if necessary
        if mode and not self.network:
            os.chmod(sock_path, mode)
        # set up logger
        fhandler = logging.FileHandler("bl_interface.log")
        fhandler.setLevel(log_level)
        fhandler.setFormatter(logging.Formatter(LOG_FORMAT))
        shandler = logging.StreamHandler()
        shandler.setLevel(log_level)
        shandler.setFormatter(logging.Formatter(LOG_FORMAT))
        self.logger = logging.getLogger(f"{sock_path}_log")
        self.logger.addHandler(fhandler)
        self.logger.addHandler(shandler)
        self.logger.setLevel(log_level)
    @staticmethod
    def sock_ready(sock: socket.SocketType) -> bool:
        # Non-blocking readiness probe (select with zero timeout).
        ready, _, _ = select.select([sock], [], [], 0)
        return bool(ready)
    def active(self) -> bool:
        """Return True when a client is connected, accepting one if pending."""
        # try to accept new client
        if not self.csock:
            if self.sock_ready(self.sock):
                self.logger.info(f"Connection opened on {self.sock_path}")
                self.csock, _ = self.sock.accept()
        return bool(self.csock)
    def deserialize(self) -> bytes:
        # Hand back everything buffered so far and reset the buffer.
        buf = self.buf
        self.buf = b""
        return buf
    def read_msg(self) -> Optional[Message]:
        """Read pending bytes from the client; None when nothing/disconnected."""
        if not self.active():
            return None
        try:
            if self.sock_ready(self.csock):
                data = self.csock.recv(4096)
                # connection closed
                if not data:
                    self.close()
                    return None
                self.buf += data
                return self.deserialize()
        except (ConnectionResetError, BrokenPipeError):
            # cleanly handle forced closed connection
            self.close()
            return None
    def read_all_msgs(self) -> List[Message]:
        """Drain every pending message into a list."""
        msgs = []
        msg = self.read_msg()
        while msg:
            msgs.append(msg)
            msg = self.read_msg()
        return msgs
    @staticmethod
    def serialize(msg: bytes) -> bytes:
        # Identity transform: messages are raw bytes on the wire.
        return msg
    def send_msg(self, msg: Message) -> bool:
        """Send one message; False when no client is connected or the pipe broke."""
        if not self.active():
            return False
        try:
            self.csock.sendall(self.serialize(msg))
            return True
        except (ConnectionResetError, BrokenPipeError):
            # cleanly handle forced closed connection
            self.close()
            return False
    def close(self):
        """Drop the current client and discard any partially buffered data."""
        self.logger.warning(f"Conection closed on {self.sock_path}")
        self.csock = None
        self.buf = b""
def poll_data_socks(device_sock: Sock, host_sock: Sock):
    """Shuttle pending data messages between device and host, both directions."""
    # Device -> host.
    if device_sock.active():
        pending = device_sock.read_msg()
        # active() is polled unconditionally so a waiting client gets accepted.
        if host_sock.active() and pending is not None:
            host_sock.send_msg(pending)
    # Host -> device.
    if host_sock.active():
        pending = host_sock.read_msg()
        if device_sock.active() and pending is not None:
            device_sock.send_msg(pending)
def poll_restart_socks(device_sock: Sock, host_sock: Sock):
    """Forward host-issued restart commands to the device."""
    # The device must have opened its restart port before host input is consumed.
    if not device_sock.active():
        return
    if not host_sock.active():
        return
    command = host_sock.read_msg()
    if command is not None:
        device_sock.send_msg(command)
def parse_args():
    """Parse the four socket endpoints the emulator bridges.

    BUG FIX: the ``--restart-bl-sock`` / ``--restart-host-sock`` help strings
    were copy-pasted from the data-socket options and described the wrong
    endpoints; they now describe the restart sockets.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--data-bl-sock",
        type=Path,
        required=True,
        help="Path to device-side data socket (will be created)",
    )
    parser.add_argument(
        "--data-host-sock",
        type=int,
        required=True,
        help="Port for host-side data socket (must be available)",
    )
    parser.add_argument(
        "--restart-bl-sock",
        type=Path,
        required=True,
        help="Path to device-side restart socket (will be created)",
    )
    parser.add_argument(
        "--restart-host-sock",
        type=Path,
        required=True,
        help="Path to host-side restart socket (will be created)",
    )
    return parser.parse_args()
def main():
    """Create all four bridge sockets and poll them forever."""
    args = parse_args()
    # open all sockets
    data_bl = Sock(str(args.data_bl_sock), mode=0o777)
    data_host = Sock(str(args.data_host_sock), mode=0o777, network=True)
    restart_bl = Sock(str(args.restart_bl_sock), mode=0o777)
    restart_host = Sock(str(args.restart_host_sock), mode=0o777)
    # poll sockets forever
    while True:
        poll_data_socks(data_bl, data_host)
        poll_restart_socks(restart_bl, restart_host)
if __name__ == "__main__":
    main()
| [
"logging.getLogger",
"os.path.exists",
"select.select",
"logging.StreamHandler",
"argparse.ArgumentParser",
"socket.socket",
"logging.Formatter",
"os.chmod",
"logging.FileHandler",
"os.unlink",
"typing.TypeVar"
] | [((539, 557), 'typing.TypeVar', 'TypeVar', (['"""Message"""'], {}), "('Message')\n", (546, 557), False, 'from typing import List, Optional, TypeVar\n'), ((4879, 4904), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4902, 4904), False, 'import argparse\n'), ((1702, 1741), 'logging.FileHandler', 'logging.FileHandler', (['"""bl_interface.log"""'], {}), "('bl_interface.log')\n", (1721, 1741), False, 'import logging\n'), ((1860, 1883), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1881, 1883), False, 'import logging\n'), ((2005, 2042), 'logging.getLogger', 'logging.getLogger', (['f"""{sock_path}_log"""'], {}), "(f'{sock_path}_log')\n", (2022, 2042), False, 'import logging\n'), ((2259, 2291), 'select.select', 'select.select', (['[sock]', '[]', '[]', '(0)'], {}), '([sock], [], [], 0)\n', (2272, 2291), False, 'import select\n'), ((960, 1009), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (973, 1009), False, 'import socket\n'), ((1392, 1441), 'socket.socket', 'socket.socket', (['socket.AF_UNIX', 'socket.SOCK_STREAM'], {}), '(socket.AF_UNIX, socket.SOCK_STREAM)\n', (1405, 1441), False, 'import socket\n'), ((1632, 1657), 'os.chmod', 'os.chmod', (['sock_path', 'mode'], {}), '(sock_path, mode)\n', (1640, 1657), False, 'import os\n'), ((1809, 1838), 'logging.Formatter', 'logging.Formatter', (['LOG_FORMAT'], {}), '(LOG_FORMAT)\n', (1826, 1838), False, 'import logging\n'), ((1951, 1980), 'logging.Formatter', 'logging.Formatter', (['LOG_FORMAT'], {}), '(LOG_FORMAT)\n', (1968, 1980), False, 'import logging\n'), ((1247, 1267), 'os.unlink', 'os.unlink', (['sock_path'], {}), '(sock_path)\n', (1256, 1267), False, 'import os\n'), ((1315, 1340), 'os.path.exists', 'os.path.exists', (['sock_path'], {}), '(sock_path)\n', (1329, 1340), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
Demo: open a macro-enabled Excel workbook, write a pandas DataFrame into a
sheet, read it back, and save the workbook under a new file name.

Created on Mon Mar  1 17:47:41 2021

@author: <NAME>
"""
import os

import pandas as pd
import xlwings as xw

# NOTE: hard-coded working directory — adjust for your machine.
os.chdir(r"C:\Users\<NAME>\Desktop\Roisin")

wb = xw.Book("CAO_template.xlsm")
worksheet = wb.sheets['EPOS_Closing_Stock_Detailed']

# Build a small demo dataframe.
# (Cleanup: the original used bare string literals as pseudo-comments; those
# are no-op statements, replaced here with real comments.)
cars = {'Brand': ['Honda Civic', 'Toyota Corolla', 'Ford Focus', 'Audi A4'],
        'Price': [22000, 25000, 27000, 35000]
        }
cars_df = pd.DataFrame(cars, columns=['Brand', 'Price'])

# Write the dataframe to the sheet, anchored at A1.
worksheet.range('A1').value = cars_df

# Read the same table back from the sheet into a dataframe.
excel_df = worksheet.range('A1').options(pd.DataFrame, expand='table').value

# Save as a new workbook (.xlsm keeps the macros), then close.
newfilename = ('Test4.xlsm')
wb.save(newfilename)

wb.close()
| [
"os.chdir",
"xlwings.Book",
"pandas.DataFrame"
] | [((283, 329), 'os.chdir', 'os.chdir', (['"""C:\\\\Users\\\\<NAME>\\\\Desktop\\\\Roisin"""'], {}), "('C:\\\\Users\\\\<NAME>\\\\Desktop\\\\Roisin')\n", (291, 329), False, 'import os\n'), ((335, 363), 'xlwings.Book', 'xw.Book', (['"""CAO_template.xlsm"""'], {}), "('CAO_template.xlsm')\n", (342, 363), True, 'import xlwings as xw\n'), ((583, 629), 'pandas.DataFrame', 'pd.DataFrame', (['cars'], {'columns': "['Brand', 'Price']"}), "(cars, columns=['Brand', 'Price'])\n", (595, 629), True, 'import pandas as pd\n')] |
from do import DigitalOcean
import argparse
import json
def do_play(token):
    """Exercise the DigitalOcean API with the given access token.

    Prints the status of every droplet tagged 'do-python' and the public
    part of every SSH key on the account.  Droplet creation/destruction
    helpers are kept below, commented out, for quick manual experiments.
    """
    client = DigitalOcean(token)
    # for i in range(3):
    #     client.create_droplet(f'node-{i}', 'fra1', 'do-python')
    # client.wait_droplet_creation_process('do-python')
    # client.destroy_droplets('do-python')
    for droplet in client.manager.get_all_droplets(tag_name='do-python'):
        print(droplet.status)
    for ssh_key in client.manager.get_all_sshkeys():
        print(ssh_key.public_key)
def parse_input(file):
    """Load a JSON configuration file and echo its main sections.

    Args:
        file: path to a JSON file with at least the keys "instances"
            and "setup".

    Returns:
        The parsed configuration mapping.  (Previously the parsed data
        was printed and then discarded; returning it lets callers use
        the config programmatically — e.g. the commented-out call in
        the main guard.)

    Raises:
        KeyError: if "instances" or "setup" is missing from the file.
    """
    # Explicit utf-8: JSON is utf-8 by spec, so don't rely on the
    # platform default encoding.
    with open(file, 'r', encoding='utf-8') as f:
        config = json.load(f)
    print(config["instances"])
    print(config["setup"])
    return config
if __name__ == '__main__':
    # Minimal CLI: the API token is the only input required.
    cli = argparse.ArgumentParser()
    cli.add_argument('-token')
    options = cli.parse_args()
    # parse_input(options.file)
    do_play(options.token)
| [
"json.load",
"argparse.ArgumentParser",
"do.DigitalOcean"
] | [((88, 107), 'do.DigitalOcean', 'DigitalOcean', (['token'], {}), '(token)\n', (100, 107), False, 'from do import DigitalOcean\n'), ((708, 733), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (731, 733), False, 'import argparse\n'), ((595, 607), 'json.load', 'json.load', (['f'], {}), '(f)\n', (604, 607), False, 'import json\n')] |
import cPickle as pickle
import pandas as pd

if __name__ == '__main__':
    fnames = set(['clinton_tweets.json', 'trump_tweets.json'])
    for fname in fnames:
        # Tweets are keyed by id in the JSON, so transpose to get one row
        # per tweet before extracting the text column.
        df = pd.read_json('data/' + fname)
        df = df.transpose()
        df = df['text']
        # Pair each tweet id with its text.  Use a context manager so the
        # output file is always closed/flushed (the previous inline
        # open(fname, 'wb') leaked the handle).
        with open(fname, 'wb') as out:
            pickle.dump([(i, v) for i, v in zip(df.index, df.values)], out)
| [
"pandas.read_json"
] | [((175, 204), 'pandas.read_json', 'pd.read_json', (["('data/' + fname)"], {}), "('data/' + fname)\n", (187, 204), True, 'import pandas as pd\n')] |
import os
import logging

from flask import Flask

app = Flask(__name__)


@app.route('/status')
def health_check():
    """Liveness endpoint for orchestrators/load balancers."""
    app.logger.info('Status request successfull')
    app.logger.debug('DEBUG message')
    return 'OK - healthy'


@app.route('/metrics')
def metrics():
    """Placeholder metrics endpoint."""
    app.logger.info('Metrics request successfull')
    app.logger.debug('DEBUG message')
    return 'OK - metrics'


@app.route('/')
def hello_world():
    """Greet whoever TARGET names (defaults to 'World')."""
    target = os.environ.get('TARGET', 'World')
    app.logger.info('Main request successfull')
    app.logger.debug('DEBUG message')
    return f'Hello {target}!\n'


if __name__ == "__main__":
    # Route all log records (DEBUG and up) to a file.
    logging.basicConfig(filename='app.log', level=logging.DEBUG)
    port = int(os.environ.get('PORT', 8080))
    app.run(debug=True, host='0.0.0.0', port=port)
| [
"logging.basicConfig",
"os.environ.get",
"flask.Flask"
] | [((57, 72), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (62, 72), False, 'from flask import Flask\n'), ((435, 468), 'os.environ.get', 'os.environ.get', (['"""TARGET"""', '"""World"""'], {}), "('TARGET', 'World')\n", (449, 468), False, 'import os\n'), ((657, 717), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""app.log"""', 'level': 'logging.DEBUG'}), "(filename='app.log', level=logging.DEBUG)\n", (676, 717), False, 'import logging\n'), ((768, 796), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(8080)'], {}), "('PORT', 8080)\n", (782, 796), False, 'import os\n')] |
import numpy as np
import torch
import matplotlib.pyplot as plt
from icecream import ic
def visualize_vector_field(policy, device, min_max = [[-1,-1],[1,1]], fig_number=1):
    """Draw the 2-D vector field produced by ``policy`` over a rectangle.

    Samples ``policy`` on a 100x100 grid spanning ``min_max`` and renders a
    matplotlib streamplot coloured by normalised speed.

    Args:
        policy: callable mapping an (N, 2) float tensor of positions to an
            (N, 2) velocity tensor/array.
        device: torch device to evaluate ``policy`` on, or None for CPU.
        min_max: [[min_x, min_y], [max_x, max_y]] plotting bounds.  (Mutable
            default is safe here: it is never mutated.)
        fig_number: kept for interface compatibility — currently unused.
    """
    min_x = min_max[0][0]
    max_x = min_max[1][0]
    min_y = min_max[0][1]
    max_y = min_max[1][1]
    n_sample = 100
    x = np.linspace(min_x, max_x, n_sample)
    y = np.linspace(min_y, max_y, n_sample)
    xy = np.meshgrid(x, y)
    h = np.concatenate(xy[0])
    v = np.concatenate(xy[1])
    hv = torch.Tensor(np.stack([h, v]).T).float()
    if device is not None:
        hv = hv.to(device)
    # Evaluate without building an autograd graph; detach/move to CPU so the
    # result can be handed to numpy even when the policy has trainable
    # parameters (np.nan_to_num on a grad-tracking tensor raises).
    with torch.no_grad():
        vel = policy(hv)
    if torch.is_tensor(vel):
        vel = vel.detach().cpu().numpy()
    vel = np.nan_to_num(vel)
    vel_x = np.reshape(vel[:, 0], (n_sample, n_sample))
    vel_y = np.reshape(vel[:, 1], (n_sample, n_sample))
    speed = np.sqrt(vel_x ** 2 + vel_y ** 2)
    max_speed = np.max(speed)
    if max_speed > 0:  # avoid 0/0 for an identically-zero field
        speed = speed / max_speed
    plt.streamplot(xy[0], xy[1], vel_x, vel_y, color=speed)
# NOTE(review): debug leftovers — builds a coarse 5x5 mgrid and dumps it via
# icecream at import time.  Presumably scratch work; consider removing.
w = 5
Y, X = np.mgrid[-w:w:5j, -w:w:5j]
ic(Y)
ic(X)
import numpy as np
import matplotlib.pyplot as plt
# # Creating dataset
# x = np.arange(0, 10)
# y = np.arange(0, 10)
#
# # Creating grids
# X, Y = np.meshgrid(x, y)
# # ic(X)
# # ic(Y)
#
# # x-component to the right
# u = np.ones((15, 10))
#
# # y-component zero
# v = -np.ones((10, 10))
#
# fig = plt.figure(figsize=(12, 7))
#
# # Plotting stream plot
# plt.streamplot(X, Y, u, v, density=0.5)
#
# # show plot
# # plt.show()
# Demo: overlay two analytic 2-D vector fields on the same axes.
# NOTE: code kept byte-identical — ic() prints the literal source expression,
# so renaming/restructuring here would change program output.
import numpy as np
import matplotlib.pyplot as plt
# Creating data set
# 100x100 grid over [-3, 3] x [-3, 3] (the 100j step count is mgrid syntax).
w = 3
Y, X = np.mgrid[-w:w:100j, -w:w:100j]
# First field components.
U1 = -1 - X ** 2 + Y
ic(type(U1))
ic(np.shape(U1))
V1 = 1 + X - Y ** 2
ic(np.shape(V1))
# Second, slightly shifted field.
U2 = -1.1 - X ** 2 + Y
# NOTE(review): likely a copy-paste slip — probably meant np.shape(U2) here.
ic(np.shape(U1))
V2 = 2.1 + X - Y ** 2
# speed = np.sqrt(U ** 2 + V ** 2)
# Creating plot
fig = plt.figure(figsize=(12, 7))
plt.streamplot(X, Y, U1, V1, density=1)
plt.streamplot(X, Y, U2, V2, density=0.8)
plt.show() | [
"icecream.ic",
"numpy.reshape",
"numpy.sqrt",
"numpy.max",
"numpy.stack",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.streamplot",
"numpy.linspace",
"numpy.concatenate",
"numpy.meshgrid",
"numpy.shape",
"numpy.nan_to_num",
"matplotlib.pyplot.show"
] | [((947, 952), 'icecream.ic', 'ic', (['Y'], {}), '(Y)\n', (949, 952), False, 'from icecream import ic\n'), ((953, 958), 'icecream.ic', 'ic', (['X'], {}), '(X)\n', (955, 958), False, 'from icecream import ic\n'), ((1714, 1741), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 7)'}), '(figsize=(12, 7))\n', (1724, 1741), True, 'import matplotlib.pyplot as plt\n'), ((1742, 1781), 'matplotlib.pyplot.streamplot', 'plt.streamplot', (['X', 'Y', 'U1', 'V1'], {'density': '(1)'}), '(X, Y, U1, V1, density=1)\n', (1756, 1781), True, 'import matplotlib.pyplot as plt\n'), ((1782, 1823), 'matplotlib.pyplot.streamplot', 'plt.streamplot', (['X', 'Y', 'U2', 'V2'], {'density': '(0.8)'}), '(X, Y, U2, V2, density=0.8)\n', (1796, 1823), True, 'import matplotlib.pyplot as plt\n'), ((1836, 1846), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1844, 1846), True, 'import matplotlib.pyplot as plt\n'), ((307, 342), 'numpy.linspace', 'np.linspace', (['min_x', 'max_x', 'n_sample'], {}), '(min_x, max_x, n_sample)\n', (318, 342), True, 'import numpy as np\n'), ((351, 386), 'numpy.linspace', 'np.linspace', (['min_y', 'max_y', 'n_sample'], {}), '(min_y, max_y, n_sample)\n', (362, 386), True, 'import numpy as np\n'), ((397, 414), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (408, 414), True, 'import numpy as np\n'), ((423, 444), 'numpy.concatenate', 'np.concatenate', (['xy[0]'], {}), '(xy[0])\n', (437, 444), True, 'import numpy as np\n'), ((453, 474), 'numpy.concatenate', 'np.concatenate', (['xy[1]'], {}), '(xy[1])\n', (467, 474), True, 'import numpy as np\n'), ((636, 654), 'numpy.nan_to_num', 'np.nan_to_num', (['vel'], {}), '(vel)\n', (649, 654), True, 'import numpy as np\n'), ((668, 711), 'numpy.reshape', 'np.reshape', (['vel[:, 0]', '(n_sample, n_sample)'], {}), '(vel[:, 0], (n_sample, n_sample))\n', (678, 711), True, 'import numpy as np\n'), ((724, 767), 'numpy.reshape', 'np.reshape', (['vel[:, 1]', '(n_sample, n_sample)'], {}), '(vel[:, 1], 
(n_sample, n_sample))\n', (734, 767), True, 'import numpy as np\n'), ((780, 812), 'numpy.sqrt', 'np.sqrt', (['(vel_x ** 2 + vel_y ** 2)'], {}), '(vel_x ** 2 + vel_y ** 2)\n', (787, 812), True, 'import numpy as np\n'), ((850, 905), 'matplotlib.pyplot.streamplot', 'plt.streamplot', (['xy[0]', 'xy[1]', 'vel_x', 'vel_y'], {'color': 'speed'}), '(xy[0], xy[1], vel_x, vel_y, color=speed)\n', (864, 905), True, 'import matplotlib.pyplot as plt\n'), ((1542, 1554), 'numpy.shape', 'np.shape', (['U1'], {}), '(U1)\n', (1550, 1554), True, 'import numpy as np\n'), ((1579, 1591), 'numpy.shape', 'np.shape', (['V1'], {}), '(V1)\n', (1587, 1591), True, 'import numpy as np\n'), ((1620, 1632), 'numpy.shape', 'np.shape', (['U1'], {}), '(U1)\n', (1628, 1632), True, 'import numpy as np\n'), ((831, 844), 'numpy.max', 'np.max', (['speed'], {}), '(speed)\n', (837, 844), True, 'import numpy as np\n'), ((497, 513), 'numpy.stack', 'np.stack', (['[h, v]'], {}), '([h, v])\n', (505, 513), True, 'import numpy as np\n')] |
# Generated from tale/syntax/grammar/Tale.g4 by ANTLR 4.8
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
    """Return the serialized ATN (augmented transition network) for TaleParser.

    Machine-generated by ANTLR 4.8 from tale/syntax/grammar/Tale.g4 — do not
    edit by hand; regenerate from the grammar instead.  The escaped string
    below is opaque binary transition data that the parser deserializes once
    at class-definition time.
    """
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\20")
        buf.write("\u00f1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
        buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
        buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
        buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
        buf.write("\4\37\t\37\4 \t \4!\t!\3\2\3\2\7\2E\n\2\f\2\16\2H\13\2")
        buf.write("\3\2\3\2\3\3\3\3\5\3N\n\3\3\4\3\4\3\4\3\4\3\5\3\5\3\5")
        buf.write("\3\5\3\5\5\5Y\n\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b")
        buf.write("\3\b\3\t\5\tf\n\t\3\t\3\t\3\t\6\tk\n\t\r\t\16\tl\3\n\3")
        buf.write("\n\3\13\3\13\5\13s\n\13\3\f\3\f\3\f\6\fx\n\f\r\f\16\f")
        buf.write("y\3\r\3\r\5\r~\n\r\3\16\3\16\3\16\3\16\5\16\u0084\n\16")
        buf.write("\3\16\3\16\3\17\3\17\5\17\u008a\n\17\3\20\3\20\3\21\3")
        buf.write("\21\3\22\3\22\5\22\u0092\n\22\3\23\3\23\3\24\3\24\3\24")
        buf.write("\6\24\u0099\n\24\r\24\16\24\u009a\3\24\3\24\3\25\3\25")
        buf.write("\3\25\3\25\3\25\5\25\u00a4\n\25\3\26\3\26\3\26\3\26\3")
        buf.write("\26\3\26\7\26\u00ac\n\26\f\26\16\26\u00af\13\26\3\27\3")
        buf.write("\27\3\27\3\27\3\27\3\27\3\27\5\27\u00b8\n\27\3\30\3\30")
        buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u00c2\n\30\f\30\16")
        buf.write("\30\u00c5\13\30\3\31\3\31\5\31\u00c9\n\31\3\32\5\32\u00cc")
        buf.write("\n\32\3\32\3\32\3\32\3\32\6\32\u00d2\n\32\r\32\16\32\u00d3")
        buf.write("\3\33\3\33\3\33\5\33\u00d9\n\33\3\34\3\34\3\35\3\35\3")
        buf.write("\35\7\35\u00e0\n\35\f\35\16\35\u00e3\13\35\3\36\3\36\5")
        buf.write("\36\u00e7\n\36\3\37\3\37\5\37\u00eb\n\37\3 \3 \3!\3!\3")
        buf.write("!\3\u00e1\4*.\"\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36")
        buf.write(" \"$&(*,.\60\62\64\668:<>@\2\2\2\u00f0\2F\3\2\2\2\4M\3")
        buf.write("\2\2\2\6O\3\2\2\2\bX\3\2\2\2\nZ\3\2\2\2\f]\3\2\2\2\16")
        buf.write("`\3\2\2\2\20e\3\2\2\2\22n\3\2\2\2\24r\3\2\2\2\26t\3\2")
        buf.write("\2\2\30}\3\2\2\2\32\177\3\2\2\2\34\u0089\3\2\2\2\36\u008b")
        buf.write("\3\2\2\2 \u008d\3\2\2\2\"\u0091\3\2\2\2$\u0093\3\2\2\2")
        buf.write("&\u0095\3\2\2\2(\u00a3\3\2\2\2*\u00a5\3\2\2\2,\u00b7\3")
        buf.write("\2\2\2.\u00b9\3\2\2\2\60\u00c8\3\2\2\2\62\u00cb\3\2\2")
        buf.write("\2\64\u00d8\3\2\2\2\66\u00da\3\2\2\28\u00dc\3\2\2\2:\u00e6")
        buf.write("\3\2\2\2<\u00ea\3\2\2\2>\u00ec\3\2\2\2@\u00ee\3\2\2\2")
        buf.write("BE\7\r\2\2CE\5\4\3\2DB\3\2\2\2DC\3\2\2\2EH\3\2\2\2FD\3")
        buf.write("\2\2\2FG\3\2\2\2GI\3\2\2\2HF\3\2\2\2IJ\7\2\2\3J\3\3\2")
        buf.write("\2\2KN\5\6\4\2LN\5(\25\2MK\3\2\2\2ML\3\2\2\2N\5\3\2\2")
        buf.write("\2OP\5\b\5\2PQ\7\3\2\2QR\5\"\22\2R\7\3\2\2\2SY\5\n\6\2")
        buf.write("TY\5\f\7\2UY\5\16\b\2VY\5\20\t\2WY\5\22\n\2XS\3\2\2\2")
        buf.write("XT\3\2\2\2XU\3\2\2\2XV\3\2\2\2XW\3\2\2\2Y\t\3\2\2\2Z[")
        buf.write("\5\24\13\2[\\\7\b\2\2\\\13\3\2\2\2]^\7\n\2\2^_\5\30\r")
        buf.write("\2_\r\3\2\2\2`a\5\24\13\2ab\7\n\2\2bc\5\24\13\2c\17\3")
        buf.write("\2\2\2df\5\24\13\2ed\3\2\2\2ef\3\2\2\2fj\3\2\2\2gh\7\b")
        buf.write("\2\2hi\7\4\2\2ik\5\24\13\2jg\3\2\2\2kl\3\2\2\2lj\3\2\2")
        buf.write("\2lm\3\2\2\2m\21\3\2\2\2no\7\b\2\2o\23\3\2\2\2ps\5\30")
        buf.write("\r\2qs\5\26\f\2rp\3\2\2\2rq\3\2\2\2s\25\3\2\2\2tw\5\30")
        buf.write("\r\2uv\7\5\2\2vx\5\30\r\2wu\3\2\2\2xy\3\2\2\2yw\3\2\2")
        buf.write("\2yz\3\2\2\2z\27\3\2\2\2{~\5\32\16\2|~\5\34\17\2}{\3\2")
        buf.write("\2\2}|\3\2\2\2~\31\3\2\2\2\177\u0080\7\6\2\2\u0080\u0083")
        buf.write("\5\36\20\2\u0081\u0082\7\4\2\2\u0082\u0084\5 \21\2\u0083")
        buf.write("\u0081\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0085\3\2\2\2")
        buf.write("\u0085\u0086\7\7\2\2\u0086\33\3\2\2\2\u0087\u008a\7\b")
        buf.write("\2\2\u0088\u008a\5<\37\2\u0089\u0087\3\2\2\2\u0089\u0088")
        buf.write("\3\2\2\2\u008a\35\3\2\2\2\u008b\u008c\7\b\2\2\u008c\37")
        buf.write("\3\2\2\2\u008d\u008e\7\b\2\2\u008e!\3\2\2\2\u008f\u0092")
        buf.write("\5$\23\2\u0090\u0092\5&\24\2\u0091\u008f\3\2\2\2\u0091")
        buf.write("\u0090\3\2\2\2\u0092#\3\2\2\2\u0093\u0094\5(\25\2\u0094")
        buf.write("%\3\2\2\2\u0095\u0098\7\17\2\2\u0096\u0099\7\r\2\2\u0097")
        buf.write("\u0099\5\4\3\2\u0098\u0096\3\2\2\2\u0098\u0097\3\2\2\2")
        buf.write("\u0099\u009a\3\2\2\2\u009a\u0098\3\2\2\2\u009a\u009b\3")
        buf.write("\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\7\20\2\2\u009d")
        buf.write("\'\3\2\2\2\u009e\u00a4\5*\26\2\u009f\u00a4\5,\27\2\u00a0")
        buf.write("\u00a4\5.\30\2\u00a1\u00a4\5\62\32\2\u00a2\u00a4\58\35")
        buf.write("\2\u00a3\u009e\3\2\2\2\u00a3\u009f\3\2\2\2\u00a3\u00a0")
        buf.write("\3\2\2\2\u00a3\u00a1\3\2\2\2\u00a3\u00a2\3\2\2\2\u00a4")
        buf.write(")\3\2\2\2\u00a5\u00a6\b\26\1\2\u00a6\u00a7\58\35\2\u00a7")
        buf.write("\u00a8\7\b\2\2\u00a8\u00ad\3\2\2\2\u00a9\u00aa\f\4\2\2")
        buf.write("\u00aa\u00ac\7\b\2\2\u00ab\u00a9\3\2\2\2\u00ac\u00af\3")
        buf.write("\2\2\2\u00ad\u00ab\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae+")
        buf.write("\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0\u00b1\7\n\2\2\u00b1")
        buf.write("\u00b8\5:\36\2\u00b2\u00b3\7\n\2\2\u00b3\u00b4\7\6\2\2")
        buf.write("\u00b4\u00b5\5(\25\2\u00b5\u00b6\7\7\2\2\u00b6\u00b8\3")
        buf.write("\2\2\2\u00b7\u00b0\3\2\2\2\u00b7\u00b2\3\2\2\2\u00b8-")
        buf.write("\3\2\2\2\u00b9\u00ba\b\30\1\2\u00ba\u00bb\5\60\31\2\u00bb")
        buf.write("\u00bc\7\n\2\2\u00bc\u00bd\5\60\31\2\u00bd\u00c3\3\2\2")
        buf.write("\2\u00be\u00bf\f\4\2\2\u00bf\u00c0\7\n\2\2\u00c0\u00c2")
        buf.write("\5\60\31\2\u00c1\u00be\3\2\2\2\u00c2\u00c5\3\2\2\2\u00c3")
        buf.write("\u00c1\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4/\3\2\2\2\u00c5")
        buf.write("\u00c3\3\2\2\2\u00c6\u00c9\5*\26\2\u00c7\u00c9\58\35\2")
        buf.write("\u00c8\u00c6\3\2\2\2\u00c8\u00c7\3\2\2\2\u00c9\61\3\2")
        buf.write("\2\2\u00ca\u00cc\5\64\33\2\u00cb\u00ca\3\2\2\2\u00cb\u00cc")
        buf.write("\3\2\2\2\u00cc\u00d1\3\2\2\2\u00cd\u00ce\5\66\34\2\u00ce")
        buf.write("\u00cf\7\4\2\2\u00cf\u00d0\5\64\33\2\u00d0\u00d2\3\2\2")
        buf.write("\2\u00d1\u00cd\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\u00d1")
        buf.write("\3\2\2\2\u00d3\u00d4\3\2\2\2\u00d4\63\3\2\2\2\u00d5\u00d9")
        buf.write("\5*\26\2\u00d6\u00d9\5.\30\2\u00d7\u00d9\58\35\2\u00d8")
        buf.write("\u00d5\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d7\3\2\2\2")
        buf.write("\u00d9\65\3\2\2\2\u00da\u00db\7\b\2\2\u00db\67\3\2\2\2")
        buf.write("\u00dc\u00e1\5:\36\2\u00dd\u00de\7\5\2\2\u00de\u00e0\5")
        buf.write(":\36\2\u00df\u00dd\3\2\2\2\u00e0\u00e3\3\2\2\2\u00e1\u00e2")
        buf.write("\3\2\2\2\u00e1\u00df\3\2\2\2\u00e29\3\2\2\2\u00e3\u00e1")
        buf.write("\3\2\2\2\u00e4\u00e7\7\b\2\2\u00e5\u00e7\5<\37\2\u00e6")
        buf.write("\u00e4\3\2\2\2\u00e6\u00e5\3\2\2\2\u00e7;\3\2\2\2\u00e8")
        buf.write("\u00eb\5> \2\u00e9\u00eb\5@!\2\u00ea\u00e8\3\2\2\2\u00ea")
        buf.write("\u00e9\3\2\2\2\u00eb=\3\2\2\2\u00ec\u00ed\7\t\2\2\u00ed")
        buf.write("?\3\2\2\2\u00ee\u00ef\7\13\2\2\u00efA\3\2\2\2\33DFMXe")
        buf.write("lry}\u0083\u0089\u0091\u0098\u009a\u00a3\u00ad\u00b7\u00c3")
        buf.write("\u00c8\u00cb\u00d3\u00d8\u00e1\u00e6\u00ea")
        return buf.getvalue()
class TaleParser ( Parser ):
grammarFileName = "Tale.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'='", "':'", "','", "'('", "')'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "IDENTIFIER", "NUMBER",
"OPERATOR", "STRING", "WS", "NEWLINE", "SKIP_", "INDENT",
"DEDENT" ]
RULE_program = 0
RULE_statement = 1
RULE_assignment = 2
RULE_form = 3
RULE_unaryForm = 4
RULE_prefixOperatorForm = 5
RULE_binaryForm = 6
RULE_keywordForm = 7
RULE_primitiveForm = 8
RULE_parameter = 9
RULE_tupleParameter = 10
RULE_singleParameter = 11
RULE_simpleParameter = 12
RULE_patternMatchingParameter = 13
RULE_parameterName = 14
RULE_parameterType = 15
RULE_assignmentBody = 16
RULE_simpleAssignmentBody = 17
RULE_compoundAssignmentBody = 18
RULE_expression = 19
RULE_unary = 20
RULE_prefixOperator = 21
RULE_binary = 22
RULE_binaryOperand = 23
RULE_keyword = 24
RULE_keywordArgument = 25
RULE_keywordName = 26
RULE_primitive = 27
RULE_primitiveItem = 28
RULE_literal = 29
RULE_intLiteral = 30
RULE_stringLiteral = 31
ruleNames = [ "program", "statement", "assignment", "form", "unaryForm",
"prefixOperatorForm", "binaryForm", "keywordForm", "primitiveForm",
"parameter", "tupleParameter", "singleParameter", "simpleParameter",
"patternMatchingParameter", "parameterName", "parameterType",
"assignmentBody", "simpleAssignmentBody", "compoundAssignmentBody",
"expression", "unary", "prefixOperator", "binary", "binaryOperand",
"keyword", "keywordArgument", "keywordName", "primitive",
"primitiveItem", "literal", "intLiteral", "stringLiteral" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
IDENTIFIER=6
NUMBER=7
OPERATOR=8
STRING=9
WS=10
NEWLINE=11
SKIP_=12
INDENT=13
DEDENT=14
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.8")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class ProgramContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def EOF(self):
return self.getToken(TaleParser.EOF, 0)
def NEWLINE(self, i:int=None):
if i is None:
return self.getTokens(TaleParser.NEWLINE)
else:
return self.getToken(TaleParser.NEWLINE, i)
def statement(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.StatementContext)
else:
return self.getTypedRuleContext(TaleParser.StatementContext,i)
def getRuleIndex(self):
return TaleParser.RULE_program
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterProgram" ):
listener.enterProgram(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitProgram" ):
listener.exitProgram(self)
def program(self):
localctx = TaleParser.ProgramContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_program)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 68
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TaleParser.T__3) | (1 << TaleParser.IDENTIFIER) | (1 << TaleParser.NUMBER) | (1 << TaleParser.OPERATOR) | (1 << TaleParser.STRING) | (1 << TaleParser.NEWLINE))) != 0):
self.state = 66
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.NEWLINE]:
self.state = 64
self.match(TaleParser.NEWLINE)
pass
elif token in [TaleParser.T__3, TaleParser.IDENTIFIER, TaleParser.NUMBER, TaleParser.OPERATOR, TaleParser.STRING]:
self.state = 65
self.statement()
pass
else:
raise NoViableAltException(self)
self.state = 70
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 71
self.match(TaleParser.EOF)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StatementContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def assignment(self):
return self.getTypedRuleContext(TaleParser.AssignmentContext,0)
def expression(self):
return self.getTypedRuleContext(TaleParser.ExpressionContext,0)
def getRuleIndex(self):
return TaleParser.RULE_statement
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStatement" ):
listener.enterStatement(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStatement" ):
listener.exitStatement(self)
def statement(self):
localctx = TaleParser.StatementContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_statement)
try:
self.state = 75
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,2,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 73
self.assignment()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 74
self.expression()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class AssignmentContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def form(self):
return self.getTypedRuleContext(TaleParser.FormContext,0)
def assignmentBody(self):
return self.getTypedRuleContext(TaleParser.AssignmentBodyContext,0)
def getRuleIndex(self):
return TaleParser.RULE_assignment
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAssignment" ):
listener.enterAssignment(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAssignment" ):
listener.exitAssignment(self)
def assignment(self):
localctx = TaleParser.AssignmentContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_assignment)
try:
self.enterOuterAlt(localctx, 1)
self.state = 77
self.form()
self.state = 78
self.match(TaleParser.T__0)
self.state = 79
self.assignmentBody()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class FormContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unaryForm(self):
return self.getTypedRuleContext(TaleParser.UnaryFormContext,0)
def prefixOperatorForm(self):
return self.getTypedRuleContext(TaleParser.PrefixOperatorFormContext,0)
def binaryForm(self):
return self.getTypedRuleContext(TaleParser.BinaryFormContext,0)
def keywordForm(self):
return self.getTypedRuleContext(TaleParser.KeywordFormContext,0)
def primitiveForm(self):
return self.getTypedRuleContext(TaleParser.PrimitiveFormContext,0)
def getRuleIndex(self):
return TaleParser.RULE_form
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterForm" ):
listener.enterForm(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitForm" ):
listener.exitForm(self)
def form(self):
localctx = TaleParser.FormContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_form)
try:
self.state = 86
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 81
self.unaryForm()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 82
self.prefixOperatorForm()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 83
self.binaryForm()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 84
self.keywordForm()
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 85
self.primitiveForm()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class UnaryFormContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parameter(self):
return self.getTypedRuleContext(TaleParser.ParameterContext,0)
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def getRuleIndex(self):
return TaleParser.RULE_unaryForm
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUnaryForm" ):
listener.enterUnaryForm(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUnaryForm" ):
listener.exitUnaryForm(self)
def unaryForm(self):
localctx = TaleParser.UnaryFormContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_unaryForm)
try:
self.enterOuterAlt(localctx, 1)
self.state = 88
self.parameter()
self.state = 89
self.match(TaleParser.IDENTIFIER)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PrefixOperatorFormContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def OPERATOR(self):
return self.getToken(TaleParser.OPERATOR, 0)
def singleParameter(self):
return self.getTypedRuleContext(TaleParser.SingleParameterContext,0)
def getRuleIndex(self):
return TaleParser.RULE_prefixOperatorForm
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPrefixOperatorForm" ):
listener.enterPrefixOperatorForm(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPrefixOperatorForm" ):
listener.exitPrefixOperatorForm(self)
def prefixOperatorForm(self):
localctx = TaleParser.PrefixOperatorFormContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_prefixOperatorForm)
try:
self.enterOuterAlt(localctx, 1)
self.state = 91
self.match(TaleParser.OPERATOR)
self.state = 92
self.singleParameter()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BinaryFormContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parameter(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.ParameterContext)
else:
return self.getTypedRuleContext(TaleParser.ParameterContext,i)
def OPERATOR(self):
return self.getToken(TaleParser.OPERATOR, 0)
def getRuleIndex(self):
return TaleParser.RULE_binaryForm
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBinaryForm" ):
listener.enterBinaryForm(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBinaryForm" ):
listener.exitBinaryForm(self)
def binaryForm(self):
localctx = TaleParser.BinaryFormContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_binaryForm)
try:
self.enterOuterAlt(localctx, 1)
self.state = 94
self.parameter()
self.state = 95
self.match(TaleParser.OPERATOR)
self.state = 96
self.parameter()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class KeywordFormContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parameter(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.ParameterContext)
else:
return self.getTypedRuleContext(TaleParser.ParameterContext,i)
def IDENTIFIER(self, i:int=None):
if i is None:
return self.getTokens(TaleParser.IDENTIFIER)
else:
return self.getToken(TaleParser.IDENTIFIER, i)
def getRuleIndex(self):
return TaleParser.RULE_keywordForm
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterKeywordForm" ):
listener.enterKeywordForm(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitKeywordForm" ):
listener.exitKeywordForm(self)
def keywordForm(self):
localctx = TaleParser.KeywordFormContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_keywordForm)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 99
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
if la_ == 1:
self.state = 98
self.parameter()
self.state = 104
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 101
self.match(TaleParser.IDENTIFIER)
self.state = 102
self.match(TaleParser.T__1)
self.state = 103
self.parameter()
self.state = 106
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==TaleParser.IDENTIFIER):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PrimitiveFormContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def getRuleIndex(self):
return TaleParser.RULE_primitiveForm
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPrimitiveForm" ):
listener.enterPrimitiveForm(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPrimitiveForm" ):
listener.exitPrimitiveForm(self)
def primitiveForm(self):
localctx = TaleParser.PrimitiveFormContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_primitiveForm)
try:
self.enterOuterAlt(localctx, 1)
self.state = 108
self.match(TaleParser.IDENTIFIER)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def singleParameter(self):
return self.getTypedRuleContext(TaleParser.SingleParameterContext,0)
def tupleParameter(self):
return self.getTypedRuleContext(TaleParser.TupleParameterContext,0)
def getRuleIndex(self):
return TaleParser.RULE_parameter
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParameter" ):
listener.enterParameter(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParameter" ):
listener.exitParameter(self)
def parameter(self):
localctx = TaleParser.ParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_parameter)
try:
self.state = 112
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,6,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 110
self.singleParameter()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 111
self.tupleParameter()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class TupleParameterContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def singleParameter(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.SingleParameterContext)
else:
return self.getTypedRuleContext(TaleParser.SingleParameterContext,i)
def getRuleIndex(self):
return TaleParser.RULE_tupleParameter
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTupleParameter" ):
listener.enterTupleParameter(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTupleParameter" ):
listener.exitTupleParameter(self)
def tupleParameter(self):
localctx = TaleParser.TupleParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 20, self.RULE_tupleParameter)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 114
self.singleParameter()
self.state = 117
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 115
self.match(TaleParser.T__2)
self.state = 116
self.singleParameter()
self.state = 119
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==TaleParser.T__2):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SingleParameterContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def simpleParameter(self):
return self.getTypedRuleContext(TaleParser.SimpleParameterContext,0)
def patternMatchingParameter(self):
return self.getTypedRuleContext(TaleParser.PatternMatchingParameterContext,0)
def getRuleIndex(self):
return TaleParser.RULE_singleParameter
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSingleParameter" ):
listener.enterSingleParameter(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSingleParameter" ):
listener.exitSingleParameter(self)
def singleParameter(self):
localctx = TaleParser.SingleParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 22, self.RULE_singleParameter)
try:
self.state = 123
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.T__3]:
self.enterOuterAlt(localctx, 1)
self.state = 121
self.simpleParameter()
pass
elif token in [TaleParser.IDENTIFIER, TaleParser.NUMBER, TaleParser.STRING]:
self.enterOuterAlt(localctx, 2)
self.state = 122
self.patternMatchingParameter()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SimpleParameterContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parameterName(self):
return self.getTypedRuleContext(TaleParser.ParameterNameContext,0)
def parameterType(self):
return self.getTypedRuleContext(TaleParser.ParameterTypeContext,0)
def getRuleIndex(self):
return TaleParser.RULE_simpleParameter
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSimpleParameter" ):
listener.enterSimpleParameter(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSimpleParameter" ):
listener.exitSimpleParameter(self)
def simpleParameter(self):
localctx = TaleParser.SimpleParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 24, self.RULE_simpleParameter)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 125
self.match(TaleParser.T__3)
self.state = 126
self.parameterName()
self.state = 129
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TaleParser.T__1:
self.state = 127
self.match(TaleParser.T__1)
self.state = 128
self.parameterType()
self.state = 131
self.match(TaleParser.T__4)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PatternMatchingParameterContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def literal(self):
return self.getTypedRuleContext(TaleParser.LiteralContext,0)
def getRuleIndex(self):
return TaleParser.RULE_patternMatchingParameter
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPatternMatchingParameter" ):
listener.enterPatternMatchingParameter(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPatternMatchingParameter" ):
listener.exitPatternMatchingParameter(self)
def patternMatchingParameter(self):
localctx = TaleParser.PatternMatchingParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 26, self.RULE_patternMatchingParameter)
try:
self.state = 135
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.IDENTIFIER]:
self.enterOuterAlt(localctx, 1)
self.state = 133
self.match(TaleParser.IDENTIFIER)
pass
elif token in [TaleParser.NUMBER, TaleParser.STRING]:
self.enterOuterAlt(localctx, 2)
self.state = 134
self.literal()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterNameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def getRuleIndex(self):
return TaleParser.RULE_parameterName
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParameterName" ):
listener.enterParameterName(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParameterName" ):
listener.exitParameterName(self)
def parameterName(self):
localctx = TaleParser.ParameterNameContext(self, self._ctx, self.state)
self.enterRule(localctx, 28, self.RULE_parameterName)
try:
self.enterOuterAlt(localctx, 1)
self.state = 137
self.match(TaleParser.IDENTIFIER)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterTypeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def getRuleIndex(self):
return TaleParser.RULE_parameterType
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParameterType" ):
listener.enterParameterType(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParameterType" ):
listener.exitParameterType(self)
def parameterType(self):
localctx = TaleParser.ParameterTypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 30, self.RULE_parameterType)
try:
self.enterOuterAlt(localctx, 1)
self.state = 139
self.match(TaleParser.IDENTIFIER)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class AssignmentBodyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def simpleAssignmentBody(self):
return self.getTypedRuleContext(TaleParser.SimpleAssignmentBodyContext,0)
def compoundAssignmentBody(self):
return self.getTypedRuleContext(TaleParser.CompoundAssignmentBodyContext,0)
def getRuleIndex(self):
return TaleParser.RULE_assignmentBody
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAssignmentBody" ):
listener.enterAssignmentBody(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAssignmentBody" ):
listener.exitAssignmentBody(self)
def assignmentBody(self):
localctx = TaleParser.AssignmentBodyContext(self, self._ctx, self.state)
self.enterRule(localctx, 32, self.RULE_assignmentBody)
try:
self.state = 143
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.IDENTIFIER, TaleParser.NUMBER, TaleParser.OPERATOR, TaleParser.STRING]:
self.enterOuterAlt(localctx, 1)
self.state = 141
self.simpleAssignmentBody()
pass
elif token in [TaleParser.INDENT]:
self.enterOuterAlt(localctx, 2)
self.state = 142
self.compoundAssignmentBody()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SimpleAssignmentBodyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def expression(self):
return self.getTypedRuleContext(TaleParser.ExpressionContext,0)
def getRuleIndex(self):
return TaleParser.RULE_simpleAssignmentBody
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSimpleAssignmentBody" ):
listener.enterSimpleAssignmentBody(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSimpleAssignmentBody" ):
listener.exitSimpleAssignmentBody(self)
def simpleAssignmentBody(self):
localctx = TaleParser.SimpleAssignmentBodyContext(self, self._ctx, self.state)
self.enterRule(localctx, 34, self.RULE_simpleAssignmentBody)
try:
self.enterOuterAlt(localctx, 1)
self.state = 145
self.expression()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class CompoundAssignmentBodyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def INDENT(self):
return self.getToken(TaleParser.INDENT, 0)
def DEDENT(self):
return self.getToken(TaleParser.DEDENT, 0)
def NEWLINE(self, i:int=None):
if i is None:
return self.getTokens(TaleParser.NEWLINE)
else:
return self.getToken(TaleParser.NEWLINE, i)
def statement(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.StatementContext)
else:
return self.getTypedRuleContext(TaleParser.StatementContext,i)
def getRuleIndex(self):
return TaleParser.RULE_compoundAssignmentBody
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCompoundAssignmentBody" ):
listener.enterCompoundAssignmentBody(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCompoundAssignmentBody" ):
listener.exitCompoundAssignmentBody(self)
def compoundAssignmentBody(self):
localctx = TaleParser.CompoundAssignmentBodyContext(self, self._ctx, self.state)
self.enterRule(localctx, 36, self.RULE_compoundAssignmentBody)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 147
self.match(TaleParser.INDENT)
self.state = 150
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 150
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.NEWLINE]:
self.state = 148
self.match(TaleParser.NEWLINE)
pass
elif token in [TaleParser.T__3, TaleParser.IDENTIFIER, TaleParser.NUMBER, TaleParser.OPERATOR, TaleParser.STRING]:
self.state = 149
self.statement()
pass
else:
raise NoViableAltException(self)
self.state = 152
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TaleParser.T__3) | (1 << TaleParser.IDENTIFIER) | (1 << TaleParser.NUMBER) | (1 << TaleParser.OPERATOR) | (1 << TaleParser.STRING) | (1 << TaleParser.NEWLINE))) != 0)):
break
self.state = 154
self.match(TaleParser.DEDENT)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ExpressionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unary(self):
return self.getTypedRuleContext(TaleParser.UnaryContext,0)
def prefixOperator(self):
return self.getTypedRuleContext(TaleParser.PrefixOperatorContext,0)
def binary(self):
return self.getTypedRuleContext(TaleParser.BinaryContext,0)
def keyword(self):
return self.getTypedRuleContext(TaleParser.KeywordContext,0)
def primitive(self):
return self.getTypedRuleContext(TaleParser.PrimitiveContext,0)
def getRuleIndex(self):
return TaleParser.RULE_expression
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterExpression" ):
listener.enterExpression(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitExpression" ):
listener.exitExpression(self)
def expression(self):
localctx = TaleParser.ExpressionContext(self, self._ctx, self.state)
self.enterRule(localctx, 38, self.RULE_expression)
try:
self.state = 161
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,14,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 156
self.unary(0)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 157
self.prefixOperator()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 158
self.binary(0)
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 159
self.keyword()
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 160
self.primitive()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class UnaryContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def primitive(self):
return self.getTypedRuleContext(TaleParser.PrimitiveContext,0)
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def unary(self):
return self.getTypedRuleContext(TaleParser.UnaryContext,0)
def getRuleIndex(self):
return TaleParser.RULE_unary
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUnary" ):
listener.enterUnary(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUnary" ):
listener.exitUnary(self)
def unary(self, _p:int=0):
    """Parse left-recursive rule `unary`: primitive IDENTIFIER (IDENTIFIER)*.

    Uses ANTLR's precedence-climbing recursion scheme; `_p` is the minimum
    precedence allowed when deciding whether to keep extending the
    left-recursive tail. ANTLR-generated; do not hand-edit the logic.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = TaleParser.UnaryContext(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 40
    self.enterRecursionRule(localctx, 40, self.RULE_unary, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 164
        self.primitive()
        self.state = 165
        self.match(TaleParser.IDENTIFIER)
        self._ctx.stop = self._input.LT(-1)
        self.state = 171
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
        # Each loop iteration consumes one more trailing IDENTIFIER,
        # wrapping the previous context in a new UnaryContext.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = TaleParser.UnaryContext(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_unary)
                self.state = 167
                if not self.precpred(self._ctx, 2):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                self.state = 168
                self.match(TaleParser.IDENTIFIER)
            self.state = 173
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        # Recursion rules unwind their context stack instead of exitRule().
        self.unrollRecursionContexts(_parentctx)
    return localctx
class PrefixOperatorContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def OPERATOR(self):
return self.getToken(TaleParser.OPERATOR, 0)
def primitiveItem(self):
return self.getTypedRuleContext(TaleParser.PrimitiveItemContext,0)
def expression(self):
return self.getTypedRuleContext(TaleParser.ExpressionContext,0)
def getRuleIndex(self):
return TaleParser.RULE_prefixOperator
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPrefixOperator" ):
listener.enterPrefixOperator(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPrefixOperator" ):
listener.exitPrefixOperator(self)
def prefixOperator(self):
localctx = TaleParser.PrefixOperatorContext(self, self._ctx, self.state)
self.enterRule(localctx, 42, self.RULE_prefixOperator)
try:
self.state = 181
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 174
self.match(TaleParser.OPERATOR)
self.state = 175
self.primitiveItem()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 176
self.match(TaleParser.OPERATOR)
self.state = 177
self.match(TaleParser.T__3)
self.state = 178
self.expression()
self.state = 179
self.match(TaleParser.T__4)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BinaryContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def binaryOperand(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.BinaryOperandContext)
else:
return self.getTypedRuleContext(TaleParser.BinaryOperandContext,i)
def OPERATOR(self):
return self.getToken(TaleParser.OPERATOR, 0)
def binary(self):
return self.getTypedRuleContext(TaleParser.BinaryContext,0)
def getRuleIndex(self):
return TaleParser.RULE_binary
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBinary" ):
listener.enterBinary(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBinary" ):
listener.exitBinary(self)
def binary(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = TaleParser.BinaryContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 44
self.enterRecursionRule(localctx, 44, self.RULE_binary, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 184
self.binaryOperand()
self.state = 185
self.match(TaleParser.OPERATOR)
self.state = 186
self.binaryOperand()
self._ctx.stop = self._input.LT(-1)
self.state = 193
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,17,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = TaleParser.BinaryContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_binary)
self.state = 188
if not self.precpred(self._ctx, 2):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
self.state = 189
self.match(TaleParser.OPERATOR)
self.state = 190
self.binaryOperand()
self.state = 195
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,17,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class BinaryOperandContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unary(self):
return self.getTypedRuleContext(TaleParser.UnaryContext,0)
def primitive(self):
return self.getTypedRuleContext(TaleParser.PrimitiveContext,0)
def getRuleIndex(self):
return TaleParser.RULE_binaryOperand
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBinaryOperand" ):
listener.enterBinaryOperand(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBinaryOperand" ):
listener.exitBinaryOperand(self)
def binaryOperand(self):
localctx = TaleParser.BinaryOperandContext(self, self._ctx, self.state)
self.enterRule(localctx, 46, self.RULE_binaryOperand)
try:
self.state = 198
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 196
self.unary(0)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 197
self.primitive()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class KeywordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def keywordArgument(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.KeywordArgumentContext)
else:
return self.getTypedRuleContext(TaleParser.KeywordArgumentContext,i)
def keywordName(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.KeywordNameContext)
else:
return self.getTypedRuleContext(TaleParser.KeywordNameContext,i)
def getRuleIndex(self):
return TaleParser.RULE_keyword
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterKeyword" ):
listener.enterKeyword(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitKeyword" ):
listener.exitKeyword(self)
def keyword(self):
localctx = TaleParser.KeywordContext(self, self._ctx, self.state)
self.enterRule(localctx, 48, self.RULE_keyword)
try:
self.enterOuterAlt(localctx, 1)
self.state = 201
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,19,self._ctx)
if la_ == 1:
self.state = 200
self.keywordArgument()
self.state = 207
self._errHandler.sync(self)
_alt = 1
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt == 1:
self.state = 203
self.keywordName()
self.state = 204
self.match(TaleParser.T__1)
self.state = 205
self.keywordArgument()
else:
raise NoViableAltException(self)
self.state = 209
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,20,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class KeywordArgumentContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unary(self):
return self.getTypedRuleContext(TaleParser.UnaryContext,0)
def binary(self):
return self.getTypedRuleContext(TaleParser.BinaryContext,0)
def primitive(self):
return self.getTypedRuleContext(TaleParser.PrimitiveContext,0)
def getRuleIndex(self):
return TaleParser.RULE_keywordArgument
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterKeywordArgument" ):
listener.enterKeywordArgument(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitKeywordArgument" ):
listener.exitKeywordArgument(self)
def keywordArgument(self):
localctx = TaleParser.KeywordArgumentContext(self, self._ctx, self.state)
self.enterRule(localctx, 50, self.RULE_keywordArgument)
try:
self.state = 214
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,21,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 211
self.unary(0)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 212
self.binary(0)
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 213
self.primitive()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class KeywordNameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def getRuleIndex(self):
return TaleParser.RULE_keywordName
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterKeywordName" ):
listener.enterKeywordName(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitKeywordName" ):
listener.exitKeywordName(self)
def keywordName(self):
localctx = TaleParser.KeywordNameContext(self, self._ctx, self.state)
self.enterRule(localctx, 52, self.RULE_keywordName)
try:
self.enterOuterAlt(localctx, 1)
self.state = 216
self.match(TaleParser.IDENTIFIER)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PrimitiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def primitiveItem(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(TaleParser.PrimitiveItemContext)
else:
return self.getTypedRuleContext(TaleParser.PrimitiveItemContext,i)
def getRuleIndex(self):
return TaleParser.RULE_primitive
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPrimitive" ):
listener.enterPrimitive(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPrimitive" ):
listener.exitPrimitive(self)
def primitive(self):
localctx = TaleParser.PrimitiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 54, self.RULE_primitive)
try:
self.enterOuterAlt(localctx, 1)
self.state = 218
self.primitiveItem()
self.state = 223
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,22,self._ctx)
while _alt!=1 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1+1:
self.state = 219
self.match(TaleParser.T__2)
self.state = 220
self.primitiveItem()
self.state = 225
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,22,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PrimitiveItemContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def IDENTIFIER(self):
return self.getToken(TaleParser.IDENTIFIER, 0)
def literal(self):
return self.getTypedRuleContext(TaleParser.LiteralContext,0)
def getRuleIndex(self):
return TaleParser.RULE_primitiveItem
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPrimitiveItem" ):
listener.enterPrimitiveItem(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPrimitiveItem" ):
listener.exitPrimitiveItem(self)
def primitiveItem(self):
localctx = TaleParser.PrimitiveItemContext(self, self._ctx, self.state)
self.enterRule(localctx, 56, self.RULE_primitiveItem)
try:
self.state = 228
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.IDENTIFIER]:
self.enterOuterAlt(localctx, 1)
self.state = 226
self.match(TaleParser.IDENTIFIER)
pass
elif token in [TaleParser.NUMBER, TaleParser.STRING]:
self.enterOuterAlt(localctx, 2)
self.state = 227
self.literal()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class LiteralContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def intLiteral(self):
return self.getTypedRuleContext(TaleParser.IntLiteralContext,0)
def stringLiteral(self):
return self.getTypedRuleContext(TaleParser.StringLiteralContext,0)
def getRuleIndex(self):
return TaleParser.RULE_literal
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterLiteral" ):
listener.enterLiteral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitLiteral" ):
listener.exitLiteral(self)
def literal(self):
localctx = TaleParser.LiteralContext(self, self._ctx, self.state)
self.enterRule(localctx, 58, self.RULE_literal)
try:
self.state = 232
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TaleParser.NUMBER]:
self.enterOuterAlt(localctx, 1)
self.state = 230
self.intLiteral()
pass
elif token in [TaleParser.STRING]:
self.enterOuterAlt(localctx, 2)
self.state = 231
self.stringLiteral()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class IntLiteralContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def NUMBER(self):
return self.getToken(TaleParser.NUMBER, 0)
def getRuleIndex(self):
return TaleParser.RULE_intLiteral
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterIntLiteral" ):
listener.enterIntLiteral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitIntLiteral" ):
listener.exitIntLiteral(self)
def intLiteral(self):
localctx = TaleParser.IntLiteralContext(self, self._ctx, self.state)
self.enterRule(localctx, 60, self.RULE_intLiteral)
try:
self.enterOuterAlt(localctx, 1)
self.state = 234
self.match(TaleParser.NUMBER)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StringLiteralContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def STRING(self):
return self.getToken(TaleParser.STRING, 0)
def getRuleIndex(self):
return TaleParser.RULE_stringLiteral
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStringLiteral" ):
listener.enterStringLiteral(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStringLiteral" ):
listener.exitStringLiteral(self)
def stringLiteral(self):
localctx = TaleParser.StringLiteralContext(self, self._ctx, self.state)
self.enterRule(localctx, 62, self.RULE_stringLiteral)
try:
self.enterOuterAlt(localctx, 1)
self.state = 236
self.match(TaleParser.STRING)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
    """Dispatch a semantic-predicate check to the rule-specific handler.

    Rule index 20 (`unary`) maps to unary_sempred and 22 (`binary`) to
    binary_sempred — the two left-recursive rules of this grammar.

    Raises:
        Exception: if ruleIndex has no registered predicate.
    """
    # Lazily build the dispatch table on first use.
    # Fix: identity comparison `is None` instead of `== None` (PEP 8).
    if self._predicates is None:
        self._predicates = {
            20: self.unary_sempred,
            22: self.binary_sempred,
        }
    pred = self._predicates.get(ruleIndex, None)
    if pred is None:
        raise Exception("No predicate with index:" + str(ruleIndex))
    return pred(localctx, predIndex)
def unary_sempred(self, localctx:UnaryContext, predIndex:int):
    """Semantic predicate for the left-recursive `unary` rule.

    Only predIndex 0 is defined; any other index yields None.
    """
    if predIndex != 0:
        return None
    return self.precpred(self._ctx, 2)
def binary_sempred(self, localctx:BinaryContext, predIndex:int):
    """Semantic predicate for the left-recursive `binary` rule.

    Only predIndex 1 is defined; any other index yields None.
    """
    if predIndex != 1:
        return None
    return self.precpred(self._ctx, 2)
| [
"io.StringIO",
"antlr4.error.Errors.FailedPredicateException"
] | [((255, 265), 'io.StringIO', 'StringIO', ([], {}), '()\n', (263, 265), False, 'from io import StringIO\n'), ((50142, 50203), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 2)"""'], {}), "(self, 'self.precpred(self._ctx, 2)')\n", (50166, 50203), False, 'from antlr4.error.Errors import FailedPredicateException\n'), ((55301, 55362), 'antlr4.error.Errors.FailedPredicateException', 'FailedPredicateException', (['self', '"""self.precpred(self._ctx, 2)"""'], {}), "(self, 'self.precpred(self._ctx, 2)')\n", (55325, 55362), False, 'from antlr4.error.Errors import FailedPredicateException\n')] |
# -*- coding: UTF-8 -*-
'''
Created on May 14, 2014
@author: <NAME> <<EMAIL>>
'''
import os, datetime, sys, platform, base64
class Configuration(object):
    """Application configuration: OS detection, filesystem paths and small
    string utilities shared across the application."""

    def __init__(self):
        # Map os.name onto the application's own OS labels.
        if os.name == 'posix':
            self.OsType = 'linux'
        elif os.name == 'nt':
            self.OsType = 'Windows'
        elif os.name == 'os2':
            self.OsType = 'check'
        self.application_name = 'Metadata Quality Control'
        self.application_version = '0.4'
        self.user_home_path = os.path.expanduser('~')
        if self.OsType == 'Windows':
            self.base_path = str(os.getcwd()) + str(os.sep)
            self.assets_path = r'' + (os.path.join(self.base_path, 'assets' + str(os.sep)))
            try:
                # sys._MEIPASS only exists when running from a PyInstaller
                # bundle; outside a bundle the attribute is missing.
                self.avpreserve_img = os.path.join(
                    sys._MEIPASS, 'assets' + str(os.sep) + 'avpreserve.png')
            except AttributeError:
                pass
        else:
            self.base_path = str(os.getcwd()) + str(os.sep)
            # assets_path ends with os.sep, so plain concatenation below
            # still yields a valid file path.
            self.assets_path = r'' + (os.path.join(self.base_path, 'assets' + str(os.sep)))
            self.avpreserve_img = r'' + (os.path.join(self.assets_path) + 'avpreserve.png')
        self.logo_sign_small = 'logo_sign_small.png'

    def getImagesPath(self):
        """Return the assets directory path."""
        return str(self.assets_path)

    def getAvpreserve_img(self):
        """Return the path of the avpreserve logo image."""
        return self.avpreserve_img

    def getBasePath(self):
        """Return the application base (working) directory."""
        return str(self.base_path)

    def getApplicationVersion(self):
        """Return the application version string."""
        return str(self.application_version)

    def getConfig_file_path(self):
        # NOTE(review): self.config_file_path is never assigned in this
        # class; calling this raises AttributeError unless a caller or
        # subclass sets the attribute first.
        return self.config_file_path

    def EncodeInfo(self, string_to_be_encoded):
        """Return the value stripped and base16-encoded twice (as bytes).

        The input is stringified and UTF-8 encoded first, which is required
        by ``base64.b16encode`` on Python 3 (it only accepts bytes).
        """
        data = str(string_to_be_encoded).strip().encode('utf-8')
        return base64.b16encode(base64.b16encode(data))

    def getLogoSignSmall(self):
        """Return the path of the small logo image."""
        if self.getOsType() == 'Windows':
            try:
                # Prefer the PyInstaller bundle location when available.
                return os.path.join(
                    sys._MEIPASS, 'assets' + str(os.sep) + str(self.logo_sign_small))
            except AttributeError:
                pass
        return os.path.join(self.assets_path, str(self.logo_sign_small))

    def getOsType(self):
        """Return the detected OS label ('linux', 'Windows' or 'check')."""
        return str(self.OsType)

    def getApplicationName(self):
        """Return the human readable application name."""
        return str(self.application_name)

    def getUserHomePath(self):
        """Return the current user's home directory."""
        return str(os.path.expanduser('~'))

    def getDebugFilePath(self):
        # NOTE(review): self.log_file_path is never assigned in this class;
        # same caveat as getConfig_file_path.
        return str(self.log_file_path)

    def getWindowsInformation(self):
        """
        Gets detail information of Windows.

        @return: dict of Windows information; empty when not on Windows.
        """
        WindowsInformation = {}
        try:
            major, minor, build, platformType, servicePack = sys.getwindowsversion()
            WindowsInformation['major'] = major
            WindowsInformation['minor'] = minor
            WindowsInformation['build'] = build
            WindowsInformation['platformType'] = platformType
            WindowsInformation['servicePack'] = servicePack
            windowDetailedName = platform.platform()
            WindowsInformation['platform'] = windowDetailedName
            # e.g. 'Windows-7-6.1.7601-SP1' -> ['Windows', '7', ...]
            windowDetailedName = str(windowDetailedName).split('-')
            if windowDetailedName[0] is not None and (str(windowDetailedName[0]) == 'Windows' or str(windowDetailedName[0]) == 'windows'):
                WindowsInformation['isWindows'] = True
            else:
                WindowsInformation['isWindows'] = False
            if windowDetailedName[1] is not None and (str(windowDetailedName[1]) != ''):
                WindowsInformation['WindowsType'] = str(windowDetailedName[1])
            else:
                WindowsInformation['WindowsType'] = None
            WindowsInformation['ProcessorInfo'] = platform.processor()
            try:
                # The x86 Program Files directory only exists on 64-bit hosts.
                os.environ["PROGRAMFILES(X86)"]
                bits = 64
            except KeyError:
                bits = 32
            WindowsInformation['bitType'] = "Win{0}".format(bits)
        except Exception:
            # sys.getwindowsversion is missing on non-Windows platforms;
            # return whatever was collected (usually an empty dict).
            pass
        return WindowsInformation

    def CleanStringForBreaks(self, StringToBeCleaned):
        """Return the string stripped of surrounding whitespace and of all
        CR/LF line-break characters.

        @param StringToBeCleaned: the string to clean.
        @return: the cleaned string.
        """
        CleanString = StringToBeCleaned.strip()
        try:
            CleanString = CleanString.replace('\r\n', '')
            CleanString = CleanString.replace('\n', '')
            CleanString = CleanString.replace('\r', '')
        except AttributeError:
            # Best effort: return the stripped value if it has no replace().
            pass
        return CleanString
"sys.getwindowsversion",
"platform.platform",
"os.path.join",
"os.getcwd",
"platform.processor",
"base64.b16encode",
"os.path.expanduser"
] | [((534, 557), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (552, 557), False, 'import os, datetime, sys, platform, base64\n'), ((1680, 1718), 'base64.b16encode', 'base64.b16encode', (['string_to_be_encoded'], {}), '(string_to_be_encoded)\n', (1696, 1718), False, 'import os, datetime, sys, platform, base64\n'), ((1984, 2014), 'os.path.join', 'os.path.join', (['self.assets_path'], {}), '(self.assets_path)\n', (1996, 2014), False, 'import os, datetime, sys, platform, base64\n'), ((2255, 2278), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (2273, 2278), False, 'import os, datetime, sys, platform, base64\n'), ((2602, 2625), 'sys.getwindowsversion', 'sys.getwindowsversion', ([], {}), '()\n', (2623, 2625), False, 'import os, datetime, sys, platform, base64\n'), ((2926, 2945), 'platform.platform', 'platform.platform', ([], {}), '()\n', (2943, 2945), False, 'import os, datetime, sys, platform, base64\n'), ((3639, 3659), 'platform.processor', 'platform.processor', ([], {}), '()\n', (3657, 3659), False, 'import os, datetime, sys, platform, base64\n'), ((629, 640), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (638, 640), False, 'import os, datetime, sys, platform, base64\n'), ((958, 969), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (967, 969), False, 'import os, datetime, sys, platform, base64\n'), ((1110, 1140), 'os.path.join', 'os.path.join', (['self.assets_path'], {}), '(self.assets_path)\n', (1122, 1140), False, 'import os, datetime, sys, platform, base64\n')] |
# defaults.py: contains the built-in variables, events and methods
# used for scripting the C program
import event

_event_names = ["on_start", "on_exit"]
# One Event instance per built-in event name.
events = {name: event.Event() for name in _event_names}
| [
"event.Event"
] | [((211, 224), 'event.Event', 'event.Event', ([], {}), '()\n', (222, 224), False, 'import event\n')] |
# Imports for urn construction utility methods
import logging
from datahub.emitter.mce_builder import make_dataset_urn, make_tag_urn
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
# Imports for metadata model classes
from datahub.metadata.schema_classes import (
    ChangeTypeClass,
    GlobalTagsClass,
    TagAssociationClass,
)
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# Build URNs for the dataset to be tagged and for the tag itself.
dataset_urn = make_dataset_urn(platform="hive", name="realestate_db.sales", env="PROD")
tag_urn = make_tag_urn("purchase")
# Propose (UPSERT) a globalTags aspect attaching the tag to the dataset.
# NOTE: an upsert replaces the existing globalTags aspect wholesale.
event: MetadataChangeProposalWrapper = MetadataChangeProposalWrapper(
    entityType="dataset",
    changeType=ChangeTypeClass.UPSERT,
    entityUrn=dataset_urn,
    aspectName="globalTags",
    aspect=GlobalTagsClass(tags=[TagAssociationClass(tag=tag_urn)]),
)
# Create rest emitter targeting a locally running DataHub GMS instance.
rest_emitter = DatahubRestEmitter(gms_server="http://localhost:8080")
rest_emitter.emit(event)
log.info(f"Set tags to {tag_urn} for dataset {dataset_urn}")
| [
"logging.getLogger",
"logging.basicConfig",
"datahub.metadata.schema_classes.TagAssociationClass",
"datahub.emitter.rest_emitter.DatahubRestEmitter",
"datahub.emitter.mce_builder.make_dataset_urn",
"datahub.emitter.mce_builder.make_tag_urn"
] | [((416, 443), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (433, 443), False, 'import logging\n'), ((444, 483), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (463, 483), False, 'import logging\n'), ((499, 572), 'datahub.emitter.mce_builder.make_dataset_urn', 'make_dataset_urn', ([], {'platform': '"""hive"""', 'name': '"""realestate_db.sales"""', 'env': '"""PROD"""'}), "(platform='hive', name='realestate_db.sales', env='PROD')\n", (515, 572), False, 'from datahub.emitter.mce_builder import make_dataset_urn, make_tag_urn\n'), ((583, 607), 'datahub.emitter.mce_builder.make_tag_urn', 'make_tag_urn', (['"""purchase"""'], {}), "('purchase')\n", (595, 607), False, 'from datahub.emitter.mce_builder import make_dataset_urn, make_tag_urn\n'), ((908, 962), 'datahub.emitter.rest_emitter.DatahubRestEmitter', 'DatahubRestEmitter', ([], {'gms_server': '"""http://localhost:8080"""'}), "(gms_server='http://localhost:8080')\n", (926, 962), False, 'from datahub.emitter.rest_emitter import DatahubRestEmitter\n'), ((832, 864), 'datahub.metadata.schema_classes.TagAssociationClass', 'TagAssociationClass', ([], {'tag': 'tag_urn'}), '(tag=tag_urn)\n', (851, 864), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, GlobalTagsClass, TagAssociationClass\n')] |
from __future__ import print_function
from sympy import symbols, Matrix
from galgebra.printer import xpdf, Format
def main():
    """Demonstrate symbolic matrix products rendered via galgebra's printer."""
    # Enable LaTeX-style output formatting.
    Format()

    a = Matrix(2, 2, (1, 2, 3, 4))
    b = Matrix(2, 1, (5, 6))
    c = a * b
    print(a, b, '=', c)

    x, y = symbols('x, y')
    d = Matrix(1, 2, (x ** 3, y ** 3))
    e = Matrix(2, 2, (x ** 2, 2 * x * y, 2 * x * y, y ** 2))
    f = d * e
    print('%', d, e, '=', f)

    # Generate the LaTeX source without invoking a PDF compiler.
    xpdf(pdfprog=None)
    return


if __name__ == "__main__":
    main()
| [
"sympy.symbols",
"galgebra.printer.xpdf",
"sympy.Matrix",
"galgebra.printer.Format"
] | [((131, 139), 'galgebra.printer.Format', 'Format', ([], {}), '()\n', (137, 139), False, 'from galgebra.printer import xpdf, Format\n'), ((148, 174), 'sympy.Matrix', 'Matrix', (['(2)', '(2)', '(1, 2, 3, 4)'], {}), '(2, 2, (1, 2, 3, 4))\n', (154, 174), False, 'from sympy import symbols, Matrix\n'), ((188, 208), 'sympy.Matrix', 'Matrix', (['(2)', '(1)', '(5, 6)'], {}), '(2, 1, (5, 6))\n', (194, 208), False, 'from sympy import symbols, Matrix\n'), ((261, 276), 'sympy.symbols', 'symbols', (['"""x, y"""'], {}), "('x, y')\n", (268, 276), False, 'from sympy import symbols, Matrix\n'), ((289, 319), 'sympy.Matrix', 'Matrix', (['(1)', '(2)', '(x ** 3, y ** 3)'], {}), '(1, 2, (x ** 3, y ** 3))\n', (295, 319), False, 'from sympy import symbols, Matrix\n'), ((332, 384), 'sympy.Matrix', 'Matrix', (['(2)', '(2)', '(x ** 2, 2 * x * y, 2 * x * y, y ** 2)'], {}), '(2, 2, (x ** 2, 2 * x * y, 2 * x * y, y ** 2))\n', (338, 384), False, 'from sympy import symbols, Matrix\n'), ((448, 466), 'galgebra.printer.xpdf', 'xpdf', ([], {'pdfprog': 'None'}), '(pdfprog=None)\n', (452, 466), False, 'from galgebra.printer import xpdf, Format\n')] |
"""Implements a basic flask app that provides hashes of text."""
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_login
#pylint: disable=invalid-name
# SECURITY(review): debug mode, the database URI (with embedded credentials)
# and the secret key are hard-coded here; they should be loaded from
# environment variables or a config file kept out of version control.
app = Flask(__name__)
app.config['DEBUG'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgres://yjjuylsytqewni:d0d63322c6abd33e2dadeafd7ef2501f73af54cf2d39596e464ea2c18b0234a3@ec2-23-23-78-213.compute-1.amazonaws.com:5432/d3gdnt7fkmonn1' #pylint: disable=line-too-long
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.secret_key = 'HGTYNVK123LOL908973'
db = SQLAlchemy(app)
login_manager = flask_login.LoginManager()
login_manager.init_app(app)
# These modules import `app`/`db` back from this package, so they can only
# be imported after both objects exist above.
# This import need to be here that's why disabling pylint
#pylint: disable=wrong-import-position
import hopsapp.models
import hopsapp.routes
| [
"flask_sqlalchemy.SQLAlchemy",
"flask_login.LoginManager",
"flask.Flask"
] | [((185, 200), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (190, 200), False, 'from flask import Flask\n'), ((550, 565), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (560, 565), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((583, 609), 'flask_login.LoginManager', 'flask_login.LoginManager', ([], {}), '()\n', (607, 609), False, 'import flask_login\n')] |
# Loads a saved RealSense color frame (NumPy .npy dump) from disk and shows
# it in an OpenCV window until a key is pressed. The commented-out blocks
# below do the same for the RealSense depth and ZED color/depth dumps.
import numpy as np;
import cv2;
n = 428671
img_RS_color = np.load('/home/p4bhattachan/gripper/3DCameraServer/testImages/npyFiles/{}_RS_color.npy'.format(n))
cv2.imshow('RS Color Image {}'.format(n), img_RS_color)
#
# # img_RS_depth = np.load('/home/p4bhattachan/gripper/3DCameraServer/testImages/npyFiles/{}_RS_depth.npy'.format(n))
# # cv2.imshow('RS Depth Image {}'.format(n), img_RS_depth)
#
# img_ZED_color = np.load('/home/p4bhattachan/gripper/3DCameraServer/testImages/npyFiles/{}_ZED_color.npy'.format(n))
# cv2.imshow('ZED Color Image {}'.format(n), img_ZED_color)
#
# # img_ZED_depth = np.load('/home/p4bhattachan/gripper/3DCameraServer/testImages/npyFiles/{}_ZED_depth.npy'.format(n))
# # cv2.imshow('ZED Depth Image {}'.format(n), img_ZED_depth)
# Block until any key is pressed, then tear down all OpenCV windows.
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"cv2.waitKey",
"cv2.destroyAllWindows"
] | [((759, 773), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (770, 773), False, 'import cv2\n'), ((774, 797), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (795, 797), False, 'import cv2\n')] |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines sequence of notes objects for creating datasets."""
import collections
import copy
# internal imports
from magenta.music import constants
from magenta.protobuf import music_pb2
# Set the quantization cutoff.
# Note events before this cutoff are rounded down to nearest step. Notes
# above this cutoff are rounded up to nearest step. The cutoff is given as a
# fraction of a step.
# For example, with quantize_cutoff = 0.75 using 0-based indexing,
# if .75 < event <= 1.75, it will be quantized to step 1.
# If 1.75 < event <= 2.75 it will be quantized to step 2.
# A number close to 1.0 gives less wiggle room for notes that start early,
# and they will be snapped to the previous step.
QUANTIZE_CUTOFF = 0.5
# Shortcut to chord symbol text annotation type.
CHORD_SYMBOL = music_pb2.NoteSequence.TextAnnotation.CHORD_SYMBOL
class BadTimeSignatureException(Exception):
  """Raised when a time signature's denominator is not a power of 2."""
  pass
class MultipleTimeSignatureException(Exception):
  """Raised when a NoteSequence contains a time signature change."""
  pass
class MultipleTempoException(Exception):
  """Raised when a NoteSequence contains a tempo change."""
  pass
class NegativeTimeException(Exception):
  """Raised when a note or chord occurs at a negative time."""
  pass
def extract_subsequence(sequence, start_time, end_time):
  """Extracts a subsequence from a NoteSequence.

  Notes starting before `start_time` are not included. Notes ending after
  `end_time` are truncated. Note times are not shifted; they keep their
  absolute positions from the original sequence.

  Args:
    sequence: The NoteSequence to extract a subsequence from.
    start_time: The float time in seconds to start the subsequence.
    end_time: The float time in seconds to end the subsequence.

  Returns:
    A new NoteSequence that is a subsequence of `sequence` in the specified
    time range.
  """
  subsequence = music_pb2.NoteSequence()
  subsequence.CopyFrom(sequence)
  del subsequence.notes[:]
  for note in sequence.notes:
    # Keep only notes that begin inside [start_time, end_time).
    if start_time <= note.start_time < end_time:
      kept_note = subsequence.notes.add()
      kept_note.CopyFrom(note)
      kept_note.end_time = min(note.end_time, end_time)
  subsequence.total_time = min(sequence.total_time, end_time)
  return subsequence
def is_power_of_2(x):
  """Return True if `x` is a positive power of 2, False otherwise.

  The previous expression returned `x` itself for falsy input (e.g. 0),
  leaking a non-bool; normalizing to bool is backward compatible for all
  boolean uses.
  """
  return x > 0 and not x & (x - 1)
class QuantizedSequence(object):
  """Holds notes and chords which have been quantized to time steps.

  Notes contain a pitch, velocity, start time, and end time. Notes
  are stored in tracks (which can be different instruments or the same
  instrument). There is also a time signature and key signature.

  Notes stored in this object are not guaranteed to be sorted by time.

  Attributes:
    tracks: A dictionary mapping track number to list of Note tuples. Track
        number is taken from the instrument number of each NoteSequence note.
    chords: A list of ChordSymbol tuples.
    qpm: Quarters per minute. This is needed to recover tempo if converting back
        to MIDI.
    time_signature: This determines the length of a bar of music. This is just
        needed to compute the number of quantization steps per bar, though it
        can also communicate more high level aspects of the music
        (see https://en.wikipedia.org/wiki/Time_signature).
    steps_per_quarter: How many quantization steps per quarter note of music.
    total_steps: The total number of steps in the quantized sequence.
  """
  # Disabling pylint since it is recognizing these as attributes instead of
  # classes.
  # pylint: disable=invalid-name
  Note = collections.namedtuple(
      'Note',
      ['pitch', 'velocity', 'start', 'end', 'instrument', 'program', 'is_drum'])
  TimeSignature = collections.namedtuple('TimeSignature',
                                         ['numerator', 'denominator'])
  ChordSymbol = collections.namedtuple('ChordSymbol', ['step', 'figure'])
  # pylint: enable=invalid-name
  def __init__(self):
    self._reset()
  def _reset(self):
    # Restore defaults: no tracks or chords, 120 qpm, 4/4 time signature,
    # 4 quantization steps per quarter note, zero length.
    self.tracks = {}
    self.chords = []
    self.qpm = 120.0
    self.time_signature = QuantizedSequence.TimeSignature(numerator=4,
                                                         denominator=4)
    self.steps_per_quarter = 4
    self.total_steps = 0
  def steps_per_bar(self):
    """Calculates steps per bar.

    Returns:
      Steps per bar as a floating point number.
    """
    quarters_per_beat = 4.0 / self.time_signature.denominator
    quarters_per_bar = (quarters_per_beat * self.time_signature.numerator)
    steps_per_bar_float = (self.steps_per_quarter * quarters_per_bar)
    return steps_per_bar_float
  def from_note_sequence(self, note_sequence, steps_per_quarter):
    """Populate self with a music_pb2.NoteSequence proto.

    Notes and time signature are saved to self with notes' start and end times
    quantized. If there is no time signature 4/4 is assumed. If there is more
    than one time signature an exception is raised.

    The quarter notes per minute stored in `note_sequence` is used to normalize
    tempo. Regardless of how fast or slow quarter notes are played, a note that
    is played for 1 quarter note will last `steps_per_quarter` time steps in
    the quantized result.

    A note's start and end time are snapped to a nearby quantized step. See
    the comments above `QUANTIZE_CUTOFF` for details.

    Args:
      note_sequence: A music_pb2.NoteSequence protocol buffer.
      steps_per_quarter: Each quarter note of music will be divided into this
          many quantized time steps.

    Raises:
      MultipleTimeSignatureException: If there is a change in time signature
          in `note_sequence`.
      MultipleTempoException: If there is a change in tempo in `note_sequence`.
      BadTimeSignatureException: If the time signature found in `note_sequence`
          has a denominator which is not a power of 2.
      NegativeTimeException: If a note or chord occurs at a negative time.
    """
    self._reset()
    self.steps_per_quarter = steps_per_quarter
    if note_sequence.time_signatures:
      time_signatures = sorted(note_sequence.time_signatures,
                               key=lambda ts: ts.time)
      # There is an implicit 4/4 time signature at 0 time. So if the first time
      # signature is something other than 4/4 and it's at a time other than 0,
      # that's an implicit time signature change.
      if time_signatures[0].time != 0 and not (
          time_signatures[0].numerator == 4 and
          time_signatures[0].denominator == 4):
        raise MultipleTimeSignatureException(
            'NoteSequence has an implicit change from initial 4/4 time '
            'signature.')
      self.time_signature = QuantizedSequence.TimeSignature(
          time_signatures[0].numerator, time_signatures[0].denominator)
      # Any later signature differing from the first is a change -> reject.
      for time_signature in time_signatures[1:]:
        if (time_signature.numerator != self.time_signature.numerator or
            time_signature.denominator != self.time_signature.denominator):
          raise MultipleTimeSignatureException(
              'NoteSequence has at least one time signature change.')
    if not is_power_of_2(self.time_signature.denominator):
      raise BadTimeSignatureException(
          'Denominator is not a power of 2. Time signature: %d/%d' %
          (self.time_signature.numerator, self.time_signature.denominator))
    if note_sequence.tempos:
      tempos = sorted(note_sequence.tempos, key=lambda t: t.time)
      # There is an implicit 120.0 qpm tempo at 0 time. So if the first tempo is
      # something other that 120.0 and it's at a time other than 0, that's an
      # implicit tempo change.
      if tempos[0].time != 0 and tempos[0].qpm != 120.0:
        raise MultipleTempoException(
            'NoteSequence has an implicit tempo change from initial 120.0 qpm')
      self.qpm = tempos[0].qpm
      for tempo in tempos[1:]:
        if tempo.qpm != self.qpm:
          raise MultipleTempoException(
              'NoteSequence has at least one tempo change.')
    else:
      self.qpm = constants.DEFAULT_QUARTERS_PER_MINUTE
    # Compute quantization steps per second.
    steps_per_second = steps_per_quarter * self.qpm / 60.0
    # Round event times to steps using the QUANTIZE_CUTOFF fraction.
    quantize = lambda x: int(x + (1 - QUANTIZE_CUTOFF))
    self.total_steps = quantize(note_sequence.total_time * steps_per_second)
    for note in note_sequence.notes:
      # Quantize the start and end times of the note.
      start_step = quantize(note.start_time * steps_per_second)
      end_step = quantize(note.end_time * steps_per_second)
      if end_step == start_step:
        end_step += 1
      # Do not allow notes to start or end in negative time.
      if start_step < 0 or end_step < 0:
        raise NegativeTimeException(
            'Got negative note time: start_step = %s, end_step = %s' %
            (start_step, end_step))
      # Extend quantized sequence if necessary.
      if end_step > self.total_steps:
        self.total_steps = end_step
      if note.instrument not in self.tracks:
        self.tracks[note.instrument] = []
      self.tracks[note.instrument].append(
          QuantizedSequence.Note(pitch=note.pitch,
                                 velocity=note.velocity,
                                 start=start_step,
                                 end=end_step,
                                 instrument=note.instrument,
                                 program=note.program,
                                 is_drum=note.is_drum))
    # Also add chord symbol annotations to the quantized sequence.
    for annotation in note_sequence.text_annotations:
      if annotation.annotation_type == CHORD_SYMBOL:
        # Quantize the chord time, disallowing negative time.
        step = quantize(annotation.time * steps_per_second)
        if step < 0:
          raise NegativeTimeException(
              'Got negative chord time: step = %s' % step)
        self.chords.append(
            QuantizedSequence.ChordSymbol(step=step, figure=annotation.text))
  def __eq__(self, other):
    # NOTE(review): only tracks present in `self` are compared; tracks that
    # exist solely in `other` are not checked here.
    if not isinstance(other, QuantizedSequence):
      return False
    for track in self.tracks:
      if (track not in other.tracks or
          set(self.tracks[track]) != set(other.tracks[track])):
        return False
    return (
        self.qpm == other.qpm and
        self.time_signature == other.time_signature and
        self.steps_per_quarter == other.steps_per_quarter and
        self.total_steps == other.total_steps and
        set(self.chords) == set(other.chords))
  def __deepcopy__(self, unused_memo=None):
    # Tracks and chords are deep-copied; scalar/tuple fields are shared.
    new_copy = type(self)()
    new_copy.tracks = copy.deepcopy(self.tracks)
    new_copy.chords = copy.deepcopy(self.chords)
    new_copy.qpm = self.qpm
    new_copy.time_signature = self.time_signature
    new_copy.steps_per_quarter = self.steps_per_quarter
    new_copy.total_steps = self.total_steps
    return new_copy
| [
"magenta.protobuf.music_pb2.NoteSequence",
"collections.namedtuple",
"copy.deepcopy"
] | [((2185, 2209), 'magenta.protobuf.music_pb2.NoteSequence', 'music_pb2.NoteSequence', ([], {}), '()\n', (2207, 2209), False, 'from magenta.protobuf import music_pb2\n'), ((3899, 4008), 'collections.namedtuple', 'collections.namedtuple', (['"""Note"""', "['pitch', 'velocity', 'start', 'end', 'instrument', 'program', 'is_drum']"], {}), "('Note', ['pitch', 'velocity', 'start', 'end',\n 'instrument', 'program', 'is_drum'])\n", (3921, 4008), False, 'import collections\n'), ((4036, 4105), 'collections.namedtuple', 'collections.namedtuple', (['"""TimeSignature"""', "['numerator', 'denominator']"], {}), "('TimeSignature', ['numerator', 'denominator'])\n", (4058, 4105), False, 'import collections\n'), ((4163, 4220), 'collections.namedtuple', 'collections.namedtuple', (['"""ChordSymbol"""', "['step', 'figure']"], {}), "('ChordSymbol', ['step', 'figure'])\n", (4185, 4220), False, 'import collections\n'), ((10943, 10969), 'copy.deepcopy', 'copy.deepcopy', (['self.tracks'], {}), '(self.tracks)\n', (10956, 10969), False, 'import copy\n'), ((10992, 11018), 'copy.deepcopy', 'copy.deepcopy', (['self.chords'], {}), '(self.chords)\n', (11005, 11018), False, 'import copy\n')] |
from collections import defaultdict
from celery.task import task
from pandas import concat, DataFrame
from bamboo.core.aggregator import Aggregator
from bamboo.core.frame import add_parent_column, join_dataset
from bamboo.core.parser import Parser
from bamboo.lib.datetools import recognize_dates
from bamboo.lib.jsontools import df_to_jsondict
from bamboo.lib.mongo import MONGO_ID
from bamboo.lib.parsing import parse_columns
from bamboo.lib.query_args import QueryArgs
from bamboo.lib.utils import combine_dicts, flatten, to_list
def calculate_columns(dataset, calculations):
    """Calculate and store new columns for `calculations`.

    The new columns are joined into one DataFrame and merged into the
    dataset's observations; aggregations are saved to their own datasets.

    .. note::

        This can result in race-conditions when:

        - deleting ``controllers.Datasets.DELETE``
        - updating ``controllers.Datasets.POST([dataset_id])``

        Therefore, perform these actions asychronously.

    :param dataset: The dataset to calculate for.
    :param calculations: A list of calculations.
    """
    new_cols = None

    for calc in calculations:
        if calc.aggregation:
            # Aggregations live in separate (aggregated) datasets.
            aggregator = __create_aggregator(
                dataset, calc.formula, calc.name, calc.groups_as_list)
            aggregator.save(dataset)
        else:
            column = parse_columns(dataset, calc.formula, calc.name)[0]
            new_cols = (DataFrame(column) if new_cols is None
                        else new_cols.join(column))

    if new_cols is not None:
        dataset.update_observations(new_cols)

    # propagate calculation to any merged child datasets
    for merged_dataset in dataset.merged_datasets:
        __propagate_column(merged_dataset, dataset)
@task(default_retry_delay=5, ignore_result=True)
def calculate_updates(dataset, new_data, new_dframe_raw=None,
                      parent_dataset_id=None, update_id=None):
    """Update dataset with `new_data`.

    This can result in race-conditions when:

    - deleting ``controllers.Datasets.DELETE``
    - updating ``controllers.Datasets.POST([dataset_id])``

    Therefore, perform these actions asynchronously.

    :param dataset: The dataset to update.
    :param new_data: Data to update this dataset with.
    :param new_dframe_raw: DataFrame to update this dataset with.
    :param parent_dataset_id: If passed add ID as parent ID to column,
        default is None.
    :param update_id: Identifier of this pending update, default is None.
    """
    # Reject updates that would make a join this dataset is the RHS of
    # invalid (duplicate join-column values).
    if not __update_is_valid(dataset, new_dframe_raw):
        dataset.remove_pending_update(update_id)
        return
    # Retries this celery task until the dataset is ready for this update.
    __ensure_ready(dataset, update_id)
    if new_dframe_raw is None:
        new_dframe_raw = dframe_from_update(dataset, new_data)
    new_dframe = recognize_dates(new_dframe_raw, dataset.schema)
    new_dframe = __add_calculations(dataset, new_dframe)
    # set parent id if provided
    if parent_dataset_id:
        new_dframe = add_parent_column(new_dframe, parent_dataset_id)
    dataset.append_observations(new_dframe)
    dataset.clear_summary_stats()
    # Push the change into aggregated, merged and joined datasets.
    propagate(dataset, new_dframe=new_dframe, update={'add': new_dframe_raw})
    dataset.update_complete(update_id)
def dframe_from_update(dataset, new_data):
    """Make a DataFrame for the `new_data`.

    Rows are filtered to columns the dataset knows about, labels are
    translated to slugs and values are coerced through the schema.

    :param dataset: The dataset this update is for.
    :param new_data: Data to add to dframe.
    :type new_data: List.

    :returns: A DataFrame indexed to continue the dataset's existing rows.
    """
    filtered_data = []
    columns = dataset.columns
    labels_to_slugs = dataset.schema.labels_to_slugs
    num_columns = len(columns)
    num_rows = dataset.num_rows
    dframe_empty = not num_columns
    if dframe_empty:
        # No stored observations yet: accept any column in the schema.
        columns = dataset.schema.keys()
    for row in new_data:
        filtered_row = dict()
        for col, val in row.iteritems():
            # special case for reserved keys (e.g. _id)
            if col == MONGO_ID:
                if (not num_columns or col in columns) and\
                        col not in filtered_row.keys():
                    filtered_row[col] = val
            else:
                # if col is a label take slug, if it's a slug take col
                slug = labels_to_slugs.get(
                    col, col if col in labels_to_slugs.values() else None)
                # if slug is valid or there is an empty dframe
                if (slug or col in labels_to_slugs.keys()) and (
                        dframe_empty or slug in columns):
                    filtered_row[slug] = dataset.schema.convert_type(
                        slug, val)
        filtered_data.append(filtered_row)
    # Index new rows to start directly after the existing rows.
    index = range(num_rows, num_rows + len(filtered_data))
    new_dframe = DataFrame(filtered_data, index=index)
    return new_dframe
return new_dframe
@task(default_retry_delay=5, ignore_result=True)
def propagate(dataset, new_dframe=None, update=None):
    """Propagate changes in a modified dataset to datasets derived from it."""
    __update_aggregate_datasets(dataset, new_dframe, update=update)

    # Merged and joined datasets only need refreshing for explicit updates.
    if not update:
        return

    __update_merged_datasets(dataset, update)
    __update_joined_datasets(dataset, update)
def __add_calculations(dataset, new_dframe):
    """Apply each stored non-aggregate calculation to `new_dframe`.

    :param dataset: The dataset whose calculations are applied.
    :param new_dframe: The incoming rows to compute calculated columns for.

    :returns: `new_dframe` with one column joined per calculation.
    """
    labels_to_slugs = dataset.schema.labels_to_slugs
    for calculation in dataset.calculations(include_aggs=False):
        function = Parser.parse_function(calculation.formula)
        new_column = new_dframe.apply(function, axis=1, args=(dataset, ))
        potential_name = calculation.name
        # Use the slug when the raw name is not already a dataset column.
        if potential_name not in dataset.dframe().columns:
            if potential_name in labels_to_slugs:
                new_column.name = labels_to_slugs[potential_name]
        else:
            new_column.name = potential_name
        new_dframe = new_dframe.join(new_column)
    return new_dframe
def __calculation_data(dataset):
    """Create a list of aggregate calculation information.

    Builds a list of (formula, slug, group, aggregated dataset) tuples from
    the current dataset's aggregated datasets and aggregate calculations.

    :param dataset: The dataset whose aggregations are collected.
    """
    calcs_to_data = defaultdict(list)

    calculations = dataset.calculations(only_aggs=True)
    names_to_formulas = {c.name: c.formula for c in calculations}
    names = set(names_to_formulas.keys())

    # Use a distinct loop variable: the original rebound `dataset` here,
    # shadowing the function parameter.
    for group, a_dataset in dataset.aggregated_datasets:
        labels_to_slugs = a_dataset.schema.labels_to_slugs
        calculations_for_dataset = list(set(
            labels_to_slugs.keys()).intersection(names))

        for calc in calculations_for_dataset:
            calcs_to_data[calc].append((
                names_to_formulas[calc], labels_to_slugs[calc], group,
                a_dataset))

    return flatten(calcs_to_data.values())
def __update_is_valid(dataset, new_dframe):
    """Check if the update is valid.

    Check whether this is a right-hand side of any joins
    and deny the update if the update would produce an invalid
    join as a result.

    :param dataset: The dataset to check if update valid for.
    :param new_dframe: The update dframe to check.

    :returns: True is the update is valid, False otherwise.
    """
    join_cols = [on for on in dataset.on_columns_for_rhs_of_joins
                 if on in new_dframe.columns and on in dataset.columns]
    select = {on: 1 for on in join_cols}
    dframe = dataset.dframe(query_args=QueryArgs(select=select))

    for on in join_cols:
        # A join column must remain unique once the update is appended.
        combined = concat([new_dframe[on], dframe[on]])
        if combined.nunique() != len(combined):
            return False

    return True
def __create_aggregator(dataset, formula, name, groups, dframe=None):
    """Build an Aggregator for `formula` named `name` grouped by `groups`.

    :param dataset: The dataset to aggregate over.
    :param formula: The aggregation formula to parse.
    :param name: The name of the resulting aggregation column.
    :param groups: Columns to group on.
    :param dframe: Optional DataFrame to parse columns from, default None.
    """
    # TODO this should work with index eventually
    columns = parse_columns(dataset, formula, name, dframe, no_index=True)
    dependent_columns = Parser.dependent_columns(formula, dataset)
    aggregation = Parser.parse_aggregation(formula)

    # fetch only the group and dependent columns the aggregation needs
    group_select = {group: 1 for group in groups}
    column_select = {col: 1 for col in dependent_columns}
    select = combine_dicts(group_select, column_select)

    # ensure at least one column (MONGO_ID) for the count aggregation
    dframe = dataset.dframe(
        query_args=QueryArgs(select=select or {MONGO_ID: 1}),
        keep_mongo_keys=not select)

    return Aggregator(dframe, groups, aggregation, name, columns)
def __ensure_ready(dataset, update_id):
    """Retry the task unless `dataset` can accept this update right now."""
    # The dataset must be ready and must not have earlier pending updates
    # queued ahead of `update_id`.
    if dataset.is_ready and not (
            update_id and dataset.has_pending_updates(update_id)):
        return

    dataset.reload()
    raise calculate_updates.retry()
def __find_merge_offset(dataset, merged_dataset):
    """Return the row offset of `dataset` within `merged_dataset`.

    The offset is the total row count of all parents merged before
    `dataset`; if `dataset` is not a parent the full sum is returned.
    """
    offset = 0

    for parent_id in merged_dataset.parent_ids:
        if parent_id == dataset.dataset_id:
            return offset
        offset += dataset.find_one(parent_id).num_rows

    return offset
def __propagate_column(dataset, parent_dataset):
    """Propagate columns in `parent_dataset` to `dataset`.

    When a new calculation is added to a dataset this will propagate the
    new column to all child (merged) datasets.

    :param dataset: The child dataset.
    :param parent_dataset: The dataset to propagate.
    """
    # delete the rows in this dataset from the parent
    dataset.remove_parent_observations(parent_dataset.dataset_id)
    # get this dataset without the out-of-date parent rows
    dframe = dataset.dframe(keep_parent_ids=True)
    # create new dframe from the updated parent and add parent id
    parent_dframe = add_parent_column(parent_dataset.dframe(),
                                      parent_dataset.dataset_id)
    # merge this new dframe with the existing dframe
    updated_dframe = concat([dframe, parent_dframe])
    # save new dframe (updates schema)
    dataset.replace_observations(updated_dframe)
    dataset.clear_summary_stats()
    # recur into merged dataset
    [__propagate_column(x, dataset) for x in dataset.merged_datasets]
def __remapped_data(dataset_id, mapping, slugified_data):
    """Rename row keys using the column map for `dataset_id`, if any.

    Rows are returned unchanged when `mapping` has no entry for the
    dataset.
    """
    column_map = mapping.get(dataset_id) if mapping else None

    if not column_map:
        return slugified_data

    return [{column_map.get(key, key): value for key, value in row.items()}
            for row in slugified_data]
def __slugify_data(new_data, labels_to_slugs):
    """Substitute column slugs for labels in the rows of `new_data`.

    Rows are modified in place and the same row dicts are returned.

    :param new_data: A dict or list of dicts mapping labels to values.
    :param labels_to_slugs: Mapping of column labels to slugs.

    :returns: A list of rows with labels replaced by slugs.
    """
    slugified_data = []
    new_data = to_list(new_data)

    for row in new_data:
        # Snapshot the keys first: the original deleted and inserted keys
        # while iterating the dict, which raises a RuntimeError.
        for key in list(row.keys()):
            slug = labels_to_slugs.get(key)
            if slug and key != MONGO_ID:
                row[slug] = row.pop(key)
        slugified_data.append(row)

    return slugified_data
def __update_aggregate_datasets(dataset, new_dframe, update=None):
    """Recompute every aggregated dataset derived from `dataset`."""
    # A missing update means the full dframe can be re-reduced.
    reducible = update is None

    for formula, slug, groups, a_dataset in __calculation_data(dataset):
        __update_aggregate_dataset(dataset, formula, new_dframe, slug,
                                   groups, a_dataset, reducible)
def __update_aggregate_dataset(dataset, formula, new_dframe, name, groups,
                               a_dataset, reducible):
    """Update the aggregated dataset built for `dataset` with `formula`.

    Proceed with the following steps:

        - recalculate the aggregated dataframe from the aggregation
        - update the aggregated dataset with the new dataframe
        - recur on all merged datasets descending from the aggregated
          dataset

    :param dataset: The source dataset of the aggregation.
    :param formula: The formula to execute.
    :param new_dframe: The DataFrame to aggregate on.
    :param name: The name of the aggregation.
    :param groups: A column or columns to group on.
    :type groups: String, list of strings, or None.
    :param a_dataset: The Dataset to store the aggregation in.
    :param reducible: Whether the aggregation can be updated incrementally.
    """
    # parse the aggregation and build column arguments
    aggregator = __create_aggregator(dataset, formula, name, groups,
                                    dframe=new_dframe)
    updated_agg_dframe = aggregator.update(dataset, a_dataset, formula,
                                           reducible)

    # jsondict representation of the refreshed aggregate rows
    updated_data = df_to_jsondict(updated_agg_dframe)

    for child in a_dataset.merged_datasets:
        # drop the stale rows this aggregate contributed, then recompute
        # the child's calculations from the fresh data
        child.remove_parent_observations(a_dataset.dataset_id)
        calculate_updates(child, updated_data,
                          parent_dataset_id=a_dataset.dataset_id)
def __update_joined_datasets(dataset, update):
    """Update any joined datasets.

    For an 'add' update: on a left join, re-join and replace the joined
    dataset's observations when the new rows share `on` values with the
    left-hand side; on a right join, join the new rows with the other
    dataset (when possible) and recurse via `calculate_updates`.
    'delete' and 'edit' updates are forwarded to the joined dataset
    directly.

    :param dataset: The dataset whose joined datasets should be updated.
    :param update: Dict containing one of 'add' (DataFrame of new rows),
        'delete' (row index) or 'edit' ((index, data) pair).
    """
    if 'add' in update:
        new_dframe = update['add']

    for direction, other_dataset, on, j_dataset in dataset.joined_datasets:
        if 'add' in update:
            if direction == 'left':
                # only proceed if on in new dframe
                if on in new_dframe.columns:
                    left_dframe = other_dataset.dframe(padded=True)

                    # only proceed if new on value is in on column in lhs
                    if len(set(new_dframe[on]).intersection(
                            set(left_dframe[on]))):
                        merged_dframe = join_dataset(left_dframe, dataset, on)
                        j_dataset.replace_observations(merged_dframe)

                        # TODO is it OK not to propagate the join here?
            else:
                # if on in new data join with existing data
                if on in new_dframe:
                    new_dframe = join_dataset(new_dframe, other_dataset, on)

                # NOTE(review): new_dframe is rebound above, so subsequent
                # right-join iterations see the already-joined frame --
                # confirm this accumulation is intentional.
                calculate_updates(j_dataset, df_to_jsondict(new_dframe),
                                  parent_dataset_id=dataset.dataset_id)
        elif 'delete' in update:
            j_dataset.delete_observation(update['delete'])
        elif 'edit' in update:
            j_dataset.update_observation(*update['edit'])
def __update_merged_datasets(dataset, update):
    """Propagate an update on `dataset` to all datasets merged from it.

    :param dataset: The parent dataset that was updated.
    :param update: Dict containing one of 'add' (DataFrame of new rows),
        'delete' (row index) or 'edit' ((index, data) pair).
    """
    adding = 'add' in update

    if adding:
        # store slugs as labels for child datasets
        slugged = __slugify_data(df_to_jsondict(update['add']),
                                 dataset.schema.labels_to_slugs)

    for mapping, merged_dataset in dataset.merged_datasets_with_map:
        if adding:
            calculate_updates(
                merged_dataset,
                __remapped_data(dataset.dataset_id, mapping, slugged),
                parent_dataset_id=dataset.dataset_id)
        elif 'delete' in update:
            # row indices in the merged dataset are shifted by the rows of
            # the parents that precede this dataset
            offset = __find_merge_offset(dataset, merged_dataset)
            merged_dataset.delete_observation(update['delete'] + offset)
        elif 'edit' in update:
            offset = __find_merge_offset(dataset, merged_dataset)
            index, row_data = update['edit']
            merged_dataset.update_observation(index + offset, row_data)
| [
"bamboo.core.aggregator.Aggregator",
"bamboo.core.parser.Parser.dependent_columns",
"celery.task.task",
"bamboo.core.parser.Parser.parse_aggregation",
"bamboo.core.frame.add_parent_column",
"bamboo.core.frame.join_dataset",
"bamboo.lib.datetools.recognize_dates",
"bamboo.lib.jsontools.df_to_jsondict",... | [((1744, 1791), 'celery.task.task', 'task', ([], {'default_retry_delay': '(5)', 'ignore_result': '(True)'}), '(default_retry_delay=5, ignore_result=True)\n', (1748, 1791), False, 'from celery.task import task\n'), ((4557, 4604), 'celery.task.task', 'task', ([], {'default_retry_delay': '(5)', 'ignore_result': '(True)'}), '(default_retry_delay=5, ignore_result=True)\n', (4561, 4604), False, 'from celery.task import task\n'), ((2660, 2707), 'bamboo.lib.datetools.recognize_dates', 'recognize_dates', (['new_dframe_raw', 'dataset.schema'], {}), '(new_dframe_raw, dataset.schema)\n', (2675, 2707), False, 'from bamboo.lib.datetools import recognize_dates\n'), ((4493, 4530), 'pandas.DataFrame', 'DataFrame', (['filtered_data'], {'index': 'index'}), '(filtered_data, index=index)\n', (4502, 4530), False, 'from pandas import concat, DataFrame\n'), ((5792, 5809), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5803, 5809), False, 'from collections import defaultdict\n'), ((7371, 7431), 'bamboo.lib.parsing.parse_columns', 'parse_columns', (['dataset', 'formula', 'name', 'dframe'], {'no_index': '(True)'}), '(dataset, formula, name, dframe, no_index=True)\n', (7384, 7431), False, 'from bamboo.lib.parsing import parse_columns\n'), ((7457, 7499), 'bamboo.core.parser.Parser.dependent_columns', 'Parser.dependent_columns', (['formula', 'dataset'], {}), '(formula, dataset)\n', (7481, 7499), False, 'from bamboo.core.parser import Parser\n'), ((7518, 7551), 'bamboo.core.parser.Parser.parse_aggregation', 'Parser.parse_aggregation', (['formula'], {}), '(formula)\n', (7542, 7551), False, 'from bamboo.core.parser import Parser\n'), ((7615, 7707), 'bamboo.lib.utils.combine_dicts', 'combine_dicts', (['{group: (1) for group in groups}', '{col: (1) for col in dependent_columns}'], {}), '({group: (1) for group in groups}, {col: (1) for col in\n dependent_columns})\n', (7628, 7707), False, 'from bamboo.lib.utils import combine_dicts, 
flatten, to_list\n'), ((7815, 7856), 'bamboo.lib.query_args.QueryArgs', 'QueryArgs', ([], {'select': '(select or {MONGO_ID: 1})'}), '(select=select or {MONGO_ID: 1})\n', (7824, 7856), False, 'from bamboo.lib.query_args import QueryArgs\n'), ((7948, 8002), 'bamboo.core.aggregator.Aggregator', 'Aggregator', (['dframe', 'groups', 'aggregation', 'name', 'columns'], {}), '(dframe, groups, aggregation, name, columns)\n', (7958, 8002), False, 'from bamboo.core.aggregator import Aggregator\n'), ((9327, 9358), 'pandas.concat', 'concat', (['[dframe, parent_dframe]'], {}), '([dframe, parent_dframe])\n', (9333, 9358), False, 'from pandas import concat, DataFrame\n'), ((9971, 9988), 'bamboo.lib.utils.to_list', 'to_list', (['new_data'], {}), '(new_data)\n', (9978, 9988), False, 'from bamboo.lib.utils import combine_dicts, flatten, to_list\n'), ((11699, 11729), 'bamboo.lib.jsontools.df_to_jsondict', 'df_to_jsondict', (['new_agg_dframe'], {}), '(new_agg_dframe)\n', (11713, 11729), False, 'from bamboo.lib.jsontools import df_to_jsondict\n'), ((2846, 2894), 'bamboo.core.frame.add_parent_column', 'add_parent_column', (['new_dframe', 'parent_dataset_id'], {}), '(new_dframe, parent_dataset_id)\n', (2863, 2894), False, 'from bamboo.core.frame import add_parent_column, join_dataset\n'), ((5079, 5121), 'bamboo.core.parser.Parser.parse_function', 'Parser.parse_function', (['calculation.formula'], {}), '(calculation.formula)\n', (5100, 5121), False, 'from bamboo.core.parser import Parser\n'), ((7087, 7123), 'pandas.concat', 'concat', (['[new_dframe[on], dframe[on]]'], {}), '([new_dframe[on], dframe[on]])\n', (7093, 7123), False, 'from pandas import concat, DataFrame\n'), ((13541, 13570), 'bamboo.lib.jsontools.df_to_jsondict', 'df_to_jsondict', (["update['add']"], {}), "(update['add'])\n", (13555, 13570), False, 'from bamboo.lib.jsontools import df_to_jsondict\n'), ((1342, 1383), 'bamboo.lib.parsing.parse_columns', 'parse_columns', (['dataset', 'c.formula', 'c.name'], {}), '(dataset, 
c.formula, c.name)\n', (1355, 1383), False, 'from bamboo.lib.parsing import parse_columns\n'), ((7002, 7026), 'bamboo.lib.query_args.QueryArgs', 'QueryArgs', ([], {'select': 'select'}), '(select=select)\n', (7011, 7026), False, 'from bamboo.lib.query_args import QueryArgs\n'), ((1444, 1465), 'pandas.DataFrame', 'DataFrame', (['columns[0]'], {}), '(columns[0])\n', (1453, 1465), False, 'from pandas import concat, DataFrame\n'), ((13082, 13125), 'bamboo.core.frame.join_dataset', 'join_dataset', (['new_dframe', 'other_dataset', 'on'], {}), '(new_dframe, other_dataset, on)\n', (13094, 13125), False, 'from bamboo.core.frame import add_parent_column, join_dataset\n'), ((13172, 13198), 'bamboo.lib.jsontools.df_to_jsondict', 'df_to_jsondict', (['new_dframe'], {}), '(new_dframe)\n', (13186, 13198), False, 'from bamboo.lib.jsontools import df_to_jsondict\n'), ((12752, 12790), 'bamboo.core.frame.join_dataset', 'join_dataset', (['left_dframe', 'dataset', 'on'], {}), '(left_dframe, dataset, on)\n', (12764, 12790), False, 'from bamboo.core.frame import add_parent_column, join_dataset\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# License: BSD
# https://raw.githubusercontent.com/splintered-reality/py_trees_ros/devel/LICENSE
#
##############################################################################
# Documentation
##############################################################################
"""
ROS Visitors are entities that can be passed to a ROS tree implementation
(e.g. :class:`~py_trees_ros.trees.BehaviourTree`) and used to either visit
each and every behaviour in the tree, or visit behaviours as the tree is
traversed in an executing tick. At each behaviour, the visitor
runs its own method on the behaviour to do as it wishes (e.g. logging, introspecting).
.. warning:: Visitors should not modify the behaviours they visit.
.. seealso:: The base interface and core visitors in :mod:`py_trees.visitors`
"""
##############################################################################
# Imports
##############################################################################
import py_trees.visitors
import py_trees_ros_interfaces.msg as py_trees_msgs
import rclpy
import time
from . import conversions
##############################################################################
# Visitors
##############################################################################
class SetupLogger(py_trees.visitors.VisitorBase):
    """
    Visitor for :meth:`py_trees_ros.trees.TreeManager.setup` that logs
    each behaviour's name and setup duration to the ROS debug channel.

    Args:
        node: an rclpy node that will provide the debug logger
    """
    def __init__(self, node: rclpy.node.Node):
        super().__init__(full=True)
        self.node = node

    def initialise(self):
        """
        Start the timestamping chain.
        """
        self.start_time = time.monotonic()
        self.last_time = self.start_time

    def run(self, behaviour):
        """
        Log the time spent setting up this behaviour since the previous
        visit.
        """
        now = time.monotonic()
        message = "'{}'.setup: {:.4f}s".format(behaviour.name,
                                               now - self.last_time)
        self.node.get_logger().debug(message)
        self.last_time = now

    def finalise(self):
        """
        Log the total elapsed setup time for the whole tree.
        """
        now = time.monotonic()
        self.node.get_logger().debug(
            "Total tree setup time: {:.4f}s".format(now - self.start_time)
        )
class TreeToMsgVisitor(py_trees.visitors.VisitorBase):
    """
    Visits the entire tree and gathers all behaviours as
    messages for the tree logging publishers.

    Attributes:
        tree (:class:`py_trees_msgs.msg.BehaviourTree`): tree representation in message form
    """
    def __init__(self):
        """
        Configure the visitor to examine every node in the tree.
        """
        super().__init__()
        self.full = True  # examine all nodes

    def initialise(self):
        """
        Initialise and stamp a :class:`py_trees_msgs.msg.BehaviourTree`
        instance.
        """
        self.tree = py_trees_msgs.BehaviourTree()
        # TODO: crystal api
        # self.tree.stamp = rclpy.clock.Clock.now().to_msg()

    def run(self, behaviour):
        """
        Convert the behaviour into a message and append it to the tree.

        Args:
            behaviour (:class:`~py_trees.behaviour.Behaviour`): behaviour to convert
        """
        msg = conversions.behaviour_to_msg(behaviour)
        self.tree.behaviours.append(msg)
| [
"time.monotonic",
"py_trees_ros_interfaces.msg.BehaviourTree"
] | [((1841, 1857), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (1855, 1857), False, 'import time\n'), ((1953, 1969), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (1967, 1969), False, 'import time\n'), ((2192, 2208), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (2206, 2208), False, 'import time\n'), ((2944, 2973), 'py_trees_ros_interfaces.msg.BehaviourTree', 'py_trees_msgs.BehaviourTree', ([], {}), '()\n', (2971, 2973), True, 'import py_trees_ros_interfaces.msg as py_trees_msgs\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Friday_Blueprint.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    # pyuic5-generated UI for 'Friday_Blueprint.ui'. Regenerate with pyuic5
    # instead of hand-editing; comments below are for orientation only.

    def setupUi(self, MainWindow):
        # Build the full widget hierarchy: background image, left sidebar
        # buttons, bottom control bar, text input row, chat area, and the
        # two animated GIF labels.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(420, 650)
        MainWindow.setSizeIncrement(QtCore.QSize(0, 0))
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # full-window background image
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(0, 0, 421, 651))
        self.label.setText("")
        self.label.setPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/see.jpg"))
        self.label.setScaledContents(True)
        self.label.setObjectName("label")
        # left sidebar: vertical strip of flat icon buttons
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 71, 651))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout_5.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
        self.verticalLayout_5.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_5.setObjectName("verticalLayout_5")
        # sidebar: user button
        self.pushButton_9 = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.pushButton_9.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/user.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_9.setIcon(icon)
        self.pushButton_9.setIconSize(QtCore.QSize(30, 30))
        self.pushButton_9.setAutoDefault(True)
        self.pushButton_9.setDefault(True)
        self.pushButton_9.setFlat(True)
        self.pushButton_9.setObjectName("pushButton_9")
        self.verticalLayout_5.addWidget(self.pushButton_9)
        # sidebar: data button
        self.pushButton_10 = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.pushButton_10.setText("")
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/data.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_10.setIcon(icon1)
        self.pushButton_10.setIconSize(QtCore.QSize(30, 30))
        self.pushButton_10.setAutoDefault(True)
        self.pushButton_10.setDefault(True)
        self.pushButton_10.setFlat(True)
        self.pushButton_10.setObjectName("pushButton_10")
        self.verticalLayout_5.addWidget(self.pushButton_10)
        # sidebar: bot button
        self.pushButton_11 = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.pushButton_11.setText("")
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/bot.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_11.setIcon(icon2)
        self.pushButton_11.setIconSize(QtCore.QSize(49, 30))
        self.pushButton_11.setDefault(True)
        self.pushButton_11.setFlat(True)
        self.pushButton_11.setObjectName("pushButton_11")
        self.verticalLayout_5.addWidget(self.pushButton_11)
        # sidebar: settings button
        self.pushButton_12 = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.pushButton_12.setMinimumSize(QtCore.QSize(69, 0))
        self.pushButton_12.setMaximumSize(QtCore.QSize(75, 16777215))
        self.pushButton_12.setText("")
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/settings.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_12.setIcon(icon3)
        self.pushButton_12.setIconSize(QtCore.QSize(30, 30))
        self.pushButton_12.setAutoDefault(True)
        self.pushButton_12.setDefault(True)
        self.pushButton_12.setFlat(True)
        self.pushButton_12.setObjectName("pushButton_12")
        self.verticalLayout_5.addWidget(self.pushButton_12)
        # fixed spacers push the feedback button to the bottom of the sidebar
        spacerItem = QtWidgets.QSpacerItem(20, 151, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout_5.addItem(spacerItem)
        spacerItem1 = QtWidgets.QSpacerItem(20, 69, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout_5.addItem(spacerItem1)
        spacerItem2 = QtWidgets.QSpacerItem(13, 253, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout_5.addItem(spacerItem2)
        # sidebar: feedback button
        self.pushButton_13 = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.pushButton_13.setText("")
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/feedback.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_13.setIcon(icon4)
        self.pushButton_13.setIconSize(QtCore.QSize(40, 40))
        self.pushButton_13.setDefault(True)
        self.pushButton_13.setFlat(True)
        self.pushButton_13.setObjectName("pushButton_13")
        self.verticalLayout_5.addWidget(self.pushButton_13)
        # bottom control bar: lens button, status label, mic button
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(70, 600, 351, 51))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.pushButton_14 = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.pushButton_14.setText("")
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/lens.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_14.setIcon(icon5)
        self.pushButton_14.setIconSize(QtCore.QSize(40, 40))
        self.pushButton_14.setAutoDefault(True)
        self.pushButton_14.setDefault(True)
        self.pushButton_14.setFlat(True)
        self.pushButton_14.setObjectName("pushButton_14")
        self.horizontalLayout_4.addWidget(self.pushButton_14)
        spacerItem3 = QtWidgets.QSpacerItem(65, 15, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(spacerItem3)
        # status label ("waiting" until the assistant responds)
        self.label_2 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        #Self.label_2.setPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/Speak.gif"))
        self.label_2.setText("waiting")
        self.label_2.setScaledContents(True)
        self.label_2.setObjectName("label_2")
        self.horizontalLayout_4.addWidget(self.label_2)
        spacerItem4 = QtWidgets.QSpacerItem(68, 15, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(spacerItem4)
        self.pushButton_15 = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.pushButton_15.setText("")
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/mic.gif"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_15.setIcon(icon6)
        self.pushButton_15.setIconSize(QtCore.QSize(40, 40))
        self.pushButton_15.setAutoDefault(True)
        self.pushButton_15.setDefault(True)
        self.pushButton_15.setFlat(True)
        self.pushButton_15.setObjectName("pushButton_15")
        self.horizontalLayout_4.addWidget(self.pushButton_15)
        spacerItem5 = QtWidgets.QSpacerItem(10, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(spacerItem5)
        # text input row: single-line text box plus send button
        self.horizontalLayoutWidget_2 = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(70, 560, 351, 41))
        self.horizontalLayoutWidget_2.setObjectName("horizontalLayoutWidget_2")
        self.horizontalLayout_5 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_2)
        self.horizontalLayout_5.setSizeConstraint(QtWidgets.QLayout.SetNoConstraint)
        self.horizontalLayout_5.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_5.setObjectName("horizontalLayout_5")
        self.textEdit_2 = QtWidgets.QTextEdit(self.horizontalLayoutWidget_2)
        self.textEdit_2.setObjectName("textEdit_2")
        self.horizontalLayout_5.addWidget(self.textEdit_2)
        self.pushButton_16 = QtWidgets.QPushButton(self.horizontalLayoutWidget_2)
        self.pushButton_16.setText("")
        icon7 = QtGui.QIcon()
        icon7.addPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/send.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_16.setIcon(icon7)
        self.pushButton_16.setIconSize(QtCore.QSize(40, 40))
        self.pushButton_16.setCheckable(False)
        self.pushButton_16.setAutoRepeatDelay(300)
        self.pushButton_16.setAutoDefault(True)
        self.pushButton_16.setDefault(True)
        self.pushButton_16.setFlat(True)
        self.pushButton_16.setObjectName("pushButton_16")
        self.horizontalLayout_5.addWidget(self.pushButton_16)
        spacerItem6 = QtWidgets.QSpacerItem(10, 10, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_5.addItem(spacerItem6)
        # chat/transcript area
        self.verticalLayoutWidget_2 = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(70, 0, 351, 561))
        self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_2)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.textEdit = QtWidgets.QTextEdit(self.verticalLayoutWidget_2)
        self.textEdit.setObjectName("textEdit")
        self.verticalLayout.addWidget(self.textEdit)
        # label_3 hosts the assistant animation (off-window geometry;
        # presumably shown when the window is resized -- generated as-is)
        self.label_3 = QtWidgets.QLabel(self.centralwidget)
        self.label_3.setGeometry(QtCore.QRect(420, 0, 961, 741))
        self.label_3.setText("")
        self.label_3.setScaledContents(True)
        self.label_3.setObjectName("label_3")
        # label_5 hosts the recognizer animation below the main area
        self.label_5 = QtWidgets.QLabel(self.centralwidget)
        self.label_5.setGeometry(QtCore.QRect(0, 650, 421, 91))
        self.label_5.setText("")
        self.label_5.setPixmap(QtGui.QPixmap("D:/jarvis/Jarvis/utils/images/Recognizer.gif"))
        self.label_5.setScaledContents(True)
        self.label_5.setObjectName("label_5")
        MainWindow.setCentralWidget(self.centralwidget)
        # animated GIFs for the two labels, started immediately
        self.movie = QtGui.QMovie("D:/jarvis/Jarvis/utils/images/AIassistant.gif")
        self.label_3.setMovie(self.movie)
        self.movie1 = QtGui.QMovie("D:/jarvis/Jarvis/utils/images/Recognizer.gif")
        self.label_5.setMovie(self.movie1)
        self.startAnimation()

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def startAnimation(self):
        # Start both GIF movies (assistant + recognizer).
        self.movie.start()
        self.movie1.start()

    def retranslateUi(self, MainWindow):
        # Apply translatable strings (only the window title here).
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "JARVIS"))
if __name__ == "__main__":
    # Launch the window directly when this module is run as a script.
    import sys

    application = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    window_ui = Ui_MainWindow()
    window_ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(application.exec_())
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QTextEdit",
"PyQt5.QtWidgets.QMainWindow",
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QSpacerItem",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtGui.QMovie",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtCore.QRect",
"PyQt5.QtGui.QPixmap",
"PyQt5.QtWid... | [((11220, 11252), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (11242, 11252), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11270, 11293), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (11291, 11293), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((577, 606), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (594, 606), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((687, 723), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (703, 723), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1023, 1060), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1040, 1060), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1240, 1288), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (1261, 1288), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1525, 1573), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (1546, 1573), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1627, 1640), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (1638, 1640), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2133, 2181), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (2154, 2181), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2237, 2250), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (2248, 2250), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2753, 2801), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (2774, 2801), False, 'from PyQt5 import 
QtCore, QtGui, QtWidgets\n'), ((2857, 2870), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (2868, 2870), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3324, 3372), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (3345, 3372), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3561, 3574), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (3572, 3574), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4073, 4168), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(20)', '(151)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(20, 151, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Fixed)\n', (4094, 4168), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4237, 4331), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(20)', '(69)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(20, 69, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Fixed)\n', (4258, 4331), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4401, 4496), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(13)', '(253)', 'QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(13, 253, QtWidgets.QSizePolicy.Minimum, QtWidgets.\n QSizePolicy.Fixed)\n', (4422, 4496), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4572, 4620), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.verticalLayoutWidget'], {}), '(self.verticalLayoutWidget)\n', (4593, 4620), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4676, 4689), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (4687, 4689), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5157, 5194), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (5174, 5194), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5385, 5435), 
'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (5406, 5435), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5597, 5647), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (5618, 5647), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5703, 5716), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (5714, 5716), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6214, 6308), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(65)', '(15)', 'QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Minimum'], {}), '(65, 15, QtWidgets.QSizePolicy.Fixed, QtWidgets.\n QSizePolicy.Minimum)\n', (6235, 6308), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6381, 6426), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (6397, 6426), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6726, 6820), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(68)', '(15)', 'QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Minimum'], {}), '(68, 15, QtWidgets.QSizePolicy.Fixed, QtWidgets.\n QSizePolicy.Minimum)\n', (6747, 6820), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6899, 6949), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.horizontalLayoutWidget'], {}), '(self.horizontalLayoutWidget)\n', (6920, 6949), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7005, 7018), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (7016, 7018), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7515, 7609), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(10)', '(20)', 'QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Minimum'], {}), '(10, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.\n QSizePolicy.Minimum)\n', (7536, 7609), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((7698, 7735), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (7715, 7735), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7932, 7984), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.horizontalLayoutWidget_2'], {}), '(self.horizontalLayoutWidget_2)\n', (7953, 7984), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8228, 8278), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.horizontalLayoutWidget_2'], {}), '(self.horizontalLayoutWidget_2)\n', (8247, 8278), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8420, 8472), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.horizontalLayoutWidget_2'], {}), '(self.horizontalLayoutWidget_2)\n', (8441, 8472), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8528, 8541), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (8539, 8541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9137, 9231), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(10)', '(10)', 'QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Minimum'], {}), '(10, 10, QtWidgets.QSizePolicy.Fixed, QtWidgets.\n QSizePolicy.Minimum)\n', (9158, 9231), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9318, 9355), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (9335, 9355), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9541, 9591), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.verticalLayoutWidget_2'], {}), '(self.verticalLayoutWidget_2)\n', (9562, 9591), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9736, 9784), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.verticalLayoutWidget_2'], {}), '(self.verticalLayoutWidget_2)\n', (9755, 9784), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9910, 9946), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', 
(['self.centralwidget'], {}), '(self.centralwidget)\n', (9926, 9946), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10160, 10196), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (10176, 10196), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10557, 10618), 'PyQt5.QtGui.QMovie', 'QtGui.QMovie', (['"""D:/jarvis/Jarvis/utils/images/AIassistant.gif"""'], {}), "('D:/jarvis/Jarvis/utils/images/AIassistant.gif')\n", (10569, 10618), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10683, 10743), 'PyQt5.QtGui.QMovie', 'QtGui.QMovie', (['"""D:/jarvis/Jarvis/utils/images/Recognizer.gif"""'], {}), "('D:/jarvis/Jarvis/utils/images/Recognizer.gif')\n", (10695, 10743), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10864, 10913), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (10901, 10913), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((527, 545), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (539, 545), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((755, 783), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(421)', '(651)'], {}), '(0, 0, 421, 651)\n', (767, 783), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((845, 899), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/see.jpg"""'], {}), "('D:/jarvis/Jarvis/utils/images/see.jpg')\n", (858, 899), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1107, 1134), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(71)', '(651)'], {}), '(0, 0, 71, 651)\n', (1119, 1134), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1664, 1719), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/user.png"""'], {}), "('D:/jarvis/Jarvis/utils/images/user.png')\n", (1677, 1719), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1836, 
1856), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (1848, 1856), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2275, 2330), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/data.png"""'], {}), "('D:/jarvis/Jarvis/utils/images/data.png')\n", (2288, 2330), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2450, 2470), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (2462, 2470), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2895, 2949), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/bot.jpg"""'], {}), "('D:/jarvis/Jarvis/utils/images/bot.jpg')\n", (2908, 2949), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3069, 3089), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(49)', '(30)'], {}), '(49, 30)\n', (3081, 3089), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3415, 3434), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(69)', '(0)'], {}), '(69, 0)\n', (3427, 3434), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3478, 3504), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(75)', '(16777215)'], {}), '(75, 16777215)\n', (3490, 3504), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3599, 3658), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/settings.png"""'], {}), "('D:/jarvis/Jarvis/utils/images/settings.png')\n", (3612, 3658), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3778, 3798), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (3790, 3798), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4714, 4773), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/feedback.png"""'], {}), "('D:/jarvis/Jarvis/utils/images/feedback.png')\n", (4727, 4773), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4893, 4913), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (4905, 
4913), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5243, 5273), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(70)', '(600)', '(351)', '(51)'], {}), '(70, 600, 351, 51)\n', (5255, 5273), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5741, 5796), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/lens.svg"""'], {}), "('D:/jarvis/Jarvis/utils/images/lens.svg')\n", (5754, 5796), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5916, 5936), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (5928, 5936), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7043, 7097), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/mic.gif"""'], {}), "('D:/jarvis/Jarvis/utils/images/mic.gif')\n", (7056, 7097), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7217, 7237), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (7229, 7237), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7786, 7816), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(70)', '(560)', '(351)', '(41)'], {}), '(70, 560, 351, 41)\n', (7798, 7816), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8566, 8621), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/send.png"""'], {}), "('D:/jarvis/Jarvis/utils/images/send.png')\n", (8579, 8621), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8741, 8761), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (8753, 8761), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9404, 9433), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(70)', '(0)', '(351)', '(561)'], {}), '(70, 0, 351, 561)\n', (9416, 9433), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9980, 10010), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(420)', '(0)', '(961)', '(741)'], {}), '(420, 0, 961, 741)\n', (9992, 10010), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10230, 
10259), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(650)', '(421)', '(91)'], {}), '(0, 650, 421, 91)\n', (10242, 10259), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10325, 10386), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""D:/jarvis/Jarvis/utils/images/Recognizer.gif"""'], {}), "('D:/jarvis/Jarvis/utils/images/Recognizer.gif')\n", (10338, 10386), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
from __future__ import annotations
from copy import copy, deepcopy
from types import MappingProxyType
from typing import (
Any,
Union,
Mapping,
TypeVar,
Callable,
Iterable,
Iterator,
Sequence,
TYPE_CHECKING,
)
from pathlib import Path
from functools import partial
from itertools import chain
from typing_extensions import Literal
import re
import validators
from scanpy import logging as logg
from anndata import AnnData
from scanpy.plotting.palettes import default_102 as default_palette
from dask import delayed
import numpy as np
import xarray as xr
import dask.array as da
from matplotlib.colors import ListedColormap
import matplotlib as mpl
import matplotlib.pyplot as plt
from skimage.util import img_as_float
from skimage.transform import rescale
from squidpy._docs import d, inject_docs
from squidpy._utils import NDArrayA, singledispatchmethod
from squidpy.im._io import _lazy_load_image, _infer_dimensions, _assert_dims_present
from squidpy.gr._utils import (
_assert_in_range,
_assert_positive,
_assert_non_negative,
_assert_spatial_basis,
_assert_non_empty_sequence,
)
from squidpy.im._coords import (
CropCoords,
CropPadding,
_NULL_COORDS,
_NULL_PADDING,
TupleSerializer,
_update_attrs_scale,
_update_attrs_coords,
)
from squidpy.im._feature_mixin import FeatureMixin
from squidpy._constants._constants import InferDimensions
from squidpy._constants._pkg_constants import Key
# Scalar "field of interest" type: sizes and coordinates may be int or float.
FoI_t = Union[int, float]
# Anything accepted where a filesystem path is expected.
Pathlike_t = Union[str, Path]
# In-memory image payloads: plain/dask arrays or already-wrapped DataArrays.
Arraylike_t = Union[NDArrayA, xr.DataArray]
# Dimension-inference spec: a strategy keyword or an explicit sequence of dim names.
InferDims_t = Union[Literal["default", "prefer_channels", "prefer_z"], Sequence[str]]
# Everything ``add_img`` can ingest: a path/URL, an array, or another container.
Input_t = Union[Pathlike_t, Arraylike_t, "ImageContainer"]
Interactive = TypeVar("Interactive")  # cannot import because of cyclic dependencies
# Shared error message for the unsupported multi-`library_id` case.
_ERROR_NOTIMPLEMENTED_LIBID = f"It seems there are multiple `library_id` in `adata.uns[{Key.uns.spatial!r}]`.\n \
Loading multiple images is not implemented (yet), please specify a `library_id`."
__all__ = ["ImageContainer"]
@d.dedent # trick to overcome not top-down order
@d.dedent
class ImageContainer(FeatureMixin):
"""
Container for in memory arrays or on-disk images.
Wraps :class:`xarray.Dataset` to store several image layers with the same `x`, `y` and `z` dimensions in one object.
Dimensions of stored images are ``(y, x, z, channels)``. The channel dimension may vary between image layers.
This class also allows for lazy loading and processing using :mod:`dask`, and is given to all image
processing functions, along with :class:`anndata.AnnData` instance, if necessary.
Parameters
----------
%(add_img.parameters)s
scale
Scaling factor of the image with respect to the spatial coordinates
saved in the accompanying :class:`anndata.AnnData`.
Raises
------
%(add_img.raises)s
"""
def __init__(
self,
img: Input_t | None = None,
layer: str = "image",
lazy: bool = True,
scale: float = 1.0,
**kwargs: Any,
):
self._data: xr.Dataset = xr.Dataset()
self._data.attrs[Key.img.coords] = _NULL_COORDS # can't save None to NetCDF
self._data.attrs[Key.img.padding] = _NULL_PADDING
self._data.attrs[Key.img.scale] = scale
self._data.attrs[Key.img.mask_circle] = False
if img is not None:
self.add_img(img, layer=layer, **kwargs)
if not lazy:
self.compute()
    @classmethod
    def concat(
        cls,
        imgs: Iterable[ImageContainer],
        library_ids: Sequence[str | None] | None = None,
        combine_attrs: str = "identical",
        **kwargs: Any,
    ) -> ImageContainer:
        """
        Concatenate ``imgs`` in Z-dimension.

        All ``imgs`` need to have the same shape and the same name to be concatenated.

        Parameters
        ----------
        imgs
            Images that should be concatenated in Z-dimension.
        library_ids
            Name for each image that will be associated to each Z-dimension. This should match the ``library_id``
            in the corresponding :class:`anndata.AnnData` object.
            If `None`, the existing name of the Z-dimension is used for each image.
        combine_attrs
            How to combine attributes of ``imgs``. By default, all ``imgs`` need to have the same scale
            and crop attributes. Use ``combine_attrs = 'override'`` to relax this requirement.
            This might lead to a mismatch between :class:`ImageContainer` and :class:`anndata.AnnData` coordinates.
        kwargs
            Keyword arguments for :func:`xarray.concat`.

        Returns
        -------
        Concatenated :class:`squidpy.img.ImageContainer` with ``imgs`` stacks in Z-dimension.

        Raises
        ------
        ValueError
            If any of the ``imgs`` have more than 1 Z-dimension or if ``library_ids`` are not unique.
        """
        # check that imgs are not already 3d
        imgs = list(imgs)
        for img in imgs:
            if img.data.dims["z"] > 1:
                raise ValueError(
                    f"Currently, can concatenate only images with 1 Z-dimension, found `{img.data.dims['z']}`."
                )
        # check library_ids; a `None` entry means "keep that image's existing z name"
        if library_ids is None:
            library_ids = [None] * len(imgs)
        if len(library_ids) != len(imgs):
            raise ValueError(f"Expected library ids to be of length `{len(imgs)}`, found `{len(library_ids)}`.")
        # resolve one id per image (each img contributes exactly one z-slice here)
        _library_ids = np.concatenate(
            [img._get_library_ids(library_id, allow_new=True) for img, library_id in zip(imgs, library_ids)]
        )
        if len(set(_library_ids)) != len(_library_ids):
            raise ValueError(f"Found non-unique library ids `{list(_library_ids)}`.")
        # add library_id to z dim; copy first so the inputs are left untouched
        prep_imgs = []
        for lid, img in zip(_library_ids, imgs):
            prep_img = img.copy()
            prep_img._data = prep_img.data.assign_coords(z=[lid])
            prep_imgs.append(prep_img)
        return cls._from_dataset(
            xr.concat([img.data for img in prep_imgs], dim="z", combine_attrs=combine_attrs, **kwargs)
        )
@classmethod
def load(cls, path: Pathlike_t, lazy: bool = True, chunks: int | None = None) -> ImageContainer:
"""
Load data from a *Zarr* store.
Parameters
----------
path
Path to *Zarr* store.
lazy
Whether to use :mod:`dask` to lazily load image.
chunks
Chunk size for :mod:`dask`. Only used when ``lazy = True``.
Returns
-------
The loaded container.
"""
res = cls()
res.add_img(path, layer="image", chunks=chunks, lazy=True)
return res if lazy else res.compute()
def save(self, path: Pathlike_t, **kwargs: Any) -> None:
"""
Save the container into a *Zarr* store.
Parameters
----------
path
Path to a *Zarr* store.
Returns
-------
Nothing, just saves the container.
"""
attrs = self.data.attrs
try:
self._data = self.data.load() # if we're loading lazily and immediately saving
self.data.attrs = {
k: (v.to_tuple() if isinstance(v, TupleSerializer) else v) for k, v in self.data.attrs.items()
}
self.data.to_zarr(str(path), mode="w", **kwargs, **kwargs)
finally:
self.data.attrs = attrs
    @d.get_sections(base="add_img", sections=["Parameters", "Raises"])
    @d.dedent
    @inject_docs(id=InferDimensions)
    def add_img(
        self,
        img: Input_t,
        layer: str | None = None,
        dims: InferDims_t = InferDimensions.DEFAULT.s,
        library_id: str | Sequence[str] | None = None,
        lazy: bool = True,
        chunks: str | tuple[int, ...] | None = None,
        copy: bool = True,
        **kwargs: Any,
    ) -> None:
        """
        Add a new image to the container.

        Parameters
        ----------
        img
            In-memory 2, 3 or 4-dimensional array, a URL to a *Zarr* store (ending in *.zarr*),
            or a path to an on-disk image.
        %(img_layer)s
        dims
            Where to save channel dimension when reading from a file or loading an array. Valid options are:

                - `{id.CHANNELS_LAST.s!r}` - load the last non-spatial dimension as channels.
                - `{id.Z_LAST.s!r}` - load the last non-spatial dimension as Z-dimension.
                - `{id.DEFAULT.s!r}` - same as `{id.CHANNELS_LAST.s!r}`, but for 4-dimensional arrays,
                  tries to also load the first dimension as channels if the last non-spatial dimension is 1.
                - a sequence of dimension names matching the shape of ``img``, e.g. ``('y', 'x', 'z', 'channels')``.
                  `'y'`, `'x'` and `'z'` must always be present.
        library_id
            Name for each Z-dimension of the image. This should correspond to the ``library_id``
            in :attr:`anndata.AnnData.uns`.
        lazy
            Whether to use :mod:`dask` to lazily load image.
        chunks
            Chunk size for :mod:`dask`. Only used when ``lazy = True``.
        copy
            Whether to copy the underlying data if ``img`` is an in-memory array.

        Returns
        -------
        Nothing, just adds a new ``layer`` to :attr:`data`.

        Raises
        ------
        ValueError
            If loading from a file/store with an unknown format or if a supplied channel dimension cannot be aligned.
        NotImplementedError
            If loading a specific data type has not been implemented.
        """
        layer = self._get_next_image_id("image") if layer is None else layer
        # normalize a strategy keyword into the enum; explicit sequences pass through
        dims: InferDimensions | Sequence[str] = (  # type: ignore[no-redef]
            InferDimensions(dims) if isinstance(dims, str) else dims
        )
        # `None` means the loader assigned `self._data` in place (Zarr/NetCDF stores)
        res: xr.DataArray | None = self._load_img(img, chunks=chunks, layer=layer, copy=copy, dims=dims, **kwargs)
        if res is not None:
            # new library ids are only allowed when the container is still empty
            library_id = self._get_library_ids(library_id, res, allow_new=not len(self))
            try:
                res = res.assign_coords({"z": library_id})
            except ValueError as e:
                # re-raise anything that is not the specific z-size mismatch from xarray
                if "conflicting sizes for dimension 'z'" not in str(e):
                    raise
                # at this point, we know the container is not empty
                raise ValueError(
                    f"Expected image to have `{len(self.library_ids)}` Z-dimension(s), found `{res.sizes['z']}`."
                ) from None
            if TYPE_CHECKING:
                assert isinstance(res, xr.DataArray)
            logg.info(f"{'Overwriting' if layer in self else 'Adding'} image layer `{layer}`")
            try:
                self.data[layer] = res
            except ValueError as e:
                # xarray refuses mismatched channel dims; detect that exact failure by message
                c_dim = res.dims[-1]
                if f"along dimension {str(c_dim)!r} cannot be aligned" not in str(e):
                    raise
                # retry under a fresh, layer-specific channel dimension name
                channel_dim = self._get_next_channel_id(res)
                logg.warning(f"Channel dimension cannot be aligned with an existing one, using `{channel_dim}`")
                self.data[layer] = res.rename({res.dims[-1]: channel_dim})
            if not lazy:
                self.compute(layer)
@singledispatchmethod
def _load_img(self, img: Pathlike_t | Input_t | ImageContainer, layer: str, **kwargs: Any) -> xr.DataArray | None:
if isinstance(img, ImageContainer):
if layer not in img:
raise KeyError(f"Image identifier `{layer}` not found in `{img}`.")
_ = kwargs.pop("dims", None)
return self._load_img(img[layer], **kwargs)
raise NotImplementedError(f"Loading `{type(img).__name__}` is not yet implemented.")
    @_load_img.register(str)
    @_load_img.register(Path)
    def _(
        self,
        img_path: Pathlike_t,
        chunks: int | None = None,
        dims: InferDimensions | tuple[str, ...] = InferDimensions.DEFAULT,
        **_: Any,
    ) -> xr.DataArray | None:
        """
        Load an image from a path or URL, dispatching on the file suffix.

        Returns a :class:`xarray.DataArray` for plain image files; returns `None` for
        *Zarr*/*NetCDF* stores, in which case ``self._data`` is replaced in place.
        """

        def transform_metadata(data: xr.Dataset) -> xr.Dataset:
            # normalize a loaded store: ensure every layer is 4D and crop attrs are deserialized
            for key, img in data.items():
                if len(img.dims) != 4:
                    data[key] = img = img.expand_dims({"z": 1}, axis=-2)  # assume only channel dim is present
                _assert_dims_present(img.dims, include_z=True)
            # attrs come back as plain tuples from disk; rebuild the coordinate objects
            data.attrs[Key.img.coords] = CropCoords.from_tuple(data.attrs.get(Key.img.coords, _NULL_COORDS.to_tuple()))
            data.attrs[Key.img.padding] = CropPadding.from_tuple(
                data.attrs.get(Key.img.padding, _NULL_PADDING.to_tuple())
            )
            data.attrs.setdefault(Key.img.mask_circle, False)
            data.attrs.setdefault(Key.img.scale, 1)
            return data

        img_path = str(img_path)
        is_url, suffix = validators.url(img_path), Path(img_path).suffix.lower()
        logg.debug(f"Loading data from `{img_path}`")
        # URLs are not checked for existence; local paths are
        if not is_url and not Path(img_path).exists():
            raise OSError(f"Path `{img_path}` does not exist.")
        if suffix in (".jpg", ".jpeg", ".png", ".tif", ".tiff"):
            return _lazy_load_image(img_path, dims=dims, chunks=chunks)
        if suffix == ".zarr" or Path(img_path).is_dir():  # can also be a URL
            if len(self._data):
                raise ValueError("Loading data from `Zarr` store is disallowed when the container is not empty.")
            self._data = transform_metadata(xr.open_zarr(img_path, chunks=chunks))
        elif suffix in (".nc", ".cdf"):
            if len(self._data):
                raise ValueError("Loading data from `NetCDF` is disallowed when the container is not empty.")
            self._data = transform_metadata(xr.open_dataset(img_path, chunks=chunks))
        else:
            raise ValueError(f"Unable to handle path `{img_path}`.")
@_load_img.register(da.Array)
@_load_img.register(np.ndarray)
def _(
self,
img: NDArrayA,
copy: bool = True,
dims: InferDimensions | tuple[str, ...] = InferDimensions.DEFAULT,
**_: Any,
) -> xr.DataArray:
logg.debug(f"Loading `numpy.array` of shape `{img.shape}`")
return self._load_img(xr.DataArray(img), copy=copy, dims=dims, warn=False)
    @_load_img.register(xr.DataArray)
    def _(
        self,
        img: xr.DataArray,
        copy: bool = True,
        warn: bool = True,
        dims: InferDimensions | tuple[str, ...] = InferDimensions.DEFAULT,
        **_: Any,
    ) -> xr.DataArray:
        """
        Canonicalize a :class:`xarray.DataArray` so it carries `y`, `x`, `z` (+ channel) dims,
        transposed to ``(y, x, z, ...)`` order.
        """
        logg.debug(f"Loading `xarray.DataArray` of shape `{img.shape}`")
        img = img.copy() if copy else img
        # only infer/rename when the canonical spatial dims are not already present
        if not ("y" in img.dims and "x" in img.dims and "z" in img.dims):
            _, dims, _, expand_axes = _infer_dimensions(img, infer_dimensions=dims)
            if TYPE_CHECKING:
                assert isinstance(dims, Iterable)
            if warn:
                logg.warning(f"Unable to find `y`, `x` or `z` dimension in `{img.dims}`. Renaming to `{dims}`")
            # `axes` is always of length 0, 1 or 2
            if len(expand_axes):
                # decide which singleton dims are missing: both, only channels, or only z
                dimnames = ("z", "channels") if len(expand_axes) == 2 else (("channels",) if "z" in dims else ("z",))
                img = img.expand_dims([d for _, d in zip(expand_axes, dimnames)], axis=expand_axes)
            img = img.rename(dict(zip(img.dims, dims)))
        return img.transpose("y", "x", "z", ...)
    @classmethod
    @d.dedent
    def from_adata(
        cls,
        adata: AnnData,
        img_key: str | None = None,
        library_id: Sequence[str] | str | None = None,
        spatial_key: str = Key.uns.spatial,
        **kwargs: Any,
    ) -> ImageContainer:
        """
        Load an image from :mod:`anndata` object.

        Parameters
        ----------
        %(adata)s
        img_key
            Key in :attr:`anndata.AnnData.uns` ``['{spatial_key}']['{library_id}']['images']``.
            If `None`, the first key found is used.
        library_id
            Key in :attr:`anndata.AnnData.uns` ``['{spatial_key}']`` specifying which library to access.
        spatial_key
            Key in :attr:`anndata.AnnData.uns` where spatial metadata is stored.
        kwargs
            Keyword arguments for :class:`squidpy.im.ImageContainer`.

        Returns
        -------
        The image container.
        """
        library_id = Key.uns.library_id(adata, spatial_key, library_id)
        # a non-str result means multiple libraries were matched, which is unsupported
        if not isinstance(library_id, str):
            raise NotImplementedError(_ERROR_NOTIMPLEMENTED_LIBID)
        spatial_data = adata.uns[spatial_key][library_id]
        if img_key is None:
            # fall back to the first available image key
            try:
                img_key = next(k for k in spatial_data.get("images", []))
            except StopIteration:
                raise KeyError(f"No images found in `adata.uns[{spatial_key!r}][{library_id!r}]['images']`") from None
        img: NDArrayA | None = spatial_data.get("images", {}).get(img_key, None)
        if img is None:
            raise KeyError(
                f"Unable to find the image in `adata.uns[{spatial_key!r}][{library_id!r}]['images'][{img_key!r}]`."
            )
        # scale factor linking image pixels to spatial coordinates; default to 1.0 with a warning
        scale = spatial_data.get("scalefactors", {}).get(f"tissue_{img_key}_scalef", None)
        if scale is None and "scale" not in kwargs:
            logg.warning(
                f"Unable to determine the scale factor from "
                f"`adata.uns[{spatial_key!r}][{library_id!r}]['scalefactors']['tissue_{img_key}_scalef']`, "
                f"using `1.0`. Consider specifying it manually as `scale=...`"
            )
            scale = 1.0
        # an explicitly passed `scale` kwarg always wins over the inferred one
        kwargs.setdefault("scale", scale)
        return cls(img, layer=img_key, library_id=library_id, **kwargs)
    @d.get_sections(base="crop_corner", sections=["Parameters", "Returns"])
    @d.dedent
    def crop_corner(
        self,
        y: FoI_t,
        x: FoI_t,
        size: FoI_t | tuple[FoI_t, FoI_t] | None = None,
        library_id: str | None = None,
        scale: float = 1.0,
        cval: int | float = 0,
        mask_circle: bool = False,
        preserve_dtypes: bool = True,
    ) -> ImageContainer:
        """
        Extract a crop from the upper-left corner.

        Parameters
        ----------
        %(yx)s
        %(size)s
        library_id
            Name of the Z-dimension to be cropped. If `None`, all Z-dimensions are cropped.
        scale
            Rescale the crop using :func:`skimage.transform.rescale`.
        cval
            Fill value to use if ``mask_circle = True`` or if crop goes out of the image boundary.
        mask_circle
            Whether to mask out values that are not within a circle defined by this crop.
            Only available if ``size`` defines a square.
        preserve_dtypes
            Whether to preserver the data types of underlying :class:`xarray.DataArray`, even if ``cval``
            is of different type.

        Returns
        -------
        The cropped image of size ``size * scale``.

        Raises
        ------
        ValueError
            If the crop would completely lie outside of the image or if ``mask_circle = True`` and
            ``size`` does not define a square.

        Notes
        -----
        If ``preserve_dtypes = True`` but ``cval`` cannot be safely cast, ``cval`` will be set to 0.
        """
        self._assert_not_empty()
        y, x = self._convert_to_pixel_space((y, x))
        size = self._get_size(size)
        size = self._convert_to_pixel_space(size)
        ys, xs = size
        _assert_positive(ys, name="height")
        _assert_positive(xs, name="width")
        _assert_positive(scale, name="scale")
        # `orig` is the requested rectangle; `coords` is it clamped to the image bounds
        orig = CropCoords(x0=x, y0=y, x1=x + xs, y1=y + ys)
        ymin, xmin = self.shape
        coords = CropCoords(
            x0=min(max(x, 0), xmin), y0=min(max(y, 0), ymin), x1=min(x + xs, xmin), y1=min(y + ys, ymin)
        )
        if not coords.dy:
            raise ValueError("Height of the crop is empty.")
        if not coords.dx:
            raise ValueError("Width of the crop is empty.")
        # shallow copy: pixel data is shared until padded/rescaled below
        crop = self.data.isel(x=slice(coords.x0, coords.x1), y=slice(coords.y0, coords.y1)).copy(deep=False)
        if len(crop.z) > 1:
            crop = crop.sel(z=self._get_library_ids(library_id))
        crop.attrs = _update_attrs_coords(crop.attrs, coords)
        if orig != coords:
            # requested crop reached outside the image; pad the difference with `cval`
            padding = orig - coords
            # because padding does not change dtype by itself
            for key, arr in crop.items():
                if preserve_dtypes:
                    # see Notes: fall back to 0 when `cval` cannot be safely cast
                    if not np.can_cast(cval, arr.dtype, casting="safe"):
                        cval = 0
                else:
                    crop[key] = crop[key].astype(np.dtype(type(cval)), copy=False)
            crop = crop.pad(
                y=(padding.y_pre, padding.y_post),
                x=(padding.x_pre, padding.x_post),
                mode="constant",
                constant_values=cval,
            )
            crop.attrs[Key.img.padding] = padding
        else:
            crop.attrs[Key.img.padding] = _NULL_PADDING
        # rescaling/masking is shared with other crop flavors via `_post_process`
        return self._from_dataset(
            self._post_process(
                data=crop, scale=scale, cval=cval, mask_circle=mask_circle, preserve_dtypes=preserve_dtypes
            )
        )
    def _post_process(
        self,
        data: xr.Dataset,
        scale: FoI_t = 1,
        cval: FoI_t = 0,
        mask_circle: bool = False,
        preserve_dtypes: bool = True,
        **_: Any,
    ) -> xr.Dataset:
        """
        Apply optional rescaling, circular masking and dtype restoration to a cropped dataset.

        Used by the cropping methods after the raw crop has been extracted.
        """

        def _rescale(arr: xr.DataArray) -> xr.DataArray:
            # scale y/x only; z and channels keep their extent
            scaling_fn = partial(
                rescale, scale=[scale, scale, 1], preserve_range=True, order=1, channel_axis=-1, cval=cval
            )
            dtype = arr.dtype
            if isinstance(arr.data, da.Array):
                # lazy path: precompute the output shape so dask can stay deferred
                shape = np.maximum(np.round(scale * np.asarray(arr.shape)), 1)
                shape[-1] = arr.shape[-1]
                shape[-2] = arr.shape[-2]
                return xr.DataArray(
                    da.from_delayed(delayed(lambda arr: scaling_fn(arr).astype(dtype))(arr), shape=shape, dtype=dtype),
                    dims=arr.dims,
                )
            return xr.DataArray(scaling_fn(arr).astype(dtype), dims=arr.dims)

        if scale != 1:
            # `Dataset.map` drops attrs and coords; restore both explicitly
            attrs = data.attrs
            library_ids = data.coords["z"]
            data = data.map(_rescale).assign_coords({"z": library_ids})
            data.attrs = _update_attrs_scale(attrs, scale)
        if mask_circle:
            if data.dims["y"] != data.dims["x"]:
                raise ValueError(
                    f"Masking circle is only available for square crops, "
                    f"found crop of shape `{(data.dims['y'], data.dims['x'])}`."
                )
            c = data.x.shape[0] // 2
            # manually reassign coordinates
            library_ids = data.coords["z"]
            # everything outside the inscribed circle becomes `cval`
            data = data.where((data.x - c) ** 2 + (data.y - c) ** 2 <= c**2, other=cval).assign_coords(
                {"z": library_ids}
            )
            data.attrs[Key.img.mask_circle] = True
        if preserve_dtypes:
            # cast each layer back to the dtype it has in the parent container
            for key, arr in self.data.items():
                data[key] = data[key].astype(arr.dtype, copy=False)
        return data
@d.dedent
def crop_center(
self,
y: FoI_t,
x: FoI_t,
radius: FoI_t | tuple[FoI_t, FoI_t],
**kwargs: Any,
) -> ImageContainer:
"""
Extract a circular crop.
The extracted crop will have shape ``(radius[0] * 2 + 1, radius[1] * 2 + 1)``.
Parameters
----------
%(yx)s
radius
Radius along the ``height`` and ``width`` dimensions, respectively.
kwargs
Keyword arguments for :meth:`crop_corner`.
Returns
-------
%(crop_corner.returns)s
"""
y, x = self._convert_to_pixel_space((y, x))
_assert_in_range(y, 0, self.shape[0], name="height")
_assert_in_range(x, 0, self.shape[1], name="width")
if not isinstance(radius, Iterable):
radius = (radius, radius)
(yr, xr) = self._convert_to_pixel_space(radius)
_assert_non_negative(yr, name="radius height")
_assert_non_negative(xr, name="radius width")
return self.crop_corner( # type: ignore[no-any-return]
y=y - yr, x=x - xr, size=(yr * 2 + 1, xr * 2 + 1), **kwargs
)
@d.dedent
def generate_equal_crops(
self,
size: FoI_t | tuple[FoI_t, FoI_t] | None = None,
as_array: str | bool = False,
squeeze: bool = True,
**kwargs: Any,
) -> Iterator[ImageContainer] | Iterator[dict[str, NDArrayA]]:
"""
Decompose image into equally sized crops.
Parameters
----------
%(size)s
%(as_array)s
squeeze
Remove singleton dimensions from the results if ``as_array = True``.
kwargs
Keyword arguments for :meth:`crop_corner`.
Yields
------
The crops, whose type depends on ``as_array``.
Notes
-----
Crops going outside out of the image boundary are padded with ``cval``.
"""
self._assert_not_empty()
size = self._get_size(size)
size = self._convert_to_pixel_space(size)
y, x = self.shape
ys, xs = size
_assert_in_range(ys, 0, y, name="height")
_assert_in_range(xs, 0, x, name="width")
unique_ycoord = np.arange(start=0, stop=(y // ys + (y % ys != 0)) * ys, step=ys)
unique_xcoord = np.arange(start=0, stop=(x // xs + (x % xs != 0)) * xs, step=xs)
ycoords = np.repeat(unique_ycoord, len(unique_xcoord))
xcoords = np.tile(unique_xcoord, len(unique_ycoord))
for y, x in zip(ycoords, xcoords):
yield self.crop_corner(y=y, x=x, size=(ys, xs), **kwargs)._maybe_as_array(
as_array, squeeze=squeeze, lazy=True
)
    @d.dedent
    def generate_spot_crops(
        self,
        adata: AnnData,
        spatial_key: str = Key.obsm.spatial,
        library_id: Sequence[str] | str | None = None,
        spot_diameter_key: str = "spot_diameter_fullres",
        spot_scale: float = 1.0,
        obs_names: Iterable[Any] | None = None,
        as_array: str | bool = False,
        squeeze: bool = True,
        return_obs: bool = False,
        **kwargs: Any,
    ) -> (
        Iterator[ImageContainer] | Iterator[NDArrayA] | Iterator[tuple[NDArrayA, ...]] | Iterator[dict[str, NDArrayA]]
    ):
        """
        Iterate over :attr:`anndata.AnnData.obs_names` and extract crops.

        Implemented for 10X spatial datasets.
        For Z-stacks, the specified ``library_id`` or list of ``library_id`` need to match the name of the Z-dimension.
        Always extracts 2D crops from the specified Z-dimension.

        Parameters
        ----------
        %(adata)s
        %(spatial_key)s
        %(img_library_id)s
        spot_diameter_key
            Key in :attr:`anndata.AnnData.uns` ``['{spatial_key}']['{library_id}']['scalefactors']``
            where the spot diameter is stored.
        spot_scale
            Scaling factor for the spot diameter. Larger values mean more context.
        obs_names
            Observations from :attr:`anndata.AnnData.obs_names` for which to generate the crops.
            If `None`, all observations are used.
        %(as_array)s
        squeeze
            Remove singleton dimensions from the results if ``as_array = True``.
        return_obs
            Whether to also yield names from ``obs_names``.
        kwargs
            Keyword arguments for :meth:`crop_center`.

        Yields
        ------
        If ``return_obs = True``, yields a :class:`tuple` ``(crop, obs_name)``. Otherwise, yields just the crops.
        The type of the crops depends on ``as_array`` and the number of dimensions on ``squeeze``.
        """
        self._assert_not_empty()
        _assert_positive(spot_scale, name="scale")
        _assert_spatial_basis(adata, spatial_key)
        # limit to obs_names
        if obs_names is None:
            obs_names = adata.obs_names
        obs_names = _assert_non_empty_sequence(obs_names, name="observations")
        adata = adata[obs_names, :]
        # container scale maps original spatial coordinates into this image's pixel space
        scale = self.data.attrs.get(Key.img.scale, 1)
        spatial = adata.obsm[spatial_key][:, :2]
        if library_id is None:
            # no id given: try to resolve a single library id from `adata.uns`
            try:
                library_id = Key.uns.library_id(adata, spatial_key=spatial_key, library_id=None)
                if not isinstance(library_id, str):
                    raise NotImplementedError(_ERROR_NOTIMPLEMENTED_LIBID)
                obs_library_ids = [library_id] * adata.n_obs
            except ValueError as e:
                if "Unable to determine which library id to use" in str(e):
                    raise ValueError(
                        str(e)
                        + " Or specify a key in `adata.obs` containing a mapping from observations to library ids."
                    )
                else:
                    raise e
        else:
            # id given: first interpret it as an `adata.obs` column mapping obs -> library id,
            # then fall back to treating it as a key into `adata.uns[spatial_key]`
            try:
                obs_library_ids = adata.obs[library_id]
            except KeyError:
                logg.debug(
                    f"Unable to find library ids in `adata.obs[{library_id!r}]`. "
                    f"Trying in `adata.uns[{spatial_key!r}]`"
                )
                library_id = Key.uns.library_id(adata, spatial_key=spatial_key, library_id=library_id)
                if not isinstance(library_id, str):
                    raise NotImplementedError(_ERROR_NOTIMPLEMENTED_LIBID)
                obs_library_ids = [library_id] * adata.n_obs
        lids = set(obs_library_ids)
        if len(self.data.z) > 1 and len(lids) == 1:
            logg.warning(
                f"ImageContainer has `{len(self.data.z)}` Z-dimensions, using library id `{next(iter(lids))}` for all"
            )
        if adata.n_obs != len(obs_library_ids):
            raise ValueError(f"Expected library ids to be of length `{adata.n_obs}`, found `{len(obs_library_ids)}`.")
        for i, (obs, lid) in enumerate(zip(adata.obs_names, obs_library_ids)):
            # get spot diameter of current obs (might be different library ids)
            diameter = (
                Key.uns.spot_diameter(
                    adata, spatial_key=spatial_key, library_id=lid, spot_diameter_key=spot_diameter_key
                )
                * scale
            )
            radius = int(round(diameter // 2 * spot_scale))
            # get coords in image pixel space from original space
            y = int(spatial[i][1] * scale)
            x = int(spatial[i][0] * scale)
            # if CropCoords exist, need to offset y and x
            if self.data.attrs.get(Key.img.coords, _NULL_COORDS) != _NULL_COORDS:
                y = int(y - self.data.attrs[Key.img.coords].y0)
                x = int(x - self.data.attrs[Key.img.coords].x0)
            crop = self.crop_center(y=y, x=x, radius=radius, library_id=obs_library_ids[i], **kwargs)
            crop.data.attrs[Key.img.obs] = obs
            # eagerly materialize each crop before yielding
            crop = crop._maybe_as_array(as_array, squeeze=squeeze, lazy=False)
            yield (crop, obs) if return_obs else crop
    @classmethod
    @d.get_sections(base="uncrop", sections=["Parameters", "Returns"])
    def uncrop(
        cls,
        crops: list[ImageContainer],
        shape: tuple[int, int] | None = None,
    ) -> ImageContainer:
        """
        Re-assemble image from crops and their positions.
        Fills remaining positions with zeros.
        Parameters
        ----------
        crops
            List of image crops.
        shape
            Requested image shape as ``(height, width)``. If `None`, it is automatically determined from ``crops``.
        Returns
        -------
        Re-assembled image from ``crops``.
        Raises
        ------
        ValueError
            If crop metadata was not found or if the requested ``shape`` is smaller than required by ``crops``.
        """
        if not len(crops):
            raise ValueError("No crops were supplied.")
        keys = set(crops[0].data.keys())
        scales = set()
        # running maxima of the bottom-right corner over all crops
        dy, dx = -1, -1
        # validate the crops: all must share the same layers, carry non-null
        # coordinate metadata and agree on a single scale
        for crop in crops:
            if set(crop.data.keys()) != keys:
                raise KeyError(f"Expected to find `{sorted(keys)}` keys, found `{sorted(crop.data.keys())}`.")
            coord = crop.data.attrs.get(Key.img.coords, None)
            if coord is None:
                raise ValueError("Crop does not have coordinate metadata.")
            if coord == _NULL_COORDS:
                raise ValueError(f"Null coordinates detected `{coord}`.")
            scales.add(crop.data.attrs.get(Key.img.scale, None))
            dy, dx = max(dy, coord.y0 + coord.dy), max(dx, coord.x0 + coord.dx)
        scales.discard(None)
        if len(scales) != 1:
            raise ValueError(f"Unable to uncrop images of different scales `{sorted((scales))}`.")
        scale, *_ = scales
        if shape is None:
            shape = (dy, dx)
        # can be float because coords can be scaled
        shape = tuple(map(int, shape))  # type: ignore[assignment]
        if len(shape) != 2:
            raise ValueError(f"Expected `shape` to be of length `2`, found `{len(shape)}`.")
        if shape < (dy, dx):
            raise ValueError(f"Requested final image shape `{shape}`, but minimal is `({dy}, {dx})`.")
        # create resulting dataset
        dataset = xr.Dataset()
        dataset.attrs[Key.img.scale] = scale
        for key in keys:
            # NOTE(review): `crop` is the leaked loop variable from the
            # validation loop above (i.e. the last crop); it is used only as a
            # dtype/dims/coords template for the zero-filled canvas below.
            img = crop.data[key]
            # get shape for this DataArray
            dataset[key] = xr.DataArray(
                np.zeros(shape + tuple(img.shape[2:]), dtype=img.dtype), dims=img.dims, coords=img.coords
            )
            # fill data with crops
            for crop in crops:
                coord = crop.data.attrs[Key.img.coords]
                padding = crop.data.attrs.get(Key.img.padding, _NULL_PADDING)  # maybe warn
                dataset[key][coord.slice] = crop[key][coord.to_image_coordinates(padding=padding).slice]
        return cls._from_dataset(dataset)
@d.dedent
def show(
self,
layer: str | None = None,
library_id: str | Sequence[str] | None = None,
channel: int | Sequence[int] | None = None,
channelwise: bool = False,
segmentation_layer: str | None = None,
segmentation_alpha: float = 0.75,
transpose: bool | None = None,
ax: mpl.axes.Axes | None = None,
figsize: tuple[float, float] | None = None,
dpi: int | None = None,
save: Pathlike_t | None = None,
**kwargs: Any,
) -> None:
"""
Show an image within this container.
Parameters
----------
%(img_layer)s
library_id
Name of Z-dimension to plot. In `None`, plot all Z-dimensions as separate images.
channel
Channels to plot. If `None`, use all channels.
channelwise
Whether to plot each channel separately or not.
segmentation_layer
Segmentation layer to plot over each ax.
segmentation_alpha
Alpha value for ``segmentation_layer``.
transpose
Whether to plot Z-dimensions in columns or in rows. If `None`, it will be set to ``not channelwise``.
ax
Optional :mod:`matplotlib` axes where to plot the image.
If not `None`, ``save``, ``figsize`` and ``dpi`` have no effect.
%(plotting)s
kwargs
Keyword arguments for :meth:`matplotlib.axes.Axes.imshow`.
Returns
-------
%(plotting_returns)s
Raises
------
ValueError
If number of supplied axes is different than the number of requested Z-dimensions or channels.
"""
from squidpy.pl._utils import save_fig
layer = self._get_layer(layer)
arr: xr.DataArray = self[layer]
library_ids = self._get_library_ids(library_id)
arr = arr.sel(z=library_ids)
if channel is not None:
channel = np.asarray([channel]).ravel() # type: ignore[assignment]
if not len(channel): # type: ignore[arg-type]
raise ValueError("No channels have been selected.")
arr = arr[{arr.dims[-1]: channel}]
else:
channel = np.arange(arr.shape[-1])
if TYPE_CHECKING:
assert isinstance(channel, Sequence)
n_channels = arr.shape[-1]
if n_channels not in (1, 3, 4) and not channelwise:
logg.warning(f"Unable to plot image with `{n_channels}`. Setting `channelwise=True`")
channelwise = True
if transpose is None:
transpose = not channelwise
fig = None
nrows, ncols = len(library_ids), (n_channels if channelwise else 1)
if transpose:
nrows, ncols = ncols, nrows
if ax is None:
fig, ax = plt.subplots(
nrows=nrows,
ncols=ncols,
figsize=(8, 8) if figsize is None else figsize,
dpi=dpi,
tight_layout=True,
squeeze=False,
)
elif isinstance(ax, mpl.axes.Axes):
ax = np.array([ax])
ax = np.asarray(ax)
try:
ax = ax.reshape(nrows, ncols)
except ValueError:
raise ValueError(f"Expected `ax` to be of shape `{(nrows, ncols)}`, found `{ax.shape}`.") from None
if segmentation_layer is not None:
seg_arr = self[segmentation_layer].sel(z=library_ids)
if not seg_arr.attrs.get("segmentation", False):
raise TypeError(f"Expected layer `{segmentation_layer!r}` to be marked as segmentation layer.")
if not np.issubdtype(seg_arr.dtype, np.integer):
raise TypeError(
f"Expected segmentation layer `{segmentation_layer!r}` to be of integer type, "
f"found `{seg_arr.dtype}`."
)
seg_arr = seg_arr.values
seg_cmap = np.array(default_palette, dtype=object)[np.arange(np.max(seg_arr)) % len(default_palette)]
seg_cmap[0] = "#00000000" # transparent background
seg_cmap = ListedColormap(seg_cmap)
else:
seg_arr, seg_cmap = None, None
for z, row in enumerate(ax):
for c, ax_ in enumerate(row):
if transpose:
z, c = c, z
title = layer
if channelwise:
img = arr[..., z, c]
title += f":{channel[c]}"
else:
img = arr[..., z, :]
if len(self.data.coords["z"]) > 1:
title += f", library_id:{library_ids[z]}"
ax_.imshow(img_as_float(img.values, force_copy=False), **kwargs)
if seg_arr is not None:
ax_.imshow(
seg_arr[:, :, z, ...],
cmap=seg_cmap,
interpolation="nearest", # avoid artifacts
alpha=segmentation_alpha,
**{k: v for k, v in kwargs.items() if k not in ("cmap", "interpolation")},
)
ax_.set_title(title)
ax_.set_axis_off()
if save and fig is not None:
save_fig(fig, save)
@d.get_sections(base="_interactive", sections=["Parameters"])
@d.dedent
def interactive(
self,
adata: AnnData,
spatial_key: str = Key.obsm.spatial,
library_key: str | None = None,
library_id: str | Sequence[str] | None = None,
cmap: str = "viridis",
palette: str | None = None,
blending: Literal["opaque", "translucent", "additive"] = "opaque",
symbol: Literal["disc", "square"] = "disc",
key_added: str = "shapes",
) -> Interactive:
"""
Launch :mod:`napari` viewer.
Parameters
----------
%(adata)s
%(spatial_key)s
library_key
Key in :attr:`adata.AnnData.obs` specifying mapping between observations and library ids.
Required if the container has more than 1 Z-dimension.
library_id
Subset of library ids to visualize. If `None`, visualize all library ids.
cmap
Colormap for continuous variables.
palette
Colormap for categorical variables in :attr:`anndata.AnnData.obs`. If `None`, use :mod:`scanpy`'s default.
blending
Method which determines how RGB and alpha values of :class:`napari.layers.Shapes` are mixed.
symbol
Symbol to use for the spots. Valid options are:
- `'disc'` - circle.
- `'square'` - square.
key_added
Key where to store :class:`napari.layers.Shapes`, which can be exported by pressing `SHIFT-E`:
- :attr:`anndata.AnnData.obs` ``['{layer_name}_{key_added}']`` - boolean mask containing the selected
cells.
- :attr:`anndata.AnnData.uns` ``['{layer_name}_{key_added}']['meshes']`` - list of :class:`numpy.array`,
defining a mesh in the spatial coordinates.
See :mod:`napari`'s `tutorial <https://napari.org/howtos/layers/shapes.html>`_ for more
information about different mesh types, such as circles, squares etc.
Returns
-------
Interactive view of this container. Screenshot of the canvas can be taken by
:meth:`squidpy.pl.Interactive.screenshot`.
"""
from squidpy.pl import Interactive # type: ignore[attr-defined]
return Interactive( # type: ignore[no-any-return]
img=self,
adata=adata,
spatial_key=spatial_key,
library_key=library_key,
library_id=library_id,
cmap=cmap,
palette=palette,
blending=blending,
key_added=key_added,
symbol=symbol,
).show()
    @d.dedent
    def apply(
        self,
        func: Callable[..., NDArrayA] | Mapping[str, Callable[..., NDArrayA]],
        layer: str | None = None,
        new_layer: str | None = None,
        channel: int | None = None,
        lazy: bool = False,
        chunks: str | tuple[int, int] | None = None,
        copy: bool = True,
        drop: bool = True,
        fn_kwargs: Mapping[str, Any] = MappingProxyType({}),
        **kwargs: Any,
    ) -> ImageContainer | None:
        """
        Apply a function to a layer within this container.
        For each Z-dimension a different function can be defined, using its ``library_id`` name.
        For not mentioned ``library_id``'s the identity function is applied.
        Parameters
        ----------
        func
            A function or a mapping of ``{'{library_id}': function}`` which takes a :class:`numpy.ndarray` as input
            and produces an image-like output.
        %(img_layer)s
        new_layer
            Name of the new layer. If `None` and ``copy = False``, overwrites the data in ``layer``.
        channel
            Apply ``func`` only over a specific ``channel``. If `None`, use all channels.
        chunks
            Chunk size for :mod:`dask`. If `None`, don't use :mod:`dask`.
        %(copy_cont)s
        drop
            Whether to drop Z-dimensions that were not selected by ``func``. Only used when ``copy = True``.
        fn_kwargs
            Keyword arguments for ``func``.
        kwargs
            Keyword arguments for :func:`dask.array.map_overlap` or :func:`dask.array.map_blocks`, depending whether
            ``depth`` is present in ``fn_kwargs``. Only used when ``chunks != None``.
            Use ``depth`` to control boundary artifacts if ``func`` requires data from neighboring chunks,
            by default, ``boundary = 'reflect'`` is used.
        Returns
        -------
        If ``copy = True``, returns a new container with ``layer``.
        Raises
        ------
        ValueError
            If the ``func`` returns 0 or 1 dimensional array.
        """
        # Apply `func` to one DataArray, either eagerly or through dask.
        def apply_func(func: Callable[..., NDArrayA], arr: xr.DataArray) -> NDArrayA | da.Array:
            if chunks is None:
                # no dask: call the function directly on the backing array
                return func(arr.data, **fn_kwargs)
            arr = da.asarray(arr.data).rechunk(chunks)
            # `depth` present -> overlapping chunks are needed by `func`
            return (
                da.map_overlap(func, arr, **fn_kwargs, **kwargs)
                if "depth" in kwargs
                else da.map_blocks(func, arr, **fn_kwargs, **kwargs, dtype=arr.dtype)
            )
        if "depth" in kwargs:
            kwargs.setdefault("boundary", "reflect")
        layer = self._get_layer(layer)
        if new_layer is None:
            new_layer = layer
        arr = self[layer]
        library_ids = list(arr.coords["z"].values)
        dims, channel_dim = arr.dims, arr.dims[-1]
        if channel is not None:
            arr = arr[{channel_dim: channel}]
        if callable(func):
            # a single function: apply it across all Z-dimensions at once
            res = apply_func(func, arr)
            new_library_ids = library_ids
        else:
            # a mapping {library_id: function}: apply per Z-dimension
            res = {}
            # library ids not mentioned in `func`; kept as identity unless dropped
            noop_library_ids = [] if copy and drop else list(set(library_ids) - set(func.keys()))
            for key, fn in func.items():
                res[key] = apply_func(fn, arr.sel(z=key))
            for key in noop_library_ids:
                res[key] = arr.sel(z=key).data
            # preserve the original Z ordering of the surviving ids
            new_library_ids = [lid for lid in library_ids if lid in res]
            try:
                res = da.stack([res[lid] for lid in new_library_ids], axis=2)
            except ValueError as e:
                if not len(noop_library_ids) or "must have the same shape" not in str(e):
                    # processing functions returned wrong shape
                    raise ValueError(
                        "Unable to stack an array because functions returned arrays of different shapes."
                    ) from e
                # funcs might have changed channel dims, replace noops with 0
                logg.warning(
                    f"Function changed the number of channels, cannot use identity "
                    f"for library ids `{noop_library_ids}`. Replacing with 0"
                )
                # TODO(michalk8): once (or if) Z-dim is not fixed, always drop ids
                tmp = next(iter(res.values()))
                for lid in noop_library_ids:
                    res[lid] = (np.zeros_like if chunks is None else da.zeros_like)(tmp)
                res = da.stack([res[lid] for lid in new_library_ids], axis=2)
        if res.ndim == 2:  # assume that dims are y, x
            res = res[..., np.newaxis]
        if res.ndim == 3:  # assume dims are y, x, z (changing of z dim is not supported)
            res = res[..., np.newaxis]
        if res.ndim != 4:
            raise ValueError(f"Expected `2`, `3` or `4` dimensional array, found `{res.ndim}`.")
        if copy:
            cont = ImageContainer(
                res,
                layer=new_layer,
                copy=True,
                lazy=lazy,
                dims=dims,
                library_id=new_library_ids,
            )
            cont.data.attrs = self.data.attrs.copy()
            return cont
        # in-place: add (or overwrite, when `new_layer == layer`) the result
        self.add_img(
            res,
            layer=new_layer,
            lazy=lazy,
            copy=new_layer != layer,
            dims=dims,
            library_id=new_library_ids,
        )
@d.dedent
def subset(self, adata: AnnData, spatial_key: str = Key.obsm.spatial, copy: bool = False) -> AnnData:
"""
Subset :class:`anndata.AnnData` using this container.
Useful when this container is a crop of the original image.
Parameters
----------
%(adata)s
%(spatial_key)s
copy
Whether to return a copy of ``adata``.
Returns
-------
Subset of :class:`anndata.AnnData`.
"""
c: CropCoords = self.data.attrs.get(Key.img.coords, _NULL_COORDS)
if c == _NULL_COORDS: # not a crop
return adata.copy() if copy else adata
_assert_spatial_basis(adata, spatial_key)
coordinates = adata.obsm[spatial_key]
coordinates = coordinates * self.data.attrs.get(Key.img.scale, 1)
mask = (
(coordinates[:, 0] >= c.x0)
& (coordinates[:, 0] <= c.x1)
& (coordinates[:, 1] >= c.y0)
& (coordinates[:, 1] <= c.y1)
)
return adata[mask, :].copy() if copy else adata[mask, :]
def rename(self, old: str, new: str) -> ImageContainer:
"""
Rename a layer.
Parameters
----------
old
Name of the layer to rename.
new
New name.
Returns
-------
Modifies and returns self.
"""
self._data = self.data.rename_vars({old: new})
return self
def compute(self, layer: str | None = None) -> ImageContainer:
"""
Trigger lazy computation in-place.
Parameters
----------
layer
Layer which to compute. If `None`, compute all layers.
Returns
-------
Modifies and returns self.
"""
if layer is None:
self.data.load()
else:
self[layer].load()
return self
@property
def library_ids(self) -> list[str]:
"""Library ids."""
try:
return list(map(str, self.data.coords["z"].values))
except KeyError:
return []
@library_ids.setter
def library_ids(self, library_ids: str | Sequence[str] | Mapping[str, str]) -> None:
"""Set library ids."""
if isinstance(library_ids, Mapping):
library_ids = [str(library_ids.get(lid, lid)) for lid in self.library_ids]
elif isinstance(library_ids, str):
library_ids = (library_ids,)
library_ids = list(map(str, library_ids))
if len(set(library_ids)) != len(library_ids):
raise ValueError(f"Remapped library ids must be unique, found `{library_ids}`.")
self._data = self.data.assign_coords({"z": library_ids})
    @property
    def data(self) -> xr.Dataset:
        """Underlying :class:`xarray.Dataset`."""
        # Read-only access to the wrapped dataset; mutate via container methods.
        return self._data
@property
def shape(self) -> tuple[int, int]:
"""Image shape ``(y, x)``."""
if not len(self):
return 0, 0
return self.data.dims["y"], self.data.dims["x"]
def copy(self, deep: bool = False) -> ImageContainer:
"""
Return a copy of self.
Parameters
----------
deep
Whether to make a deep copy or not.
Returns
-------
Copy of self.
"""
return deepcopy(self) if deep else copy(self)
@classmethod
def _from_dataset(cls, data: xr.Dataset, deep: bool | None = None) -> ImageContainer:
"""
Utility function used for initialization.
Parameters
----------
data
The :class:`xarray.Dataset` to use.
deep
If `None`, don't copy the ``data``. If `True`, make a deep copy of the data, otherwise, make a shallow copy.
Returns
-------
The newly created container.
""" # noqa: D401
res = cls()
res._data = data if deep is None else data.copy(deep=deep)
res._data.attrs.setdefault(Key.img.coords, _NULL_COORDS) # can't save None to NetCDF
res._data.attrs.setdefault(Key.img.padding, _NULL_PADDING)
res._data.attrs.setdefault(Key.img.scale, 1.0)
res._data.attrs.setdefault(Key.img.mask_circle, False)
return res
    def _maybe_as_array(
        self,
        as_array: str | Sequence[str] | bool = False,
        squeeze: bool = True,
        lazy: bool = True,
    ) -> ImageContainer | dict[str, NDArrayA] | NDArrayA | tuple[NDArrayA, ...]:
        # Convert the container depending on `as_array`:
        #   False         -> return the container itself
        #   True          -> dict of {layer: array}
        #   str           -> the single named layer's array
        #   Sequence[str] -> tuple of arrays, in the given order
        res = self
        if as_array:
            # do not trigger dask computation
            res = {key: (res[key].data if lazy else res[key].values) for key in res}  # type: ignore[assignment]
            if squeeze:
                # drop the Z axis when there is exactly one Z-dim, plus a
                # trailing singleton channel axis when present
                axis = (2,) if len(self.data.z) == 1 else ()
                res = {
                    k: v.squeeze(axis=axis + ((3,) if v.shape[-1] == 1 else ()))
                    for k, v in res.items()  # type: ignore[assignment,attr-defined]
                }
        # this is just for convenience for DL iterators
        if isinstance(as_array, str):
            res = res[as_array]
        elif isinstance(as_array, Sequence):
            res = tuple(res[key] for key in as_array)  # type: ignore[assignment]
        if lazy:
            return res
        return res.compute() if isinstance(res, ImageContainer) else res
def _get_next_image_id(self, layer: str) -> str:
pat = re.compile(rf"^{layer}_(\d*)$")
iterator = chain.from_iterable(pat.finditer(k) for k in self.data.keys())
return f"{layer}_{(max(map(lambda m: int(m.groups()[0]), iterator), default=-1) + 1)}"
def _get_next_channel_id(self, channel: str | xr.DataArray) -> str:
if isinstance(channel, xr.DataArray):
channel, *_ = (str(dim) for dim in channel.dims if dim not in ("y", "x", "z"))
pat = re.compile(rf"^{channel}_(\d*)$")
iterator = chain.from_iterable(pat.finditer(v.dims[-1]) for v in self.data.values())
return f"{channel}_{(max(map(lambda m: int(m.groups()[0]), iterator), default=-1) + 1)}"
def _get_library_id(self, library_id: str | None = None) -> str:
self._assert_not_empty()
if library_id is None:
if len(self.library_ids) > 1:
raise ValueError(
f"Unable to determine which library id to use. Please supply one from `{self.library_ids}`."
)
library_id = self.library_ids[0]
if library_id not in self.library_ids:
raise KeyError(f"Library id `{library_id}` not found in `{self.library_ids}`.")
return library_id
def _get_library_ids(
self,
library_id: str | Sequence[str] | None = None,
arr: xr.DataArray | None = None,
allow_new: bool = False,
) -> list[str]:
"""
Get library ids.
Parameters
----------
library_id
Requested library ids.
arr
If the current container is empty, try getting the library ids from the ``arr``.
allow_new
If `True`, don't check if the returned library ids are present in the non-empty container.
This is set to `True` only in :meth:`concat` to allow for remapping.
Returns
-------
The library ids.
"""
if library_id is None:
if len(self):
library_id = self.library_ids
elif isinstance(arr, xr.DataArray):
try:
library_id = list(arr.coords["z"].values)
except (KeyError, AttributeError) as e:
logg.warning(f"Unable to retrieve library ids, reason `{e}`. Using default names")
# at this point, it should have Z-dim
library_id = [str(i) for i in range(arr.sizes["z"])]
else:
raise ValueError("Please specify the number of library ids if the container is empty.")
if isinstance(library_id, str):
library_id = [library_id]
if not isinstance(library_id, Iterable):
raise TypeError(f"Expected library ids to be `iterable`, found `{type(library_id).__name__!r}`.")
res = list(map(str, library_id))
if not len(res):
raise ValueError("No library ids have been selected.")
if not allow_new and len(self) and not (set(res) & set(self.library_ids)):
raise ValueError(f"Invalid library ids have been selected `{res}`. Valid options are `{self.library_ids}`.")
return res
def _get_layer(self, layer: str | None) -> str:
self._assert_not_empty()
if layer is None:
if len(self) > 1:
raise ValueError(
f"Unable to determine which layer to use. Please supply one from `{sorted(self.data.keys())}`."
)
layer = list(self)[0]
if layer not in self:
raise KeyError(f"Image layer `{layer}` not found in `{sorted(self)}`.")
return layer
def _assert_not_empty(self) -> None:
if not len(self):
raise ValueError("The container is empty.")
def _get_size(self, size: FoI_t | tuple[FoI_t | None, FoI_t | None] | None) -> tuple[FoI_t, FoI_t]:
if size is None:
size = (None, None)
if not isinstance(size, Iterable):
size = (size, size)
res = list(size)
if size[0] is None:
res[0] = self.shape[0]
if size[1] is None:
res[1] = self.shape[1]
return tuple(res) # type: ignore[return-value]
def _convert_to_pixel_space(self, size: tuple[FoI_t, FoI_t]) -> tuple[int, int]:
y, x = size
if isinstance(y, float):
_assert_in_range(y, 0, 1, name="y")
y = int(self.shape[0] * y)
if isinstance(x, float):
_assert_in_range(x, 0, 1, name="x")
x = int(self.shape[1] * x)
return y, x
    def __delitem__(self, key: str) -> None:
        # Remove a layer from the underlying dataset (``del container[key]``).
        del self.data[key]
    def __iter__(self) -> Iterator[str]:
        # Iterate over layer names.
        yield from self.data.keys()
    def __len__(self) -> int:
        # Number of layers in the container.
        return len(self.data)
    def __getitem__(self, key: str) -> xr.DataArray:
        # Access a layer by name (``container[key]``).
        return self.data[key]
def __setitem__(self, key: str, value: NDArrayA | xr.DataArray | da.Array) -> None:
if not isinstance(value, (np.ndarray, xr.DataArray, da.Array)):
raise NotImplementedError(f"Adding `{type(value).__name__}` is not yet implemented.")
self.add_img(value, layer=key, copy=True)
def _ipython_key_completions_(self) -> Iterable[str]:
return sorted(map(str, self.data.keys()))
    def __copy__(self) -> ImageContainer:
        # Shallow copy: the new container wraps the same dataset (no deep copy).
        return type(self)._from_dataset(self.data, deep=False)
    def __deepcopy__(self, memodict: Mapping[str, Any] = MappingProxyType({})) -> ImageContainer:
        # Deep copy: the dataset is copied as well; `memodict` is accepted for
        # protocol compatibility but not used.
        return type(self)._from_dataset(self.data, deep=True)
def _repr_html_(self) -> str:
import html
if not len(self):
return f"{self.__class__.__name__} object with 0 layers"
inflection = "" if len(self) <= 1 else "s"
s = f"{self.__class__.__name__} object with {len(self.data.keys())} layer{inflection}:"
style = "text-indent: 25px; margin-top: 0px; margin-bottom: 0px;"
for i, layer in enumerate(self.data.keys()):
s += f"<p style={style!r}><strong>{html.escape(str(layer))}</strong>: "
s += ", ".join(
f"<em>{html.escape(str(dim))}</em> ({shape})"
for dim, shape in zip(self.data[layer].dims, self.data[layer].shape)
)
s += "</p>"
if i == 9 and i < len(self) - 1: # show only first 10 layers
s += f"<p style={style!r}>and {len(self) - i - 1} more...</p>"
break
return s
    def __repr__(self) -> str:
        # Concise representation: class name, (y, x) shape and sorted layer names.
        return f"{self.__class__.__name__}[shape={self.shape}, layers={sorted(self.data.keys())}]"
    def __str__(self) -> str:
        # Same output as ``repr``.
        return repr(self)
| [
"squidpy.gr._utils._assert_spatial_basis",
"skimage.util.img_as_float",
"squidpy.pl.Interactive",
"scanpy.logging.debug",
"re.compile",
"squidpy._docs.d.get_sections",
"types.MappingProxyType",
"squidpy.im._io._infer_dimensions",
"dask.array.map_blocks",
"xarray.concat",
"numpy.array",
"copy.d... | [((1741, 1763), 'typing.TypeVar', 'TypeVar', (['"""Interactive"""'], {}), "('Interactive')\n", (1748, 1763), False, 'from typing import Any, Union, Mapping, TypeVar, Callable, Iterable, Iterator, Sequence, TYPE_CHECKING\n'), ((7609, 7674), 'squidpy._docs.d.get_sections', 'd.get_sections', ([], {'base': '"""add_img"""', 'sections': "['Parameters', 'Raises']"}), "(base='add_img', sections=['Parameters', 'Raises'])\n", (7623, 7674), False, 'from squidpy._docs import d, inject_docs\n'), ((7694, 7725), 'squidpy._docs.inject_docs', 'inject_docs', ([], {'id': 'InferDimensions'}), '(id=InferDimensions)\n', (7705, 7725), False, 'from squidpy._docs import d, inject_docs\n'), ((17930, 18000), 'squidpy._docs.d.get_sections', 'd.get_sections', ([], {'base': '"""crop_corner"""', 'sections': "['Parameters', 'Returns']"}), "(base='crop_corner', sections=['Parameters', 'Returns'])\n", (17944, 18000), False, 'from squidpy._docs import d, inject_docs\n'), ((31513, 31578), 'squidpy._docs.d.get_sections', 'd.get_sections', ([], {'base': '"""uncrop"""', 'sections': "['Parameters', 'Returns']"}), "(base='uncrop', sections=['Parameters', 'Returns'])\n", (31527, 31578), False, 'from squidpy._docs import d, inject_docs\n'), ((39806, 39866), 'squidpy._docs.d.get_sections', 'd.get_sections', ([], {'base': '"""_interactive"""', 'sections': "['Parameters']"}), "(base='_interactive', sections=['Parameters'])\n", (39820, 39866), False, 'from squidpy._docs import d, inject_docs\n'), ((3132, 3144), 'xarray.Dataset', 'xr.Dataset', ([], {}), '()\n', (3142, 3144), True, 'import xarray as xr\n'), ((13101, 13146), 'scanpy.logging.debug', 'logg.debug', (['f"""Loading data from `{img_path}`"""'], {}), "(f'Loading data from `{img_path}`')\n", (13111, 13146), True, 'from scanpy import logging as logg\n'), ((14335, 14394), 'scanpy.logging.debug', 'logg.debug', (['f"""Loading `numpy.array` of shape `{img.shape}`"""'], {}), "(f'Loading `numpy.array` of shape `{img.shape}`')\n", (14345, 14394), 
True, 'from scanpy import logging as logg\n'), ((14748, 14812), 'scanpy.logging.debug', 'logg.debug', (['f"""Loading `xarray.DataArray` of shape `{img.shape}`"""'], {}), "(f'Loading `xarray.DataArray` of shape `{img.shape}`')\n", (14758, 14812), True, 'from scanpy import logging as logg\n'), ((16595, 16645), 'squidpy._constants._pkg_constants.Key.uns.library_id', 'Key.uns.library_id', (['adata', 'spatial_key', 'library_id'], {}), '(adata, spatial_key, library_id)\n', (16613, 16645), False, 'from squidpy._constants._pkg_constants import Key\n'), ((19737, 19772), 'squidpy.gr._utils._assert_positive', '_assert_positive', (['ys'], {'name': '"""height"""'}), "(ys, name='height')\n", (19753, 19772), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((19781, 19815), 'squidpy.gr._utils._assert_positive', '_assert_positive', (['xs'], {'name': '"""width"""'}), "(xs, name='width')\n", (19797, 19815), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((19824, 19861), 'squidpy.gr._utils._assert_positive', '_assert_positive', (['scale'], {'name': '"""scale"""'}), "(scale, name='scale')\n", (19840, 19861), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((19878, 19922), 'squidpy.im._coords.CropCoords', 'CropCoords', ([], {'x0': 'x', 'y0': 'y', 'x1': '(x + xs)', 'y1': '(y + ys)'}), '(x0=x, y0=y, x1=x + xs, y1=y + ys)\n', (19888, 19922), False, 'from squidpy.im._coords import CropCoords, CropPadding, _NULL_COORDS, _NULL_PADDING, TupleSerializer, _update_attrs_scale, _update_attrs_coords\n'), ((20498, 20538), 'squidpy.im._coords._update_attrs_coords', '_update_attrs_coords', (['crop.attrs', 'coords'], {}), '(crop.attrs, coords)\n', (20518, 20538), False, 'from squidpy.im._coords 
import CropCoords, CropPadding, _NULL_COORDS, _NULL_PADDING, TupleSerializer, _update_attrs_scale, _update_attrs_coords\n'), ((24131, 24183), 'squidpy.gr._utils._assert_in_range', '_assert_in_range', (['y', '(0)', 'self.shape[0]'], {'name': '"""height"""'}), "(y, 0, self.shape[0], name='height')\n", (24147, 24183), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((24192, 24243), 'squidpy.gr._utils._assert_in_range', '_assert_in_range', (['x', '(0)', 'self.shape[1]'], {'name': '"""width"""'}), "(x, 0, self.shape[1], name='width')\n", (24208, 24243), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((24393, 24439), 'squidpy.gr._utils._assert_non_negative', '_assert_non_negative', (['yr'], {'name': '"""radius height"""'}), "(yr, name='radius height')\n", (24413, 24439), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((24448, 24493), 'squidpy.gr._utils._assert_non_negative', '_assert_non_negative', (['xr'], {'name': '"""radius width"""'}), "(xr, name='radius width')\n", (24468, 24493), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((25605, 25646), 'squidpy.gr._utils._assert_in_range', '_assert_in_range', (['ys', '(0)', 'y'], {'name': '"""height"""'}), "(ys, 0, y, name='height')\n", (25621, 25646), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((25655, 25695), 'squidpy.gr._utils._assert_in_range', '_assert_in_range', (['xs', '(0)', 'x'], {'name': '"""width"""'}), "(xs, 0, x, name='width')\n", (25671, 25695), False, 'from squidpy.gr._utils import 
_assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((25721, 25785), 'numpy.arange', 'np.arange', ([], {'start': '(0)', 'stop': '((y // ys + (y % ys != 0)) * ys)', 'step': 'ys'}), '(start=0, stop=(y // ys + (y % ys != 0)) * ys, step=ys)\n', (25730, 25785), True, 'import numpy as np\n'), ((25810, 25874), 'numpy.arange', 'np.arange', ([], {'start': '(0)', 'stop': '((x // xs + (x % xs != 0)) * xs)', 'step': 'xs'}), '(start=0, stop=(x // xs + (x % xs != 0)) * xs, step=xs)\n', (25819, 25874), True, 'import numpy as np\n'), ((28212, 28254), 'squidpy.gr._utils._assert_positive', '_assert_positive', (['spot_scale'], {'name': '"""scale"""'}), "(spot_scale, name='scale')\n", (28228, 28254), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((28263, 28304), 'squidpy.gr._utils._assert_spatial_basis', '_assert_spatial_basis', (['adata', 'spatial_key'], {}), '(adata, spatial_key)\n', (28284, 28304), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((28425, 28483), 'squidpy.gr._utils._assert_non_empty_sequence', '_assert_non_empty_sequence', (['obs_names'], {'name': '"""observations"""'}), "(obs_names, name='observations')\n", (28451, 28483), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((33750, 33762), 'xarray.Dataset', 'xr.Dataset', ([], {}), '()\n', (33760, 33762), True, 'import xarray as xr\n'), ((37629, 37643), 'numpy.asarray', 'np.asarray', (['ax'], {}), '(ax)\n', (37639, 37643), True, 'import numpy as np\n'), ((42896, 42916), 'types.MappingProxyType', 'MappingProxyType', (['{}'], {}), '({})\n', (42912, 42916), False, 'from types import MappingProxyType\n'), ((48576, 48617), 
'squidpy.gr._utils._assert_spatial_basis', '_assert_spatial_basis', (['adata', 'spatial_key'], {}), '(adata, spatial_key)\n', (48597, 48617), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((53340, 53371), 're.compile', 're.compile', (['f"""^{layer}_(\\\\d*)$"""'], {}), "(f'^{layer}_(\\\\d*)$')\n", (53350, 53371), False, 'import re\n'), ((53774, 53807), 're.compile', 're.compile', (['f"""^{channel}_(\\\\d*)$"""'], {}), "(f'^{channel}_(\\\\d*)$')\n", (53784, 53807), False, 'import re\n'), ((58789, 58809), 'types.MappingProxyType', 'MappingProxyType', (['{}'], {}), '({})\n', (58805, 58809), False, 'from types import MappingProxyType\n'), ((6160, 6255), 'xarray.concat', 'xr.concat', (['[img.data for img in prep_imgs]'], {'dim': '"""z"""', 'combine_attrs': 'combine_attrs'}), "([img.data for img in prep_imgs], dim='z', combine_attrs=\n combine_attrs, **kwargs)\n", (6169, 6255), True, 'import xarray as xr\n'), ((9987, 10008), 'squidpy._constants._constants.InferDimensions', 'InferDimensions', (['dims'], {}), '(dims)\n', (10002, 10008), False, 'from squidpy._constants._constants import InferDimensions\n'), ((10837, 10924), 'scanpy.logging.info', 'logg.info', (['f"""{\'Overwriting\' if layer in self else \'Adding\'} image layer `{layer}`"""'], {}), '(\n f"{\'Overwriting\' if layer in self else \'Adding\'} image layer `{layer}`")\n', (10846, 10924), True, 'from scanpy import logging as logg\n'), ((13037, 13061), 'validators.url', 'validators.url', (['img_path'], {}), '(img_path)\n', (13051, 13061), False, 'import validators\n'), ((13352, 13404), 'squidpy.im._io._lazy_load_image', '_lazy_load_image', (['img_path'], {'dims': 'dims', 'chunks': 'chunks'}), '(img_path, dims=dims, chunks=chunks)\n', (13368, 13404), False, 'from squidpy.im._io import _lazy_load_image, _infer_dimensions, _assert_dims_present\n'), ((14426, 14443), 'xarray.DataArray', 'xr.DataArray', (['img'], {}), 
'(img)\n', (14438, 14443), True, 'import xarray as xr\n'), ((14968, 15013), 'squidpy.im._io._infer_dimensions', '_infer_dimensions', (['img'], {'infer_dimensions': 'dims'}), '(img, infer_dimensions=dims)\n', (14985, 15013), False, 'from squidpy.im._io import _lazy_load_image, _infer_dimensions, _assert_dims_present\n'), ((17507, 17724), 'scanpy.logging.warning', 'logg.warning', (['f"""Unable to determine the scale factor from `adata.uns[{spatial_key!r}][{library_id!r}][\'scalefactors\'][\'tissue_{img_key}_scalef\']`, using `1.0`. Consider specifying it manually as `scale=...`"""'], {}), '(\n f"Unable to determine the scale factor from `adata.uns[{spatial_key!r}][{library_id!r}][\'scalefactors\'][\'tissue_{img_key}_scalef\']`, using `1.0`. Consider specifying it manually as `scale=...`"\n )\n', (17519, 17724), True, 'from scanpy import logging as logg\n'), ((21799, 21902), 'functools.partial', 'partial', (['rescale'], {'scale': '[scale, scale, 1]', 'preserve_range': '(True)', 'order': '(1)', 'channel_axis': '(-1)', 'cval': 'cval'}), '(rescale, scale=[scale, scale, 1], preserve_range=True, order=1,\n channel_axis=-1, cval=cval)\n', (21806, 21902), False, 'from functools import partial\n'), ((22654, 22687), 'squidpy.im._coords._update_attrs_scale', '_update_attrs_scale', (['attrs', 'scale'], {}), '(attrs, scale)\n', (22673, 22687), False, 'from squidpy.im._coords import CropCoords, CropPadding, _NULL_COORDS, _NULL_PADDING, TupleSerializer, _update_attrs_scale, _update_attrs_coords\n'), ((36699, 36723), 'numpy.arange', 'np.arange', (['arr.shape[-1]'], {}), '(arr.shape[-1])\n', (36708, 36723), True, 'import numpy as np\n'), ((36907, 36997), 'scanpy.logging.warning', 'logg.warning', (['f"""Unable to plot image with `{n_channels}`. Setting `channelwise=True`"""'], {}), "(\n f'Unable to plot image with `{n_channels}`. 
Setting `channelwise=True`')\n", (36919, 36997), True, 'from scanpy import logging as logg\n'), ((37298, 37431), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': 'nrows', 'ncols': 'ncols', 'figsize': '((8, 8) if figsize is None else figsize)', 'dpi': 'dpi', 'tight_layout': '(True)', 'squeeze': '(False)'}), '(nrows=nrows, ncols=ncols, figsize=(8, 8) if figsize is None else\n figsize, dpi=dpi, tight_layout=True, squeeze=False)\n', (37310, 37431), True, 'import matplotlib.pyplot as plt\n'), ((38620, 38644), 'matplotlib.colors.ListedColormap', 'ListedColormap', (['seg_cmap'], {}), '(seg_cmap)\n', (38634, 38644), False, 'from matplotlib.colors import ListedColormap\n'), ((51257, 51271), 'copy.deepcopy', 'deepcopy', (['self'], {}), '(self)\n', (51265, 51271), False, 'from copy import copy, deepcopy\n'), ((51285, 51295), 'copy.copy', 'copy', (['self'], {}), '(self)\n', (51289, 51295), False, 'from copy import copy, deepcopy\n'), ((57695, 57730), 'squidpy.gr._utils._assert_in_range', '_assert_in_range', (['y', '(0)', '(1)'], {'name': '"""y"""'}), "(y, 0, 1, name='y')\n", (57711, 57730), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((57815, 57850), 'squidpy.gr._utils._assert_in_range', '_assert_in_range', (['x', '(0)', '(1)'], {'name': '"""x"""'}), "(x, 0, 1, name='x')\n", (57831, 57850), False, 'from squidpy.gr._utils import _assert_in_range, _assert_positive, _assert_non_negative, _assert_spatial_basis, _assert_non_empty_sequence\n'), ((12517, 12563), 'squidpy.im._io._assert_dims_present', '_assert_dims_present', (['img.dims'], {'include_z': '(True)'}), '(img.dims, include_z=True)\n', (12537, 12563), False, 'from squidpy.im._io import _lazy_load_image, _infer_dimensions, _assert_dims_present\n'), ((13674, 13711), 'xarray.open_zarr', 'xr.open_zarr', (['img_path'], {'chunks': 'chunks'}), '(img_path, chunks=chunks)\n', (13686, 13711), True, 'import xarray as 
xr\n'), ((15131, 15236), 'scanpy.logging.warning', 'logg.warning', (['f"""Unable to find `y`, `x` or `z` dimension in `{img.dims}`. Renaming to `{dims}`"""'], {}), "(\n f'Unable to find `y`, `x` or `z` dimension in `{img.dims}`. Renaming to `{dims}`'\n )\n", (15143, 15236), True, 'from scanpy import logging as logg\n'), ((28702, 28769), 'squidpy._constants._pkg_constants.Key.uns.library_id', 'Key.uns.library_id', (['adata'], {'spatial_key': 'spatial_key', 'library_id': 'None'}), '(adata, spatial_key=spatial_key, library_id=None)\n', (28720, 28769), False, 'from squidpy._constants._pkg_constants import Key\n'), ((30542, 30652), 'squidpy._constants._pkg_constants.Key.uns.spot_diameter', 'Key.uns.spot_diameter', (['adata'], {'spatial_key': 'spatial_key', 'library_id': 'lid', 'spot_diameter_key': 'spot_diameter_key'}), '(adata, spatial_key=spatial_key, library_id=lid,\n spot_diameter_key=spot_diameter_key)\n', (30563, 30652), False, 'from squidpy._constants._pkg_constants import Key\n'), ((37600, 37614), 'numpy.array', 'np.array', (['[ax]'], {}), '([ax])\n', (37608, 37614), True, 'import numpy as np\n'), ((38140, 38180), 'numpy.issubdtype', 'np.issubdtype', (['seg_arr.dtype', 'np.integer'], {}), '(seg_arr.dtype, np.integer)\n', (38153, 38180), True, 'import numpy as np\n'), ((38442, 38481), 'numpy.array', 'np.array', (['default_palette'], {'dtype': 'object'}), '(default_palette, dtype=object)\n', (38450, 38481), True, 'import numpy as np\n'), ((39780, 39799), 'squidpy.pl._utils.save_fig', 'save_fig', (['fig', 'save'], {}), '(fig, save)\n', (39788, 39799), False, 'from squidpy.pl._utils import save_fig\n'), ((42131, 42330), 'squidpy.pl.Interactive', 'Interactive', ([], {'img': 'self', 'adata': 'adata', 'spatial_key': 'spatial_key', 'library_key': 'library_key', 'library_id': 'library_id', 'cmap': 'cmap', 'palette': 'palette', 'blending': 'blending', 'key_added': 'key_added', 'symbol': 'symbol'}), '(img=self, adata=adata, spatial_key=spatial_key, library_key=\n 
library_key, library_id=library_id, cmap=cmap, palette=palette,\n blending=blending, key_added=key_added, symbol=symbol)\n', (42142, 42330), False, 'from squidpy.pl import Interactive\n'), ((44855, 44903), 'dask.array.map_overlap', 'da.map_overlap', (['func', 'arr'], {}), '(func, arr, **fn_kwargs, **kwargs)\n', (44869, 44903), True, 'import dask.array as da\n'), ((44962, 45026), 'dask.array.map_blocks', 'da.map_blocks', (['func', 'arr'], {'dtype': 'arr.dtype'}), '(func, arr, **fn_kwargs, **kwargs, dtype=arr.dtype)\n', (44975, 45026), True, 'import dask.array as da\n'), ((45976, 46031), 'dask.array.stack', 'da.stack', (['[res[lid] for lid in new_library_ids]'], {'axis': '(2)'}), '([res[lid] for lid in new_library_ids], axis=2)\n', (45984, 46031), True, 'import dask.array as da\n'), ((11238, 11344), 'scanpy.logging.warning', 'logg.warning', (['f"""Channel dimension cannot be aligned with an existing one, using `{channel_dim}`"""'], {}), "(\n f'Channel dimension cannot be aligned with an existing one, using `{channel_dim}`'\n )\n", (11250, 11344), True, 'from scanpy import logging as logg\n'), ((12659, 12682), 'squidpy.im._coords._NULL_COORDS.to_tuple', '_NULL_COORDS.to_tuple', ([], {}), '()\n', (12680, 12682), False, 'from squidpy.im._coords import CropCoords, CropPadding, _NULL_COORDS, _NULL_PADDING, TupleSerializer, _update_attrs_scale, _update_attrs_coords\n'), ((12799, 12823), 'squidpy.im._coords._NULL_PADDING.to_tuple', '_NULL_PADDING.to_tuple', ([], {}), '()\n', (12821, 12823), False, 'from squidpy.im._coords import CropCoords, CropPadding, _NULL_COORDS, _NULL_PADDING, TupleSerializer, _update_attrs_scale, _update_attrs_coords\n'), ((13438, 13452), 'pathlib.Path', 'Path', (['img_path'], {}), '(img_path)\n', (13442, 13452), False, 'from pathlib import Path\n'), ((13940, 13980), 'xarray.open_dataset', 'xr.open_dataset', (['img_path'], {'chunks': 'chunks'}), '(img_path, chunks=chunks)\n', (13955, 13980), True, 'import xarray as xr\n'), ((29459, 29581), 
'scanpy.logging.debug', 'logg.debug', (['f"""Unable to find library ids in `adata.obs[{library_id!r}]`. Trying in `adata.uns[{spatial_key!r}]`"""'], {}), "(\n f'Unable to find library ids in `adata.obs[{library_id!r}]`. Trying in `adata.uns[{spatial_key!r}]`'\n )\n", (29469, 29581), True, 'from scanpy import logging as logg\n'), ((29663, 29736), 'squidpy._constants._pkg_constants.Key.uns.library_id', 'Key.uns.library_id', (['adata'], {'spatial_key': 'spatial_key', 'library_id': 'library_id'}), '(adata, spatial_key=spatial_key, library_id=library_id)\n', (29681, 29736), False, 'from squidpy._constants._pkg_constants import Key\n'), ((36431, 36452), 'numpy.asarray', 'np.asarray', (['[channel]'], {}), '([channel])\n', (36441, 36452), True, 'import numpy as np\n'), ((39198, 39240), 'skimage.util.img_as_float', 'img_as_float', (['img.values'], {'force_copy': '(False)'}), '(img.values, force_copy=False)\n', (39210, 39240), False, 'from skimage.util import img_as_float\n'), ((44781, 44801), 'dask.array.asarray', 'da.asarray', (['arr.data'], {}), '(arr.data)\n', (44791, 44801), True, 'import dask.array as da\n'), ((46490, 46632), 'scanpy.logging.warning', 'logg.warning', (['f"""Function changed the number of channels, cannot use identity for library ids `{noop_library_ids}`. Replacing with 0"""'], {}), "(\n f'Function changed the number of channels, cannot use identity for library ids `{noop_library_ids}`. 
Replacing with 0'\n )\n", (46502, 46632), True, 'from scanpy import logging as logg\n'), ((46972, 47027), 'dask.array.stack', 'da.stack', (['[res[lid] for lid in new_library_ids]'], {'axis': '(2)'}), '([res[lid] for lid in new_library_ids], axis=2)\n', (46980, 47027), True, 'import dask.array as da\n'), ((13063, 13077), 'pathlib.Path', 'Path', (['img_path'], {}), '(img_path)\n', (13067, 13077), False, 'from pathlib import Path\n'), ((13178, 13192), 'pathlib.Path', 'Path', (['img_path'], {}), '(img_path)\n', (13182, 13192), False, 'from pathlib import Path\n'), ((20771, 20815), 'numpy.can_cast', 'np.can_cast', (['cval', 'arr.dtype'], {'casting': '"""safe"""'}), "(cval, arr.dtype, casting='safe')\n", (20782, 20815), True, 'import numpy as np\n'), ((38492, 38507), 'numpy.max', 'np.max', (['seg_arr'], {}), '(seg_arr)\n', (38498, 38507), True, 'import numpy as np\n'), ((22059, 22080), 'numpy.asarray', 'np.asarray', (['arr.shape'], {}), '(arr.shape)\n', (22069, 22080), True, 'import numpy as np\n'), ((55559, 55646), 'scanpy.logging.warning', 'logg.warning', (['f"""Unable to retrieve library ids, reason `{e}`. Using default names"""'], {}), "(\n f'Unable to retrieve library ids, reason `{e}`. Using default names')\n", (55571, 55646), True, 'from scanpy import logging as logg\n')] |
from django import forms
from django.contrib.auth.forms import UserCreationForm
from crispy_bootstrap5.bootstrap5 import FloatingField
from crispy_forms.layout import Layout
from crispy_forms.helper import FormHelper
class CustomUserCreationForm(UserCreationForm):
    """Sign-up form extending Django's UserCreationForm with an email field."""
    # forms.EmailField is required by default, so the sign-up form will
    # reject submissions without an email address.
    email = forms.EmailField()
    class Meta(UserCreationForm.Meta):
        # Inherited default fields plus the extra "email" field.
        fields = UserCreationForm.Meta.fields + ("email",)
| [
"django.forms.EmailField"
] | [((279, 297), 'django.forms.EmailField', 'forms.EmailField', ([], {}), '()\n', (295, 297), False, 'from django import forms\n')] |
import json
import requests
import config
# Module-level memory of assignment ids already reported, so repeated
# polls only announce newly appeared assignments (not persisted anywhere).
assignedIdList = list()
def __getList():
    """Fetch the raw assignment list from the tutorzzz API.

    Returns:
        The list of assignment dicts on success, or None when the HTTP
        request fails, the response body is not JSON (expired cookie),
        or the API reports an error status.
    """
    HEADERS = {
        'Cookie': config.tutorzzzCookie,
        'Content-Type': 'application/json'
    }
    res = requests.post(config.tutorzzzURL, headers=HEADERS, json=config.tutorzzzReqBody)
    if res.status_code != 200:
        return None
    try:
        body = res.json()
    except ValueError:  # was a bare except; res.json() raises ValueError on non-JSON
        print("[ERROR]: tutorzzz cookie expired")
        return None
    if body['msg'] == '操作成功':  # API success marker
        return body['data']['data']
    return None
def __filter():
    """Return not-yet-seen assignments that are still recruiting.

    Side effect: remembers each returned id in the module-level
    assignedIdList so the same assignment is never reported twice.
    Returns None when the upstream fetch failed.
    """
    assignments = __getList()
    if assignments is None:
        return None
    fresh = []
    for item in assignments:
        # Skip anything not recruiting ('招募中') or already announced.
        if item['orderStatus'] != '招募中' or item['id'] in assignedIdList:
            continue
        entry = {'id': item['id'], 'title': item['title'], 'devPrice': item['devPrice']}
        fresh.append(entry)
        assignedIdList.append(entry['id'])
    return fresh
def remind():
    """Build the reminder message listing recruiting assignments.

    Returns None when fetching failed, a "nothing to report" string when
    no new assignments exist, otherwise a newline-terminated listing.
    """
    fresh = __filter()
    if fresh is None:
        return None
    if not fresh:
        return '尚无招募任务'
    lines = ['招募中任务']
    for entry in fresh:
        lines.append(entry['id'] + '\t' + entry['title'] + '\t' + entry['devPrice'])
    return '\n'.join(lines) + '\n'
| [
"requests.post"
] | [((202, 281), 'requests.post', 'requests.post', (['config.tutorzzzURL'], {'headers': 'HEADERS', 'json': 'config.tutorzzzReqBody'}), '(config.tutorzzzURL, headers=HEADERS, json=config.tutorzzzReqBody)\n', (215, 281), False, 'import requests\n')] |
from invoicing.crud.base_crud import BaseCrud
from invoicing.latex.latex_invoice import LatexInvoice
from invoicing.models.invoice_model import InvoiceModel
from invoicing.repository.invoice_repository import InvoiceRepository
from invoicing.repository.job_repository import JobRepository
from invoicing.ui.date import Date
from invoicing.ui.menu import Menu
from invoicing.ui.style import Style
from invoicing.value_validation.value_validation import Validation
class InvoiceCrud(BaseCrud):
    """CRUD screen for invoices.

    Extends the generic BaseCrud menu with a "Generate" action that
    collects billable time for an invoice's jobs, renders the invoice
    through LatexInvoice, and marks the jobs as complete.
    """
    def __init__(self):
        super().__init__('Invoices', InvoiceRepository, InvoiceModel)
        # Extra menu entry on top of the standard CRUD actions.
        self.menu_actions.add_action('Generate', self.generate)
    def make_paginated_menu(self):
        """Show the paginated invoice picker (rows joined with client and
        company data) and return the selected invoice row."""
        return self.paginated_menu(
            find=self.repository.find_paginated_join_clients_and_companies,
            find_by_id=self.repository.find_by_id_join_clients_and_companies
        )
    def generate(self):
        """Interactively generate a LaTeX invoice for a selected invoice."""
        print(Style.create_title('Generate Invoice'))
        invoice = self.make_paginated_menu()
        if invoice:
            jobRepository = JobRepository()
            jobs = jobRepository.find_jobs_by_invoice_id(invoice['id'])
            self.enter_billable_time(jobRepository, jobs)
            # Re-read the jobs so they include the billable time just entered.
            jobs = jobRepository.find_jobs_by_invoice_id(invoice['id'])
            invoice_data = self.make_invoice_dictionary(invoice, jobs)
            LatexInvoice().generate(**invoice_data)
            self.mark_invoiced_jobs_as_complete(jobRepository, jobs)
            Menu.wait_for_input()
    def enter_billable_time(self, jobRepository, jobs):
        """Prompt for each job's billable time and persist it."""
        print(Style.create_title('Enter Billable Time'))
        for job in jobs:
            print('Title: %s' % job['title'])
            print('Description: %s' % job['description'])
            print('Estimated Time: %s' % job['estimated_time'])
            print('Logged Time: %s' % job['actual_time'])
            billable = ''
            # Re-prompt until the input parses as a float.
            while not Validation.isFloat(billable):
                billable = input('Billable Time: ')
            jobRepository.update_billable_time(job['id'], billable)
        jobRepository.save()
        jobRepository.check_rows_updated('Job Updated')
    def make_invoice_dictionary(self, invoice, jobs):
        """Build the keyword dict consumed by LatexInvoice.generate().

        Costs are computed as rate * billable_time per job; numeric
        values are passed through as strings for the template.
        """
        invoice_data = {
            'reference_code': invoice['reference_code'],
            'company_name': invoice['company_name'],
            'company_address': invoice['company_address'],
            'created_at': Date().convert_date_time_for_printing(invoice['created_at']),
            'total_cost': str(sum([float(job['rate']) * float(job['billable_time']) for job in jobs])),
            'jobs': [{
                'title': job['title'],
                'description': job['description'],
                'type': 'hours',
                'billable_time': str(job['billable_time']),
                'staff_rate': str(job['rate']),
                'cost': str(float(job['rate']) * float(job['billable_time']))
            } for job in jobs]
        }
        return invoice_data
    def mark_invoiced_jobs_as_complete(self, jobRepository, jobs):
        """Mark every invoiced job as complete and persist the change."""
        if len(jobs):
            for job in jobs:
                jobRepository.update_mark_as_complete(job['id'])
            jobRepository.save()
            jobRepository.check_rows_updated('The selected jobs have been marked as completed')
| [
"invoicing.ui.date.Date",
"invoicing.repository.job_repository.JobRepository",
"invoicing.value_validation.value_validation.Validation.isFloat",
"invoicing.ui.style.Style.create_title",
"invoicing.ui.menu.Menu.wait_for_input",
"invoicing.latex.latex_invoice.LatexInvoice"
] | [((926, 964), 'invoicing.ui.style.Style.create_title', 'Style.create_title', (['"""Generate Invoice"""'], {}), "('Generate Invoice')\n", (944, 964), False, 'from invoicing.ui.style import Style\n'), ((1059, 1074), 'invoicing.repository.job_repository.JobRepository', 'JobRepository', ([], {}), '()\n', (1072, 1074), False, 'from invoicing.repository.job_repository import JobRepository\n'), ((1481, 1502), 'invoicing.ui.menu.Menu.wait_for_input', 'Menu.wait_for_input', ([], {}), '()\n', (1500, 1502), False, 'from invoicing.ui.menu import Menu\n'), ((1574, 1615), 'invoicing.ui.style.Style.create_title', 'Style.create_title', (['"""Enter Billable Time"""'], {}), "('Enter Billable Time')\n", (1592, 1615), False, 'from invoicing.ui.style import Style\n'), ((1916, 1944), 'invoicing.value_validation.value_validation.Validation.isFloat', 'Validation.isFloat', (['billable'], {}), '(billable)\n', (1934, 1944), False, 'from invoicing.value_validation.value_validation import Validation\n'), ((1360, 1374), 'invoicing.latex.latex_invoice.LatexInvoice', 'LatexInvoice', ([], {}), '()\n', (1372, 1374), False, 'from invoicing.latex.latex_invoice import LatexInvoice\n'), ((2434, 2440), 'invoicing.ui.date.Date', 'Date', ([], {}), '()\n', (2438, 2440), False, 'from invoicing.ui.date import Date\n')] |
from ds3225 import DS3225
import dbus
import dbus.mainloop.glib
import dbus.service
from gi.repository import GObject, GLib
# Servo position for the "unlocked" state (presumably degrees — confirm
# against the DS3225 server's set_pos contract).
UNLOCKED_DEG = 175
# Install the GLib main loop as dbus-python's default before any bus
# connections are created.
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
# Well-known bus name, object path and interface of the DS3225 service.
BUS_NAME = 'jp.kimura.DS3225Service'
OBJECT_PATH = '/jp/kimura/DS3225Server'
INTERFACE = 'jp.kimura.DS3225'
class DS3225Client(dbus.service.Object):
    """Session-bus client that forwards position calls to the DS3225 server.

    Registers itself on the bus, then holds a proxy to the remote object
    at BUS_NAME/OBJECT_PATH and delegates get_pos/set_pos to it.
    """
    def __init__(self):
        bus = dbus.SessionBus()
        # Claim the bus name and register this object before obtaining
        # the proxy; the order matters for D-Bus registration.
        bus_name = dbus.service.BusName(BUS_NAME, bus)
        super(DS3225Client, self).__init__(bus_name, OBJECT_PATH)
        self._proxy = bus.get_object(BUS_NAME, OBJECT_PATH)
    def get_pos(self):
        # Delegate to the remote service and return its answer.
        return self._proxy.get_pos()
    def set_pos(self, pos):
        # Forward the requested position to the remote service.
        self._proxy.set_pos(pos)
# Manual test: sweep the servo between the unlocked position and 90
# units below it every 2 seconds, until interrupted.
if __name__ == '__main__':
    import time
    ds3225_client = DS3225Client()
    while True:
        ds3225_client.set_pos(UNLOCKED_DEG)
        time.sleep(2)
        ds3225_client.set_pos(UNLOCKED_DEG-90)
        time.sleep(2)
| [
"dbus.service.BusName",
"dbus.SessionBus",
"time.sleep",
"dbus.mainloop.glib.DBusGMainLoop"
] | [((145, 198), 'dbus.mainloop.glib.DBusGMainLoop', 'dbus.mainloop.glib.DBusGMainLoop', ([], {'set_as_default': '(True)'}), '(set_as_default=True)\n', (177, 198), False, 'import dbus\n'), ((387, 404), 'dbus.SessionBus', 'dbus.SessionBus', ([], {}), '()\n', (402, 404), False, 'import dbus\n'), ((424, 459), 'dbus.service.BusName', 'dbus.service.BusName', (['BUS_NAME', 'bus'], {}), '(BUS_NAME, bus)\n', (444, 459), False, 'import dbus\n'), ((863, 876), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (873, 876), False, 'import time\n'), ((932, 945), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (942, 945), False, 'import time\n')] |
from os import access
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import datasets, transforms
import numpy as np
import matplotlib.pyplot as plt
# Create fully connected neural network
class NN(nn.Module):
    """Two-layer fully connected classifier: input -> 50 hidden -> num_classes."""
    def __init__(self, input_size, num_classes):
        super(NN, self).__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.fc2 = nn.Linear(50, num_classes)
    def forward(self, x):
        # ReLU on the hidden layer only; raw logits out.
        hidden = F.relu(self.fc1(x))
        return self.fc2(hidden)
class CNN(nn.Module):
    """Small convolutional classifier for 28x28 images (e.g. MNIST).

    Two conv(3x3, same-padding) + ReLU + 2x2 maxpool stages halve the
    spatial size twice (28 -> 14 -> 7); a single fully connected layer
    maps the flattened 16*7*7 features to class scores.
    """
    def __init__(self, in_channels=1, num_classes=10):
        super(CNN, self).__init__()
        # Fix: honour the in_channels argument (it was hard-coded to 1).
        self.conv1 = nn.Conv2d(in_channels=in_channels, out_channels=8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        self.pool = nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2))
        self.conv2 = nn.Conv2d(in_channels=8, out_channels=16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        self.fc1 = nn.Linear(16 * 7 * 7, num_classes)
    def forward(self, x):
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = x.reshape(x.shape[0], -1)  # flatten to (batch, 16*7*7)
        # Fix: the original called undefined self.fc2/self.fc3 (AttributeError)
        # and applied ReLU to the output layer; return raw logits from fc1.
        return self.fc1(x)
# NOTE(review): the three-line smoke test below is broken: CNN(784, 10)
# binds in_channels=784, a flat (64, 784) tensor is not valid Conv2d
# input, and CNN.forward references undefined self.fc2/self.fc3 — so
# these lines raise at import time. `model` is rebound to an NN below.
model = CNN(784, 10)
x = torch.randn(64, 784)
print(model(x).shape)
# Set device
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Hyperparameters
batch_size = 64
learning_rate = 1e-3
num_epochs = 10
input_size = 784  # 28*28 flattened MNIST images
num_classes = 10  # digits 0-9
# Load data
# MNIST is downloaded into ./dataset/ on first run.
train_dataset = datasets.MNIST(root='dataset/', train=True, download=True, transform=transforms.ToTensor())
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
test_dataset = datasets.MNIST(root='dataset/', train=False, download=True, transform=transforms.ToTensor())
test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=True)
# Initialize model
model = NN(input_size=input_size, num_classes=num_classes).to(device)
# Define loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=learning_rate)
# Train model
for epoch in range(num_epochs):
    for batch_idx, (data, target) in enumerate(train_loader):
        data, target = data.to(device), target.to(device)
        # Correct shape
        data = data.reshape(data.shape[0], -1)
        # Forward pass
        scores = model(data)
        loss = criterion(scores, target)
        # Backward pass
        optimizer.zero_grad()
        loss.backward()
        # Gradient descent step
        optimizer.step()
# Check accuracy
def check_accuracy(loader, model):
    """Print the classification accuracy of `model` over `loader`.

    Flattens each batch to (batch, features), counts correct argmax
    predictions under torch.no_grad(), and restores train mode before
    returning. Relies on the module-level `device` global.
    """
    if loader.dataset.train:
        print('Checking accuracy on training set')
    else:
        print('Checking accuracy on test set')
    correct = 0
    seen = 0
    model.eval()
    with torch.no_grad():
        for inputs, labels in loader:
            inputs = inputs.to(device)
            labels = labels.to(device)
            inputs = inputs.reshape(inputs.shape[0], -1)
            logits = model(inputs)
            predictions = logits.max(1)[1]
            correct += (predictions == labels).sum()
            seen += predictions.size(0)
    print('Got %d / %d correct (%.2f)' % (correct, seen, 100 * float(correct) / seen))
    model.train()
# Final evaluation on both splits (uses the globals defined above).
check_accuracy(train_loader, model)
check_accuracy(test_loader, model)
| [
"torch.nn.CrossEntropyLoss",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"torch.cuda.is_available",
"torch.nn.Linear",
"torch.utils.data.DataLoader",
"torch.no_grad",
"torchvision.transforms.ToTensor",
"torch.randn"
] | [((1335, 1355), 'torch.randn', 'torch.randn', (['(64)', '(784)'], {}), '(64, 784)\n', (1346, 1355), False, 'import torch\n'), ((1704, 1766), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset'], {'batch_size': 'batch_size', 'shuffle': '(True)'}), '(train_dataset, batch_size=batch_size, shuffle=True)\n', (1714, 1766), False, 'from torch.utils.data import DataLoader\n'), ((1889, 1950), 'torch.utils.data.DataLoader', 'DataLoader', (['test_dataset'], {'batch_size': 'batch_size', 'shuffle': '(True)'}), '(test_dataset, batch_size=batch_size, shuffle=True)\n', (1899, 1950), False, 'from torch.utils.data import DataLoader\n'), ((2082, 2103), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2101, 2103), True, 'import torch.nn as nn\n'), ((418, 443), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(50)'], {}), '(input_size, 50)\n', (427, 443), True, 'import torch.nn as nn\n'), ((463, 489), 'torch.nn.Linear', 'nn.Linear', (['(50)', 'num_classes'], {}), '(50, num_classes)\n', (472, 489), True, 'import torch.nn as nn\n'), ((725, 820), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(1)', 'out_channels': '(8)', 'kernel_size': '(3, 3)', 'stride': '(1, 1)', 'padding': '(1, 1)'}), '(in_channels=1, out_channels=8, kernel_size=(3, 3), stride=(1, 1),\n padding=(1, 1))\n', (734, 820), True, 'import torch.nn as nn\n'), ((834, 881), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2, 2)', 'stride': '(2, 2)'}), '(kernel_size=(2, 2), stride=(2, 2))\n', (846, 881), True, 'import torch.nn as nn\n'), ((904, 1000), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': '(8)', 'out_channels': '(16)', 'kernel_size': '(3, 3)', 'stride': '(1, 1)', 'padding': '(1, 1)'}), '(in_channels=8, out_channels=16, kernel_size=(3, 3), stride=(1, 1),\n padding=(1, 1))\n', (913, 1000), True, 'import torch.nn as nn\n'), ((1013, 1047), 'torch.nn.Linear', 'nn.Linear', (['(16 * 7 * 7)', 'num_classes'], {}), '(16 * 7 * 7, num_classes)\n', (1022, 1047), True, 
'import torch.nn as nn\n'), ((1424, 1449), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1447, 1449), False, 'import torch\n'), ((1666, 1687), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1685, 1687), False, 'from torchvision import datasets, transforms\n'), ((1852, 1873), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1871, 1873), False, 'from torchvision import datasets, transforms\n'), ((2899, 2914), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2912, 2914), False, 'import torch\n')] |
import matplotlib.pyplot as plt
def plot_loss_mae(history):
    """Render two figures from a Keras History: train/validation loss,
    then train/validation MAE. Each figure blocks on plt.show()."""
    curve_specs = [
        ('loss', 'val_loss', 'Model loss', 'Loss'),
        ('mae', 'val_mae', 'Model MAE', 'MAE'),
    ]
    for train_key, val_key, title, y_label in curve_specs:
        plt.plot(history.history[train_key])
        plt.plot(history.history[val_key])
        plt.title(title)
        plt.ylabel(y_label)
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Validation'], loc='best')
        plt.show()
def plot_loss_accuracy(history):
    """Render two figures from a Keras History: train/validation loss,
    then train/validation accuracy. Each figure blocks on plt.show()."""
    curve_specs = [
        ('loss', 'val_loss', 'Model loss', 'Loss'),
        ('accuracy', 'val_accuracy', 'Model Accuracy', 'Accuracy'),
    ]
    for train_key, val_key, title, y_label in curve_specs:
        plt.plot(history.history[train_key])
        plt.plot(history.history[val_key])
        plt.title(title)
        plt.ylabel(y_label)
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Validation'], loc='best')
        plt.show()
| [
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((66, 99), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['loss']"], {}), "(history.history['loss'])\n", (74, 99), True, 'import matplotlib.pyplot as plt\n'), ((104, 141), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_loss']"], {}), "(history.history['val_loss'])\n", (112, 141), True, 'import matplotlib.pyplot as plt\n'), ((146, 169), 'matplotlib.pyplot.title', 'plt.title', (['"""Model loss"""'], {}), "('Model loss')\n", (155, 169), True, 'import matplotlib.pyplot as plt\n'), ((174, 192), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (184, 192), True, 'import matplotlib.pyplot as plt\n'), ((197, 216), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (207, 216), True, 'import matplotlib.pyplot as plt\n'), ((221, 268), 'matplotlib.pyplot.legend', 'plt.legend', (["['Train', 'Validation']"], {'loc': '"""best"""'}), "(['Train', 'Validation'], loc='best')\n", (231, 268), True, 'import matplotlib.pyplot as plt\n'), ((273, 283), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (281, 283), True, 'import matplotlib.pyplot as plt\n'), ((289, 321), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['mae']"], {}), "(history.history['mae'])\n", (297, 321), True, 'import matplotlib.pyplot as plt\n'), ((326, 362), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_mae']"], {}), "(history.history['val_mae'])\n", (334, 362), True, 'import matplotlib.pyplot as plt\n'), ((367, 389), 'matplotlib.pyplot.title', 'plt.title', (['"""Model MAE"""'], {}), "('Model MAE')\n", (376, 389), True, 'import matplotlib.pyplot as plt\n'), ((394, 411), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""MAE"""'], {}), "('MAE')\n", (404, 411), True, 'import matplotlib.pyplot as plt\n'), ((416, 435), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (426, 435), True, 'import matplotlib.pyplot as plt\n'), ((440, 487), 'matplotlib.pyplot.legend', 'plt.legend', 
(["['Train', 'Validation']"], {'loc': '"""best"""'}), "(['Train', 'Validation'], loc='best')\n", (450, 487), True, 'import matplotlib.pyplot as plt\n'), ((492, 502), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (500, 502), True, 'import matplotlib.pyplot as plt\n'), ((541, 574), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['loss']"], {}), "(history.history['loss'])\n", (549, 574), True, 'import matplotlib.pyplot as plt\n'), ((579, 616), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_loss']"], {}), "(history.history['val_loss'])\n", (587, 616), True, 'import matplotlib.pyplot as plt\n'), ((621, 644), 'matplotlib.pyplot.title', 'plt.title', (['"""Model loss"""'], {}), "('Model loss')\n", (630, 644), True, 'import matplotlib.pyplot as plt\n'), ((649, 667), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (659, 667), True, 'import matplotlib.pyplot as plt\n'), ((672, 691), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (682, 691), True, 'import matplotlib.pyplot as plt\n'), ((696, 743), 'matplotlib.pyplot.legend', 'plt.legend', (["['Train', 'Validation']"], {'loc': '"""best"""'}), "(['Train', 'Validation'], loc='best')\n", (706, 743), True, 'import matplotlib.pyplot as plt\n'), ((748, 758), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (756, 758), True, 'import matplotlib.pyplot as plt\n'), ((764, 801), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['accuracy']"], {}), "(history.history['accuracy'])\n", (772, 801), True, 'import matplotlib.pyplot as plt\n'), ((806, 847), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_accuracy']"], {}), "(history.history['val_accuracy'])\n", (814, 847), True, 'import matplotlib.pyplot as plt\n'), ((852, 879), 'matplotlib.pyplot.title', 'plt.title', (['"""Model Accuracy"""'], {}), "('Model Accuracy')\n", (861, 879), True, 'import matplotlib.pyplot as plt\n'), ((884, 906), 'matplotlib.pyplot.ylabel', 
'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (894, 906), True, 'import matplotlib.pyplot as plt\n'), ((911, 930), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (921, 930), True, 'import matplotlib.pyplot as plt\n'), ((935, 982), 'matplotlib.pyplot.legend', 'plt.legend', (["['Train', 'Validation']"], {'loc': '"""best"""'}), "(['Train', 'Validation'], loc='best')\n", (945, 982), True, 'import matplotlib.pyplot as plt\n'), ((987, 997), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (995, 997), True, 'import matplotlib.pyplot as plt\n')] |
# -*- coding: utf-8 -*-
import os
from os.path import dirname, join, normpath
import sys
from sys import platform
from config import config
if platform == 'darwin':
import objc
from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey
    class HotkeyMgr:
        """Global hotkey registration and capture on macOS, via Cocoa/AppKit."""

        # Modifier bits that take part in hotkey matching.
        MODIFIERMASK = NSShiftKeyMask|NSControlKeyMask|NSAlternateKeyMask|NSCommandKeyMask|NSNumericPadKeyMask
        # Tk polling interval [ms] - the Cocoa callback can't touch Tkinter directly.
        POLL = 250
        # https://developer.apple.com/library/mac/documentation/Cocoa/Reference/ApplicationKit/Classes/NSEvent_Class/#//apple_ref/doc/constant_group/Function_Key_Unicodes
        DISPLAY = { 0x03: u'⌅', 0x09: u'⇥', 0xd: u'↩', 0x19: u'⇤', 0x1b: u'esc', 0x20: u'⏘', 0x7f: u'⌫',
                    0xf700: u'↑', 0xf701: u'↓', 0xf702: u'←', 0xf703: u'→',
                    0xf727: u'Ins',
                    0xf728: u'⌦', 0xf729: u'↖', 0xf72a: u'Fn', 0xf72b: u'↘',
                    0xf72c: u'⇞', 0xf72d: u'⇟', 0xf72e: u'PrtScr', 0xf72f: u'ScrollLock',
                    0xf730: u'Pause', 0xf731: u'SysReq', 0xf732: u'Break', 0xf733: u'Reset',
                    0xf739: u'⌧',
        }
        # Hotkey-acquisition state machine: idle / waiting for keypress / key captured.
        (ACQUIRE_INACTIVE, ACQUIRE_ACTIVE, ACQUIRE_NEW) = range(3)
        def __init__(self):
            """Initialise state and eagerly load the feedback sounds from config.respath."""
            self.root = None                   # Tk root, supplied by register()
            self.keycode = 0                   # configured hotkey keycode
            self.modifiers = 0                 # configured hotkey modifier mask
            self.activated = False             # set by the Cocoa handler, polled from Tk
            self.observer = None               # NSEvent global monitor handle
            self.acquire_key = 0               # key captured during acquisition
            self.acquire_state = HotkeyMgr.ACQUIRE_INACTIVE
            self.tkProcessKeyEvent_old = None  # saved Tk key-event IMP, chained to below
            self.snd_good = NSSound.alloc().initWithContentsOfFile_byReference_(join(config.respath, 'snd_good.wav'), False)
            self.snd_bad = NSSound.alloc().initWithContentsOfFile_byReference_(join(config.respath, 'snd_bad.wav'), False)
        def register(self, root, keycode, modifiers):
            """(Re-)register the global hotkey and start the observer + poll loop."""
            self.root = root
            self.keycode = keycode
            self.modifiers = modifiers
            self.activated = False
            if keycode:
                if not self.observer:
                    self.root.after_idle(self._observe)
                self.root.after(HotkeyMgr.POLL, self._poll)
            # Monkey-patch tk (tkMacOSXKeyEvent.c) - install only once
            if not self.tkProcessKeyEvent_old:
                sel = 'tkProcessKeyEvent:'
                cls = NSApplication.sharedApplication().class__()
                self.tkProcessKeyEvent_old = NSApplication.sharedApplication().methodForSelector_(sel)
                newmethod = objc.selector(self.tkProcessKeyEvent, selector = self.tkProcessKeyEvent_old.selector, signature = self.tkProcessKeyEvent_old.signature)
                objc.classAddMethod(cls, sel, newmethod)
        # Monkey-patch tk (tkMacOSXKeyEvent.c) to:
        # - workaround crash on OSX 10.9 & 10.10 on seeing a composing character
        # - notice when modifier key state changes
        # - keep a copy of NSEvent.charactersIgnoringModifiers, which is what we need for the hotkey
        # (Would like to use a decorator but need to ensure the application is created before this is installed)
        def tkProcessKeyEvent(self, cls, theEvent):
            """Replacement for Tk's key-event processing; chains to the saved original."""
            if self.acquire_state:
                if theEvent.type() == NSFlagsChanged:
                    self.acquire_key = theEvent.modifierFlags() & NSDeviceIndependentModifierFlagsMask
                    self.acquire_state = HotkeyMgr.ACQUIRE_NEW
                    # suppress the event by not chaining the old function
                    return theEvent
                elif theEvent.type() in (NSKeyDown, NSKeyUp):
                    c = theEvent.charactersIgnoringModifiers()
                    self.acquire_key = (c and ord(c[0]) or 0) | (theEvent.modifierFlags() & NSDeviceIndependentModifierFlagsMask)
                    self.acquire_state = HotkeyMgr.ACQUIRE_NEW
                    # suppress the event by not chaining the old function
                    return theEvent
                # replace empty characters with charactersIgnoringModifiers to avoid crash
                # NOTE(review): this branch looks unreachable - the preceding elif already
                # returns for every NSKeyDown/NSKeyUp while acquiring, so the
                # composing-character workaround never fires; verify against upstream intent.
                elif theEvent.type() in (NSKeyDown, NSKeyUp) and not theEvent.characters():
                    theEvent = NSEvent.keyEventWithType_location_modifierFlags_timestamp_windowNumber_context_characters_charactersIgnoringModifiers_isARepeat_keyCode_(theEvent.type(), theEvent.locationInWindow(), theEvent.modifierFlags(), theEvent.timestamp(), theEvent.windowNumber(), theEvent.context(), theEvent.charactersIgnoringModifiers(), theEvent.charactersIgnoringModifiers(), theEvent.isARepeat(), theEvent.keyCode())
            return self.tkProcessKeyEvent_old(cls, theEvent)
        def _observe(self):
            # Must be called after root.mainloop() so that the app's message loop has been created
            self.observer = NSEvent.addGlobalMonitorForEventsMatchingMask_handler_(NSKeyDownMask, self._handler)
        def _poll(self):
            # No way of signalling to Tkinter from within the callback handler block that doesn't
            # cause Python to crash, so poll.
            if self.activated:
                self.activated = False
                self.root.event_generate('<<Invoke>>', when="tail")
            if self.keycode or self.modifiers:
                self.root.after(HotkeyMgr.POLL, self._poll)
        def unregister(self):
            """Forget the configured hotkey; the poll loop then stops rescheduling itself."""
            self.keycode = None
            self.modifiers = None
        @objc.callbackFor(NSEvent.addGlobalMonitorForEventsMatchingMask_handler_)
        def _handler(self, event):
            # use event.charactersIgnoringModifiers to handle composing characters like Alt-e
            if (event.modifierFlags() & HotkeyMgr.MODIFIERMASK) == self.modifiers and ord(event.charactersIgnoringModifiers()[0]) == self.keycode:
                if config.getint('hotkey_always'):
                    self.activated = True
                else:	# Only trigger if game client is front process
                    front = NSWorkspace.sharedWorkspace().frontmostApplication()
                    if front and front.bundleIdentifier() == 'uk.co.frontier.EliteDangerous':
                        self.activated = True
        def acquire_start(self):
            """Begin capturing the next keypress as the new hotkey candidate."""
            self.acquire_state = HotkeyMgr.ACQUIRE_ACTIVE
            self.root.after_idle(self._acquire_poll)
        def acquire_stop(self):
            """Abort/finish hotkey acquisition."""
            self.acquire_state = HotkeyMgr.ACQUIRE_INACTIVE
        def _acquire_poll(self):
            # No way of signalling to Tkinter from within the monkey-patched event handler that doesn't
            # cause Python to crash, so poll.
            if self.acquire_state:
                if self.acquire_state == HotkeyMgr.ACQUIRE_NEW:
                    # Abuse tkEvent's keycode field to hold our acquired key & modifier
                    self.root.event_generate('<KeyPress>', keycode = self.acquire_key)
                    self.acquire_state = HotkeyMgr.ACQUIRE_ACTIVE
                self.root.after(50, self._acquire_poll)
        def fromevent(self, event):
            # Return configuration (keycode, modifiers) or None=clear or False=retain previous
            (keycode, modifiers) = (event.keycode & 0xffff, event.keycode & 0xffff0000)	# Set by _acquire_poll()
            if keycode and not (modifiers & (NSShiftKeyMask|NSControlKeyMask|NSAlternateKeyMask|NSCommandKeyMask)):
                if keycode == 0x1b:			# Esc = retain previous
                    self.acquire_state = HotkeyMgr.ACQUIRE_INACTIVE
                    return False
                elif keycode in [0x7f, ord(NSDeleteFunctionKey), ord(NSClearLineFunctionKey)]:	# BkSp, Del, Clear = clear hotkey
                    self.acquire_state = HotkeyMgr.ACQUIRE_INACTIVE
                    return None
                elif keycode in [0x13, 0x20, 0x2d] or 0x61 <= keycode <= 0x7a:	# don't allow keys needed for typing in System Map
                    NSBeep()
                    self.acquire_state = HotkeyMgr.ACQUIRE_INACTIVE
                    return None
            return (keycode, modifiers)
        def display(self, keycode, modifiers):
            # Return displayable form
            text = ''
            if modifiers & NSControlKeyMask:   text += u'⌃'
            if modifiers & NSAlternateKeyMask: text += u'⌥'
            if modifiers & NSShiftKeyMask:     text += u'⇧'
            if modifiers & NSCommandKeyMask:   text += u'⌘'
            if (modifiers & NSNumericPadKeyMask) and keycode <= 0x7f: text += u'№'
            if not keycode:
                pass
            elif ord(NSF1FunctionKey) <= keycode <= ord(NSF35FunctionKey):
                text += 'F%d' % (keycode + 1 - ord(NSF1FunctionKey))
            elif keycode in HotkeyMgr.DISPLAY:	# specials
                text += HotkeyMgr.DISPLAY[keycode]
            elif keycode < 0x20:		# control keys
                # NOTE(review): unichr is Python 2 only - under Python 3 this raises NameError.
                text += unichr(keycode+0x40)
            elif keycode < 0xf700:		# key char
                text += unichr(keycode).upper()
            else:
                text += u'⁈'
            return text
        def play_good(self):
            """Play the 'success' feedback sound."""
            self.snd_good.play()
        def play_bad(self):
            """Play the 'failure' feedback sound."""
            self.snd_bad.play()
elif platform == 'win32':
    import atexit
    import ctypes
    from ctypes.wintypes import *
    import threading
    import winsound
    # Win32 user32 entry points used below.
    RegisterHotKey = ctypes.windll.user32.RegisterHotKey
    UnregisterHotKey = ctypes.windll.user32.UnregisterHotKey
    # RegisterHotKey modifier flags.
    MOD_ALT = 0x0001
    MOD_CONTROL = 0x0002
    MOD_SHIFT = 0x0004
    MOD_WIN = 0x0008
    MOD_NOREPEAT = 0x4000
    # Thread message-loop primitives.
    GetMessage = ctypes.windll.user32.GetMessageW
    TranslateMessage = ctypes.windll.user32.TranslateMessage
    DispatchMessage = ctypes.windll.user32.DispatchMessageW
    PostThreadMessage = ctypes.windll.user32.PostThreadMessageW
    # Window messages handled by the worker thread's message loop.
    WM_QUIT = 0x0012
    WM_HOTKEY = 0x0312
    WM_APP = 0x8000
    WM_SND_GOOD = WM_APP + 1	# app-defined: play the "good" sound
    WM_SND_BAD = WM_APP + 2	# app-defined: play the "bad" sound
    GetKeyState = ctypes.windll.user32.GetKeyState
    MapVirtualKey = ctypes.windll.user32.MapVirtualKeyW
    # Virtual-key codes used in fromevent()/display().
    VK_BACK = 0x08
    VK_CLEAR = 0x0c
    VK_RETURN = 0x0d
    VK_SHIFT = 0x10
    VK_CONTROL = 0x11
    VK_MENU = 0x12
    VK_CAPITAL = 0x14
    VK_MODECHANGE= 0x1f
    VK_ESCAPE = 0x1b
    VK_SPACE = 0x20
    VK_DELETE = 0x2e
    VK_LWIN = 0x5b
    VK_RWIN = 0x5c
    VK_NUMPAD0 = 0x60
    VK_DIVIDE = 0x6f
    VK_F1 = 0x70
    VK_F24 = 0x87
    VK_OEM_MINUS = 0xbd
    VK_NUMLOCK = 0x90
    VK_SCROLL = 0x91
    VK_PROCESSKEY= 0xe5
    VK_OEM_CLEAR = 0xfe
    GetForegroundWindow = ctypes.windll.user32.GetForegroundWindow
    GetWindowText = ctypes.windll.user32.GetWindowTextW
    GetWindowText.argtypes = [HWND, LPWSTR, ctypes.c_int]
    GetWindowTextLength = ctypes.windll.user32.GetWindowTextLengthW
    def WindowTitle(h):
        """Return the title of window handle ``h``, or '' if it has none."""
        if h:
            l = GetWindowTextLength(h) + 1
            buf = ctypes.create_unicode_buffer(l)
            if GetWindowText(h, buf, l):
                return buf.value
        return ''
    # ctypes mirrors of the Win32 INPUT structure family passed to SendInput.
    class MOUSEINPUT(ctypes.Structure):
        _fields_ = [('dx', LONG), ('dy', LONG), ('mouseData', DWORD), ('dwFlags', DWORD), ('time', DWORD), ('dwExtraInfo', ctypes.POINTER(ULONG))]
    class KEYBDINPUT(ctypes.Structure):
        _fields_ = [('wVk', WORD), ('wScan', WORD), ('dwFlags', DWORD), ('time', DWORD), ('dwExtraInfo', ctypes.POINTER(ULONG))]
    class HARDWAREINPUT(ctypes.Structure):
        _fields_ = [('uMsg', DWORD), ('wParamL', WORD), ('wParamH', WORD)]
    class INPUT_union(ctypes.Union):
        _fields_ = [('mi', MOUSEINPUT), ('ki', KEYBDINPUT), ('hi', HARDWAREINPUT)]
    class INPUT(ctypes.Structure):
        _fields_ = [('type', DWORD), ('union', INPUT_union)]
    SendInput = ctypes.windll.user32.SendInput
    SendInput.argtypes = [ctypes.c_uint, ctypes.POINTER(INPUT), ctypes.c_int]
    # Values for INPUT.type.
    INPUT_MOUSE = 0
    INPUT_KEYBOARD = 1
    INPUT_HARDWARE = 2
    class HotkeyMgr:
        """Global hotkey support on Windows, via RegisterHotKey on a worker thread."""

        # https://msdn.microsoft.com/en-us/library/windows/desktop/dd375731%28v=vs.85%29.aspx
        # Limit ourselves to symbols in Windows 7 Segoe UI
        DISPLAY = { 0x03: 'Break', 0x08: 'Bksp', 0x09: u'↹', 0x0c: 'Clear', 0x0d: u'↵', 0x13: 'Pause',
                    0x14: u'Ⓐ', 0x1b: 'Esc',
                    0x20: u'⏘', 0x21: 'PgUp', 0x22: 'PgDn', 0x23: 'End', 0x24: 'Home',
                    0x25: u'←', 0x26: u'↑', 0x27: u'→', 0x28: u'↓',
                    0x2c: 'PrtScn', 0x2d: 'Ins', 0x2e: 'Del', 0x2f: 'Help',
                    0x5d: u'▤', 0x5f: u'☾',
                    0x90: u'➀', 0x91: 'ScrLk',
                    0xa6: u'⇦', 0xa7: u'⇨', 0xa9: u'⊗', 0xab: u'☆', 0xac: u'⌂', 0xb4: u'✉',
        }
        def __init__(self):
            """Initialise state and load sound data for in-memory playback."""
            self.root = None     # Tk root, supplied by register()
            self.thread = None   # worker thread that owns the RegisterHotKey registration
            # Raw WAV bytes, played with winsound.SND_MEMORY from the worker thread.
            self.snd_good = open(join(config.respath, 'snd_good.wav'), 'rb').read()
            self.snd_bad = open(join(config.respath, 'snd_bad.wav'), 'rb').read()
            atexit.register(self.unregister)
        def register(self, root, keycode, modifiers):
            """(Re-)register the hotkey; a dedicated daemon thread handles it."""
            self.root = root
            if self.thread:
                self.unregister()
            if keycode or modifiers:
                self.thread = threading.Thread(target = self.worker, name = 'Hotkey "%x:%x"' % (keycode,modifiers), args = (keycode,modifiers))
                self.thread.daemon = True
                self.thread.start()
        def unregister(self):
            """Ask the worker thread to quit (it unregisters the hotkey) and wait."""
            thread = self.thread
            if thread:
                self.thread = None
                PostThreadMessage(thread.ident, WM_QUIT, 0, 0)
                thread.join()	# Wait for it to unregister hotkey and quit
        def worker(self, keycode, modifiers):
            """Worker thread body: register the hotkey and run a Win32 message loop."""
            # Hotkey must be registered by the thread that handles it
            if not RegisterHotKey(None, 1, modifiers|MOD_NOREPEAT, keycode):
                self.thread = None
                return
            # Synthetic key event used to re-deliver the keypress when the game isn't frontmost.
            fake = INPUT(INPUT_KEYBOARD, INPUT_union(ki = KEYBDINPUT(keycode, keycode, 0, 0, None)))
            msg = MSG()
            while GetMessage(ctypes.byref(msg), None, 0, 0) != 0:
                if msg.message == WM_HOTKEY:
                    if config.getint('hotkey_always') or WindowTitle(GetForegroundWindow()).startswith('Elite - Dangerous'):
                        self.root.event_generate('<<Invoke>>', when="tail")
                    else:
                        # Pass the key on
                        UnregisterHotKey(None, 1)
                        SendInput(1, fake, ctypes.sizeof(INPUT))
                        if not RegisterHotKey(None, 1, modifiers|MOD_NOREPEAT, keycode):
                            break
                elif msg.message == WM_SND_GOOD:
                    winsound.PlaySound(self.snd_good, winsound.SND_MEMORY)	# synchronous
                elif msg.message == WM_SND_BAD:
                    winsound.PlaySound(self.snd_bad, winsound.SND_MEMORY)	# synchronous
                else:
                    TranslateMessage(ctypes.byref(msg))
                    DispatchMessage(ctypes.byref(msg))
            UnregisterHotKey(None, 1)
            self.thread = None
        def acquire_start(self):
            # Nothing to do on Windows - fromevent() reads modifier state directly.
            pass
        def acquire_stop(self):
            pass
        def fromevent(self, event):
            """Translate a Tk key event into (keycode, modifiers), None=clear or False=retain."""
            # event.state is a pain - it shows the state of the modifiers *before* a modifier key was pressed.
            # event.state *does* differentiate between left and right Ctrl and Alt and between Return and Enter
            # by putting KF_EXTENDED in bit 18, but RegisterHotKey doesn't differentiate.
            modifiers = ((GetKeyState(VK_MENU) & 0x8000) and MOD_ALT) | ((GetKeyState(VK_CONTROL) & 0x8000) and MOD_CONTROL) | ((GetKeyState(VK_SHIFT) & 0x8000) and MOD_SHIFT) | ((GetKeyState(VK_LWIN) & 0x8000) and MOD_WIN) | ((GetKeyState(VK_RWIN) & 0x8000) and MOD_WIN)
            keycode = event.keycode
            if keycode in [ VK_SHIFT, VK_CONTROL, VK_MENU, VK_LWIN, VK_RWIN ]:
                return (0, modifiers)
            if not modifiers:
                if keycode == VK_ESCAPE:	# Esc = retain previous
                    return False
                elif keycode in [ VK_BACK, VK_DELETE, VK_CLEAR, VK_OEM_CLEAR ]:	# BkSp, Del, Clear = clear hotkey
                    return None
                elif keycode in [ VK_RETURN, VK_SPACE, VK_OEM_MINUS] or ord('A') <= keycode <= ord('Z'):	# don't allow keys needed for typing in System Map
                    winsound.MessageBeep()
                    return None
                elif keycode in [ VK_NUMLOCK, VK_SCROLL, VK_PROCESSKEY ] or VK_CAPITAL <= keycode <= VK_MODECHANGE:	# ignore unmodified mode switch keys
                    return (0, modifiers)
            # See if the keycode is usable and available
            if RegisterHotKey(None, 2, modifiers|MOD_NOREPEAT, keycode):
                UnregisterHotKey(None, 2)
                return (keycode, modifiers)
            else:
                winsound.MessageBeep()
                return None
        def display(self, keycode, modifiers):
            """Return a human-readable rendering of the hotkey combination."""
            text = ''
            if modifiers & MOD_WIN:     text += u'❖+'
            if modifiers & MOD_CONTROL: text += u'Ctrl+'
            if modifiers & MOD_ALT:     text += u'Alt+'
            if modifiers & MOD_SHIFT:   text += u'⇧+'
            if VK_NUMPAD0 <= keycode <= VK_DIVIDE: text += u'№'
            if not keycode:
                pass
            elif VK_F1 <= keycode <= VK_F24:
                text += 'F%d' % (keycode + 1 - VK_F1)
            elif keycode in HotkeyMgr.DISPLAY:	# specials
                text += HotkeyMgr.DISPLAY[keycode]
            else:
                c = MapVirtualKey(keycode, 2)	# printable ?
                if not c:		# oops not printable
                    text += u'⁈'
                elif c < 0x20:		# control keys
                    # NOTE(review): unichr is Python 2 only - under Python 3 this raises NameError.
                    text += unichr(c+0x40)
                else:
                    text += unichr(c).upper()
            return text
        def play_good(self):
            """Ask the worker thread to play the 'success' sound."""
            if self.thread:
                PostThreadMessage(self.thread.ident, WM_SND_GOOD, 0, 0)
        def play_bad(self):
            """Ask the worker thread to play the 'failure' sound."""
            if self.thread:
                PostThreadMessage(self.thread.ident, WM_SND_BAD, 0, 0)
else: # Linux
class HotkeyMgr:
def register(self, root, keycode, modifiers):
pass
def unregister(self):
pass
def play_good(self):
pass
def play_bad(self):
pass
# singleton
hotkeymgr = HotkeyMgr()
| [
"ctypes.byref",
"ctypes.POINTER",
"AppKit.NSEvent.addGlobalMonitorForEventsMatchingMask_handler_",
"objc.callbackFor",
"ctypes.create_unicode_buffer",
"AppKit.NSBeep",
"AppKit.NSApplication.sharedApplication",
"ctypes.sizeof",
"os.path.join",
"config.config.getint",
"AppKit.NSSound.alloc",
"wi... | [((5613, 5685), 'objc.callbackFor', 'objc.callbackFor', (['NSEvent.addGlobalMonitorForEventsMatchingMask_handler_'], {}), '(NSEvent.addGlobalMonitorForEventsMatchingMask_handler_)\n', (5629, 5685), False, 'import objc\n'), ((5006, 5095), 'AppKit.NSEvent.addGlobalMonitorForEventsMatchingMask_handler_', 'NSEvent.addGlobalMonitorForEventsMatchingMask_handler_', (['NSKeyDownMask', 'self._handler'], {}), '(NSKeyDownMask, self.\n _handler)\n', (5060, 5095), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((12021, 12042), 'ctypes.POINTER', 'ctypes.POINTER', (['INPUT'], {}), '(INPUT)\n', (12035, 12042), False, 'import ctypes\n'), ((1872, 1908), 'os.path.join', 'join', (['config.respath', '"""snd_good.wav"""'], {}), "(config.respath, 'snd_good.wav')\n", (1876, 1908), False, 'from os.path import dirname, join, normpath\n'), ((1997, 2032), 'os.path.join', 'join', (['config.respath', '"""snd_bad.wav"""'], {}), "(config.respath, 'snd_bad.wav')\n", (2001, 2032), False, 'from os.path import dirname, join, normpath\n'), ((2752, 2888), 'objc.selector', 'objc.selector', (['self.tkProcessKeyEvent'], {'selector': 'self.tkProcessKeyEvent_old.selector', 'signature': 'self.tkProcessKeyEvent_old.signature'}), '(self.tkProcessKeyEvent, selector=self.tkProcessKeyEvent_old.\n selector, signature=self.tkProcessKeyEvent_old.signature)\n', (2765, 2888), False, 'import objc\n'), ((2904, 2944), 'objc.classAddMethod', 'objc.classAddMethod', (['cls', 'sel', 'newmethod'], {}), '(cls, sel, newmethod)\n', (2923, 2944), False, 'import objc\n'), ((5981, 6011), 'config.config.getint', 'config.getint', (['"""hotkey_always"""'], {}), "('hotkey_always')\n", (5994, 6011), False, 'from 
config import config\n'), ((11112, 11143), 'ctypes.create_unicode_buffer', 'ctypes.create_unicode_buffer', (['l'], {}), '(l)\n', (11140, 11143), False, 'import ctypes\n'), ((13143, 13175), 'atexit.register', 'atexit.register', (['self.unregister'], {}), '(self.unregister)\n', (13158, 13175), False, 'import atexit\n'), ((1820, 1835), 'AppKit.NSSound.alloc', 'NSSound.alloc', ([], {}), '()\n', (1833, 1835), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((1945, 1960), 'AppKit.NSSound.alloc', 'NSSound.alloc', ([], {}), '()\n', (1958, 1960), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((11401, 11422), 'ctypes.POINTER', 'ctypes.POINTER', (['ULONG'], {}), '(ULONG)\n', (11415, 11422), False, 'import ctypes\n'), ((11571, 11592), 'ctypes.POINTER', 'ctypes.POINTER', (['ULONG'], {}), '(ULONG)\n', (11585, 11592), False, 'import ctypes\n'), ((13389, 13502), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.worker', 'name': '(\'Hotkey "%x:%x"\' % (keycode, modifiers))', 'args': '(keycode, modifiers)'}), '(target=self.worker, name=\'Hotkey "%x:%x"\' % (keycode,\n modifiers), args=(keycode, modifiers))\n', (13405, 13502), False, 'import threading\n'), ((17145, 17167), 'winsound.MessageBeep', 'winsound.MessageBeep', ([], {}), '()\n', (17165, 17167), False, 'import winsound\n'), ((2577, 2610), 'AppKit.NSApplication.sharedApplication', 
'NSApplication.sharedApplication', ([], {}), '()\n', (2608, 2610), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((2666, 2699), 'AppKit.NSApplication.sharedApplication', 'NSApplication.sharedApplication', ([], {}), '()\n', (2697, 2699), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((14249, 14266), 'ctypes.byref', 'ctypes.byref', (['msg'], {}), '(msg)\n', (14261, 14266), False, 'import ctypes\n'), ((6152, 6181), 'AppKit.NSWorkspace.sharedWorkspace', 'NSWorkspace.sharedWorkspace', ([], {}), '()\n', (6179, 6181), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((8066, 8074), 'AppKit.NSBeep', 'NSBeep', ([], {}), '()\n', (8072, 8074), False, 'from AppKit import NSApplication, NSWorkspace, NSBeep, NSSound, NSEvent, NSKeyDown, NSKeyUp, NSFlagsChanged, NSKeyDownMask, NSFlagsChangedMask, NSShiftKeyMask, NSControlKeyMask, NSAlternateKeyMask, NSCommandKeyMask, NSNumericPadKeyMask, NSDeviceIndependentModifierFlagsMask, NSF1FunctionKey, NSF35FunctionKey, NSDeleteFunctionKey, NSClearLineFunctionKey\n'), ((12996, 13032), 'os.path.join', 
'join', (['config.respath', '"""snd_good.wav"""'], {}), "(config.respath, 'snd_good.wav')\n", (13000, 13032), False, 'from os.path import dirname, join, normpath\n'), ((13080, 13115), 'os.path.join', 'join', (['config.respath', '"""snd_bad.wav"""'], {}), "(config.respath, 'snd_bad.wav')\n", (13084, 13115), False, 'from os.path import dirname, join, normpath\n'), ((14354, 14384), 'config.config.getint', 'config.getint', (['"""hotkey_always"""'], {}), "('hotkey_always')\n", (14367, 14384), False, 'from config import config\n'), ((14908, 14962), 'winsound.PlaySound', 'winsound.PlaySound', (['self.snd_good', 'winsound.SND_MEMORY'], {}), '(self.snd_good, winsound.SND_MEMORY)\n', (14926, 14962), False, 'import winsound\n'), ((14693, 14713), 'ctypes.sizeof', 'ctypes.sizeof', (['INPUT'], {}), '(INPUT)\n', (14706, 14713), False, 'import ctypes\n'), ((15045, 15098), 'winsound.PlaySound', 'winsound.PlaySound', (['self.snd_bad', 'winsound.SND_MEMORY'], {}), '(self.snd_bad, winsound.SND_MEMORY)\n', (15063, 15098), False, 'import winsound\n'), ((16644, 16666), 'winsound.MessageBeep', 'winsound.MessageBeep', ([], {}), '()\n', (16664, 16666), False, 'import winsound\n'), ((15173, 15190), 'ctypes.byref', 'ctypes.byref', (['msg'], {}), '(msg)\n', (15185, 15190), False, 'import ctypes\n'), ((15228, 15245), 'ctypes.byref', 'ctypes.byref', (['msg'], {}), '(msg)\n', (15240, 15245), False, 'import ctypes\n')] |
import torch.nn as nn
class Lstm(nn.Module):
    """Thin wrapper around a batch-first ``nn.LSTM`` that returns only the outputs.

    Args:
        input_size : input size
        hidden_size : hidden size
        num_layers : number of hidden layers. Default: 1
        dropout : dropout rate. Default: 0.5
        bidirectional : If True, becomes a bidirectional RNN. Default: False.
    """

    def __init__(self, input_size, hidden_size=100, num_layers=1, dropout=0, bidirectional=False):
        super(Lstm, self).__init__()
        # batch_first=True => inputs/outputs are (batch, seq, feature)
        self.lstm = nn.LSTM(
            input_size,
            hidden_size,
            num_layers,
            bias=True,
            batch_first=True,
            dropout=dropout,
            bidirectional=bidirectional,
        )

    def forward(self, x):
        # Discard the (h_n, c_n) state tuple; callers only need the sequence outputs.
        outputs, _state = self.lstm(x)
        return outputs
| [
"torch.nn.LSTM"
] | [((473, 596), 'torch.nn.LSTM', 'nn.LSTM', (['input_size', 'hidden_size', 'num_layers'], {'bias': '(True)', 'batch_first': '(True)', 'dropout': 'dropout', 'bidirectional': 'bidirectional'}), '(input_size, hidden_size, num_layers, bias=True, batch_first=True,\n dropout=dropout, bidirectional=bidirectional)\n', (480, 596), True, 'import torch.nn as nn\n')] |
from typing import List
import asyncio
import inspect
import logging
import uuid
import aio_pika
import aio_pika.exceptions
from .base import BaseRPC
from .common import RPCError, RPCHandler, RPCRequest, RPCResponse
class RPC(BaseRPC):
    """RabbitMQ RPC endpoint built on aio-pika.

    Acts as a server (``consume`` dispatching to ``handler``, optionally with a
    worker pool and request batching) and/or as a client (``call``).
    """

    HEARTBEAT_INTERVAL = 300  # seconds; passed to aio_pika.connect_robust

    def __init__(
        self,
        url: str = None,
        name: str = None,
        handler: RPCHandler = None,
        timeout: float = None,
        pool_size: int = 0,
        batch_size: int = 0,
        wait_for_batch: bool = False,
        max_jobs: int = 0,
        loop: asyncio.AbstractEventLoop = None,
    ):
        """Store configuration; no broker connection is made until consume()/call().

        url: AMQP broker URL (falls back to BaseRPC.URL).
        name: queue name served / called.
        handler: called with one positional argument per batched request;
            may return an awaitable.
        timeout: per-operation timeout in seconds (None = unlimited).
        pool_size: number of worker tasks; 0 = process each message inline.
        batch_size: max requests per handler invocation; 0 = unbounded drain.
        wait_for_batch: if True, a worker blocks until a full batch accumulates.
        max_jobs: channel prefetch count; 0 = unlimited.
        """
        self._loop = loop
        self._url = url or self.URL
        self._name = name
        self._handler = handler
        self._timeout = timeout
        self._pool_size = pool_size
        self._batch_size = batch_size
        self._wait_for_batch = wait_for_batch
        self._max_jobs = max_jobs
        self._mconn: aio_pika.RobustConnection = None
        self._mch: aio_pika.RobustChannel = None
        self._mq: aio_pika.RobustQueue = None
        self._queue = asyncio.Queue(loop=loop)  # staging queue feeding the worker pool
        self._pool = []
        self._consuming = False

    async def _run_pool(self):
        """Run pool_size workers concurrently until consumption stops."""
        self._pool = [self._run_worker() for _ in range(self._pool_size)]
        self._consuming = True
        await asyncio.gather(*self._pool, loop=self._loop)
        self._pool = []

    async def _run_worker(self):
        """Pull messages off the staging queue, group them into batches, process."""
        bs = self._batch_size
        q = self._queue
        while self._consuming:
            batch = [await q.get()]
            if self._wait_for_batch and bs > 0:
                # Block until a full batch has accumulated.
                while len(batch) < bs:
                    batch.append(await q.get())
            else:
                # Greedily drain whatever is already queued, up to the batch size.
                while (bs <= 0 or len(batch) < bs) and not q.empty():
                    batch.append(q.get_nowait())
            await asyncio.wait_for(
                asyncio.ensure_future(
                    self._process_batch(batch), loop=self._loop,
                ),
                self._timeout,
                loop=self._loop,
            )

    async def _process_single(self, message: aio_pika.IncomingMessage):
        """Process one incoming message directly (used when there is no pool)."""
        return await asyncio.wait_for(
            asyncio.ensure_future(
                self._process_batch([message]), loop=self._loop,
            ),
            self._timeout,
            loop=self._loop,
        )

    async def _process_batch(self, messages: List[aio_pika.IncomingMessage]):
        """Decode a batch, invoke the handler, and publish one reply per request."""
        try:
            reqs = []
            for m in messages:
                # logging.debug(f"message: correlation_id={m.correlation_id}")
                req: RPCRequest = self.decode_request(m.body)
                reqs.append(req)
            # logging.debug(f"handler: {self._handler}")
            results = self._handler(*reqs)
            if inspect.isawaitable(results):
                results = await results
        except KeyboardInterrupt:
            # Shutting down: requeue everything unprocessed.
            self._consuming = False
            for m in messages:
                await m.reject(requeue=True)
            return
        except Exception as e:
            if len(messages) == 1:
                # Single request failed: reply with an RPCError below.
                results = [RPCError()]
                logging.exception(e)
                await messages[0].reject()
            else:
                # Batch failed: retry each message individually so one bad
                # request doesn't poison the whole batch.
                for m in messages:
                    await asyncio.wait_for(
                        asyncio.ensure_future(
                            self._process_batch([m]), loop=self._loop,
                        ),
                        self._timeout,
                        loop=self._loop,
                    )
                return
        for message, result in zip(messages, results):
            # Reply goes to the caller's private queue, tagged with its correlation id.
            result = aio_pika.Message(
                self.encode_response(result),
                correlation_id=message.correlation_id,
                delivery_mode=message.delivery_mode,
            )
            await self._mch.default_exchange.publish(
                result, routing_key=message.reply_to, mandatory=False,
            )
            if not message.processed:
                await message.ack()

    async def consume(self):
        """Connect (retrying until the broker is up), declare the queue, consume.

        Returns the established robust connection.
        """
        while True:
            try:
                self._mconn = await aio_pika.connect_robust(
                    self._url,
                    loop=self._loop,
                    heartbeat_interval=self.HEARTBEAT_INTERVAL,
                )
                break
            except ConnectionError:
                # This case is not handled by aio-pika by some reasons
                logging.warning("wait for queue...")
                await asyncio.sleep(1, loop=self._loop)
        self._mch = await self._mconn.channel()
        await self._mch.set_qos(prefetch_count=self._max_jobs)
        self._mq = await self._mch.declare_queue(self._name)
        if self._pool_size > 0:
            # Feed raw messages into the staging queue; workers batch-process them.
            await asyncio.gather(
                self._run_pool(),
                self._mq.consume(self._queue.put),
                loop=self._loop,
            )
        else:
            await self._mq.consume(self._process_single)
        return self._mconn

    async def call(self, msg: RPCRequest) -> RPCResponse:
        """Client entry point: send one request and await its response (with timeout)."""
        return await asyncio.wait_for(
            asyncio.ensure_future(self._call(msg), loop=self._loop,),
            self._timeout,
            loop=self._loop,
        )

    async def _call(self, msg: RPCRequest) -> RPCResponse:
        """Publish the request with a private reply queue and await the reply."""
        if not self._mconn:
            self._mconn = await aio_pika.connect_robust(
                self._url,
                loop=self._loop,
                heartbeat_interval=self.HEARTBEAT_INTERVAL,
            )
        if not self._mch:
            self._mch: aio_pika.RobustChannel = await self._mconn.channel()
        # Exclusive, server-named queue for this single call's reply.
        mq: aio_pika.RobustQueue = await self._mch.declare_queue()
        try:
            correlation_id = str(uuid.uuid4())
            message = aio_pika.Message(
                self.encode_request(msg),
                correlation_id=correlation_id,
                reply_to=mq.name,
            )
            await self._mch.default_exchange.publish(
                message, routing_key=self._name,
            )
            # Take the first message that arrives on the reply queue.
            async with mq.iterator(no_ack=True) as it:
                async for message in it:
                    break
            if message.correlation_id != correlation_id:
                raise ValueError("wrong correlation_id")
            response: RPCResponse = self.decode_response(message.body)
            # logging.debug(f"response: {response}")
            if isinstance(response, RPCError):
                # Server-side failure: re-raise it in the caller's context.
                response.reraise()
            return response
        finally:
            # Always drop the per-call reply queue.
            await mq.delete(if_empty=False, if_unused=False)
| [
"inspect.isawaitable",
"asyncio.sleep",
"asyncio.Queue",
"logging.warning",
"uuid.uuid4",
"logging.exception",
"asyncio.gather",
"aio_pika.connect_robust"
] | [((1074, 1098), 'asyncio.Queue', 'asyncio.Queue', ([], {'loop': 'loop'}), '(loop=loop)\n', (1087, 1098), False, 'import asyncio\n'), ((1306, 1350), 'asyncio.gather', 'asyncio.gather', (['*self._pool'], {'loop': 'self._loop'}), '(*self._pool, loop=self._loop)\n', (1320, 1350), False, 'import asyncio\n'), ((2766, 2794), 'inspect.isawaitable', 'inspect.isawaitable', (['results'], {}), '(results)\n', (2785, 2794), False, 'import inspect\n'), ((5368, 5468), 'aio_pika.connect_robust', 'aio_pika.connect_robust', (['self._url'], {'loop': 'self._loop', 'heartbeat_interval': 'self.HEARTBEAT_INTERVAL'}), '(self._url, loop=self._loop, heartbeat_interval=self\n .HEARTBEAT_INTERVAL)\n', (5391, 5468), False, 'import aio_pika\n'), ((5742, 5754), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (5752, 5754), False, 'import uuid\n'), ((3122, 3142), 'logging.exception', 'logging.exception', (['e'], {}), '(e)\n', (3139, 3142), False, 'import logging\n'), ((4132, 4232), 'aio_pika.connect_robust', 'aio_pika.connect_robust', (['self._url'], {'loop': 'self._loop', 'heartbeat_interval': 'self.HEARTBEAT_INTERVAL'}), '(self._url, loop=self._loop, heartbeat_interval=self\n .HEARTBEAT_INTERVAL)\n', (4155, 4232), False, 'import aio_pika\n'), ((4452, 4488), 'logging.warning', 'logging.warning', (['"""wait for queue..."""'], {}), "('wait for queue...')\n", (4467, 4488), False, 'import logging\n'), ((4511, 4544), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {'loop': 'self._loop'}), '(1, loop=self._loop)\n', (4524, 4544), False, 'import asyncio\n')] |
# Generated by Django 3.0.5 on 2020-09-06 19:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: Russian verbose names, field tweaks, and a rebuilt
    unique-review constraint for the api app."""

    dependencies = [
        ('api', '0004_auto_20200906_1752'),
    ]

    operations = [
        # Human-readable (Russian) names for the admin UI.
        migrations.AlterModelOptions(
            name='category',
            options={'verbose_name': 'Категория', 'verbose_name_plural': 'Категории'},
        ),
        migrations.AlterModelOptions(
            name='genre',
            options={'verbose_name': 'Жанр', 'verbose_name_plural': 'Жанры'},
        ),
        migrations.AlterModelOptions(
            name='title',
            options={'ordering': ('-id',), 'verbose_name': 'Произведение', 'verbose_name_plural': 'Произведения'},
        ),
        # Dropped here and re-added below with explicit fields ('title', 'author').
        migrations.RemoveConstraint(
            model_name='review',
            name='unique_review',
        ),
        migrations.AlterField(
            model_name='category',
            name='name',
            field=models.CharField(max_length=20, verbose_name='Наименование'),
        ),
        migrations.AlterField(
            model_name='genre',
            name='name',
            field=models.CharField(max_length=20, verbose_name='Наименование'),
        ),
        migrations.AlterField(
            model_name='title',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='categories', to='api.Category', verbose_name='Категория'),
        ),
        migrations.AlterField(
            model_name='title',
            name='description',
            field=models.TextField(blank=True, null=True, verbose_name='Описание'),
        ),
        migrations.AlterField(
            model_name='title',
            name='name',
            field=models.CharField(max_length=100, verbose_name='Название'),
        ),
        # One review per (title, author) pair.
        migrations.AddConstraint(
            model_name='review',
            constraint=models.UniqueConstraint(fields=('title', 'author'), name='unique_review'),
        ),
    ]
| [
"django.db.models.UniqueConstraint",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.migrations.AlterModelOptions",
"django.db.migrations.RemoveConstraint",
"django.db.models.CharField"
] | [((264, 388), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""category"""', 'options': "{'verbose_name': 'Категория', 'verbose_name_plural': 'Категории'}"}), "(name='category', options={'verbose_name':\n 'Категория', 'verbose_name_plural': 'Категории'})\n", (292, 388), False, 'from django.db import migrations, models\n'), ((429, 541), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""genre"""', 'options': "{'verbose_name': 'Жанр', 'verbose_name_plural': 'Жанры'}"}), "(name='genre', options={'verbose_name': 'Жанр',\n 'verbose_name_plural': 'Жанры'})\n", (457, 541), False, 'from django.db import migrations, models\n'), ((582, 731), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""title"""', 'options': "{'ordering': ('-id',), 'verbose_name': 'Произведение',\n 'verbose_name_plural': 'Произведения'}"}), "(name='title', options={'ordering': ('-id',),\n 'verbose_name': 'Произведение', 'verbose_name_plural': 'Произведения'})\n", (610, 731), False, 'from django.db import migrations, models\n'), ((772, 842), 'django.db.migrations.RemoveConstraint', 'migrations.RemoveConstraint', ([], {'model_name': '"""review"""', 'name': '"""unique_review"""'}), "(model_name='review', name='unique_review')\n", (799, 842), False, 'from django.db import migrations, models\n'), ((988, 1048), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'verbose_name': '"""Наименование"""'}), "(max_length=20, verbose_name='Наименование')\n", (1004, 1048), False, 'from django.db import migrations, models\n'), ((1167, 1227), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'verbose_name': '"""Наименование"""'}), "(max_length=20, verbose_name='Наименование')\n", (1183, 1227), False, 'from django.db import migrations, models\n'), ((1350, 1517), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 
'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""categories"""', 'to': '"""api.Category"""', 'verbose_name': '"""Категория"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, related_name='categories', to='api.Category',\n verbose_name='Категория')\n", (1367, 1517), False, 'from django.db import migrations, models\n'), ((1634, 1698), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Описание"""'}), "(blank=True, null=True, verbose_name='Описание')\n", (1650, 1698), False, 'from django.db import migrations, models\n'), ((1817, 1874), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""Название"""'}), "(max_length=100, verbose_name='Название')\n", (1833, 1874), False, 'from django.db import migrations, models\n'), ((1977, 2050), 'django.db.models.UniqueConstraint', 'models.UniqueConstraint', ([], {'fields': "('title', 'author')", 'name': '"""unique_review"""'}), "(fields=('title', 'author'), name='unique_review')\n", (2000, 2050), False, 'from django.db import migrations, models\n')] |
""" Routers for weather_models.
"""
import logging
from fastapi import APIRouter, Depends
from app.auth import authentication_required, audit
from app.weather_models import ModelEnum
from app.schemas.weather_models import (
WeatherModelPredictionSummaryResponse,
WeatherStationsModelRunsPredictionsResponse)
from app.schemas.shared import WeatherDataRequest
from app.weather_models.fetch.summaries import fetch_model_prediction_summaries
from app.weather_models.fetch.predictions import (
fetch_model_run_predictions_by_station_code)
# Module-level logger named after this module for request tracing.
logger = logging.getLogger(__name__)
# All routes in this module require an authenticated caller and record an audit entry.
router = APIRouter(
    prefix="/weather_models",
    dependencies=[Depends(audit), Depends(authentication_required)],
)
@router.post('/{model}/predictions/summaries/',
             response_model=WeatherModelPredictionSummaryResponse)
async def get_model_prediction_summaries(
        model: ModelEnum, request: WeatherDataRequest):
    """ Returns a summary of predictions for a given model. """
    try:
        logger.info('/weather_models/%s/predictions/summaries/', model.name)
        fetched = await fetch_model_prediction_summaries(
            model, request.stations, request.time_of_interest)
    except Exception as exc:
        # Log with full traceback, then let FastAPI turn it into a 500.
        logger.critical(exc, exc_info=True)
        raise
    else:
        return WeatherModelPredictionSummaryResponse(summaries=fetched)
@router.post('/{model}/predictions/most_recent/',
             response_model=WeatherStationsModelRunsPredictionsResponse)
async def get_most_recent_model_values(
        model: ModelEnum, request: WeatherDataRequest):
    """ Returns the weather values for the last model prediction that was issued
    for the station before actual weather readings became available.
    """
    try:
        logger.info('/weather_models/%s/predictions/most_recent/', model.name)
        predictions = await fetch_model_run_predictions_by_station_code(
            model, request.stations, request.time_of_interest)
    except Exception as exc:
        # Log with full traceback, then let FastAPI turn it into a 500.
        logger.critical(exc, exc_info=True)
        raise
    else:
        return WeatherStationsModelRunsPredictionsResponse(
            stations=predictions)
| [
"logging.getLogger",
"app.schemas.weather_models.WeatherStationsModelRunsPredictionsResponse",
"app.weather_models.fetch.summaries.fetch_model_prediction_summaries",
"app.schemas.weather_models.WeatherModelPredictionSummaryResponse",
"fastapi.Depends",
"app.weather_models.fetch.predictions.fetch_model_run... | [((556, 583), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (573, 583), False, 'import logging\n'), ((1198, 1256), 'app.schemas.weather_models.WeatherModelPredictionSummaryResponse', 'WeatherModelPredictionSummaryResponse', ([], {'summaries': 'summaries'}), '(summaries=summaries)\n', (1235, 1256), False, 'from app.schemas.weather_models import WeatherModelPredictionSummaryResponse, WeatherStationsModelRunsPredictionsResponse\n'), ((1984, 2057), 'app.schemas.weather_models.WeatherStationsModelRunsPredictionsResponse', 'WeatherStationsModelRunsPredictionsResponse', ([], {'stations': 'station_predictions'}), '(stations=station_predictions)\n', (2027, 2057), False, 'from app.schemas.weather_models import WeatherModelPredictionSummaryResponse, WeatherStationsModelRunsPredictionsResponse\n'), ((653, 667), 'fastapi.Depends', 'Depends', (['audit'], {}), '(audit)\n', (660, 667), False, 'from fastapi import APIRouter, Depends\n'), ((669, 701), 'fastapi.Depends', 'Depends', (['authentication_required'], {}), '(authentication_required)\n', (676, 701), False, 'from fastapi import APIRouter, Depends\n'), ((1098, 1186), 'app.weather_models.fetch.summaries.fetch_model_prediction_summaries', 'fetch_model_prediction_summaries', (['model', 'request.stations', 'request.time_of_interest'], {}), '(model, request.stations, request.\n time_of_interest)\n', (1130, 1186), False, 'from app.weather_models.fetch.summaries import fetch_model_prediction_summaries\n'), ((1860, 1958), 'app.weather_models.fetch.predictions.fetch_model_run_predictions_by_station_code', 'fetch_model_run_predictions_by_station_code', (['model', 'request.stations', 'request.time_of_interest'], {}), '(model, request.stations,\n request.time_of_interest)\n', (1903, 1958), False, 'from app.weather_models.fetch.predictions import fetch_model_run_predictions_by_station_code\n')] |
# -*- coding: utf-8 -*-
# python3 make.py -loc "data/lines/1.csv" -width 3840 -height 2160 -overwrite
# python3 make.py -loc "data/lines/1.csv" -width 3840 -height 2160 -rtl -overwrite
# python3 combine.py
# python3 make.py -data "data/lines/A_LEF.csv" -width 3840 -height 2160 -loc "data/lines/C.csv" -img "img/A.png" -sw 0.1405 -tw 0.145 -overwrite
# python3 make.py -data "data/lines/A_LEF.csv" -width 3840 -height 2160 -loc "data/lines/C.csv" -img "img/A.png" -sw 0.1405 -tw 0.145 -rtl -overwrite
# python3 combine.py -in "output/subway_line_A.mp4,output/subway_line_A_rtl.mp4" -out "output/subway_line_A_loop.mp4"
# python3 make.py -data "data/lines/7.csv" -width 3840 -height 2160 -img "img/7.png" -sw 0.11725 -tw 0.135625 -reverse -overwrite
# python3 make.py -data "data/lines/7.csv" -width 3840 -height 2160 -img "img/7.png" -sw 0.11725 -tw 0.135625 -reverse -rtl -overwrite
# python3 combine.py -in "output/subway_line_7.mp4,output/subway_line_7_rtl.mp4" -out "output/subway_line_7_loop.mp4"
import argparse
import numpy as np
import os
from pprint import pprint
import sys
from lib import *
# input
parser = argparse.ArgumentParser()
parser.add_argument('-data', dest="DATA_FILE", default="data/lines/2.csv", help="Input csv file with preprocessed data")
parser.add_argument('-loc', dest="DATA_LOCAL_FILE", default="", help="Input csv file with preprocessed data of a local train that should 'fill in' stations in-between express trains")
parser.add_argument('-img', dest="IMAGE_FILE", default="img/2.png", help="Subway bullet image")
parser.add_argument('-instruments', dest="INSTRUMENTS_FILE", default="data/instruments.csv", help="Input csv file with instruments config")
parser.add_argument('-dir', dest="MEDIA_DIRECTORY", default="audio/", help="Input media directory")
parser.add_argument('-width', dest="WIDTH", default=1920, type=int, help="Output video width")
parser.add_argument('-height', dest="HEIGHT", default=1080, type=int, help="Output video height")
parser.add_argument('-pad0', dest="PAD_START", default=2000, type=int, help="Pad start in ms")
parser.add_argument('-pad1', dest="PAD_END", default=2000, type=int, help="Pad end in ms")
parser.add_argument('-fps', dest="FPS", default=30, type=int, help="Output video frames per second")
parser.add_argument('-outframe', dest="OUTPUT_FRAME", default="tmp/line_%s/frame.%s.png", help="Output frames pattern")
parser.add_argument('-aout', dest="AUDIO_OUTPUT_FILE", default="output/subway_line_%s.mp3", help="Output audio file")
parser.add_argument('-dout', dest="DATA_OUTPUT_FILE", default="output/subway_line_%s.csv", help="Output data file")
parser.add_argument('-out', dest="OUTPUT_FILE", default="output/subway_line_%s.mp4", help="Output media file")
parser.add_argument('-overwrite', dest="OVERWRITE", action="store_true", help="Overwrite existing files?")
parser.add_argument('-probe', dest="PROBE", action="store_true", help="Just view statistics?")
parser.add_argument('-reverse', dest="REVERSE", action="store_true", help="Reverse the line?")
parser.add_argument('-rtl', dest="RIGHT_TO_LEFT", action="store_true", help="Play from right to left?")
parser.add_argument('-ao', dest="AUDIO_ONLY", action="store_true", help="Only output audio?")
parser.add_argument('-vo', dest="VIDEO_ONLY", action="store_true", help="Only output video?")
parser.add_argument('-do', dest="DATA_ONLY", action="store_true", help="Only output data?")
parser.add_argument('-viz', dest="VISUALIZE_SEQUENCE", action="store_true", help="Output a visualization of the sequence")
parser.add_argument('-plot', dest="PLOT_SEQUENCE", action="store_true", help="Display a plot chart of the sequence")
parser.add_argument('-frame', dest="SINGLE_FRAME", default=-1, type=int, help="Output just a single frame")
# Music config
parser.add_argument('-db', dest="MASTER_DB", type=float, default=-2.4, help="Master +/- decibels to be applied to final audio")
parser.add_argument('-bpm', dest="BPM", type=int, default=120, help="Beats per minute, e.g. 60, 75, 100, 120, 150")
parser.add_argument('-mpb', dest="METERS_PER_BEAT", type=int, default=75, help="Higher numbers creates shorter songs")
parser.add_argument('-dpb', dest="DIVISIONS_PER_BEAT", type=int, default=4, help="e.g. 4 = quarter notes, 8 = eighth notes")
parser.add_argument('-pm', dest="PRICE_MULTIPLIER", type=float, default=1.3, help="Makes instruments more expensive; higher numbers = less instruments playing")
parser.add_argument('-vdur', dest="VARIANCE_MS", type=int, default=20, help="+/- milliseconds an instrument note should be off by to give it a little more 'natural' feel")
# Visual design config
parser.add_argument('-sw', dest="STATION_WIDTH", type=float, default=0.125, help="Minimum station width as a percent of the screen width; adjust this to change the overall visual speed")
parser.add_argument('-tw', dest="TEXT_WIDTH", type=float, default=0.15, help="Station text width as a percent of the screen width")
parser.add_argument('-cy', dest="CENTER_Y", type=float, default=0.475, help="Center y as a percent of screen height")
parser.add_argument('-bty', dest="BOROUGH_TEXT_Y", type=float, default=0.55, help="Borough text center y as a percent of screen height")
parser.add_argument('-sty', dest="STATION_TEXT_Y", type=float, default=0.375, help="Station text center y as a percent of screen height")
parser.add_argument('-cw', dest="CIRCLE_WIDTH", type=int, default=60, help="Circle radius in pixels assuming 1920x1080")
parser.add_argument('-lh', dest="LINE_HEIGHT", type=int, default=24, help="Height of horizontal line in pixels assuming 1920x1080")
parser.add_argument('-bh', dest="BOUNDARY_HEIGHT", type=int, default=166, help="Height of boundary line in pixels assuming 1920x1080")
parser.add_argument('-bw', dest="BOUNDARY_WIDTH", type=int, default=3, help="Width of boundary line in pixels assuming 1920x1080")
parser.add_argument('-bm', dest="BOUNDARY_MARGIN", type=int, default=48, help="Horizontal margin of boundary line in pixels assuming 1920x1080")
parser.add_argument('-mw', dest="MARKER_WIDTH", type=int, default=8, help="Height of horizontal line in pixels assuming 1920x1080")
parser.add_argument('-sts', dest="STATION_TEXT_SIZE", type=int, default=30, help="Station text size in pixels assuming 1920x1080")
parser.add_argument('-stm', dest="STATION_TEXT_MARGIN", type=int, default=20, help="Station text bottom margin in pixels assuming 1920x1080")
parser.add_argument('-slm', dest="STATION_LETTER_MARGIN", type=int, default=1, help="Space after each station text letter in pixels assuming 1920x1080")
parser.add_argument('-bts', dest="BOROUGH_TEXT_SIZE", type=int, default=24, help="Borough text size in pixels assuming 1920x1080")
parser.add_argument('-blm', dest="BOROUGH_LETTER_MARGIN", type=int, default=1, help="Space after each borough text letter in pixels assuming 1920x1080")
parser.add_argument('-bthresh', dest="BOROUGH_THRESHOLD", type=float, default=0.375, help="Minimum width available for displaying borough dividers")
parser.add_argument('-dw', dest="DIVIDER_WIDTH", type=int, default=28, help="Line divider in pixels assuming 1920x1080")
parser.add_argument('-dd', dest="DIVIDER_DISTANCE", type=float, default=0.333, help="Distance between dividers as a percent of screen width")
parser.add_argument('-dc', dest="DIVIDER_COLOR", default="#666666", help="Distance between dividers as a percent of screen width")
parser.add_argument('-bg', dest="BG_COLOR", default="#000000", help="Background color")
parser.add_argument('-tc', dest="TEXT_COLOR", default="#eeeeee", help="Text color")
parser.add_argument('-atc', dest="ALT_TEXT_COLOR", default="#aaaaaa", help="Secondary text color")
parser.add_argument('-mc', dest="MARKER_COLOR", default="#dddddd", help="Marker color")
parser.add_argument('-sfont', dest="STATION_FONT", default="fonts/OpenSans-Bold.ttf", help="Station font")
parser.add_argument('-bfont', dest="BOROUGH_FONT", default="fonts/OpenSans-SemiBold.ttf", help="Borough font")
parser.add_argument('-map', dest="MAP_IMAGE", default="img/nyc.png", help="Station font")
parser.add_argument('-mcoord', dest="MAP_COORDS", default=" -74.1261,40.9087,-73.7066,40.5743", help="Top left, bottom right point")
parser.add_argument('-mapm', dest="MAP_MARGIN", type=int, default=30, help="Margin of map in pixels assuming 1920x1080")
parser.add_argument('-mapw', dest="MAP_W", type=int, default=260, help="Map width in pixels assuming 1920x1080")
parser.add_argument('-mlw', dest="MAP_LINE_WIDTH", type=int, default=4, help="Map line in pixels assuming 1920x1080")
parser.add_argument('-mlc', dest="MAP_LINE_COLOR", default="#eeeeee", help="Secondary text color")
a = parser.parse_args()
if not a.AUDIO_ONLY:
import gizeh
from PIL import Image, ImageDraw, ImageFont
startTime = logTime()
# Calculations
BEAT_MS = roundInt(60.0 / a.BPM * 1000)
ROUND_TO_NEAREST = roundInt(1.0 * BEAT_MS / a.DIVISIONS_PER_BEAT)
basename = getBasename(a.DATA_FILE)
if "_" in basename:
basename, _ = tuple(basename.split("_"))
lineName = basename
if a.RIGHT_TO_LEFT:
basename += "_rtl"
# Read data
_, stations = readCsv(a.DATA_FILE)
_, instruments = readCsv(a.INSTRUMENTS_FILE)
lstations = []
if len(a.DATA_LOCAL_FILE):
_, lstations = readCsv(a.DATA_LOCAL_FILE)
# Parse instruments
instruments = prependAll(instruments, ("file", a.MEDIA_DIRECTORY))
instruments = [i for i in instruments if i["active"] > 0]
instruments = addIndices(instruments, "index")
for i, instrument in enumerate(instruments):
instruments[i]["from_beat_ms"] = roundInt(1.0 * BEAT_MS / instrument["from_tempo"])
instruments[i]["to_beat_ms"] = roundInt(1.0 * BEAT_MS / instrument["to_tempo"])
instruments[i]["interval_ms"] = roundInt(instrument["interval_phase"] * BEAT_MS)
instruments[i]["price"] = instrument["price"] * a.PRICE_MULTIPLIER
# Buy instruments based on a specified budget
def buyInstruments(station, instrumentsShelf):
    """Select the instruments a station can afford.

    Walks the shelf in order, skipping instruments whose income bracket
    does not contain the station's percentile, buying a copy of each
    in-bracket instrument that fits in the remaining budget, and stopping
    at the first in-bracket instrument the budget cannot cover.
    """
    remaining = station['income'] / 12.0
    rank = station['percentile']
    cart = []
    for item in instrumentsShelf:
        # Skip instruments outside this station's income bracket.
        if not (item['bracket_min'] <= rank < item['bracket_max']):
            continue
        # First unaffordable in-bracket item ends shopping entirely.
        if item['price'] >= remaining:
            break
        remaining -= item['price']
        cart.append(item.copy())
    return cart
# Add local stations in-between express ones
if len(lstations) > 0:
lbasename = getBasename(a.DATA_LOCAL_FILE)
estations = {}
addStations = []
for i, s in enumerate(stations):
lines = str(s["Daytime Routes"]).split(" ")
if lbasename in lines:
estations[s["Station ID"]] = s.copy()
sortByStart = None
currentLStations = []
for i, s in enumerate(lstations):
if s["Station ID"] in estations:
if sortByStart is not None and len(currentLStations) > 0:
step = 1.0 / (len(currentLStations) + 1)
for j, ls in enumerate(currentLStations):
currentLStations[j]["sortBy"] = sortByStart + (j+1) * step
currentLStations[j]["isLocal"] = 1
addStations += currentLStations
currentLStations = []
sortByStart = estations[s["Station ID"]]["sortBy"]
elif sortByStart is not None:
currentLStations.append(s)
stations += addStations
# stations = sorted(stations, key=lambda d: d["sortBy"])
# for s in stations:
# if "isLocal" in s:
# print(" --"+s["Stop Name"])
# else:
# print(s["Stop Name"])
# sys.exit()
# Parse stations
stations = sorted(stations, key=lambda d: d["income"])
stations = addNormalizedValues(stations, "income", "nIncome")
stations = addIndices(stations, "incomeIndex")
isReverse = a.REVERSE
if a.RIGHT_TO_LEFT:
isReverse = (not isReverse)
stations = sorted(stations, key=lambda d: d["sortBy"], reverse=isReverse)
stations = addIndices(stations, "index")
stationCount = len(stations)
ms = a.PAD_START
for i, station in enumerate(stations):
stations[i]["percentile"] = 1.0 * station["incomeIndex"] / stationCount * 100
# stations[i]["percentile"] = min(99.999, 1.0 * station["nIncome"] * 100)
stations[i]["instruments"] = buyInstruments(stations[i], instruments)
# print(len(stations[i]["instruments"]))
distance = beats = duration = 0
if i < stationCount-1:
distance = earthDistance(stations[i+1]['GTFS Latitude'], stations[i+1]['GTFS Longitude'], station['GTFS Latitude'], station['GTFS Longitude'])
beats = roundInt(1.0 * distance / a.METERS_PER_BEAT)
duration = beats * BEAT_MS
boroughNext = stations[i+1]["Borough"]
stations[i]["distance"] = distance
stations[i]["beats"] = beats
stations[i]["duration"] = duration
stations[i]["vduration"] = duration
stations[i]["BoroughNext"] = boroughNext
stations[i]["ms"] = ms
stations[i]["lineName"] = lineName
ms += duration
if a.PROBE:
print("===========================")
for s in stations:
if "isLocal" in s:
print(formatSeconds(roundInt(s["ms"]/1000.0)) + " --- " + s["Stop Name"] + " (LOCAL) - $" + formatNumber(s["income"]))
else:
print(formatSeconds(roundInt(s["ms"]/1000.0)) + " - " + s["Stop Name"] + " - $" + formatNumber(s["income"]))
print("===========================")
else:
dataFilename = a.DATA_OUTPUT_FILE % basename
makeDirectories([dataFilename])
writeCsv(dataFilename, stations, headings=["ms", "Stop Name", "isLocal", "income", "Borough", "lineName"])
textFilename = replaceFileExtension(dataFilename, ".txt")
text = f'Subway Inequality: {basename} train ({stations[-1]["Stop Name"]} Bound)\n\n'
text += f'This song above mimics a ride along a subway line (the {basename} train), where the quantity and power of the instruments at any given moment in the song corresponds to the median household income of the neighborhood that you are passing through. The goal is to have the dramatic contrasts of the song echo the dramatic contrast of income in the city.\n\n'
for s in stations:
if "isLocal" not in s:
text += f'{formatSeconds(roundInt(s["ms"]/1000.0))} - {s["Stop Name"]} - ${formatNumber(s["income"])} household income\n'
writeTextFile(textFilename, text)
if a.DATA_ONLY:
sys.exit()
# Calculate ranges
distances = [s["distance"] for s in stations if s["distance"] > 0]
totalDistance = sum(distances)
minDistance, maxDistance = (min(distances), max(distances))
durations = [s["duration"] for s in stations if s["duration"] > 0]
totalMs = sum(durations)
minDuration, maxDuration = (min(durations), max(durations))
totalBeats = sum([s["beats"] for s in stations])
totalSeconds = roundInt(totalMs / 1000.0)
secondsPerStation = roundInt(1.0*totalSeconds/stationCount)
print('Total distance in meters: %s' % roundInt(totalDistance))
print('Distance range in meters: [%s, %s]' % (roundInt(minDistance), roundInt(maxDistance)))
print('Average beats per station: %s' % roundInt(1.0*totalBeats/stationCount))
print('Average time per station: %s' % formatSeconds(secondsPerStation))
print('Main sequence beats: %s' % totalBeats)
# Retrieve gain based on current beat
def getVolume(instrument, beat):
    """Return the instrument's gain at the given beat.

    Volume oscillates between the instrument's from/to volumes over its
    configured gain phase, eased with a sine curve.
    """
    phase_beats = instrument['gain_phase']
    progress = easeSin(float(beat % phase_beats) / phase_beats)
    return lerp((instrument['from_volume'], instrument['to_volume']), progress)
# Get beat duration in ms based on current point in time
def getBeatMs(instrument, beat, round_to):
    """Return this instrument's beat duration in ms at the given beat.

    Interpolates between the from/to tempos over the instrument's tempo
    phase (sine-eased) and snaps the result to the nearest *round_to* ms.
    """
    phase_beats = instrument['tempo_phase']
    progress = easeSin(float(beat % phase_beats) / phase_beats)
    interpolated = lerp((instrument['from_beat_ms'], instrument['to_beat_ms']), progress)
    return roundInt(roundToNearest(interpolated, round_to))
# Return if the instrument should be played in the given interval
def isValidInterval(instrument, elapsed_ms, start_ms, end_ms, minIntervalDuration=3000):
    """Return True if the instrument should play at elapsed_ms.

    Base rule: the instrument plays when its interval bucket
    (floor(elapsed_ms / interval_ms) mod interval) equals interval_offset.
    For spans longer than 3x minIntervalDuration, the result near the span's
    start/end is snapped to match the value just inside the boundary, so an
    instrument never plays (or rests) for a stretch shorter than
    minIntervalDuration at either edge. Recursion depth is bounded: the
    recursive calls use a probe point at least minIntervalDuration inside
    the span, which short-circuits on the base rule.
    """
    interval_ms = instrument['interval_ms']
    interval = instrument['interval']
    interval_offset = instrument['interval_offset']
    # base on/off decision from the interval schedule
    isValid = (int(math.floor(1.0*elapsed_ms/interval_ms)) % interval == interval_offset)
    # return isValid
    # span too short for edge adjustment: use the raw schedule
    if end_ms - start_ms <= minIntervalDuration * 3:
        return isValid
    # check to see if we're at the start and not long enough
    if isValid and elapsed_ms < (start_ms+minIntervalDuration) and not isValidInterval(instrument, start_ms+minIntervalDuration, start_ms, end_ms, minIntervalDuration):
        isValid = False
    # make start interval earlier if necessary
    elif not isValid and elapsed_ms < (start_ms+minIntervalDuration) and isValidInterval(instrument, start_ms+minIntervalDuration, start_ms, end_ms, minIntervalDuration):
        isValid = True
    # check to see if we're at the end and not long enough
    elif isValid and elapsed_ms > (end_ms-minIntervalDuration) and not isValidInterval(instrument, end_ms-minIntervalDuration, start_ms, end_ms, minIntervalDuration):
        isValid = False
    # make start interval earlier if necessary
    elif not isValid and elapsed_ms > (end_ms-minIntervalDuration) and isValidInterval(instrument, end_ms-minIntervalDuration, start_ms, end_ms, minIntervalDuration):
        isValid = True
    return isValid
# Add beats to sequence
def addBeatsToSequence(sequence, instrument, duration, ms, beat_ms, round_to, pad_start):
    """Append this instrument's note events for one time span to *sequence*.

    Steps through [ms, ms+duration) at the instrument's (tempo-phased) beat
    length, appending one event dict per beat that falls in a valid interval.
    Events carry the instrument index, sample filename, phased volume, and
    an absolute ms offset (pad_start + elapsed + small deterministic jitter).
    Mutates and returns *sequence*. Reads the module-level args object `a`
    for VARIANCE_MS.
    """
    msStart = ms
    msEnd = ms + duration
    # shift the first beat by the instrument's tempo offset
    offset_ms = int(instrument['tempo_offset'] * beat_ms)
    ms += offset_ms
    previous_ms = int(ms)
    from_beat_ms = instrument['from_beat_ms']
    to_beat_ms = instrument['to_beat_ms']
    # shortest possible beat; loop stops when less than this remains
    min_ms = min(from_beat_ms, to_beat_ms)
    remaining_duration = int(duration)
    elapsed_duration = offset_ms
    # bracket-limited instruments phase against absolute time, not span-local time
    continue_from_prev = (instrument['bracket_min'] > 0 or instrument['bracket_max'] < 100)
    # deterministic per-instrument jitter source (seeded by index)
    rn = pseudoRandom(instrument["index"]+1)
    while remaining_duration >= min_ms:
        elapsed_ms = int(ms)
        elapsed_beat = int((elapsed_ms-previous_ms) / beat_ms)
        # continue beat from previous
        if continue_from_prev:
            elapsed_beat = int(elapsed_ms / beat_ms)
        this_beat_ms = getBeatMs(instrument, elapsed_beat, round_to)
        # add to sequence if in valid interval
        if isValidInterval(instrument, elapsed_ms, msStart, msEnd):
            # jitter in [-VARIANCE_MS, +VARIANCE_MS] for a more natural feel
            variance = roundInt(rn * a.VARIANCE_MS * 2 - a.VARIANCE_MS)
            sequence.append({
                'instrumentIndex': instrument["index"],
                'filename': instrument["file"],
                'volume': getVolume(instrument, elapsed_beat),
                'ms': max([pad_start + elapsed_ms + variance, 0])
            })
        remaining_duration -= this_beat_ms
        elapsed_duration += this_beat_ms
        ms += this_beat_ms
    return sequence
# Build main sequence
sequence = []
for i, instrument in enumerate(instruments):
ms = 0
stationQueueDur = 0
# Each station in stations
for station in stations:
# Check if instrument is in this station
instrumentIndex = findInList(station['instruments'], 'index', instrument['index'])
# Instrument not here, just add the station duration and continue
if instrumentIndex < 0 and stationQueueDur > 0:
sequence = addBeatsToSequence(sequence, instrument, stationQueueDur, ms, BEAT_MS, ROUND_TO_NEAREST, a.PAD_START)
ms += stationQueueDur + station['duration']
stationQueueDur = 0
elif instrumentIndex < 0:
ms += station['duration']
else:
stationQueueDur += station['duration']
if stationQueueDur > 0:
sequence = addBeatsToSequence(sequence, instrument, stationQueueDur, ms, BEAT_MS, ROUND_TO_NEAREST, a.PAD_START)
sequenceDuration = max([s["ms"] for s in sequence]) + a.PAD_END
# Now start the video frame logic
# Calculations
aa = vars(a)
aa["STATION_WIDTH"] = roundInt(1.0 * a.WIDTH * a.STATION_WIDTH)
aa["TEXT_WIDTH"] = roundInt(1.0 * a.WIDTH * a.TEXT_WIDTH)
aa["CENTER_Y"] = roundInt(1.0 * a.HEIGHT * a.CENTER_Y)
aa["BOROUGH_TEXT_Y"] = roundInt(1.0 * a.HEIGHT * a.BOROUGH_TEXT_Y)
aa["STATION_TEXT_Y"] = roundInt(1.0 * a.HEIGHT * a.STATION_TEXT_Y)
RESOLUTION = a.WIDTH / 1920.0
aa["CIRCLE_WIDTH"] = roundInt(a.CIRCLE_WIDTH * RESOLUTION)
aa["LINE_HEIGHT"] = roundInt(a.LINE_HEIGHT * RESOLUTION)
aa["BOUNDARY_MARGIN"] = roundInt(a.BOUNDARY_MARGIN * RESOLUTION)
aa["BOUNDARY_HEIGHT"] = roundInt(a.BOUNDARY_HEIGHT * RESOLUTION)
aa["BOUNDARY_WIDTH"] = roundInt(a.BOUNDARY_WIDTH * RESOLUTION)
aa["BOROUGH_THRESHOLD"] = roundInt(1.0 * a.WIDTH * a.BOROUGH_THRESHOLD)
aa["MARKER_WIDTH"] = roundInt(a.MARKER_WIDTH * RESOLUTION)
aa["STATION_TEXT_SIZE"] = roundInt(a.STATION_TEXT_SIZE * RESOLUTION)
aa["STATION_TEXT_MARGIN"] = roundInt(a.STATION_TEXT_MARGIN * RESOLUTION)
aa["STATION_LETTER_MARGIN"] = roundInt(a.STATION_LETTER_MARGIN * RESOLUTION)
aa["BOROUGH_TEXT_SIZE"] = roundInt(a.BOROUGH_TEXT_SIZE * RESOLUTION)
aa["BOROUGH_LETTER_MARGIN"] = roundInt(a.BOROUGH_LETTER_MARGIN * RESOLUTION)
aa["MAP_COORDS"] = tuple([float(c) for c in a.MAP_COORDS.strip().split(",")])
aa["MAP_MARGIN"] = roundInt(a.MAP_MARGIN * RESOLUTION)
aa["MAP_W"] = roundInt(a.MAP_W * RESOLUTION)
aa["MAP_LINE_WIDTH"] = roundInt(a.MAP_LINE_WIDTH * RESOLUTION)
aa["DIVIDER_WIDTH"] = roundInt(a.DIVIDER_WIDTH * RESOLUTION)
aa["DIVIDER_DISTANCE"] = roundInt(1.0 * a.WIDTH * a.DIVIDER_DISTANCE)
# Add borough names
boroughNames = {
"Q": "Queens",
"M": "Manhattan",
"Bk": "Brooklyn",
"Bx": "Bronx",
"SI": "Staten Island"
}
for i, station in enumerate(stations):
stations[i]["borough"] = boroughNames[station["Borough"]]
x = 0
mlon0, mlat0, mlon1, mlat1 = a.MAP_COORDS
vstations = stations[:]
# If going right to left, reverse the stations visually
if a.RIGHT_TO_LEFT:
vstations = list(reversed(vstations))
for i, station in enumerate(vstations):
if i < stationCount-1:
vstations[i]["vduration"] = vstations[i+1]["duration"]
else:
vstations[i]["vduration"] = 0
for i, station in enumerate(vstations):
boroughNext = station["borough"]
if i < stationCount-1:
boroughNext = vstations[i+1]["borough"]
vstations[i]["boroughNext"] = boroughNext
vstations[i]["width"] = roundInt(1.0 * station["vduration"] / minDuration * a.STATION_WIDTH)
vstations[i]["x"] = x
vstations[i]["x0"] = x - a.TEXT_WIDTH / 2
vstations[i]["x1"] = x + a.TEXT_WIDTH / 2
vstations[i]["mapNx"] = norm(station["GTFS Longitude"], (mlon0, mlon1))
vstations[i]["mapNy"] = norm(station["GTFS Latitude"], (mlat0, mlat1))
x += vstations[i]["width"]
totalW = x
pxPerMs = 1.0 * totalW / totalMs
pxPerS = pxPerMs * 1000.0
pxPerFrame = pxPerS / a.FPS
print("Total width: %s px" % totalW)
print("Pixels per second: %s" % pxPerS)
print("Pixels per frame: %s" % pxPerFrame)
totalFrames = msToFrame(sequenceDuration, a.FPS)
totalFrames = int(ceilToNearest(totalFrames, a.FPS))
print("Total frames: %s" % totalFrames)
sequenceDuration = frameToMs(totalFrames, a.FPS)
def drawFrame(filename, ms, xOffset, stations, totalW, bulletImg, mapImg, fontStation, fontBorough, a):
    """Render one video frame to *filename*.

    Draws, in order: the horizontal line with station bullets/labels and
    borough dividers (scrolled by xOffset), an inset map with the line's
    progress traced up to time *ms*, and a vertical center marker.
    Skips rendering when the file already exists and OVERWRITE is off.
    *a* is the parsed-args namespace carrying all layout constants.
    """
    if not a.OVERWRITE and os.path.isfile(filename):
        return
    im = Image.new('RGB', (a.WIDTH, a.HEIGHT), a.BG_COLOR)
    draw = ImageDraw.Draw(im, 'RGBA')
    cx = roundInt(a.WIDTH * 0.5)
    cy = a.CENTER_Y
    stationCount = len(stations)
    # horizontal extent of the whole scrolled line in frame coordinates
    leftX = xOffset
    rightX = leftX + totalW
    # draw the center line
    x0 = 0 if leftX < 0 else leftX
    x1 = a.WIDTH if rightX > a.WIDTH else rightX
    y0 = cy - a.LINE_HEIGHT/2
    y1 = y0 + a.LINE_HEIGHT
    draw.rectangle([(x0, y0), (x1, y1)], fill=a.ALT_TEXT_COLOR)
    for i, s in enumerate(stations):
        # check to see if we should draw borough divider
        if s["borough"] != s["boroughNext"]:
            deltaBx = abs(stations[i+1]["x"]-s["x"])
            # don't draw boundary in tight space
            if deltaBx > a.BOROUGH_THRESHOLD:
                # divider sits midway between the two stations
                bdx = roundInt(xOffset + (s["x"] + stations[i+1]["x"]) * 0.5)
                bdx0 = bdx - a.WIDTH/2
                bdx1 = bdx + a.WIDTH/2
                if 0 <= bdx0 <= a.WIDTH or 0 <= bdx1 <= a.WIDTH:
                    dx0 = bdx - a.BOUNDARY_WIDTH/2
                    dx1 = dx0 + a.BOUNDARY_WIDTH
                    dy0 = cy
                    dy1 = dy0 + a.BOUNDARY_HEIGHT
                    draw.rectangle([(dx0, dy0), (dx1, dy1)], fill=a.ALT_TEXT_COLOR)
                    # borough names on either side of the boundary bar
                    blw, blh = getLineSize(fontBorough, s["borough"], a.BOROUGH_LETTER_MARGIN)
                    bx = dx0 - a.BOUNDARY_MARGIN - blw/2
                    drawTextToImage(draw, s["borough"], fontBorough, a.BOROUGH_LETTER_MARGIN, bx, a.BOROUGH_TEXT_Y, a.ALT_TEXT_COLOR)
                    blw, blh = getLineSize(fontBorough, s["boroughNext"], a.BOROUGH_LETTER_MARGIN)
                    bx = dx1 + a.BOUNDARY_MARGIN + blw/2
                    drawTextToImage(draw, s["boroughNext"], fontBorough, a.BOROUGH_LETTER_MARGIN, bx, a.BOROUGH_TEXT_Y, a.ALT_TEXT_COLOR)
        sx = xOffset + s["x"]
        sy = a.CENTER_Y
        # draw dividers
        if i < stationCount-1:
            dividers = 0
            dividerDistance = 0
            nextSx = xOffset + stations[i+1]["x"]
            deltaSx = abs(nextSx - sx)
            if deltaSx >= a.DIVIDER_DISTANCE * 2:
                dividers = int(1.0 * deltaSx / a.DIVIDER_DISTANCE) - 1
            if dividers > 0:
                dividerDistance = roundInt(1.0 * deltaSx / (dividers+1))
            for di in range(dividers):
                divX = sx + (di+1) * dividerDistance
                divX0 = divX - a.DIVIDER_WIDTH/2
                divX1 = divX0 + a.DIVIDER_WIDTH
                divY0 = y0
                divY1 = y1
                if divX1 > 0:
                    draw.rectangle([(divX0, divY0), (divX1, divY1)], fill=a.DIVIDER_COLOR)
        # check if station is visible
        sx0 = xOffset + s["x0"]
        sx1 = xOffset + s["x1"]
        if not (0 <= sx0 <= a.WIDTH or 0 <= sx1 <= a.WIDTH):
            continue
        # just draw empty bullet for local stops
        if "isLocal" in s:
            brad = roundInt(a.CIRCLE_WIDTH/3)
            bx = sx
            by = sy
            # Draw line using gizeh so it will be smooth
            bsurface = gizeh.Surface(width=a.WIDTH, height=a.HEIGHT)
            circle = gizeh.circle(r=brad, xy=[bx, by], fill=hexToRGB(a.DIVIDER_COLOR, toFloat=True))
            circle.draw(bsurface)
            bpixels = bsurface.get_npimage(transparent=True) # should be shape: h, w, rgba
            circleImg = Image.fromarray(bpixels, mode="RGBA")
            im.paste(circleImg, (0, 0), circleImg)
            continue
        # draw borough text
        bx = sx
        by = a.BOROUGH_TEXT_Y
        drawTextToImage(draw, s["borough"], fontBorough, a.BOROUGH_LETTER_MARGIN, bx, by, a.ALT_TEXT_COLOR)
        # draw bullet
        bx = roundInt(sx - a.CIRCLE_WIDTH/2)
        by = roundInt(sy - a.CIRCLE_WIDTH/2)
        im.paste(bulletImg, (bx, by), bulletImg)
        # draw station text
        stx = sx
        sty = a.STATION_TEXT_Y
        slines = getMultilines(s["Stop Name"], fontStation, a.TEXT_WIDTH, a.STATION_LETTER_MARGIN)
        drawTextLinesToImage(draw, slines, fontStation, a.STATION_TEXT_MARGIN, a.STATION_LETTER_MARGIN, stx, sty, a.TEXT_COLOR)
    # draw the map
    mw, mh = mapImg.size
    mx = a.MAP_MARGIN
    my = a.HEIGHT - mh - a.MAP_MARGIN
    im.paste(mapImg, (mx, my))
    lineColor = "#"+str(stations[0]["color"])
    # points: traveled portion of the route; allPoints: the full route
    points = []
    allPoints = []
    mstations = stations[:]
    if a.RIGHT_TO_LEFT:
        mstations = list(reversed(mstations))
    for i, s in enumerate(mstations):
        sms0 = s["ms"]
        sms1 = sms0 + s["duration"]
        # print("%s, %s" % (sms0, sms1))
        mprogress = norm(ms, (sms0, sms1), limit=True) if s["duration"] > 0 else 1.0
        lx = lerp((mx, mx+mw), s["mapNx"])
        ly = lerp((my, my+mh), s["mapNy"])
        if ms >= sms0:
            points.append((lx, ly))
            # mid-segment: interpolate toward the next station
            if 0.0 < mprogress < 1.0 and i < stationCount-1 and s["duration"] > 0:
                lx1 = lerp((mx, mx+mw), mstations[i+1]["mapNx"])
                ly1 = lerp((my, my+mh), mstations[i+1]["mapNy"])
                lx2 = lerp((lx, lx1), mprogress)
                ly2 = lerp((ly, ly1), mprogress)
                points.append((lx2, ly2))
        allPoints.append((lx, ly))
    # Draw line using gizeh so it will be smooth
    surface = gizeh.Surface(width=a.WIDTH, height=a.HEIGHT)
    line = gizeh.polyline(points=allPoints, stroke_width=max(1, a.MAP_LINE_WIDTH-1), stroke=hexToRGB(a.MAP_LINE_COLOR, toFloat=True))
    line.draw(surface)
    if len(points) > 1:
        sline = gizeh.polyline(points=points, stroke_width=a.MAP_LINE_WIDTH, stroke=hexToRGB(lineColor, toFloat=True))
        sline.draw(surface)
    spixels = surface.get_npimage(transparent=True) # should be shape: h, w, rgba
    lineImage = Image.fromarray(spixels, mode="RGBA")
    im.paste(lineImage, (0, 0), lineImage)
    # draw the marker
    x0 = cx - a.MARKER_WIDTH/2
    x1 = x0 + a.MARKER_WIDTH
    y0 = 0
    y1 = a.HEIGHT
    draw.rectangle([(x0, y0), (x1, y1)], fill=(255,255,255,100))
    del draw
    im.save(filename)
    # print("Saved %s" % filename)
def getEasedFrames(easeFrameCount, stationFrameCount, pxPerFrame):
fromFrameCount = int(min(easeFrameCount, stationFrameCount) / 2)
fromPx = fromFrameCount * pxPerFrame
toFrameCount = easeFrameCount + fromFrameCount # 'fromPx' will be stretched into 'toFrameCount' frames
# easedPoints = [easeIn(n) * pxPerFrame for n in np.linspace(0, 1.0, num=toFrameCount)]
easedPoints = [n * pxPerFrame for n in np.linspace(0, 1.0, num=toFrameCount)]
buckets = [0 for n in range(toFrameCount)]
pxPool = fromPx
for i in range(toFrameCount):
index = toFrameCount-1-i
bucketPx = buckets[index]
addPx = easedPoints[index]
if addPx > pxPool:
addPx = pxPool
buckets[index] = addPx
pxPool -= addPx
if pxPool <= 0:
break
if pxPool > 0:
incr = 0.01
while pxPool > 0:
for j in range(toFrameCount):
index = toFrameCount-1-j
bucketPx = buckets[index]
if (bucketPx+incr) <= pxPerFrame:
buckets[index] += incr
pxPool -= incr
# import matplotlib.pyplot as plt
# plt.plot(buckets)
# plt.show()
# sys.exit()
# print("%s ~ %s" % (fromPx, sum(buckets)))
return buckets
audioFilename = a.AUDIO_OUTPUT_FILE % basename
print("%s steps in sequence" % len(sequence))
print('Total sequence time: %s' % formatSeconds(sequenceDuration/1000.0))
if a.VISUALIZE_SEQUENCE:
instrumentsCount = len(instruments)
labelW = 200
unitH = 10
unitW = 10
marginH = 2
imgH = (unitH+marginH) * instrumentsCount
imgW = totalSeconds * unitW + labelW
dfont = ImageFont.truetype(font="fonts/OpenSans-Regular.ttf", size=10)
print("Making viz %s x %s" % (imgW, imgH))
im = Image.new('RGB', (imgW, imgH), "#000000")
draw = ImageDraw.Draw(im, 'RGB')
for i, ins in enumerate(instruments):
y = i * (unitH + marginH)
draw.text((2, y), ins["name"], fill="#FFFFFF", font=dfont)
steps = [step for step in sequence if step["instrumentIndex"]==ins["index"]]
for step in steps:
sx = roundInt((step["ms"] - a.PAD_START) / 1000.0 / totalSeconds * (imgW-labelW) + labelW)
draw.rectangle([(sx, y), (sx+3, y+unitH)], fill=(roundInt(255*step["volume"]),0,0))
if i > 0:
draw.line([(0, y-1), (imgW, y-1)], fill="#cccccc", width=1)
printProgress(i+1, instrumentsCount)
im.save("output/viz.png")
sys.exit()
if a.PLOT_SEQUENCE:
import matplotlib.pyplot as plt
xs = [s['ms']/1000.0 for s in stations]
ys = [s['income'] for s in stations]
plt.plot(xs, ys)
plt.show()
sys.exit()
if a.PROBE:
sys.exit()
makeDirectories([a.AUDIO_OUTPUT_FILE, a.OUTPUT_FILE])
if not a.AUDIO_ONLY:
bulletImg = Image.open(a.IMAGE_FILE)
bulletImg = bulletImg.resize((a.CIRCLE_WIDTH, a.CIRCLE_WIDTH), resample=Image.LANCZOS)
mapImg = Image.open(a.MAP_IMAGE)
mapH = roundInt((1.0 * mapImg.size[1] / mapImg.size[0]) * a.MAP_W)
mapImg = mapImg.resize((a.MAP_W, mapH), resample=Image.LANCZOS)
fontStation = ImageFont.truetype(font=a.STATION_FONT, size=a.STATION_TEXT_SIZE, layout_engine=ImageFont.LAYOUT_RAQM)
fontBorough = ImageFont.truetype(font=a.BOROUGH_FONT, size=a.BOROUGH_TEXT_SIZE, layout_engine=ImageFont.LAYOUT_RAQM)
makeDirectories([a.OUTPUT_FRAME % (basename, "*")])
if a.OVERWRITE and a.SINGLE_FRAME < 1:
removeFiles(a.OUTPUT_FRAME % (basename, "*"))
# calculations for easing in/out
padFrameInCount = msToFrame(a.PAD_START, a.FPS)
station0FrameCount = msToFrame(stations[0]["duration"], a.FPS)
easeInFrames = getEasedFrames(padFrameInCount, station0FrameCount, pxPerFrame)
easeInFrameCount = len(easeInFrames)
padFrameOutCount = msToFrame(a.PAD_END, a.FPS)
station1FrameCount = msToFrame(stations[-2]["duration"], a.FPS)
easeOutFrames = getEasedFrames(padFrameOutCount, station1FrameCount, pxPerFrame)
# easeOutFrames = list(reversed(easeOutFrames))
easeOutFrameCount = len(easeOutFrames)
easeOutPixels = roundInt(sum(easeOutFrames))
print("Making video frame sequence...")
videoFrames = []
centerX = roundInt(a.WIDTH * 0.5)
xOffset = centerX
direction = -1
if a.RIGHT_TO_LEFT:
direction = 1
xOffset -= totalW
xOffsetF = 1.0 * xOffset
target = centerX-totalW if direction < 0 else centerX
for f in range(totalFrames):
frame = f + 1
ms = frameToMs(frame, a.FPS)
frameFilename = a.OUTPUT_FRAME % (basename, zeroPad(frame, totalFrames))
if a.SINGLE_FRAME < 1 or a.SINGLE_FRAME == frame:
if a.SINGLE_FRAME > 0:
frameFilename = "output/frame.png"
drawFrame(frameFilename, ms, xOffset, vstations, totalW, bulletImg, mapImg, fontStation, fontBorough, a)
if a.SINGLE_FRAME > 0:
sys.exit()
pixelsLeft = abs(target - xOffset)
# ease in start
if frame < easeInFrameCount:
xOffsetF += (direction * easeInFrames[frame-1])
xOffset = roundInt(xOffsetF)
# print(abs(xOffset-centerX))
# # correct any discrepancy after ease in
# elif frame <= easeInFrameCount:
# xOffset = (frame - padFrameInCount) * pxPerFrame
# xOffsetF = 1.0 * xOffset
# ease out end
elif pixelsLeft <= easeOutPixels:
pxStep = easeOutFrames.pop() if len(easeOutFrames) > 0 else 1
xOffsetF += (direction * pxStep)
xOffset = roundInt(xOffsetF)
# print("%s > %s" % (xOffset, centerX-totalW))
else:
xOffset += (direction * pxPerFrame)
xOffsetF = 1.0 * xOffset
xOffset = lim(xOffset, (centerX-totalW, centerX))
printProgress(frame, totalFrames)
# break
stepTime = logTime(startTime, "Finished frames")
padZeros = len(str(totalFrames))
outfile = a.OUTPUT_FILE % basename
frameInfile = a.OUTPUT_FRAME % (basename, '%s')
if a.VIDEO_ONLY:
compileFrames(frameInfile, a.FPS, outfile, padZeros)
sys.exit()
if a.OVERWRITE or not os.path.isfile(audioFilename):
mixAudio(sequence, sequenceDuration, audioFilename, masterDb=a.MASTER_DB)
else:
print("%s already exists" % audioFilename)
stepTime = logTime(stepTime, "Finished Audio")
if not a.AUDIO_ONLY:
if a.VIDEO_ONLY:
audioFilename = None
if a.OVERWRITE or not os.path.isfile(outfile):
compileFrames(frameInfile, a.FPS, outfile, padZeros, audioFile=audioFilename)
else:
print("%s already exists" % outfile)
logTime(startTime, "Total execution time")
| [
"PIL.Image.fromarray",
"PIL.Image.open",
"argparse.ArgumentParser",
"PIL.Image.new",
"matplotlib.pyplot.plot",
"PIL.ImageFont.truetype",
"os.path.isfile",
"PIL.ImageDraw.Draw",
"numpy.linspace",
"gizeh.Surface",
"sys.exit",
"matplotlib.pyplot.show"
] | [((1123, 1148), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1146, 1148), False, 'import argparse\n'), ((23531, 23580), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(a.WIDTH, a.HEIGHT)', 'a.BG_COLOR'], {}), "('RGB', (a.WIDTH, a.HEIGHT), a.BG_COLOR)\n", (23540, 23580), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((23592, 23618), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im', '"""RGBA"""'], {}), "(im, 'RGBA')\n", (23606, 23618), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((28778, 28823), 'gizeh.Surface', 'gizeh.Surface', ([], {'width': 'a.WIDTH', 'height': 'a.HEIGHT'}), '(width=a.WIDTH, height=a.HEIGHT)\n', (28791, 28823), False, 'import gizeh\n'), ((29250, 29287), 'PIL.Image.fromarray', 'Image.fromarray', (['spixels'], {'mode': '"""RGBA"""'}), "(spixels, mode='RGBA')\n", (29265, 29287), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((31269, 31331), 'PIL.ImageFont.truetype', 'ImageFont.truetype', ([], {'font': '"""fonts/OpenSans-Regular.ttf"""', 'size': '(10)'}), "(font='fonts/OpenSans-Regular.ttf', size=10)\n", (31287, 31331), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((31389, 31430), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(imgW, imgH)', '"""#000000"""'], {}), "('RGB', (imgW, imgH), '#000000')\n", (31398, 31430), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((31442, 31467), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im', '"""RGB"""'], {}), "(im, 'RGB')\n", (31456, 31467), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((32091, 32101), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32099, 32101), False, 'import sys\n'), ((32248, 32264), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'ys'], {}), '(xs, ys)\n', (32256, 32264), True, 'import matplotlib.pyplot as plt\n'), ((32269, 32279), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (32277, 32279), True, 'import matplotlib.pyplot as plt\n'), ((32284, 32294), 'sys.exit', 'sys.exit', ([], {}), '()\n', 
(32292, 32294), False, 'import sys\n'), ((32312, 32322), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32320, 32322), False, 'import sys\n'), ((32417, 32441), 'PIL.Image.open', 'Image.open', (['a.IMAGE_FILE'], {}), '(a.IMAGE_FILE)\n', (32427, 32441), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((32546, 32569), 'PIL.Image.open', 'Image.open', (['a.MAP_IMAGE'], {}), '(a.MAP_IMAGE)\n', (32556, 32569), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((32727, 32833), 'PIL.ImageFont.truetype', 'ImageFont.truetype', ([], {'font': 'a.STATION_FONT', 'size': 'a.STATION_TEXT_SIZE', 'layout_engine': 'ImageFont.LAYOUT_RAQM'}), '(font=a.STATION_FONT, size=a.STATION_TEXT_SIZE,\n layout_engine=ImageFont.LAYOUT_RAQM)\n', (32745, 32833), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((32848, 32954), 'PIL.ImageFont.truetype', 'ImageFont.truetype', ([], {'font': 'a.BOROUGH_FONT', 'size': 'a.BOROUGH_TEXT_SIZE', 'layout_engine': 'ImageFont.LAYOUT_RAQM'}), '(font=a.BOROUGH_FONT, size=a.BOROUGH_TEXT_SIZE,\n layout_engine=ImageFont.LAYOUT_RAQM)\n', (32866, 32954), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((35723, 35733), 'sys.exit', 'sys.exit', ([], {}), '()\n', (35731, 35733), False, 'import sys\n'), ((14419, 14429), 'sys.exit', 'sys.exit', ([], {}), '()\n', (14427, 14429), False, 'import sys\n'), ((23480, 23504), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (23494, 23504), False, 'import os\n'), ((35757, 35786), 'os.path.isfile', 'os.path.isfile', (['audioFilename'], {}), '(audioFilename)\n', (35771, 35786), False, 'import os\n'), ((26612, 26657), 'gizeh.Surface', 'gizeh.Surface', ([], {'width': 'a.WIDTH', 'height': 'a.HEIGHT'}), '(width=a.WIDTH, height=a.HEIGHT)\n', (26625, 26657), False, 'import gizeh\n'), ((26908, 26945), 'PIL.Image.fromarray', 'Image.fromarray', (['bpixels'], {'mode': '"""RGBA"""'}), "(bpixels, mode='RGBA')\n", (26923, 26945), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((29999, 
30036), 'numpy.linspace', 'np.linspace', (['(0)', '(1.0)'], {'num': 'toFrameCount'}), '(0, 1.0, num=toFrameCount)\n', (30010, 30036), True, 'import numpy as np\n'), ((36064, 36087), 'os.path.isfile', 'os.path.isfile', (['outfile'], {}), '(outfile)\n', (36078, 36087), False, 'import os\n'), ((34525, 34535), 'sys.exit', 'sys.exit', ([], {}), '()\n', (34533, 34535), False, 'import sys\n')] |
"""
OpenVINO DL Workbench
Class for create setup bundle job
Copyright (c) 2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import shutil
import tempfile
from contextlib import closing
from wb.extensions_factories.database import get_db_session_for_celery
from wb.main.enumerates import JobTypesEnum, StatusEnum
from wb.main.jobs.interfaces.ijob import IJob
from wb.main.jobs.utils.database_functions import set_status_in_db
from wb.main.models import CreateSetupBundleJobModel, SharedArtifactModel
from wb.main.scripts.job_scripts_generators.setup_script_generator import SetupScriptGenerator
from wb.main.utils.bundle_creator.setup_bundle_creator import SetupBundleCreator, SetupComponentsParams
from wb.main.utils.utils import find_by_ext
class CreateSetupBundleJob(IJob):
job_type = JobTypesEnum.create_setup_bundle_type
_job_model_class = CreateSetupBundleJobModel
def __init__(self, job_id: int, **unused_kwargs):
super().__init__(job_id=job_id)
self._attach_default_db_and_socket_observers()
with closing(get_db_session_for_celery()) as session:
create_bundle_job_model: CreateSetupBundleJobModel = self.get_job_model(session)
deployment_bundle_config = create_bundle_job_model.deployment_bundle_config
self.deployment_bundle_id = deployment_bundle_config.deployment_bundle_id
self.additional_components = [name for name, value in deployment_bundle_config.json().items() if value]
self.targets = deployment_bundle_config.targets_to_json
self.operating_system = deployment_bundle_config.operating_system
self.include_model = deployment_bundle_config.include_model
self.topology_name = create_bundle_job_model.project.topology.name if self.include_model else None
self.topology_path = create_bundle_job_model.project.topology.path if self.include_model else None
bundle: SharedArtifactModel = create_bundle_job_model.deployment_bundle_config.deployment_bundle
self.bundle_path = bundle.build_full_artifact_path()
self.is_archive = bundle.is_archive
def run(self):
self._job_state_subject.update_state(status=StatusEnum.running, log='Preparing setup bundle.')
with tempfile.TemporaryDirectory('rw') as tmp_scripts_folder:
setup_path = self.generate_script_from_template(tmp_scripts_folder, 'setup.sh')
get_devices_path = self.generate_script_from_template(tmp_scripts_folder,
'get_inference_engine_devices.sh')
get_resources_path = self.generate_script_from_template(tmp_scripts_folder, 'get_system_resources.sh')
has_internet_connection_path = self.generate_script_from_template(tmp_scripts_folder,
'has_internet_connection.sh')
topology_temporary_path = None
if self.include_model:
topology_temporary_path = os.path.join(tmp_scripts_folder, self.topology_name)
os.makedirs(topology_temporary_path)
xml_file = find_by_ext(self.topology_path, 'xml')
tmp_xml_file = os.path.join(topology_temporary_path, f'{self.topology_name}.xml')
shutil.copy(xml_file, tmp_xml_file)
bin_file = find_by_ext(self.topology_path, 'bin')
tmp_bin_file = os.path.join(topology_temporary_path, f'{self.topology_name}.bin')
shutil.copy(bin_file, tmp_bin_file)
setup_bundle_creator = SetupBundleCreator(
log_callback=lambda message, progress:
self._job_state_subject.update_state(log=message,
progress=progress)
)
setup_components = SetupComponentsParams(setup_path, get_devices_path,
get_resources_path,
has_internet_connection_path,
self.operating_system,
self.targets,
self.additional_components,
topology_temporary_path)
setup_bundle_creator.create(components=setup_components,
destination_bundle=self.bundle_path,
is_archive=self.is_archive)
self.on_success()
@staticmethod
def generate_script_from_template(result_scripts_path: str, script_name: str) -> str:
result_script_path = os.path.join(result_scripts_path, script_name)
job_script_generator = SetupScriptGenerator(script_name)
job_script_generator.create(result_file_path=result_script_path)
return result_script_path
def on_success(self):
with closing(get_db_session_for_celery()) as session:
deployment_job = self.get_job_model(session)
bundle = deployment_job.deployment_bundle_config.deployment_bundle
bundle.update(self.bundle_path)
bundle.write_record(session)
self._job_state_subject.update_state(status=StatusEnum.ready,
log='Setup bundle created successfully.')
set_status_in_db(SharedArtifactModel, bundle.id, StatusEnum.ready, session, force=True)
self._job_state_subject.detach_all_observers()
| [
"tempfile.TemporaryDirectory",
"os.makedirs",
"wb.main.utils.bundle_creator.setup_bundle_creator.SetupComponentsParams",
"wb.main.utils.utils.find_by_ext",
"os.path.join",
"wb.extensions_factories.database.get_db_session_for_celery",
"wb.main.scripts.job_scripts_generators.setup_script_generator.SetupSc... | [((5298, 5344), 'os.path.join', 'os.path.join', (['result_scripts_path', 'script_name'], {}), '(result_scripts_path, script_name)\n', (5310, 5344), False, 'import os\n'), ((5376, 5409), 'wb.main.scripts.job_scripts_generators.setup_script_generator.SetupScriptGenerator', 'SetupScriptGenerator', (['script_name'], {}), '(script_name)\n', (5396, 5409), False, 'from wb.main.scripts.job_scripts_generators.setup_script_generator import SetupScriptGenerator\n'), ((2799, 2832), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', (['"""rw"""'], {}), "('rw')\n", (2826, 2832), False, 'import tempfile\n'), ((4410, 4610), 'wb.main.utils.bundle_creator.setup_bundle_creator.SetupComponentsParams', 'SetupComponentsParams', (['setup_path', 'get_devices_path', 'get_resources_path', 'has_internet_connection_path', 'self.operating_system', 'self.targets', 'self.additional_components', 'topology_temporary_path'], {}), '(setup_path, get_devices_path, get_resources_path,\n has_internet_connection_path, self.operating_system, self.targets, self\n .additional_components, topology_temporary_path)\n', (4431, 4610), False, 'from wb.main.utils.bundle_creator.setup_bundle_creator import SetupBundleCreator, SetupComponentsParams\n'), ((6004, 6095), 'wb.main.jobs.utils.database_functions.set_status_in_db', 'set_status_in_db', (['SharedArtifactModel', 'bundle.id', 'StatusEnum.ready', 'session'], {'force': '(True)'}), '(SharedArtifactModel, bundle.id, StatusEnum.ready, session,\n force=True)\n', (6020, 6095), False, 'from wb.main.jobs.utils.database_functions import set_status_in_db\n'), ((1576, 1603), 'wb.extensions_factories.database.get_db_session_for_celery', 'get_db_session_for_celery', ([], {}), '()\n', (1601, 1603), False, 'from wb.extensions_factories.database import get_db_session_for_celery\n'), ((3577, 3629), 'os.path.join', 'os.path.join', (['tmp_scripts_folder', 'self.topology_name'], {}), 
'(tmp_scripts_folder, self.topology_name)\n', (3589, 3629), False, 'import os\n'), ((3646, 3682), 'os.makedirs', 'os.makedirs', (['topology_temporary_path'], {}), '(topology_temporary_path)\n', (3657, 3682), False, 'import os\n'), ((3710, 3748), 'wb.main.utils.utils.find_by_ext', 'find_by_ext', (['self.topology_path', '"""xml"""'], {}), "(self.topology_path, 'xml')\n", (3721, 3748), False, 'from wb.main.utils.utils import find_by_ext\n'), ((3780, 3846), 'os.path.join', 'os.path.join', (['topology_temporary_path', 'f"""{self.topology_name}.xml"""'], {}), "(topology_temporary_path, f'{self.topology_name}.xml')\n", (3792, 3846), False, 'import os\n'), ((3863, 3898), 'shutil.copy', 'shutil.copy', (['xml_file', 'tmp_xml_file'], {}), '(xml_file, tmp_xml_file)\n', (3874, 3898), False, 'import shutil\n'), ((3927, 3965), 'wb.main.utils.utils.find_by_ext', 'find_by_ext', (['self.topology_path', '"""bin"""'], {}), "(self.topology_path, 'bin')\n", (3938, 3965), False, 'from wb.main.utils.utils import find_by_ext\n'), ((3997, 4063), 'os.path.join', 'os.path.join', (['topology_temporary_path', 'f"""{self.topology_name}.bin"""'], {}), "(topology_temporary_path, f'{self.topology_name}.bin')\n", (4009, 4063), False, 'import os\n'), ((4080, 4115), 'shutil.copy', 'shutil.copy', (['bin_file', 'tmp_bin_file'], {}), '(bin_file, tmp_bin_file)\n', (4091, 4115), False, 'import shutil\n'), ((5565, 5592), 'wb.extensions_factories.database.get_db_session_for_celery', 'get_db_session_for_celery', ([], {}), '()\n', (5590, 5592), False, 'from wb.extensions_factories.database import get_db_session_for_celery\n')] |
import sys
import re
from PyQt4 import QtGui, QtCore
from polynomial import Polynomial
from rational import Rational
class Window(QtGui.QMainWindow):
width, height = 420, 130
def __init__(self):
super().__init__()
self.setFixedSize(Window.width, Window.height)
self.setWindowTitle('Find Roots')
self.setWindowIcon(QtGui.QIcon('Images/roots.png'))
self.poly = None
self.setFont(QtGui.QFont('Times New Roman'))
self.home()
def home(self):
self.is_imag = True
self.imag_b = QtGui.QCheckBox('Return imaginary numbers?')
self.imag_b.adjustSize()
self.imag_b.setParent(self)
self.imag_b.toggle()
self.imag_b.move(10, 5)
self.imag_b.stateChanged.connect(self.toggle_imag)
self.instruction = QtGui.QLabel(self)
self.instruction.setText('Enter coefficients of a polynomial seperated by commas.')
self.instruction.move(10, 35)
self.instruction.adjustSize()
self.text = QtGui.QLabel(self)
self.entry = QtGui.QLineEdit(self)
self.entry.returnPressed.connect(self.find_roots)
self.entry.move(10, 60)
self.entry.resize(400, 30)
self.confirm = QtGui.QPushButton('Find Roots!', self)
self.confirm.move(10, 100)
self.confirm.clicked.connect(self.find_roots)
QtGui.QShortcut(QtGui.QKeySequence(QtCore.Qt.Key_Return), self, self.find_roots)
self.plot_b = QtGui.QPushButton('Plot', self)
self.plot_b.clicked.connect(self.plot)
self.plot_b.move(120, 100)
self.factor_b = QtGui.QPushButton('Factorise', self)
self.factor_b.clicked.connect(self.factor)
self.factor_b.move(230, 100)
self.derivate_b = QtGui.QPushButton('Derivate', self)
self.derivate_b.clicked.connect(self.derivate)
self.derivate_b.move(340, 100)
self.eq = QtGui.QLabel(self)
self.eq.move(10, Window.height)
self.show()
def toggle_imag(self):
self.is_imag = not self.is_imag
def find_roots(self):
self.entry_text = self.entry.text()
try:
self.poly = self.get_poly(self.entry_text)
except ValueError:
QtGui.QMessageBox.warning(self, 'warning', 'Invalid arguments')
return
roots = self.poly.roots(imag=self.is_imag)
self.eq.setFont(QtGui.QFont('Consolas', 8))
s = '%s = 0' % self.poly.short_str()
self.eq.setText(re.sub("(.{44})", "\\1\n",
s, 0, re.DOTALL))
self.eq.adjustSize()
t = []
for i, r in enumerate(roots):
t.append('x<sub>%s</sub> = %s' % (i, r))
s = '<br>'.join(t)
self.text.setText(s)
self.text.adjustSize()
self.text.move(10, Window.height + self.eq.height())
new_height = Window.height + self.eq.height() + self.text.height() + 10
self.setFixedSize(Window.width, new_height)
def plot(self) -> None:
self.entry_text = self.entry.text()
try:
self.poly = self.get_poly(self.entry_text)
except ValueError:
QtGui.QMessageBox.warning(self, 'warning', 'Invalid arguments')
return
self.poly.plot()
def factor(self):
self.entry_text = self.entry.text()
try:
self.poly = self.get_poly(self.entry_text)
except ValueError:
QtGui.QMessageBox.warning(self, 'warning', 'Invalid arguments')
return
self.eq.setText('')
self.text.setText(self.poly.factor())
self.text.move(10, Window.height)
self.text.adjustSize()
self.text.setWordWrap(True)
self.setFixedSize(Window.width, Window.height + self.text.height())
def derivate(self):
self.entry_text = self.entry.text()
try:
self.poly = self.get_poly(self.entry_text)
except ValueError:
QtGui.QMessageBox.warning(self, 'warning', 'Invalid arguments')
return
self.eq.setText('')
self.text.setText(str(self.poly.derivate()))
self.text.setFont(QtGui.QFont('Courier'))
self.text.move(10, Window.height)
self.text.adjustSize()
self.text.setWordWrap(True)
self.setFixedSize(Window.width, Window.height + self.text.height())
@staticmethod
def get_poly(text):
if 'x' in text:
return Polynomial.from_string(text)
terms = re.findall(r'-?\d+\.?\d*|/', text)
if '/' in terms:
numerator, denominator = terms[:terms.index('/')], terms[terms.index('/') + 1:]
num_coefs, den_coefs = list(map(float, numerator)), list(map(float, denominator))
return Rational(num_coefs, den_coefs)
else:
coefs = map(float, terms)
return Polynomial(*coefs)
def main():
app = QtGui.QApplication(sys.argv)
GUI = Window()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| [
"PyQt4.QtGui.QApplication",
"polynomial.Polynomial.from_string",
"rational.Rational",
"PyQt4.QtGui.QLabel",
"PyQt4.QtGui.QPushButton",
"PyQt4.QtGui.QIcon",
"PyQt4.QtGui.QKeySequence",
"PyQt4.QtGui.QLineEdit",
"PyQt4.QtGui.QMessageBox.warning",
"re.sub",
"PyQt4.QtGui.QCheckBox",
"re.findall",
... | [((4917, 4945), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (4935, 4945), False, 'from PyQt4 import QtGui, QtCore\n'), ((562, 606), 'PyQt4.QtGui.QCheckBox', 'QtGui.QCheckBox', (['"""Return imaginary numbers?"""'], {}), "('Return imaginary numbers?')\n", (577, 606), False, 'from PyQt4 import QtGui, QtCore\n'), ((824, 842), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self'], {}), '(self)\n', (836, 842), False, 'from PyQt4 import QtGui, QtCore\n'), ((1032, 1050), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self'], {}), '(self)\n', (1044, 1050), False, 'from PyQt4 import QtGui, QtCore\n'), ((1073, 1094), 'PyQt4.QtGui.QLineEdit', 'QtGui.QLineEdit', (['self'], {}), '(self)\n', (1088, 1094), False, 'from PyQt4 import QtGui, QtCore\n'), ((1244, 1282), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Find Roots!"""', 'self'], {}), "('Find Roots!', self)\n", (1261, 1282), False, 'from PyQt4 import QtGui, QtCore\n'), ((1485, 1516), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Plot"""', 'self'], {}), "('Plot', self)\n", (1502, 1516), False, 'from PyQt4 import QtGui, QtCore\n'), ((1624, 1660), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Factorise"""', 'self'], {}), "('Factorise', self)\n", (1641, 1660), False, 'from PyQt4 import QtGui, QtCore\n'), ((1776, 1811), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['"""Derivate"""', 'self'], {}), "('Derivate', self)\n", (1793, 1811), False, 'from PyQt4 import QtGui, QtCore\n'), ((1925, 1943), 'PyQt4.QtGui.QLabel', 'QtGui.QLabel', (['self'], {}), '(self)\n', (1937, 1943), False, 'from PyQt4 import QtGui, QtCore\n'), ((4509, 4545), 're.findall', 're.findall', (['"""-?\\\\d+\\\\.?\\\\d*|/"""', 'text'], {}), "('-?\\\\d+\\\\.?\\\\d*|/', text)\n", (4519, 4545), False, 'import re\n'), ((360, 391), 'PyQt4.QtGui.QIcon', 'QtGui.QIcon', (['"""Images/roots.png"""'], {}), "('Images/roots.png')\n", (371, 391), False, 'from PyQt4 import QtGui, QtCore\n'), ((439, 469), 
'PyQt4.QtGui.QFont', 'QtGui.QFont', (['"""Times New Roman"""'], {}), "('Times New Roman')\n", (450, 469), False, 'from PyQt4 import QtGui, QtCore\n'), ((1397, 1437), 'PyQt4.QtGui.QKeySequence', 'QtGui.QKeySequence', (['QtCore.Qt.Key_Return'], {}), '(QtCore.Qt.Key_Return)\n', (1415, 1437), False, 'from PyQt4 import QtGui, QtCore\n'), ((2410, 2436), 'PyQt4.QtGui.QFont', 'QtGui.QFont', (['"""Consolas"""', '(8)'], {}), "('Consolas', 8)\n", (2421, 2436), False, 'from PyQt4 import QtGui, QtCore\n'), ((2507, 2550), 're.sub', 're.sub', (['"""(.{44})"""', '"""\\\\1\n"""', 's', '(0)', 're.DOTALL'], {}), "('(.{44})', '\\\\1\\n', s, 0, re.DOTALL)\n", (2513, 2550), False, 'import re\n'), ((4169, 4191), 'PyQt4.QtGui.QFont', 'QtGui.QFont', (['"""Courier"""'], {}), "('Courier')\n", (4180, 4191), False, 'from PyQt4 import QtGui, QtCore\n'), ((4464, 4492), 'polynomial.Polynomial.from_string', 'Polynomial.from_string', (['text'], {}), '(text)\n', (4486, 4492), False, 'from polynomial import Polynomial\n'), ((4774, 4804), 'rational.Rational', 'Rational', (['num_coefs', 'den_coefs'], {}), '(num_coefs, den_coefs)\n', (4782, 4804), False, 'from rational import Rational\n'), ((4876, 4894), 'polynomial.Polynomial', 'Polynomial', (['*coefs'], {}), '(*coefs)\n', (4886, 4894), False, 'from polynomial import Polynomial\n'), ((2251, 2314), 'PyQt4.QtGui.QMessageBox.warning', 'QtGui.QMessageBox.warning', (['self', '"""warning"""', '"""Invalid arguments"""'], {}), "(self, 'warning', 'Invalid arguments')\n", (2276, 2314), False, 'from PyQt4 import QtGui, QtCore\n'), ((3179, 3242), 'PyQt4.QtGui.QMessageBox.warning', 'QtGui.QMessageBox.warning', (['self', '"""warning"""', '"""Invalid arguments"""'], {}), "(self, 'warning', 'Invalid arguments')\n", (3204, 3242), False, 'from PyQt4 import QtGui, QtCore\n'), ((3461, 3524), 'PyQt4.QtGui.QMessageBox.warning', 'QtGui.QMessageBox.warning', (['self', '"""warning"""', '"""Invalid arguments"""'], {}), "(self, 'warning', 'Invalid arguments')\n", (3486, 3524), 
False, 'from PyQt4 import QtGui, QtCore\n'), ((3979, 4042), 'PyQt4.QtGui.QMessageBox.warning', 'QtGui.QMessageBox.warning', (['self', '"""warning"""', '"""Invalid arguments"""'], {}), "(self, 'warning', 'Invalid arguments')\n", (4004, 4042), False, 'from PyQt4 import QtGui, QtCore\n')] |
# Copyright (c) 2020 <NAME>,
# <NAME>, <NAME>, <NAME>
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
from bark.benchmark.benchmark_result import BenchmarkConfig
from bark_ml.library_wrappers.lib_fqf_iqn_qrdqn.agent import TrainingBenchmark
from bark.benchmark.benchmark_runner import BenchmarkRunner, BehaviorConfig
def default_training_evaluators():
default_config = {"success" : "EvaluatorGoalReached", "collision_other" : "EvaluatorCollisionEgoAgent",
"out_of_drivable" : "EvaluatorDrivableArea", "max_steps": "EvaluatorStepCount"}
return default_config
def default_terminal_criteria(max_episode_steps):
terminal_when = {"collision_other" : lambda x: x, "out_of_drivable" : lambda x: x, \
"max_steps": lambda x : x>max_episode_steps, "success" : lambda x: x}
return terminal_when
class TrainingBenchmarkDatabase(TrainingBenchmark):
def __init__(self, benchmark_database=None,
evaluators=None,
terminal_when=None):
self.database = benchmark_database
self.evaluators = evaluators
self.terminal_when = terminal_when
def create_benchmark_configs(self, num_scenarios):
benchmark_configs = []
if self.database:
for scenario_generator, scenario_set_name, scenario_set_param_desc in self.database:
benchmark_configs.extend(self.benchmark_configs_from_scen_gen( \
scenario_generator, scenario_set_name, \
scenario_set_param_desc, num_scenarios))
else:
scenario_generator = self.training_env._scenario_generator
benchmark_configs.extend(self.benchmark_configs_from_scen_gen(
scenario_generator, "training_env", \
{}, num_scenarios))
return benchmark_configs
def benchmark_configs_from_scen_gen(self, scenario_generator, scenario_set_name, \
scenario_set_param_desc, num_scenarios):
benchmark_configs = []
for scenario, scenario_idx in scenario_generator:
if num_scenarios and scenario_idx >= num_scenarios:
break
behavior_config = BehaviorConfig("agent", self.agent, None)
benchmark_config = \
BenchmarkConfig(
len(benchmark_configs),
behavior_config,
scenario,
scenario_idx,
scenario_set_name,
scenario_set_param_desc
)
benchmark_configs.append(benchmark_config)
return benchmark_configs
def reset(self, training_env, num_episodes, max_episode_steps, agent):
super(TrainingBenchmarkDatabase, self).reset(training_env, num_episodes, \
max_episode_steps, agent)
benchmark_configs = self.create_benchmark_configs(num_episodes)
evaluators = default_training_evaluators()
if self.evaluators:
evaluators = {**self.evaluators, **evaluators}
terminal_when = default_terminal_criteria(max_episode_steps)
if self.terminal_when:
terminal_when = {**self.terminal_when, **terminal_when}
self.benchmark_runner = BenchmarkRunner(
benchmark_configs = benchmark_configs,
evaluators=evaluators,
terminal_when = terminal_when,
num_scenarios=num_episodes,
log_eval_avg_every = 100000000000,
checkpoint_dir = "checkpoints",
merge_existing = False,
deepcopy=False)
def run(self):
mean_return, formatting = super(TrainingBenchmarkDatabase, self).run()
eval_result = self.benchmark_runner.run()
data_frame = eval_result.get_data_frame()
data_frame["max_steps"] = data_frame.Terminal.apply(lambda x: "max_steps" in x and (not "collision" in x))
data_frame["success"] = data_frame.Terminal.apply(lambda x: "success" in x and (not "collision" in x) and (not "max_steps" in x))
data_frame = data_frame.drop(columns=["scen_set", "scen_idx", "behavior", "Terminal", "step", "config_idx"])
mean = data_frame.mean(axis=0)
eval_result = {**mean.to_dict(), **mean_return}
return eval_result, f"Benchmark Result: {eval_result}"
def is_better(self, eval_result1, than_eval_result2):
pass
| [
"bark.benchmark.benchmark_runner.BehaviorConfig",
"bark.benchmark.benchmark_runner.BenchmarkRunner"
] | [((3232, 3477), 'bark.benchmark.benchmark_runner.BenchmarkRunner', 'BenchmarkRunner', ([], {'benchmark_configs': 'benchmark_configs', 'evaluators': 'evaluators', 'terminal_when': 'terminal_when', 'num_scenarios': 'num_episodes', 'log_eval_avg_every': '(100000000000)', 'checkpoint_dir': '"""checkpoints"""', 'merge_existing': '(False)', 'deepcopy': '(False)'}), "(benchmark_configs=benchmark_configs, evaluators=evaluators,\n terminal_when=terminal_when, num_scenarios=num_episodes,\n log_eval_avg_every=100000000000, checkpoint_dir='checkpoints',\n merge_existing=False, deepcopy=False)\n", (3247, 3477), False, 'from bark.benchmark.benchmark_runner import BenchmarkRunner, BehaviorConfig\n'), ((2198, 2239), 'bark.benchmark.benchmark_runner.BehaviorConfig', 'BehaviorConfig', (['"""agent"""', 'self.agent', 'None'], {}), "('agent', self.agent, None)\n", (2212, 2239), False, 'from bark.benchmark.benchmark_runner import BenchmarkRunner, BehaviorConfig\n')] |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
import sys
import shutil
import onnx
import onnxruntime
import json
from google.protobuf.json_format import MessageToJson
import predict_pb2
import onnx_ml_pb2
# Current models only have one input and one output
def get_io_name(model_file_name):
    """Return the (input, output) tensor names of a single-input/output ONNX model."""
    session = onnxruntime.InferenceSession(model_file_name)
    input_name = session.get_inputs()[0].name
    output_name = session.get_outputs()[0].name
    return input_name, output_name
def gen_input_pb(pb_full_path, input_name, output_name, request_file_path):
    """Wrap a serialized input TensorProto into a PredictRequest protobuf file.

    Reads the tensor from *pb_full_path*, attaches it under *input_name*,
    requests *output_name* in the output filter and serializes the request
    to *request_file_path*.
    """
    tensor = onnx_ml_pb2.TensorProto()
    with open(pb_full_path, 'rb') as fin:
        tensor.ParseFromString(fin.read())
    request = predict_pb2.PredictRequest()
    request.inputs[input_name].CopyFrom(tensor)
    request.output_filter.append(output_name)
    with open(request_file_path, "wb") as fout:
        fout.write(request.SerializeToString())
def gen_output_pb(pb_full_path, output_name, response_file_path):
    """Wrap a serialized output TensorProto into a PredictResponse protobuf file."""
    tensor = onnx_ml_pb2.TensorProto()
    with open(pb_full_path, 'rb') as fin:
        tensor.ParseFromString(fin.read())
    response = predict_pb2.PredictResponse()
    response.outputs[output_name].CopyFrom(tensor)
    with open(response_file_path, "wb") as fout:
        fout.write(response.SerializeToString())
def tensor2dict(full_path):
    """Load a serialized onnx.TensorProto and return it as a JSON-style dict."""
    tensor = onnx.TensorProto()
    with open(full_path, 'rb') as fin:
        tensor.ParseFromString(fin.read())
    # Round-trip through JSON text so enum fields stay numeric in the dict.
    return json.loads(MessageToJson(tensor, use_integers_for_enums=True))
def gen_input_json(pb_full_path, input_name, output_name, json_file_path):
    """Write a JSON prediction request built from a serialized input tensor."""
    request = {
        "inputs": {input_name: tensor2dict(pb_full_path)},
        "outputFilter": [output_name],
    }
    with open(json_file_path, 'w') as outfile:
        json.dump(request, outfile)
def gen_output_json(pb_full_path, output_name, json_file_path):
    """Write a JSON prediction response built from a serialized output tensor."""
    response = {"outputs": {output_name: tensor2dict(pb_full_path)}}
    with open(json_file_path, 'w') as outfile:
        json.dump(response, outfile)
def _find_onnx_file(folder):
    """Return the path of the first visible (non-hidden) *.onnx file in *folder*.

    Raises:
        FileNotFoundError: if the folder contains no such file.
    """
    for fname in os.listdir(folder):
        candidate = os.path.join(folder, fname)
        if not fname.startswith(".") and fname.endswith(".onnx") and os.path.isfile(candidate):
            return candidate
    raise FileNotFoundError('Could not find any *.onnx file in {0}'.format(folder))


def _copy_model_files(src_folder, dst_folder, onnx_file_path):
    """Copy the model (renamed to model.onnx) plus all non-onnx sibling files."""
    shutil.copy2(onnx_file_path, os.path.join(dst_folder, "model.onnx"))
    for fname in os.listdir(src_folder):
        src_path = os.path.join(src_folder, fname)
        if not fname.endswith(".onnx") and os.path.isfile(src_path):
            shutil.copy2(src_path, dst_folder)


def _gen_test_data(src_folder, dst_folder, iname, oname):
    """Generate request/response files (JSON and protobuf) for each test-data subfolder."""
    for test in os.listdir(src_folder):
        src = os.path.join(src_folder, test)
        if not os.path.isdir(src):
            continue
        dst = os.path.join(dst_folder, test)
        os.makedirs(dst, exist_ok=True)
        gen_input_json(os.path.join(src, 'input_0.pb'), iname, oname, os.path.join(dst, 'request.json'))
        gen_output_json(os.path.join(src, 'output_0.pb'), oname, os.path.join(dst, 'response.json'))
        gen_input_pb(os.path.join(src, 'input_0.pb'), iname, oname, os.path.join(dst, 'request.pb'))
        gen_output_pb(os.path.join(src, 'output_0.pb'), oname, os.path.join(dst, 'response.pb'))


def gen_req_resp(model_zoo, test_data, copy_model=False):
    """Walk the model zoo and emit scoring request/response test data.

    For every <opset>/<model> folder in *model_zoo*, mirrors the layout
    under *test_data* and generates request/response files for each
    test-data subfolder.  When *copy_model* is true, the model file
    (renamed model.onnx) and its non-onnx sibling files are copied too.

    Args:
        model_zoo: root folder containing <opset>/<model> subfolders.
        test_data: output root; mirrored subfolders are created as needed.
        copy_model: whether to copy model files into the output tree.

    Raises:
        FileNotFoundError: if a model folder holds no *.onnx file.
    """
    skip_list = [
        ('opset8', 'mxnet_arcface') # REASON: Known issue
    ]
    opsets = [name for name in os.listdir(model_zoo)
              if os.path.isdir(os.path.join(model_zoo, name))]
    for opset in opsets:
        os.makedirs(os.path.join(test_data, opset), exist_ok=True)
        current_model_folder = os.path.join(model_zoo, opset)
        current_data_folder = os.path.join(test_data, opset)
        models = [name for name in os.listdir(current_model_folder)
                  if os.path.isdir(os.path.join(current_model_folder, name))]
        for model in models:
            print("Working on Opset: {0}, Model: {1}".format(opset, model))
            if (opset, model) in skip_list:
                print(" SKIP!!")
                continue
            os.makedirs(os.path.join(current_data_folder, model), exist_ok=True)
            src_folder = os.path.join(current_model_folder, model)
            dst_folder = os.path.join(current_data_folder, model)
            onnx_file_path = _find_onnx_file(src_folder)
            if copy_model:
                _copy_model_files(src_folder, dst_folder, onnx_file_path)
            iname, oname = get_io_name(onnx_file_path)
            _gen_test_data(src_folder, dst_folder, iname, oname)
if __name__ == '__main__':
    # Validate CLI arguments up front instead of dying with an IndexError.
    if len(sys.argv) < 3:
        sys.exit('Usage: {0} <model_zoo_dir> <test_data_dir>'.format(sys.argv[0]))
    model_zoo = os.path.realpath(sys.argv[1])
    test_data = os.path.realpath(sys.argv[2])
    os.makedirs(test_data, exist_ok=True)
    gen_req_resp(model_zoo, test_data)
| [
"json.loads",
"os.listdir",
"onnx_ml_pb2.TensorProto",
"os.makedirs",
"predict_pb2.PredictRequest",
"shutil.copy2",
"json.dump",
"onnxruntime.InferenceSession",
"os.path.join",
"onnx.TensorProto",
"os.path.realpath",
"predict_pb2.PredictResponse",
"google.protobuf.json_format.MessageToJson"
... | [((364, 409), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['model_file_name'], {}), '(model_file_name)\n', (392, 409), False, 'import onnxruntime\n'), ((557, 582), 'onnx_ml_pb2.TensorProto', 'onnx_ml_pb2.TensorProto', ([], {}), '()\n', (580, 582), False, 'import onnx_ml_pb2\n'), ((679, 707), 'predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (705, 707), False, 'import predict_pb2\n'), ((984, 1009), 'onnx_ml_pb2.TensorProto', 'onnx_ml_pb2.TensorProto', ([], {}), '()\n', (1007, 1009), False, 'import onnx_ml_pb2\n'), ((1107, 1136), 'predict_pb2.PredictResponse', 'predict_pb2.PredictResponse', ([], {}), '()\n', (1134, 1136), False, 'import predict_pb2\n'), ((1328, 1346), 'onnx.TensorProto', 'onnx.TensorProto', ([], {}), '()\n', (1344, 1346), False, 'import onnx\n'), ((1427, 1472), 'google.protobuf.json_format.MessageToJson', 'MessageToJson', (['t'], {'use_integers_for_enums': '(True)'}), '(t, use_integers_for_enums=True)\n', (1440, 1472), False, 'from google.protobuf.json_format import MessageToJson\n'), ((1482, 1501), 'json.loads', 'json.loads', (['jsonStr'], {}), '(jsonStr)\n', (1492, 1501), False, 'import json\n'), ((4596, 4625), 'os.path.realpath', 'os.path.realpath', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (4612, 4625), False, 'import os\n'), ((4640, 4669), 'os.path.realpath', 'os.path.realpath', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (4656, 4669), False, 'import os\n'), ((4673, 4710), 'os.makedirs', 'os.makedirs', (['test_data'], {'exist_ok': '(True)'}), '(test_data, exist_ok=True)\n', (4684, 4710), False, 'import os\n'), ((1833, 1856), 'json.dump', 'json.dump', (['req', 'outfile'], {}), '(req, outfile)\n', (1842, 1856), False, 'import json\n'), ((2092, 2116), 'json.dump', 'json.dump', (['resp', 'outfile'], {}), '(resp, outfile)\n', (2101, 2116), False, 'import json\n'), ((2466, 2496), 'os.path.join', 'os.path.join', (['model_zoo', 'opset'], {}), '(model_zoo, opset)\n', (2478, 2496), False, 'import 
os\n'), ((2523, 2553), 'os.path.join', 'os.path.join', (['test_data', 'opset'], {}), '(test_data, opset)\n', (2535, 2553), False, 'import os\n'), ((2281, 2302), 'os.listdir', 'os.listdir', (['model_zoo'], {}), '(model_zoo)\n', (2291, 2302), False, 'import os\n'), ((2391, 2421), 'os.path.join', 'os.path.join', (['test_data', 'opset'], {}), '(test_data, opset)\n', (2403, 2421), False, 'import os\n'), ((2951, 2992), 'os.path.join', 'os.path.join', (['current_model_folder', 'model'], {}), '(current_model_folder, model)\n', (2963, 2992), False, 'import os\n'), ((3012, 3052), 'os.path.join', 'os.path.join', (['current_data_folder', 'model'], {}), '(current_data_folder, model)\n', (3024, 3052), False, 'import os\n'), ((3099, 3121), 'os.listdir', 'os.listdir', (['src_folder'], {}), '(src_folder)\n', (3109, 3121), False, 'import os\n'), ((2320, 2349), 'os.path.join', 'os.path.join', (['model_zoo', 'name'], {}), '(model_zoo, name)\n', (2332, 2349), False, 'import os\n'), ((2586, 2618), 'os.listdir', 'os.listdir', (['current_model_folder'], {}), '(current_model_folder)\n', (2596, 2618), False, 'import os\n'), ((2874, 2914), 'os.path.join', 'os.path.join', (['current_data_folder', 'model'], {}), '(current_data_folder, model)\n', (2886, 2914), False, 'import os\n'), ((3515, 3553), 'os.path.join', 'os.path.join', (['dst_folder', '"""model.onnx"""'], {}), "(dst_folder, 'model.onnx')\n", (3527, 3553), False, 'import os\n'), ((3562, 3608), 'shutil.copy2', 'shutil.copy2', (['onnx_file_path', 'target_file_path'], {}), '(onnx_file_path, target_file_path)\n', (3574, 3608), False, 'import shutil\n'), ((3631, 3653), 'os.listdir', 'os.listdir', (['src_folder'], {}), '(src_folder)\n', (3641, 3653), False, 'import os\n'), ((4033, 4063), 'os.path.join', 'os.path.join', (['src_folder', 'test'], {}), '(src_folder, test)\n', (4045, 4063), False, 'import os\n'), ((4078, 4108), 'os.path.join', 'os.path.join', (['dst_folder', 'test'], {}), '(dst_folder, test)\n', (4090, 4108), False, 'import 
os\n'), ((4117, 4148), 'os.makedirs', 'os.makedirs', (['dst'], {'exist_ok': '(True)'}), '(dst, exist_ok=True)\n', (4128, 4148), False, 'import os\n'), ((2636, 2676), 'os.path.join', 'os.path.join', (['current_model_folder', 'name'], {}), '(current_model_folder, name)\n', (2648, 2676), False, 'import os\n'), ((3268, 3299), 'os.path.join', 'os.path.join', (['src_folder', 'fname'], {}), '(src_folder, fname)\n', (3280, 3299), False, 'import os\n'), ((3911, 3933), 'os.listdir', 'os.listdir', (['src_folder'], {}), '(src_folder)\n', (3921, 3933), False, 'import os\n'), ((4172, 4203), 'os.path.join', 'os.path.join', (['src', '"""input_0.pb"""'], {}), "(src, 'input_0.pb')\n", (4184, 4203), False, 'import os\n'), ((4219, 4252), 'os.path.join', 'os.path.join', (['dst', '"""request.json"""'], {}), "(dst, 'request.json')\n", (4231, 4252), False, 'import os\n'), ((4278, 4310), 'os.path.join', 'os.path.join', (['src', '"""output_0.pb"""'], {}), "(src, 'output_0.pb')\n", (4290, 4310), False, 'import os\n'), ((4319, 4353), 'os.path.join', 'os.path.join', (['dst', '"""response.json"""'], {}), "(dst, 'response.json')\n", (4331, 4353), False, 'import os\n'), ((4376, 4407), 'os.path.join', 'os.path.join', (['src', '"""input_0.pb"""'], {}), "(src, 'input_0.pb')\n", (4388, 4407), False, 'import os\n'), ((4423, 4454), 'os.path.join', 'os.path.join', (['dst', '"""request.pb"""'], {}), "(dst, 'request.pb')\n", (4435, 4454), False, 'import os\n'), ((4478, 4510), 'os.path.join', 'os.path.join', (['src', '"""output_0.pb"""'], {}), "(src, 'output_0.pb')\n", (4490, 4510), False, 'import os\n'), ((4519, 4551), 'os.path.join', 'os.path.join', (['dst', '"""response.pb"""'], {}), "(dst, 'response.pb')\n", (4531, 4551), False, 'import os\n'), ((3207, 3238), 'os.path.join', 'os.path.join', (['src_folder', 'fname'], {}), '(src_folder, fname)\n', (3219, 3238), False, 'import os\n'), ((3951, 3981), 'os.path.join', 'os.path.join', (['src_folder', 'name'], {}), '(src_folder, name)\n', (3963, 3981), False, 
'import os\n'), ((3715, 3746), 'os.path.join', 'os.path.join', (['src_folder', 'fname'], {}), '(src_folder, fname)\n', (3727, 3746), False, 'import os\n'), ((3774, 3805), 'os.path.join', 'os.path.join', (['src_folder', 'fname'], {}), '(src_folder, fname)\n', (3786, 3805), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 10 14:31:17 2015
@author: <NAME>.
Description:
This script does CPU and GPU matrix element time complexity
profiling. It has a function which applies the matrix element
analysis for a given set of parameters, profiles the code and
plots the time complexity results (with fit) and plots the matrix
elements from each case.
"""
import numpy as np
import scipy as sp
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from my_timer import timer
from math import log
from scipy.optimize import curve_fit
def f_MEplaceholder(neval, mode):
    """
    Placeholder for the real matrix-element computation.

    Integrates the Bessel function J_{2.5} from 0 to *neval*: adaptive
    quadrature (quad) stands in for the 'gpu' path, fixed-tolerance
    Gaussian quadrature for every other mode.

    Returns:
        (result, error) as reported by the scipy integrator.
    """
    def integrand(x):
        return sp.special.jv(2.5, x)
    if mode == 'gpu':
        result, error = sp.integrate.quad(integrand, 0, neval)
    else:
        result, error = sp.integrate.quadrature(integrand, 0, neval)
    return result, error
def flinear(N, mode):
    """
    O(n) placeholder workload.

    Builds three length-N arrays (two are discarded -- presumably only to
    burn linear time for the benchmark) and returns the first one together
    with a dummy error value of 1.
    """
    y = np.asarray([k for k in range(N)])
    for _ in range(2):
        # Extra linear-time passes; the results are intentionally unused.
        np.asarray([k for k in range(N)])
    return y, 1
def fsquare(N, mode):
    """
    O(n^2) placeholder workload.

    Runs an N*N multiplication loop purely to burn quadratic time and
    returns the last product together with a dummy error value of 1.

    Args:
        N: non-negative loop bound.
        mode: unused; kept for signature parity with the other workloads.

    Returns:
        tuple: (last computed product, 1)
    """
    y = 0  # well-defined result even when N == 0 (loop body never runs)
    for i in range(N):
        for j in range(N):
            y = i*j
    return y, 1
def algoAnalysis(fn, nMin, nMax, mode):
    """
    Time *fn* over a sweep of problem sizes.

    Calls fn(i, mode) for i = 32*nMin, 32*(nMin+1), ..., 32*nMax, timing
    each call with the my_timer context manager.

    Args:
        fn: callable (size, mode) -> (result, error).
        nMin, nMax: inclusive multiplier range; actual sizes are 32*k.
        mode: forwarded unchanged to fn ('gpu' or 'cpu').

    Returns:
        (n, time_result, y_result, y_err): four parallel lists of problem
        sizes, measured times (t.msecs, presumably milliseconds), results,
        and errors.
    """
    n = []
    time_result = []
    y_result = []
    y_err = []
    for i in [j*32 for j in range(nMin,nMax+1)]:
        with timer() as t:
            temp_result, temp_err = fn(i, mode)
        # t.msecs is read after the with-block exits -- presumably the
        # timer records the elapsed time in __exit__; confirm in my_timer.
        time_result.append(t.msecs)
        y_result.append(temp_result)
        y_err.append(temp_err)
        n.append(i)
    return n, time_result, y_result, y_err
def plotAll(n, time_data, y_data, err_data):
    """Render runtime-complexity fits and matrix-element results side by side.

    Left subplot: the two runtime series vs. problem size with linear and
    quadratic least-squares fits.  Right subplot: the two matrix-element
    series with error bars and shaded error bands vs. log2 of the problem
    size.  The figure is saved to plots.pdf and shown.

    Args:
        n: list of x-axis lists; entry 2 is the runtime x-axis, entry 0
           (log2-scaled) the matrix-element x-axis.  Entries have
           different lengths, so n is kept as a plain list -- a ragged
           np.asarray(n) raises ValueError on modern NumPy.
        time_data: two runtime series (one per fitted curve).
        y_data: two equal-length matrix-element series (GPU, CPU).
        err_data: two matching error series.
    """
    time_data = np.asarray(time_data)
    y_data = np.asarray(y_data)
    err_data = np.asarray(err_data)
    # NOTE(review): overwrites the GPU errors with half the CPU errors --
    # presumably a placeholder scaling; confirm intent.
    err_data[0] = err_data[1]*0.5
    # plotting helpers
    nTime = n[2]
    # Materialize as a list: the former Python-2 map() would be a one-shot
    # iterator in Python 3 and min()/max() below would exhaust it.
    n = [log(x, 2) for x in n[0]]
    colors = ['lightblue', 'lightgreen']
    edgeColors = ['#1B2ACC','#3F7F4C']
    faceColors = ['#089FFF', '#7EFF99']
    label_entries_for_results = ['GPU Matrix Elements', 'CPU Matrix Elements']
    label_entries_for_time = ['GPU Runtime', 'CPU Runtime']
    plt.figure(figsize=(15,6))
    ###########################################################################
    # The following plots the runtime information for GPU and CPU runs.
    def sqFunc(x, a, b, c):
        return a*x**2 + b*x +c
    def linFunc(x, a, b):
        return a*x + b
    funcList = [linFunc, sqFunc]
    ax = plt.subplot(1,2,1)
    # draw plots for timing data (range, not Python-2 xrange)
    for dat_mode in range(0, 2):
        params = curve_fit(funcList[dat_mode], nTime, time_data[dat_mode])
        x = np.linspace(nTime[0], nTime[-1], 1000)
        if dat_mode == 0:
            [a,b] = params[0]
            y = funcList[dat_mode](x, a, b)
            s = "Fit for GPU: $%.5fx$ + $%.5f$"%(a,b)
        if dat_mode == 1:
            [a,b,c] = params[0]
            y = funcList[dat_mode](x, a, b, c)
            s = "Fit for CPU: $%.5fx^2$ + $%.5fx$ + $%.2f$"%(a,b,c)
        ax.text(0.035, 0.75-dat_mode*0.1, s,
                transform = ax.transAxes,
                fontsize = 16)
        ax.plot(x,y, color='k', linestyle="--", linewidth = 4)
        ax.plot(nTime, time_data[dat_mode], color=colors[dat_mode],
                marker = 'o', label=label_entries_for_time[dat_mode],
                linestyle = 'None')
    # setting axis limits
    plt.xlim([min(nTime)-50, max(nTime)+50])
    plt.ylim([min(min(time_data[0]), min(time_data[1]))*1.3,
              max(max(time_data[0]), max(time_data[1]))*1.3])
    # hiding axis ticks
    # NOTE(review): 'on'/'off' strings here are a matplotlib-1.x idiom;
    # modern matplotlib expects booleans -- confirm on the target version.
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    # adding horizontal grid lines
    ax.yaxis.grid(True)
    # remove axis spines
    ax.spines["top"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["left"].set_visible(False)
    # labels
    plt.xlabel('Maximum number of phase space points')
    plt.ylabel('Runtime (msec)')
    leg = plt.legend(loc='upper left', fancybox=True, numpoints=1)
    leg.get_frame().set_alpha(0.5)
    ###########################################################################
    # The following plots the Matrix Elements for the GPU and CPU respectively
    # on a subplot, on top of each other with their corresponding errors.
    ax = plt.subplot(1,2,2)
    # draw plots for results (range, not Python-2 xrange)
    for dat_mode in range(0, 2):
        ax.errorbar(x=n, y=y_data[dat_mode], yerr=err_data[dat_mode],
                    fmt='o', color=colors[dat_mode], ecolor='black',
                    alpha = 0.3)
        ax.plot(n, y_data[dat_mode,:], marker='o',
                linestyle = 'None', color=colors[dat_mode],
                label=label_entries_for_results[dat_mode])
        ax.fill_between(n, y_data[dat_mode]-err_data[dat_mode],
                        y_data[dat_mode]+err_data[dat_mode],
                        alpha=0.2, edgecolor=edgeColors[dat_mode],
                        facecolor=faceColors[dat_mode],
                        linewidth=4, linestyle='-.', antialiased=True)
    # setting axis limits
    plt.xlim([min(n)-1*0.2, max(n)+1*0.2])
    plt.ylim([min(min(y_data[0]), min(y_data[1]))*1.3,
              max(max(y_data[0]), max(y_data[1]))*1.3])
    # hiding axis ticks
    plt.tick_params(axis="both", which="both", bottom="off", top="off",
                    labelbottom="on", left="off", right="off", labelleft="on")
    # adding horizontal grid lines
    ax.yaxis.grid(True)
    # remove axis spines
    ax.spines["top"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["bottom"].set_visible(False)
    ax.spines["left"].set_visible(False)
    # labels
    plt.xlabel('$\log_2$(Maximum number of phase space points)')
    plt.ylabel('Matrix Element')
    leg = plt.legend(loc='upper left', fancybox=True, numpoints=1)
    leg.get_frame().set_alpha(0.5)
    plt.tight_layout()
    plt.savefig('plots.pdf')
    plt.show()
# Entry point: run all profiling sweeps and hand the data to plotAll.
def main():
    """Profile the placeholder matrix-element code and the O(n)/O(n^2)
    reference workloads, then produce the summary plots."""
    print('\nAnalyzing Algorithms...')
    gpu_n, gpu_time, gpu_me, gpu_err = algoAnalysis(f_MEplaceholder, 8, 20, 'gpu')
    cpu_n, cpu_time, cpu_me, cpu_err = algoAnalysis(f_MEplaceholder, 8, 20, 'cpu')
    lin_n, lin_time, _, _ = algoAnalysis(flinear, 10, 50, 'cpu')
    sq_n, sq_time, _, _ = algoAnalysis(fsquare, 10, 50, 'cpu')
    # plotAll takes the x-axis lists, the linear/quadratic runtimes, and
    # the GPU/CPU matrix elements with their errors.
    plotAll([gpu_n, cpu_n, lin_n, sq_n],
            [lin_time, sq_time],
            [gpu_me, cpu_me],
            [gpu_err, cpu_err])
# call main
if __name__ == '__main__':
    # matplotlib.rcParams.update({'font.family': 'Zapf Chancery'})
    # Run the full profiling-and-plotting workflow when executed directly.
    main()
| [
"scipy.optimize.curve_fit",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.ylabel",
"matplotlib.use",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tick_params",
"numpy.asarray",
"math.log",
"matplotlib.pyplot.figure",
"numpy.linspace",
"matplotlib.pyplot.tight_layout",
"my_timer.timer",
"... | [((496, 517), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (510, 517), False, 'import matplotlib\n'), ((1829, 1842), 'numpy.asarray', 'np.asarray', (['n'], {}), '(n)\n', (1839, 1842), True, 'import numpy as np\n'), ((1859, 1880), 'numpy.asarray', 'np.asarray', (['time_data'], {}), '(time_data)\n', (1869, 1880), True, 'import numpy as np\n'), ((1894, 1912), 'numpy.asarray', 'np.asarray', (['y_data'], {}), '(y_data)\n', (1904, 1912), True, 'import numpy as np\n'), ((1928, 1948), 'numpy.asarray', 'np.asarray', (['err_data'], {}), '(err_data)\n', (1938, 1948), True, 'import numpy as np\n'), ((2326, 2353), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 6)'}), '(figsize=(15, 6))\n', (2336, 2353), True, 'from matplotlib import pyplot as plt\n'), ((2658, 2678), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (2669, 2678), True, 'from matplotlib import pyplot as plt\n'), ((3774, 3904), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'axis': '"""both"""', 'which': '"""both"""', 'bottom': '"""off"""', 'top': '"""off"""', 'labelbottom': '"""on"""', 'left': '"""off"""', 'right': '"""off"""', 'labelleft': '"""on"""'}), "(axis='both', which='both', bottom='off', top='off',\n labelbottom='on', left='off', right='off', labelleft='on')\n", (3789, 3904), True, 'from matplotlib import pyplot as plt\n'), ((4183, 4233), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Maximum number of phase space points"""'], {}), "('Maximum number of phase space points')\n", (4193, 4233), True, 'from matplotlib import pyplot as plt\n'), ((4238, 4266), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Runtime (msec)"""'], {}), "('Runtime (msec)')\n", (4248, 4266), True, 'from matplotlib import pyplot as plt\n'), ((4277, 4333), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""', 'fancybox': '(True)', 'numpoints': '(1)'}), "(loc='upper left', fancybox=True, numpoints=1)\n", (4287, 
4333), True, 'from matplotlib import pyplot as plt\n'), ((4614, 4634), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4625, 4634), True, 'from matplotlib import pyplot as plt\n'), ((5567, 5697), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'axis': '"""both"""', 'which': '"""both"""', 'bottom': '"""off"""', 'top': '"""off"""', 'labelbottom': '"""on"""', 'left': '"""off"""', 'right': '"""off"""', 'labelleft': '"""on"""'}), "(axis='both', which='both', bottom='off', top='off',\n labelbottom='on', left='off', right='off', labelleft='on')\n", (5582, 5697), True, 'from matplotlib import pyplot as plt\n'), ((5976, 6037), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\log_2$(Maximum number of phase space points)"""'], {}), "('$\\\\log_2$(Maximum number of phase space points)')\n", (5986, 6037), True, 'from matplotlib import pyplot as plt\n'), ((6041, 6069), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Matrix Element"""'], {}), "('Matrix Element')\n", (6051, 6069), True, 'from matplotlib import pyplot as plt\n'), ((6080, 6136), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper left"""', 'fancybox': '(True)', 'numpoints': '(1)'}), "(loc='upper left', fancybox=True, numpoints=1)\n", (6090, 6136), True, 'from matplotlib import pyplot as plt\n'), ((6177, 6195), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (6193, 6195), True, 'from matplotlib import pyplot as plt\n'), ((6201, 6225), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""plots.pdf"""'], {}), "('plots.pdf')\n", (6212, 6225), True, 'from matplotlib import pyplot as plt\n'), ((6230, 6240), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6238, 6240), True, 'from matplotlib import pyplot as plt\n'), ((2760, 2817), 'scipy.optimize.curve_fit', 'curve_fit', (['funcList[dat_mode]', 'nTime', 'time_data[dat_mode]'], {}), '(funcList[dat_mode], nTime, time_data[dat_mode])\n', (2769, 2817), False, 'from scipy.optimize 
import curve_fit\n'), ((2830, 2868), 'numpy.linspace', 'np.linspace', (['nTime[0]', 'nTime[-1]', '(1000)'], {}), '(nTime[0], nTime[-1], 1000)\n', (2841, 2868), True, 'import numpy as np\n'), ((1545, 1552), 'my_timer.timer', 'timer', ([], {}), '()\n', (1550, 1552), False, 'from my_timer import timer\n'), ((2046, 2055), 'math.log', 'log', (['x', '(2)'], {}), '(x, 2)\n', (2049, 2055), False, 'from math import log\n'), ((794, 815), 'scipy.special.jv', 'sp.special.jv', (['(2.5)', 'x'], {}), '(2.5, x)\n', (807, 815), True, 'import scipy as sp\n'), ((927, 948), 'scipy.special.jv', 'sp.special.jv', (['(2.5)', 'x'], {}), '(2.5, x)\n', (940, 948), True, 'import scipy as sp\n')] |