| code (string, 22-1.05M chars) | apis (list, 1-3.31k items) | extract_api (string, 75-3.25M chars) |
|---|---|---|
try:
import bmtk.simulator.bionet as bionet
from bmtk.simulator.bionet.gids import GidPool
from bmtk.simulator.bionet.pyfunction_cache import *
from neuron import h
h.load_file('stdrun.hoc')
nrn_installed = True
except ImportError:
nrn_installed = False
has_mechanism = False
if nrn_installed:
try:
vecstim = h.VecStim()
has_mechanism = True
except AttributeError:
has_mechanism = False
|
[
"neuron.h.VecStim",
"neuron.h.load_file"
] |
[((186, 211), 'neuron.h.load_file', 'h.load_file', (['"""stdrun.hoc"""'], {}), "('stdrun.hoc')\n", (197, 211), False, 'from neuron import h\n'), ((358, 369), 'neuron.h.VecStim', 'h.VecStim', ([], {}), '()\n', (367, 369), False, 'from neuron import h\n')]
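The sample above illustrates a common feature-detection idiom: optional dependencies (here NEURON via bmtk) are probed once at import time and the results are cached in module-level flags, so callers can branch cheaply. A minimal self-contained sketch of the same idiom, using numpy as a hypothetical optional dependency:

try:
    import numpy as np  # hypothetical optional dependency
    numpy_installed = True
except ImportError:
    numpy_installed = False

def mean(values):
    # Take the fast path when the optional dependency is present.
    if numpy_installed:
        return float(np.mean(values))
    return sum(values) / len(values)

print(mean([1, 2, 3]))  # 2.0 either way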
|
import sqlalchemy as sa
import sqlalchemy.ext as ext
import sqlalchemy.ext.declarative
import sqlalchemy.orm as orm
from .database import Base
class InterestingTrend:
def __init__(self, title, description):
self.title = title
self.description = description
def to_dict(self):
return {
'title': self.title,
'description': self.description
}
@staticmethod
def from_dict(dct):
return InterestingTrend(dct['title'], dct['description'])
class DateHeat(Base):
__tablename__ = 'dateheat'
date = sa.Column(sa.Date(), primary_key=True)
heat = sa.Column(sa.String())
peaks = sa.Column(sa.String())
interest = sa.Column(sa.String())
def __init__(self, date, heat, peaks, interest):
self.date = date
self.heat = heat
self.peaks = peaks
self.interest = interest
def __repr__(self):
return '<DateHeat {}>'.format(self.date)
class DateLink(Base):
__tablename__ = 'datelink'
date = sa.Column(sa.Date(), primary_key=True)
hid = sa.Column(sa.String())
def __init__(self, date, hid):
self.date = date
self.hid = hid
def __repr__(self):
return '<DateLink {} - {}>'.format(self.date, self.hid)
|
[
"sqlalchemy.String",
"sqlalchemy.Date"
] |
[((601, 610), 'sqlalchemy.Date', 'sa.Date', ([], {}), '()\n', (608, 610), True, 'import sqlalchemy as sa\n'), ((651, 662), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (660, 662), True, 'import sqlalchemy as sa\n'), ((686, 697), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (695, 697), True, 'import sqlalchemy as sa\n'), ((724, 735), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (733, 735), True, 'import sqlalchemy as sa\n'), ((1050, 1059), 'sqlalchemy.Date', 'sa.Date', ([], {}), '()\n', (1057, 1059), True, 'import sqlalchemy as sa\n'), ((1099, 1110), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1108, 1110), True, 'import sqlalchemy as sa\n')]
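A usage sketch for models like these; it is self-contained with its own in-memory engine and declarative base, since the original Base lives in the project's local .database module, and the custom __init__ is dropped so the declarative default keyword constructor applies:

import datetime
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class DateLink(Base):
    __tablename__ = 'datelink'
    date = sa.Column(sa.Date(), primary_key=True)
    hid = sa.Column(sa.String())

engine = sa.create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(DateLink(date=datetime.date(2024, 1, 1), hid='abc123'))
    session.commit()
    print(session.get(DateLink, datetime.date(2024, 1, 1)).hid)  # abc123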
|
import json
import requests
class GimmeProxyAPI(object):
    """Minimal client for the gimmeproxy.com getProxy endpoint."""
    def __init__(self, **args):
        self.base_url = "https://gimmeproxy.com/api/getProxy"
        self.response = self.get_proxy(**args)
    def get_response(self):
        return self.response
    def get_base_url(self):
        return self.base_url
    def get_proxy(self, **args):
        request = requests.get(self.base_url, params=args)
        if request.status_code == 200:
            self.response = request.json()
        else:
            raise Exception("An unknown error occurred, status_code = {}".format(request.status_code))
        return self.response
def get_curl(self):
curl = self.response["curl"]
return curl
def get_ip_port(self):
ip_port = self.response["ipPort"]
return ip_port
def get_port(self):
port = self.response["port"]
return port
def get_ip(self):
ip = self.response["ip"]
return ip
|
[
"requests.get"
] |
[((431, 471), 'requests.get', 'requests.get', (['self.base_url'], {'params': 'args'}), '(self.base_url, params=args)\n', (443, 471), False, 'import requests\n')]
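Hypothetical usage of the client above (network access required; protocol is one of the query parameters the public GimmeProxy endpoint accepts, passed straight through as params):

proxy = GimmeProxyAPI(protocol="http")
print(proxy.get_ip_port())  # e.g. "203.0.113.7:8080"
print(proxy.get_curl())     # ready-to-use curl proxy string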
|
from __future__ import absolute_import, division, print_function
from tap.api_resources.abstract.createable_api_resource import CreateableAPIResource
from tap.api_resources.abstract.updateable_api_resource import UpdateableAPIResource
from tap.api_resources.abstract.deleteable_api_resource import DeleteableAPIResource
from tap.api_resources.abstract.listeable_api_resource import ListeableAPIResource
import tap
@tap.api_resources.abstract.nested_resource_class_methods(
'card',
operations=['create', 'retrieve', 'delete', 'list']
)
class Customer(CreateableAPIResource,
UpdateableAPIResource,
DeleteableAPIResource,
ListeableAPIResource):
OBJECT_NAME = 'customer'
|
[
"tap.api_resources.abstract.nested_resource_class_methods"
] |
[((419, 541), 'tap.api_resources.abstract.nested_resource_class_methods', 'tap.api_resources.abstract.nested_resource_class_methods', (['"""card"""'], {'operations': "['create', 'retrieve', 'delete', 'list']"}), "('card', operations\n =['create', 'retrieve', 'delete', 'list'])\n", (475, 541), False, 'import tap\n')]
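The decorator above follows the Stripe-SDK pattern of synthesizing <operation>_<resource> class methods for nested REST resources. A stripped-down sketch of how such a decorator can be built (generic Python; not the actual tap implementation):

def nested_resource_class_methods(resource, operations=()):
    """Attach '<op>_<resource>' classmethods to the decorated class."""
    def wrapper(cls):
        for op in operations:
            def method(klass, parent_id, _op=op):
                # A real SDK would issue an HTTP request here; we just
                # build the nested URL to show the shape of the API.
                return "%s /%ss/%s/%ss" % (_op.upper(), klass.OBJECT_NAME, parent_id, resource)
            setattr(cls, "%s_%s" % (op, resource), classmethod(method))
        return cls
    return wrapper

@nested_resource_class_methods('card', operations=['create', 'list'])
class Customer:
    OBJECT_NAME = 'customer'

print(Customer.list_card('cus_123'))  # LIST /customers/cus_123/cards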
|
# This file is part of Pynguin.
#
# Pynguin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pynguin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pynguin. If not, see <https://www.gnu.org/licenses/>.
from unittest.mock import MagicMock
import pytest
import pynguin.ga.chromosome as chrom
import pynguin.ga.fitnessfunction as ff
from pynguin.ga.chromosome import Chromosome
@pytest.fixture
def fitness_function():
return MagicMock(ff.FitnessFunction)
@pytest.fixture
def chromosome():
class DummyChromosome(chrom.Chromosome):
def size(self) -> int:
return 0
def clone(self) -> Chromosome:
pass
def cross_over(
self, other: chrom.Chromosome, position1: int, position2: int
) -> None:
pass
return DummyChromosome()
def test_fitness_no_fitness_values(chromosome):
with pytest.raises(AssertionError):
assert chromosome.get_fitness()
def test_fitness_one_fitness_function(chromosome, fitness_function):
chromosome.add_fitness_function(fitness_function)
chromosome._update_fitness_values(fitness_function, ff.FitnessValues(5, 0.9))
chromosome.set_changed(False)
assert chromosome.get_fitness() == 5
assert chromosome.get_coverage() == 0.9
def test_fitness_two_fitness_functions(chromosome, fitness_function):
chromosome.add_fitness_function(fitness_function)
chromosome._update_fitness_values(fitness_function, ff.FitnessValues(0.42, 0.1))
fitness_func2 = MagicMock(ff.FitnessFunction)
chromosome.add_fitness_function(fitness_func2)
chromosome._update_fitness_values(fitness_func2, ff.FitnessValues(0.23, 0.5))
chromosome.set_changed(False)
assert chromosome.get_fitness() == 0.65
assert chromosome.get_coverage() == 0.3
def test_values_for_fitness_function(chromosome, fitness_function):
chromosome.add_fitness_function(fitness_function)
chromosome._update_fitness_values(fitness_function, ff.FitnessValues(5, 0.5))
chromosome.set_changed(False)
assert chromosome.get_fitness_for(fitness_function) == 5
assert chromosome.get_coverage_for(fitness_function) == 0.5
def test_has_changed_default(chromosome):
assert chromosome.has_changed()
def test_has_changed(chromosome):
chromosome.set_changed(False)
assert not chromosome.has_changed()
def test_caching(chromosome, fitness_function):
fitness_function.compute_fitness_values.side_effect = [
ff.FitnessValues(5, 0.5),
ff.FitnessValues(6, 0.6),
]
chromosome.add_fitness_function(fitness_function)
assert chromosome.get_fitness() == 5
assert chromosome.get_coverage() == 0.5
assert not chromosome.has_changed()
assert chromosome.get_number_of_evaluations() == 1
chromosome.set_changed(True)
assert chromosome.get_fitness() == 6
assert chromosome.get_coverage() == 0.6
assert not chromosome.has_changed()
assert chromosome.get_number_of_evaluations() == 2
def test_illegal_values(chromosome, fitness_function):
fitness_function.compute_fitness_values.return_value = ff.FitnessValues(-1, 1.5)
chromosome.add_fitness_function(fitness_function)
with pytest.raises(RuntimeError):
chromosome.get_fitness()
def test_get_fitness_functions(chromosome):
func1 = MagicMock(ff.FitnessFunction)
func2 = MagicMock(ff.FitnessFunction)
chromosome.add_fitness_function(func1)
chromosome.add_fitness_function(func2)
assert chromosome.get_fitness_functions() == [func1, func2]
|
[
"pynguin.ga.fitnessfunction.FitnessValues",
"pytest.raises",
"unittest.mock.MagicMock"
] |
[((903, 932), 'unittest.mock.MagicMock', 'MagicMock', (['ff.FitnessFunction'], {}), '(ff.FitnessFunction)\n', (912, 932), False, 'from unittest.mock import MagicMock\n'), ((1975, 2004), 'unittest.mock.MagicMock', 'MagicMock', (['ff.FitnessFunction'], {}), '(ff.FitnessFunction)\n', (1984, 2004), False, 'from unittest.mock import MagicMock\n'), ((3563, 3588), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(-1)', '(1.5)'], {}), '(-1, 1.5)\n', (3579, 3588), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((3772, 3801), 'unittest.mock.MagicMock', 'MagicMock', (['ff.FitnessFunction'], {}), '(ff.FitnessFunction)\n', (3781, 3801), False, 'from unittest.mock import MagicMock\n'), ((3814, 3843), 'unittest.mock.MagicMock', 'MagicMock', (['ff.FitnessFunction'], {}), '(ff.FitnessFunction)\n', (3823, 3843), False, 'from unittest.mock import MagicMock\n'), ((1347, 1376), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1360, 1376), False, 'import pytest\n'), ((1599, 1623), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(5)', '(0.9)'], {}), '(5, 0.9)\n', (1615, 1623), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((1926, 1953), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(0.42)', '(0.1)'], {}), '(0.42, 0.1)\n', (1942, 1953), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((2109, 2136), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(0.23)', '(0.5)'], {}), '(0.23, 0.5)\n', (2125, 2136), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((2440, 2464), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(5)', '(0.5)'], {}), '(5, 0.5)\n', (2456, 2464), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((2933, 2957), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(5)', '(0.5)'], {}), '(5, 0.5)\n', (2949, 2957), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((2967, 2991), 'pynguin.ga.fitnessfunction.FitnessValues', 'ff.FitnessValues', (['(6)', '(0.6)'], {}), '(6, 0.6)\n', (2983, 2991), True, 'import pynguin.ga.fitnessfunction as ff\n'), ((3652, 3679), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (3665, 3679), False, 'import pytest\n')]
|
from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey
# Choose one of these image paths.
# IMAGE_PATH = "../0_assets/cmyk_paint.png"
IMAGE_PATH = "../0_assets/RGB_paint.png"
DISPLAY_WINDOW_COLOR_STRING = [
"Blue",
"Green",
"Red",
]
image = imread(IMAGE_PATH)
# Get Color Buffer to Store in BGR Style Format.
B, G, R = split(image)
# Show the original before showing each in color channel.
imshow("Original Image of %s" % IMAGE_PATH, image)
# Iterate for each Color Channel. Do not invoke wait signal for each window to see the comparison.
for idx, eachColors in enumerate([B, G, R]):
imshow(
"%s Color Representation | %s" % (DISPLAY_WINDOW_COLOR_STRING[idx], IMAGE_PATH),
eachColors,
)
imwrite(
"rgb_%s_color.png" % DISPLAY_WINDOW_COLOR_STRING[idx].lower(),
eachColors,
)
# Note that each color channel is displayed as a single-channel (grayscale) image.
# ! The lighter a pixel is, the stronger that channel's contribution to the color.
# Wait for a key press before terminating.
waitKey(0)
destroyAllWindows()
|
[
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.imread",
"cv2.split",
"cv2.imshow"
] |
[((276, 294), 'cv2.imread', 'imread', (['IMAGE_PATH'], {}), '(IMAGE_PATH)\n', (282, 294), False, 'from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey\n'), ((355, 367), 'cv2.split', 'split', (['image'], {}), '(image)\n', (360, 367), False, 'from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey\n'), ((427, 477), 'cv2.imshow', 'imshow', (["('Original Image of %s' % IMAGE_PATH)", 'image'], {}), "('Original Image of %s' % IMAGE_PATH, image)\n", (433, 477), False, 'from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey\n'), ((1045, 1055), 'cv2.waitKey', 'waitKey', (['(0)'], {}), '(0)\n', (1052, 1055), False, 'from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey\n'), ((627, 730), 'cv2.imshow', 'imshow', (["('%s Color Representation | %s' % (DISPLAY_WINDOW_COLOR_STRING[idx],\n IMAGE_PATH))", 'eachColors'], {}), "('%s Color Representation | %s' % (DISPLAY_WINDOW_COLOR_STRING[idx],\n IMAGE_PATH), eachColors)\n", (633, 730), False, 'from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey\n'), ((1061, 1080), 'cv2.destroyAllWindows', 'destroyAllWindows', ([], {}), '()\n', (1078, 1080), False, 'from cv2 import destroyAllWindows, imread, imshow, imwrite, split, waitKey\n')]
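As a complementary check, cv2.merge recombines what cv2.split separated; the sketch below uses a tiny synthetic image so it runs without any file on disk:

import numpy as np
from cv2 import merge, split

image = np.zeros((2, 2, 3), dtype=np.uint8)  # synthetic 2x2 BGR image
image[..., 0] = 255  # maximum blue everywhere
b, g, r = split(image)
restored = merge([b, g, r])
assert (restored == image).all()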
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="Todo-List",
    version="2.0",
description='a todo-list cli',
author='<NAME>',
license='MIT',
url='http://github.com/Jonas-Luetolf/Todo-List',
python_requires='>=3.10',
    install_requires=[
        'PyYAML>=3.12',
    ],
    package_dir={'': '.'},
    packages=['table', 'todo_list'],
scripts=['todo-list'],
)
|
[
"distutils.core.setup"
] |
[((96, 408), 'distutils.core.setup', 'setup', ([], {'name': '"""Todo-List"""', 'version': '(2.0)', 'description': '"""a todo-list cli"""', 'author': '"""<NAME>"""', 'license': '"""MIT"""', 'url': '"""http://github.com/Jonas-Luetolf/Todo-List"""', 'python_requires': '""">=3.10"""', 'install_requires': "['PyYAML (>= 3.12)']", 'package_dir': "{'': './'}", 'packages': "['table', 'todo_list']", 'scripts': "['todo-list']"}), "(name='Todo-List', version=2.0, description='a todo-list cli', author=\n '<NAME>', license='MIT', url=\n 'http://github.com/Jonas-Luetolf/Todo-List', python_requires='>=3.10',\n install_requires=['PyYAML (>= 3.12)'], package_dir={'': './'}, packages\n =['table', 'todo_list'], scripts=['todo-list'])\n", (101, 408), False, 'from distutils.core import setup\n')]
|
"""Application Models."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
from .SellerPhoneNumber import SellerPhoneNumber
class StoreManagerSerializer(BaseSchema):
# Catalog swagger.json
mobile_no = fields.Nested(SellerPhoneNumber, required=False)
email = fields.Str(required=False)
name = fields.Str(required=False)
|
[
"marshmallow.fields.Str",
"marshmallow.fields.Nested"
] |
[((317, 365), 'marshmallow.fields.Nested', 'fields.Nested', (['SellerPhoneNumber'], {'required': '(False)'}), '(SellerPhoneNumber, required=False)\n', (330, 365), False, 'from marshmallow import fields, Schema\n'), ((383, 409), 'marshmallow.fields.Str', 'fields.Str', ([], {'required': '(False)'}), '(required=False)\n', (393, 409), False, 'from marshmallow import fields, Schema\n'), ((426, 452), 'marshmallow.fields.Str', 'fields.Str', ([], {'required': '(False)'}), '(required=False)\n', (436, 452), False, 'from marshmallow import fields, Schema\n')]
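Schemas like the one above are exercised through load/dump. A self-contained sketch, substituting plain marshmallow.Schema for the project's BaseSchema and a stand-in SellerPhoneNumber:

from marshmallow import Schema, fields

class SellerPhoneNumber(Schema):
    country_code = fields.Str()
    number = fields.Str()

class StoreManagerSerializer(Schema):
    mobile_no = fields.Nested(SellerPhoneNumber, required=False)
    email = fields.Str(required=False)
    name = fields.Str(required=False)

data = StoreManagerSerializer().load(
    {"name": "Asha", "mobile_no": {"country_code": "91", "number": "5550100"}}
)
print(data["name"])  # Asha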
|
from typing import List, Iterator, Optional
import argparse
import sys
import json
from overrides import overrides
from allennlp.commands.subcommand import Subcommand
from allennlp.common.checks import check_for_gpu, ConfigurationError
from allennlp.common.file_utils import cached_path
from allennlp.common.util import sanitize
from allennlp.models.archival import load_archive
from allennlp.predictors.predictor import Predictor, JsonDict
from allennlp.data import Instance
from nominal_srl.nominal_srl_predictor import NominalSemanticRoleLabelerPredictor
import predict_utils
desc = "Run nominal SRL predictor on a single sentence."
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("archive_file", type=str, help="the archived model to make predictions with")
parser.add_argument('-s', '--input_sentence', type=str, help="the sentence to predict on", required=True)
parser.add_argument('-i', '--nom_indices', nargs='*', type=int, help="the indices of the nominal predicates", required=True)
parser.add_argument("--cuda_device", type=int, default=-1, help="id of GPU to use (if any)")
parser.add_argument('-o', '--output_file', type=str, default="output.txt", help="path to output file")
parser.add_argument('-ta', '--text_annotation', default=False, action='store_true', help="specify whether to produce the output in text annotation form")
args = parser.parse_args()
def _get_predictor(args) -> NominalSemanticRoleLabelerPredictor:
check_for_gpu(args.cuda_device)
archive = load_archive(
args.archive_file,
cuda_device=args.cuda_device,
)
return NominalSemanticRoleLabelerPredictor.from_archive(archive, "nombank-semantic-role-labeling")
class _PredictManager:
def __init__(
self,
predictor: NominalSemanticRoleLabelerPredictor,
input_sentence: str,
indices: List[int],
output_file: Optional[str],
write_ta: bool,
) -> None:
self._predictor = predictor
self._indices = indices
self._input_sentence = input_sentence
if output_file is not None:
self._output_file = open(output_file, "w")
else:
self._output_file = None
self._write_ta = write_ta
self.generator = "nominal_srl.nom_predict_sentence"
def create_text_annotation(
self, srl_output: JsonDict
) -> JsonDict:
ta= {"corpusId": "", "id": ""}
tokens = srl_output.pop("words")
text = self._input_sentence
ta["text"] = text
ta["tokens"] = tokens
ta["tokenOffsets"] = predict_utils.create_token_char_offsets(text)
sentence_end_positions = [i+1 for i,x in enumerate(tokens) if x=="."]
sentences = {"generator": self.generator, "score": 1.0, "sentenceEndPositions": sentence_end_positions}
ta["sentences"] = sentences
# Create views.
views = []
views.append(predict_utils.create_sentence_view(tokens))
views.append(predict_utils.create_tokens_view(tokens))
views.append(self.create_srl_nom_view(srl_output.pop("nominals")))
ta["views"] = views
return sanitize(ta)
def create_srl_nom_view(
self, nom_srl_frames
) -> JsonDict:
srl_nom_view = {"viewName": "SRL_NOM_NOMBANK"}
constituents = []
relations = []
for frame in nom_srl_frames:
predicate = frame.pop("nominal")
description = frame.pop("description")
tags = frame.pop("tags")
predicate_idx = frame.pop("predicate_index")
properties = {"SenseNumber": "NA", "predicate": predicate}
if len(predicate_idx)>1:
print('Multiple indices of predicate. Using first.')
constituent = {"label": "Predicate", "score": 1.0, "start": predicate_idx[0], "end": predicate_idx[0]+1, "properties": properties}
predicate_constituent_idx = len(constituents)
constituents.append(constituent)
active_tag = ""
active_tag_start_idx = -1
for tag_idx, tag in enumerate(tags):
if tag in {"O", "B-V"}:
if active_tag != "":
constituent = {"label": active_tag, "score": 1.0, "start": active_tag_start_idx, "end": tag_idx}
relation = {"relationName": active_tag, "srcConstituent": predicate_constituent_idx, "targetConstituent": len(constituents)}
relations.append(relation)
constituents.append(constituent)
active_tag = ""
active_tag_start_idx = -1
continue
if tag[2:] == active_tag:
continue
else:
if active_tag != "":
constituent = {"label": active_tag, "score": 1.0, "start": active_tag_start_idx, "end": tag_idx}
relation = {"relationName": active_tag, "srcConstituent": predicate_constituent_idx, "targetContituent": len(constituents)}
relations.append(relation)
constituents.append(constituent)
                    active_tag = tag[2:]
                    active_tag_start_idx = tag_idx
            # Flush a tag span that is still open when the sequence ends.
            if active_tag != "":
                constituent = {"label": active_tag, "score": 1.0, "start": active_tag_start_idx, "end": len(tags)}
                relation = {"relationName": active_tag, "srcConstituent": predicate_constituent_idx, "targetConstituent": len(constituents)}
                relations.append(relation)
                constituents.append(constituent)
nom_view_data = [{"viewType": "", "viewName": "SRL_NOM_NOMBANK", "generator": self.generator, "score": 1.0, "constituents": constituents, "relations": relations}]
srl_nom_view["viewData"] = nom_view_data
return srl_nom_view
def _print_to_file(
self, prediction: str
) -> None:
if self._output_file is not None:
self._output_file.write(prediction)
self._output_file.close()
else:
print("No output file was specified. Writing to STDOUT instead.")
print(prediction)
def run(self) -> None:
result = self._predictor.predict(self._input_sentence, self._indices)
print('OUTPUT_DICT: ', result)
if self._write_ta:
ta = self.create_text_annotation(result)
self._print_to_file(json.dumps(ta, indent=4))
else:
self._print_to_file(json.dumps(result, indent=4))
predictor = _get_predictor(args)
manager = _PredictManager(
predictor,
args.input_sentence,
args.nom_indices,
args.output_file,
args.text_annotation,
)
manager.run()
|
[
"allennlp.common.checks.check_for_gpu",
"argparse.ArgumentParser",
"predict_utils.create_tokens_view",
"allennlp.common.util.sanitize",
"predict_utils.create_token_char_offsets",
"json.dumps",
"predict_utils.create_sentence_view",
"allennlp.models.archival.load_archive",
"nominal_srl.nominal_srl_predictor.NominalSemanticRoleLabelerPredictor.from_archive"
] |
[((651, 692), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (674, 692), False, 'import argparse\n'), ((1471, 1502), 'allennlp.common.checks.check_for_gpu', 'check_for_gpu', (['args.cuda_device'], {}), '(args.cuda_device)\n', (1484, 1502), False, 'from allennlp.common.checks import check_for_gpu, ConfigurationError\n'), ((1517, 1578), 'allennlp.models.archival.load_archive', 'load_archive', (['args.archive_file'], {'cuda_device': 'args.cuda_device'}), '(args.archive_file, cuda_device=args.cuda_device)\n', (1529, 1578), False, 'from allennlp.models.archival import load_archive\n'), ((1629, 1724), 'nominal_srl.nominal_srl_predictor.NominalSemanticRoleLabelerPredictor.from_archive', 'NominalSemanticRoleLabelerPredictor.from_archive', (['archive', '"""nombank-semantic-role-labeling"""'], {}), "(archive,\n 'nombank-semantic-role-labeling')\n", (1677, 1724), False, 'from nominal_srl.nominal_srl_predictor import NominalSemanticRoleLabelerPredictor\n'), ((2606, 2651), 'predict_utils.create_token_char_offsets', 'predict_utils.create_token_char_offsets', (['text'], {}), '(text)\n', (2645, 2651), False, 'import predict_utils\n'), ((3168, 3180), 'allennlp.common.util.sanitize', 'sanitize', (['ta'], {}), '(ta)\n', (3176, 3180), False, 'from allennlp.common.util import sanitize\n'), ((2943, 2985), 'predict_utils.create_sentence_view', 'predict_utils.create_sentence_view', (['tokens'], {}), '(tokens)\n', (2977, 2985), False, 'import predict_utils\n'), ((3008, 3048), 'predict_utils.create_tokens_view', 'predict_utils.create_tokens_view', (['tokens'], {}), '(tokens)\n', (3040, 3048), False, 'import predict_utils\n'), ((6134, 6158), 'json.dumps', 'json.dumps', (['ta'], {'indent': '(4)'}), '(ta, indent=4)\n', (6144, 6158), False, 'import json\n'), ((6206, 6234), 'json.dumps', 'json.dumps', (['result'], {'indent': '(4)'}), '(result, indent=4)\n', (6216, 6234), False, 'import json\n')]
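The tag-walking loop in create_srl_nom_view is standard BIO span extraction. A compact, self-contained variant of the same idea (it also flushes a span left open at the end of the sequence, via a sentinel):

def bio_spans(tags):
    """Extract (label, start, end) spans from a BIO tag sequence."""
    spans, active, start = [], "", -1
    for i, tag in enumerate(tags + ["O"]):  # sentinel flushes the last span
        label = tag[2:] if tag != "O" else ""
        if label != active:
            if active:
                spans.append((active, start, i))
            active, start = label, i
    return spans

print(bio_spans(["B-ARG0", "I-ARG0", "O", "B-V"]))
# [('ARG0', 0, 2), ('V', 3, 4)]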
|
from accelerator.managers.member_profile_manager import MemberProfileManager
from accelerator.models import CoreProfile
class MemberProfile(CoreProfile):
user_type = 'member'
default_page = "member_homepage"
objects = MemberProfileManager()
class Meta:
db_table = 'accelerator_memberprofile'
|
[
"accelerator.managers.member_profile_manager.MemberProfileManager"
] |
[((233, 255), 'accelerator.managers.member_profile_manager.MemberProfileManager', 'MemberProfileManager', ([], {}), '()\n', (253, 255), False, 'from accelerator.managers.member_profile_manager import MemberProfileManager\n')]
|
# OrdiNeu's auto incrementor for Dugnutt
import keyboard
import wx
# Globals
Filename = "test.txt"
Format = "Number of times pressed: {}"
count = 0
hotkey = "ctrl+alt+z"
dehotkey = "ctrl+alt+x"
error = ""
refresh = None
# Callback to automatically write in the text file
def changeCount(amount, auto):
global count
global error
if (auto):
count += amount
try:
with open(Filename, 'w') as f:
f.write(Format.format(count))
error = ""
if refresh is not None:
refresh()
except Exception as e:
error = str(e)
def increment(autoIncrement=True):
changeCount(+1, autoIncrement)
def decrement(autoDecrement=True):
changeCount(-1, autoDecrement)
# Setup the Keyboard
keyboard.add_hotkey(hotkey, increment)
keyboard.add_hotkey(dehotkey, decrement)
increment(autoIncrement=False)
# Class for the UI
class IncrementorUI(wx.Frame):
def __init__(self, *args, **kwargs):
super(IncrementorUI, self).__init__(*args, **kwargs)
self.InitUI()
def InitUI(self):
self.panel = wx.Panel(self)
vbox = wx.BoxSizer(wx.VERTICAL)
# Filename input
filenamePanel = wx.Panel(self.panel)
f_hbox = wx.BoxSizer(wx.HORIZONTAL)
inputLabel = wx.StaticText(filenamePanel, label="Filename: ")
self.input = wx.StaticText(filenamePanel, label=Filename)
self.fileNameSelector = wx.Button(filenamePanel, label="Select")
self.fileNameSelector.Bind(wx.EVT_BUTTON, self.OpenFileDialog)
f_hbox.Add(inputLabel, wx.LEFT)
f_hbox.Add(self.input, wx.EXPAND)
f_hbox.Add(self.fileNameSelector, wx.RIGHT)
filenamePanel.SetSizer(f_hbox)
# Format input
formatPanel = wx.Panel(self.panel)
fo_hbox = wx.BoxSizer(wx.HORIZONTAL)
formatLabel = wx.StaticText(formatPanel, label="Format: ")
self.format = wx.TextCtrl(formatPanel, value=Format)
self.format.Bind(wx.EVT_TEXT, self.SetFormat)
fo_hbox.Add(formatLabel, wx.LEFT)
fo_hbox.Add(self.format, wx.EXPAND)
formatPanel.SetSizer(fo_hbox)
# Count input
countPanel = wx.Panel(self.panel)
co_hbox = wx.BoxSizer(wx.HORIZONTAL)
countLabel = wx.StaticText(countPanel, label="Count: ")
self.count = wx.SpinCtrl(countPanel, value=str(count), min=-99999999, max=99999999)
self.count.Bind(wx.EVT_TEXT, self.SetCount)
co_hbox.Add(countLabel, wx.LEFT)
co_hbox.Add(self.count, wx.EXPAND)
countPanel.SetSizer(co_hbox)
# Hotkey input
hotkeyPanel = wx.Panel(self.panel)
hk_hbox = wx.BoxSizer(wx.HORIZONTAL)
hotkeyLabel = wx.StaticText(hotkeyPanel, label="+1 Hotkey: ")
self.hotkey = wx.TextCtrl(hotkeyPanel, value=hotkey)
self.hotkeySelector = wx.Button(hotkeyPanel, label="Set hotkey")
self.hotkeySelector.Bind(wx.EVT_BUTTON, self.StartListen)
hk_hbox.Add(hotkeyLabel, wx.LEFT)
hk_hbox.Add(self.hotkey, wx.EXPAND)
hk_hbox.Add(self.hotkeySelector, wx.RIGHT)
hotkeyPanel.SetSizer(hk_hbox)
# Hotkey input
dehotkeyPanel = wx.Panel(self.panel)
dehk_hbox = wx.BoxSizer(wx.HORIZONTAL)
dehotkeyLabel = wx.StaticText(dehotkeyPanel, label="-1 Hotkey: ")
self.dehotkey = wx.TextCtrl(dehotkeyPanel, value=dehotkey)
self.dehotkeySelector = wx.Button(dehotkeyPanel, label="Set hotkey")
self.dehotkeySelector.Bind(wx.EVT_BUTTON, self.StartDecrementListen)
dehk_hbox.Add(dehotkeyLabel, wx.LEFT)
dehk_hbox.Add(self.dehotkey, wx.EXPAND)
dehk_hbox.Add(self.dehotkeySelector, wx.RIGHT)
dehotkeyPanel.SetSizer(dehk_hbox)
# Error input
self.ErrorLabel = wx.StaticText(self.panel, label=error)
# Outer panel
vbox.Add(filenamePanel)
vbox.Add(formatPanel)
vbox.Add(countPanel)
vbox.Add(hotkeyPanel)
vbox.Add(dehotkeyPanel)
vbox.Add(self.ErrorLabel)
self.panel.SetSizer(vbox)
def OpenFileDialog(self, e):
global Filename
with wx.FileDialog(self, 'Select File', wildcard="Text file(*.txt)|*.txt", style=wx.FD_SAVE) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return
Filename = fileDialog.GetPath()
self.RefreshUI()
def RefreshUI(self):
self.count.Unbind(wx.EVT_TEXT)
self.format.Unbind(wx.EVT_TEXT)
self.input.SetLabel(Filename)
self.count.SetValue(count)
self.ErrorLabel.SetLabel(error)
self.count.Bind(wx.EVT_TEXT, self.SetCount)
self.format.Bind(wx.EVT_TEXT, self.SetFormat)
def SetFormat(self, e):
global Format
Format = e.GetString()
self.RefreshUI()
increment(False)
def SetCount(self, e):
global count
count = int(e.GetString())
self.RefreshUI()
increment(False)
def StartListen(self, e):
self.hotkey.SetValue("Listening for keypress...")
self.keyboardHook = keyboard.hook(self.EndListen)
def EndListen(self, e):
global hotkey
keyboard.remove_hotkey(hotkey)
# Only remove the listen status if it isn't a modifier
hotkey = keyboard.get_hotkey_name()
keyboard.add_hotkey(hotkey, increment)
if not keyboard.is_modifier(e.name):
keyboard.unhook(self.keyboardHook)
self.hotkey.SetValue(hotkey)
self.RefreshUI()
increment(False)
def StartDecrementListen(self, e):
self.dehotkey.SetValue("Listening for keypress...")
self.deKeyboardHook = keyboard.hook(self.EndDecrementListen)
def EndDecrementListen(self, e):
global dehotkey
keyboard.remove_hotkey(dehotkey)
# Only remove the listen status if it isn't a modifier
dehotkey = keyboard.get_hotkey_name()
keyboard.add_hotkey(dehotkey, decrement)
if not keyboard.is_modifier(e.name):
keyboard.unhook(self.deKeyboardHook)
self.dehotkey.SetValue(dehotkey)
self.RefreshUI()
decrement(False)
app = wx.App()
frame = IncrementorUI(None, title="OrdiNeu's Auto-incrementor for Dugnutt", style=wx.CLOSE_BOX | wx.CAPTION | wx.RESIZE_BORDER)
refresh = frame.RefreshUI
frame.Show()
app.MainLoop()
|
[
"wx.BoxSizer",
"keyboard.remove_hotkey",
"keyboard.unhook",
"wx.Panel",
"wx.StaticText",
"wx.Button",
"wx.TextCtrl",
"wx.App",
"keyboard.add_hotkey",
"keyboard.hook",
"keyboard.get_hotkey_name",
"wx.FileDialog",
"keyboard.is_modifier"
] |
[((755, 793), 'keyboard.add_hotkey', 'keyboard.add_hotkey', (['hotkey', 'increment'], {}), '(hotkey, increment)\n', (774, 793), False, 'import keyboard\n'), ((794, 834), 'keyboard.add_hotkey', 'keyboard.add_hotkey', (['dehotkey', 'decrement'], {}), '(dehotkey, decrement)\n', (813, 834), False, 'import keyboard\n'), ((6164, 6172), 'wx.App', 'wx.App', ([], {}), '()\n', (6170, 6172), False, 'import wx\n'), ((1085, 1099), 'wx.Panel', 'wx.Panel', (['self'], {}), '(self)\n', (1093, 1099), False, 'import wx\n'), ((1115, 1139), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (1126, 1139), False, 'import wx\n'), ((1190, 1210), 'wx.Panel', 'wx.Panel', (['self.panel'], {}), '(self.panel)\n', (1198, 1210), False, 'import wx\n'), ((1228, 1254), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (1239, 1254), False, 'import wx\n'), ((1276, 1324), 'wx.StaticText', 'wx.StaticText', (['filenamePanel'], {'label': '"""Filename: """'}), "(filenamePanel, label='Filename: ')\n", (1289, 1324), False, 'import wx\n'), ((1346, 1390), 'wx.StaticText', 'wx.StaticText', (['filenamePanel'], {'label': 'Filename'}), '(filenamePanel, label=Filename)\n', (1359, 1390), False, 'import wx\n'), ((1423, 1463), 'wx.Button', 'wx.Button', (['filenamePanel'], {'label': '"""Select"""'}), "(filenamePanel, label='Select')\n", (1432, 1463), False, 'import wx\n'), ((1754, 1774), 'wx.Panel', 'wx.Panel', (['self.panel'], {}), '(self.panel)\n', (1762, 1774), False, 'import wx\n'), ((1793, 1819), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (1804, 1819), False, 'import wx\n'), ((1842, 1886), 'wx.StaticText', 'wx.StaticText', (['formatPanel'], {'label': '"""Format: """'}), "(formatPanel, label='Format: ')\n", (1855, 1886), False, 'import wx\n'), ((1909, 1947), 'wx.TextCtrl', 'wx.TextCtrl', (['formatPanel'], {'value': 'Format'}), '(formatPanel, value=Format)\n', (1920, 1947), False, 'import wx\n'), ((2170, 2190), 'wx.Panel', 'wx.Panel', (['self.panel'], {}), '(self.panel)\n', (2178, 2190), False, 'import wx\n'), ((2209, 2235), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (2220, 2235), False, 'import wx\n'), ((2257, 2299), 'wx.StaticText', 'wx.StaticText', (['countPanel'], {'label': '"""Count: """'}), "(countPanel, label='Count: ')\n", (2270, 2299), False, 'import wx\n'), ((2611, 2631), 'wx.Panel', 'wx.Panel', (['self.panel'], {}), '(self.panel)\n', (2619, 2631), False, 'import wx\n'), ((2650, 2676), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (2661, 2676), False, 'import wx\n'), ((2699, 2746), 'wx.StaticText', 'wx.StaticText', (['hotkeyPanel'], {'label': '"""+1 Hotkey: """'}), "(hotkeyPanel, label='+1 Hotkey: ')\n", (2712, 2746), False, 'import wx\n'), ((2769, 2807), 'wx.TextCtrl', 'wx.TextCtrl', (['hotkeyPanel'], {'value': 'hotkey'}), '(hotkeyPanel, value=hotkey)\n', (2780, 2807), False, 'import wx\n'), ((2838, 2880), 'wx.Button', 'wx.Button', (['hotkeyPanel'], {'label': '"""Set hotkey"""'}), "(hotkeyPanel, label='Set hotkey')\n", (2847, 2880), False, 'import wx\n'), ((3170, 3190), 'wx.Panel', 'wx.Panel', (['self.panel'], {}), '(self.panel)\n', (3178, 3190), False, 'import wx\n'), ((3211, 3237), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (3222, 3237), False, 'import wx\n'), ((3262, 3311), 'wx.StaticText', 'wx.StaticText', (['dehotkeyPanel'], {'label': '"""-1 Hotkey: """'}), "(dehotkeyPanel, label='-1 Hotkey: ')\n", (3275, 3311), False, 'import wx\n'), ((3336, 3378), 
'wx.TextCtrl', 'wx.TextCtrl', (['dehotkeyPanel'], {'value': 'dehotkey'}), '(dehotkeyPanel, value=dehotkey)\n', (3347, 3378), False, 'import wx\n'), ((3411, 3455), 'wx.Button', 'wx.Button', (['dehotkeyPanel'], {'label': '"""Set hotkey"""'}), "(dehotkeyPanel, label='Set hotkey')\n", (3420, 3455), False, 'import wx\n'), ((3773, 3811), 'wx.StaticText', 'wx.StaticText', (['self.panel'], {'label': 'error'}), '(self.panel, label=error)\n', (3786, 3811), False, 'import wx\n'), ((5085, 5114), 'keyboard.hook', 'keyboard.hook', (['self.EndListen'], {}), '(self.EndListen)\n', (5098, 5114), False, 'import keyboard\n'), ((5174, 5204), 'keyboard.remove_hotkey', 'keyboard.remove_hotkey', (['hotkey'], {}), '(hotkey)\n', (5196, 5204), False, 'import keyboard\n'), ((5286, 5312), 'keyboard.get_hotkey_name', 'keyboard.get_hotkey_name', ([], {}), '()\n', (5310, 5312), False, 'import keyboard\n'), ((5321, 5359), 'keyboard.add_hotkey', 'keyboard.add_hotkey', (['hotkey', 'increment'], {}), '(hotkey, increment)\n', (5340, 5359), False, 'import keyboard\n'), ((5670, 5708), 'keyboard.hook', 'keyboard.hook', (['self.EndDecrementListen'], {}), '(self.EndDecrementListen)\n', (5683, 5708), False, 'import keyboard\n'), ((5779, 5811), 'keyboard.remove_hotkey', 'keyboard.remove_hotkey', (['dehotkey'], {}), '(dehotkey)\n', (5801, 5811), False, 'import keyboard\n'), ((5895, 5921), 'keyboard.get_hotkey_name', 'keyboard.get_hotkey_name', ([], {}), '()\n', (5919, 5921), False, 'import keyboard\n'), ((5930, 5970), 'keyboard.add_hotkey', 'keyboard.add_hotkey', (['dehotkey', 'decrement'], {}), '(dehotkey, decrement)\n', (5949, 5970), False, 'import keyboard\n'), ((4127, 4219), 'wx.FileDialog', 'wx.FileDialog', (['self', '"""Select File"""'], {'wildcard': '"""Text file(*.txt)|*.txt"""', 'style': 'wx.FD_SAVE'}), "(self, 'Select File', wildcard='Text file(*.txt)|*.txt', style\n =wx.FD_SAVE)\n", (4140, 4219), False, 'import wx\n'), ((5376, 5404), 'keyboard.is_modifier', 'keyboard.is_modifier', (['e.name'], {}), '(e.name)\n', (5396, 5404), False, 'import keyboard\n'), ((5418, 5452), 'keyboard.unhook', 'keyboard.unhook', (['self.keyboardHook'], {}), '(self.keyboardHook)\n', (5433, 5452), False, 'import keyboard\n'), ((5987, 6015), 'keyboard.is_modifier', 'keyboard.is_modifier', (['e.name'], {}), '(e.name)\n', (6007, 6015), False, 'import keyboard\n'), ((6029, 6065), 'keyboard.unhook', 'keyboard.unhook', (['self.deKeyboardHook'], {}), '(self.deKeyboardHook)\n', (6044, 6065), False, 'import keyboard\n')]
|
import glob
import os
import random
import cv2
def crop_image(src_image_path, dst_image_path):
output_side_length=256
img = cv2.imread(src_image_path)
height, width, depth = img.shape
new_height = output_side_length
new_width = output_side_length
if height > width:
new_height = int(output_side_length * height / width)
else:
new_width = int(output_side_length * width / height)
resized_img = cv2.resize(img, (new_width, new_height))
height_offset = int((new_height - output_side_length) / 2)
width_offset = int((new_width - output_side_length) / 2)
cropped_img = resized_img[height_offset : height_offset + output_side_length, width_offset : width_offset + output_side_length]
cv2.imwrite(dst_image_path, cropped_img)
def main():
# parameters
from_dir = 'download_images'
to_dir = 'crop_images'
split_ratio = 0.75
# make directory
    if not os.path.exists(to_dir):
        os.makedirs(to_dir)
os.makedirs(os.path.join(to_dir, 'train'))
os.makedirs(os.path.join(to_dir, 'test'))
# make list for train
train_list = open('train.txt','w')
test_list = open('test.txt','w')
label_list = open('labels.txt','w')
class_no=0
image_count = 0
labels = glob.glob('{}/*'.format(from_dir))
for label in labels:
label_name = os.path.basename(label)
print(label_name)
os.makedirs(os.path.join(to_dir, 'train', label_name))
os.makedirs(os.path.join(to_dir, 'test', label_name))
images = glob.glob('{}/*.jpeg'.format(label))
# write label for train
label_list.write(label_name + '\n')
length = len(images)
split_count = 0
split_number = length * split_ratio
random.shuffle(images)
for image in images:
image_name = os.path.basename(image)
if split_count < split_number:
to_train_image = os.path.join(to_dir, 'train', label_name, image_name)
print('{} > {}'.format(image, to_train_image))
crop_image(image, to_train_image)
# write image path for train
train_list.write('{} {}\n'.format(to_train_image, class_no))
else:
to_test_image = os.path.join(to_dir, 'test', label_name, image_name)
print('{} > {}'.format(image, to_test_image))
crop_image(image, to_test_image)
# write image path for test
test_list.write('{} {}\n'.format(to_test_image, class_no))
image_count = image_count + 1
split_count = split_count + 1
class_no += 1
train_list.close()
test_list.close()
label_list.close()
if __name__ == '__main__':
main()
|
[
"os.makedirs",
"os.path.basename",
"cv2.imwrite",
"random.shuffle",
"os.path.exists",
"cv2.imread",
"os.path.join",
"cv2.resize"
] |
[((133, 159), 'cv2.imread', 'cv2.imread', (['src_image_path'], {}), '(src_image_path)\n', (143, 159), False, 'import cv2\n'), ((442, 482), 'cv2.resize', 'cv2.resize', (['img', '(new_width, new_height)'], {}), '(img, (new_width, new_height))\n', (452, 482), False, 'import cv2\n'), ((743, 783), 'cv2.imwrite', 'cv2.imwrite', (['dst_image_path', 'cropped_img'], {}), '(dst_image_path, cropped_img)\n', (754, 783), False, 'import cv2\n'), ((927, 949), 'os.path.exists', 'os.path.exists', (['to_dir'], {}), '(to_dir)\n', (941, 949), False, 'import os\n'), ((969, 988), 'os.makedirs', 'os.makedirs', (['to_dir'], {}), '(to_dir)\n', (980, 988), False, 'import os\n'), ((1363, 1386), 'os.path.basename', 'os.path.basename', (['label'], {}), '(label)\n', (1379, 1386), False, 'import os\n'), ((1773, 1795), 'random.shuffle', 'random.shuffle', (['images'], {}), '(images)\n', (1787, 1795), False, 'import random\n'), ((1009, 1038), 'os.path.join', 'os.path.join', (['to_dir', '"""train"""'], {}), "(to_dir, 'train')\n", (1021, 1038), False, 'import os\n'), ((1060, 1088), 'os.path.join', 'os.path.join', (['to_dir', '"""test"""'], {}), "(to_dir, 'test')\n", (1072, 1088), False, 'import os\n'), ((1433, 1474), 'os.path.join', 'os.path.join', (['to_dir', '"""train"""', 'label_name'], {}), "(to_dir, 'train', label_name)\n", (1445, 1474), False, 'import os\n'), ((1496, 1536), 'os.path.join', 'os.path.join', (['to_dir', '"""test"""', 'label_name'], {}), "(to_dir, 'test', label_name)\n", (1508, 1536), False, 'import os\n'), ((1850, 1873), 'os.path.basename', 'os.path.basename', (['image'], {}), '(image)\n', (1866, 1873), False, 'import os\n'), ((1950, 2003), 'os.path.join', 'os.path.join', (['to_dir', '"""train"""', 'label_name', 'image_name'], {}), "(to_dir, 'train', label_name, image_name)\n", (1962, 2003), False, 'import os\n'), ((2289, 2341), 'os.path.join', 'os.path.join', (['to_dir', '"""test"""', 'label_name', 'image_name'], {}), "(to_dir, 'test', label_name, image_name)\n", (2301, 2341), False, 'import os\n')]
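A quick sanity check for crop_image, writing a synthetic image first so the snippet does not depend on the downloaded dataset (sample.jpg and sample_crop.jpg are throwaway names):

import cv2
import numpy as np

cv2.imwrite('sample.jpg', np.random.randint(0, 255, (300, 500, 3), dtype=np.uint8))
crop_image('sample.jpg', 'sample_crop.jpg')
print(cv2.imread('sample_crop.jpg').shape)  # (256, 256, 3)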
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import itertools
import numpy as np
from absl.testing import parameterized
from tensorflow.python.client import session as sl
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python import ipu
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
from tensorflow.python.training import gradient_descent
# Error threshold for forward pass test.
THRESHOLD = 0.03
# Dimensions of the random data tensor.
DIMS = (1024, 1024, 4)
# Initialise with a random seed.
SEED = np.random.randint(np.iinfo(np.int32).max, size=[2], dtype=np.int32)
# Number of times to verify output for a given seed.
SEED_TEST_REPETITIONS = 6
def build_test_cases(exhaustive=False):
# Dropout rate(s) to test.
rate = [0.1, 0.5, 0.9] if exhaustive else [0.5]
# User specified and non-specified cases.
seed = [SEED, None]
# Shape of the dropout.
# Note that shaping the dropout such that a very large portion of
  # the input weights is dropped will fail the test criteria, as expected.
noise_shape = [[], [DIMS[0], DIMS[1], 1]]
if exhaustive:
noise_shape.append([DIMS[0], 1, DIMS[2]])
noise_shape.append([1, DIMS[1], DIMS[2]])
# Get the cartesian product (can get very large).
prod = itertools.product(rate, seed, noise_shape)
test_cases = []
for n, perm in enumerate(prod):
test = {
'testcase_name': ' Case: %3d' % n,
'rate': perm[0],
'seed': perm[1],
'noise_shape': perm[2]
}
test_cases.append(test)
return test_cases
# Default is not to test every combination.
TEST_CASES = build_test_cases()
class PopnnRandomDropoutTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
@staticmethod
def _ipu_dropout(w, rate, seed, noise_shape):
output = ipu.ops.rand_ops.dropout(w,
rate=rate,
seed=seed,
noise_shape=noise_shape)
return [output]
@staticmethod
def _setup_test(f):
with ops.device('cpu'):
input_data = array_ops.placeholder(np.float32, DIMS)
with ipu.scopes.ipu_scope("/device:IPU:0"):
r = ipu.ipu_compiler.compile(f, inputs=[input_data])
cfg = ipu.utils.create_ipu_config()
cfg = ipu.utils.set_ipu_model_options(cfg, compile_ipu_code=False)
ipu.utils.configure_ipu_system(cfg)
return r, input_data
@test_util.deprecated_graph_mode_only
def testInvalidNoiseShape(self):
in_data = np.random.rand(16, 8, 16)
print(in_data.shape)
seed = np.array([12, 34], dtype=np.int32)
with sl.Session() as sess:
with self.assertRaisesRegex(ValueError, "must equal the rank of x."):
def _wrong_length(w):
return self._ipu_dropout(w, 0.5, seed, [1])
r, input_data = self._setup_test(_wrong_length)
_ = sess.run(r, {input_data: in_data})
with self.assertRaisesRegex(ValueError, "Dimension mismatch"):
def _wrong_dims(w):
return self._ipu_dropout(w, 0.5, seed, [8, 1, 16])
r, input_data = self._setup_test(_wrong_dims)
_ = sess.run(r, {input_data: in_data})
@parameterized.named_parameters(*TEST_CASES)
@test_util.deprecated_graph_mode_only
def testDropout(self, rate, seed, noise_shape):
def _run_dropout(w):
return self._ipu_dropout(w, rate, seed, noise_shape)
r, input_data = self._setup_test(_run_dropout)
with sl.Session() as sess:
in_data = np.random.rand(*DIMS)
result = sess.run(r, {input_data: in_data})
percent_kept = np.count_nonzero(result) / np.count_nonzero(in_data)
      # There's a considerable amount of randomness, so we use a reasonably
      # large test tensor to keep the observed error small.
      is_roughly_close = abs(percent_kept - (1.0 - rate))
      # The observed error is typically well under 1%, but we don't want to
      # cause random regressions, and 3% is still acceptable for any
      # outlier randoms.
      self.assertTrue(is_roughly_close < THRESHOLD)
@parameterized.named_parameters(*TEST_CASES)
@test_util.deprecated_graph_mode_only
def testUserSeed(self, rate, seed, noise_shape):
def _run_dropout(w):
return self._ipu_dropout(w, rate, seed, noise_shape)
r, input_data = self._setup_test(_run_dropout)
with sl.Session() as sess:
in_data = np.random.rand(*DIMS)
# For a given output, verify that each subsequent output is equal to it.
first_result = None
for _ in range(SEED_TEST_REPETITIONS):
result = sess.run(r, {input_data: in_data})
if first_result is None:
first_result = result
continue
self.assertAllEqual(first_result, result)
@parameterized.named_parameters(*TEST_CASES)
@test_util.deprecated_graph_mode_only
def testDropoutBackwardPass(self, rate, seed, noise_shape):
def _run_dropout(w):
output = self._ipu_dropout(w, rate, seed, noise_shape)
largest = output
cost = math_ops.square(largest)
opt = gradient_descent.GradientDescentOptimizer(learning_rate=0.1)
gradients = opt.compute_gradients(cost, w)
return [output, gradients]
r, input_data = self._setup_test(_run_dropout)
with sl.Session() as sess:
in_data = np.random.rand(*DIMS)
result = sess.run(r, {input_data: in_data})
dropout_out = result[0]
gradients = result[1][0][0]
# Check we have the same number of zeros.
self.assertAllEqual(np.count_nonzero(dropout_out),
np.count_nonzero(gradients))
@parameterized.named_parameters(*TEST_CASES)
@test_util.deprecated_graph_mode_only
def testScaling(self, rate, seed, noise_shape):
def _run_dropout(w):
return self._ipu_dropout(w, rate, seed, noise_shape)
r, input_data = self._setup_test(_run_dropout)
with sl.Session() as sess:
in_data = np.ones(DIMS)
[result] = sess.run(r, {input_data: in_data})
kept_values = result[np.nonzero(result)]
expected_kept_values = 1 / (1 - rate) * np.ones(kept_values.shape)
self.assertAllClose(kept_values, expected_kept_values)
if __name__ == "__main__":
googletest.main()
|
[
"numpy.iinfo",
"numpy.ones",
"tensorflow.python.framework.ops.device",
"tensorflow.python.ipu.utils.set_ipu_model_options",
"tensorflow.python.client.session.Session",
"tensorflow.python.ipu.ipu_compiler.compile",
"tensorflow.python.platform.googletest.main",
"tensorflow.python.ops.array_ops.placeholder",
"itertools.product",
"tensorflow.python.ops.math_ops.square",
"tensorflow.python.ipu.utils.configure_ipu_system",
"absl.testing.parameterized.named_parameters",
"tensorflow.python.ipu.scopes.ipu_scope",
"numpy.count_nonzero",
"tensorflow.python.ipu.ops.rand_ops.dropout",
"tensorflow.python.training.gradient_descent.GradientDescentOptimizer",
"numpy.nonzero",
"numpy.array",
"numpy.random.rand",
"tensorflow.python.ipu.utils.create_ipu_config"
] |
[((2023, 2065), 'itertools.product', 'itertools.product', (['rate', 'seed', 'noise_shape'], {}), '(rate, seed, noise_shape)\n', (2040, 2065), False, 'import itertools\n'), ((3961, 4004), 'absl.testing.parameterized.named_parameters', 'parameterized.named_parameters', (['*TEST_CASES'], {}), '(*TEST_CASES)\n', (3991, 4004), False, 'from absl.testing import parameterized\n'), ((4884, 4927), 'absl.testing.parameterized.named_parameters', 'parameterized.named_parameters', (['*TEST_CASES'], {}), '(*TEST_CASES)\n', (4914, 4927), False, 'from absl.testing import parameterized\n'), ((5568, 5611), 'absl.testing.parameterized.named_parameters', 'parameterized.named_parameters', (['*TEST_CASES'], {}), '(*TEST_CASES)\n', (5598, 5611), False, 'from absl.testing import parameterized\n'), ((6421, 6464), 'absl.testing.parameterized.named_parameters', 'parameterized.named_parameters', (['*TEST_CASES'], {}), '(*TEST_CASES)\n', (6451, 6464), False, 'from absl.testing import parameterized\n'), ((7019, 7036), 'tensorflow.python.platform.googletest.main', 'googletest.main', ([], {}), '()\n', (7034, 7036), False, 'from tensorflow.python.platform import googletest\n'), ((1319, 1337), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (1327, 1337), True, 'import numpy as np\n'), ((2582, 2656), 'tensorflow.python.ipu.ops.rand_ops.dropout', 'ipu.ops.rand_ops.dropout', (['w'], {'rate': 'rate', 'seed': 'seed', 'noise_shape': 'noise_shape'}), '(w, rate=rate, seed=seed, noise_shape=noise_shape)\n', (2606, 2656), False, 'from tensorflow.python import ipu\n'), ((3301, 3326), 'numpy.random.rand', 'np.random.rand', (['(16)', '(8)', '(16)'], {}), '(16, 8, 16)\n', (3315, 3326), True, 'import numpy as np\n'), ((3363, 3397), 'numpy.array', 'np.array', (['[12, 34]'], {'dtype': 'np.int32'}), '([12, 34], dtype=np.int32)\n', (3371, 3397), True, 'import numpy as np\n'), ((2839, 2856), 'tensorflow.python.framework.ops.device', 'ops.device', (['"""cpu"""'], {}), "('cpu')\n", (2849, 2856), False, 'from tensorflow.python.framework import ops\n'), ((2877, 2916), 'tensorflow.python.ops.array_ops.placeholder', 'array_ops.placeholder', (['np.float32', 'DIMS'], {}), '(np.float32, DIMS)\n', (2898, 2916), False, 'from tensorflow.python.ops import array_ops\n'), ((2927, 2964), 'tensorflow.python.ipu.scopes.ipu_scope', 'ipu.scopes.ipu_scope', (['"""/device:IPU:0"""'], {}), "('/device:IPU:0')\n", (2947, 2964), False, 'from tensorflow.python import ipu\n'), ((2976, 3024), 'tensorflow.python.ipu.ipu_compiler.compile', 'ipu.ipu_compiler.compile', (['f'], {'inputs': '[input_data]'}), '(f, inputs=[input_data])\n', (3000, 3024), False, 'from tensorflow.python import ipu\n'), ((3038, 3067), 'tensorflow.python.ipu.utils.create_ipu_config', 'ipu.utils.create_ipu_config', ([], {}), '()\n', (3065, 3067), False, 'from tensorflow.python import ipu\n'), ((3080, 3140), 'tensorflow.python.ipu.utils.set_ipu_model_options', 'ipu.utils.set_ipu_model_options', (['cfg'], {'compile_ipu_code': '(False)'}), '(cfg, compile_ipu_code=False)\n', (3111, 3140), False, 'from tensorflow.python import ipu\n'), ((3147, 3182), 'tensorflow.python.ipu.utils.configure_ipu_system', 'ipu.utils.configure_ipu_system', (['cfg'], {}), '(cfg)\n', (3177, 3182), False, 'from tensorflow.python import ipu\n'), ((3408, 3420), 'tensorflow.python.client.session.Session', 'sl.Session', ([], {}), '()\n', (3418, 3420), True, 'from tensorflow.python.client import session as sl\n'), ((4241, 4253), 'tensorflow.python.client.session.Session', 'sl.Session', ([], {}), '()\n', (4251, 4253), True, 
'from tensorflow.python.client import session as sl\n'), ((4279, 4300), 'numpy.random.rand', 'np.random.rand', (['*DIMS'], {}), '(*DIMS)\n', (4293, 4300), True, 'import numpy as np\n'), ((5165, 5177), 'tensorflow.python.client.session.Session', 'sl.Session', ([], {}), '()\n', (5175, 5177), True, 'from tensorflow.python.client import session as sl\n'), ((5203, 5224), 'numpy.random.rand', 'np.random.rand', (['*DIMS'], {}), '(*DIMS)\n', (5217, 5224), True, 'import numpy as np\n'), ((5837, 5861), 'tensorflow.python.ops.math_ops.square', 'math_ops.square', (['largest'], {}), '(largest)\n', (5852, 5861), False, 'from tensorflow.python.ops import math_ops\n'), ((5875, 5935), 'tensorflow.python.training.gradient_descent.GradientDescentOptimizer', 'gradient_descent.GradientDescentOptimizer', ([], {'learning_rate': '(0.1)'}), '(learning_rate=0.1)\n', (5916, 5935), False, 'from tensorflow.python.training import gradient_descent\n'), ((6081, 6093), 'tensorflow.python.client.session.Session', 'sl.Session', ([], {}), '()\n', (6091, 6093), True, 'from tensorflow.python.client import session as sl\n'), ((6119, 6140), 'numpy.random.rand', 'np.random.rand', (['*DIMS'], {}), '(*DIMS)\n', (6133, 6140), True, 'import numpy as np\n'), ((6701, 6713), 'tensorflow.python.client.session.Session', 'sl.Session', ([], {}), '()\n', (6711, 6713), True, 'from tensorflow.python.client import session as sl\n'), ((6739, 6752), 'numpy.ones', 'np.ones', (['DIMS'], {}), '(DIMS)\n', (6746, 6752), True, 'import numpy as np\n'), ((4372, 4396), 'numpy.count_nonzero', 'np.count_nonzero', (['result'], {}), '(result)\n', (4388, 4396), True, 'import numpy as np\n'), ((4399, 4424), 'numpy.count_nonzero', 'np.count_nonzero', (['in_data'], {}), '(in_data)\n', (4415, 4424), True, 'import numpy as np\n'), ((6331, 6360), 'numpy.count_nonzero', 'np.count_nonzero', (['dropout_out'], {}), '(dropout_out)\n', (6347, 6360), True, 'import numpy as np\n'), ((6388, 6415), 'numpy.count_nonzero', 'np.count_nonzero', (['gradients'], {}), '(gradients)\n', (6404, 6415), True, 'import numpy as np\n'), ((6833, 6851), 'numpy.nonzero', 'np.nonzero', (['result'], {}), '(result)\n', (6843, 6851), True, 'import numpy as np\n'), ((6899, 6925), 'numpy.ones', 'np.ones', (['kept_values.shape'], {}), '(kept_values.shape)\n', (6906, 6925), True, 'import numpy as np\n')]
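The scaling assertion in testScaling follows from inverted dropout: surviving activations are multiplied by 1/(1-rate) so the expected value of the output matches the input. A plain-NumPy illustration of that identity:

import numpy as np

rate = 0.5
x = np.ones((1024, 1024))
mask = (np.random.rand(*x.shape) >= rate).astype(x.dtype)
y = x * mask / (1.0 - rate)  # inverted dropout
print(np.allclose(y.mean(), x.mean(), atol=0.01))  # True: expectation preserved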
|
from static import *
from lib import map_value
from point import Point
from ray import Ray
import numpy as np
import random
import math
class Source:
def __init__(self, x, y, fov, pg, screen):
self.pos = Point(x, y)
self.angle = np.random.randint(0, 360)
self.view_mode = 0
self.pg = pg
self.screen = screen
self.fov = fov
return
def generate_rays(self):
''' list to store all light ray objects emerging from light source '''
self.rays = []
self.ray_color = BLUE
self.point_color = GREEN
for i in range(0, N):
angle = i*self.fov/N * np.pi/180
self.rays.append(Ray(self.pos.x, self.pos.y, self.ray_color, self.point_color, self.pg, self.screen, angle))
return
def change_ray_colors(self):
self.ray_color = random.choice(COLORS)
self.point_color = random.choice(COLORS)
for ray in self.rays:
ray.change_color(self.ray_color, self.point_color)
return
def move(self, x, y):
self.pos.move(x, y)
for ray in self.rays:
ray.move(x, y)
return
def dist(self, ip):
return np.sqrt(np.sum([(self.pos.x-ip[0])**2, (self.pos.y-ip[1])**2]))
def draw(self):
self.pg.draw.rect(self.screen, BLACK, (0, 0, SWIDTH, HEIGHT))
if (self.pos.x < WIDTH):
self.pg.draw.circle(self.screen, GREEN, (self.pos.x, self.pos.y), 10)
return
    ''' 3D rendering of the ray-casting process. '''
    ''' There are dozens of other ways to map 2D info to 3D, '''
    ''' each of which changes how the rendered scene looks to our eyes. '''
    ''' Parameters i and distance refer to the index of a ray and its distance to the nearest wall. '''
def draw3D(self, i, distance, color):
if distance==0:
return
''' width of rectangle being rendered in 3D '''
dx = int(WIDTH/N)
''' height of rectangle being rendered in 3D '''
if VIEW_MODES[self.view_mode] == 'tangent':
dy = int(DISTORTION_ANGLE/distance)
elif VIEW_MODES[self.view_mode] == 'cosine':
dy = int((N*HEIGHT/distance)*math.cos(abs(i*(self.fov/N)-self.fov)*math.pi/180))
elif VIEW_MODES[self.view_mode] == 'fisheye':
dy = int(HEIGHT-distance)
''' color value provides an effect in which wall's color being altered '''
''' depending on its distance to the light source '''
#color = 255-map_value(distance)
color = tuple([v-map_value(distance, v) for v in color])
        try:
            self.pg.draw.rect(self.screen, color, (WIDTH + (i*dx), int((HEIGHT-dy)/2), dx, dy))
        except Exception:
            # Skip rectangles that cannot be drawn (e.g. degenerate sizes).
            pass
return
|
[
"numpy.sum",
"lib.map_value",
"random.choice",
"numpy.random.randint",
"ray.Ray",
"point.Point"
] |
[((220, 231), 'point.Point', 'Point', (['x', 'y'], {}), '(x, y)\n', (225, 231), False, 'from point import Point\n'), ((253, 278), 'numpy.random.randint', 'np.random.randint', (['(0)', '(360)'], {}), '(0, 360)\n', (270, 278), True, 'import numpy as np\n'), ((876, 897), 'random.choice', 'random.choice', (['COLORS'], {}), '(COLORS)\n', (889, 897), False, 'import random\n'), ((925, 946), 'random.choice', 'random.choice', (['COLORS'], {}), '(COLORS)\n', (938, 946), False, 'import random\n'), ((1243, 1305), 'numpy.sum', 'np.sum', (['[(self.pos.x - ip[0]) ** 2, (self.pos.y - ip[1]) ** 2]'], {}), '([(self.pos.x - ip[0]) ** 2, (self.pos.y - ip[1]) ** 2])\n', (1249, 1305), True, 'import numpy as np\n'), ((706, 801), 'ray.Ray', 'Ray', (['self.pos.x', 'self.pos.y', 'self.ray_color', 'self.point_color', 'self.pg', 'self.screen', 'angle'], {}), '(self.pos.x, self.pos.y, self.ray_color, self.point_color, self.pg, self\n .screen, angle)\n', (709, 801), False, 'from ray import Ray\n'), ((2611, 2633), 'lib.map_value', 'map_value', (['distance', 'v'], {}), '(distance, v)\n', (2620, 2633), False, 'from lib import map_value\n')]
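The 'cosine' mode above is the classic fisheye correction: multiplying each ray's Euclidean distance by the cosine of its angle from the view axis yields the perpendicular distance, so a flat wall renders flat. A small numeric illustration (screen_height=480 is an arbitrary stand-in):

import math

def corrected_height(euclidean_distance, ray_angle_deg, screen_height=480):
    # Perpendicular distance removes the fisheye bulge.
    perpendicular = euclidean_distance * math.cos(math.radians(ray_angle_deg))
    return int(screen_height / perpendicular)

d_perp = 100.0
for angle in (0, 30, 45):
    euclid = d_perp / math.cos(math.radians(angle))  # what the ray actually measures
    print(corrected_height(euclid, angle))  # 4 every time: the wall stays flat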
|
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from monai.inferers import SlidingWindowInferer
from monai.losses import DiceCELoss
from monai.optimizers import Novograd
from monai.transforms import (
Activationsd,
AddChanneld,
AsDiscreted,
CropForegroundd,
EnsureTyped,
LoadImaged,
RandCropByPosNegLabeld,
RandShiftIntensityd,
ScaleIntensityRanged,
Spacingd,
ToDeviced,
ToTensord,
)
from monailabel.tasks.train.basic_train import BasicTrainTask, Context
logger = logging.getLogger(__name__)
class MyTrain(BasicTrainTask):
def __init__(
self,
model_dir,
network,
description="Train Segmentation model for spleen",
**kwargs,
):
self._network = network
super().__init__(model_dir, description, **kwargs)
def network(self, context: Context):
return self._network
def optimizer(self, context: Context):
return Novograd(self._network.parameters(), 0.0001)
def loss_function(self, context: Context):
return DiceCELoss(to_onehot_y=True, softmax=True, squared_pred=True, batch=True)
def train_pre_transforms(self, context: Context):
t = [
LoadImaged(keys=("image", "label")),
AddChanneld(keys=("image", "label")),
Spacingd(
keys=("image", "label"),
pixdim=(1.0, 1.0, 1.0),
mode=("bilinear", "nearest"),
),
ScaleIntensityRanged(keys="image", a_min=-57, a_max=164, b_min=0.0, b_max=1.0, clip=True),
CropForegroundd(keys=("image", "label"), source_key="image"),
]
if context.request.get("to_gpu", False):
t.extend([EnsureTyped(keys=("image", "label")), ToDeviced(keys=("image", "label"), device=context.device)])
t.extend(
[
RandCropByPosNegLabeld(
keys=("image", "label"),
label_key="label",
spatial_size=(96, 96, 96),
pos=1,
neg=1,
num_samples=4,
image_key="image",
image_threshold=0,
),
RandShiftIntensityd(keys="image", offsets=0.1, prob=0.5),
]
)
return t
def train_post_transforms(self, context: Context):
return [
ToTensord(keys=("pred", "label")),
Activationsd(keys="pred", softmax=True),
AsDiscreted(
keys=("pred", "label"),
argmax=(True, False),
to_onehot=True,
n_classes=2,
),
]
def val_pre_transforms(self, context: Context):
t = [
LoadImaged(keys=("image", "label")),
AddChanneld(keys=("image", "label")),
Spacingd(
keys=("image", "label"),
pixdim=(1.0, 1.0, 1.0),
mode=("bilinear", "nearest"),
),
ScaleIntensityRanged(keys="image", a_min=-57, a_max=164, b_min=0.0, b_max=1.0, clip=True),
CropForegroundd(keys=("image", "label"), source_key="image"),
]
if context.request.get("to_gpu", False):
t.extend([EnsureTyped(keys=("image", "label")), ToDeviced(keys=("image", "label"), device=context.device)])
return t
def val_inferer(self, context: Context):
return SlidingWindowInferer(roi_size=(160, 160, 160), sw_batch_size=1, overlap=0.25)
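# --- Editor's usage sketch (not part of the original file) ---
# MyTrain takes a model directory and a network instance; the UNet
# hyperparameters below (spatial dims, in/out channels, channel sequence,
# strides) are illustrative assumptions, not the app's actual configuration.
def _example_my_train(model_dir="model"):
    from monai.networks.nets import UNet
    network = UNet(3, 1, 2, (16, 32, 64, 128, 256), (2, 2, 2, 2))
    return MyTrain(model_dir, network)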
|
[
"monai.transforms.AddChanneld",
"monai.transforms.ScaleIntensityRanged",
"monai.transforms.ToDeviced",
"monai.inferers.SlidingWindowInferer",
"monai.losses.DiceCELoss",
"monai.transforms.AsDiscreted",
"monai.transforms.RandShiftIntensityd",
"monai.transforms.LoadImaged",
"monai.transforms.RandCropByPosNegLabeld",
"monai.transforms.ToTensord",
"monai.transforms.Spacingd",
"monai.transforms.Activationsd",
"monai.transforms.EnsureTyped",
"logging.getLogger",
"monai.transforms.CropForegroundd"
] |
[((1064, 1091), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1081, 1091), False, 'import logging\n'), ((1606, 1679), 'monai.losses.DiceCELoss', 'DiceCELoss', ([], {'to_onehot_y': '(True)', 'softmax': '(True)', 'squared_pred': '(True)', 'batch': '(True)'}), '(to_onehot_y=True, softmax=True, squared_pred=True, batch=True)\n', (1616, 1679), False, 'from monai.losses import DiceCELoss\n'), ((3998, 4075), 'monai.inferers.SlidingWindowInferer', 'SlidingWindowInferer', ([], {'roi_size': '(160, 160, 160)', 'sw_batch_size': '(1)', 'overlap': '(0.25)'}), '(roi_size=(160, 160, 160), sw_batch_size=1, overlap=0.25)\n', (4018, 4075), False, 'from monai.inferers import SlidingWindowInferer\n'), ((1761, 1796), 'monai.transforms.LoadImaged', 'LoadImaged', ([], {'keys': "('image', 'label')"}), "(keys=('image', 'label'))\n", (1771, 1796), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((1810, 1846), 'monai.transforms.AddChanneld', 'AddChanneld', ([], {'keys': "('image', 'label')"}), "(keys=('image', 'label'))\n", (1821, 1846), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((1860, 1951), 'monai.transforms.Spacingd', 'Spacingd', ([], {'keys': "('image', 'label')", 'pixdim': '(1.0, 1.0, 1.0)', 'mode': "('bilinear', 'nearest')"}), "(keys=('image', 'label'), pixdim=(1.0, 1.0, 1.0), mode=('bilinear',\n 'nearest'))\n", (1868, 1951), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2024, 2118), 'monai.transforms.ScaleIntensityRanged', 'ScaleIntensityRanged', ([], {'keys': '"""image"""', 'a_min': '(-57)', 'a_max': '(164)', 'b_min': '(0.0)', 'b_max': '(1.0)', 'clip': '(True)'}), "(keys='image', a_min=-57, a_max=164, b_min=0.0, b_max=\n 1.0, clip=True)\n", (2044, 2118), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2127, 2187), 'monai.transforms.CropForegroundd', 'CropForegroundd', ([], {'keys': "('image', 'label')", 'source_key': '"""image"""'}), "(keys=('image', 'label'), source_key='image')\n", (2142, 2187), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2957, 2990), 'monai.transforms.ToTensord', 'ToTensord', ([], {'keys': "('pred', 'label')"}), "(keys=('pred', 'label'))\n", (2966, 2990), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3004, 3043), 'monai.transforms.Activationsd', 'Activationsd', ([], {'keys': '"""pred"""', 'softmax': '(True)'}), "(keys='pred', softmax=True)\n", (3016, 3043), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3057, 3147), 'monai.transforms.AsDiscreted', 'AsDiscreted', ([], {'keys': "('pred', 'label')", 'argmax': '(True, False)', 'to_onehot': '(True)', 'n_classes': '(2)'}), "(keys=('pred', 'label'), argmax=(True, False), to_onehot=True,\n n_classes=2)\n", (3068, 3147), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3313, 3348), 'monai.transforms.LoadImaged', 'LoadImaged', ([], {'keys': "('image', 'label')"}), "(keys=('image', 'label'))\n", (3323, 3348), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3362, 3398), 'monai.transforms.AddChanneld', 'AddChanneld', ([], {'keys': "('image', 'label')"}), "(keys=('image', 'label'))\n", (3373, 3398), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3412, 3503), 'monai.transforms.Spacingd', 'Spacingd', ([], {'keys': "('image', 'label')", 'pixdim': '(1.0, 1.0, 1.0)', 'mode': "('bilinear', 'nearest')"}), "(keys=('image', 'label'), pixdim=(1.0, 1.0, 1.0), mode=('bilinear',\n 'nearest'))\n", (3420, 3503), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3576, 3670), 'monai.transforms.ScaleIntensityRanged', 'ScaleIntensityRanged', ([], {'keys': '"""image"""', 'a_min': '(-57)', 'a_max': '(164)', 'b_min': '(0.0)', 'b_max': '(1.0)', 'clip': '(True)'}), "(keys='image', a_min=-57, a_max=164, b_min=0.0, b_max=\n 1.0, clip=True)\n", (3596, 3670), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3679, 3739), 'monai.transforms.CropForegroundd', 'CropForegroundd', ([], {'keys': "('image', 'label')", 'source_key': '"""image"""'}), "(keys=('image', 'label'), source_key='image')\n", (3694, 3739), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2416, 2585), 'monai.transforms.RandCropByPosNegLabeld', 'RandCropByPosNegLabeld', ([], {'keys': "('image', 'label')", 'label_key': '"""label"""', 'spatial_size': '(96, 96, 96)', 'pos': '(1)', 'neg': '(1)', 'num_samples': '(4)', 'image_key': '"""image"""', 'image_threshold': '(0)'}), "(keys=('image', 'label'), label_key='label',\n spatial_size=(96, 96, 96), pos=1, neg=1, num_samples=4, image_key=\n 'image', image_threshold=0)\n", (2438, 2585), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2773, 2829), 'monai.transforms.RandShiftIntensityd', 'RandShiftIntensityd', ([], {'keys': '"""image"""', 'offsets': '(0.1)', 'prob': '(0.5)'}), "(keys='image', offsets=0.1, prob=0.5)\n", (2792, 2829), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2270, 2306), 'monai.transforms.EnsureTyped', 'EnsureTyped', ([], {'keys': "('image', 'label')"}), "(keys=('image', 'label'))\n", (2281, 2306), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((2308, 2365), 'monai.transforms.ToDeviced', 'ToDeviced', ([], {'keys': "('image', 'label')", 'device': 'context.device'}), "(keys=('image', 'label'), device=context.device)\n", (2317, 2365), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3822, 3858), 'monai.transforms.EnsureTyped', 'EnsureTyped', ([], {'keys': "('image', 'label')"}), "(keys=('image', 'label'))\n", (3833, 3858), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n'), ((3860, 3917), 'monai.transforms.ToDeviced', 'ToDeviced', ([], {'keys': "('image', 'label')", 'device': 'context.device'}), "(keys=('image', 'label'), device=context.device)\n", (3869, 3917), False, 'from monai.transforms import Activationsd, AddChanneld, AsDiscreted, CropForegroundd, EnsureTyped, LoadImaged, RandCropByPosNegLabeld, RandShiftIntensityd, ScaleIntensityRanged, Spacingd, ToDeviced, ToTensord\n')]
|
"""Simple client to the Channel Archiver using xmlrpc."""
import logging as log
from xmlrpc.client import ServerProxy
import numpy
from . import data, utils
from .fetcher import Fetcher
__all__ = [
"CaClient",
"CaFetcher",
]
class CaClient(object):
"""Class to handle XMLRPC interaction with a channel archiver."""
def __init__(self, url):
"""
Args:
url: url for the channel archiver
"""
self._proxy = ServerProxy(url)
@staticmethod
def _create_archive_event(pv, ca_event):
"""Create ArchiveEvent from the objects received over XMLRPC.
Args:
pv: PV name to add to the event
ca_event: object received over XMLRPC
Returns:
ArchiveEvent object
"""
value = ca_event["value"]
timestamp = ca_event["secs"] + 1e-9 * ca_event["nano"]
severity = ca_event["sevr"]
return data.ArchiveEvent(pv, value, timestamp, severity)
def get(self, pv, start, end, count):
"""Request events over XMLRPC.
Args:
pv: PV name to request events for
start: datetime of start of requested period
end: datetime of end of requested period
count: maximum number of events to retrieve
Returns:
List of ArchiveEvent objects
"""
start_secs = utils.datetime_to_epoch(start)
end_secs = utils.datetime_to_epoch(end)
response = self._proxy.archiver.values(
1, [pv], start_secs, 0, end_secs, 0, count, 0
)
return [
CaClient._create_archive_event(pv, val) for val in response[0]["values"]
]
class CaFetcher(Fetcher):
"""Class to retrieve data from a channel archiver."""
def __init__(self, url):
"""
Args:
url: url for the channel archiver
"""
self._client = CaClient(url)
def _get_values(self, pv, start, end=None, count=None, request_params=None):
# Make count a large number if not specified to ensure we get all
# data.
count = 2 ** 31 if count is None else count
empty_array = numpy.zeros((0,))
all_data = data.ArchiveData(pv, empty_array, empty_array, empty_array)
last_timestamp = -1
done = False
while done is not True and len(all_data) < count:
requested = min(count - len(all_data), 10000)
if all_data.timestamps.size:
last_timestamp = all_data.timestamps[-1]
start = utils.epoch_to_datetime(last_timestamp)
log.info("Request PV {} for {} samples.".format(pv, requested))
log.info("Request start {} end {}".format(start, end))
events = self._client.get(pv, start, end, requested)
done = len(events) < requested
# Drop any events that are earlier than ones already fetched.
events = [e for e in events if e.timestamp > last_timestamp]
new_data = data.data_from_events(pv, events)
all_data = all_data.concatenate(new_data, zero_pad=True)
return all_data
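# --- Editor's usage sketch (not part of the original module) ---
# The archiver URL and PV name below are placeholders; a channel archiver
# typically serves its XMLRPC endpoint from a cgi-bin path like this.
def _example_fetch():
    from datetime import datetime
    fetcher = CaFetcher("http://archiver.example.com/cgi-bin/ArchiveDataServer.cgi")
    return fetcher._get_values(
        "EXAMPLE:PV", datetime(2021, 1, 1), datetime(2021, 1, 2), count=1000
    )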
|
[
"numpy.zeros",
"xmlrpc.client.ServerProxy"
] |
[((468, 484), 'xmlrpc.client.ServerProxy', 'ServerProxy', (['url'], {}), '(url)\n', (479, 484), False, 'from xmlrpc.client import ServerProxy\n'), ((2181, 2198), 'numpy.zeros', 'numpy.zeros', (['(0,)'], {}), '((0,))\n', (2192, 2198), False, 'import numpy\n')]
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import os
import tensorflow as tf
import math
ROOT_PATH = os.path.abspath('../../')
print(ROOT_PATH)
SUMMARY_PATH = os.path.join(ROOT_PATH, 'output/summary')
# backbone
NET_NAME = 'resnet50_v1d'
RESTORE_FROM_RPN = False
FIXED_BLOCKS = 1 # allow 0~3
FREEZE_BLOCKS = [True, False, False, False, False] # for gluoncv backbone
# neck
FPN_MODE = 'fpn'
SHARE_NET = True
USE_P5 = True
FPN_CHANNEL = 256
# bbox head
NUM_SUBNET_CONV = 4
LEVEL = ['P3', 'P4', 'P5', 'P6', 'P7']
BASE_ANCHOR_SIZE_LIST = [32, 64, 128, 256, 512]
ANCHOR_STRIDE = [8, 16, 32, 64, 128]
ANCHOR_SCALES = [2 ** 0, 2 ** (1.0 / 3.0), 2 ** (2.0 / 3.0)]
ANCHOR_RATIOS = [1, 1 / 2, 2., 1 / 3., 3., 5., 1 / 5.]
ANCHOR_ANGLES = [-90, -75, -60, -45, -30, -15]
ANCHOR_SCALE_FACTORS = None
USE_CENTER_OFFSET = True
METHOD = 'H'
ANGLE_RANGE = 90 # 90 or 180
USE_GN = False
SUBNETS_WEIGHTS_INITIALIZER = tf.random_normal_initializer(mean=0.0, stddev=0.01, seed=None)
SUBNETS_BIAS_INITIALIZER = tf.constant_initializer(value=0.0)
PROBABILITY = 0.01
FINAL_CONV_BIAS_INITIALIZER = tf.constant_initializer(value=-math.log((1.0 - PROBABILITY) / PROBABILITY))
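# --- Editor's note (not part of the original config) ---
# The bias above is the focal-loss prior trick popularised by RetinaNet:
# with b = -log((1 - p) / p), sigmoid(b) = p, so the classification subnet
# starts out predicting foreground with probability PROBABILITY.
_bias = -math.log((1.0 - PROBABILITY) / PROBABILITY)
assert abs(1.0 / (1.0 + math.exp(-_bias)) - PROBABILITY) < 1e-09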
# loss
CLS_WEIGHT = 1.0
REG_WEIGHT = 1.0
# sample
IOU_POSITIVE_THRESHOLD = 0.5
IOU_NEGATIVE_THRESHOLD = 0.4
# post-processing
NMS = True
NMS_IOU_THRESHOLD = 0.3
MAXIMUM_DETECTIONS = 100
FILTERED_SCORE = 0.05
VIS_SCORE = 0.4
# test and eval
TEST_SAVE_PATH = os.path.join(ROOT_PATH, 'tools/test_result')
EVALUATE_R_DIR = os.path.join(ROOT_PATH, 'output/evaluate_result_pickle/')
USE_07_METRIC = True
EVAL_THRESHOLD = 0.5
|
[
"os.path.abspath",
"tensorflow.constant_initializer",
"tensorflow.random_normal_initializer",
"math.log",
"os.path.join"
] |
[((149, 174), 'os.path.abspath', 'os.path.abspath', (['"""../../"""'], {}), "('../../')\n", (164, 174), False, 'import os\n'), ((207, 248), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""output/summary"""'], {}), "(ROOT_PATH, 'output/summary')\n", (219, 248), False, 'import os\n'), ((953, 1015), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', ([], {'mean': '(0.0)', 'stddev': '(0.01)', 'seed': 'None'}), '(mean=0.0, stddev=0.01, seed=None)\n', (981, 1015), True, 'import tensorflow as tf\n'), ((1043, 1077), 'tensorflow.constant_initializer', 'tf.constant_initializer', ([], {'value': '(0.0)'}), '(value=0.0)\n', (1066, 1077), True, 'import tensorflow as tf\n'), ((1464, 1508), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""tools/test_result"""'], {}), "(ROOT_PATH, 'tools/test_result')\n", (1476, 1508), False, 'import os\n'), ((1526, 1583), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""output/evaluate_result_pickle/"""'], {}), "(ROOT_PATH, 'output/evaluate_result_pickle/')\n", (1538, 1583), False, 'import os\n'), ((1158, 1201), 'math.log', 'math.log', (['((1.0 - PROBABILITY) / PROBABILITY)'], {}), '((1.0 - PROBABILITY) / PROBABILITY)\n', (1166, 1201), False, 'import math\n')]
|
import random as rn
import numpy as np
import matplotlib.pyplot as plt
import math
from matplotlib import patches
from matplotlib.patches import Polygon
def random_population(_nv, n, _lb, _ub):
    _pop = np.zeros((n, 2 * _nv))
    for i in range(n):
        _pop[i, :] = np.random.uniform(_lb, _ub)
        # snap each speed gene to -1 (reverse) or +1 (forward)
        for j in range(int(_pop[i, :].size / 2)):
            if _pop[i, j * 2] < 0:
                _pop[i, j * 2] = -1
            else:
                _pop[i, j * 2] = 1
    return _pop
def crossover(_pop, crossover_rate):
next_gen = np.zeros((crossover_rate, _pop.shape[1]))
for i in range(int(crossover_rate / 2)):
r1 = np.random.randint(0, _pop.shape[0])
r2 = np.random.randint(0, _pop.shape[0])
while r1 == r2:
r1 = np.random.randint(0, _pop.shape[0])
r2 = np.random.randint(0, _pop.shape[0])
cutting_point = np.random.randint(1, _pop.shape[1])
next_gen[2 * i, 0:cutting_point] = _pop[r1, 0:cutting_point]
next_gen[2 * i, cutting_point:] = _pop[r2, cutting_point:]
next_gen[2 * i + 1, 0:cutting_point] = _pop[r2, 0:cutting_point]
next_gen[2 * i + 1, cutting_point:] = _pop[r1, cutting_point:]
return next_gen
def mutation(_pop, mutation_rate):
next_gen = np.zeros((mutation_rate, _pop.shape[1]))
for i in range(int(mutation_rate / 2)):
r1 = np.random.randint(0, _pop.shape[0])
r2 = np.random.randint(0, _pop.shape[0])
while r1 == r2:
r1 = np.random.randint(0, _pop.shape[0])
r2 = np.random.randint(0, _pop.shape[0])
cutting_point = np.random.randint(0, _pop.shape[1])
next_gen[2 * i] = _pop[r1]
next_gen[2 * i, cutting_point] = _pop[r2, cutting_point]
next_gen[2 * i + 1] = _pop[r2]
next_gen[2 * i + 1, cutting_point] = _pop[r1, cutting_point]
return next_gen
def local_search(_pop, n, _step_size):
next_gen = np.zeros((n, _pop.shape[1]))
for i in range(n):
r1 = np.random.randint(0, _pop.shape[0])
unit = _pop[r1, :]
unit[1] += np.random.uniform(-_step_size, _step_size)
if unit[1] < lb[1]:
unit[1] = lb[1]
if unit[1] > ub[1]:
unit[1] = ub[1]
next_gen[i, :] = unit
return next_gen
def evaluation(_pop, x_s, y_s, alfa_s, _done):
_fitness_values = np.zeros((_pop.shape[0], 2))
_flipped_fitness_values = np.zeros((_pop.shape[0], 2))
i = 0
_trajectory = []
V = np.zeros(nv)
angle = np.zeros(nv)
for individual in _pop:
for n in range(nv):
V[n] = individual[2 * n]
angle[n] = individual[2 * n + 1]
x = x_s - ds * math.cos(alfa_s)
y = y_s - ds * math.sin(alfa_s)
alfa_n = alfa_s
for u in range(nv):
if abs(angle[u]) < 0.0001:
                # straight-line step over one time step dt
                x_n = x + V[u] * dt * math.cos(alfa_n)
                y_n = y + V[u] * dt * math.sin(alfa_n)
else:
a = dist_between_axles / math.tan(angle[u])
Ro = math.sqrt(dist_between_axles ** 2 / 4 + (abs(a) + car_width / 2) ** 2)
tau = math.copysign(1, angle[u]) * alfa_n + a * math.sin(dist_between_axles / 2 * Ro)
gama = V[u] * dt / Ro
x_n = x + Ro * (math.sin(gama + tau) - math.sin(tau))
y_n = y + math.copysign(1, angle[u]) * Ro * (math.cos(tau) - math.cos(gama + tau))
alfa_n = alfa_n + math.copysign(1, angle[u]) * gama
if abs(alfa_n) > math.pi:
alfa_n = alfa_n - math.copysign(1, alfa_n) * math.pi * 2
x = x_n + ds * math.cos(alfa_n)
y = y_n + ds * math.sin(alfa_n)
for j in range(2):
if j == 0: # objective 1
                # penalise trajectories that leave the (padded) parking area
                if x < -5 or x > parking_length or y < -5 or y > parking_width:
_fitness_values[i, j] = 1000
else:
_fitness_values[i, j] = math.sqrt(x ** 2 + y ** 2)
elif j == 1: # objective 2
_fitness_values[i, j] = beta - alfa_n
_flipped_fitness_values[i, 0] = 1 / _fitness_values[i, 0]
_flipped_fitness_values[i, 1] = 1 / _fitness_values[i, 1]
if _fitness_values[i, 0] <= 0.8 and \
(abs(_fitness_values[i, 1]) <= 0.1745 or abs(_fitness_values[i, 1]) >= 2.9671):
_done = True
if final is True:
_trajectory = np.append(_trajectory, [individual])
i = i + 1
return _fitness_values, _trajectory, _done, _flipped_fitness_values
def best_individuals_visualization(best, x_s, y_s, alfa_s):
_positions_x = []
_positions_y = []
_car_angle = []
i = 0
C = nv * 2
V = np.zeros(nv)
angle = np.zeros(nv)
best_units = np.array_split(best, len(best) / C)
for individual in best_units:
for n in range(nv):
V[n] = individual[2 * n]
angle[n] = individual[2 * n + 1]
x = x_s - ds * math.cos(alfa_s)
y = y_s - ds * math.sin(alfa_s)
alfa_n = alfa_s
for u in range(nv):
if abs(angle[u]) < 0.0001:
x_n = x + V[u] * dt * math.cos(alfa_n)
y_n = y + V[u] * dt * math.sin(alfa_n)
else:
a = dist_between_axles / math.tan(angle[u])
Ro = math.sqrt(dist_between_axles ** 2 / 4 + (abs(a) + car_width / 2) ** 2)
tau = math.copysign(1, angle[u]) * alfa_n + a * math.sin(dist_between_axles / 2 * Ro)
gama = V[u] * dt / Ro
x_n = x + Ro * (math.sin(gama + tau) - math.sin(tau))
y_n = y + math.copysign(1, angle[u]) * Ro * (math.cos(tau) - math.cos(gama + tau))
alfa_n = alfa_n + math.copysign(1, angle[u]) * gama
if abs(alfa_n) > math.pi:
alfa_n = alfa_n - math.copysign(1, alfa_n) * math.pi * 2
x = x_n + ds * math.cos(alfa_n)
y = y_n + ds * math.sin(alfa_n)
_positions_x = np.append(_positions_x, [x])
_positions_y = np.append(_positions_y, [y])
_car_angle = np.append(_car_angle, [alfa_n])
i = i + 1
position_x_arr = _positions_x
position_y_arr = _positions_y
car_angles_arr = _car_angle
return position_x_arr, position_y_arr, car_angles_arr
def crowding_calculation(_fitness_values):
_pop_size = len(_fitness_values[:, 0])
fitness_value_number = len(_fitness_values[0, :])
matrix_for_crowding = np.zeros((_pop_size, fitness_value_number))
normalize_fitness_values = (_fitness_values - _fitness_values.min(0)) / _fitness_values.ptp(0) # normalize fit val
for i in range(fitness_value_number):
crowding_results = np.zeros(_pop_size)
crowding_results[0] = 1 # extreme point has the max crowding distance
crowding_results[_pop_size - 1] = 1 # extreme point has the max crowding distance
sorting_normalize_fitness_values = np.sort(normalize_fitness_values[:, i])
sorting_normalized_values_index = np.argsort(normalize_fitness_values[:, i])
# crowding distance calculation
crowding_results[1:_pop_size - 1] = (
sorting_normalize_fitness_values[2:_pop_size] - sorting_normalize_fitness_values[0:_pop_size - 2])
re_sorting = np.argsort(sorting_normalized_values_index) # re_sorting to the original order
matrix_for_crowding[:, i] = crowding_results[re_sorting]
crowding_distance = np.sum(matrix_for_crowding, axis=1) # crowding distance of each solution
return crowding_distance
def remove_using_crowding(_fitness_values, number_solutions_needed):
pop_index = np.arange(_fitness_values.shape[0])
crowding_distance = crowding_calculation(_fitness_values)
selected_pop_index = np.zeros(number_solutions_needed)
selected_fitness_values = np.zeros((number_solutions_needed, len(_fitness_values[0, :])))
for i in range(number_solutions_needed):
_pop_size = pop_index.shape[0]
solution_1 = rn.randint(0, _pop_size - 1)
solution_2 = rn.randint(0, _pop_size - 1)
if crowding_distance[solution_1] >= crowding_distance[solution_2]:
selected_pop_index[i] = pop_index[solution_1]
selected_fitness_values[i, :] = _fitness_values[solution_1, :]
pop_index = np.delete(pop_index, solution_1, axis=0)
            _fitness_values = np.delete(_fitness_values, solution_1, axis=0)
crowding_distance = np.delete(crowding_distance, solution_1, axis=0)
else:
selected_pop_index[i] = pop_index[solution_2]
selected_fitness_values[i, :] = _fitness_values[solution_2, :]
pop_index = np.delete(pop_index, solution_2, axis=0)
            _fitness_values = np.delete(_fitness_values, solution_2, axis=0)
crowding_distance = np.delete(crowding_distance, solution_2, axis=0)
selected_pop_index = np.asarray(selected_pop_index, dtype=int)
return selected_pop_index
def pareto_front_finding(_fitness_values, pop_index):
_pop_size = _fitness_values.shape[0]
_pareto_front = np.ones(_pop_size, dtype=bool)
for i in range(_pop_size):
for j in range(_pop_size):
if all(_fitness_values[j] <= _fitness_values[i]) and any(_fitness_values[j] < _fitness_values[i]):
_pareto_front[i] = 0
break
return pop_index[_pareto_front]
def selection(_pop, _fitness_values, _pop_size):
    pop_index_0 = np.arange(_pop.shape[0])
    pop_index = np.arange(_pop.shape[0])
    _pareto_front_index = []
    while len(_pareto_front_index) < _pop_size:
        new_pareto_front = pareto_front_finding(_fitness_values[pop_index_0, :], pop_index_0)
        total_pareto_size = len(_pareto_front_index) + len(new_pareto_front)
        if total_pareto_size > _pop_size:
            number_solutions_needed = _pop_size - len(_pareto_front_index)
selected_solutions = (remove_using_crowding(_fitness_values[new_pareto_front], number_solutions_needed))
new_pareto_front = new_pareto_front[selected_solutions]
_pareto_front_index = np.hstack((_pareto_front_index, new_pareto_front)) # add to pareto
remaining_index = set(pop_index) - set(_pareto_front_index)
pop_index_0 = np.array(list(remaining_index))
selected_pop = _pop[_pareto_front_index.astype(int)]
return selected_pop
def GOL(_flipped_fitness_values, _fitness_values):
gol = []
max_fitness_val_pos = max(_fitness_values[:, 0])
max_fitness_val_ang = max(_fitness_values[:, 1])
for k in range(pop_summed):
if _flipped_fitness_values[k, 0] / max_fitness_val_pos < _flipped_fitness_values[k, 1] / max_fitness_val_ang:
gol = np.append(gol, _flipped_fitness_values[k, 0] / max_fitness_val_pos)
else:
gol = np.append(gol, _flipped_fitness_values[k, 1] / max_fitness_val_ang)
best_gol = max(gol)
return best_gol
########################
# Parameters #
########################
starting_x = 50.0 # values from 10.0 to 55.0
starting_y = 35.0 # values from 10.0 to 35.0
car_rotation = -math.pi/3 # values from -math.pi to math.pi
number_of_controls = 60
population_size = 160
########################
# Parameters #
########################
stan = [starting_x, starting_y, car_rotation]
nv = number_of_controls
lb = []
ub = []
for _ in range(nv):
lb = np.append(lb, [-1, -math.pi / 6])
ub = np.append(ub, [1, math.pi / 6])
pop_size = population_size
rate_crossover = 30
rate_mutation = 20
rate_local_search = 30
pop_summed = int(population_size + rate_crossover + rate_mutation + rate_local_search)
step_size = 0.1
pop = random_population(nv, pop_size, lb, ub)
best_gols = []
final = False
done = False
parking_spot_length = 6.0
parking_spot_width = 3.0
beta = 0
parking_length = 60.0
parking_width = 40.0
car_width = 1.8
car_length = 4.0
front_axle = 1.2
rear_axle = 0.34
ds = (front_axle - rear_axle / 2)
dist_between_axles = car_length - front_axle - rear_axle
dt = 1
iterations = 0
while not done:
offspring_from_crossover = crossover(pop, rate_crossover)
offspring_from_mutation = mutation(pop, rate_mutation)
offspring_from_local_search = local_search(pop, rate_local_search, step_size)
pop = np.append(pop, offspring_from_crossover, axis=0)
pop = np.append(pop, offspring_from_mutation, axis=0)
pop = np.append(pop, offspring_from_local_search, axis=0)
fitness_values, trajectory, done, flipped_fitness_values = evaluation(pop, stan[0], stan[1], stan[2], done)
best_gols = np.append(best_gols, GOL(flipped_fitness_values, fitness_values))
pop = selection(pop, fitness_values, pop_size)
print('iteration', iterations)
iterations = iterations + 1
final = True
fitness_values, final_trajectory, done, final_flipped_fitness_values = evaluation(pop, stan[0], stan[1], stan[2], done)
positions_x, positions_y, car_angles = best_individuals_visualization(final_trajectory, stan[0], stan[1], stan[2])
index = np.arange(pop.shape[0]).astype(int)
pareto_front_index = pareto_front_finding(fitness_values, index)
pop = pop[pareto_front_index, :]
pareto_front = fitness_values[pareto_front_index]
print("______________")
print("Kryteria optymalizacji:")
print("Odl. od miejsca | Różnica kąta wzgl.")
print("parkingowego | miejsca parkingowego")
print(fitness_values)
plt.scatter(fitness_values[:, 0], abs(abs(fitness_values[:, 1] * (180 / math.pi)) - 180), marker='x', c='r')
plt.scatter(pareto_front[:, 0], abs(abs(pareto_front[:, 1] * (180 / math.pi)) - 180), marker='x', c='b')
blue_patch = patches.Patch(color='blue', label='Pareto-optimal individuals')
red_patch = patches.Patch(color='red', label='Rest of the population')
plt.legend(handles=[blue_patch, red_patch])
plt.xlabel('Straight-line distance from the parking spot [m]')
plt.ylabel('Angle difference relative to the parking spot [degrees]')
plt.show()
fig = plt.figure()
ax = fig.add_subplot()
ax.set_title('Trajectory of the optimal individual')
ax.set_xlabel('X [m]')
ax.set_ylabel('Y [m]')
ax.set_xlim(-10, parking_length)
ax.set_ylim(-10, parking_width)
ax.add_patch(patches.Rectangle((0 - parking_spot_length / 2, 0 - parking_spot_width / 2), parking_spot_length,
parking_spot_width, edgecolor='black', fill=False))
fig.show()
for m in range(nv):
xA = positions_x[m] - car_length / 2 * math.cos(car_angles[m]) - car_width / 2 * math.sin(car_angles[m])
yA = positions_y[m] - car_length / 2 * math.sin(car_angles[m]) + car_width / 2 * math.cos(car_angles[m])
xB = xA + car_width * math.sin(car_angles[m])
yB = yA - car_width * math.cos(car_angles[m])
xD = xA + car_length * math.cos(car_angles[m])
yD = yA + car_length * math.sin(car_angles[m])
xC = xB + car_length * math.cos(car_angles[m])
yC = yB + car_length * math.sin(car_angles[m])
points = [[xA, yA], [xB, yB], [xC, yC], [xD, yD]]
car = Polygon(points, fill=None, edgecolor='r')
ax.add_patch(car)
plt.show()
plot_iterations = np.arange(iterations)
plt.scatter(plot_iterations, best_gols, marker='o', c='g')
plt.title('Best GOL parameter per iteration')
plt.xlabel('Iteration number')
plt.ylabel('GOL parameter')
plt.show()
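# --- Editor's sanity check (not part of the original script) ---
# Toy demonstration of pareto_front_finding with both objectives minimised:
# only the last row [3, 3] is dominated (by [2, 2]), so the returned
# indices are [0 1 2].
_toy_fitness = np.array([[1.0, 4.0], [2.0, 2.0], [4.0, 1.0], [3.0, 3.0]])
print(pareto_front_finding(_toy_fitness, np.arange(4)))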
|
[
"matplotlib.pyplot.title",
"numpy.sum",
"numpy.ones",
"numpy.argsort",
"matplotlib.patches.Polygon",
"matplotlib.pyplot.figure",
"numpy.random.randint",
"numpy.arange",
"math.copysign",
"matplotlib.patches.Patch",
"random.randint",
"matplotlib.patches.Rectangle",
"numpy.append",
"math.cos",
"matplotlib.pyplot.show",
"math.sqrt",
"matplotlib.pyplot.legend",
"numpy.asarray",
"math.sin",
"numpy.hstack",
"numpy.sort",
"matplotlib.pyplot.ylabel",
"numpy.delete",
"numpy.random.uniform",
"matplotlib.pyplot.scatter",
"math.tan",
"numpy.zeros",
"matplotlib.pyplot.xlabel"
] |
[((14013, 14075), 'matplotlib.patches.Patch', 'patches.Patch', ([], {'color': '"""blue"""', 'label': '"""Pareto-optimal individuals"""'}), "(color='blue', label='Pareto-optimal individuals')\n", (14026, 14075), False, 'from matplotlib import patches\n'), ((14089, 14141), 'matplotlib.patches.Patch', 'patches.Patch', ([], {'color': '"""red"""', 'label': '"""Rest of the population"""'}), "(color='red', label='Rest of the population')\n", (14102, 14141), False, 'from matplotlib import patches\n'), ((14143, 14186), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'handles': '[blue_patch, red_patch]'}), '(handles=[blue_patch, red_patch])\n', (14153, 14186), True, 'import matplotlib.pyplot as plt\n'), ((14188, 14255), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Straight-line distance from the parking spot [m]"""'], {}), "('Straight-line distance from the parking spot [m]')\n", (14198, 14255), True, 'import matplotlib.pyplot as plt\n'), ((14257, 14323), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Angle difference relative to the parking spot [degrees]"""'], {}), "('Angle difference relative to the parking spot [degrees]')\n", (14267, 14323), True, 'import matplotlib.pyplot as plt\n'), ((14325, 14335), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (14333, 14335), True, 'import matplotlib.pyplot as plt\n'), ((14345, 14357), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (14355, 14357), True, 'import matplotlib.pyplot as plt\n'), ((15452, 15462), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (15460, 15462), True, 'import matplotlib.pyplot as plt\n'), ((15484, 15505), 'numpy.arange', 'np.arange', (['iterations'], {}), '(iterations)\n', (15493, 15505), True, 'import numpy as np\n'), ((15507, 15565), 'matplotlib.pyplot.scatter', 'plt.scatter', (['plot_iterations', 'best_gols'], {'marker': '"""o"""', 'c': '"""g"""'}), "(plot_iterations, best_gols, marker='o', c='g')\n", (15518, 15565), True, 'import matplotlib.pyplot as plt\n'), ((15567, 15622), 'matplotlib.pyplot.title', 'plt.title', (['"""Best GOL parameter per iteration"""'], {}), "('Best GOL parameter per iteration')\n", (15576, 15622), True, 'import matplotlib.pyplot as plt\n'), ((15624, 15652), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Iteration number"""'], {}), "('Iteration number')\n", (15634, 15652), True, 'import matplotlib.pyplot as plt\n'), ((15654, 15680), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""GOL parameter"""'], {}), "('GOL parameter')\n", (15664, 15680), True, 'import matplotlib.pyplot as plt\n'), ((15682, 15692), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (15690, 15692), True, 'import matplotlib.pyplot as plt\n'), ((216, 237), 'numpy.zeros', 'np.zeros', (['(n, 2 * _nv)'], {}), '((n, 2 * _nv))\n', (224, 237), True, 'import numpy as np\n'), ((576, 617), 'numpy.zeros', 'np.zeros', (['(crossover_rate, _pop.shape[1])'], {}), '((crossover_rate, _pop.shape[1]))\n', (584, 617), True, 'import numpy as np\n'), ((1321, 1361), 'numpy.zeros', 'np.zeros', (['(mutation_rate, _pop.shape[1])'], {}), '((mutation_rate, _pop.shape[1]))\n', (1329, 1361), True, 'import numpy as np\n'), ((1996, 2024), 'numpy.zeros', 'np.zeros', (['(n, _pop.shape[1])'], {}), '((n, _pop.shape[1]))\n', (2004, 2024), True, 'import numpy as np\n'), ((2435, 2463), 'numpy.zeros', 'np.zeros', (['(_pop.shape[0], 2)'], {}), '((_pop.shape[0], 2))\n', (2443, 2463), True, 'import numpy as np\n'), ((2495, 2523), 'numpy.zeros', 'np.zeros', (['(_pop.shape[0], 2)'], {}), '((_pop.shape[0], 2))\n', (2503, 2523), True, 'import numpy as np\n'), ((2566, 2578), 'numpy.zeros', 'np.zeros', (['nv'], {}), '(nv)\n', (2574, 2578), True, 'import numpy as np\n'), ((2592, 2604), 'numpy.zeros', 'np.zeros', (['nv'], {}), '(nv)\n', (2600, 2604), True, 'import numpy as np\n'), ((4847, 4859), 'numpy.zeros', 'np.zeros', (['nv'], {}), '(nv)\n', (4855, 4859), True, 'import numpy as np\n'), ((4873, 4885), 'numpy.zeros', 'np.zeros', (['nv'], {}), '(nv)\n', (4881, 4885), True, 'import numpy as np\n'), ((6685, 6728), 'numpy.zeros', 'np.zeros', (['(_pop_size, fitness_value_number)'], {}), '((_pop_size, fitness_value_number))\n', (6693, 6728), True, 'import numpy as np\n'), ((7682, 7717), 'numpy.sum', 'np.sum', (['matrix_for_crowding'], {'axis': '(1)'}), '(matrix_for_crowding, axis=1)\n', (7688, 7717), True, 'import numpy as np\n'), ((7879, 7914), 'numpy.arange', 'np.arange', (['_fitness_values.shape[0]'], {}), '(_fitness_values.shape[0])\n', (7888, 7914), True, 'import numpy as np\n'), ((8004, 8037), 'numpy.zeros', 'np.zeros', (['number_solutions_needed'], {}), '(number_solutions_needed)\n', (8012, 8037), True, 'import numpy as np\n'), ((9162, 9203), 'numpy.asarray', 'np.asarray', (['selected_pop_index'], {'dtype': 'int'}), '(selected_pop_index, dtype=int)\n', (9172, 9203), True, 'import numpy as np\n'), ((9359, 9389), 'numpy.ones', 'np.ones', (['_pop_size'], {'dtype': 'bool'}), '(_pop_size, dtype=bool)\n', (9366, 9389), True, 'import numpy as np\n'), ((9745, 9768), 'numpy.arange', 'np.arange', (['_pop.shape[0]'], {}), '(_pop.shape[0])\n', (9754, 9768), True, 'import numpy as np\n'), ((9786, 9809), 'numpy.arange', 'np.arange', (['_pop.shape[0]'], {}), '(_pop.shape[0])\n', (9795, 9809), True, 'import numpy as np\n'), ((11762, 11795), 'numpy.append', 'np.append', (['lb', '[-1, -math.pi / 6]'], {}), '(lb, [-1, -math.pi / 6])\n', (11771, 11795), True, 'import numpy as np\n'), ((11806, 11837), 'numpy.append', 'np.append', (['ub', '[1, math.pi / 6]'], {}), '(ub, [1, math.pi / 6])\n', (11815, 11837), True, 'import numpy as np\n'), ((12666, 12714), 'numpy.append', 'np.append', (['pop', 'offspring_from_crossover'], {'axis': '(0)'}), '(pop, offspring_from_crossover, axis=0)\n', (12675, 12714), True, 'import numpy as np\n'), ((12726, 12773), 'numpy.append', 'np.append', (['pop', 'offspring_from_mutation'], {'axis': '(0)'}), '(pop, offspring_from_mutation, axis=0)\n', (12735, 12773), True, 'import numpy as np\n'), ((12785, 12836), 'numpy.append', 'np.append', (['pop', 'offspring_from_local_search'], {'axis': '(0)'}), '(pop, offspring_from_local_search, axis=0)\n', (12794, 12836), True, 'import numpy as np\n'), ((14565, 14717), 'matplotlib.patches.Rectangle', 'patches.Rectangle', (['(0 - parking_spot_length / 2, 0 - parking_spot_width / 2)', 'parking_spot_length', 'parking_spot_width'], {'edgecolor': '"""black"""', 'fill': '(False)'}), "((0 - parking_spot_length / 2, 0 - parking_spot_width / 2),\n parking_spot_length, parking_spot_width, edgecolor='black', fill=False)\n", (14582, 14717), False, 'from matplotlib import patches\n'), ((15386, 15427), 'matplotlib.patches.Polygon', 'Polygon', (['points'], {'fill': 'None', 'edgecolor': '"""r"""'}), "(points, fill=None, edgecolor='r')\n", (15393, 15427), False, 'from matplotlib.patches import Polygon\n'), ((284, 309), 'numpy.random.uniform', 'np.random.uniform', (['_lb', '_ub'], {}), '(_lb, _ub)\n', (301, 309), True, 'import numpy as np\n'), ((678, 713), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (695, 713), True, 'import numpy as np\n'), ((728, 763), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (745, 763), True, 'import numpy as np\n'), ((922, 957), 'numpy.random.randint', 'np.random.randint', (['(1)', '_pop.shape[1]'], {}), '(1, _pop.shape[1])\n', (939, 957), True, 'import numpy as np\n'), ((1421, 1456), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (1438, 1456), True, 'import numpy as np\n'), ((1471, 1506), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (1488, 1506), True, 'import numpy as np\n'), ((1665, 1700), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[1]'], {}), '(0, _pop.shape[1])\n', (1682, 1700), True, 'import numpy as np\n'), ((2063, 2098), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (2080, 2098), True, 'import numpy as np\n'), ((2147, 2189), 'numpy.random.uniform', 'np.random.uniform', (['(-_step_size)', '_step_size'], {}), '(-_step_size, _step_size)\n', (2164, 2189), True, 'import numpy as np\n'), ((6921, 6940), 'numpy.zeros', 'np.zeros', (['_pop_size'], {}), '(_pop_size)\n', (6929, 6940), True, 'import numpy as np\n'), ((7157, 7196), 'numpy.sort', 'np.sort', (['normalize_fitness_values[:, i]'], {}), '(normalize_fitness_values[:, i])\n', (7164, 7196), True, 'import numpy as np\n'), ((7240, 7282), 'numpy.argsort', 'np.argsort', (['normalize_fitness_values[:, i]'], {}), '(normalize_fitness_values[:, i])\n', (7250, 7282), True, 'import numpy as np\n'), ((7509, 7552), 'numpy.argsort', 'np.argsort', (['sorting_normalized_values_index'], {}), '(sorting_normalized_values_index)\n', (7519, 7552), True, 'import numpy as np\n'), ((8243, 8271), 'random.randint', 'rn.randint', (['(0)', '(_pop_size - 1)'], {}), '(0, _pop_size - 1)\n', (8253, 8271), True, 'import random as rn\n'), ((8294, 8322), 'random.randint', 'rn.randint', (['(0)', '(_pop_size - 1)'], {}), '(0, _pop_size - 1)\n', (8304, 8322), True, 'import random as rn\n'), ((10399, 10449), 'numpy.hstack', 'np.hstack', (['(_pareto_front_index, new_pareto_front)'], {}), '((_pareto_front_index, new_pareto_front))\n', (10408, 10449), True, 'import numpy as np\n'), ((13418, 13441), 'numpy.arange', 'np.arange', (['pop.shape[0]'], {}), '(pop.shape[0])\n', (13427, 13441), True, 'import numpy as np\n'), ((807, 842), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (824, 842), True, 'import numpy as np\n'), ((861, 896), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (878, 896), True, 'import numpy as np\n'), ((1550, 1585), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (1567, 1585), True, 'import numpy as np\n'), ((1604, 1639), 'numpy.random.randint', 'np.random.randint', (['(0)', '_pop.shape[0]'], {}), '(0, _pop.shape[0])\n', (1621, 1639), True, 'import numpy as np\n'), ((6180, 6208), 'numpy.append', 'np.append', (['_positions_x', '[x]'], {}), '(_positions_x, [x])\n', (6189, 6208), True, 'import numpy as np\n'), ((6237, 6265), 'numpy.append', 'np.append', (['_positions_y', '[y]'], {}), '(_positions_y, [y])\n', (6246, 6265), True, 'import numpy as np\n'), ((6292, 6323), 'numpy.append', 'np.append', (['_car_angle', '[alfa_n]'], {}), '(_car_angle, [alfa_n])\n', (6301, 6323), True, 'import numpy as np\n'), ((8559, 8599), 'numpy.delete', 'np.delete', (['pop_index', 'solution_1'], {'axis': '(0)'}), '(pop_index, solution_1, axis=0)\n', (8568, 8599), True, 'import numpy as np\n'), ((8631, 8676), 'numpy.delete', 'np.delete', (['_fitness_values', 'solution_1'], {'axis': '(0)'}), '(_fitness_values, solution_1, axis=0)\n', (8640, 8676), True, 'import numpy as np\n'), ((8710, 8758), 'numpy.delete', 'np.delete', (['crowding_distance', 'solution_1'], {'axis': '(0)'}), '(crowding_distance, solution_1, axis=0)\n', (8719, 8758), True, 'import numpy as np\n'), ((8934, 8974), 'numpy.delete', 'np.delete', (['pop_index', 'solution_2'], {'axis': '(0)'}), '(pop_index, solution_2, axis=0)\n', (8943, 8974), True, 'import numpy as np\n'), ((9006, 9051), 'numpy.delete', 'np.delete', (['_fitness_values', 'solution_2'], {'axis': '(0)'}), '(_fitness_values, solution_2, axis=0)\n', (9015, 9051), True, 'import numpy as np\n'), ((9085, 9133), 'numpy.delete', 'np.delete', (['crowding_distance', 'solution_2'], {'axis': '(0)'}), '(crowding_distance, solution_2, axis=0)\n', (9094, 9133), True, 'import numpy as np\n'), ((11025, 11092), 'numpy.append', 'np.append', (['gol', '(_flipped_fitness_values[k, 0] / max_fitness_val_pos)'], {}), '(gol, _flipped_fitness_values[k, 0] / max_fitness_val_pos)\n', (11034, 11092), True, 'import numpy as np\n'), ((11127, 11194), 'numpy.append', 'np.append', (['gol', '(_flipped_fitness_values[k, 1] / max_fitness_val_ang)'], {}), '(gol, _flipped_fitness_values[k, 1] / max_fitness_val_ang)\n', (11136, 11194), True, 'import numpy as np\n'), ((14868, 14891), 'math.sin', 'math.sin', (['car_angles[m]'], {}), '(car_angles[m])\n', (14876, 14891), False, 'import math\n'), ((14978, 15001), 'math.cos', 'math.cos', (['car_angles[m]'], {}), '(car_angles[m])\n', (14986, 15001), False, 'import math\n'), ((15031, 15054), 'math.sin', 'math.sin', (['car_angles[m]'], {}), '(car_angles[m])\n', (15039, 15054), False, 'import math\n'), ((15082, 15105), 'math.cos', 'math.cos', (['car_angles[m]'], {}), '(car_angles[m])\n', (15090, 15105), False, 'import math\n'), ((15136, 15159), 'math.cos', 'math.cos', (['car_angles[m]'], {}), '(car_angles[m])\n', (15144, 15159), False, 'import math\n'), ((15188, 15211), 'math.sin', 'math.sin', (['car_angles[m]'], {}), '(car_angles[m])\n', (15196, 15211), False, 'import math\n'), ((15242, 15265), 'math.cos', 'math.cos', (['car_angles[m]'], {}), '(car_angles[m])\n', (15250, 15265), False, 'import math\n'), ((15294, 15317), 'math.sin', 'math.sin', (['car_angles[m]'], {}), '(car_angles[m])\n', (15302, 15317), False, 'import math\n'), ((2773, 2789), 'math.cos', 'math.cos', (['alfa_s'], {}), '(alfa_s)\n', (2781, 2789), False, 'import math\n'), ((2814, 2830), 'math.sin', 'math.sin', (['alfa_s'], {}), '(alfa_s)\n', (2822, 2830), False, 'import math\n'), ((4546, 4582), 'numpy.append', 'np.append', (['_trajectory', '[individual]'], {}), '(_trajectory, [individual])\n', (4555, 4582), True, 'import numpy as np\n'), ((5114, 5130), 'math.cos', 'math.cos', (['alfa_s'], {}), '(alfa_s)\n', (5122, 5130), False, 'import math\n'), ((5155, 5171), 'math.sin', 'math.sin', (['alfa_s'], {}), '(alfa_s)\n', (5163, 5171), False, 'import math\n'), ((14826, 14849), 'math.cos', 'math.cos', (['car_angles[m]'], {}), '(car_angles[m])\n', (14834, 14849), False, 'import math\n'), ((14936, 14959), 'math.sin', 'math.sin', (['car_angles[m]'], {}), '(car_angles[m])\n', (14944, 14959), False, 'import math\n'), ((3090, 3108), 'math.tan', 'math.tan', (['angle[u]'], {}), '(angle[u])\n', (3098, 3108), False, 'import math\n'), ((3733, 3749), 'math.cos', 'math.cos', (['alfa_n'], {}), '(alfa_n)\n', (3741, 3749), False, 'import math\n'), ((3778, 3794), 'math.sin', 'math.sin', (['alfa_n'], {}), '(alfa_n)\n', (3786, 3794), False, 'import math\n'), ((4053, 4079), 'math.sqrt', 'math.sqrt', (['(x ** 2 + y ** 2)'], {}), '(x ** 2 + y ** 2)\n', (4062, 4079), False, 'import math\n'), ((5445, 5463), 'math.tan', 'math.tan', (['angle[u]'], {}), '(angle[u])\n', (5453, 5463), False, 'import math\n'), ((6090, 6106), 'math.cos', 'math.cos', (['alfa_n'], {}), '(alfa_n)\n', (6098, 6106), False, 'import math\n'), ((6135, 6151), 'math.sin', 'math.sin', (['alfa_n'], {}), '(alfa_n)\n', (6143, 6151), False, 'import math\n'), ((2961, 2977), 'math.cos', 'math.cos', (['alfa_n'], {}), '(alfa_n)\n', (2969, 2977), False, 'import math\n'), ((3012, 3028), 'math.sin', 'math.sin', (['alfa_n'], {}), '(alfa_n)\n', (3020, 3028), False, 'import math\n'), ((3225, 3251), 'math.copysign', 'math.copysign', (['(1)', 'angle[u]'], {}), '(1, angle[u])\n', (3238, 3251), False, 'import math\n'), ((3267, 3304), 'math.sin', 'math.sin', (['(dist_between_axles / 2 * Ro)'], {}), '(dist_between_axles / 2 * Ro)\n', (3275, 3304), False, 'import math\n'), ((3550, 3576), 'math.copysign', 'math.copysign', (['(1)', 'angle[u]'], {}), '(1, angle[u])\n', (3563, 3576), False, 'import math\n'), ((5309, 5325), 'math.cos', 'math.cos', (['alfa_n'], {}), '(alfa_n)\n', (5317, 5325), False, 'import math\n'), ((5365, 5381), 'math.sin', 'math.sin', (['alfa_n'], {}), '(alfa_n)\n', (5373, 5381), False, 'import math\n'), ((5580, 5606), 'math.copysign', 'math.copysign', (['(1)', 'angle[u]'], {}), '(1, angle[u])\n', (5593, 5606), False, 'import math\n'), ((5622, 5659), 'math.sin', 'math.sin', (['(dist_between_axles / 2 * Ro)'], {}), '(dist_between_axles / 2 * Ro)\n', (5630, 5659), False, 'import math\n'), ((5905, 5931), 'math.copysign', 'math.copysign', (['(1)', 'angle[u]'], {}), '(1, angle[u])\n', (5918, 5931), False, 'import math\n'), ((3377, 3397), 'math.sin', 'math.sin', (['(gama + tau)'], {}), '(gama + tau)\n', (3385, 3397), False, 'import math\n'), ((3400, 3413), 'math.sin', 'math.sin', (['tau'], {}), '(tau)\n', (3408, 3413), False, 'import math\n'), ((3442, 3468), 'math.copysign', 'math.copysign', (['(1)', 'angle[u]'], {}), '(1, angle[u])\n', (3455, 3468), False, 'import math\n'), ((3477, 3490), 'math.cos', 'math.cos', (['tau'], {}), '(tau)\n', (3485, 3490), False, 'import math\n'), ((3493, 3513), 'math.cos', 'math.cos', (['(gama + tau)'], {}), '(gama + tau)\n', (3501, 3513), False, 'import math\n'), ((5732, 5752), 'math.sin', 'math.sin', (['(gama + tau)'], {}), '(gama + tau)\n', (5740, 5752), False, 'import math\n'), ((5755, 5768), 'math.sin', 'math.sin', (['tau'], {}), '(tau)\n', (5763, 5768), False, 'import math\n'), ((5797, 5823), 'math.copysign', 'math.copysign', (['(1)', 'angle[u]'], {}), '(1, angle[u])\n', (5810, 5823), False, 'import math\n'), ((5832, 5845), 'math.cos', 'math.cos', (['tau'], {}), '(tau)\n', (5840, 5845), False, 'import math\n'), ((5848, 5868), 'math.cos', 'math.cos', (['(gama + tau)'], {}), '(gama + tau)\n', (5856, 5868), False, 'import math\n'), ((3666, 3690), 'math.copysign', 'math.copysign', (['(1)', 'alfa_n'], {}), '(1, alfa_n)\n', (3679, 3690), False, 'import math\n'), ((6021, 6045), 'math.copysign', 'math.copysign', (['(1)', 'alfa_n'], {}), '(1, alfa_n)\n', (6034, 6045), False, 'import math\n')]
|
# -*- coding: utf-8 -*-
from providerModules.a4kScrapers import core
class sources(core.DefaultSources):
def __init__(self, *args, **kwargs):
super(sources, self).__init__(__name__, *args, **kwargs)
def _get_token_and_cookies(self, url):
response = self._request.get(url.base)
token_id = core.re.findall(r'token\: (.*)\n', response.text)[0]
token = ''.join(core.re.findall(token_id + r" ?\+?\= ?'(.*)'", response.text))
cookies = ''
for cookie in response.cookies:
cookies += '%s=%s;' % (cookie.name, cookie.value)
return (token, cookies)
def _search_request(self, url, query, force_token_refresh=False):
(token, cookies) = core.database.get(self._get_token_and_cookies, 0 if force_token_refresh else 1, url)
headers = {
'x-request-token': token,
'cookie': cookies
}
query = core.quote_plus(query)
data = {
'query': query,
'offset': 0,
'limit': 99,
'filters[field]': 'seeds',
'filters[sort]': 'desc',
'filters[time]': 4,
'filters[category]': 3 if self.is_movie_query() else 4,
'filters[adult]': False,
'filters[risky]': False
}
response = self._request.post(url.base + url.search, data, headers=headers)
if response.status_code != 200:
if not force_token_refresh:
return self._search_request(url, query, force_token_refresh=True)
            core.tools.log('No response from %s' % url, 'error')
return []
response = core.json.loads(response.text)
if response['error']:
return []
else:
return response['content']
def _soup_filter(self, response):
return response
def _title_filter(self, el):
return el['name']
def _info(self, el, url, torrent):
torrent['magnet'] = el['magnet']
        try:
            size = int(el['size'])
            if size == 0:
                torrent['magnet'] = ''
            else:
                # normalise the reported size (apparently to MB): small
                # thePirateBay values look like GB, very large values like KB
                if size < 120 and el['source'] == 'thePirateBay':
                    size = size * 1024
                elif size > 122880:
                    size = int(size / 1024)
                elif size < 120:
                    torrent['magnet'] = ''
            torrent['size'] = size
        except Exception: pass
torrent['seeds'] = el['seeds']
return torrent
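# --- Editor's illustration (not part of the original provider) ---
# The two regexes in _get_token_and_cookies reassemble a request token
# that the site builds in JavaScript across several `+=` assignments.
# The page snippet below is a made-up example of that pattern.
import re as _re
_page = "token: _t\n_t = 'abc'\n_t += 'def'\n"
_tid = _re.findall(r'token\: (.*)\n', _page)[0]
assert ''.join(_re.findall(_tid + r" ?\+?\= ?'(.*)'", _page)) == 'abcdef'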
|
[
"providerModules.a4kScrapers.core.re.findall",
"providerModules.a4kScrapers.core.tools.log",
"providerModules.a4kScrapers.core.json.loads",
"providerModules.a4kScrapers.core.quote_plus",
"providerModules.a4kScrapers.core.database.get"
] |
[((718, 806), 'providerModules.a4kScrapers.core.database.get', 'core.database.get', (['self._get_token_and_cookies', '(0 if force_token_refresh else 1)', 'url'], {}), '(self._get_token_and_cookies, 0 if force_token_refresh else\n 1, url)\n', (735, 806), False, 'from providerModules.a4kScrapers import core\n'), ((919, 941), 'providerModules.a4kScrapers.core.quote_plus', 'core.quote_plus', (['query'], {}), '(query)\n', (934, 941), False, 'from providerModules.a4kScrapers import core\n'), ((1650, 1680), 'providerModules.a4kScrapers.core.json.loads', 'core.json.loads', (['response.text'], {}), '(response.text)\n', (1665, 1680), False, 'from providerModules.a4kScrapers import core\n'), ((323, 373), 'providerModules.a4kScrapers.core.re.findall', 'core.re.findall', (['"""token\\\\: (.*)\\\\n"""', 'response.text'], {}), "('token\\\\: (.*)\\\\n', response.text)\n", (338, 373), False, 'from providerModules.a4kScrapers import core\n'), ((400, 462), 'providerModules.a4kScrapers.core.re.findall', 'core.re.findall', (['(token_id + " ?\\\\+?\\\\= ?\'(.*)\'")', 'response.text'], {}), '(token_id + " ?\\\\+?\\\\= ?\'(.*)\'", response.text)\n', (415, 462), False, 'from providerModules.a4kScrapers import core\n'), ((1556, 1608), 'providerModules.a4kScrapers.core.tools.log', 'core.tools.log', (["('No response from %s' % url)", '"""error"""'], {}), "('No response from %s' % url, 'error')\n", (1570, 1608), False, 'from providerModules.a4kScrapers import core\n')]
|
import os
import numpy as np
from typing import Dict, Generic, List, NamedTuple, Tuple, TypeVar
TTensorizedNodeData = TypeVar("TTensorizedNodeData")
def enforce_not_None(e):
"""Enforce non-nullness of input. Used for typechecking and runtime safety."""
if e is None:
raise Exception("Input is None.")
return e
class TensorizedGraphData(Generic[TTensorizedNodeData]):
__slots__ = ("num_nodes", "node_tensorized_data", "adjacency_lists", "reference_nodes")
def __init__(
self,
num_nodes: int,
node_tensorized_data: List[TTensorizedNodeData],
adjacency_lists: List[Tuple[np.ndarray, np.ndarray]],
reference_nodes: Dict[str, np.ndarray],
):
self.num_nodes = num_nodes
self.node_tensorized_data = node_tensorized_data
self.adjacency_lists = adjacency_lists
self.reference_nodes = reference_nodes
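# --- Editor's sketch (not part of the original module) ---
# Minimal construction of a two-node graph with one edge type; the node
# payload type (str) and the "roots" reference key are illustrative.
_example_graph = TensorizedGraphData(
    num_nodes=2,
    node_tensorized_data=["node_a", "node_b"],
    adjacency_lists=[(np.array([0]), np.array([1]))],  # single edge 0 -> 1
    reference_nodes={"roots": np.array([0])},
)
assert enforce_not_None(_example_graph).num_nodes == 2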
|
[
"typing.TypeVar"
] |
[((118, 148), 'typing.TypeVar', 'TypeVar', (['"""TTensorizedNodeData"""'], {}), "('TTensorizedNodeData')\n", (125, 148), False, 'from typing import Dict, Generic, List, NamedTuple, Tuple, TypeVar\n')]
|
"""NDG XACML ElementTree Policy Reader
NERC DataGrid
"""
__author__ = "<NAME>"
__date__ = "16/03/10"
__copyright__ = "(C) 2010 Science and Technology Facilities Council"
__contact__ = "<EMAIL>"
__license__ = "BSD - see LICENSE file in top-level directory"
__revision__ = "$Id$"
from ndg.xacml.parsers import XMLParseError
from ndg.xacml.core.policy import Policy
from ndg.xacml.core.policydefaults import PolicyDefaults
from ndg.xacml.core.variabledefinition import VariableDefinition
from ndg.xacml.core.rule import Rule
from ndg.xacml.core.target import Target
from ndg.xacml.parsers.etree import QName, getElementChildren
from ndg.xacml.parsers.etree.reader import ETreeAbstractReader
from ndg.xacml.parsers.etree.factory import ReaderFactory
class PolicyReader(ETreeAbstractReader):
"""Parse a Policy Document using ElementTree
@cvar TYPE: XACML type to instantiate from parsed object
@type TYPE: type"""
TYPE = Policy
def __call__(self, obj, common):
"""Parse policy object
@param obj: input object to parse
@type obj: ElementTree Element, or stream object
@param common: parsing common data
@type common: from ndg.xacml.parsers.common.Common
@return: new XACML expression instance
@rtype: ndg.xacml.core.policy.Policy derived type
@raise XMLParseError: error reading element
@raise NotImplementedError: parsing is not implemented for rule
combiner, combiner parameters and obligations elements.
"""
elem = super(PolicyReader, self)._parse(obj)
return self.processElement(elem, common)
@classmethod
def parse(cls, obj, common=None):
"""Parse from input object and return new XACML object
As a special case, allow the common data to be None. This is because for
parsing a policy rather than a policy set, no common data is needed.
@param obj: input source - file name, stream object or other
@type obj: string, stream or other
@param common: parsing common data
@type common: from ndg.xacml.parsers.common.Common
@return: new XACML object
@rtype: XacmlCoreBase sub type
"""
return super(ETreeAbstractReader, cls).parse(obj, common)
def processElement(self, elem, common):
"""Parse policy object
@param elem: root element of policy
@type elem: ElementTree Element
@param common: parsing common data
@type common: from ndg.xacml.parsers.common.Common
@return: new XACML expression instance
@rtype: ndg.xacml.core.policy.Policy derived type
@raise XMLParseError: error reading element
@raise NotImplementedError: parsing is not implemented for rule
combiner, combiner parameters and obligations elements.
"""
# XACML type to instantiate
xacmlType = self.TYPE
policy = xacmlType()
localName = QName.getLocalPart(elem.tag)
if localName != xacmlType.ELEMENT_LOCAL_NAME:
raise XMLParseError("No \"%s\" element found" %
xacmlType.ELEMENT_LOCAL_NAME)
# Unpack *required* attributes from top-level element
attributeValues = []
for attributeName in (xacmlType.POLICY_ID_ATTRIB_NAME,
xacmlType.RULE_COMBINING_ALG_ID_ATTRIB_NAME):
attributeValue = elem.attrib.get(attributeName)
if attributeValue is None:
raise XMLParseError('No "%s" attribute found in "%s" '
'element' %
(attributeName,
xacmlType.ELEMENT_LOCAL_NAME))
attributeValues.append(attributeValue)
policy.policyId, policy.ruleCombiningAlgId = attributeValues
# Defaults to XACML version 1.0
# TODO: version check
policy.version = (elem.attrib.get(xacmlType.VERSION_ATTRIB_NAME) or
xacmlType.DEFAULT_XACML_VERSION)
# Parse sub-elements
for childElem in getElementChildren(elem):
localName = QName.getLocalPart(childElem.tag)
if localName == xacmlType.DESCRIPTION_LOCAL_NAME:
if childElem.text is not None:
policy.description = childElem.text.strip()
elif localName == xacmlType.POLICY_DEFAULTS_LOCAL_NAME:
PolicyDefaultsReader = ReaderFactory.getReader(PolicyDefaults)
policy.policyDefaults = PolicyDefaultsReader.parse(childElem,
common)
elif localName == Target.ELEMENT_LOCAL_NAME:
TargetReader = ReaderFactory.getReader(Target)
policy.target = TargetReader.parse(childElem, common)
elif localName == xacmlType.COMBINER_PARAMETERS_LOCAL_NAME:
raise NotImplementedError()
elif localName == xacmlType.RULE_COMBINER_PARAMETERS_LOCAL_NAME:
raise NotImplementedError()
elif localName == VariableDefinition.ELEMENT_LOCAL_NAME:
VariableDefinitionReader = ReaderFactory.getReader(
VariableDefinition)
variableDefinition = VariableDefinitionReader.parse(childElem,
common)
elif localName == Rule.ELEMENT_LOCAL_NAME:
RuleReader = ReaderFactory.getReader(Rule)
rule = RuleReader.parse(childElem, common)
if rule.id in [_rule.id for _rule in policy.rules]:
raise XMLParseError("Duplicate Rule ID %r found" % rule.id)
policy.rules.append(rule)
elif localName == xacmlType.OBLIGATIONS_LOCAL_NAME:
raise NotImplementedError('Parsing for Obligations element is '
'not implemented')
else:
raise XMLParseError("XACML Policy child element name %r not "
"recognised" % localName)
# Record reference in case of references to this policy.
# Allow for there not being a policy finder since this is not needed if
# if the root is a policy rather than a policy set.
if common is not None and hasattr(common, 'policyFinder'):
common.policyFinder.addPolicyReference(policy)
return policy
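# A minimal usage sketch (the policy file name is hypothetical): parse a
# standalone policy document; ``common`` may be omitted because a lone policy
# needs no policy-set common data.
#
#     policy = PolicyReader.parse('policy.xml')
#     print(policy.policyId, policy.ruleCombiningAlgId)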
|
[
"ndg.xacml.parsers.etree.QName.getLocalPart",
"ndg.xacml.parsers.etree.factory.ReaderFactory.getReader",
"ndg.xacml.parsers.etree.getElementChildren",
"ndg.xacml.parsers.XMLParseError"
] |
[((3025, 3053), 'ndg.xacml.parsers.etree.QName.getLocalPart', 'QName.getLocalPart', (['elem.tag'], {}), '(elem.tag)\n', (3043, 3053), False, 'from ndg.xacml.parsers.etree import QName, getElementChildren\n'), ((4252, 4276), 'ndg.xacml.parsers.etree.getElementChildren', 'getElementChildren', (['elem'], {}), '(elem)\n', (4270, 4276), False, 'from ndg.xacml.parsers.etree import QName, getElementChildren\n'), ((3126, 3195), 'ndg.xacml.parsers.XMLParseError', 'XMLParseError', (['(\'No "%s" element found\' % xacmlType.ELEMENT_LOCAL_NAME)'], {}), '(\'No "%s" element found\' % xacmlType.ELEMENT_LOCAL_NAME)\n', (3139, 3195), False, 'from ndg.xacml.parsers import XMLParseError\n'), ((4302, 4335), 'ndg.xacml.parsers.etree.QName.getLocalPart', 'QName.getLocalPart', (['childElem.tag'], {}), '(childElem.tag)\n', (4320, 4335), False, 'from ndg.xacml.parsers.etree import QName, getElementChildren\n'), ((3583, 3691), 'ndg.xacml.parsers.XMLParseError', 'XMLParseError', (['(\'No "%s" attribute found in "%s" element\' % (attributeName, xacmlType.\n ELEMENT_LOCAL_NAME))'], {}), '(\'No "%s" attribute found in "%s" element\' % (attributeName,\n xacmlType.ELEMENT_LOCAL_NAME))\n', (3596, 3691), False, 'from ndg.xacml.parsers import XMLParseError\n'), ((4650, 4689), 'ndg.xacml.parsers.etree.factory.ReaderFactory.getReader', 'ReaderFactory.getReader', (['PolicyDefaults'], {}), '(PolicyDefaults)\n', (4673, 4689), False, 'from ndg.xacml.parsers.etree.factory import ReaderFactory\n'), ((4951, 4982), 'ndg.xacml.parsers.etree.factory.ReaderFactory.getReader', 'ReaderFactory.getReader', (['Target'], {}), '(Target)\n', (4974, 4982), False, 'from ndg.xacml.parsers.etree.factory import ReaderFactory\n'), ((5443, 5486), 'ndg.xacml.parsers.etree.factory.ReaderFactory.getReader', 'ReaderFactory.getReader', (['VariableDefinition'], {}), '(VariableDefinition)\n', (5466, 5486), False, 'from ndg.xacml.parsers.etree.factory import ReaderFactory\n'), ((5804, 5833), 'ndg.xacml.parsers.etree.factory.ReaderFactory.getReader', 'ReaderFactory.getReader', (['Rule'], {}), '(Rule)\n', (5827, 5833), False, 'from ndg.xacml.parsers.etree.factory import ReaderFactory\n'), ((5987, 6040), 'ndg.xacml.parsers.XMLParseError', 'XMLParseError', (["('Duplicate Rule ID %r found' % rule.id)"], {}), "('Duplicate Rule ID %r found' % rule.id)\n", (6000, 6040), False, 'from ndg.xacml.parsers import XMLParseError\n'), ((6382, 6460), 'ndg.xacml.parsers.XMLParseError', 'XMLParseError', (["('XACML Policy child element name %r not recognised' % localName)"], {}), "('XACML Policy child element name %r not recognised' % localName)\n", (6395, 6460), False, 'from ndg.xacml.parsers import XMLParseError\n')]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
outbursts --- Lightcurve and outburst analysis
==============================================
"""
__all__ = [
'CometaryTrends'
]
from collections import namedtuple
import logging
import numpy as np
from scipy.cluster import hierarchy
from scipy.optimize import leastsq
import astropy.units as u
from astropy.time import Time
from astropy.stats import sigma_clip
from ..util import linefit
dmdtFit = namedtuple(
'dmdtFit', ['m0', 'dmdt', 'm0_unc', 'dmdt_unc', 'rms', 'rchisq']
)
ExpFit = namedtuple(
'ExpFit', ['dm', 'tau', 'dm_unc', 'tau_unc', 'rms', 'rchisq']
)
Color = namedtuple(
'Color', ['t', 'clusters', 'm_filter', 'm',
'm_unc', 'c', 'c_unc', 'avg', 'avg_unc']
)
Color.__doc__ = 'Color estimate.'
Color.t.__doc__ = 'Average observation date for each color estimate. [astropy Time]'
Color.clusters.__doc__ = 'Observation clusters used to define color; 0 for unused.'
Color.m_filter.__doc__ = 'Filter for m.'
Color.m.__doc__ = 'Apparent mag for each date in given filter. [mag]'
Color.m_unc.__doc__ = 'Uncertainty on m. [mag]'
Color.c.__doc__ = 'Individual colors. [mag]'
Color.c_unc.__doc__ = 'Uncertainty on c. [mag]'
Color.avg.__doc__ = 'Weighted average color. [mag]'
Color.avg_unc.__doc__ = 'Uncertainty on avg. [mag]'
class CometaryTrends:
"""Define lightcurve trends designed for identifying cometary outbursts.
Parameters
----------
eph : sbpy Ephem
Ephemeris of the target. Field requirements depend on the trend
fitting methods to be used. Generally provide date, rh, delta, phase.
m, m_unc : Quantity
Photometry and uncertainty in magnitudes.
filt : array, optional
Filters for each ``m``.
fit_mask : array, optional
``True`` for elements to ignore when fitting (e.g., outbursts).
logger : Logger, optional
Use this logger for messaging.
**kwargs
Any ``CometaryTrends`` property.
Properties
----------
m_original : Quantity
Unmodified (input) photometry.
m : Quantity
Apparent magnitude, possibly limited to one filter (see ``fit_filter``)
or filter transformed (see ``color_transform``).
colors : dict of Quantity
Use these colors when transforming between filters. Key by filter
tuple in wavelength order, e.g., to set g-r use:
`{('g', 'r'): 0.5 * u.mag}`
``colors`` is also set when ``self.color`` is used.
fit_filter : str or None
Set to a filter in ``self.filt`` to limit fitting to this filter.
color_transform : bool
Set to ``True`` to transform observations to that specified in
``fit_filter`` via ``colors``.
"""
def __init__(self, eph, m, m_unc, filt=None, fit_mask=None, logger=None,
**kwargs):
# store parameters and properties
self.eph = eph
self.m = m
self.m_unc = m_unc
self.filt = np.array(filt)
self.fit_mask = (
np.zeros(len(m), bool) if fit_mask is None
else np.array(fit_mask)
)
self.colors = kwargs.get('colors', {})
self.fit_filter = kwargs.get('fit_filter')
self.color_transform = kwargs.get('color_transform', False)
if logger is None:
self.logger = logging.getLogger('CometaryTrends')
else:
self.logger = logger
# parameter check
if not all((isinstance(m, u.Quantity), isinstance(m_unc, u.Quantity))):
raise ValueError(
'm, m_unc must be Quantity in units of magnitude.')
n = [len(x) for x in (eph, m, m_unc, self.fit_mask)]
if filt is not None:
n += [len(filt)]
if len(np.unique(n)) != 1:
raise ValueError('all arrays must have the same length')
@property
def m_original(self):
return self._m
@property
def m(self):
"""Apparent magnitude.
Possibly limited to one filter (see ``fit_filter``) or filter
transformed (see ``color_transform``).
"""
m = np.ma.MaskedArray(self._m.copy(),
mask=np.zeros(len(self._m), bool))
if (self.filt is not None) and (self.fit_filter is not None):
for i in range(len(m)):
if self.filt[i] != self.fit_filter:
if self.color_transform:
# try to color transform
color = (self.filt[i], self.fit_filter)
if color in self.colors:
m[i] -= self.colors[color]
elif color[::-1] in self.colors:
m[i] += self.colors[color[::-1]]
else:
# not possible
m.mask[i] = True
else:
# not color transforming this filter
m.mask[i] = True
return m
@m.setter
def m(self, _m):
self._m = _m
@property
def fit_m(self):
"""Magnitude array masked for fitting."""
m = self.m
m.mask += self.fit_mask
return m
@property
def fit_filter(self):
"""Filter to fit.
        Set to ``None`` to fit all data (without color transformations).
"""
return self._fit_filter
@fit_filter.setter
def fit_filter(self, filt):
if not isinstance(filt, (str, type(None))):
raise ValueError('fit filter must be a string or ``None``')
self._fit_filter = filt
@property
def color_transform(self):
"""Color transformation flag.
If fitting only one filter, set to ``True`` to allow
color transformations via ``self.color``.
"""
return self._color_transform
@color_transform.setter
def color_transform(self, flag):
self._color_transform = bool(flag)
def color(self, blue, red, max_dt=16 / 24, max_unc=0.25 * u.mag,
m_filter=None):
"""Estimate the color, blue - red, using weighted averages.
``eph`` requires ``'date'``.
Masked data is excluded.
Data is not nucleus subtracted.
Parameters
----------
blue: string
The name of the bluer filter.
red: string
The name of the redder filter.
max_dt: float, optional
            Maximum time difference (days) to consider when clustering
            observations.
max_unc: Quantity, optional
Ignore results with uncertainty > ``max_unc``.
m_filter : string, optional
Report mean apparent magnitude in this filter. Default is the
redder filter.
Returns
-------
color: Color
The color results or ``None`` if it cannot be calculated.
"""
if len(self.filt) < 2:
self.logger.info('Not enough filters.')
return None
b = self.filt == blue
r = self.filt == red
if m_filter is None:
m_filter = red
elif m_filter not in [blue, red]:
raise ValueError("m_filter must be one of blue or red")
clusters = hierarchy.fclusterdata(
self.eph['date'].mjd[:, np.newaxis],
max_dt, criterion='distance'
)
self.logger.info(f'{clusters.max()} clusters found.')
mjd = []
m_mean = []
m_mean_unc = []
bmr = []
bmr_unc = []
for cluster in np.unique(clusters):
i = (clusters == cluster) * ~self.fit_mask
# require both filters in this cluster
if (not np.any(b[i])) or (not np.any(r[i])):
clusters[i] = 0
continue
# estimate weighted averages and compute color
wb, sw = np.average(self.m_original[b * i],
weights=self.m_unc[b * i]**-2,
returned=True)
wb_unc = sw**-0.5
wr, sw = np.average(self.m_original[r * i],
weights=self.m_unc[r * i]**-2,
returned=True)
wr_unc = sw**-0.5
if np.hypot(wb_unc, wr_unc) > max_unc:
continue
mjd.append(self.eph['date'].mjd[i].mean())
            if m_filter == blue:
m_mean.append(wb)
m_mean_unc.append(wb_unc)
else:
m_mean.append(wr)
m_mean_unc.append(wr_unc)
bmr.append(wb - wr)
bmr_unc.append(np.hypot(wb_unc, wr_unc))
if len(bmr) == 0:
self.logger.info('No colors measured.')
return None
m_mean = u.Quantity(m_mean)
m_mean_unc = u.Quantity(m_mean_unc)
bmr = u.Quantity(bmr)
bmr_unc = u.Quantity(bmr_unc)
avg, sw = np.average(bmr, weights=bmr_unc**-2, returned=True)
avg_unc = sw**-0.5
self.colors[(blue, red)] = avg
return Color(Time(mjd, format='mjd'), clusters, m_filter,
m_mean, m_mean_unc, bmr, bmr_unc, avg, avg_unc)
@staticmethod
def linear_add(a, b):
"""The sum a+b computed in linear space."""
return -np.log(np.exp(-a.value) + np.exp(-b.to_value(a.unit))) * a.unit
@staticmethod
def linear_subtract(a, b):
"""The difference a-b computed in linear space."""
return -np.log(np.exp(-a.value) - np.exp(-b.to_value(a.unit))) * a.unit
def H(self, fixed_angular_size=False, Phi=None, nucleus=None):
"""Absolute magnitude.
Parameters
----------
fixed_angular_size: bool
``True`` if the photometric aperture is measured with a fixed
            angular size. If so, the target-observer distance (Δ) correction
will be Δ**-1.
Phi: function, optional
Phase function.
nucleus : Quantity
Subtract this nucleus before scaling.
"""
m = self.m.copy()
unit = m.data.unit
if nucleus is not None:
m = np.ma.MaskedArray(self.linear_subtract(m.data, nucleus),
mask=m.mask)
d = 2.5 if fixed_angular_size else 5
H = (m - 5 * np.log10(self.eph['rh'].to_value('au')) * unit
- d * np.log10(self.eph['delta'].to_value('au')) * unit)
if Phi is not None:
H += 2.5 * np.log10(Phi(self.eph['phase'])) * unit
return H
def ostat(self, k=4, dt=14, sigma=2, **kwargs):
"""Compute the outburst statistic for each photometry point.
ostat is calculated for each masked point, but the masked points are
not included in the photometric baseline calculation.
Parameters
----------
k : float, optional
Heliocentric distance slope on apparent magnitude for the baseline
estimate.
dt : float, optional
Number of days of history to use for the baseline estimate.
sigma : float, optional
Number of sigmas to clip the data.
**kwargs
Additional keyword arguments are passed to ``H()``.
Returns
-------
o : array
The outburst statistic.
"""
Hy = (
self.H(**kwargs)
- 2.5 * (k - 2) * np.log10(self.eph['rh'].to_value('au')) * u.mag
)
o = np.ma.zeros(len(Hy))
for i in range(len(Hy)):
j = (
(self.eph['date'] < self.eph['date'][i])
* (self.eph['date'] > (self.eph['date'][i] - dt * u.day))
)
if j.sum() < 1:
o[i] = np.ma.masked
continue
# reject outliers, calculate weighted mean
good = j * ~Hy.mask * np.isfinite(Hy.data)
if np.sum(good) > 2:
m = sigma_clip(Hy[good].data, sigma=sigma)
else:
m = Hy[good]
m -= Hy[i] # normalize to data point being tested
m_unc = self.m_unc[good]
baseline, sw = np.ma.average(m, weights=m_unc**-2,
returned=True)
baseline_unc = sw**-0.5
unc = max(np.sqrt(baseline_unc**2 + self.m_unc[i]**2).value, 0.1)
o[i] = np.round(baseline.value / unc, 1)
return o
def _fit_setup(self, nucleus=None, absolute=False, **kwargs):
dt = self.eph['date'].mjd * u.day
dt -= dt.min()
if absolute:
m = self.H(nucleus=nucleus, **kwargs)
m.mask = self.fit_m.mask
else:
m = self.fit_m
if nucleus is not None:
m = np.ma.MaskedArray(
self.linear_subtract(m.data, nucleus),
mask=m.mask
)
# subtraction may introduce nans
m.mask += ~np.isfinite(m)
return dt, m
def dmdt(self, nucleus=None, guess=None, k=1, absolute=False, **kwargs):
"""Fit magnitude versus time as a function of ``t**k``.
``eph`` requires ``'date'``.
``absolute`` requires ``'rh'``, ``'delta'``, and ``'phase'`` in
``eph``.
Parameters
----------
nucleus : Quantity
Subtract this nucleus before fitting, assumed to be in the same
filter as ``self.m``.
guess : tuple of floats
Initial fit guess: (m0, slope).
k : float, optional
Scale time by ``t^k``.
        absolute : bool, optional
            Fix absolute magnitude via ``self.H()``.
        **kwargs
            Additional keyword arguments passed to ``self.H()``.
Returns
-------
dt: np.array
trend: np.array
Including the nucleus.
fit_mask: np.array
Data points used in the fit.
fit: dmdtFit
Fit results.
"""
dt, m = self._fit_setup(nucleus=nucleus, absolute=absolute, **kwargs)
unit = m.data.unit
mask = m.mask
guess = (0.05, 15) if guess is None else guess
r = linefit(dt.value[~mask]**k, m.data.value[~mask],
self.m_unc.value[~mask], guess)
trend = (r[0][1] + r[0][0] * dt.value**k) * unit
fit_unc = r[1] if r[1] is not None else (0, 0)
# restore nucleus?
if nucleus is not None:
trend = self.linear_add(trend, nucleus)
residuals = m - trend
fit = dmdtFit(r[0][1] * unit, r[0][0] * unit / u.day**k,
fit_unc[1] * unit, fit_unc[0] * unit / u.day**k,
np.std(residuals[~mask].data),
np.sum((residuals[~mask].data / self.m_unc[~mask])**2)
/ np.sum(~mask))
return dt, trend, ~mask, fit
def exp(self, baseline, absolute=False, **kwargs):
"""Fit magnitude versus time as a function of ``e**(k*t)``.
``eph`` requires ``'date'``.
``absolute`` requires ``'rh'``, ``'delta'``, and ``'phase'`` in
``eph``.
Parameters
----------
baseline : Quantity
Fit the exponential with respect to this baseline trend (may
include the nucleus). Must be absolute magnitude if ``absolute``
is true.
    absolute : bool, optional
        Fix absolute magnitude via ``self.H()``.
    **kwargs
        Additional keyword arguments passed to ``self.H()``.
Returns
-------
dt: np.array
trend: np.array
Including the nucleus.
fit_mask: np.array
Data points used in the fit.
fit: ExpFit
Fit results.
"""
dt, m = self._fit_setup(absolute=absolute, **kwargs)
dm = m - baseline
unit = m.data.unit
mask = m.mask
def model(dt, peak, tau):
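            # Outburst template: an impulse of amplitude ``peak`` decaying
            # with e-folding time ``tau`` (days), clipped to zero for dt < 0.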
lc = peak * np.exp(-dt / tau)
lc[dt < 0] = 0
return lc
def chi(p, dt, dm, m_unc):
m = model(dt, *p)
return (dm - m) / m_unc
args = (dt.value[~mask], dm.data.value[~mask], self.m_unc.value[~mask])
guess = (dm.compressed().min().value, 10)
r = leastsq(chi, guess, args=args, full_output=True)
fit_unc = np.sqrt(np.diag(r[1]))
trend = model(dt.value, *r[0]) * unit
# restore baseline
trend = trend + baseline
residuals = m - trend
fit = ExpFit(r[0][0] * unit, r[0][1] * u.day,
fit_unc[0] * unit, fit_unc[1] * u.day,
np.std(residuals[~mask].data),
np.sum((residuals[~mask].data / self.m_unc[~mask])**2)
/ np.sum(~mask))
return dt, trend, ~mask, fit
# def mrh(self, fixed_angular_size, filt=None, color_transform=True,
# Phi=phase_HalleyMarcus):
# """Fit magnitude as a function of rh.
# ``eph`` requires rh, delta, phase.
# m = M - k log10(rh) - d log10(Delta) + 2.5 log10(Phi(phase))
# d = 2.5 for fixed_angular_size == True, 5 otherwise.
# Parameters
# ----------
# fixed_angular_size: bool
# Aperture is fixed in angular size.
# filt: str, optional
# Fit only this filter.
# color_transformation: bool, optional
# If fitting only one filter, set to ``True`` to allow
# color transformations via ``self.color``.
# Phi: function, optional
# Use this phase function.
# Returns
# -------
# trend: np.array
# fit_mask: np.array
# Data points used in the fit.
# fit: mrhFit
# """
# m = self.coma(filt)
# if filt is not None and not color_transform:
# m[self.filt != filt] = np.nan
# if fixed_angular_size:
# d = 2.5
# else:
# d = 5
# dm = (-d * np.log10(self.eph['delta'].to_value('au'))
# + 2.5 * np.log10(Phi(self.eph['phase']))) * u.mag
# i = ~self.fit_mask * np.isfinite(m)
# r = linefit(self.eph['rh'][i].value, (m - dm)[i].value,
# self.m_unc[i].value, (0.05, 15))
# trend = (r[0][1] + r[0][0] * self.eph['rh'].value) * m.unit + dm
# residuals = m - trend
# # restore nucleus?
# if self.nucleus is not None:
# trend = -np.log(np.exp(-trend.value) +
# np.exp(-self.nucleus.value)) * u.mag
# fit = mrhFit(r[0][1] * m.unit, r[0][0] * m.unit / u.day,
# r[1][1] * m.unit, r[1][0] * m.unit / u.day,
# np.std(residuals[i]),
# np.sum((residuals[i] / self.m_unc[i])**2) / np.sum(i))
# return trend, i, fit
|
[
"numpy.sum",
"scipy.optimize.leastsq",
"numpy.exp",
"numpy.diag",
"numpy.round",
"numpy.unique",
"astropy.stats.sigma_clip",
"numpy.std",
"numpy.isfinite",
"astropy.units.Quantity",
"numpy.average",
"astropy.time.Time",
"scipy.cluster.hierarchy.fclusterdata",
"numpy.hypot",
"numpy.ma.average",
"numpy.any",
"numpy.array",
"collections.namedtuple",
"logging.getLogger",
"numpy.sqrt"
] |
[((475, 551), 'collections.namedtuple', 'namedtuple', (['"""dmdtFit"""', "['m0', 'dmdt', 'm0_unc', 'dmdt_unc', 'rms', 'rchisq']"], {}), "('dmdtFit', ['m0', 'dmdt', 'm0_unc', 'dmdt_unc', 'rms', 'rchisq'])\n", (485, 551), False, 'from collections import namedtuple\n'), ((567, 640), 'collections.namedtuple', 'namedtuple', (['"""ExpFit"""', "['dm', 'tau', 'dm_unc', 'tau_unc', 'rms', 'rchisq']"], {}), "('ExpFit', ['dm', 'tau', 'dm_unc', 'tau_unc', 'rms', 'rchisq'])\n", (577, 640), False, 'from collections import namedtuple\n'), ((656, 756), 'collections.namedtuple', 'namedtuple', (['"""Color"""', "['t', 'clusters', 'm_filter', 'm', 'm_unc', 'c', 'c_unc', 'avg', 'avg_unc']"], {}), "('Color', ['t', 'clusters', 'm_filter', 'm', 'm_unc', 'c',\n 'c_unc', 'avg', 'avg_unc'])\n", (666, 756), False, 'from collections import namedtuple\n'), ((3012, 3026), 'numpy.array', 'np.array', (['filt'], {}), '(filt)\n', (3020, 3026), True, 'import numpy as np\n'), ((7309, 7402), 'scipy.cluster.hierarchy.fclusterdata', 'hierarchy.fclusterdata', (["self.eph['date'].mjd[:, np.newaxis]", 'max_dt'], {'criterion': '"""distance"""'}), "(self.eph['date'].mjd[:, np.newaxis], max_dt,\n criterion='distance')\n", (7331, 7402), False, 'from scipy.cluster import hierarchy\n'), ((7618, 7637), 'numpy.unique', 'np.unique', (['clusters'], {}), '(clusters)\n', (7627, 7637), True, 'import numpy as np\n'), ((8858, 8876), 'astropy.units.Quantity', 'u.Quantity', (['m_mean'], {}), '(m_mean)\n', (8868, 8876), True, 'import astropy.units as u\n'), ((8898, 8920), 'astropy.units.Quantity', 'u.Quantity', (['m_mean_unc'], {}), '(m_mean_unc)\n', (8908, 8920), True, 'import astropy.units as u\n'), ((8935, 8950), 'astropy.units.Quantity', 'u.Quantity', (['bmr'], {}), '(bmr)\n', (8945, 8950), True, 'import astropy.units as u\n'), ((8969, 8988), 'astropy.units.Quantity', 'u.Quantity', (['bmr_unc'], {}), '(bmr_unc)\n', (8979, 8988), True, 'import astropy.units as u\n'), ((9007, 9060), 'numpy.average', 'np.average', (['bmr'], {'weights': '(bmr_unc ** -2)', 'returned': '(True)'}), '(bmr, weights=bmr_unc ** -2, returned=True)\n', (9017, 9060), True, 'import numpy as np\n'), ((16435, 16483), 'scipy.optimize.leastsq', 'leastsq', (['chi', 'guess'], {'args': 'args', 'full_output': '(True)'}), '(chi, guess, args=args, full_output=True)\n', (16442, 16483), False, 'from scipy.optimize import leastsq\n'), ((3125, 3143), 'numpy.array', 'np.array', (['fit_mask'], {}), '(fit_mask)\n', (3133, 3143), True, 'import numpy as np\n'), ((3374, 3409), 'logging.getLogger', 'logging.getLogger', (['"""CometaryTrends"""'], {}), "('CometaryTrends')\n", (3391, 3409), False, 'import logging\n'), ((7941, 8027), 'numpy.average', 'np.average', (['self.m_original[b * i]'], {'weights': '(self.m_unc[b * i] ** -2)', 'returned': '(True)'}), '(self.m_original[b * i], weights=self.m_unc[b * i] ** -2,\n returned=True)\n', (7951, 8027), True, 'import numpy as np\n'), ((8138, 8224), 'numpy.average', 'np.average', (['self.m_original[r * i]'], {'weights': '(self.m_unc[r * i] ** -2)', 'returned': '(True)'}), '(self.m_original[r * i], weights=self.m_unc[r * i] ** -2,\n returned=True)\n', (8148, 8224), True, 'import numpy as np\n'), ((9148, 9171), 'astropy.time.Time', 'Time', (['mjd'], {'format': '"""mjd"""'}), "(mjd, format='mjd')\n", (9152, 9171), False, 'from astropy.time import Time\n'), ((12255, 12307), 'numpy.ma.average', 'np.ma.average', (['m'], {'weights': '(m_unc ** -2)', 'returned': '(True)'}), '(m, weights=m_unc ** -2, returned=True)\n', (12268, 12307), True, 'import numpy as np\n'), 
((12480, 12513), 'numpy.round', 'np.round', (['(baseline.value / unc)', '(1)'], {}), '(baseline.value / unc, 1)\n', (12488, 12513), True, 'import numpy as np\n'), ((14822, 14851), 'numpy.std', 'np.std', (['residuals[~mask].data'], {}), '(residuals[~mask].data)\n', (14828, 14851), True, 'import numpy as np\n'), ((16510, 16523), 'numpy.diag', 'np.diag', (['r[1]'], {}), '(r[1])\n', (16517, 16523), True, 'import numpy as np\n'), ((16799, 16828), 'numpy.std', 'np.std', (['residuals[~mask].data'], {}), '(residuals[~mask].data)\n', (16805, 16828), True, 'import numpy as np\n'), ((3797, 3809), 'numpy.unique', 'np.unique', (['n'], {}), '(n)\n', (3806, 3809), True, 'import numpy as np\n'), ((8329, 8353), 'numpy.hypot', 'np.hypot', (['wb_unc', 'wr_unc'], {}), '(wb_unc, wr_unc)\n', (8337, 8353), True, 'import numpy as np\n'), ((8711, 8735), 'numpy.hypot', 'np.hypot', (['wb_unc', 'wr_unc'], {}), '(wb_unc, wr_unc)\n', (8719, 8735), True, 'import numpy as np\n'), ((11967, 11987), 'numpy.isfinite', 'np.isfinite', (['Hy.data'], {}), '(Hy.data)\n', (11978, 11987), True, 'import numpy as np\n'), ((12003, 12015), 'numpy.sum', 'np.sum', (['good'], {}), '(good)\n', (12009, 12015), True, 'import numpy as np\n'), ((12041, 12079), 'astropy.stats.sigma_clip', 'sigma_clip', (['Hy[good].data'], {'sigma': 'sigma'}), '(Hy[good].data, sigma=sigma)\n', (12051, 12079), False, 'from astropy.stats import sigma_clip\n'), ((14875, 14931), 'numpy.sum', 'np.sum', (['((residuals[~mask].data / self.m_unc[~mask]) ** 2)'], {}), '((residuals[~mask].data / self.m_unc[~mask]) ** 2)\n', (14881, 14931), True, 'import numpy as np\n'), ((14954, 14967), 'numpy.sum', 'np.sum', (['(~mask)'], {}), '(~mask)\n', (14960, 14967), True, 'import numpy as np\n'), ((16123, 16140), 'numpy.exp', 'np.exp', (['(-dt / tau)'], {}), '(-dt / tau)\n', (16129, 16140), True, 'import numpy as np\n'), ((16851, 16907), 'numpy.sum', 'np.sum', (['((residuals[~mask].data / self.m_unc[~mask]) ** 2)'], {}), '((residuals[~mask].data / self.m_unc[~mask]) ** 2)\n', (16857, 16907), True, 'import numpy as np\n'), ((16929, 16942), 'numpy.sum', 'np.sum', (['(~mask)'], {}), '(~mask)\n', (16935, 16942), True, 'import numpy as np\n'), ((7766, 7778), 'numpy.any', 'np.any', (['b[i]'], {}), '(b[i])\n', (7772, 7778), True, 'import numpy as np\n'), ((7788, 7800), 'numpy.any', 'np.any', (['r[i]'], {}), '(r[i])\n', (7794, 7800), True, 'import numpy as np\n'), ((12405, 12452), 'numpy.sqrt', 'np.sqrt', (['(baseline_unc ** 2 + self.m_unc[i] ** 2)'], {}), '(baseline_unc ** 2 + self.m_unc[i] ** 2)\n', (12412, 12452), True, 'import numpy as np\n'), ((13074, 13088), 'numpy.isfinite', 'np.isfinite', (['m'], {}), '(m)\n', (13085, 13088), True, 'import numpy as np\n'), ((9382, 9398), 'numpy.exp', 'np.exp', (['(-a.value)'], {}), '(-a.value)\n', (9388, 9398), True, 'import numpy as np\n'), ((9571, 9587), 'numpy.exp', 'np.exp', (['(-a.value)'], {}), '(-a.value)\n', (9577, 9587), True, 'import numpy as np\n')]
|
from insertion_sort.insertion_sort import insertion_Sort
import pytest
@pytest.mark.parametrize(
"input,expected_value",
[
([8, 4, 23, 42, 16, 15], [4, 8, 15, 16, 23, 42]),
([20, 18, 12, 8, 5, -2], [-2, 5, 8, 12, 18, 20]),
([5, 12, 7, 5, 5, 7], [5, 5, 5, 7, 7, 12]),
([2, 3, 5, 7, 13, 11], [2, 3, 5, 7, 11, 13]),
],
)
def test_insertion_sort(input, expected_value):
assert insertion_Sort(input) == expected_value
|
[
"pytest.mark.parametrize",
"insertion_sort.insertion_sort.insertion_Sort"
] |
[((74, 328), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input,expected_value"""', '[([8, 4, 23, 42, 16, 15], [4, 8, 15, 16, 23, 42]), ([20, 18, 12, 8, 5, -2],\n [-2, 5, 8, 12, 18, 20]), ([5, 12, 7, 5, 5, 7], [5, 5, 5, 7, 7, 12]), ([\n 2, 3, 5, 7, 13, 11], [2, 3, 5, 7, 11, 13])]'], {}), "('input,expected_value', [([8, 4, 23, 42, 16, 15], [\n 4, 8, 15, 16, 23, 42]), ([20, 18, 12, 8, 5, -2], [-2, 5, 8, 12, 18, 20]\n ), ([5, 12, 7, 5, 5, 7], [5, 5, 5, 7, 7, 12]), ([2, 3, 5, 7, 13, 11], [\n 2, 3, 5, 7, 11, 13])])\n", (97, 328), False, 'import pytest\n'), ((427, 448), 'insertion_sort.insertion_sort.insertion_Sort', 'insertion_Sort', (['input'], {}), '(input)\n', (441, 448), False, 'from insertion_sort.insertion_sort import insertion_Sort\n')]
|
import boto3
from botocore.exceptions import ClientError
import json
import time
dynamodb = boto3.resource('dynamodb', region_name='us-east-1')
ddb_table = dynamodb.Table('GomokuPlayerInfo')
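# Assumed item shape in GomokuPlayerInfo (inferred from the reads/writes
# below): { 'PlayerName': str, 'ConnectionInfo': JSON string holding
# IpAddress, Port, PlayerSessionId, a timestamp and a status that moves
# from 'matching' to 'complete' }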
def lambda_handler(event, context):
print(event)
# You can also use TicketId to track Matchmaking Event.
ticket_id = event['TicketId']
player_name = event['PlayerName']
response = { 'IpAddress': '', 'PlayerSessionId': '', 'Port': 0 }
try:
match_response = ddb_table.get_item(
TableName='GomokuPlayerInfo',
Key={
'PlayerName': player_name
})
if 'Item' in match_response:
print(match_response['Item'])
connection_info = json.loads(match_response['Item']['ConnectionInfo'])
if connection_info['status'] == 'matching':
response['IpAddress'] = connection_info['IpAddress']
response['Port'] = connection_info['Port']
response['PlayerSessionId'] = connection_info['PlayerSessionId']
connection_update = { 'IpAddress': connection_info['IpAddress'], 'Port': connection_info['Port'], 'PlayerSessionId': connection_info['PlayerSessionId'], 'timestamp': int(time.time()), 'status': 'complete' }
ddb_table.update_item(
TableName="GomokuPlayerInfo",
Key={ 'PlayerName' : player_name },
UpdateExpression="set ConnectionInfo = :connection_update",
ExpressionAttributeValues={
                        ':connection_update': json.dumps(connection_update),
},
ReturnValues="UPDATED_NEW"
)
except ClientError as e:
print(e.response['Error']['Message'])
print(response)
return response
|
[
"boto3.resource",
"json.loads",
"json.dumps",
"time.time"
] |
[((93, 144), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {'region_name': '"""us-east-1"""'}), "('dynamodb', region_name='us-east-1')\n", (107, 144), False, 'import boto3\n'), ((732, 784), 'json.loads', 'json.loads', (["match_response['Item']['ConnectionInfo']"], {}), "(match_response['Item']['ConnectionInfo'])\n", (742, 784), False, 'import json\n'), ((1253, 1264), 'time.time', 'time.time', ([], {}), '()\n', (1262, 1264), False, 'import time\n'), ((1615, 1644), 'json.dumps', 'json.dumps', (['connection_update'], {}), '(connection_update)\n', (1625, 1644), False, 'import json\n')]
|
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist
from std_msgs.msg import Int64
class Twist2int64():
def __init__(self):
self.command_left = Int64()
self.command_right = Int64()
self.received_twist = None
rospy.init_node('Twist2int64')
rospy.Subscriber('motor/twist/cmd_vel', Twist, self.callback)
self.pub_right = rospy.Publisher('right_motor/cmd_vel', Int64, queue_size=10)
self.pub_left = rospy.Publisher('left_motor/cmd_vel', Int64, queue_size=10)
def twist2int64_main(self):
rospy.spin()
def callback(self, message):
self.received_twist = message
self.command_right, self.command_left = self.twist2rpm(self.received_twist)
self.pub_right.publish(self.command_right)
self.pub_left.publish(self.command_left)
def twist2rpm(self, received_data):
wheel_size = 0.04
axle_length = 0.04
v = received_data.linear.x
omega = received_data.angular.z
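        # Differential-drive inverse kinematics (assuming axle_length is the
        # wheel separation L and wheel_size the wheel radius, in metres):
        #   v_r = v + omega * L / 2
        #   v_l = v - omega * L / 2
        # Wheel speeds are then divided by the circumference (2 * pi * r,
        # with pi approximated as 3.14 below) and scaled to RPM.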
v_r = (omega*axle_length + 2*v)/2
v_l = (omega*axle_length - 2*v)/(-2)
v_r = v_r/(wheel_size * 2 * 3.14)
v_l = v_l/(wheel_size * 2 * 3.14)
rpm_r = 60 * v_r
rpm_l = 60 * v_l
return rpm_r, rpm_l
if __name__ == '__main__':
twist2int64 = Twist2int64()
twist2int64.twist2int64_main()
|
[
"std_msgs.msg.Int64",
"rospy.Subscriber",
"rospy.Publisher",
"rospy.init_node",
"rospy.spin"
] |
[((170, 177), 'std_msgs.msg.Int64', 'Int64', ([], {}), '()\n', (175, 177), False, 'from std_msgs.msg import Int64\n'), ((203, 210), 'std_msgs.msg.Int64', 'Int64', ([], {}), '()\n', (208, 210), False, 'from std_msgs.msg import Int64\n'), ((246, 276), 'rospy.init_node', 'rospy.init_node', (['"""Twist2int64"""'], {}), "('Twist2int64')\n", (261, 276), False, 'import rospy\n'), ((281, 342), 'rospy.Subscriber', 'rospy.Subscriber', (['"""motor/twist/cmd_vel"""', 'Twist', 'self.callback'], {}), "('motor/twist/cmd_vel', Twist, self.callback)\n", (297, 342), False, 'import rospy\n'), ((364, 424), 'rospy.Publisher', 'rospy.Publisher', (['"""right_motor/cmd_vel"""', 'Int64'], {'queue_size': '(10)'}), "('right_motor/cmd_vel', Int64, queue_size=10)\n", (379, 424), False, 'import rospy\n'), ((445, 504), 'rospy.Publisher', 'rospy.Publisher', (['"""left_motor/cmd_vel"""', 'Int64'], {'queue_size': '(10)'}), "('left_motor/cmd_vel', Int64, queue_size=10)\n", (460, 504), False, 'import rospy\n'), ((540, 552), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (550, 552), False, 'import rospy\n')]
|
from django.core.management.base import BaseCommand, CommandError
from main.models import Project, Person
import csv
# This file is part of https://github.com/cpina/science-cruise-data-management
#
# This project was programmed in a hurry without any prior Django experience,
# while circumnavigating the Antarctic on the ACE expedition, without proper
# Internet access, with 150 scientists using the system and doing at the same
# cruise other data management and system administration tasks.
#
# Sadly there aren't unit tests and we didn't have time to refactor the code
# during the cruise, which is really needed.
#
# <NAME> (<EMAIL>) and <NAME> (<EMAIL>), 2016-2017.
class Command(BaseCommand):
help = 'Adds data to the person table'
def add_arguments(self, parser):
parser.add_argument('filename', type=str)
def handle(self, *args, **options):
print(options['filename'])
self.import_data_from_csv(options['filename'])
def import_data_from_csv(self, filename):
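        # Expected CSV columns (inferred from the fields read below):
        # project_number, project_title, project_alternative_title, abstract,
        # name_first, name_last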
with open(filename) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
print(row)
project = Project()
project.number = row['project_number']
                project.title = row['project_title']
project.alternative_title = row['project_alternative_title']
project.abstract = row['abstract']
if row['name_first'] != '':
print("{}-{}".format(row['name_first'],row['name_last']))
person = Person.objects.filter(name_first=row['name_first']).filter(name_last=row['name_last'])[0]
                    project.principal_investigator = person
project.save()
|
[
"csv.DictReader",
"main.models.Project",
"main.models.Person.objects.filter"
] |
[((1073, 1096), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {}), '(csvfile)\n', (1087, 1096), False, 'import csv\n'), ((1181, 1190), 'main.models.Project', 'Project', ([], {}), '()\n', (1188, 1190), False, 'from main.models import Project, Person\n'), ((1578, 1629), 'main.models.Person.objects.filter', 'Person.objects.filter', ([], {'name_first': "row['name_first']"}), "(name_first=row['name_first'])\n", (1599, 1629), False, 'from main.models import Project, Person\n')]
|
#!/usr/bin/env python
import time
from ... import base
class FindSlotSM(base.StateMachine):
"""
verify tape untensioned
setup stats and beam
move to approximate center
find edge(s)
move to center
set center
set roi (outside?)
state = {
'center': {
'x': ..., 'y': ..., # stage position
'time': ...., # time found
},
}
config = {
'center': {
'x': ..., 'y': ..., # stage position
'time': ...., # time found
},
'start': {'x': ..., 'y': ...},
'move_size': {'x': ..., 'y': ...},
'max_moves': {'x': ..., 'y': ...},
'stats': (stats config, stream config)
'delay': # stats delay
'threshold': std threshold
}
"""
def setup(self):
#self.stds = []
self.data = []
self.last_move = None
start_time = time.time()
self._error = None
# verify tape untensioned
if self.node.tape.get_state() != 'untensioned':
raise IOError(
"Tape is not untensioned [%s] cannot find slot"
% self.node.tape.get_state())
# check motion stage unlocked
if self.node.motion.is_locked():
raise IOError("Motion stage is locked cannot find slot")
# check screen
screen_delay = None
if not self.node.scope.screen_is_open():
self.node.scope.press_button('screen')
screen_delay = self.node.scope.config().get('screen_delay', 5.0)
ncfg = self.node.config()
cfg = self.node.config()['find_slot']
# move to center
kwargs = {
'x': cfg['start']['x'],
'y': cfg['start']['y'],
'wait': True, 'relative': False, 'poll': True, 'hold': True}
self.node.motion.move(**kwargs)
# setup stats
self.old_camera_configs = []
for c in self.node.cameras:
self.old_camera_configs.append(c.config())
c.config({
'broadcast': { # disable frame/etc broadcasting
'enable': cfg['broadcast'],
},
'stream': {
'grab_type': 'grab',
'delay': cfg['delay'],
}
})
# clear stats
self.directions = [
('x', -cfg['move_size']['x']),
('y', -cfg['move_size']['y'])]
self.n_moves = 0
self.stats_futures = None
#self.stats_futures = [c.get_new_stats() for c in self.node.cameras]
#self.node.frame_stats = [None for _ in xrange(len(self.node.cameras))]
# start streaming
[c.start_streaming() for c in self.node.cameras]
# record meta
self.node.config({'find_slot': 'meta'}, prune=True)
self.node.config({'find_slot': {'meta': {
'start_time': start_time,
}}})
if screen_delay is not None:
return 'widen_beam', screen_delay
return 'widen_beam'
def error(self):
# move to 'safe' position
cfg = self.node.config()
#cfg = ncfg['find_slot']
#if cfg['find_slot']['include_stds']:
if cfg['find_slot']['include_data']:
#self.node.config({'find_slot': {'meta': {'stds': self.stds}}})
self.node.config({'find_slot': {'meta': {'data': self.data}}})
# move to center
kwargs = {
'x': cfg['origin']['x'],
'y': cfg['origin']['y'],
'wait': True, 'relative': False, 'poll': True, 'hold': True}
self.node.motion.move(**kwargs)
# reset camera configs
for (oc, c) in zip(self.old_camera_configs, self.node.cameras):
c.config(oc)
# stop streaming
        [c.stop_streaming() for c in self.node.cameras]
if self._error is not None:
raise self._error
raise Exception("find_slot undefined error")
def widen_beam(self):
self.node.scope.widen_beam()
cfg = self.node.config()['find_slot']
return 'setup_beam', cfg['post_widen_delay']
def setup_beam(self):
# tighten by N 16x clicks
self.tight_n_16x_clicks = self.node.config()[
'find_slot']['tighten_beam_n_16x_clicks']
self.node.scope.adjust_brightness(
self.tight_n_16x_clicks, 'l', x16=True)
if not self.node.scope.screen_is_open():
screen_delay = self.node.scope.config().get('screen_delay', 5.0)
return 'wait_for_screen', screen_delay
return 'check_stats'
def wait_for_screen(self):
if self.node.scope.screen_is_open():
return 'check_stats'
raise IOError("screen is either not working or is flaky")
def move(self):
if len(self.directions) == 0:
return 'finish'
cfg = self.node.config()['find_slot']
axis, nm = self.directions[0]
if self.n_moves > cfg['max_moves'][axis]:
self._error = IOError(
"find_slot failed to find direction %s in n_moves %s" %
(axis, self.n_moves))
return 'error'
# move
kwargs = {
axis: nm, 'wait': True, 'relative': True,
'poll': True, 'hold': True}
self.last_move = {axis: nm}
self.node.motion.move(**kwargs)
self.n_moves += 1
# clear stats
self.stats_futures = [c.get_new_stats() for c in self.node.cameras]
#self.node.frame_stats = [None for _ in xrange(len(self.node.cameras))]
return ('check_stats', self.stats_futures)
def change_direction(self):
if len(self.directions) == 0:
return 'finish'
cfg = self.node.config()['find_slot']
axis, _ = self.directions.pop(0)
# check for < minimum moves?
if self.n_moves < cfg['min_moves'][axis]:
self._error = IOError(
"find_slot found %s edge in too few moves %s < %s" %
(axis, self.n_moves, cfg['min_moves'][axis]))
return 'error'
# move to 'center' of direction
nm = cfg['offset'][axis]
kwargs = {
axis: nm, 'wait': True, 'relative': True,
'poll': True, 'hold': True}
mr = self.node.motion.move(**kwargs)
# set center
self.node.config({'find_slot': {
'center': {axis: mr[axis]},
'meta': {axis: {
'n_moves': self.n_moves,
'time': time.time(),
}}}})
# reset n_moves
self.n_moves = 0
return 'move'
def check_stats(self):
if self.stats_futures is None:
self.stats_futures = [c.get_new_stats() for c in self.node.cameras]
return ('check_stats', self.stats_futures)
# wait for new stats
#if any((s is None for s in self.node.frame_stats)):
# return (
# 'check_stats',
# self.node.config()['find_slot']['check_timeout'])
# check for edge of slot
threshold = self.node.config()['find_slot']['threshold']
stat = self.node.config()['find_slot'].get('stat', 'std')
edge = False
#stds = {}
datum = {}
#for s in self.node.frame_stats:
# TODO check futures for running or errors
for (i, sf) in enumerate(self.stats_futures):
s = sf.result()
datum[i] = s[stat]
#stds[i] = s['std']
if datum[i] < threshold:
edge = True
if self.last_move is not None:
self.data.append({
'move': self.last_move,
'datum': datum,
})
#self.stds.append({
# 'move': self.last_move,
# 'stds': stds,
#})
self.stats_futures = None
if edge:
return 'change_direction'
return 'move'
def finish(self):
if hasattr(self, 'tight_n_16x_clicks'):
self.node.scope.adjust_brightness(
self.tight_n_16x_clicks, 'r', x16=True)
del self.tight_n_16x_clicks
# reset camera configs
for (oc, c) in zip(self.old_camera_configs, self.node.cameras):
c.config(oc)
del self.old_camera_configs
# stop streaming
[c.stop_streaming() for c in self.node.cameras]
meta = {'finish_time': time.time()}
#if self.node.config()['find_slot']['include_stds']:
if self.node.config()['find_slot']['include_data']:
meta['data'] = self.data
#meta['stds'] = self.stds
self.node.config({'find_slot': {'meta': meta}})
self.node.config({'slot': {
'center': self.node.config()['find_slot']['center'],
}})
# reload rois now that there is a defined center
self.node.load_rois()
return None
def _teardown(self):
super(FindSlotSM, self)._teardown()
if hasattr(self, 'tight_n_16x_clicks'):
self.node.scope.adjust_brightness(
self.tight_n_16x_clicks, 'r', x16=True)
del self.tight_n_16x_clicks
# reset camera configs
if hasattr(self, 'old_camera_configs'):
for (oc, c) in zip(self.old_camera_configs, self.node.cameras):
c.config(oc)
# stop streaming
[c.stop_streaming() for c in self.node.cameras]
|
[
"time.time"
] |
[((914, 925), 'time.time', 'time.time', ([], {}), '()\n', (923, 925), False, 'import time\n'), ((8446, 8457), 'time.time', 'time.time', ([], {}), '()\n', (8455, 8457), False, 'import time\n'), ((6528, 6539), 'time.time', 'time.time', ([], {}), '()\n', (6537, 6539), False, 'import time\n')]
|
#!/usr/bin/env python3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
r"""Creates Android resources directories and boilerplate files for a module.
This is a utility script for conveniently creating resources directories and
values .xml files in modules prefilled with boilerplate and example usages. It
prints out suggested changes to the BUILD.gn and will apply them if accepted.
Examples:
Touch colors.xml and styles.xml in module foo:
tools/android/modularization/convenience/touch_resources.py \
chrome/browser/foo \
-v colors styles
Touch dimens.xml in module foo's internal dir for hdpi, xhdpi and xxdpi:
tools/android/modularization/convenience/touch_resources.py \
chrome/browser/foo/internal \
-v dimens \
-q hdpi xhdpi xxhdpi
Touch drawable directories in module foo for hdpi, xhdpi and xxdpi:
tools/android/modularization/convenience/touch_resources.py \
chrome/browser/foo \
-d drawable \
-q hdpi xhdpi xxhdpi
"""
import argparse
import datetime
import pathlib
from typing import List, Optional, Tuple
import build_gn_editor
_IGNORED_FILES_IN_RES = {'DIR_METADATA', 'OWNERS'}
_VALUES_SUPPORTED = [
'arrays',
'colors',
'dimens',
'ids',
'strings',
'styles',
]
_DIRS_SUPPORTED = [
'animator',
'anim',
'color',
'drawable',
'font',
'mipmap',
'layout',
'menu',
'raw',
'values',
'xml',
]
def main():
arg_parser = argparse.ArgumentParser(
description='Creates Android resources directories and boilerplate files '
'for a module.')
arg_parser.add_argument('module',
help='Module directory to create resources for. e.g. '
'chrome/browser/foo')
arg_parser.add_argument('-v',
'--values',
nargs='+',
default=[],
choices=_VALUES_SUPPORTED,
help='Creates values .xml resources files that do '
'not exist yet.')
arg_parser.add_argument(
'-d',
'--directories',
nargs='+',
default=[],
choices=_DIRS_SUPPORTED,
help='Creates resources file directories that do not exist yet. '
'Use --values to create the values directory.')
arg_parser.add_argument(
'-q',
'--qualifiers',
nargs='+',
help='If specified, resources will be created under these Android '
'resources qualifiers. See '
'https://developer.android.com/guide/topics/resources/providing-resources#AlternativeResources'
)
arguments = arg_parser.parse_args()
# Recognize directory structure and determine the existing BUILD.gn location
# and where resources are or should be
build_gn_path, resources_path = _identify_module_structure(arguments.module)
# Create res/ directory if it does not exist
if not resources_path.is_dir():
resources_path.mkdir(parents=True)
print(f'Created resources directory: {resources_path}')
# Detect existing resources
all_resources = [
p for p in resources_path.rglob('*')
if p.is_file() and p.name not in _IGNORED_FILES_IN_RES
]
changes_requested = False
new_resources = []
# Process -q/--qualifiers
if not arguments.qualifiers:
qualifier_suffixes = ['']
else:
qualifier_suffixes = [f'-{qualifier}' for qualifier in arguments.qualifiers]
# Process -v/--values
for value_type in arguments.values:
changes_requested = True
if value_type == 'strings':
raise ValueError(
'strings.xml files are replaced by strings.grd files for '
'localization, and modules do not need to create separate '
'strings.grd files. Existing strings can be left in and new strings '
'can be added to '
'chrome/browser/ui/android/strings/android_chrome_strings.grd')
else:
created_resources = _touch_values_files(resources_path, value_type,
qualifier_suffixes)
new_resources.extend(created_resources)
all_resources.extend(created_resources)
# Process -d/--directories
for subdirectory in arguments.directories:
changes_requested = True
if subdirectory == 'values':
raise ValueError(
'Use -v/--values to create the values directory and values resources.'
)
else:
_touch_subdirectories(resources_path, subdirectory, qualifier_suffixes)
if not changes_requested:
print('No resource types specified to create, so just created the res/ '
'directory. Use -v/--values to create value resources and '
'-d/--directories to create resources subdirectories.')
# Print out build target suggestions
all_resources.sort(key=str)
if not all_resources:
return
build_file = build_gn_editor.BuildFile(build_gn_path)
build_gn_changes_ok = _update_build_file(build_file, all_resources)
if not build_gn_changes_ok:
_print_build_target_suggestions(build_gn_path, all_resources)
return
print('Final delta:')
print(build_file.get_diff())
apply_changes = _yes_or_no('Would you like to apply these changes?')
if not apply_changes:
return
build_file.write_content_to_file()
def _yes_or_no(question: str) -> bool:
val = input(question + ' [(y)es/(N)o] ')
try:
y_or_n = val.lower().strip()
return y_or_n[0] == 'y'
except Exception:
print('Invalid input. Assuming No.')
return False
def _determine_target_to_use(targets: List[str], target_type: str,
default_name: str) -> Optional[str]:
num_targets = len(targets)
if not num_targets:
    print(f'Found no existing {target_type}, will create ":{default_name}".')
return default_name
elif num_targets == 1:
print(f'Found existing target {target_type}("{targets[0]}"), using it.')
return targets[0]
else:
print(f'Found multiple existing {target_type} targets, pick one: ')
return _enumerate_targets_and_ask(targets)
def _enumerate_targets_and_ask(targets: List[str]) -> Optional[str]:
for i, target in enumerate(targets):
print(f'{i + 1}: {target}')
try:
val = int(
        input('Enter the number corresponding to the target you want to '
'use: ')) - 1
except ValueError:
return None
if 0 <= val < len(targets):
return targets[val]
return None
def _identify_module_structure(path_argument: str
) -> Tuple[pathlib.Path, pathlib.Path]:
module_path = pathlib.Path(path_argument)
assert module_path.is_dir()
# If present, prefer module/android/BUILD.gn
possible_android_path = module_path / 'android'
if possible_android_path.is_dir():
possible_build_gn_path = possible_android_path / 'BUILD.gn'
if possible_build_gn_path.is_file():
build_gn_path = possible_build_gn_path
resources_path = possible_android_path / 'java' / 'res'
return build_gn_path, resources_path
# The recommended structure is module/BUILD.gn
possible_build_gn_path = module_path / 'BUILD.gn'
if possible_build_gn_path.is_file():
build_gn_path = possible_build_gn_path
possible_existing_java_path = module_path / 'java'
# If module/java exists, use module/java/res, but the preferred structure is
# module/android/java/res
if possible_existing_java_path.is_dir():
resources_path = possible_existing_java_path / 'res'
else:
resources_path = possible_android_path / 'java' / 'res'
return build_gn_path, resources_path
raise Exception(
f'BUILD.gn found neither in {module_path} nor in {possible_android_path}')
def _touch_values_files(resources_path: pathlib.Path, value_resource_type: str,
qualifier_suffixes: List[str]) -> List[pathlib.Path]:
created_files = []
for qualifier_suffix in qualifier_suffixes:
values_path = resources_path / f'values{qualifier_suffix}'
values_path.mkdir(parents=True, exist_ok=True)
xml_path = values_path / f'{value_resource_type}.xml'
if xml_path.is_file():
print(f'{xml_path} already exists.')
continue
with xml_path.open('a') as f:
f.write(_create_filler(value_resource_type))
print(f'Created {xml_path}')
created_files.append(xml_path)
return created_files
_RESOURCES_BOILERPLATE_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright {year} The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file. -->
<resources xmlns:tools="http://schemas.android.com/tools">
{contents}
</resources>
"""
_DIMENS_BOILERPLATE = """ <!-- Foo icon dimensions -->
<dimen name="foo_icon_height">24dp</dimen>
<dimen name="foo_icon_width">24dp</dimen>"""
_COLORS_BOILERPLATE = """ <!-- Foo UI colors -->
<color name="foo_background_color">@color/default_bg_color_light</color>"""
_STYLES_BOILERPLATE = """ <!-- Styling for a Foo menu button. -->
<style name="FooMenuButton">
<item name="android:layout_width">48dp</item>
<item name="android:layout_height">24dp</item>
<item name="tint">@color/default_icon_color_tint_list</item>
</style>"""
_IDS_BOILERPLATE = """ <!-- Dialog button ids -->
<item type="id" name="foo_ok_button" />
<item type="id" name="foo_cancel_button" />"""
_ARRAYS_BOILERPLATE = """ <!-- Prime numbers -->
<integer-array name="foo_primes">
<item>2</item>
<item>3</item>
<item>5</item>
<item>7</item>
</integer-array>
<!-- Geometrics shapes -->
<array name="foo_shapes">
<item>@drawable/triangle</item>
<item>@drawable/square</item>
<item>@drawable/circle</item>
</array>"""
_BOILERPLATE = {
'dimens': _DIMENS_BOILERPLATE,
'colors': _COLORS_BOILERPLATE,
'styles': _STYLES_BOILERPLATE,
'ids': _IDS_BOILERPLATE,
'arrays': _ARRAYS_BOILERPLATE
}
def _create_filler(value_resource_type: str) -> str:
boilerplate = _BOILERPLATE[value_resource_type]
return _RESOURCES_BOILERPLATE_TEMPLATE.format(year=_get_current_year(),
contents=boilerplate)
def _get_current_year() -> int:
return datetime.datetime.now().year
_COMMON_RESOURCE_DEPS = [
"//chrome/browser/ui/android/strings:ui_strings_grd",
"//components/browser_ui/strings/android:browser_ui_strings_grd",
"//components/browser_ui/styles/android:java_resources",
"//components/browser_ui/widget/android:java_resources",
"//third_party/android_deps:material_design_java",
"//ui/android:ui_java_resources",
]
def _touch_subdirectories(resources_path: pathlib.Path, subdirectory: str,
qualifier_suffixes: List[str]) -> List[pathlib.Path]:
for qualifier_suffix in qualifier_suffixes:
subdir_name = f'{subdirectory}{qualifier_suffix}'
subdir_path = resources_path / subdir_name
if not subdir_path.is_dir():
subdir_path.mkdir(parents=True)
print(f'Created {subdir_path}')
else:
print(f'{subdir_path} already exists.')
def _generate_resources_sources(build_gn_dir_path: pathlib.Path,
new_resources: List[pathlib.Path]) -> List[str]:
return [f'"{str(r.relative_to(build_gn_dir_path))}"' for r in new_resources]
def _list_to_lines(lines, indent):
spaces = ' ' * indent
return '\n'.join([f'{spaces}{line},' for line in lines])
def _generate_suggested_resources_deps() -> List[str]:
return [f'# "{dep}"' for dep in _COMMON_RESOURCE_DEPS]
def _generate_resources_content(build_gn_path: pathlib.Path,
new_resources: List[pathlib.Path], *,
include_comment: bool) -> str:
build_gn_dir_path = build_gn_path.parent
new_resources_lines = _list_to_lines(
_generate_resources_sources(build_gn_dir_path, new_resources), 4)
suggested_deps_lines = _list_to_lines(_generate_suggested_resources_deps(), 4)
comment = ''
if include_comment:
comment = ('\n # Commonly required resources deps for convenience, ' +
'add other required deps and remove unnecessary ones.')
resources_content = f"""sources = [
{new_resources_lines}
]
deps = [{comment}
{suggested_deps_lines}
]"""
return resources_content
def _generate_suggested_resources(build_gn_path: pathlib.Path,
new_resources: List[pathlib.Path]) -> str:
resources_content = _generate_resources_content(build_gn_path,
new_resources,
include_comment=True)
resources_target_suggestion = f"""
android_resources("java_resources") {{
{resources_content}
}}"""
return resources_target_suggestion
def _generate_suggested_java_package(build_gn_path: pathlib.Path) -> str:
build_gn_dir_path = build_gn_path.parent
parts_for_package = build_gn_dir_path.parts
# internal, public or android subdirectories are not part of the Java package.
while parts_for_package[-1] in ('internal', 'public', 'android'):
parts_for_package = parts_for_package[:-1]
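  # e.g. the docstring's example path chrome/browser/foo/internal maps to
  # "org.chromium.chrome.browser.foo".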
return f'org.chromium.{".".join(parts_for_package)}'
def _generate_library_content(build_gn_path: pathlib.Path,
resources_target_name: str) -> str:
suggested_java_package = _generate_suggested_java_package(build_gn_path)
library_content = f"""deps = [
":{resources_target_name}",
]
resources_package = "{suggested_java_package}" """
return library_content
def _generate_library_target(build_gn_path: pathlib.Path,
resources_target_name: str) -> str:
library_content = _generate_library_content(build_gn_path,
resources_target_name)
android_library_target_suggestion = f"""
android_library("java") {{
{library_content}
}}"""
return android_library_target_suggestion
def _create_or_update_variable_list(target: build_gn_editor.BuildTarget,
variable_name: str,
elements: List[str]) -> None:
variable = target.get_variable(variable_name)
if variable:
variable_list = variable.get_content_as_list()
if not variable_list:
raise build_gn_editor.BuildFileUpdateError(
f'{target.get_type()}("{target.get_name()}") '
f'{variable_name} is not a list.')
variable_list.add_elements(elements)
variable.set_content_from_list(variable_list)
target.replace_variable(variable)
return
variable = build_gn_editor.TargetVariable(variable_name, '')
variable_list = build_gn_editor.VariableContentList()
variable_list.add_elements(elements)
variable.set_content_from_list(variable_list)
target.add_variable(variable)
def _update_build_file(build_file: build_gn_editor.BuildFile,
all_resources: List[pathlib.Path]) -> bool:
libraries = build_file.get_target_names_of_type('android_library')
resources = build_file.get_target_names_of_type('android_resources')
library_target = _determine_target_to_use(libraries, 'android_library',
'java')
resources_target = _determine_target_to_use(resources, 'android_resources',
'java_resources')
if not library_target or not resources_target:
print('Invalid build target selections. Aborting BUILD.gn changes.')
return False
try:
_update_build_targets(build_file, all_resources, library_target,
resources_target)
except build_gn_editor.BuildFileUpdateError as e:
print(f'Changes to build targets failed: {e}. Aborting BUILD.gn changes.')
return False
try:
build_file.format_content()
except build_gn_editor.BuildFileUpdateError as e:
print(f'Formatting BUILD gn failed: {e}\n Aborting BUILD.gn changes')
return False
return True
def _update_build_targets(build_file: build_gn_editor.BuildFile,
all_resources: List[pathlib.Path],
library_target: str, resources_target: str) -> None:
resources = build_file.get_target('android_resources', resources_target)
if not resources:
resources = build_gn_editor.BuildTarget(
'android_resources', resources_target,
_generate_resources_content(build_file.get_path(),
all_resources,
include_comment=False))
build_file.add_target(resources)
else:
_create_or_update_variable_list(
resources, 'sources',
_generate_resources_sources(build_file.get_path().parent,
all_resources))
_create_or_update_variable_list(resources, 'deps',
_generate_suggested_resources_deps())
build_file.replace_target(resources)
library = build_file.get_target('android_library', library_target)
if not library:
library = build_gn_editor.BuildTarget(
'android_library', library_target,
_generate_library_content(build_file.get_path(), resources_target))
build_file.add_target(library)
else:
_create_or_update_variable_list(library, 'deps', [f'":{resources_target}"'])
resources_package = library.get_variable('resources_package')
if not resources_package:
resources_package_str = _generate_suggested_java_package(
build_file.get_path())
library.add_variable(
build_gn_editor.TargetVariable('resources_package',
f'"{resources_package_str}"'))
build_file.replace_target(library)
def _print_build_target_suggestions(build_gn_path: pathlib.Path,
new_resources: List[pathlib.Path]) -> None:
resources_target_suggestion = _generate_suggested_resources(
build_gn_path, new_resources)
android_library_target_suggestion = _generate_library_target(
build_gn_path, 'java_resources')
print(f'Suggestion for {build_gn_path}:')
print(resources_target_suggestion)
print(android_library_target_suggestion)
print()
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser",
"build_gn_editor.VariableContentList",
"build_gn_editor.TargetVariable",
"build_gn_editor.BuildFile",
"pathlib.Path",
"datetime.datetime.now"
] |
[((1549, 1671), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Creates Android resources directories and boilerplate files for a module."""'}), "(description=\n 'Creates Android resources directories and boilerplate files for a module.'\n )\n", (1572, 1671), False, 'import argparse\n'), ((4933, 4973), 'build_gn_editor.BuildFile', 'build_gn_editor.BuildFile', (['build_gn_path'], {}), '(build_gn_path)\n', (4958, 4973), False, 'import build_gn_editor\n'), ((6633, 6660), 'pathlib.Path', 'pathlib.Path', (['path_argument'], {}), '(path_argument)\n', (6645, 6660), False, 'import pathlib\n'), ((14748, 14797), 'build_gn_editor.TargetVariable', 'build_gn_editor.TargetVariable', (['variable_name', '""""""'], {}), "(variable_name, '')\n", (14778, 14797), False, 'import build_gn_editor\n'), ((14816, 14853), 'build_gn_editor.VariableContentList', 'build_gn_editor.VariableContentList', ([], {}), '()\n', (14851, 14853), False, 'import build_gn_editor\n'), ((10366, 10389), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10387, 10389), False, 'import datetime\n'), ((17689, 17774), 'build_gn_editor.TargetVariable', 'build_gn_editor.TargetVariable', (['"""resources_package"""', 'f""""{resources_package_str}\\""""'], {}), '(\'resources_package\',\n f\'"{resources_package_str}"\')\n', (17719, 17774), False, 'import build_gn_editor\n')]
|
import argparse
import numpy as np
import chainer
from siam_rpn.general.eval_sot_vot import eval_sot_vot
from siam_rpn.siam_rpn import SiamRPN
from siam_rpn.siam_rpn_tracker import SiamRPNTracker
from siam_rpn.siam_mask_tracker import SiamMaskTracker
from siam_rpn.general.vot_tracking_dataset import VOTTrackingDataset
from chainercv.utils import apply_to_iterator
from chainercv.utils import ProgressHook
from chainer import iterators
from siam_rpn.general.predictor_with_gt import PredictorWithGT
def collate_images_from_same_video(data, used_ids=None):
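    """Groups images and ground-truth polygons by video id, preserving frame order."""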
imgs = data.slice[:, 'img']
polys = data.slice[:, 'poly']
video_ids = data.slice[:, 'video_id']
frame_ids = data.slice[:, 'frame_id']
if used_ids is None:
used_ids = np.unique(video_ids)
        used_ids = np.sort(used_ids)
videos = []
video_polys = []
for video_id in used_ids:
indices = np.where(video_ids == video_id)[0]
the_frame_ids = list(frame_ids.slice[indices])
assert all(list(the_frame_ids) == np.arange(len(the_frame_ids)))
videos.append(imgs.slice[indices])
video_polys.append(polys[indices])
return videos, video_polys
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--pretrained-model', type=str)
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--mask', action='store_true')
args = parser.parse_args()
data = VOTTrackingDataset('data')
if args.mask:
model = SiamRPN(multi_scale=False, mask=True)
chainer.serializers.load_npz(args.pretrained_model, model)
tracker = SiamMaskTracker(model)
else:
model = SiamRPN()
chainer.serializers.load_npz(args.pretrained_model, model)
tracker = SiamRPNTracker(model)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
tracker.to_gpu()
videos, video_polys = collate_images_from_same_video(
data, used_ids=None)
video_dataset = chainer.datasets.TupleDataset(videos, video_polys)
it = iterators.SerialIterator(video_dataset, 1, False, False)
in_values, out_values, rest_values = apply_to_iterator(
PredictorWithGT(tracker, mask=args.mask), it,
n_input=2, hook=ProgressHook(len(video_dataset)))
# delete unused iterators explicitly
imgs, video_polys = in_values
pred_bboxes, pred_statuses, sizes = out_values
del imgs
video_polys = list(video_polys)
pred_bboxes = list(pred_bboxes)
pred_statuses = list(pred_statuses)
sizes = list(sizes)
np.savez(
'eval_sot_out.npz',
pred_bboxes=pred_bboxes, pred_statuses=pred_statuses,
gt_polys=video_polys, sizes=sizes)
result = eval_sot_vot(pred_bboxes, pred_statuses, video_polys, sizes)
print(result['eao'], result['accuracy'], result['robustness'])
|
[
"siam_rpn.siam_rpn.SiamRPN",
"chainer.datasets.TupleDataset",
"argparse.ArgumentParser",
"chainer.serializers.load_npz",
"siam_rpn.general.predictor_with_gt.PredictorWithGT",
"siam_rpn.siam_mask_tracker.SiamMaskTracker",
"siam_rpn.general.eval_sot_vot.eval_sot_vot",
"numpy.sort",
"numpy.where",
"chainer.iterators.SerialIterator",
"numpy.savez",
"chainer.cuda.get_device_from_id",
"siam_rpn.siam_rpn_tracker.SiamRPNTracker",
"numpy.unique",
"siam_rpn.general.vot_tracking_dataset.VOTTrackingDataset"
] |
[((1213, 1238), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1236, 1238), False, 'import argparse\n'), ((1448, 1474), 'siam_rpn.general.vot_tracking_dataset.VOTTrackingDataset', 'VOTTrackingDataset', (['"""data"""'], {}), "('data')\n", (1466, 1474), False, 'from siam_rpn.general.vot_tracking_dataset import VOTTrackingDataset\n'), ((2011, 2061), 'chainer.datasets.TupleDataset', 'chainer.datasets.TupleDataset', (['videos', 'video_polys'], {}), '(videos, video_polys)\n', (2040, 2061), False, 'import chainer\n'), ((2072, 2128), 'chainer.iterators.SerialIterator', 'iterators.SerialIterator', (['video_dataset', '(1)', '(False)', '(False)'], {}), '(video_dataset, 1, False, False)\n', (2096, 2128), False, 'from chainer import iterators\n'), ((2582, 2704), 'numpy.savez', 'np.savez', (['"""eval_sot_out.npz"""'], {'pred_bboxes': 'pred_bboxes', 'pred_statuses': 'pred_statuses', 'gt_polys': 'video_polys', 'sizes': 'sizes'}), "('eval_sot_out.npz', pred_bboxes=pred_bboxes, pred_statuses=\n pred_statuses, gt_polys=video_polys, sizes=sizes)\n", (2590, 2704), True, 'import numpy as np\n'), ((2740, 2800), 'siam_rpn.general.eval_sot_vot.eval_sot_vot', 'eval_sot_vot', (['pred_bboxes', 'pred_statuses', 'video_polys', 'sizes'], {}), '(pred_bboxes, pred_statuses, video_polys, sizes)\n', (2752, 2800), False, 'from siam_rpn.general.eval_sot_vot import eval_sot_vot\n'), ((758, 778), 'numpy.unique', 'np.unique', (['video_ids'], {}), '(video_ids)\n', (767, 778), True, 'import numpy as np\n'), ((787, 804), 'numpy.sort', 'np.sort', (['used_ids'], {}), '(used_ids)\n', (794, 804), True, 'import numpy as np\n'), ((1510, 1547), 'siam_rpn.siam_rpn.SiamRPN', 'SiamRPN', ([], {'multi_scale': '(False)', 'mask': '(True)'}), '(multi_scale=False, mask=True)\n', (1517, 1547), False, 'from siam_rpn.siam_rpn import SiamRPN\n'), ((1556, 1614), 'chainer.serializers.load_npz', 'chainer.serializers.load_npz', (['args.pretrained_model', 'model'], {}), '(args.pretrained_model, model)\n', (1584, 1614), False, 'import chainer\n'), ((1633, 1655), 'siam_rpn.siam_mask_tracker.SiamMaskTracker', 'SiamMaskTracker', (['model'], {}), '(model)\n', (1648, 1655), False, 'from siam_rpn.siam_mask_tracker import SiamMaskTracker\n'), ((1682, 1691), 'siam_rpn.siam_rpn.SiamRPN', 'SiamRPN', ([], {}), '()\n', (1689, 1691), False, 'from siam_rpn.siam_rpn import SiamRPN\n'), ((1700, 1758), 'chainer.serializers.load_npz', 'chainer.serializers.load_npz', (['args.pretrained_model', 'model'], {}), '(args.pretrained_model, model)\n', (1728, 1758), False, 'import chainer\n'), ((1777, 1798), 'siam_rpn.siam_rpn_tracker.SiamRPNTracker', 'SiamRPNTracker', (['model'], {}), '(model)\n', (1791, 1798), False, 'from siam_rpn.siam_rpn_tracker import SiamRPNTracker\n'), ((2198, 2238), 'siam_rpn.general.predictor_with_gt.PredictorWithGT', 'PredictorWithGT', (['tracker'], {'mask': 'args.mask'}), '(tracker, mask=args.mask)\n', (2213, 2238), False, 'from siam_rpn.general.predictor_with_gt import PredictorWithGT\n'), ((891, 922), 'numpy.where', 'np.where', (['(video_ids == video_id)'], {}), '(video_ids == video_id)\n', (899, 922), True, 'import numpy as np\n'), ((1830, 1871), 'chainer.cuda.get_device_from_id', 'chainer.cuda.get_device_from_id', (['args.gpu'], {}), '(args.gpu)\n', (1861, 1871), False, 'import chainer\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2017 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
from sydent.db.threepid_associations import GlobalAssociationStore
import logging
import json
import signedjson.sign
from sydent.http.servlets import get_args, jsonwrap, send_cors
logger = logging.getLogger(__name__)
class BulkLookupServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
def render_POST(self, request):
"""
Bulk-lookup for threepids.
Params: 'threepids': list of threepids, each of which is a list of medium, address
Returns: Object with key 'threepids', which is a list of results where each result
is a 3 item list of medium, address, mxid
Note that results are not streamed to the client.
Threepids for which no mapping is found are omitted.
"""
send_cors(request)
err, args = get_args(request, ('threepids',))
if err:
return err
threepids = args['threepids']
if not isinstance(threepids, list):
request.setResponseCode(400)
            return json.dumps({'errcode': 'M_INVALID_PARAM', 'error': 'threepids must be a list'})
logger.info("Bulk lookup of %d threepids: %r", len(threepids), threepids)
globalAssocStore = GlobalAssociationStore(self.sydent)
results = globalAssocStore.getMxids(threepids)
return json.dumps({ 'threepids': results })
@jsonwrap
def render_OPTIONS(self, request):
send_cors(request)
request.setResponseCode(200)
return {}
|
[
"sydent.http.servlets.get_args",
"sydent.db.threepid_associations.GlobalAssociationStore",
"json.dumps",
"sydent.http.servlets.send_cors",
"logging.getLogger"
] |
[((839, 866), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (856, 866), False, 'import logging\n'), ((1451, 1469), 'sydent.http.servlets.send_cors', 'send_cors', (['request'], {}), '(request)\n', (1460, 1469), False, 'from sydent.http.servlets import get_args, jsonwrap, send_cors\n'), ((1490, 1523), 'sydent.http.servlets.get_args', 'get_args', (['request', "('threepids',)"], {}), "(request, ('threepids',))\n", (1498, 1523), False, 'from sydent.http.servlets import get_args, jsonwrap, send_cors\n'), ((1891, 1926), 'sydent.db.threepid_associations.GlobalAssociationStore', 'GlobalAssociationStore', (['self.sydent'], {}), '(self.sydent)\n', (1913, 1926), False, 'from sydent.db.threepid_associations import GlobalAssociationStore\n'), ((1998, 2032), 'json.dumps', 'json.dumps', (["{'threepids': results}"], {}), "({'threepids': results})\n", (2008, 2032), False, 'import json\n'), ((2098, 2116), 'sydent.http.servlets.send_cors', 'send_cors', (['request'], {}), '(request)\n', (2107, 2116), False, 'from sydent.http.servlets import get_args, jsonwrap, send_cors\n')]
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
class Hobbies(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
img_url = models.URLField(max_length=1000, default="https://www.okea.org/wp-content/uploads/2019/10/placeholder.png")
date_posted = models.DateTimeField(default=timezone.now)
author = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("hobbies-detail", kwargs={"pk": self.pk})
class UserHobbies(models.Model):
user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
hobbies = models.ForeignKey(Hobbies, on_delete=models.SET_NULL, null=True)
|
[
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.urls.reverse",
"django.db.models.DateTimeField"
] |
[((181, 213), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (197, 213), False, 'from django.db import models\n'), ((228, 246), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (244, 246), False, 'from django.db import models\n'), ((261, 373), 'django.db.models.URLField', 'models.URLField', ([], {'max_length': '(1000)', 'default': '"""https://www.okea.org/wp-content/uploads/2019/10/placeholder.png"""'}), "(max_length=1000, default=\n 'https://www.okea.org/wp-content/uploads/2019/10/placeholder.png')\n", (276, 373), False, 'from django.db import models\n'), ((387, 429), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (407, 429), False, 'from django.db import models\n'), ((443, 504), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(User, on_delete=models.SET_NULL, null=True)\n', (460, 504), False, 'from django.db import models\n'), ((702, 763), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(User, on_delete=models.SET_NULL, null=True)\n', (719, 763), False, 'from django.db import models\n'), ((778, 842), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Hobbies'], {'on_delete': 'models.SET_NULL', 'null': '(True)'}), '(Hobbies, on_delete=models.SET_NULL, null=True)\n', (795, 842), False, 'from django.db import models\n'), ((607, 656), 'django.urls.reverse', 'reverse', (['"""hobbies-detail"""'], {'kwargs': "{'pk': self.pk}"}), "('hobbies-detail', kwargs={'pk': self.pk})\n", (614, 656), False, 'from django.urls import reverse\n')]
|
from django.dispatch import receiver
from django.contrib.auth import get_user_model
from messenger_channels.querysets import get_pvchat_ids_cached
from user.signals import user_online
from user.serializers import UserLastSeenSerializer
from messenger_channels.utils import send_event
User = get_user_model()
@receiver(user_online, sender=User)
def send_user_online_to_channels(sender, instance, **_):
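    """Fans out a 'user_online' event to every private chat the user belongs to."""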
pv_ids = get_pvchat_ids_cached(instance.pk)
for pv_id in pv_ids:
send_event(
group_name=pv_id,
event_title="user_online",
event_type='send_online',
user=UserLastSeenSerializer(instance).data,
)
|
[
"messenger_channels.querysets.get_pvchat_ids_cached",
"django.dispatch.receiver",
"django.contrib.auth.get_user_model",
"user.serializers.UserLastSeenSerializer"
] |
[((293, 309), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (307, 309), False, 'from django.contrib.auth import get_user_model\n'), ((313, 347), 'django.dispatch.receiver', 'receiver', (['user_online'], {'sender': 'User'}), '(user_online, sender=User)\n', (321, 347), False, 'from django.dispatch import receiver\n'), ((418, 452), 'messenger_channels.querysets.get_pvchat_ids_cached', 'get_pvchat_ids_cached', (['instance.pk'], {}), '(instance.pk)\n', (439, 452), False, 'from messenger_channels.querysets import get_pvchat_ids_cached\n'), ((623, 655), 'user.serializers.UserLastSeenSerializer', 'UserLastSeenSerializer', (['instance'], {}), '(instance)\n', (645, 655), False, 'from user.serializers import UserLastSeenSerializer\n')]
|
# Generated by Django 3.1.1 on 2021-03-24 16:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main_app', '0003_auto_20210324_2124'),
]
operations = [
migrations.AddField(
model_name='profile',
name='about_me',
field=models.TextField(blank=True, default='', max_length=500),
),
]
|
[
"django.db.models.TextField"
] |
[((338, 394), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(500)'}), "(blank=True, default='', max_length=500)\n", (354, 394), False, 'from django.db import migrations, models\n')]
|
#
# Copyright <NAME>, 2019-2020
#
# Ship class and supporting classes
import csv
from collections import OrderedDict
from enum import Enum
import torch
from dice import ArmadaDice
from game_constants import (
ArmadaDimensions,
ArmadaTypes
)
class UpgradeType(Enum):
commander = 1
officer = 2
weapons_team = 3
support_team = 4
offensive_retrofit = 5
defensive_retrofit = 6
turbolasers = 7
ion_cannons = 8
ordnance = 9
fleet_support = 10
experimental_retrofit = 11
boarding_team = 12
title = 13
class Armament:
def __init__(self, redCount, blueCount, blackCount):
self.red = redCount
self.blue = blueCount
self.black = blackCount
class ShipType:
def __init__(self, name, attributes):
self.name = name
self.attributes = attributes
class Ship:
def __init__(self, name, player_number, template=None, upgrades=None, encoding=None, device=None):
"""Contsruct a specific instance of a ship.
Args:
name (str) : Name for this vessel.
player_number (int) : The player who controls this ship.
template (ShipType) : Ship template to copy.
upgrades (table str->str) : Upgrades to equip.
encoding (torch.Tensor) : An existing encoding to copy (if template and upgrades
are None)
device (str) : Default Tensor type ('cuda' or 'cpu'). Automatic if None.
"""
if (template is None or upgrades is None) and encoding is None:
raise RuntimeError("Ship requires either template and updrades or encoding.")
self.name = name
if device is None:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.encoding = torch.zeros(Ship.encodeSize()).to(device)
if encoding is not None:
self.encoding.copy_(encoding)
else:
self.encoding.fill_(0.)
# Initialize attributes of this specific ship instance
self.set('player', player_number)
self.set('hull', int(template["Hull"]))
self.set("ship", 0.)
self.set("size", ArmadaDimensions.size_names.index(template['Size'].lower()))
idx, length = Ship.get_index("defense_tokens")
self.encoding[idx:idx + length] = 0.
for ttype in ArmadaTypes.defense_tokens:
tname = "Defense Token {}".format(ttype.capitalize())
token_idx = idx + ArmadaTypes.defense_tokens.index(ttype)
if tname in template:
if 0 == len(template[tname]):
self.encoding[token_idx] = 0
else:
self.encoding[token_idx] = int(template[tname])
# Max shields (current shields will be filled in the reset function)
idx = Ship.get_index("max_shields")[0]
for zone in ['left', 'right', 'front', 'rear']:
name = "Shields {}".format(zone.capitalize())
self.encoding[idx + ArmadaTypes.hull_zones.index(zone)] = int(template[name])
if 'Huge' == template['Size']:
for zone in ['left-auxiliary', 'right-auxiliary']:
name = "Shields {} {}".format(zone.capitalize())
self.encoding[idx + ArmadaTypes.hull_zones.index(zone)] = int(template[name])
# Presence of hull zones/firing arcs
idx, length = Ship.get_index("hull_zones")
self.encoding[idx:idx + length] = 0.
# Set the hull zones to indicate which are present
idx = Ship.get_index("hull_zones")[0]
for zone in ['left', 'right', 'front', 'rear']:
self.encoding[idx + ArmadaTypes.hull_zones.index(zone)] = 1.
if 'Huge' == template['Size']:
for zone in ['left-auxiliary', 'right-auxiliary']:
self.encoding[idx + ArmadaTypes.hull_zones.index(zone)] = 1.
# Initialize the armaments
idx = Ship.get_index("dice")[0]
for i, zone in enumerate(['left', 'right', 'front', 'rear']):
for j, color in enumerate(ArmadaDice.die_colors):
name = "Armament {} {}".format(zone.capitalize(), color.capitalize())
hull_offset = ArmadaTypes.hull_zones.index(zone)
if 0 < len(template[name]):
number = int(template[name])
else:
number = 0
self.encoding[idx + hull_offset * len(ArmadaDice.die_colors) + j] = number
if 'Huge' == template['Size']:
for i, zone in enumerate(['left-auxiliary', 'right-auxiliary']):
for j, color in enumerate(ArmadaDice.die_colors):
name = "Armament {} {}".format(zone.capitalize(), color.capitalize())
hull_offset = ArmadaTypes.hull_zones.index(zone)
number = int(template[name])
self.encoding[idx + hull_offset * len(ArmadaDice.die_colors) + j] = number
self.reset()
# TODO Check for legality and actually handle
self.width, self.height = ArmadaDimensions.ship_bases_feet[
ArmadaDimensions.size_names[int(self.get('size'))]]
self.upgrades = upgrades
@staticmethod
def _initialize_encoding():
"""Initialize the _enc_index and _enc_len variables."""
Ship._enc_index = OrderedDict()
Ship._enc_len = OrderedDict()
def addEntry(name, length, cur_idx):
Ship._enc_index[name] = cur_idx
Ship._enc_len[name] = length
return Ship._enc_index[name] + Ship._enc_len[name]
cur_idx = addEntry(name='player', length=1, cur_idx=0)
cur_idx = addEntry(name='hull', length=1, cur_idx=cur_idx)
cur_idx = addEntry(name='damage', length=1, cur_idx=cur_idx)
# TODO Face up damage card effects
cur_idx = addEntry(name='speed', length=1, cur_idx=cur_idx)
cur_idx = addEntry(name='ship', length=1, cur_idx=cur_idx)
cur_idx = addEntry(name='size', length=1, cur_idx=cur_idx)
# Defense tokens and state belong here, whether the token has been spent during this
# attack step is stored in the attack state
cur_idx = addEntry(name='defense_tokens', length=len(ArmadaTypes.defense_tokens), cur_idx=cur_idx)
cur_idx = addEntry(name='green_defense_tokens', length=len(ArmadaTypes.defense_tokens), cur_idx=cur_idx)
cur_idx = addEntry(name='red_defense_tokens', length=len(ArmadaTypes.defense_tokens), cur_idx=cur_idx)
cur_idx = addEntry(name='max_shields', length=len(ArmadaTypes.hull_zones), cur_idx=cur_idx)
cur_idx = addEntry(name='shields', length=len(ArmadaTypes.hull_zones), cur_idx=cur_idx)
# Presence of particular hull zones
cur_idx = addEntry(name='hull_zones', length=len(ArmadaTypes.hull_zones), cur_idx=cur_idx)
# Armament for each zone
cur_idx = addEntry(
name='dice',
length=len(ArmadaTypes.hull_zones) * len(ArmadaDice.die_colors), cur_idx=cur_idx)
# TODO Line of sight marker locations and firing arc locations
# TODO Upgrades
# TODO Ignition arc
cur_idx = addEntry(name='commands', length=ArmadaTypes.max_command_dials, cur_idx=cur_idx)
# Location is a pair of x and y coordinates in feet (since that is the range ruler size).
cur_idx = addEntry(name='location', length=2, cur_idx=cur_idx)
# The heading is the clockwise rotation of the ship in radians
cur_idx = addEntry(name='heading', length=1, cur_idx=cur_idx)
@staticmethod
def encodeSize():
"""Get the size of the ship encoding.
Returns:
int: Size of the ship encoding (number of Tensor elements)
"""
# Programmatically initialize the index lookup if it doesn't exist
if not hasattr(Ship, '_enc_index'):
Ship._initialize_encoding()
last_key = list(Ship._enc_index.keys())[-1]
size = Ship._enc_index[last_key] + Ship._enc_len[last_key]
return size
@staticmethod
def get_index(data_name):
"""Get the index of a data element.
Arguments:
data_name(str): Name of the data element.
Returns:
(int, int): Tuple of the beginning of the data and the length.
"""
# Programmatically initialize the index lookup if it doesn't exist
if not hasattr(Ship, '_enc_index'):
Ship._initialize_encoding()
if data_name not in Ship._enc_index:
raise RuntimeError("Ship has no attribute named {}".format(data_name))
return (Ship._enc_index[data_name], Ship._enc_len[data_name])
def base_size(self):
"""Get the ship width and length.
Returns:
tuple(int, int): width and length
"""
        index = int(self.encoding[Ship._enc_index['size']].item())
return ArmadaDimensions.ship_bases[ArmadaDimensions.size_names[index]]
def token_count(self, index):
"""Get the number of green and red tokens at the given index.
The index corresponds to a particular type of token as defined in
ArmadaTypes.defense_tokens.
Returns:
tuple(int, int): The number of green and red tokens.
"""
green_idx = Ship._enc_index["green_defense_tokens"]
red_idx = Ship._enc_index["red_defense_tokens"]
return self.encoding[green_idx + index], self.encoding[red_idx + index]
def ready_defense_tokens(self):
"""Replace all red tokens with green versions."""
with torch.no_grad():
# Add the red tokens to the green tokens and set red tokens to 0
green_idx = Ship._enc_index["green_defense_tokens"]
red_idx = Ship._enc_index["red_defense_tokens"]
token_len = Ship._enc_len['green_defense_tokens']
self.encoding[green_idx:green_idx + token_len] += self.encoding[red_idx:red_idx + token_len]
            self.encoding[red_idx:red_idx + token_len] = 0.
def spend_token(self, token_type, color_type):
"""Spend a token of the given type and color.
Args:
token_type (str): Token type to spend.
color_type (int): 0 for green, 1 for red
"""
red_idx = Ship._enc_index["red_defense_tokens"]
type_offset = ArmadaTypes.defense_tokens.index(token_type)
if 0 == color_type:
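            # A spent green token is flipped to its red (exhausted) side.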
green_idx = Ship._enc_index["green_defense_tokens"]
self.encoding[green_idx + type_offset] -= 1
self.encoding[red_idx + type_offset] += 1
else:
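            # A spent red token is discarded entirely.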
self.encoding[red_idx + type_offset] -= 1
def ready_upgrade_cards(self):
"""Unexhaust upgrade cards."""
# Not implemented yet
pass
def adjacent_zones(self, zone):
"""Return hull zones adjacent to the given zone."""
index = int(self.encoding[Ship._enc_index['size']].item())
size = ArmadaDimensions.size_names[index]
if size == 'huge':
if zone not in ArmadaTypes.adjacent_huge_hull_zones:
raise RuntimeError("Unrecognized hull zone {}".format(zone))
return ArmadaTypes.adjacent_huge_hull_zones[zone]
else:
if zone not in ArmadaTypes.adjacent_hull_zones:
raise RuntimeError("Unrecognized hull zone {}".format(zone))
return ArmadaTypes.adjacent_hull_zones[zone]
def get(self, name):
"""Get a value from the encoding.
Arguments:
name (str): Name of the encoding field.
Returns:
value (float): The value of the encoding with the given name.
"""
index, length = Ship.get_index(name)
if 1 == length:
return self.encoding[index].item()
else:
raise RuntimeError("Use Ship.get_range for multi-element data.")
def get_range(self, name):
"""Get a view of the encoding of a field with multiple elements.
Arguments:
name (str): Name of the encoding field.
Returns:
value (torch.Tensor): The tensor is a view of the original data, clone or convert to a
list to avoid modification.
"""
index, length = Ship.get_index(name)
if 1 == length:
raise RuntimeError("Use Ship.get for single element data.")
else:
return self.encoding[index:index + length]
def set(self, name, value):
"""Set a value in encoding.
Arguments:
name (str): Name of the encoding field.
value (numeric, List, or torch.Tensor): A value assignable to a tensor.
"""
vtype = type(value)
if vtype is not int and vtype is not float and vtype is not list and vtype is not torch.Tensor:
raise RuntimeError('Ship.set does not have data type "{}"'.format(vtype))
index, length = Ship.get_index(name)
if 1 == length:
self.encoding[index] = value
else:
if type(value) is int or type(value) is float:
raise RuntimeError("Attempt to assign a scalar value to an encoding range.")
# Convert a list to a tensor to assign a range
if type(value) is list:
self.encoding[index:index + length] = torch.tensor(value)
else:
self.encoding[index:index + length] = value
def set_range(self, name, value):
"""Set a range in the encoding to a value.
Arguments:
name (str): Name of the encoding field.
value (numeric): Value to set.
"""
vtype = type(value)
if vtype is not int and vtype is not float:
raise RuntimeError('Ship.set_range does not support data type "{}"'.format(vtype))
index, length = Ship.get_index(name)
self.encoding[index:index + length] = value
def reset(self):
"""Resets shields, hull, and defense tokens and initialize values in the encoding."""
self.set("damage", 0.)
self.set("speed", 0.)
self.set_range("commands", 0.)
# Set defense tokens, and shields
# Initialize all tokens as green
self.set('green_defense_tokens', self.get_range('defense_tokens'))
self.set_range('red_defense_tokens', 0.)
self.set('shields', self.get_range('max_shields'))
# Set a location off of the board. Lump each player's ships together.
self.set("location", [-1., self.get('player') * -1.])
self.set("heading", 0.)
def roll(self, zone, distance):
"""
return an attack roll for the given arc at the given range.
Args:
zone (str) : One of front, left, right, and rear
distance (str) : short, medium, or long
Returns an array of colors and faces
"""
colors = []
faces = []
# TODO Extreme range
# Roll red dice at all valid ranges
die_offset = Ship._enc_index['dice']
hull_offset = die_offset + ArmadaTypes.hull_zones.index(zone) * len(ArmadaDice.die_colors)
if distance in ["short", "medium", "long"]:
red_offset = ArmadaDice.die_colors.index("red")
num_dice = int(self.encoding[hull_offset + red_offset].item())
colors = colors + ["red"] * num_dice
# Roll blue dice at all short to medium
if distance in ["short", "medium"]:
blue_offset = ArmadaDice.die_colors.index("blue")
num_dice = int(self.encoding[hull_offset + blue_offset].item())
colors = colors + ["blue"] * num_dice
# Roll black dice at short range
if distance in ["short"]:
black_offset = ArmadaDice.die_colors.index("black")
num_dice = int(self.encoding[hull_offset + black_offset].item())
colors = colors + ["black"] * num_dice
# TODO FIXME Only gathering should happen in the ship, rolling should follow in a different
# area of code
for color in colors:
faces.append(ArmadaDice.random_roll(color))
return colors, faces
def shield_damage(self, zone, amount):
"""
Deal damage to a hull zone but only deplete the shields, don't assign hull damage. Return
the amount of damage that is in excess of the shields.
Args:
zone (str): One of ArmadaTypes.hull_zones
amount (int): Amount of damage
Returns:
(int): Amount of damage that will be assigned to the hull.
"""
damage = amount
if "hull" != zone:
shield_offset = Ship._enc_index['shields'] + ArmadaTypes.hull_zones.index(zone)
shields = int(self.encoding[shield_offset].item())
if shields >= damage:
shields -= damage
damage = 0
else:
damage -= shields
shields = 0
self.encoding[shield_offset] = shields
return damage
def damage(self, zone, amount):
"""
Deal damage to a hull zone.
Args:
zone (str): One of ArmadaTypes.hull_zones or "hull"
amount (int): Amount of damage
"""
damage = amount
if "hull" != zone:
shield_offset = Ship._enc_index['shields'] + ArmadaTypes.hull_zones.index(zone)
shields = int(self.encoding[shield_offset].item())
if shields >= damage:
shields -= damage
damage = 0
else:
damage -= shields
shields = 0
self.encoding[shield_offset] = shields
# TODO FIXME This would be the correct time to handle the standard critical (or XX-9)
self.set('damage', self.get('damage') + damage)
def hull(self):
hull_offset = Ship._enc_index['hull']
hull = int(self.encoding[hull_offset].item())
return hull
def damage_cards(self):
return int(self.get('damage'))
def stringify(self):
"""Return a string version of the ship."""
shield_offset = Ship._enc_index['shields']
shield_length = Ship._enc_len['shields']
shields = self.encoding[shield_offset:shield_offset + shield_length]
green_def_idx = Ship._enc_index['green_defense_tokens']
green_def_len = Ship._enc_len['green_defense_tokens']
green_tokens = self.encoding[green_def_idx:green_def_idx + green_def_len]
red_def_idx = Ship._enc_index['red_defense_tokens']
red_def_len = Ship._enc_len['red_defense_tokens']
red_tokens = self.encoding[red_def_idx:red_def_idx + red_def_len]
return str(
"{}: hull ({}/{}), shields {}, green defense tokens {}, red defense tokens {}".format(
self.name, self.hull()-self.damage_cards(), self.hull(), shields, green_tokens, red_tokens))
def __str__(self):
return self.stringify()
def __repr__(self):
return self.stringify()
def parseShips(filename):
""" Returns a list of ships."""
keys = {}
ship_templates = {}
with open(filename, newline='') as ships:
shipreader = csv.reader(ships, delimiter=',', quotechar='|')
rowcount = 0
for row in shipreader:
# parse the header first to find the column keys
if ( 0 == rowcount ):
count = 0
for key in row:
count = count + 1
keys[count] = key
else:
newship = {}
count = 0
# Fill in all of the information on this vessel
for key in row:
count = count + 1
newship[keys[count]] = key
# Create a new ship template
ship_templates[newship['Ship Name']] = newship
rowcount = rowcount + 1
ship_types = {}
for name, attributes in ship_templates.items():
ship_types[name] = ShipType(name, attributes)
#print("{}:".format(name))
#for a_name, a_value in attributes.items():
# print(" {} : {}".format(a_name, a_value))
return ship_types
|
[
"dice.ArmadaDice.random_roll",
"game_constants.ArmadaTypes.hull_zones.index",
"dice.ArmadaDice.die_colors.index",
"torch.cuda.is_available",
"game_constants.ArmadaTypes.defense_tokens.index",
"collections.OrderedDict",
"torch.no_grad",
"torch.tensor"
] |
[((5769, 5782), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5780, 5782), False, 'from collections import OrderedDict\n'), ((5807, 5820), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5818, 5820), False, 'from collections import OrderedDict\n'), ((10757, 10801), 'game_constants.ArmadaTypes.defense_tokens.index', 'ArmadaTypes.defense_tokens.index', (['token_type'], {}), '(token_type)\n', (10789, 10801), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((9999, 10014), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (10012, 10014), False, 'import torch\n'), ((15667, 15701), 'dice.ArmadaDice.die_colors.index', 'ArmadaDice.die_colors.index', (['"""red"""'], {}), "('red')\n", (15694, 15701), False, 'from dice import ArmadaDice\n'), ((15944, 15979), 'dice.ArmadaDice.die_colors.index', 'ArmadaDice.die_colors.index', (['"""blue"""'], {}), "('blue')\n", (15971, 15979), False, 'from dice import ArmadaDice\n'), ((16208, 16244), 'dice.ArmadaDice.die_colors.index', 'ArmadaDice.die_colors.index', (['"""black"""'], {}), "('black')\n", (16235, 16244), False, 'from dice import ArmadaDice\n'), ((13781, 13800), 'torch.tensor', 'torch.tensor', (['value'], {}), '(value)\n', (13793, 13800), False, 'import torch\n'), ((15526, 15560), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (15554, 15560), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((16550, 16579), 'dice.ArmadaDice.random_roll', 'ArmadaDice.random_roll', (['color'], {}), '(color)\n', (16572, 16579), False, 'from dice import ArmadaDice\n'), ((17147, 17181), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (17175, 17181), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((17820, 17854), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (17848, 17854), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((1933, 1958), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1956, 1958), False, 'import torch\n'), ((2717, 2756), 'game_constants.ArmadaTypes.defense_tokens.index', 'ArmadaTypes.defense_tokens.index', (['ttype'], {}), '(ttype)\n', (2749, 2756), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((4565, 4599), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (4593, 4599), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((3287, 3321), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (3315, 3321), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((3985, 4019), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (4013, 4019), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((5196, 5230), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (5224, 5230), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((3564, 3598), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', (3592, 3598), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n'), ((4176, 4210), 'game_constants.ArmadaTypes.hull_zones.index', 'ArmadaTypes.hull_zones.index', (['zone'], {}), '(zone)\n', 
(4204, 4210), False, 'from game_constants import ArmadaDimensions, ArmadaTypes\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('answerQuestionHash', models.CharField(max_length=40)),
('answerChoiceHash', models.CharField(max_length=40)),
('answerDateResponded', models.DateTimeField(verbose_name='Date Answered')),
('answerIPOfAnswerer', models.GenericIPAddressField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='AvaliableAnswers',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('avaliableAnswersQuestionHash', models.CharField(max_length=40)),
('avaliableAnswersText', models.CharField(max_length=200)),
('avaliableAnswersHash', models.CharField(max_length=40)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('questionText', models.TextField()),
('quesiionDate', models.DateTimeField(verbose_name='Question Date')),
('questionHash', models.CharField(max_length=40)),
],
options={
},
bases=(models.Model,),
),
]
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.AutoField",
"django.db.models.GenericIPAddressField",
"django.db.models.DateTimeField"
] |
[((298, 391), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'auto_created': '(True)', 'verbose_name': '"""ID"""', 'serialize': '(False)'}), "(primary_key=True, auto_created=True, verbose_name='ID',\n serialize=False)\n", (314, 391), False, 'from django.db import models, migrations\n'), ((429, 460), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (445, 460), False, 'from django.db import models, migrations\n'), ((500, 531), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (516, 531), False, 'from django.db import models, migrations\n'), ((574, 624), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""Date Answered"""'}), "(verbose_name='Date Answered')\n", (594, 624), False, 'from django.db import models, migrations\n'), ((666, 696), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {}), '()\n', (694, 696), False, 'from django.db import models, migrations\n'), ((910, 1003), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'auto_created': '(True)', 'verbose_name': '"""ID"""', 'serialize': '(False)'}), "(primary_key=True, auto_created=True, verbose_name='ID',\n serialize=False)\n", (926, 1003), False, 'from django.db import models, migrations\n'), ((1051, 1082), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (1067, 1082), False, 'from django.db import models, migrations\n'), ((1126, 1158), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1142, 1158), False, 'from django.db import models, migrations\n'), ((1202, 1233), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (1218, 1233), False, 'from django.db import models, migrations\n'), ((1439, 1532), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'auto_created': '(True)', 'verbose_name': '"""ID"""', 'serialize': '(False)'}), "(primary_key=True, auto_created=True, verbose_name='ID',\n serialize=False)\n", (1455, 1532), False, 'from django.db import models, migrations\n'), ((1564, 1582), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1580, 1582), False, 'from django.db import models, migrations\n'), ((1618, 1668), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""Question Date"""'}), "(verbose_name='Question Date')\n", (1638, 1668), False, 'from django.db import models, migrations\n'), ((1704, 1735), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (1720, 1735), False, 'from django.db import models, migrations\n')]
|
from micropython import const
from machine import Pin, I2C
from neopixel import NeoPixel
from time import sleep, ticks_ms, ticks_diff
import framebuf
import gc
import sh1106
# Wemos pins - for our and our users' convenience
D0 = const(16)
D1 = const(5)
D2 = const(4)
D3 = const(0)
D4 = const(2)
D5 = const(14)
D6 = const(12)
D7 = const(13)
D8 = const(15)
# I2C and screen
i2c = I2C(scl=Pin(D1), sda=Pin(D2), freq=400000) # I2C object on pins D1 an dD2
lcd = sh1106.SH1106_I2C(128, 64, i2c, None, 0x3c, rotate=180) # SH1106 display on I2C 0x3C, rotated
# screen init
lcd.sleep(False) # Turn on the display
lcd.fill(0) # Erase display
# Neopixel
numPixels = 1 # How many pixels are attached to the nugget? If just the built in display, put 1
pin = Pin(D8, Pin.OUT) # set GPIO15 to output to drive NeoPixels
def get_neopixels(count):
return NeoPixel(pin, count) # create NeoPixel driver on GPIO15 for all neopixels
# Button pins
down_p = Pin(D3, Pin.IN, Pin.PULL_UP) # down is green
up_p = Pin(D6, Pin.IN, Pin.PULL_UP) # up is red
left_p = Pin(D7, Pin.IN, Pin.PULL_UP) # left is blue
right_p = Pin(D5, Pin.IN, Pin.PULL_UP) # right is yellow
# Button wrapper code for usability
class Buttons():
debounce_time = 150 # milliseconds
def __init__(self, buttons, callbacks={}, aliases={}):
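        """buttons: name -> Pin map; callbacks: optional name -> callable run on press; aliases: color name -> direction name."""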
self.b = buttons
self.cb = callbacks
self.b_al = aliases
self.values = {name:False for name in buttons}
self.debounce = {name:0 for name in buttons}
def update(self):
for name, button in self.b.items():
new = not button.value() # inverting the pin here
old = self.values[name]
if new and not old:
# button just pressed, recording that
self.values[name] = True
# clearing debounce timer if it's set - we only debounce on release
self.debounce[name] = None
# executing the button callback if available
cb = self.cb.get(name, None)
if callable(cb):
cb()
elif old and not new:
# button is released
# we debounce only button release
# this is so that button presses are processed quicker
if not self.debounce[name]:
# starting debounce timer
self.debounce[name] = ticks_ms()
else:
if ticks_diff(ticks_ms(), self.debounce[name]) > self.debounce_time:
# button has been de-pressed for long enough
# accepting and moving on
self.values[name] = False
elif new:
# button still pressed
# just removing the debounce timer
# in case it's been activated by a bit of bounce on press
self.debounce[name] = None
else:
pass # button long-released
def __getattr__(self, attr):
# lets you get button value by direction - like `buttons.left`
if attr in self.b:
# return value
return self.values[attr]
# lets you get button value by color - like `buttons.blue`
elif attr in self.b_al:
return self.values[self.b_al[attr]]
buttons = Buttons({"down":down_p, "up":up_p, "left":left_p, "right":right_p},
                  aliases={"red":"up", "blue":"left", "yellow":"right", "green":"down"})
# Screen image decompression
def unpack(packed_data):
"""
Decompresses image data using a very simple algorithm described in 'pack'.
Returns a bytearray.
"""
i = 0 # index for the unpacked bytearray element that we're currently on
# checking the compression format version, for future compatibility in case this algo changes significantly
if packed_data[0] != 1:
print("Don't know how to decompress this image, format version:", packed_data[0])
return None
# pre-creating a bytearray of the length we need, initially filled with zeroes
# to avoid creating too many useless objects and wasting memory as we unpack
unpacked_data = bytearray(packed_data[1])
for element in packed_data[2:]: # need to skip two elements - version and length
if isinstance(element, int): # just an int, simply putting it into the bytearray
unpacked_data[i] = element
i += 1
else:
value, count = element
if value == 0: # small optimization
# skipping zero-filling since bytearrays are pre-filled with zeroes
i += count
else:
for _ in range(count):
unpacked_data[i] = value
i += 1
return unpacked_data
# Showing compressed images
def show_compressed(packed_data, fb_width=124, fb_height=64):
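    """Unpacks a compressed image into a framebuffer and draws it on the LCD."""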
data = unpack(packed_data)
fb = framebuf.FrameBuffer(data, fb_width, fb_height, framebuf.MONO_VLSB)
lcd.fill(0)
lcd.blit(fb, 0, 0)
lcd.show()
del data
del fb
gc.collect()
cutie_c = [1, 992, [0, 253], 192, [224, 2], 112, 56, 60, [28, 2], [14, 9], [28, 2], 56, 120, 240, 224, 192, 128, [0, 63], 192, 224, 240, 120, 60, 28, [14, 3], [7, 7], 6, [14, 2], 28, 60, 56, 240, 224, 192, 128, [0, 7], 240, 252, 255, 7, 1, [0, 11], 32, [248, 2], [252, 2], 248, 112, [0, 2], 1, 3, 31, 254, 248, 128, [0, 57], 224, 252, 255, 7, 1, [0, 12], 120, [252, 4], 120, [0, 3], 1, 7, 255, 254, 240, [0, 5], 15, 127, 255, 224, 128, [0, 13], [1, 3], [0, 5], 192, 240, 255, 63, 1, [0, 26], 112, [248, 7], 112, [0, 22], 3, 31, 127, 240, 192, 128, [0, 19], 128, 192, 240, 255, 63, 7, [0, 7], 1, 3, 7, 15, 30, 60, 56, 48, [112, 2], [96, 2], [224, 3], 96, [112, 3], [56, 2], 28, 14, 15, 7, 1, [0, 21], 24, 120, 240, 192, 128, [0, 5], 1, 3, 255, 3, 1, [0, 5], 128, 192, 240, 120, 24, [0, 17], 1, 3, 7, 15, 30, 28, [56, 3], [112, 7], 48, [56, 2], 28, 30, 14, 7, 3, 1, [0, 60], 1, 3, 7, 6, 14, [12, 4], 15, 12, 8, [12, 2], [6, 2], [3, 2], 1, [0, 175]]
dead_c = [1, 992, [0, 137], 1, 3, 15, 31, 127, 255, 254, 248, 240, 192, 128, [0, 4], 128, 192, 240, 252, 254, 255, 63, 31, 7, 3, [0, 50], 1, 3, 15, 31, 127, 255, 254, 248, 240, 192, 128, [0, 4], 128, 192, 240, 252, 254, 255, 63, 31, 7, 3, [0, 30], 3, 7, 31, 191, 255, 254, [248, 2], 254, 255, 31, 15, 7, 1, [0, 61], 3, 7, 31, 191, 255, 254, [248, 2], 254, 255, 31, 15, 7, 1, [0, 33], 128, 224, 240, 252, 254, 127, 31, 15, 3, 7, 31, 127, 255, 254, 248, 240, 192, 128, [0, 57], 128, 224, 240, 252, 254, 127, 31, 15, 3, 7, 31, 127, 255, 254, 248, 240, 192, 128, [0, 27], 32, 56, 60, [63, 3], 15, 3, 1, [0, 8], 3, 15, 31, [63, 2], 62, 60, 48, 32, [0, 20], 112, [248, 7], 112, [0, 20], 32, 56, 60, [63, 3], 15, 3, 1, [0, 8], 3, 15, 31, [63, 2], 62, 60, 48, 32, [0, 61], 24, 120, 240, 192, 128, [0, 5], 1, 3, 255, 3, 1, [0, 5], 128, 192, 240, 120, 24, [0, 102], 1, 3, 7, 6, 14, [12, 4], 15, 12, 8, [12, 2], [6, 2], [3, 2], 1, [0, 175]]
nyaa_c = [1, 992, [0, 270], 128, 224, [248, 3], 224, 192, [0, 68], 128, 224, [248, 3], 224, 192, [0, 37], 128, 224, 240, 252, 254, 127, 31, 15, 3, 7, 31, 127, 255, 254, 248, 240, 192, 128, [0, 57], 128, 224, 240, 252, 254, 127, 31, 15, 3, 7, 31, 127, 255, 254, 248, 240, 192, 128, [0, 27], 32, 56, 60, [63, 3], 15, 3, 1, [0, 8], 3, 15, 31, [63, 2], 62, 60, 48, 32, [0, 20], 112, [248, 7], 112, [0, 20], 32, 56, 60, [63, 3], 15, 3, 1, [0, 8], 3, 15, 31, [63, 2], 62, 60, 48, 32, [0, 61], 24, 120, 240, 192, 128, [0, 5], 1, 3, 255, 3, 1, [0, 5], 128, 192, 240, 120, 24, [0, 102], 1, 3, 7, 6, 14, [12, 4], 15, 12, 8, [12, 2], [6, 2], [3, 2], 1, [0, 175]]
|
[
"framebuf.FrameBuffer",
"time.ticks_ms",
"sh1106.SH1106_I2C",
"gc.collect",
"neopixel.NeoPixel",
"machine.Pin"
] |
[((432, 485), 'sh1106.SH1106_I2C', 'sh1106.SH1106_I2C', (['(128)', '(64)', 'i2c', 'None', '(60)'], {'rotate': '(180)'}), '(128, 64, i2c, None, 60, rotate=180)\n', (449, 485), False, 'import sh1106\n'), ((726, 742), 'machine.Pin', 'Pin', (['D8', 'Pin.OUT'], {}), '(D8, Pin.OUT)\n', (729, 742), False, 'from machine import Pin, I2C\n'), ((927, 955), 'machine.Pin', 'Pin', (['D3', 'Pin.IN', 'Pin.PULL_UP'], {}), '(D3, Pin.IN, Pin.PULL_UP)\n', (930, 955), False, 'from machine import Pin, I2C\n'), ((983, 1011), 'machine.Pin', 'Pin', (['D6', 'Pin.IN', 'Pin.PULL_UP'], {}), '(D6, Pin.IN, Pin.PULL_UP)\n', (986, 1011), False, 'from machine import Pin, I2C\n'), ((1035, 1063), 'machine.Pin', 'Pin', (['D7', 'Pin.IN', 'Pin.PULL_UP'], {}), '(D7, Pin.IN, Pin.PULL_UP)\n', (1038, 1063), False, 'from machine import Pin, I2C\n'), ((1090, 1118), 'machine.Pin', 'Pin', (['D5', 'Pin.IN', 'Pin.PULL_UP'], {}), '(D5, Pin.IN, Pin.PULL_UP)\n', (1093, 1118), False, 'from machine import Pin, I2C\n'), ((825, 845), 'neopixel.NeoPixel', 'NeoPixel', (['pin', 'count'], {}), '(pin, count)\n', (833, 845), False, 'from neopixel import NeoPixel\n'), ((4919, 4986), 'framebuf.FrameBuffer', 'framebuf.FrameBuffer', (['data', 'fb_width', 'fb_height', 'framebuf.MONO_VLSB'], {}), '(data, fb_width, fb_height, framebuf.MONO_VLSB)\n', (4939, 4986), False, 'import framebuf\n'), ((5069, 5081), 'gc.collect', 'gc.collect', ([], {}), '()\n', (5079, 5081), False, 'import gc\n'), ((360, 367), 'machine.Pin', 'Pin', (['D1'], {}), '(D1)\n', (363, 367), False, 'from machine import Pin, I2C\n'), ((373, 380), 'machine.Pin', 'Pin', (['D2'], {}), '(D2)\n', (376, 380), False, 'from machine import Pin, I2C\n'), ((2388, 2398), 'time.ticks_ms', 'ticks_ms', ([], {}), '()\n', (2396, 2398), False, 'from time import sleep, ticks_ms, ticks_diff\n'), ((2455, 2465), 'time.ticks_ms', 'ticks_ms', ([], {}), '()\n', (2463, 2465), False, 'from time import sleep, ticks_ms, ticks_diff\n')]
|
from olc_webportalv2.data import views
from django.conf.urls import url, include
from django.utils.translation import gettext_lazy as _
urlpatterns = [
url(_(r'^data_home/'), views.data_home, name='data_home'),
url(_(r'^raw_data/'), views.raw_data, name='raw_data'),
url(_(r'^assembled_data/'), views.assembled_data, name='assembled_data'),
url(_(r'^data_download/(?P<data_request_pk>\d+)/$'), views.data_download, name='data_download'),
]
|
[
"django.utils.translation.gettext_lazy"
] |
[((161, 177), 'django.utils.translation.gettext_lazy', '_', (['"""^data_home/"""'], {}), "('^data_home/')\n", (162, 177), True, 'from django.utils.translation import gettext_lazy as _\n'), ((224, 239), 'django.utils.translation.gettext_lazy', '_', (['"""^raw_data/"""'], {}), "('^raw_data/')\n", (225, 239), True, 'from django.utils.translation import gettext_lazy as _\n'), ((284, 305), 'django.utils.translation.gettext_lazy', '_', (['"""^assembled_data/"""'], {}), "('^assembled_data/')\n", (285, 305), True, 'from django.utils.translation import gettext_lazy as _\n'), ((362, 409), 'django.utils.translation.gettext_lazy', '_', (['"""^data_download/(?P<data_request_pk>\\\\d+)/$"""'], {}), "('^data_download/(?P<data_request_pk>\\\\d+)/$')\n", (363, 409), True, 'from django.utils.translation import gettext_lazy as _\n')]
|
#!/usr/bin/env python3
import inspect
import logging
from typing import Any, Mapping, Sequence, Union
from functools import reduce
from schematic import CONFIG
from schematic.exceptions import (
MissingConfigValueError,
MissingConfigAndArgumentValueError,
)
logger = logging.getLogger(__name__)
def query_dict(dictionary: Mapping[Any, Any], keys: Sequence[Any]) -> Union[Any, None]:
"""Access a nested value in a dictionary corresponding
to a series of keys.
Args:
dictionary: A dictionary containing anything.
keys: A sequence of values corresponding to keys
in `dictionary`
Returns:
The nested value corresponding to the given series
        of keys, or `None` if such a value doesn't exist.
"""
def extract(dictionary: Any, key: Any) -> Union[Any, None]:
"""Get value associated with key, defaulting to None."""
if dictionary is None or not isinstance(dictionary, dict):
return None
return dictionary.get(key)
return reduce(extract, keys, dictionary)
def get_from_config(
dictionary: Mapping[Any, Any], keys: Sequence[Any]
) -> Union[Any, None]:
"""Access a nested configuration value from a yaml
configuration file.
Args:
dictionary: A dictionary containing anything.
keys: A sequence of values corresponding to keys
in `dictionary`.
Returns:
The nested value corresponding to the given series.
Raises:
MissingConfigValueError: When configuration value not
found in config.yml file for given key.
"""
# get configuration value from config file
config_value = query_dict(dictionary, keys)
# if configuration value not present then raise Exception
if config_value is None:
raise MissingConfigValueError(keys)
config_keys_str = " > ".join(keys)
logger.info(
f"The ({config_keys_str}) argument with value "
f"'{config_value}' is being read from the config file."
)
return config_value
def fill_in_from_config(
arg_name: str, arg_value: Any, config_keys: Sequence[Any], allow_none: bool = False
) -> Any:
"""Fill in a missing value from a configuration object.
Args:
arg_name: Name of the argument. Used for logging.
config_keys: List of keys used to access a nested
value in `config` corresponding to `arg_name`.
arg_value: Value of the argument provided at the
command line.
allow_none: Return None if argument value and
configuration value are both None (rather
than raising an error).
Returns:
The argument value, either from the calling context
or the corresponding field in the configuration.
Raises:
AssertionError: If both the argument value and the
configuration object are `None`.
"""
# Avoid accessing config if argument value is provided
if arg_value is not None:
return arg_value
# raise Exception if both, configuration value not present
# in config file and CLI argument value is missing
try:
config_value = get_from_config(CONFIG.DATA, config_keys)
except MissingConfigValueError:
if allow_none:
return None
raise MissingConfigAndArgumentValueError(arg_name, config_keys)
    # Log that the argument value is being taken from the configuration file
config_keys_str = " > ".join(config_keys)
logger.info(
f"The '--{arg_name}' argument is being taken from configuration "
f"file ({config_keys_str}), i.e., '{config_value}'."
)
return config_value
|
[
"functools.reduce",
"schematic.exceptions.MissingConfigValueError",
"schematic.exceptions.MissingConfigAndArgumentValueError",
"logging.getLogger"
] |
[((279, 306), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (296, 306), False, 'import logging\n'), ((1038, 1071), 'functools.reduce', 'reduce', (['extract', 'keys', 'dictionary'], {}), '(extract, keys, dictionary)\n', (1044, 1071), False, 'from functools import reduce\n'), ((1813, 1842), 'schematic.exceptions.MissingConfigValueError', 'MissingConfigValueError', (['keys'], {}), '(keys)\n', (1836, 1842), False, 'from schematic.exceptions import MissingConfigValueError, MissingConfigAndArgumentValueError\n'), ((3311, 3368), 'schematic.exceptions.MissingConfigAndArgumentValueError', 'MissingConfigAndArgumentValueError', (['arg_name', 'config_keys'], {}), '(arg_name, config_keys)\n', (3345, 3368), False, 'from schematic.exceptions import MissingConfigValueError, MissingConfigAndArgumentValueError\n')]
|
import datetime as dt
from django.db import models
from cloudinary.models import CloudinaryField
class photos(models.Model):
# title field
title = models.CharField(max_length=100)
#image field
image = CloudinaryField('image')
|
[
"django.db.models.CharField",
"cloudinary.models.CloudinaryField"
] |
[((157, 189), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (173, 189), False, 'from django.db import models\n'), ((219, 243), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""image"""'], {}), "('image')\n", (234, 243), False, 'from cloudinary.models import CloudinaryField\n')]
|
# coding: utf-8
"""
mzTab-M reference implementation and validation API.
This is the mzTab-M reference implementation and validation API service. # noqa: E501
OpenAPI spec version: 2.0.0
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from mztab_m_swagger_client.models.parameter import Parameter # noqa: F401,E501
class ColumnParameterMapping(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'column_name': 'str',
'param': 'Parameter'
}
attribute_map = {
'column_name': 'column_name',
'param': 'param'
}
def __init__(self, column_name=None, param=None): # noqa: E501
"""ColumnParameterMapping - a model defined in Swagger""" # noqa: E501
self._column_name = None
self._param = None
self.discriminator = None
self.column_name = column_name
self.param = param
@property
def column_name(self):
"""Gets the column_name of this ColumnParameterMapping. # noqa: E501
The fully qualified target column name. # noqa: E501
:return: The column_name of this ColumnParameterMapping. # noqa: E501
:rtype: str
"""
return self._column_name
@column_name.setter
def column_name(self, column_name):
"""Sets the column_name of this ColumnParameterMapping.
The fully qualified target column name. # noqa: E501
:param column_name: The column_name of this ColumnParameterMapping. # noqa: E501
:type: str
"""
if column_name is None:
raise ValueError("Invalid value for `column_name`, must not be `None`") # noqa: E501
self._column_name = column_name
@property
def param(self):
"""Gets the param of this ColumnParameterMapping. # noqa: E501
The parameter specifying the unit. # noqa: E501
:return: The param of this ColumnParameterMapping. # noqa: E501
:rtype: Parameter
"""
return self._param
@param.setter
def param(self, param):
"""Sets the param of this ColumnParameterMapping.
The parameter specifying the unit. # noqa: E501
:param param: The param of this ColumnParameterMapping. # noqa: E501
:type: Parameter
"""
if param is None:
raise ValueError("Invalid value for `param`, must not be `None`") # noqa: E501
self._param = param
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ColumnParameterMapping, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ColumnParameterMapping):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"six.iteritems"
] |
[((3023, 3056), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (3036, 3056), False, 'import six\n')]
|
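A usage sketch of the generated model above. The column name and the plain-string stand-in for a Parameter instance are hypothetical; the model only rejects None, so a string suffices for illustration:

# Hypothetical usage of the generated ColumnParameterMapping model.
# A real `param` would be a Parameter instance; a string stands in here
# only because the setters do no type-checking beyond rejecting None.
mapping = ColumnParameterMapping(column_name='opt_global_mass_error', param='placeholder-parameter')
print(mapping.to_dict())  # {'column_name': 'opt_global_mass_error', 'param': 'placeholder-parameter'}
print(mapping == ColumnParameterMapping(column_name='opt_global_mass_error', param='placeholder-parameter'))  # True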
from bsc.config.helper import get_key_from_file
ADDRESS = get_key_from_file('address.json')
API_KEY = get_key_from_file('api_key.json')
|
[
"bsc.config.helper.get_key_from_file"
] |
[((59, 92), 'bsc.config.helper.get_key_from_file', 'get_key_from_file', (['"""address.json"""'], {}), "('address.json')\n", (76, 92), False, 'from bsc.config.helper import get_key_from_file\n'), ((103, 136), 'bsc.config.helper.get_key_from_file', 'get_key_from_file', (['"""api_key.json"""'], {}), "('api_key.json')\n", (120, 136), False, 'from bsc.config.helper import get_key_from_file\n')]
|
import json
from src.dfa import DFA
from src.state import State
def get_state_by_label(label, states):
for state in states:
if state.label == label:
return state
def from_json(filename):
d = DFA()
d.states = set()
d.initial_state = None
with open(filename, 'r') as r_file:
dfa_data = json.load(r_file)
d.set_alphabet(set(dfa_data["alphabet"]))
states = dfa_data["states"]
for label, state_data in states.items():
s = State(label, state_data["accepting"], state_data["initial"])
d.add_state(s)
if s.initial:
d.initial_state = s
for label, state_data in states.items():
from_state = get_state_by_label(label, d.states)
# need to iter twice to create transitions - better way might exist
for transition_label, to_state_label in state_data["outgoing"].items():
to_state = get_state_by_label(to_state_label, d.states)
d.add_edge(from_state, to_state, transition_label)
return d
|
[
"json.load",
"src.dfa.DFA",
"src.state.State"
] |
[((223, 228), 'src.dfa.DFA', 'DFA', ([], {}), '()\n', (226, 228), False, 'from src.dfa import DFA\n'), ((336, 353), 'json.load', 'json.load', (['r_file'], {}), '(r_file)\n', (345, 353), False, 'import json\n'), ((490, 550), 'src.state.State', 'State', (['label', "state_data['accepting']", "state_data['initial']"], {}), "(label, state_data['accepting'], state_data['initial'])\n", (495, 550), False, 'from src.state import State\n')]
|
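For reference, a minimal sketch of the JSON shape that from_json above consumes; the file name and state labels are illustrative, not taken from the repository:

# dfa.json -- illustrative input for from_json above
# {
#     "alphabet": ["0", "1"],
#     "states": {
#         "q0": {"accepting": false, "initial": true,  "outgoing": {"0": "q0", "1": "q1"}},
#         "q1": {"accepting": true,  "initial": false, "outgoing": {"0": "q1", "1": "q0"}}
#     }
# }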
import random
def coinToss():
    # input() returns a string in Python 3; cast before using it as a range bound
    number = int(input("Number of times to flip coin: "))
    recordList = []
    heads = 0
    tails = 0
    for amount in range(number):
        flip = random.randint(0, 1)
        if (flip == 0):
            print("Heads")
            recordList.append("Heads")
            heads += 1
        else:
            print("Tails")
            recordList.append("Tails")
            tails += 1
    print(str(recordList))
    print("Heads: " + str(heads) + " Tails: " + str(tails))
|
[
"random.randint"
] |
[((180, 200), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (194, 200), False, 'import random\n')]
|
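For comparison, the same simulation tallied with the standard library's Counter; the function name is illustrative:

import random
from collections import Counter

def coin_toss_counts(n):
    # tally n fair flips without printing each one
    return Counter("Heads" if random.randint(0, 1) == 0 else "Tails" for _ in range(n))

print(coin_toss_counts(100))  # e.g. Counter({'Tails': 53, 'Heads': 47})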
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.AP_list, name='AP_list'),
]
|
[
"django.conf.urls.url"
] |
[((74, 114), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.AP_list'], {'name': '"""AP_list"""'}), "('^$', views.AP_list, name='AP_list')\n", (77, 114), False, 'from django.conf.urls import url\n')]
|
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
"""
Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the Apache License Version 2.0.You may not use
this file except in compliance with the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Apache License for more details at
http://www.apache.org/licenses/LICENSE-2.0
AMCT_CAFFE sample of accuracy_based_auto_calibration based on MobileNet V2
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import argparse
from pathlib import Path
import numpy as np
import cv2 # pylint: disable=E0401
import datasets
MODEL_INPUT_BLOB_NAME = 'data'
MODEL_OUTPUT_BLOB_NAME = 'prob'
PATH = os.path.split(os.path.realpath(__file__))[0]
PATH = os.path.realpath(os.path.join(PATH, '..'))
TMP = os.path.join(PATH, 'tmp')
RESULT = os.path.join(PATH, 'results')
BATCH_SIZE = 32
SCALE = 0.017
CROP_SIZE = 224
MEAN_FILE = None
MEAN_VALUE = [103.94, 116.78, 123.68]
DATA_DIR = os.path.join(PATH, 'data/images')
LABEL_FILE = os.path.join(DATA_DIR, 'image_label.txt')
# Need to specify the dir of caffe and dataset (ImageNet)
CAFFE_DIR = ''
LMDB_DATASET_DIR = ''
CALIBRATION_BATCH_NUM = 2
def parse_args():
"""Parse input arguments."""
parser = argparse.ArgumentParser(description='Mobilenet_v2 demo')
parser.add_argument('--model_file', dest='model_file',
help='Specify the model file of caffe model.',
default='./model/mobilenet_v2_deploy.prototxt',
type=str)
parser.add_argument('--weights_file', dest='weights_file',
help='Specify the weights file of caffe model.',
default="./model/mobilenet_v2.caffemodel",
type=str)
parser.add_argument('--gpu', dest='gpu_id', help='GPU device id to use [0]',
default=None, type=int)
parser.add_argument('--iterations', dest='iterations',
help='Specify iterations of test',
default=1000, type=int)
parser.add_argument('--caffe_dir', dest='caffe_dir',
help='Specify the dir of caffe',
default=CAFFE_DIR,
type=str)
parser.add_argument('--pre_test', dest='pre_test',
help='Do test with amct caffe calibration or not',
action='store_false')
parser.add_argument('--dataset',
dest='dataset',
help='The path of benchmark dataset.',
default=LMDB_DATASET_DIR,
type=str)
args = parser.parse_args()
return args
def args_check(args):
"""check args"""
# --model_file
if args.model_file is None:
raise RuntimeError('Must specify a caffe deploy prototxt file')
model_file = os.path.realpath(args.model_file)
if not Path(model_file).exists():
raise RuntimeError('Must specify a caffe deploy prototxt file')
# --weights_file
if args.weights_file is None:
raise RuntimeError('Must specify a caffe caffemodel file')
weights_file = os.path.realpath(args.weights_file)
if not Path(weights_file).exists():
raise RuntimeError('Must specify a caffe caffemodel file')
# --iterations
if args.iterations > 1500:
raise RuntimeError('Max iterations on sample dataset is 1500')
def args_check_caffe_dir(args):
"""check args of caffe dir"""
if args.caffe_dir is None:
raise RuntimeError('Must specify a caffe framework dir')
caffe_dir = os.path.realpath(args.caffe_dir)
if not Path(caffe_dir).exists():
raise RuntimeError('Must specify a caffe framework dir')
caffe_exec_bin = os.path.join(caffe_dir, 'build/tools/caffe')
if not Path(caffe_exec_bin).exists():
raise RuntimeError('Must make caffe before execute demo')
pycaffe_file = os.path.join(caffe_dir, 'python/caffe/pycaffe.py')
if not Path(pycaffe_file).exists():
raise RuntimeError('Must make pycaffe before execute demo')
def add_path(path):
"""Add path to env"""
if path not in sys.path:
sys.path.insert(0, path)
QUANT_ARGS = parse_args()
args_check(QUANT_ARGS)
args_check_caffe_dir(QUANT_ARGS)
add_path(os.path.join(QUANT_ARGS.caffe_dir, 'python'))
import caffe # pylint: disable=E0401, C0413
import amct_caffe as amct # pylint: disable=E0401, C0413
from amct_caffe.common.auto_calibration import \
AutoCalibrationEvaluatorBase # pylint: disable=E0401, C0413
def get_blobs_from_im(data_dir, imgs, batch_size):
"""Read image files to blobs [3, 256, 256]"""
if batch_size != len(imgs):
raise RuntimeError('batch_size:{} != len(imgs):{}'.format(
batch_size, len(imgs)))
blobs_data = np.zeros((batch_size, 3, 256, 256), np.uint8)
for index in range(batch_size):
im_file = os.path.join(data_dir, imgs[index])
im_data = cv2.imread(im_file)
im_data = cv2.resize(
im_data, (256, 256), interpolation=cv2.INTER_CUBIC)
im_data = im_data.swapaxes(0, 2)
im_data = im_data.swapaxes(1, 2)
blobs_data[index, :, :, :] = im_data
return blobs_data
def get_labels_from_txt():
"""Read all images' name and label from label_file"""
images = []
labels = []
with open(LABEL_FILE, 'r') as label_file:
lines = label_file.readlines()
for line in lines:
images.append(line.split(' ')[0])
labels.append(int(line.split(' ')[1]))
return images, labels
def img_preprocess(blobs_data, mean_value, crop_size):
"""Do image data pre-process"""
# crop image[height, width] to [crop_size, crop_size]
height = blobs_data.shape[2]
width = blobs_data.shape[3]
h_off = int((height - crop_size) / 2)
w_off = int((width - crop_size) / 2)
crop_data = blobs_data[:, :, h_off:(height - h_off), w_off:(width - w_off)]
# trans uint8 image data to float
crop_data = crop_data.astype(np.float32, copy=False)
# do channel-wise reduce mean value
for channel in range(crop_data.shape[1]):
crop_data[:, channel, :, :] -= mean_value[channel]
# mutiply the scale value
crop_data *= SCALE
return crop_data
def img_postprocess(probs, labels):
"""Do image post-process"""
# calculate top1 and top5 accuracy
top1_get = 0
top5_get = 0
if len(probs.shape) == 4:
probs = probs.reshape((probs.shape[0], probs.shape[1]))
prob_size = probs.shape[1]
for index, label in enumerate(labels):
top5_record = (probs[index, :].argsort())[prob_size - 5:prob_size]
if label == top5_record[-1]:
top1_get += 1
top5_get += 1
elif label in top5_record:
top5_get += 1
return float(top1_get) / len(labels), float(top5_get) / len(labels)
def run_caffe_model(model_file, weights_file, iterations):
"""run caffe model forward"""
net = caffe.Net(model_file, weights_file, caffe.TEST)
top1_total = 0
top5_total = 0
images, labels = get_labels_from_txt()
for iter_num in range(iterations):
blobs_data = get_blobs_from_im(
DATA_DIR,
images[iter_num * BATCH_SIZE:(iter_num + 1) * BATCH_SIZE],
BATCH_SIZE)
blobs_data = img_preprocess(blobs_data, [104, 117, 123], 224)
forward_kwargs = {MODEL_INPUT_BLOB_NAME: blobs_data}
blobs_out = net.forward(**forward_kwargs)
top1, top5 = img_postprocess(
blobs_out[MODEL_OUTPUT_BLOB_NAME],
labels[iter_num * BATCH_SIZE:(iter_num + 1) * BATCH_SIZE])
top1_total += top1
top5_total += top5
print('****************iteration:{}*****************'.format(iter_num))
print('top1_acc:{}'.format(top1))
print('top5_acc:{}'.format(top5))
print('******final top1:{}'.format(top1_total / iterations))
print('******final top5:{}'.format(top5_total / iterations))
def do_benchmark_test(args, model_file, weights_file, iterations=1000):
""" Calc the accuracy on the lmdb dataset"""
net = caffe.Net(model_file, weights_file, caffe.TEST)
top1_total = 0
top5_total = 0
lmdb_data = datasets.LMDBData(args.dataset)
lmdb_data.set_scale(SCALE)
lmdb_data.set_crop_size(CROP_SIZE)
if MEAN_FILE is not None:
lmdb_data.set_mean_file(MEAN_FILE)
else:
lmdb_data.set_mean_value(MEAN_VALUE)
for index in range(iterations):
data, labels = lmdb_data.get_blobs(BATCH_SIZE)
forward_kwargs = {MODEL_INPUT_BLOB_NAME: data}
blobs_out = net.forward(**forward_kwargs)
top1, top5 = img_postprocess(blobs_out[MODEL_OUTPUT_BLOB_NAME], labels)
top1_total += top1
top5_total += top5
print('*****************iteration:{}******************'.format(index))
print('top1_acc:{}'.format(top1))
print('top5_acc:{}'.format(top5))
print('******final top1:{}'.format(top1_total / iterations))
print('******final top5:{}'.format(top5_total / iterations))
return top1_total / iterations
class AutoCalibrationEvaluator(AutoCalibrationEvaluatorBase):
"""auto calibration evaluator"""
def __init__(self, target_loss, batch_num, args):
"""
evaluate_batch_num is the needed batch num for evaluating
the model. Larger evaluate_batch_num is recommended, because
the evaluation metric of input model can be more precise
with larger eval dataset.
"""
self.target_loss = target_loss
self.batch_num = batch_num
self.args = args
super().__init__()
def calibration(self, model_file, weights_file):
""""
Function:
do the calibration with model
Parameter:
model_file: the prototxt model define file of caffe model
weights_file: the binary caffemodel file of caffe model
"""
run_caffe_model(model_file, weights_file, self.batch_num)
def evaluate(self, model_file, weights_file):
""""
Function:
evaluate the model with batch_num of data, return the eval
metric of the input model, such as top1 for classification
model, mAP for detection model and so on.
Parameter:
model_file: the prototxt model define file of caffe model
weights_file: the binary caffemodel file of caffe model
"""
return do_benchmark_test(self.args, model_file, weights_file,
self.args.iterations)
def metric_eval(self, original_metric, new_metric):
"""
Function:
whether the metric of new fake quant model can satisfy the
requirement
Parameter:
original_metric: the metric of non quantized model
new_metric: the metric of new quantized model
"""
# the loss of top1 acc need to be less than 0.2%
loss = original_metric - new_metric
if loss * 100 < self.target_loss:
return True, loss
return False, loss
def main(args):
"""main function"""
if args.gpu_id is not None:
caffe.set_mode_gpu()
caffe.set_device(args.gpu_id)
amct.set_gpu_mode()
else:
caffe.set_mode_cpu()
# User model files
model_file = os.path.realpath(args.model_file)
weights_file = os.path.realpath(args.weights_file)
# Run pre model test
if not args.pre_test:
do_benchmark_test(args, model_file, weights_file, args.iterations)
print('[AMCT][INFO]Run Mobilenet_v2 without quantize success!')
return
# step 1: create the quant config file
config_json_file = './config.json'
skip_layers = []
batch_num = CALIBRATION_BATCH_NUM
activation_offset = True
amct.create_quant_config(config_json_file, model_file, weights_file,
skip_layers, batch_num, activation_offset)
scale_offset_record_file = os.path.join(TMP, 'scale_offset_record.txt')
result_path = os.path.join(RESULT, 'MobileNetV2')
evaluator = AutoCalibrationEvaluator(target_loss=0.2, batch_num=batch_num,
args=args)
# step 2: start the accuracy_based_auto_calibration process
amct.accuracy_based_auto_calibration(
args.model_file,
args.weights_file,
evaluator,
config_json_file,
scale_offset_record_file,
result_path)
if __name__ == '__main__':
main(QUANT_ARGS)
|
[
"caffe.set_mode_gpu",
"argparse.ArgumentParser",
"amct_caffe.set_gpu_mode",
"amct_caffe.create_quant_config",
"os.path.realpath",
"numpy.zeros",
"sys.path.insert",
"caffe.set_mode_cpu",
"amct_caffe.accuracy_based_auto_calibration",
"cv2.imread",
"caffe.set_device",
"pathlib.Path",
"datasets.LMDBData",
"caffe.Net",
"os.path.join",
"cv2.resize"
] |
[((1070, 1095), 'os.path.join', 'os.path.join', (['PATH', '"""tmp"""'], {}), "(PATH, 'tmp')\n", (1082, 1095), False, 'import os\n'), ((1105, 1134), 'os.path.join', 'os.path.join', (['PATH', '"""results"""'], {}), "(PATH, 'results')\n", (1117, 1134), False, 'import os\n'), ((1248, 1281), 'os.path.join', 'os.path.join', (['PATH', '"""data/images"""'], {}), "(PATH, 'data/images')\n", (1260, 1281), False, 'import os\n'), ((1295, 1336), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""image_label.txt"""'], {}), "(DATA_DIR, 'image_label.txt')\n", (1307, 1336), False, 'import os\n'), ((1038, 1062), 'os.path.join', 'os.path.join', (['PATH', '""".."""'], {}), "(PATH, '..')\n", (1050, 1062), False, 'import os\n'), ((1525, 1581), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Mobilenet_v2 demo"""'}), "(description='Mobilenet_v2 demo')\n", (1548, 1581), False, 'import argparse\n'), ((3173, 3206), 'os.path.realpath', 'os.path.realpath', (['args.model_file'], {}), '(args.model_file)\n', (3189, 3206), False, 'import os\n'), ((3458, 3493), 'os.path.realpath', 'os.path.realpath', (['args.weights_file'], {}), '(args.weights_file)\n', (3474, 3493), False, 'import os\n'), ((3902, 3934), 'os.path.realpath', 'os.path.realpath', (['args.caffe_dir'], {}), '(args.caffe_dir)\n', (3918, 3934), False, 'import os\n'), ((4058, 4102), 'os.path.join', 'os.path.join', (['caffe_dir', '"""build/tools/caffe"""'], {}), "(caffe_dir, 'build/tools/caffe')\n", (4070, 4102), False, 'import os\n'), ((4230, 4280), 'os.path.join', 'os.path.join', (['caffe_dir', '"""python/caffe/pycaffe.py"""'], {}), "(caffe_dir, 'python/caffe/pycaffe.py')\n", (4242, 4280), False, 'import os\n'), ((4592, 4636), 'os.path.join', 'os.path.join', (['QUANT_ARGS.caffe_dir', '"""python"""'], {}), "(QUANT_ARGS.caffe_dir, 'python')\n", (4604, 4636), False, 'import os\n'), ((5111, 5156), 'numpy.zeros', 'np.zeros', (['(batch_size, 3, 256, 256)', 'np.uint8'], {}), '((batch_size, 3, 256, 256), np.uint8)\n', (5119, 5156), True, 'import numpy as np\n'), ((7289, 7336), 'caffe.Net', 'caffe.Net', (['model_file', 'weights_file', 'caffe.TEST'], {}), '(model_file, weights_file, caffe.TEST)\n', (7298, 7336), False, 'import caffe\n'), ((8433, 8480), 'caffe.Net', 'caffe.Net', (['model_file', 'weights_file', 'caffe.TEST'], {}), '(model_file, weights_file, caffe.TEST)\n', (8442, 8480), False, 'import caffe\n'), ((8536, 8567), 'datasets.LMDBData', 'datasets.LMDBData', (['args.dataset'], {}), '(args.dataset)\n', (8553, 8567), False, 'import datasets\n'), ((11696, 11729), 'os.path.realpath', 'os.path.realpath', (['args.model_file'], {}), '(args.model_file)\n', (11712, 11729), False, 'import os\n'), ((11749, 11784), 'os.path.realpath', 'os.path.realpath', (['args.weights_file'], {}), '(args.weights_file)\n', (11765, 11784), False, 'import os\n'), ((12173, 12288), 'amct_caffe.create_quant_config', 'amct.create_quant_config', (['config_json_file', 'model_file', 'weights_file', 'skip_layers', 'batch_num', 'activation_offset'], {}), '(config_json_file, model_file, weights_file,\n skip_layers, batch_num, activation_offset)\n', (12197, 12288), True, 'import amct_caffe as amct\n'), ((12346, 12390), 'os.path.join', 'os.path.join', (['TMP', '"""scale_offset_record.txt"""'], {}), "(TMP, 'scale_offset_record.txt')\n", (12358, 12390), False, 'import os\n'), ((12409, 12444), 'os.path.join', 'os.path.join', (['RESULT', '"""MobileNetV2"""'], {}), "(RESULT, 'MobileNetV2')\n", (12421, 12444), False, 'import os\n'), ((12645, 12789), 
'amct_caffe.accuracy_based_auto_calibration', 'amct.accuracy_based_auto_calibration', (['args.model_file', 'args.weights_file', 'evaluator', 'config_json_file', 'scale_offset_record_file', 'result_path'], {}), '(args.model_file, args.weights_file,\n evaluator, config_json_file, scale_offset_record_file, result_path)\n', (12681, 12789), True, 'import amct_caffe as amct\n'), ((983, 1009), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (999, 1009), False, 'import os\n'), ((4474, 4498), 'sys.path.insert', 'sys.path.insert', (['(0)', 'path'], {}), '(0, path)\n', (4489, 4498), False, 'import sys\n'), ((5211, 5246), 'os.path.join', 'os.path.join', (['data_dir', 'imgs[index]'], {}), '(data_dir, imgs[index])\n', (5223, 5246), False, 'import os\n'), ((5265, 5284), 'cv2.imread', 'cv2.imread', (['im_file'], {}), '(im_file)\n', (5275, 5284), False, 'import cv2\n'), ((5303, 5365), 'cv2.resize', 'cv2.resize', (['im_data', '(256, 256)'], {'interpolation': 'cv2.INTER_CUBIC'}), '(im_data, (256, 256), interpolation=cv2.INTER_CUBIC)\n', (5313, 5365), False, 'import cv2\n'), ((11529, 11549), 'caffe.set_mode_gpu', 'caffe.set_mode_gpu', ([], {}), '()\n', (11547, 11549), False, 'import caffe\n'), ((11558, 11587), 'caffe.set_device', 'caffe.set_device', (['args.gpu_id'], {}), '(args.gpu_id)\n', (11574, 11587), False, 'import caffe\n'), ((11596, 11615), 'amct_caffe.set_gpu_mode', 'amct.set_gpu_mode', ([], {}), '()\n', (11613, 11615), True, 'import amct_caffe as amct\n'), ((11634, 11654), 'caffe.set_mode_cpu', 'caffe.set_mode_cpu', ([], {}), '()\n', (11652, 11654), False, 'import caffe\n'), ((3218, 3234), 'pathlib.Path', 'Path', (['model_file'], {}), '(model_file)\n', (3222, 3234), False, 'from pathlib import Path\n'), ((3505, 3523), 'pathlib.Path', 'Path', (['weights_file'], {}), '(weights_file)\n', (3509, 3523), False, 'from pathlib import Path\n'), ((3946, 3961), 'pathlib.Path', 'Path', (['caffe_dir'], {}), '(caffe_dir)\n', (3950, 3961), False, 'from pathlib import Path\n'), ((4114, 4134), 'pathlib.Path', 'Path', (['caffe_exec_bin'], {}), '(caffe_exec_bin)\n', (4118, 4134), False, 'from pathlib import Path\n'), ((4292, 4310), 'pathlib.Path', 'Path', (['pycaffe_file'], {}), '(pycaffe_file)\n', (4296, 4310), False, 'from pathlib import Path\n')]
|
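The crop/mean/scale preprocessing used above, re-implemented in isolation as a minimal sketch; it mirrors img_preprocess and the sample's constants and is not part of the AMCT API:

import numpy as np

def center_crop_normalize(batch, mean_value=(103.94, 116.78, 123.68),
                          scale=0.017, crop_size=224):
    # batch: uint8 NCHW blobs such as (N, 3, 256, 256) from get_blobs_from_im
    height, width = batch.shape[2], batch.shape[3]
    h_off = (height - crop_size) // 2
    w_off = (width - crop_size) // 2
    crop = batch[:, :, h_off:height - h_off, w_off:width - w_off].astype(np.float32)
    for channel, mean in enumerate(mean_value):
        crop[:, channel, :, :] -= mean      # channel-wise mean subtraction
    return crop * scale                     # global scale, as SCALE above

demo = np.random.randint(0, 256, (2, 3, 256, 256), dtype=np.uint8)
print(center_crop_normalize(demo).shape)  # (2, 3, 224, 224)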
#!/usr/bin/python3.5
'''
MIT License
Copyright (c) 2018 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
A simple cache simulator developed by <NAME>
This code is developed for Computer Architecture subject
University of North Carolina, Charlotte, USA
'''
import re
from Cache import Cache
import argparse
import gzip
parser = argparse.ArgumentParser()
parser.add_argument("cache_trace", help="Memory address trace in gzip format")
parser.add_argument("cache_size", help="Cache size in KB.", type=int)
parser.add_argument("block_size", help="Block size in B.", type=int)
parser.add_argument("set_number", help="set number", type=int)
parser.add_argument("address_bit_size", help="set number", type=int, default=32, action='store', nargs='?')
args = parser.parse_args()
regex = re.compile('[0-9]+')
'''
def readFile(fileAddr):
instructions=[]
dataAddr=[]
with open(fileAddr) as f:
lines = f.readlines()
for line in lines:
data = line.split(' ')
type = int(data[0], 10)
addr = int(data[1], 16)
if(type == 2):
instructions.append(addr)
elif(type == 0 or type == 1):
dataAddr.append(addr)
else:
print(data[0])
return [instructions, dataAddr]
'''
def simulateCaches(file_handler, i_ch, d_ch):
    for line in file_handler:
        data = line.split(' ')
        access_type = int(data[0], 10)  # renamed to avoid shadowing the built-in type()
        addr = int(data[1], 16)
        if (access_type == 2):  # Instruction fetch
            i_ch.read(addr)
        elif (access_type == 0 or access_type == 1):  # Data read (0) or data write (1)
            d_ch.read(addr)
try:
miss_rate_d = '{0:.2f}'.format(float(d_ch.miss) * 100 / d_ch.access)
except ZeroDivisionError:
miss_rate_d = 'N/A'
try:
miss_rate_i = '{0:.2f}'.format(float(i_ch.miss) * 100 / i_ch.access)
except ZeroDivisionError:
miss_rate_i = 'N/A'
print("{} miss rate : {}, access : {} and {} miss rate : {}, access : {}"
.format(i_ch.name, miss_rate_i, i_ch.access,
d_ch.name, miss_rate_d, d_ch.access), flush=True, end='\r')
print()
printResult(i_ch)
printResult(d_ch)
def printResult(ch):
    print()
print("-----------------------------")
print("\tResult for " + ch.name +":")
print("\tTotal : " + str(ch.access))
print("\tMisses : " + str(ch.miss))
print("\tHit : " + str(ch.access - ch.miss))
print("\tHit Rate : {0:.5}".format(float(ch.access - ch.miss)*100/ch.access))
print("-----------------------------")
if __name__ == '__main__':
filePath = args.cache_trace
cacheSize = args.cache_size * 1024
blockSize = args.block_size
setNumber = args.set_number
address_bit_size = args.address_bit_size
file_handler = gzip.open(filePath, 'rt')
l1_ins = Cache(address_bit_size, 'l1_icache', cacheSize, blockSize, setNumber)
l1_d = Cache(address_bit_size, 'l1_dcache', cacheSize, blockSize, setNumber)
l1_ins.construct()
l1_d.construct()
simulateCaches(file_handler, l1_ins, l1_d)
|
[
"Cache.Cache",
"gzip.open",
"argparse.ArgumentParser",
"re.compile"
] |
[((1317, 1342), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1340, 1342), False, 'import argparse\n'), ((1769, 1789), 're.compile', 're.compile', (['"""[0-9]+"""'], {}), "('[0-9]+')\n", (1779, 1789), False, 'import re\n'), ((3861, 3886), 'gzip.open', 'gzip.open', (['filePath', '"""rt"""'], {}), "(filePath, 'rt')\n", (3870, 3886), False, 'import gzip\n'), ((3901, 3970), 'Cache.Cache', 'Cache', (['address_bit_size', '"""l1_icache"""', 'cacheSize', 'blockSize', 'setNumber'], {}), "(address_bit_size, 'l1_icache', cacheSize, blockSize, setNumber)\n", (3906, 3970), False, 'from Cache import Cache\n'), ((3985, 4054), 'Cache.Cache', 'Cache', (['address_bit_size', '"""l1_dcache"""', 'cacheSize', 'blockSize', 'setNumber'], {}), "(address_bit_size, 'l1_dcache', cacheSize, blockSize, setNumber)\n", (3990, 4054), False, 'from Cache import Cache\n')]
|
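A direct-use sketch of the Cache class, inferred from the constructor call and the attributes exercised above (the access address is arbitrary):

from Cache import Cache  # assumes the repository's Cache module is importable

demo = Cache(32, 'demo_cache', 32 * 1024, 64, 4)  # address bits, name, cache size, block size, set number
demo.construct()
demo.read(0x1A2B3C40)                      # one simulated access
print(demo.name, demo.access, demo.miss)   # the counters consumed by printResult above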
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from copy import deepcopy
from typing import Any, Generator, Iterable, Iterator
__author__ = "<NAME>"
__doc__ = r"""
Created on 28/10/2019
"""
__all__ = ["unzip", "unzipper"]
def unzip(iterable: Iterable) -> Iterable:
""" """
return zip(*iterable)
def unzipper(iterable: Iterable[Iterable]) -> Iterable:
"""
    Unzips an iterable of iterables.
    Be careful: this has undefined and unexpected behaviour
:param iterable:
:return:"""
def check_next_iter(iterable: Any) -> Any:
""" """
if isinstance(iterable, Iterable):
try:
a = next(iter(iterable))
if isinstance(a, Iterable):
return a
except StopIteration:
pass
if isinstance(iterable, Iterable):
check_a = check_next_iter(check_next_iter(deepcopy(iterable)))
if check_next_iter(check_a):
for a in iterable:
yield unzipper(a)
elif check_a:
for a in iterable:
yield unzip(a)
else:
for i in iterable:
yield i
return
if __name__ == "__main__":
def recursive_eval(node: Any):
""" """
if isinstance(node, (Iterable, Generator, Iterator)):
gather = []
for i in node:
gather.append(recursive_eval(i))
return gather
return node
def aasda():
""" """
r = range(4)
print(0)
a = [[[*r] for _ in r] for _ in r]
print(a)
print(1)
for _, assd in zip(r, unzipper(a)):
print()
print(recursive_eval(assd))
print()
for _, (a, *_) in zip(r, unzipper(a)):
print()
print(recursive_eval(a))
print()
print(2)
def skad23():
""" """
print(0)
zippy_once = zip(range(6), range(3))
dsadsa = list(deepcopy(zippy_once))
zippy_twice = zip(dsadsa, dsadsa)
zippy_twice_copy = deepcopy(zippy_twice)
asds = list(deepcopy(zippy_twice_copy))
zippy_trice = zip(asds, asds)
zippy_trice_copy = deepcopy(zippy_trice)
print(1)
for aa in zippy_twice:
print(recursive_eval(aa))
print(2)
for a1 in unzip(zippy_twice_copy):
print(recursive_eval(a1))
print(3)
for a1 in unzip(zippy_once):
print(recursive_eval(a1))
print(4)
for a1 in zippy_trice:
print(recursive_eval(a1))
print(5)
for a1 in unzip(zippy_trice_copy):
print(recursive_eval(a1))
print(6)
def skad():
""" """
print(0)
zippy_once = zip(zip(range(6), range(3)))
zippy_once_copy = deepcopy(zippy_once)
dsadsa = list(deepcopy(zippy_once))
zippy_twice = zip(dsadsa, dsadsa)
zippy_twice_copy = deepcopy(zippy_twice)
asds = list(deepcopy(zippy_twice_copy))
zippy_trice = zip(asds, asds)
zippy_trice_copy = deepcopy(zippy_trice)
asds2323 = list(deepcopy(zippy_trice_copy))
zippy_quad = zip(asds2323, asds2323)
zippy_quad_copy = deepcopy(zippy_quad)
print(1)
for aa in zippy_twice:
print(recursive_eval(aa))
print(2)
for a1 in unzipper(zippy_twice_copy):
print(recursive_eval(a1))
print(3)
for a1 in zippy_once_copy:
print(recursive_eval(a1))
print(4)
for a1 in unzipper(zippy_once):
print(recursive_eval(a1))
print(5)
for a1 in zippy_trice:
print(recursive_eval(a1))
print(6)
for a1 in unzipper(zippy_trice_copy):
print(recursive_eval(a1))
print(7)
for a1 in zippy_quad:
print(recursive_eval(a1))
print(8)
for a1 in unzipper(zippy_quad_copy):
print(recursive_eval(a1))
print(9)
aasda()
print()
print("asafasdw")
print()
skad()
# skad23()
|
[
"copy.deepcopy"
] |
[((2116, 2137), 'copy.deepcopy', 'deepcopy', (['zippy_twice'], {}), '(zippy_twice)\n', (2124, 2137), False, 'from copy import deepcopy\n'), ((2251, 2272), 'copy.deepcopy', 'deepcopy', (['zippy_trice'], {}), '(zippy_trice)\n', (2259, 2272), False, 'from copy import deepcopy\n'), ((2887, 2907), 'copy.deepcopy', 'deepcopy', (['zippy_once'], {}), '(zippy_once)\n', (2895, 2907), False, 'from copy import deepcopy\n'), ((3021, 3042), 'copy.deepcopy', 'deepcopy', (['zippy_twice'], {}), '(zippy_twice)\n', (3029, 3042), False, 'from copy import deepcopy\n'), ((3156, 3177), 'copy.deepcopy', 'deepcopy', (['zippy_trice'], {}), '(zippy_trice)\n', (3164, 3177), False, 'from copy import deepcopy\n'), ((3301, 3321), 'copy.deepcopy', 'deepcopy', (['zippy_quad'], {}), '(zippy_quad)\n', (3309, 3321), False, 'from copy import deepcopy\n'), ((2025, 2045), 'copy.deepcopy', 'deepcopy', (['zippy_once'], {}), '(zippy_once)\n', (2033, 2045), False, 'from copy import deepcopy\n'), ((2158, 2184), 'copy.deepcopy', 'deepcopy', (['zippy_twice_copy'], {}), '(zippy_twice_copy)\n', (2166, 2184), False, 'from copy import deepcopy\n'), ((2930, 2950), 'copy.deepcopy', 'deepcopy', (['zippy_once'], {}), '(zippy_once)\n', (2938, 2950), False, 'from copy import deepcopy\n'), ((3063, 3089), 'copy.deepcopy', 'deepcopy', (['zippy_twice_copy'], {}), '(zippy_twice_copy)\n', (3071, 3089), False, 'from copy import deepcopy\n'), ((3202, 3228), 'copy.deepcopy', 'deepcopy', (['zippy_trice_copy'], {}), '(zippy_trice_copy)\n', (3210, 3228), False, 'from copy import deepcopy\n'), ((910, 928), 'copy.deepcopy', 'deepcopy', (['iterable'], {}), '(iterable)\n', (918, 928), False, 'from copy import deepcopy\n')]
|
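A quick check of the simple one-level case; unzip is repeated verbatim so the snippet runs standalone:

def unzip(iterable):
    # identical to the definition above: transpose via argument unpacking
    return zip(*iterable)

pairs = [(1, "a"), (2, "b"), (3, "c")]
numbers, letters = unzip(pairs)
print(numbers, letters)  # (1, 2, 3) ('a', 'b', 'c')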
import dask
from fidesops.graph.config import (
CollectionAddress,
)
from fidesops.graph.traversal import Traversal
from fidesops.models.connectionconfig import ConnectionConfig, ConnectionType
from fidesops.models.policy import Policy
from fidesops.task.graph_task import collect_queries, TaskResources, EMPTY_REQUEST
from .traversal_data import sample_traversal
from ..graph.graph_test_util import (
MockSqlTask,
)
dask.config.set(scheduler="processes")
connection_configs = [
ConnectionConfig(key="mysql", connection_type=ConnectionType.postgres),
ConnectionConfig(key="postgres", connection_type=ConnectionType.postgres),
]
def test_to_dask_input_data() -> None:
t = sample_traversal()
n = t.traversal_node_dict[CollectionAddress("mysql", "Address")]
task = MockSqlTask(n, TaskResources(EMPTY_REQUEST, Policy(), connection_configs))
customers_data = [
{"contact_address_id": 31, "foo": "X"},
{"contact_address_id": 32, "foo": "Y"},
]
orders_data = [
{"billing_address_id": 1, "shipping_address_id": 2},
{"billing_address_id": 11, "shipping_address_id": 22},
]
v = task.to_dask_input_data(customers_data, orders_data)
assert set(v["id"]) == {31, 32, 1, 2, 11, 22}
def test_sql_dry_run_queries() -> None:
traversal = sample_traversal()
env = collect_queries(
traversal,
TaskResources(EMPTY_REQUEST, Policy(), connection_configs),
)
assert (
env[CollectionAddress("mysql", "Customer")]
== "SELECT customer_id,name,email,contact_address_id FROM Customer WHERE email = ?"
)
assert (
env[CollectionAddress("mysql", "User")]
== "SELECT id,user_id,name FROM User WHERE user_id = ?"
)
assert (
env[CollectionAddress("postgres", "Order")]
== "SELECT order_id,customer_id,shipping_address_id,billing_address_id FROM Order WHERE customer_id IN (?, ?)"
)
assert (
env[CollectionAddress("mysql", "Address")]
== "SELECT id,street,city,state,zip FROM Address WHERE id IN (?, ?)"
)
def test_mongo_dry_run_queries() -> None:
from .traversal_data import integration_db_graph
traversal = Traversal(integration_db_graph("postgres"), {"email": ["x"]})
env = collect_queries(
traversal,
TaskResources(
EMPTY_REQUEST,
Policy(),
[
ConnectionConfig(key="mysql", connection_type=ConnectionType.mongodb),
ConnectionConfig(
key="postgres", connection_type=ConnectionType.mongodb
),
],
),
)
assert (
env[CollectionAddress("postgres", "customer")]
== "db.postgres.customer.find({'email': ?}, {'id': 1, 'name': 1, 'email': 1, 'address_id': 1})"
)
assert (
env[CollectionAddress("postgres", "orders")]
== "db.postgres.orders.find({'customer_id': {'$in': [?, ?]}}, {'id': 1, 'customer_id': 1, 'shipping_address_id': 1, 'payment_card_id': 1})"
)
assert (
env[CollectionAddress("postgres", "address")]
== "db.postgres.address.find({'id': {'$in': [?, ?]}}, {'id': 1, 'street': 1, 'city': 1, 'state': 1, 'zip': 1})"
)
|
[
"fidesops.graph.config.CollectionAddress",
"dask.config.set",
"fidesops.models.connectionconfig.ConnectionConfig",
"fidesops.models.policy.Policy"
] |
[((427, 465), 'dask.config.set', 'dask.config.set', ([], {'scheduler': '"""processes"""'}), "(scheduler='processes')\n", (442, 465), False, 'import dask\n'), ((494, 564), 'fidesops.models.connectionconfig.ConnectionConfig', 'ConnectionConfig', ([], {'key': '"""mysql"""', 'connection_type': 'ConnectionType.postgres'}), "(key='mysql', connection_type=ConnectionType.postgres)\n", (510, 564), False, 'from fidesops.models.connectionconfig import ConnectionConfig, ConnectionType\n'), ((570, 643), 'fidesops.models.connectionconfig.ConnectionConfig', 'ConnectionConfig', ([], {'key': '"""postgres"""', 'connection_type': 'ConnectionType.postgres'}), "(key='postgres', connection_type=ConnectionType.postgres)\n", (586, 643), False, 'from fidesops.models.connectionconfig import ConnectionConfig, ConnectionType\n'), ((745, 782), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""mysql"""', '"""Address"""'], {}), "('mysql', 'Address')\n", (762, 782), False, 'from fidesops.graph.config import CollectionAddress\n'), ((840, 848), 'fidesops.models.policy.Policy', 'Policy', ([], {}), '()\n', (846, 848), False, 'from fidesops.models.policy import Policy\n'), ((1417, 1425), 'fidesops.models.policy.Policy', 'Policy', ([], {}), '()\n', (1423, 1425), False, 'from fidesops.models.policy import Policy\n'), ((1480, 1518), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""mysql"""', '"""Customer"""'], {}), "('mysql', 'Customer')\n", (1497, 1518), False, 'from fidesops.graph.config import CollectionAddress\n'), ((1644, 1678), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""mysql"""', '"""User"""'], {}), "('mysql', 'User')\n", (1661, 1678), False, 'from fidesops.graph.config import CollectionAddress\n'), ((1776, 1814), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""postgres"""', '"""Order"""'], {}), "('postgres', 'Order')\n", (1793, 1814), False, 'from fidesops.graph.config import CollectionAddress\n'), ((1967, 2004), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""mysql"""', '"""Address"""'], {}), "('mysql', 'Address')\n", (1984, 2004), False, 'from fidesops.graph.config import CollectionAddress\n'), ((2373, 2381), 'fidesops.models.policy.Policy', 'Policy', ([], {}), '()\n', (2379, 2381), False, 'from fidesops.models.policy import Policy\n'), ((2670, 2711), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""postgres"""', '"""customer"""'], {}), "('postgres', 'customer')\n", (2687, 2711), False, 'from fidesops.graph.config import CollectionAddress\n'), ((2849, 2888), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""postgres"""', '"""orders"""'], {}), "('postgres', 'orders')\n", (2866, 2888), False, 'from fidesops.graph.config import CollectionAddress\n'), ((3070, 3110), 'fidesops.graph.config.CollectionAddress', 'CollectionAddress', (['"""postgres"""', '"""address"""'], {}), "('postgres', 'address')\n", (3087, 3110), False, 'from fidesops.graph.config import CollectionAddress\n'), ((2413, 2482), 'fidesops.models.connectionconfig.ConnectionConfig', 'ConnectionConfig', ([], {'key': '"""mysql"""', 'connection_type': 'ConnectionType.mongodb'}), "(key='mysql', connection_type=ConnectionType.mongodb)\n", (2429, 2482), False, 'from fidesops.models.connectionconfig import ConnectionConfig, ConnectionType\n'), ((2500, 2572), 'fidesops.models.connectionconfig.ConnectionConfig', 'ConnectionConfig', ([], {'key': '"""postgres"""', 'connection_type': 'ConnectionType.mongodb'}), 
"(key='postgres', connection_type=ConnectionType.mongodb)\n", (2516, 2572), False, 'from fidesops.models.connectionconfig import ConnectionConfig, ConnectionType\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.core.files.storage
import django.utils.timezone
import django_extensions.db.fields
from django.conf import settings
from django.db import migrations, models
import stackdio.core.fields
def get_config_file_path(instance, filename):
return filename
def get_global_orch_props_file_path(instance, filename):
return 'cloud/{0}/{1}'.format(instance.slug, filename)
class Migration(migrations.Migration):
initial = True
dependencies = [
('blueprints', '0001_0_8_initial'),
]
replaces = [
('cloud', '0001_initial'),
('cloud', '0002_initial'),
('cloud', '0004_v0_7_migrations'),
('cloud', '0005_v0_7b_migrations'),
('cloud', '0006_v0_7c_migrations'),
('cloud', '0007_v0_7d_migrations'),
('cloud', '0008_v0_7e_migrations'),
]
operations = [
migrations.CreateModel(
name='CloudAccount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', editable=False, blank=True)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', editable=False, blank=True)),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('slug', django_extensions.db.fields.AutoSlugField(populate_from='title', verbose_name='slug', editable=False, blank=True)),
('yaml', models.TextField()),
('vpc_id', models.CharField(max_length=64, verbose_name='VPC ID', blank=True)),
('account_id', models.CharField(max_length=64, verbose_name='Account ID')),
('create_security_groups', models.BooleanField(default=True, verbose_name='Create Security Groups')),
('config_file', stackdio.core.fields.DeletingFileField(default=None, upload_to=get_config_file_path, storage=django.core.files.storage.FileSystemStorage(location=settings.STACKDIO_CONFIG.salt_providers_dir), max_length=255, blank=True, null=True)),
('global_orch_props_file', stackdio.core.fields.DeletingFileField(default=None, upload_to=get_global_orch_props_file_path, storage=django.core.files.storage.FileSystemStorage(location=settings.FILE_STORAGE_DIRECTORY), max_length=255, blank=True, null=True)),
],
options={
'ordering': ('title',),
'default_permissions': ('admin', 'create', 'delete', 'update', 'view'),
},
),
migrations.CreateModel(
name='CloudImage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', editable=False, blank=True)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', editable=False, blank=True)),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('slug', django_extensions.db.fields.AutoSlugField(populate_from='title', verbose_name='slug', editable=False, blank=True)),
('image_id', models.CharField(max_length=64, verbose_name='Image ID')),
('ssh_user', models.CharField(max_length=64, verbose_name='SSH User')),
('config_file', stackdio.core.fields.DeletingFileField(default=None, upload_to=get_config_file_path, storage=django.core.files.storage.FileSystemStorage(location=settings.STACKDIO_CONFIG.salt_profiles_dir), max_length=255, blank=True, null=True)),
],
options={
'ordering': ('title',),
'default_permissions': ('admin', 'create', 'delete', 'update', 'view'),
},
),
migrations.CreateModel(
name='CloudInstanceSize',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('slug', django_extensions.db.fields.AutoSlugField(populate_from='title', verbose_name='slug', editable=False, blank=True)),
('instance_id', models.CharField(max_length=64, verbose_name='Instance ID')),
],
options={
'ordering': ('id',),
'default_permissions': (),
},
),
migrations.CreateModel(
name='CloudProvider',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=32, verbose_name='Name', choices=[('ec2', 'Amazon Web Services')])),
],
options={
'default_permissions': ('admin', 'view'),
},
),
migrations.CreateModel(
name='CloudRegion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('slug', django_extensions.db.fields.AutoSlugField(populate_from='title', verbose_name='slug', editable=False, blank=True)),
],
options={
'ordering': ('provider', 'title'),
'default_permissions': (),
},
),
migrations.CreateModel(
name='CloudZone',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('slug', django_extensions.db.fields.AutoSlugField(populate_from='title', verbose_name='slug', editable=False, blank=True)),
],
options={
'ordering': ('region', 'title'),
'default_permissions': (),
},
),
migrations.CreateModel(
name='SecurityGroup',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', editable=False, blank=True)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', editable=False, blank=True)),
('name', models.CharField(max_length=255)),
('description', models.CharField(max_length=255)),
('group_id', models.CharField(max_length=16)),
('is_default', models.BooleanField(default=False)),
('is_managed', models.BooleanField(default=False)),
('account', models.ForeignKey(related_name='security_groups', to='cloud.CloudAccount')),
('blueprint_host_definition', models.ForeignKey(related_name='security_groups', default=None, to='blueprints.BlueprintHostDefinition', null=True)),
],
options={
'default_permissions': ('admin', 'create', 'delete', 'update', 'view'),
},
),
migrations.CreateModel(
name='Snapshot',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', editable=False, blank=True)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', editable=False, blank=True)),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('slug', django_extensions.db.fields.AutoSlugField(populate_from='title', verbose_name='slug', editable=False, blank=True)),
('snapshot_id', models.CharField(max_length=32)),
('size_in_gb', models.IntegerField()),
('filesystem_type', models.CharField(max_length=16, choices=[('ext2', 'ext2'), ('ext3', 'ext3'), ('ext4', 'ext4'), ('fuse', 'fuse'), ('xfs', 'xfs')])),
('account', models.ForeignKey(related_name='snapshots', to='cloud.CloudAccount')),
],
options={
'default_permissions': ('admin', 'create', 'delete', 'update', 'view'),
},
),
]
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.IntegerField"
] |
[((1026, 1119), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1042, 1119), False, 'from django.db import migrations, models\n'), ((1484, 1538), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (1500, 1538), False, 'from django.db import migrations, models\n'), ((1573, 1640), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""description"""', 'blank': '(True)'}), "(null=True, verbose_name='description', blank=True)\n", (1589, 1640), False, 'from django.db import migrations, models\n'), ((1809, 1827), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1825, 1827), False, 'from django.db import migrations, models\n'), ((1857, 1923), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""VPC ID"""', 'blank': '(True)'}), "(max_length=64, verbose_name='VPC ID', blank=True)\n", (1873, 1923), False, 'from django.db import migrations, models\n'), ((1957, 2015), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""Account ID"""'}), "(max_length=64, verbose_name='Account ID')\n", (1973, 2015), False, 'from django.db import migrations, models\n'), ((2061, 2133), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""Create Security Groups"""'}), "(default=True, verbose_name='Create Security Groups')\n", (2080, 2133), False, 'from django.db import migrations, models\n'), ((2974, 3067), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (2990, 3067), False, 'from django.db import migrations, models\n'), ((3432, 3486), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (3448, 3486), False, 'from django.db import migrations, models\n'), ((3521, 3588), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""description"""', 'blank': '(True)'}), "(null=True, verbose_name='description', blank=True)\n", (3537, 3588), False, 'from django.db import migrations, models\n'), ((3761, 3817), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""Image ID"""'}), "(max_length=64, verbose_name='Image ID')\n", (3777, 3817), False, 'from django.db import migrations, models\n'), ((3849, 3905), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""SSH User"""'}), "(max_length=64, verbose_name='SSH User')\n", (3865, 3905), False, 'from django.db import migrations, models\n'), ((4477, 4570), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (4493, 4570), False, 'from django.db import migrations, models\n'), ((4595, 4649), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), 
"(max_length=255, verbose_name='title')\n", (4611, 4649), False, 'from django.db import migrations, models\n'), ((4684, 4751), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""description"""', 'blank': '(True)'}), "(null=True, verbose_name='description', blank=True)\n", (4700, 4751), False, 'from django.db import migrations, models\n'), ((4927, 4986), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'verbose_name': '"""Instance ID"""'}), "(max_length=64, verbose_name='Instance ID')\n", (4943, 4986), False, 'from django.db import migrations, models\n'), ((5242, 5335), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (5258, 5335), False, 'from django.db import migrations, models\n'), ((5359, 5471), 'django.db.models.CharField', 'models.CharField', ([], {'unique': '(True)', 'max_length': '(32)', 'verbose_name': '"""Name"""', 'choices': "[('ec2', 'Amazon Web Services')]"}), "(unique=True, max_length=32, verbose_name='Name', choices=[\n ('ec2', 'Amazon Web Services')])\n", (5375, 5471), False, 'from django.db import migrations, models\n'), ((5698, 5791), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (5714, 5791), False, 'from django.db import migrations, models\n'), ((5816, 5870), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (5832, 5870), False, 'from django.db import migrations, models\n'), ((5905, 5972), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""description"""', 'blank': '(True)'}), "(null=True, verbose_name='description', blank=True)\n", (5921, 5972), False, 'from django.db import migrations, models\n'), ((6379, 6472), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (6395, 6472), False, 'from django.db import migrations, models\n'), ((6497, 6551), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (6513, 6551), False, 'from django.db import migrations, models\n'), ((6586, 6653), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""description"""', 'blank': '(True)'}), "(null=True, verbose_name='description', blank=True)\n", (6602, 6653), False, 'from django.db import migrations, models\n'), ((7062, 7155), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (7078, 7155), False, 'from django.db import migrations, models\n'), ((7519, 7551), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (7535, 7551), False, 'from django.db import migrations, models\n'), ((7586, 7618), 'django.db.models.CharField', 
'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (7602, 7618), False, 'from django.db import migrations, models\n'), ((7650, 7681), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(16)'}), '(max_length=16)\n', (7666, 7681), False, 'from django.db import migrations, models\n'), ((7715, 7749), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (7734, 7749), False, 'from django.db import migrations, models\n'), ((7783, 7817), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (7802, 7817), False, 'from django.db import migrations, models\n'), ((7848, 7922), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""security_groups"""', 'to': '"""cloud.CloudAccount"""'}), "(related_name='security_groups', to='cloud.CloudAccount')\n", (7865, 7922), False, 'from django.db import migrations, models\n'), ((7971, 8091), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""security_groups"""', 'default': 'None', 'to': '"""blueprints.BlueprintHostDefinition"""', 'null': '(True)'}), "(related_name='security_groups', default=None, to=\n 'blueprints.BlueprintHostDefinition', null=True)\n", (7988, 8091), False, 'from django.db import migrations, models\n'), ((8345, 8438), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (8361, 8438), False, 'from django.db import migrations, models\n'), ((8803, 8857), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (8819, 8857), False, 'from django.db import migrations, models\n'), ((8892, 8959), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""description"""', 'blank': '(True)'}), "(null=True, verbose_name='description', blank=True)\n", (8908, 8959), False, 'from django.db import migrations, models\n'), ((9135, 9166), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)'}), '(max_length=32)\n', (9151, 9166), False, 'from django.db import migrations, models\n'), ((9200, 9221), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (9219, 9221), False, 'from django.db import migrations, models\n'), ((9260, 9393), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(16)', 'choices': "[('ext2', 'ext2'), ('ext3', 'ext3'), ('ext4', 'ext4'), ('fuse', 'fuse'), (\n 'xfs', 'xfs')]"}), "(max_length=16, choices=[('ext2', 'ext2'), ('ext3', 'ext3'),\n ('ext4', 'ext4'), ('fuse', 'fuse'), ('xfs', 'xfs')])\n", (9276, 9393), False, 'from django.db import migrations, models\n'), ((9420, 9488), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""snapshots"""', 'to': '"""cloud.CloudAccount"""'}), "(related_name='snapshots', to='cloud.CloudAccount')\n", (9437, 9488), False, 'from django.db import migrations, models\n')]
|
# Generated by Django 2.0.1 on 2019-06-10 15:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ticket', '0013_auto_20190603_0738'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='team_name',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ticket_team', to='ticket.Team'),
),
]
|
[
"django.db.models.ForeignKey"
] |
[((369, 519), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""ticket_team"""', 'to': '"""ticket.Team"""'}), "(blank=True, default=None, null=True, on_delete=django.db.\n models.deletion.CASCADE, related_name='ticket_team', to='ticket.Team')\n", (386, 519), False, 'from django.db import migrations, models\n')]
|
from pywim.utils.stats import iqr
import numpy as np
import pandas as pd
import peakutils
def sensors_estimation(
signal_data: pd.DataFrame, sensors_delta_distance: list
) -> [np.array]:
"""
:param signal_data:
:param sensors_delta_distance:
:return:
"""
# x axis: time
x = signal_data.index.values
sensors_peak_time = []
sensors_delta_time = [None]
for k in signal_data.keys():
# y axis: volts
y = signal_data[k].values
indexes = peakutils.indexes(y, thres=0.5, min_dist=30)
sensors_peak_time.append(x[indexes])
for i in range(1, len(sensors_peak_time)):
sensors_delta_time.append(
sensors_peak_time[i] - sensors_peak_time[i - 1]
)
# the information about first sensor should be equal to the second sensor
sensors_delta_time[0] = sensors_delta_time[1]
sensors_delta_speed = []
for i in range(len(sensors_delta_distance)):
sensors_delta_speed.append(
sensors_delta_distance[i] / sensors_delta_time[i]
)
# the information about first sensor should be equal to the second sensor
sensors_delta_speed[0] = sensors_delta_speed[1]
return sensors_delta_speed
def average_estimation(
    signal_data: pd.DataFrame = None,
    sensors_delta_distance: list = None,
    sensors_delta_speed: list = None
) -> float:
"""
:param signal_data:
:param sensors_delta_distance:
:param sensors_delta_speed:
:return:
"""
if not sensors_delta_speed:
sensors_delta_speed = sensors_estimation(
signal_data, sensors_delta_distance
)
speed_values = np.array([])
for sensor_speeds in sensors_delta_speed[1:]:
speed_values = np.concatenate((speed_values, sensor_speeds))
return iqr.reject_outliers(pd.Series(speed_values)).mean()
|
[
"peakutils.indexes",
"numpy.array",
"pandas.Series",
"numpy.concatenate"
] |
[((1659, 1671), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1667, 1671), True, 'import numpy as np\n'), ((506, 550), 'peakutils.indexes', 'peakutils.indexes', (['y'], {'thres': '(0.5)', 'min_dist': '(30)'}), '(y, thres=0.5, min_dist=30)\n', (523, 550), False, 'import peakutils\n'), ((1746, 1791), 'numpy.concatenate', 'np.concatenate', (['(speed_values, sensor_speeds)'], {}), '((speed_values, sensor_speeds))\n', (1760, 1791), True, 'import numpy as np\n'), ((1824, 1847), 'pandas.Series', 'pd.Series', (['speed_values'], {}), '(speed_values)\n', (1833, 1847), True, 'import pandas as pd\n')]
|
from setuptools import find_packages, setup
INSTALL_REQUIRES = [
'cheroot==8.3.0',
'flask==1.1.2',
'flask-sqlalchemy==2.4.3',
'sqlalchemy==1.3.7',
'bcrypt==3.1.7',
'hashids==1.2.0',
'click==7.1.2',
'markdown==2.6.9',
'mdx-linkify==1.0',
]
DEV_REQUIRES = [
'wheel',
'twine',
]
MEMCACHE_REQUIRES = [
'pymemcache==3.2.0',
]
if __name__ == '__main__':
setup(
version='0.7',
name='MiniWiki',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=find_packages(exclude=['tests']),
entry_points={
'console_scripts': (
'miniwiki=miniwiki.__main__:start_miniwiki',
),
},
python_requires='>=3.6',
install_requires=INSTALL_REQUIRES,
extras_require={
'dev': DEV_REQUIRES,
'memcache': MEMCACHE_REQUIRES,
},
include_package_data=True,
)
|
[
"setuptools.find_packages"
] |
[((562, 594), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests']"}), "(exclude=['tests'])\n", (575, 594), False, 'from setuptools import find_packages, setup\n')]
|
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.renderers import JSONRenderer
from cantusdata.models import Manuscript
from cantusdata.serializers.search import SearchSerializer
from cantusdata.helpers.solrsearch import SolrSearchQueryless
class ManuscriptGlyphSetView(APIView):
serializer_class = SearchSerializer
renderer_classes = (JSONRenderer,)
def get(self, request, *args, **kwargs):
manuscript = Manuscript.objects.get(id=kwargs["pk"])
result = (
SolrSearchQueryless(
'q=type%3Acantusdata_music_notation+AND+siglum_slug%3A"{0}"'.format(
manuscript.siglum_slug
)
)
.facets(["neumes"])
.facet_counts["facet_fields"]["neumes"]
)
return Response(result)
|
[
"cantusdata.models.Manuscript.objects.get",
"rest_framework.response.Response"
] |
[((485, 524), 'cantusdata.models.Manuscript.objects.get', 'Manuscript.objects.get', ([], {'id': "kwargs['pk']"}), "(id=kwargs['pk'])\n", (507, 524), False, 'from cantusdata.models import Manuscript\n'), ((846, 862), 'rest_framework.response.Response', 'Response', (['result'], {}), '(result)\n', (854, 862), False, 'from rest_framework.response import Response\n')]
|
import requests
import json
import os
import logging
from random import randint
from bs4 import BeautifulSoup
class AFITop100:
def __init__(self, quotes):
self.quotes = quotes
def get_random_quote(self):
firstquote_index = min(self.quotes.keys())
lastquote_index = max(self.quotes.keys())
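        # assumes quote indices form a contiguous range; randint is
        # inclusive of both endpoints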
random_index = randint(firstquote_index, lastquote_index)
return tuple([random_index, self.quotes[random_index]])
def get_quote(self, index):
return tuple([index, self.quotes.get(index)])
def store_quotes_json(packed_quotes):
quotes_file = get_quotes_filename()
if not os.path.exists(os.path.dirname(quotes_file)):
os.makedirs(os.path.dirname(quotes_file))
with open(quotes_file, 'w') as fh:
json.dump(packed_quotes, fh)
def fetch_quotes_json():
quotes_file = get_quotes_filename()
with open(quotes_file, 'r') as fh:
quotes = json.load(fh)
keys = map(int, quotes.keys())
return dict(zip(keys, quotes.values()))
def check_json_exists():
quotes_file = get_quotes_filename()
return os.path.exists(quotes_file)
def get_quotes_filename():
datadir = os.path.expandvars("$HOME/data")
return os.path.join(datadir, 'quotes.json')
def fetch_afi_quotes_html(url='https://www.afi.com/afis-100-years-100-movie-quotes/'):
try:
page = requests.get(url)
page.raise_for_status()
return page.content
except requests.exceptions.HTTPError as e:
logging.error(f"Something went wront in fetch_afi_quotes_html: {e}")
raise
except requests.ConnectionError as e:
logging.error("No internet connection available to fetch quotes.")
raise
def find_quotes(html, selector='div.single_list.col-sm-12.movie_popup'):
try:
soup = BeautifulSoup(html, 'html.parser')
quotes = soup.select(selector)
return quotes
except Exception as e:
logging.error(f"Unable to select movie elements: {e}")
raise
def pack_quotes(quotes, **kwargs):
"""
:param quotes: a list of BeautifulSoup tags containing quotes, and quote details
:return: a dictionary of packaged quotes
"""
packed_quotes = {}
for group in quotes:
raw = group.select_one(kwargs.get('quotetag'))
raw_quote = raw.string
raw_quote = raw_quote.strip()
rank, quote = raw_quote.split(" ", 1)
rank = int(rank.rstrip("."))
raw = group.select_one(kwargs.get('movietag'))
raw_movie, raw_year = raw.strings
raw_movie = raw_movie.strip()
movie = raw_movie.title()
raw_year = raw_year.lstrip('(')
year = raw_year.rstrip(')')
packed_quotes[rank] = {"Quote": quote, "Movie": movie, "Year": year}
return packed_quotes
|
[
"json.dump",
"logging.error",
"json.load",
"random.randint",
"os.path.dirname",
"os.path.exists",
"os.path.expandvars",
"requests.get",
"bs4.BeautifulSoup",
"os.path.join"
] |
[((1104, 1131), 'os.path.exists', 'os.path.exists', (['quotes_file'], {}), '(quotes_file)\n', (1118, 1131), False, 'import os\n'), ((1175, 1207), 'os.path.expandvars', 'os.path.expandvars', (['"""$HOME/data"""'], {}), "('$HOME/data')\n", (1193, 1207), False, 'import os\n'), ((1219, 1255), 'os.path.join', 'os.path.join', (['datadir', '"""quotes.json"""'], {}), "(datadir, 'quotes.json')\n", (1231, 1255), False, 'import os\n'), ((348, 390), 'random.randint', 'randint', (['firstquote_index', 'lastquote_index'], {}), '(firstquote_index, lastquote_index)\n', (355, 390), False, 'from random import randint\n'), ((778, 806), 'json.dump', 'json.dump', (['packed_quotes', 'fh'], {}), '(packed_quotes, fh)\n', (787, 806), False, 'import json\n'), ((931, 944), 'json.load', 'json.load', (['fh'], {}), '(fh)\n', (940, 944), False, 'import json\n'), ((1369, 1386), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1381, 1386), False, 'import requests\n'), ((1816, 1850), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (1829, 1850), False, 'from bs4 import BeautifulSoup\n'), ((649, 677), 'os.path.dirname', 'os.path.dirname', (['quotes_file'], {}), '(quotes_file)\n', (664, 677), False, 'import os\n'), ((700, 728), 'os.path.dirname', 'os.path.dirname', (['quotes_file'], {}), '(quotes_file)\n', (715, 728), False, 'import os\n'), ((1502, 1570), 'logging.error', 'logging.error', (['f"""Something went wront in fetch_afi_quotes_html: {e}"""'], {}), "(f'Something went wront in fetch_afi_quotes_html: {e}')\n", (1515, 1570), False, 'import logging\n'), ((1635, 1701), 'logging.error', 'logging.error', (['"""No internet connection available to fetch quotes."""'], {}), "('No internet connection available to fetch quotes.')\n", (1648, 1701), False, 'import logging\n'), ((1947, 2001), 'logging.error', 'logging.error', (['f"""Unable to select movie elements: {e}"""'], {}), "(f'Unable to select movie elements: {e}')\n", (1960, 2001), False, 'import logging\n')]
|
from typing import Any
from .logging import logger, LogLevelEnum
from clubbi_utils import json
from typing import Callable
import logging
class JsonLogger:
    def __init__(self, logger: logging.Logger):
self.logger = logger
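        # dynamically bind one convenience method per log level,
        # replacing the no-op stubs defined below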
setattr(self, "fatal", self._log(LogLevelEnum.fatal))
setattr(self, "error", self._log(LogLevelEnum.error))
setattr(self, "warning", self._log(LogLevelEnum.warning))
setattr(self, "info", self._log(LogLevelEnum.info))
setattr(self, "debug", self._log(LogLevelEnum.debug))
def log(self, log_level: LogLevelEnum, workflow: str, message: str, **kwargs: Any) -> None:
payload = dict(
workflow=workflow,
message=message,
level=log_level,
**kwargs,
)
self.logger.log(level=log_level.to_python_log_level(), msg=json.dumps(payload))
def _log(self, log_level: LogLevelEnum) -> Callable:
return lambda workflow, message, **kwargs: self.log(log_level, workflow, message, **kwargs)
def fatal(self, workflow: str, message: str, **kwargs: Any) -> None:
pass
def error(self, workflow: str, message: str, **kwargs: Any) -> None:
pass
def warning(self, workflow: str, message: str, **kwargs: Any) -> None:
pass
def info(self, workflow: str, message: str, **kwargs: Any) -> None:
pass
def debug(self, workflow: str, message: str, **kwargs: Any) -> None:
pass
jlogger = JsonLogger(logger)
|
[
"clubbi_utils.json.dumps"
] |
[((855, 874), 'clubbi_utils.json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (865, 874), False, 'from clubbi_utils import json\n')]
|
from typing import List, Dict, Tuple
from collections import Counter
import random
import lesson3
from lesson5 import inverse_normal_cdf
import math
import matplotlib.pyplot as plt
from lesson4 import correlation, standard_deviation
from lesson3 import Matrix, Vector, make_matrix, vector_mean, subtract, magnitude, scalar_multiply
from lesson7_Gradient_Descent import gradient_step
# This is the method we use to group items into buckets according to the size
# of their value and the value range (width) of each bucket, bucket_size
def bucketize(point: float, bucket_size: float) -> float:
return bucket_size * math.floor(point / bucket_size)
def make_histogram(points: List[float], bucket_size: float) -> Dict[float, int]:
return Counter(bucketize(point, bucket_size) for point in points)
def plot_histogram(points: List[float], bucket_size: float, title: str = ""):
histogram = make_histogram(points, bucket_size)
plt.bar(histogram.keys(), histogram.values(), width=bucket_size)
plt.title(title)
plt.show()
random.seed(0)
uniform = [200 * random.random() - 100 for _ in range(10000)]
normal = [57 * inverse_normal_cdf(random.random())
for _ in range(10000)]
plot_histogram(uniform, 10, "Histogram of the uniform set")
plot_histogram(normal, 10, "Histogram of the normal set")
"""Επιστρέφει ενα τυχαιο δειγμα απο μια τυπικη κανονικη κατανομη"""
def random_normal() -> float:
return inverse_normal_cdf(random.random())
xs = [random_normal() for _ in range(1000)]
ys1 = [x + random_normal() / 2 for x in xs]
ys2 = [-x + random_normal() / 2 for x in xs]
plot_histogram(ys1, 10, 'plot ys1')
plot_histogram(ys2, 10, 'plot ys2')
"""Διάγραμμα διασποράς"""
plt.scatter(xs, ys1, marker='.', color='black', label='ys1')
plt.scatter(xs, ys2, marker='.', color='gray', label='ys2')
plt.xlabel('xs')
plt.ylabel('ys')
plt.legend(loc=9)
plt.title("Πολυ διαφορετικές απο κοινού κατανομές")
plt.show()
print(correlation(xs, ys1))
print(correlation(xs, ys2))
def correlation_matrix(data: List[Vector]) -> Matrix:
def correlation_ij(i: int, j: int) -> float:
return correlation(data[i], data[j])
return make_matrix(len(data), len(data), correlation_ij)
assert 0.89 < correlation(xs, ys1) < 0.91
assert -0.91 < correlation(xs, ys2) < -0.89
corr_data = [[math.floor(inverse_normal_cdf(random.random())) for _ in range(6000)] for _ in range(4)]
num_vectors = len(corr_data)
fig, ax = plt.subplots(num_vectors, num_vectors)
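# scatterplot matrix: off-diagonal cells plot series j against series i,
# while diagonal cells only carry the series label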
for i in range(num_vectors):
for j in range(num_vectors):
if i != j: ax[i][j].scatter(corr_data[j], corr_data[i])
else: ax[i][j].annotate("series" + str(i), (0.5, 0.5),
xycoords='axes fraction',
ha='center', va='center')
if i < num_vectors -1: ax[i][j].xaxis.set_visible(False)
if j > 0: ax[i][j].yaxis.set_visible(False)
ax[-1][-1].set_xlim(ax[0][-1].get_xlim())
ax[0][0].set_ylim(ax[0][1].get_ylim())
plt.show()
#NamedTuples
import datetime
stock_price ={'closing_price': 102.06,
'date': datetime.date(2014, 8, 29),
'symbol': 'AAPL'}
prices: Dict[datetime.date, float] = {}
from collections import namedtuple
StockPrice = namedtuple('StockPrice',['symbol', 'date', 'closing_price'])
price = StockPrice('MSFT', datetime.date(2018, 12, 14), 106.03)
assert price.symbol == 'MSFT'
assert price.closing_price == 106.03
from typing import NamedTuple
class StockPrice(NamedTuple):
symbol: str
date: datetime.date
closing_price: float
def is_high_tech(self) -> bool:
return self.symbol in ['MSFT', 'GOOG', 'FB', 'AMZN', 'AAPL']
price = StockPrice('MSFT', datetime.date(2018, 12, 14), 106.03)
assert price.symbol == 'MSFT'
assert price.closing_price == 106.03
assert price.is_high_tech()
from dataclasses import dataclass
@dataclass
class StockPrice2:
symbol: str
date: datetime.date
closing_price: float
def is_high_tech(self) -> bool:
return self.symbol in ['MSFT', 'GOOG', 'FB', 'AMZN', 'AAPL']
price2 = StockPrice2('MSFT', datetime.date(2018, 12, 14), 106.03)
assert price2.symbol == 'MSFT'
assert price2.closing_price == 106.03
assert price2.is_high_tech()
price2.closing_price /= 2
assert price2.closing_price == 53.015
from dateutil.parser import parse
def parse_row(row: List[str]) -> StockPrice:
symbol, date, closing_price = row
return StockPrice(symbol=symbol,
date=parse(date).date(),
closing_price=float(closing_price))
stock = parse_row(['MSFT', '2018-12-14', '106.03'])
from typing import Optional
import re
def try_parse_row(row: List[str]) -> Optional[StockPrice]:
symbol, date, closing_price = row
if not re.match(r"^[A-Z]+$", symbol):
return None
try:
date = parse(date).date()
except ValueError:
return None
try:
closing_price = float(closing_price)
except ValueError:
return None
return StockPrice(symbol, date, closing_price)
# Should return None for errors
assert try_parse_row(["MSFT0", "2018-12-14", "106.03"]) is None
assert try_parse_row(["MSFT", "2018-12--14", "106.03"]) is None
assert try_parse_row(["MSFT", "2018-12-14", "x"]) is None
# But should return same as before if data is good.
assert try_parse_row(["MSFT", "2018-12-14", "106.03"]) == stock
import csv
data: List[StockPrice] = []
with open('file.txt') as f:
reader = csv.reader(f)
for row in reader:
maybe_stock = try_parse_row(row)
if maybe_stock is None:
            print(f'skipping invalid row: {row}')
else:
data.append(maybe_stock)
print(maybe_stock)
data = [
StockPrice(symbol='MSFT',
date=datetime.date(2018, 12, 24),
closing_price=106.03,
#.....
)]
#max_aapl_price = max(stock_price.closing_price
# for stock_price in data
# if stock_price.symbol == 'AAPL')
from collections import defaultdict
#max_prices: Dict[str, float] = defaultdict(lambda: float('-inf'))
#for sp in data:
# symbol, closing_price = sp.symbol, sp.closing_price
# if closing_price > max_prices[symbol]:
# max_prices[symbol] = closing_price
prices: Dict[str, List[StockPrice]] = defaultdict(list)
for sp in data:
prices[sp.symbol].append(sp)
prices = {symbol: sorted(symbol_prices)
for symbol, symbol_prices in prices.items()}
def pct_change(yesterday: StockPrice, today: StockPrice) -> float:
return today.closing_price / yesterday.closing_price - 1
class DailyChange(NamedTuple):
symbol: str
date: datetime.date
pct_change: float
def day_over_day_changes(prices: List[StockPrice]) -> List[DailyChange]:
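    # zip the price list against itself shifted by one so that each
    # iteration yields a (yesterday, today) pair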
return [DailyChange(symbol=today.symbol,
date=today.date,
pct_change=pct_change(yesterday, today))
for yesterday, today in zip(prices[0:], prices[1:])]
all_changes = [change
for symbol_prices in prices.values()
for change in day_over_day_changes(symbol_prices)]
#max_change = max(all_changes, key=lambda change: change.pct_change)
#min_change = min(all_changes, key=lambda change: change.pct_change)
#changes_by_month: List[DailyChange] = {month: [] for month in range(1, 13)}
#for change in all_changes:
# changes_by_month[change.date.month].append(change)
#avg_daily_change = {month: sum(change.pct_change for change in changes) / len(changes)
#for month, changes in changes_by_month.items()}
a_to_b = lesson3.distance([63, 150], [67, 160])
a_to_c = lesson3.distance([63, 150], [70, 171])
b_to_c = lesson3.distance([67, 160], [70, 171])
a_to_b = lesson3.distance([160, 150], [170.2, 160])
a_to_c = lesson3.distance([160, 150], [177.8, 171])
b_to_c = lesson3.distance([170.2, 160], [177.8, 171])
def scale(data: List[Vector]) -> Tuple[Vector, Vector]:
dim = len(data[0])
means = vector_mean(data)
stdevs = [standard_deviation([vector[i] for vector in data])
for i in range(dim)]
return means, stdevs
vectors = [[-3, -1, 1], [-1, 0, 1], [1, 1, 1]]
means, stdevs = scale(vectors)
assert means == [-1, 0, 1]
assert stdevs == [2, 1, 0]
def rescale(data: List[Vector]) -> List[Vector]:
dim = len(data[0])
means, stdevs = scale(data)
rescaled = [v[:] for v in data]
for v in rescaled:
for i in range(dim):
if stdevs[i] > 0:
v[i] = (v[i] - means[i]) / stdevs[i]
return rescaled
means, stdevs = scale(rescale(vectors))
assert means == [0, 0, 1]
assert stdevs == [1, 1, 0]
import tqdm
for i in tqdm.tqdm(range(100), colour='blue'):
_ = [random.random() for _ in range(1000000)]
def primes_up_to(n: int) -> List[int]:
primes = [2]
with tqdm.trange(3, n) as t:
for i in t:
i_is_prime = not any( i % p == 0 for p in primes)
if i_is_prime:
primes.append(i)
t.set_description(f"{len(primes)} primes")
return primes
my_primes = primes_up_to(100_000)
def de_mean(data: List[Vector]) -> List[Vector]:
mean = vector_mean(data)
return [subtract(vector, mean) for vector in data]
def direction(w: Vector) -> Vector:
mag = magnitude(w)
return [w_i / mag for w_i in w]
def directional_variance(data: List[Vector], w: Vector) -> float:
w_dir = direction(w)
return sum(lesson3.dot(v, w_dir) ** 2 for v in data)
def directional_variance_gradient(data: List[Vector], w: Vector) -> Vector:
w_dir = direction(w)
return [sum(2 * lesson3.dot(v, w_dir) * v[i] for v in data)
for i in range(len(w))]
def first_principal_component(data: List[Vector],
n: int = 100,
step_size: float = 0.1) -> Vector:
guess = [1.0 for _ in data[0]]
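    # gradient ascent on the directional variance: step towards the
    # direction of maximal variance, then normalise the result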
with tqdm.trange(n) as t:
for _ in t:
dv = directional_variance(data, guess)
gradient = directional_variance_gradient(data, guess)
guess = gradient_step(guess, gradient, step_size)
t.set_description(f"dev: {dv:.3f}")
return direction(guess)
pca_data = [
[20.9666776351559,-13.1138080189357],
[22.7719907680008,-19.8890894944696],
[25.6687103160153,-11.9956004517219],
[18.0019794950564,-18.1989191165133],
[21.3967402102156,-10.8893126308196],
[0.443696899177716,-19.7221132386308],
[29.9198322142127,-14.0958668502427],
[19.0805843080126,-13.7888747608312],
[16.4685063521314,-11.2612927034291],
[21.4597664701884,-12.4740034586705],
[3.87655283720532,-17.575162461771],
[34.5713920556787,-10.705185165378],
[13.3732115747722,-16.7270274494424],
[20.7281704141919,-8.81165591556553],
[24.839851437942,-12.1240962157419],
[20.3019544741252,-12.8725060780898],
[21.9021426929599,-17.3225432396452],
[23.2285885715486,-12.2676568419045],
[28.5749111681851,-13.2616470619453],
[29.2957424128701,-14.6299928678996],
[15.2495527798625,-18.4649714274207],
[26.5567257400476,-9.19794350561966],
[30.1934232346361,-12.6272709845971],
[36.8267446011057,-7.25409849336718],
[32.157416823084,-10.4729534347553],
[5.85964365291694,-22.6573731626132],
[25.7426190674693,-14.8055803854566],
[16.237602636139,-16.5920595763719],
[14.7408608850568,-20.0537715298403],
[6.85907008242544,-18.3965586884781],
[26.5918329233128,-8.92664811750842],
[-11.2216019958228,-27.0519081982856],
[8.93593745011035,-20.8261235122575],
[24.4481258671796,-18.0324012215159],
[2.82048515404903,-22.4208457598703],
[30.8803004755948,-11.455358009593],
[15.4586738236098,-11.1242825084309],
[28.5332537090494,-14.7898744423126],
[40.4830293441052,-2.41946428697183],
[15.7563759125684,-13.5771266003795],
[19.3635588851727,-20.6224770470434],
[13.4212840786467,-19.0238227375766],
[7.77570680426702,-16.6385739839089],
[21.4865983854408,-15.290799330002],
[12.6392705930724,-23.6433305964301],
[12.4746151388128,-17.9720169566614],
[23.4572410437998,-14.602080545086],
[13.6878189833565,-18.9687408182414],
[15.4077465943441,-14.5352487124086],
[20.3356581548895,-10.0883159703702],
[20.7093833689359,-12.6939091236766],
[11.1032293684441,-14.1383848928755],
[17.5048321498308,-9.2338593361801],
[16.3303688220188,-15.1054735529158],
[26.6929062710726,-13.306030567991],
[34.4985678099711,-9.86199941278607],
[39.1374291499406,-10.5621430853401],
[21.9088956482146,-9.95198845621849],
[22.2367457578087,-17.2200123442707],
[10.0032784145577,-19.3557700653426],
[14.045833906665,-15.871937521131],
[15.5640911917607,-18.3396956121887],
[24.4771926581586,-14.8715313479137],
[26.533415556629,-14.693883922494],
[12.8722580202544,-21.2750596021509],
[24.4768291376862,-15.9592080959207],
[18.2230748567433,-14.6541444069985],
[4.1902148367447,-20.6144032528762],
[12.4332594022086,-16.6079789231489],
[20.5483758651873,-18.8512560786321],
[17.8180560451358,-12.5451990696752],
[11.0071081078049,-20.3938092335862],
[8.30560561422449,-22.9503944138682],
[33.9857852657284,-4.8371294974382],
[17.4376502239652,-14.5095976075022],
[29.0379635148943,-14.8461553663227],
[29.1344666599319,-7.70862921632672],
[32.9730697624544,-15.5839178785654],
[13.4211493998212,-20.150199857584],
[11.380538260355,-12.8619410359766],
[28.672631499186,-8.51866271785711],
[16.4296061111902,-23.3326051279759],
[25.7168371582585,-13.8899296143829],
[13.3185154732595,-17.8959160024249],
[3.60832478605376,-25.4023343597712],
[39.5445949652652,-11.466377647931],
[25.1693484426101,-12.2752652925707],
[25.2884257196471,-7.06710309184533],
[6.77665715793125,-22.3947299635571],
[20.1844223778907,-16.0427471125407],
[25.5506805272535,-9.33856532270204],
[25.1495682602477,-7.17350567090738],
[15.6978431006492,-17.5979197162642],
[37.42780451491,-10.843637288504],
[22.974620174842,-10.6171162611686],
[34.6327117468934,-9.26182440487384],
[34.7042513789061,-6.9630753351114],
[15.6563953929008,-17.2196961218915],
[25.2049825789225,-14.1592086208169]
]
print(first_principal_component(de_mean(pca_data)))
def project(v: Vector, w: Vector) -> Vector:
projection_length = lesson3.dot(v, w)
return scalar_multiply(projection_length, w)
def remove_projection_from_vector(v: Vector, w: Vector) -> Vector:
return subtract(v, project(v, w))
def remove_projection(data: List[Vector], w: Vector) -> List[Vector]:
return [remove_projection_from_vector(v, w) for v in data]
def pca(data: List[Vector], num_components: int) -> List[Vector]:
components: List[Vector] = []
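    # extract one principal component at a time, projecting each one
    # out of the data before finding the next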
for _ in range(num_components):
component = first_principal_component(data)
components.append(component)
data = remove_projection(data, component)
return components
def transform_vector(v: Vector, components: List[Vector]) -> Vector:
return [lesson3.dot(v, w) for w in components]
def transform(data: List[Vector], components: List[Vector]) -> List[Vector]:
return [transform_vector(v, components) for v in data]
|
[
"matplotlib.pyplot.title",
"csv.reader",
"collections.defaultdict",
"lesson3.dot",
"lesson3.subtract",
"lesson3.vector_mean",
"lesson4.standard_deviation",
"lesson4.correlation",
"random.seed",
"matplotlib.pyplot.subplots",
"lesson3.distance",
"dateutil.parser.parse",
"matplotlib.pyplot.show",
"tqdm.trange",
"matplotlib.pyplot.legend",
"datetime.date",
"re.match",
"random.random",
"matplotlib.pyplot.ylabel",
"lesson3.scalar_multiply",
"lesson7_Gradient_Descent.gradient_step",
"matplotlib.pyplot.scatter",
"lesson3.magnitude",
"math.floor",
"collections.namedtuple",
"matplotlib.pyplot.xlabel"
] |
[((1054, 1068), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (1065, 1068), False, 'import random\n'), ((1728, 1788), 'matplotlib.pyplot.scatter', 'plt.scatter', (['xs', 'ys1'], {'marker': '"""."""', 'color': '"""black"""', 'label': '"""ys1"""'}), "(xs, ys1, marker='.', color='black', label='ys1')\n", (1739, 1788), True, 'import matplotlib.pyplot as plt\n'), ((1789, 1848), 'matplotlib.pyplot.scatter', 'plt.scatter', (['xs', 'ys2'], {'marker': '"""."""', 'color': '"""gray"""', 'label': '"""ys2"""'}), "(xs, ys2, marker='.', color='gray', label='ys2')\n", (1800, 1848), True, 'import matplotlib.pyplot as plt\n'), ((1849, 1865), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""xs"""'], {}), "('xs')\n", (1859, 1865), True, 'import matplotlib.pyplot as plt\n'), ((1866, 1882), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ys"""'], {}), "('ys')\n", (1876, 1882), True, 'import matplotlib.pyplot as plt\n'), ((1883, 1900), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(9)'}), '(loc=9)\n', (1893, 1900), True, 'import matplotlib.pyplot as plt\n'), ((1901, 1952), 'matplotlib.pyplot.title', 'plt.title', (['"""Πολυ διαφορετικές απο κοινού κατανομές"""'], {}), "('Πολυ διαφορετικές απο κοινού κατανομές')\n", (1910, 1952), True, 'import matplotlib.pyplot as plt\n'), ((1953, 1963), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1961, 1963), True, 'import matplotlib.pyplot as plt\n'), ((2462, 2500), 'matplotlib.pyplot.subplots', 'plt.subplots', (['num_vectors', 'num_vectors'], {}), '(num_vectors, num_vectors)\n', (2474, 2500), True, 'import matplotlib.pyplot as plt\n'), ((3004, 3014), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3012, 3014), True, 'import matplotlib.pyplot as plt\n'), ((3256, 3317), 'collections.namedtuple', 'namedtuple', (['"""StockPrice"""', "['symbol', 'date', 'closing_price']"], {}), "('StockPrice', ['symbol', 'date', 'closing_price'])\n", (3266, 3317), False, 'from collections import namedtuple\n'), ((6359, 6376), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6370, 6376), False, 'from collections import defaultdict\n'), ((7649, 7687), 'lesson3.distance', 'lesson3.distance', (['[63, 150]', '[67, 160]'], {}), '([63, 150], [67, 160])\n', (7665, 7687), False, 'import lesson3\n'), ((7697, 7735), 'lesson3.distance', 'lesson3.distance', (['[63, 150]', '[70, 171]'], {}), '([63, 150], [70, 171])\n', (7713, 7735), False, 'import lesson3\n'), ((7745, 7783), 'lesson3.distance', 'lesson3.distance', (['[67, 160]', '[70, 171]'], {}), '([67, 160], [70, 171])\n', (7761, 7783), False, 'import lesson3\n'), ((7793, 7835), 'lesson3.distance', 'lesson3.distance', (['[160, 150]', '[170.2, 160]'], {}), '([160, 150], [170.2, 160])\n', (7809, 7835), False, 'import lesson3\n'), ((7845, 7887), 'lesson3.distance', 'lesson3.distance', (['[160, 150]', '[177.8, 171]'], {}), '([160, 150], [177.8, 171])\n', (7861, 7887), False, 'import lesson3\n'), ((7897, 7941), 'lesson3.distance', 'lesson3.distance', (['[170.2, 160]', '[177.8, 171]'], {}), '([170.2, 160], [177.8, 171])\n', (7913, 7941), False, 'import lesson3\n'), ((1020, 1036), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (1029, 1036), True, 'import matplotlib.pyplot as plt\n'), ((1041, 1051), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1049, 1051), True, 'import matplotlib.pyplot as plt\n'), ((1970, 1990), 'lesson4.correlation', 'correlation', (['xs', 'ys1'], {}), '(xs, ys1)\n', (1981, 1990), False, 'from lesson4 import correlation, standard_deviation\n'), ((1998, 
2018), 'lesson4.correlation', 'correlation', (['xs', 'ys2'], {}), '(xs, ys2)\n', (2009, 2018), False, 'from lesson4 import correlation, standard_deviation\n'), ((2247, 2267), 'lesson4.correlation', 'correlation', (['xs', 'ys1'], {}), '(xs, ys1)\n', (2258, 2267), False, 'from lesson4 import correlation, standard_deviation\n'), ((2290, 2310), 'lesson4.correlation', 'correlation', (['xs', 'ys2'], {}), '(xs, ys2)\n', (2301, 2310), False, 'from lesson4 import correlation, standard_deviation\n'), ((3107, 3133), 'datetime.date', 'datetime.date', (['(2014)', '(8)', '(29)'], {}), '(2014, 8, 29)\n', (3120, 3133), False, 'import datetime\n'), ((3343, 3370), 'datetime.date', 'datetime.date', (['(2018)', '(12)', '(14)'], {}), '(2018, 12, 14)\n', (3356, 3370), False, 'import datetime\n'), ((3711, 3738), 'datetime.date', 'datetime.date', (['(2018)', '(12)', '(14)'], {}), '(2018, 12, 14)\n', (3724, 3738), False, 'import datetime\n'), ((4113, 4140), 'datetime.date', 'datetime.date', (['(2018)', '(12)', '(14)'], {}), '(2018, 12, 14)\n', (4126, 4140), False, 'import datetime\n'), ((5489, 5502), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (5499, 5502), False, 'import csv\n'), ((8035, 8052), 'lesson3.vector_mean', 'vector_mean', (['data'], {}), '(data)\n', (8046, 8052), False, 'from lesson3 import Matrix, Vector, make_matrix, vector_mean, subtract, magnitude, scalar_multiply\n'), ((9219, 9236), 'lesson3.vector_mean', 'vector_mean', (['data'], {}), '(data)\n', (9230, 9236), False, 'from lesson3 import Matrix, Vector, make_matrix, vector_mean, subtract, magnitude, scalar_multiply\n'), ((9340, 9352), 'lesson3.magnitude', 'magnitude', (['w'], {}), '(w)\n', (9349, 9352), False, 'from lesson3 import Matrix, Vector, make_matrix, vector_mean, subtract, magnitude, scalar_multiply\n'), ((14120, 14137), 'lesson3.dot', 'lesson3.dot', (['v', 'w'], {}), '(v, w)\n', (14131, 14137), False, 'import lesson3\n'), ((14149, 14186), 'lesson3.scalar_multiply', 'scalar_multiply', (['projection_length', 'w'], {}), '(projection_length, w)\n', (14164, 14186), False, 'from lesson3 import Matrix, Vector, make_matrix, vector_mean, subtract, magnitude, scalar_multiply\n'), ((630, 661), 'math.floor', 'math.floor', (['(point / bucket_size)'], {}), '(point / bucket_size)\n', (640, 661), False, 'import math\n'), ((1475, 1490), 'random.random', 'random.random', ([], {}), '()\n', (1488, 1490), False, 'import random\n'), ((2140, 2169), 'lesson4.correlation', 'correlation', (['data[i]', 'data[j]'], {}), '(data[i], data[j])\n', (2151, 2169), False, 'from lesson4 import correlation, standard_deviation\n'), ((4781, 4809), 're.match', 're.match', (['"""^[A-Z]+$"""', 'symbol'], {}), "('^[A-Z]+$', symbol)\n", (4789, 4809), False, 'import re\n'), ((8067, 8117), 'lesson4.standard_deviation', 'standard_deviation', (['[vector[i] for vector in data]'], {}), '([vector[i] for vector in data])\n', (8085, 8117), False, 'from lesson4 import correlation, standard_deviation\n'), ((8774, 8789), 'random.random', 'random.random', ([], {}), '()\n', (8787, 8789), False, 'import random\n'), ((8882, 8899), 'tqdm.trange', 'tqdm.trange', (['(3)', 'n'], {}), '(3, n)\n', (8893, 8899), False, 'import tqdm\n'), ((9249, 9271), 'lesson3.subtract', 'subtract', (['vector', 'mean'], {}), '(vector, mean)\n', (9257, 9271), False, 'from lesson3 import Matrix, Vector, make_matrix, vector_mean, subtract, magnitude, scalar_multiply\n'), ((9947, 9961), 'tqdm.trange', 'tqdm.trange', (['n'], {}), '(n)\n', (9958, 9961), False, 'import tqdm\n'), ((14811, 14828), 'lesson3.dot', 
'lesson3.dot', (['v', 'w'], {}), '(v, w)\n', (14822, 14828), False, 'import lesson3\n'), ((1086, 1101), 'random.random', 'random.random', ([], {}), '()\n', (1099, 1101), False, 'import random\n'), ((1165, 1180), 'random.random', 'random.random', ([], {}), '()\n', (1178, 1180), False, 'import random\n'), ((5796, 5823), 'datetime.date', 'datetime.date', (['(2018)', '(12)', '(24)'], {}), '(2018, 12, 24)\n', (5809, 5823), False, 'import datetime\n'), ((10125, 10166), 'lesson7_Gradient_Descent.gradient_step', 'gradient_step', (['guess', 'gradient', 'step_size'], {}), '(guess, gradient, step_size)\n', (10138, 10166), False, 'from lesson7_Gradient_Descent import gradient_step\n'), ((2364, 2379), 'random.random', 'random.random', ([], {}), '()\n', (2377, 2379), False, 'import random\n'), ((4858, 4869), 'dateutil.parser.parse', 'parse', (['date'], {}), '(date)\n', (4863, 4869), False, 'from dateutil.parser import parse\n'), ((9497, 9518), 'lesson3.dot', 'lesson3.dot', (['v', 'w_dir'], {}), '(v, w_dir)\n', (9508, 9518), False, 'import lesson3\n'), ((4499, 4510), 'dateutil.parser.parse', 'parse', (['date'], {}), '(date)\n', (4504, 4510), False, 'from dateutil.parser import parse\n'), ((9662, 9683), 'lesson3.dot', 'lesson3.dot', (['v', 'w_dir'], {}), '(v, w_dir)\n', (9673, 9683), False, 'import lesson3\n')]
|
# -*- coding: utf-8 -*-
import os
import subprocess
import sys
import platform
from ruamel.yaml import YAML
from ruamel.yaml.compat import StringIO
class MyYAML(YAML):
def dump(self, data, stream=None, **kw):
inefficient = False
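        # ruamel's YAML.dump requires a stream; buffer into a StringIO
        # and return the dumped text when no stream is given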
if stream is None:
inefficient = True
stream = StringIO()
YAML.dump(self, data, stream, **kw)
if inefficient:
return stream.getvalue()
def runCommand(command: list):
proc = subprocess.Popen(command, stderr=subprocess.PIPE)
out, err = proc.communicate()
if err:
msg = err.decode("utf-8")
print(msg, file=sys.stderr)
return proc.returncode
def uic(inputFile: str, outputFile: str):
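    # Windows installs use the pyside2-uic wrapper; elsewhere fall back to
    # Qt's uic with Python code generation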
if platform.system().lower() == 'windows':
command: list = ['pyside2-uic', inputFile, '-o', outputFile]
else:
command: list = ['uic', '-g', 'python', inputFile, '-o', outputFile]
return runCommand(command)
def rcc(inputFile: str, outputFile: str):
if platform.system().lower() == 'windows':
command: list = ['pyside2-rcc', inputFile, '-o', outputFile]
else:
command: list = ['rcc', '-g', 'python', inputFile, '-o', outputFile]
return runCommand(command)
def clean(verbose: bool = True):
with open('build.yml', 'r', encoding='utf-8') as file:
for i in MyYAML().load(file.read()):
try:
os.remove(i['pyName'])
except FileNotFoundError:
pass
if verbose is True:
print('cleaned {0}'.format(i['pyName']))
def main(verbose: bool = True):
with open('build.yml', 'r', encoding='utf-8') as file:
for i in MyYAML().load(file.read()):
if i['type'] == 'ui':
try:
os.remove(i['pyName'])
except FileNotFoundError:
pass
uic(i['name'], i['pyName'])
if verbose is True:
print('{0} > {1}'.format(i['name'], i['pyName']))
elif i['type'] == 'qrc':
try:
os.remove(i['pyName'])
except FileNotFoundError:
pass
rcc(i['name'], i['pyName'])
if verbose is True:
print('{0} > {1}'.format(i['name'], i['pyName']))
if __name__ == '__main__':
main()
|
[
"subprocess.Popen",
"os.remove",
"ruamel.yaml.compat.StringIO",
"ruamel.yaml.YAML.dump",
"platform.system"
] |
[((485, 534), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stderr': 'subprocess.PIPE'}), '(command, stderr=subprocess.PIPE)\n', (501, 534), False, 'import subprocess\n'), ((344, 379), 'ruamel.yaml.YAML.dump', 'YAML.dump', (['self', 'data', 'stream'], {}), '(self, data, stream, **kw)\n', (353, 379), False, 'from ruamel.yaml import YAML\n'), ((325, 335), 'ruamel.yaml.compat.StringIO', 'StringIO', ([], {}), '()\n', (333, 335), False, 'from ruamel.yaml.compat import StringIO\n'), ((729, 746), 'platform.system', 'platform.system', ([], {}), '()\n', (744, 746), False, 'import platform\n'), ((1007, 1024), 'platform.system', 'platform.system', ([], {}), '()\n', (1022, 1024), False, 'import platform\n'), ((1406, 1428), 'os.remove', 'os.remove', (["i['pyName']"], {}), "(i['pyName'])\n", (1415, 1428), False, 'import os\n'), ((1790, 1812), 'os.remove', 'os.remove', (["i['pyName']"], {}), "(i['pyName'])\n", (1799, 1812), False, 'import os\n'), ((2108, 2130), 'os.remove', 'os.remove', (["i['pyName']"], {}), "(i['pyName'])\n", (2117, 2130), False, 'import os\n')]
|
import li
import json
# testing util
if __name__ == "__main__":
# execute only if run as a script
users=["Le_Scratch","justmaker","kazeriahm","Khrok","fauzi061089", "Vladismen", "kapuso", "Vadum-tv", "El-Nino9", "mathemagician18", "jongy", "shtrubi", "Teju12345", "papasi", "dalmatinac101"]
for line in li.stream(users):
print(li.game_to_message(line))
|
[
"li.game_to_message",
"li.stream"
] |
[((318, 334), 'li.stream', 'li.stream', (['users'], {}), '(users)\n', (327, 334), False, 'import li\n'), ((350, 374), 'li.game_to_message', 'li.game_to_message', (['line'], {}), '(line)\n', (368, 374), False, 'import li\n')]
|
import os, sys
import unittest
from tilde.core.api import API
from tilde.core.settings import BASE_DIR, EXAMPLE_DIR
class Test_API(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.sample = API()
def test_count_classifiers(self):
available_classifiers = []
path = os.path.realpath(BASE_DIR + '/../classifiers')
for classifierpath in os.listdir(path):
if os.path.isfile(os.path.join(path, classifierpath)) and classifierpath.endswith('.py') and classifierpath != '__init__.py':
available_classifiers.append(classifierpath)
self.assertEqual(len(self.sample.Classifiers), len(available_classifiers),
"Expected to have %s, but got %s modules. May be unused classifier occured since?" % (len(available_classifiers), len(self.sample.Classifiers)))
def test_formula(self):
        self.assertEqual(self.sample.formula(['H', 'O', 'C', 'H', 'H', 'C', 'H', 'H', 'H']), 'C2H6O', "Formula was erroneously generated!")
def test_savvyize_simple(self):
path = os.path.join(EXAMPLE_DIR, 'CRYSTAL')
found = self.sample.savvyize(path)
self.assertEqual(len(found), 3,
"Unexpected number of files has been found in %s: %s. May be number of files has been changed since?" % (path, len(found)))
def test_savvyize_recursive(self):
path = os.path.join(EXAMPLE_DIR, 'VASP')
found = self.sample.savvyize(path, recursive=True)
self.assertEqual(len(found), 2,
"Unexpected number of files has been found in %s: %s. May be number of files has been changed since?" % (path, len(found)))
def test_savvyize_stemma(self):
path = os.path.join(BASE_DIR, 'co')
found = self.sample.savvyize(path, recursive=True, stemma=True)
        found = [module for module in found if not module.endswith('.pyc')]  # NB: Python 3 is accounted for
self.assertEqual(len(found), 2,
"Unexpected number of files has been found in %s: %s. May be number of files has been changed since?" % (path, len(found)))
|
[
"os.path.realpath",
"tilde.core.api.API",
"os.path.join",
"os.listdir"
] |
[((218, 223), 'tilde.core.api.API', 'API', ([], {}), '()\n', (221, 223), False, 'from tilde.core.api import API\n'), ((313, 359), 'os.path.realpath', 'os.path.realpath', (["(BASE_DIR + '/../classifiers')"], {}), "(BASE_DIR + '/../classifiers')\n", (329, 359), False, 'import os, sys\n'), ((390, 406), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (400, 406), False, 'import os, sys\n'), ((1071, 1107), 'os.path.join', 'os.path.join', (['EXAMPLE_DIR', '"""CRYSTAL"""'], {}), "(EXAMPLE_DIR, 'CRYSTAL')\n", (1083, 1107), False, 'import os, sys\n'), ((1382, 1415), 'os.path.join', 'os.path.join', (['EXAMPLE_DIR', '"""VASP"""'], {}), "(EXAMPLE_DIR, 'VASP')\n", (1394, 1415), False, 'import os, sys\n'), ((1703, 1731), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""co"""'], {}), "(BASE_DIR, 'co')\n", (1715, 1731), False, 'import os, sys\n'), ((438, 472), 'os.path.join', 'os.path.join', (['path', 'classifierpath'], {}), '(path, classifierpath)\n', (450, 472), False, 'import os, sys\n')]
|
"""Records new images for workers and uploads targets."""
import aiohttp
import aioredis
from .app import create_app
class Service:
def __init__(self, port, imagery_host, imagery_port, interop_host,
interop_port, redis_host, redis_port, max_auto_targets):
"""Create a new image-rec-master service."""
self._port = port
app = create_app(
redis_url=f'redis://{redis_host}:{redis_port}',
imagery_url=f'http://{imagery_host}:{imagery_port}',
interop_url=f'http://{interop_host}:{interop_port}',
max_auto_targets=max_auto_targets
)
self._app = app
self._runner = aiohttp.web.AppRunner(self._app)
async def start(self):
"""Start the service."""
app = self._app
runner = self._runner
app['http_client'] = aiohttp.ClientSession(loop=app.loop)
app['redis'] = await aioredis.create_redis_pool(
app.get('redis_url'), minsize=5, maxsize=10, loop=app.loop
)
await runner.setup()
site = aiohttp.web.TCPSite(runner, '0.0.0.0', self._port)
await site.start()
async def stop(self):
"""Stop the service."""
app = self._app
runner = self._runner
await runner.cleanup()
app['redis'].close()
await app['redis'].wait_closed()
await app['http_client'].close()
|
[
"aiohttp.web.AppRunner",
"aiohttp.ClientSession",
"aiohttp.web.TCPSite"
] |
[((681, 713), 'aiohttp.web.AppRunner', 'aiohttp.web.AppRunner', (['self._app'], {}), '(self._app)\n', (702, 713), False, 'import aiohttp\n'), ((859, 895), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'loop': 'app.loop'}), '(loop=app.loop)\n', (880, 895), False, 'import aiohttp\n'), ((1080, 1130), 'aiohttp.web.TCPSite', 'aiohttp.web.TCPSite', (['runner', '"""0.0.0.0"""', 'self._port'], {}), "(runner, '0.0.0.0', self._port)\n", (1099, 1130), False, 'import aiohttp\n')]
|
import json, os
from flask import current_app, url_for
from flask.ext.script import Command
from flask.ext.security.confirmable import confirm_user
from flask_application.models import FlaskDocument
from flask_application.profiles.models import Profile, ImageTable
from flask_application.guides.models import Guide, Step
class ResetDB(Command):
"""Drops all tables and recreates them"""
def run(self, **kwargs):
self.drop_collections()
@staticmethod
def drop_collections():
for klass in FlaskDocument.all_subclasses():
klass.drop_collection()
class PopulateDB(Command):
"""Fills in predefined data to DB"""
users = (
('user', '<EMAIL>', 'password', ['user'], True),
)
def run(self, **kwargs):
self.create_roles()
self.create_users()
self.create_profiles()
self.create_guides()
@staticmethod
def create_roles():
for role in ('admin', 'editor', 'author', 'user'):
current_app.user_datastore.create_role(name=role, description=role)
current_app.user_datastore.commit()
@staticmethod
def create_users():
for u in PopulateDB.users:
user = current_app.user_datastore.create_user(
username=u[0],
email=u[1],
password=u[2],
roles=u[3],
active=u[4]
)
confirm_user(user)
current_app.user_datastore.commit()
@staticmethod
def create_profiles():
for u in PopulateDB.users:
profile = Profile(username=u[0])
profile = Profile.initialize_to_default(profile)
profile.is_example = True
profile.save()
@staticmethod
def create_guides():
guides = PopulateDB._get_guides_data()
for g in guides:
g.save()
@staticmethod
def _get_guides_data():
with open('resources/guides.json', 'r') as f:
content = f.read()
guides = json.loads(content)['guides']
out = []
for g in guides:
newGuide = Guide(title=g['title'], slug=g['slug'], abstract=g['abstract'])
for s in g['steps']:
if s['img']:
newStep = Step(body=s['body'], img=s['img'])
else:
newStep = Step(body=s['body'])
newGuide.steps.append(newStep)
out.append(newGuide)
return out
class UpdateDB(PopulateDB):
def run(self, **kwargs):
self.update_guides()
@staticmethod
def update_guides():
Guide.drop_collection()
guides = UpdateDB._get_guides_data()
for g in guides:
g.save()
class AddDefaultProfile(PopulateDB):
def run(self, **kwargs):
self._add_defaultprofile()
@staticmethod
def _add_defaultprofile():
profile = Profile.objects(is_default=True)
if profile:
profile.delete()
profile = Profile.create_default_profile()
|
[
"flask.current_app.user_datastore.commit",
"flask_application.models.FlaskDocument.all_subclasses",
"flask_application.profiles.models.Profile.create_default_profile",
"flask_application.profiles.models.Profile",
"json.loads",
"flask_application.guides.models.Guide",
"flask_application.guides.models.Guide.drop_collection",
"flask_application.profiles.models.Profile.initialize_to_default",
"flask.current_app.user_datastore.create_user",
"flask_application.guides.models.Step",
"flask_application.profiles.models.Profile.objects",
"flask.ext.security.confirmable.confirm_user",
"flask.current_app.user_datastore.create_role"
] |
[((525, 555), 'flask_application.models.FlaskDocument.all_subclasses', 'FlaskDocument.all_subclasses', ([], {}), '()\n', (553, 555), False, 'from flask_application.models import FlaskDocument\n'), ((1091, 1126), 'flask.current_app.user_datastore.commit', 'current_app.user_datastore.commit', ([], {}), '()\n', (1124, 1126), False, 'from flask import current_app, url_for\n'), ((2639, 2662), 'flask_application.guides.models.Guide.drop_collection', 'Guide.drop_collection', ([], {}), '()\n', (2660, 2662), False, 'from flask_application.guides.models import Guide, Step\n'), ((2924, 2956), 'flask_application.profiles.models.Profile.objects', 'Profile.objects', ([], {'is_default': '(True)'}), '(is_default=True)\n', (2939, 2956), False, 'from flask_application.profiles.models import Profile, ImageTable\n'), ((3025, 3057), 'flask_application.profiles.models.Profile.create_default_profile', 'Profile.create_default_profile', ([], {}), '()\n', (3055, 3057), False, 'from flask_application.profiles.models import Profile, ImageTable\n'), ((1015, 1082), 'flask.current_app.user_datastore.create_role', 'current_app.user_datastore.create_role', ([], {'name': 'role', 'description': 'role'}), '(name=role, description=role)\n', (1053, 1082), False, 'from flask import current_app, url_for\n'), ((1224, 1334), 'flask.current_app.user_datastore.create_user', 'current_app.user_datastore.create_user', ([], {'username': 'u[0]', 'email': 'u[1]', 'password': 'u[2]', 'roles': 'u[3]', 'active': 'u[4]'}), '(username=u[0], email=u[1], password=\n u[2], roles=u[3], active=u[4])\n', (1262, 1334), False, 'from flask import current_app, url_for\n'), ((1436, 1454), 'flask.ext.security.confirmable.confirm_user', 'confirm_user', (['user'], {}), '(user)\n', (1448, 1454), False, 'from flask.ext.security.confirmable import confirm_user\n'), ((1468, 1503), 'flask.current_app.user_datastore.commit', 'current_app.user_datastore.commit', ([], {}), '()\n', (1501, 1503), False, 'from flask import current_app, url_for\n'), ((1607, 1629), 'flask_application.profiles.models.Profile', 'Profile', ([], {'username': 'u[0]'}), '(username=u[0])\n', (1614, 1629), False, 'from flask_application.profiles.models import Profile, ImageTable\n'), ((1652, 1690), 'flask_application.profiles.models.Profile.initialize_to_default', 'Profile.initialize_to_default', (['profile'], {}), '(profile)\n', (1681, 1690), False, 'from flask_application.profiles.models import Profile, ImageTable\n'), ((2042, 2061), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (2052, 2061), False, 'import json, os\n'), ((2137, 2200), 'flask_application.guides.models.Guide', 'Guide', ([], {'title': "g['title']", 'slug': "g['slug']", 'abstract': "g['abstract']"}), "(title=g['title'], slug=g['slug'], abstract=g['abstract'])\n", (2142, 2200), False, 'from flask_application.guides.models import Guide, Step\n'), ((2293, 2327), 'flask_application.guides.models.Step', 'Step', ([], {'body': "s['body']", 'img': "s['img']"}), "(body=s['body'], img=s['img'])\n", (2297, 2327), False, 'from flask_application.guides.models import Guide, Step\n'), ((2380, 2400), 'flask_application.guides.models.Step', 'Step', ([], {'body': "s['body']"}), "(body=s['body'])\n", (2384, 2400), False, 'from flask_application.guides.models import Guide, Step\n')]
|
"""
    CentralService.auth.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Contains all the forms for the CentralService authorization functions.
The two forms that are used are for login and for creating a new user.
@copyright: (c) 2016 SynergyLabs
@license: UCSD License. See License file for details.
"""
from flask_wtf import Form
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired, Email, Length, EqualTo
from wtforms import ValidationError
from ..models.cs_models import User
class LoginForm(Form):
email = StringField('Email', validators=[DataRequired(), Email()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class RegistrationForm(Form):
password = PasswordField('Password',
validators=[DataRequired(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('<PASSWORD>', validators=[DataRequired()])
submit = SubmitField('Register')
def validate_email(self, field):
if User.objects(email=field.data).first() is not None:
raise ValidationError('Email already registered.')
|
[
"wtforms.ValidationError",
"wtforms.validators.Email",
"wtforms.BooleanField",
"wtforms.SubmitField",
"wtforms.validators.EqualTo",
"wtforms.validators.DataRequired"
] |
[((718, 751), 'wtforms.BooleanField', 'BooleanField', (['"""Keep me logged in"""'], {}), "('Keep me logged in')\n", (730, 751), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField\n'), ((765, 786), 'wtforms.SubmitField', 'SubmitField', (['"""Log In"""'], {}), "('Log In')\n", (776, 786), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField\n'), ((1058, 1081), 'wtforms.SubmitField', 'SubmitField', (['"""Register"""'], {}), "('Register')\n", (1069, 1081), False, 'from wtforms import StringField, PasswordField, BooleanField, SubmitField\n'), ((1201, 1245), 'wtforms.ValidationError', 'ValidationError', (['"""Email already registered."""'], {}), "('Email already registered.')\n", (1216, 1245), False, 'from wtforms import ValidationError\n'), ((604, 618), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (616, 618), False, 'from wtforms.validators import DataRequired, Email, Length, EqualTo\n'), ((620, 627), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (625, 627), False, 'from wtforms.validators import DataRequired, Email, Length, EqualTo\n'), ((683, 697), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (695, 697), False, 'from wtforms.validators import DataRequired, Email, Length, EqualTo\n'), ((901, 915), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (913, 915), False, 'from wtforms.validators import DataRequired, Email, Length, EqualTo\n'), ((917, 969), 'wtforms.validators.EqualTo', 'EqualTo', (['"""password2"""'], {'message': '"""Passwords must match"""'}), "('password2', message='Passwords must match')\n", (924, 969), False, 'from wtforms.validators import DataRequired, Email, Length, EqualTo\n'), ((1028, 1042), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1040, 1042), False, 'from wtforms.validators import DataRequired, Email, Length, EqualTo\n')]
|
# Generated by Django 2.1.7 on 2019-04-10 01:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movies', '0005_auto_20190410_0659'),
]
operations = [
migrations.AddField(
model_name='movies',
name='ph_credit',
field=models.IntegerField(blank=True, null=True),
),
]
|
[
"django.db.models.IntegerField"
] |
[((336, 378), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (355, 378), False, 'from django.db import migrations, models\n')]
|
import sys
import unittest
try:
from StringIO import StringIO
except:
from io import StringIO
class TestControllerPlugin(unittest.TestCase):
# Factory
def test_make_cache_controllerplugin_factory(self):
from supervisor_cache import controllerplugin
controller = DummyController()
plugin = controllerplugin.make_cache_controllerplugin(controller)
self.assertEqual(controller, plugin.ctl)
# Constructor
def test_ctor_assigns_controller(self):
controller = DummyController()
plugin = self.makeOne(controller)
self.assertEqual(controller, plugin.ctl)
# cache_clear
def test_do_cache_clear(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = dict(foo='bar')
plugin.do_cache_clear('')
self.assertEqual({}, cache_interface.cache)
def test_do_cache_clear_accepts_no_args(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_clear('arg')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_clear'))
def test_help_cache_clear(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.help_cache_clear()
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_clear'))
# cache_count
def test_do_cache_count(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = dict(foo='bar', baz='qux')
plugin.do_cache_count('')
output = controller.sio.getvalue()
self.assertEqual('2', output)
def test_do_cache_count_accepts_no_args(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_count('arg')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_count'))
def test_help_cache_count(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.help_cache_count()
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_count'))
# cache_delete
def test_do_cache_delete(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = dict(foo='bar', baz='qux')
plugin.do_cache_delete('foo')
self.assertTrue('foo' not in cache_interface.cache.keys())
self.assertEqual('qux', cache_interface.cache['baz'])
def test_do_cache_delete_accepts_a_quoted_arg(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = {'f o o': 'bar', 'baz': 'qux'}
plugin.do_cache_delete('"f o o"')
self.assertTrue('f o o' not in cache_interface.cache.keys())
def test_do_cache_delete_accepts_only_one_arg(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_delete('first second')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_delete'))
def test_help_cache_delete(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.help_cache_delete()
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_delete <key>'))
# cache_fetch
def test_do_cache_fetch(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = dict(foo='bar')
plugin.do_cache_fetch('foo')
out = controller.sio.getvalue()
self.assertEqual("'bar'", out)
def test_do_cache_fetch_accepts_a_quoted_arg(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = {'f o o': 'bar'}
plugin.do_cache_fetch('"f o o"')
out = controller.sio.getvalue()
self.assertEqual("'bar'", out)
def test_do_cache_fetch_accepts_only_one_arg(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_fetch('first second')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_fetch'))
def test_help_cache_fetch(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.help_cache_fetch()
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_fetch <key>'))
# cache_keys
def test_do_cache_keys(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = dict(foo='bar', baz='qux')
plugin.do_cache_keys('')
output = controller.sio.getvalue()
self.assertTrue('foo' in output)
self.assertTrue('baz' in output)
def test_do_cache_keys_accepts_no_args(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_keys('arg')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_keys'))
def test_help_cache_keys(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.help_cache_keys()
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_keys'))
# cache_store
def test_do_cache_store(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = {}
plugin.do_cache_store('foo bar')
self.assertEqual('bar', cache_interface.cache['foo'])
def test_do_cache_store_accepts_a_quoted_key(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = {}
plugin.do_cache_store('"foo bar" baz')
self.assertEqual('baz', cache_interface.cache['foo bar'])
def test_do_cache_store_accepts_a_quoted_value(self):
controller = DummyController()
plugin = self.makeOne(controller)
cache_interface = plugin.cache
cache_interface.cache = {}
plugin.do_cache_store('foo "bar baz"')
self.assertEqual('bar baz', cache_interface.cache['foo'])
def test_do_cache_store_accepts_no_less_than_two_args(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_store('first')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_store'))
def test_do_cache_store_accepts_no_more_than_two_args(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.do_cache_store('first second third')
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_store'))
def test_help_cache_store(self):
controller = DummyController()
plugin = self.makeOne(controller)
plugin.help_cache_store()
out = controller.sio.getvalue()
self.assertTrue(out.startswith('cache_store <key> <value>'))
# Test Helpers
def makeOne(self, *arg, **kw):
return self.getTargetClass()(*arg, **kw)
def getTargetClass(self):
from supervisor_cache.controllerplugin import CacheControllerPlugin
return CacheControllerPlugin
class DummyController:
def __init__(self):
self.sio = StringIO()
def output(self, out):
assert(isinstance(out, str))
self.sio.write(out)
def get_server_proxy(self, namespace=None):
if namespace == 'cache':
from supervisor.tests.base import DummySupervisor
supervisor = DummySupervisor()
from supervisor_cache.rpcinterface import CacheNamespaceRPCInterface
cache = CacheNamespaceRPCInterface(supervisor)
return cache
def test_suite():
return unittest.findTestCases(sys.modules[__name__])
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
|
[
"unittest.main",
"io.StringIO",
"supervisor_cache.controllerplugin.make_cache_controllerplugin",
"supervisor_cache.rpcinterface.CacheNamespaceRPCInterface",
"unittest.findTestCases",
"supervisor.tests.base.DummySupervisor"
] |
[((8366, 8411), 'unittest.findTestCases', 'unittest.findTestCases', (['sys.modules[__name__]'], {}), '(sys.modules[__name__])\n', (8388, 8411), False, 'import unittest\n'), ((8444, 8483), 'unittest.main', 'unittest.main', ([], {'defaultTest': '"""test_suite"""'}), "(defaultTest='test_suite')\n", (8457, 8483), False, 'import unittest\n'), ((334, 390), 'supervisor_cache.controllerplugin.make_cache_controllerplugin', 'controllerplugin.make_cache_controllerplugin', (['controller'], {}), '(controller)\n', (378, 390), False, 'from supervisor_cache import controllerplugin\n'), ((7877, 7887), 'io.StringIO', 'StringIO', ([], {}), '()\n', (7885, 7887), False, 'from io import StringIO\n'), ((8150, 8167), 'supervisor.tests.base.DummySupervisor', 'DummySupervisor', ([], {}), '()\n', (8165, 8167), False, 'from supervisor.tests.base import DummySupervisor\n'), ((8270, 8308), 'supervisor_cache.rpcinterface.CacheNamespaceRPCInterface', 'CacheNamespaceRPCInterface', (['supervisor'], {}), '(supervisor)\n', (8296, 8308), False, 'from supervisor_cache.rpcinterface import CacheNamespaceRPCInterface\n')]
|
''' Pytest corresponding to calc_norm_v '''
# import numpy as np
import shortrate_model_vasicek as vas
import interest_rate_capfloor_convenience as intconv
mdl = vas.short_rate_vasicek(kappa=0.86, theta=0.08, sigma=0.01, r0=0.06,
norm_method=intconv.calc_v_norm_1d, dbg=True)
def test_norm():
    ''' initial norm test '''
assert round(1e6*mdl.calc_norm_v(t=0.0, t0=0.25, t1=0.5, dbg=True), 3) == 1.028
def test_pt_05():
''' initial pricing test '''
assert round(mdl.price_zero(0.5), 4) == 0.9686
def test_put():
''' init test for put calculation '''
x1 = mdl.price_zero(0.25)
x2 = mdl.price_zero(0.5)
assert round(1e4*mdl.calc_european_put(strike=(x2/x1), t0=0.25, t1=0.5), 3) == 3.918
|
[
"shortrate_model_vasicek.short_rate_vasicek"
] |
[((163, 280), 'shortrate_model_vasicek.short_rate_vasicek', 'vas.short_rate_vasicek', ([], {'kappa': '(0.86)', 'theta': '(0.08)', 'sigma': '(0.01)', 'r0': '(0.06)', 'norm_method': 'intconv.calc_v_norm_1d', 'dbg': '(True)'}), '(kappa=0.86, theta=0.08, sigma=0.01, r0=0.06,\n norm_method=intconv.calc_v_norm_1d, dbg=True)\n', (185, 280), True, 'import shortrate_model_vasicek as vas\n')]
|
from bokeh.plotting import figure, output_file, show
from bokeh.models import Arrow, OpenHead, NormalHead, VeeHead
output_file("arrow.html", title="arrow.py example")
p = figure(plot_width=600, plot_height=600, x_range=(-0.1,1.1), y_range=(-0.1,0.8))
p.circle(x=[0, 1, 0.5], y=[0, 0, 0.7], radius=0.1, color=["navy", "yellow", "red"], fill_alpha=0.1)
p.add_layout(Arrow(end=OpenHead(), x_start=0, y_start=0, x_end=1, y_end=0))
p.add_layout(Arrow(end=NormalHead(), x_start=1, y_start=0, x_end=0.5, y_end=0.7))
p.add_layout(Arrow(end=VeeHead(), x_start=0.5, y_start=0.7, x_end=0, y_end=0))
show(p)
|
[
"bokeh.plotting.figure",
"bokeh.models.NormalHead",
"bokeh.plotting.output_file",
"bokeh.plotting.show",
"bokeh.models.OpenHead",
"bokeh.models.VeeHead"
] |
[((116, 167), 'bokeh.plotting.output_file', 'output_file', (['"""arrow.html"""'], {'title': '"""arrow.py example"""'}), "('arrow.html', title='arrow.py example')\n", (127, 167), False, 'from bokeh.plotting import figure, output_file, show\n'), ((173, 258), 'bokeh.plotting.figure', 'figure', ([], {'plot_width': '(600)', 'plot_height': '(600)', 'x_range': '(-0.1, 1.1)', 'y_range': '(-0.1, 0.8)'}), '(plot_width=600, plot_height=600, x_range=(-0.1, 1.1), y_range=(-0.1,\n 0.8))\n', (179, 258), False, 'from bokeh.plotting import figure, output_file, show\n'), ((593, 600), 'bokeh.plotting.show', 'show', (['p'], {}), '(p)\n', (597, 600), False, 'from bokeh.plotting import figure, output_file, show\n'), ((378, 388), 'bokeh.models.OpenHead', 'OpenHead', ([], {}), '()\n', (386, 388), False, 'from bokeh.models import Arrow, OpenHead, NormalHead, VeeHead\n'), ((454, 466), 'bokeh.models.NormalHead', 'NormalHead', ([], {}), '()\n', (464, 466), False, 'from bokeh.models import Arrow, OpenHead, NormalHead, VeeHead\n'), ((536, 545), 'bokeh.models.VeeHead', 'VeeHead', ([], {}), '()\n', (543, 545), False, 'from bokeh.models import Arrow, OpenHead, NormalHead, VeeHead\n')]
|
from django.core.exceptions import PermissionDenied
from django.utils.crypto import get_random_string
from rest_framework_jwt.settings import api_settings
from utils.constants import AUTO_GENERATED_PASSWORD_LENGTH
# binding.pry equivalent
# import code; code.interact(local=locals())
def get_hustler_data(hustler_object):
"""
Serializes a Hustler object for JSON
:param hustler_object: Hustler object
:return: dict
"""
from hustlers.api.serializers import HustlerSerializer
serialized_hustler_data = HustlerSerializer(hustler_object).data
return serialized_hustler_data
def jwt_response_payload_handler(token=None, user=None, request=None):
"""
Custom JWT payload creator
    /auth/login/ redirects to this endpoint
    User auth using tokens or a user object; a wrapper around vanilla auth/login
:param token: JWT token
:param user: User object
:param request: Request object
:return: dict
"""
if hasattr(user, "hustler"):
if user.is_active is False:
raise PermissionDenied("Hustler is inactive")
else:
raise PermissionDenied("Hustler does not exist!")
hustler_data = get_hustler_data(user.hustler)
if token is None:
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
payload = jwt_payload_handler(user)
token = jwt_encode_handler(payload)
return_data = {"auth_token": token, "hustler_data": hustler_data}
return return_data
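# A sketch of how this payload handler is typically wired up with
# django-rest-framework-jwt settings (the dotted module path below is an
# assumption, not taken from this repository):
#
#   JWT_AUTH = {
#       'JWT_RESPONSE_PAYLOAD_HANDLER':
#           'hustlers.utils.jwt_response_payload_handler',
#   }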
def generate_hustler_password(length_of_password=AUTO_GENERATED_PASSWORD_LENGTH):
"""
    :param length_of_password: desired length of the generated password
    :return: a random password string
"""
return get_random_string(length_of_password)
|
[
"django.utils.crypto.get_random_string",
"hustlers.api.serializers.HustlerSerializer",
"django.core.exceptions.PermissionDenied"
] |
[((1700, 1737), 'django.utils.crypto.get_random_string', 'get_random_string', (['length_of_password'], {}), '(length_of_password)\n', (1717, 1737), False, 'from django.utils.crypto import get_random_string\n'), ((535, 568), 'hustlers.api.serializers.HustlerSerializer', 'HustlerSerializer', (['hustler_object'], {}), '(hustler_object)\n', (552, 568), False, 'from hustlers.api.serializers import HustlerSerializer\n'), ((1118, 1161), 'django.core.exceptions.PermissionDenied', 'PermissionDenied', (['"""Hustler does not exist!"""'], {}), "('Hustler does not exist!')\n", (1134, 1161), False, 'from django.core.exceptions import PermissionDenied\n'), ((1054, 1093), 'django.core.exceptions.PermissionDenied', 'PermissionDenied', (['"""Hustler is inactive"""'], {}), "('Hustler is inactive')\n", (1070, 1093), False, 'from django.core.exceptions import PermissionDenied\n')]
|
from datetime import datetime
from flask import Flask
from flask import request
from flask import send_file
try:
# restplus is dead: https://github.com/noirbizarre/flask-restplus/issues/770
from flask_restx import Resource, Api
from flask_restx import reqparse
except ImportError:
try:
from flask_restplus import Resource, Api
except ImportError:
import werkzeug
werkzeug.cached_property = werkzeug.utils.cached_property
from flask_restplus import Resource, Api
from flask_restplus import reqparse
from markupsafe import escape
import json
app = Flask(__name__)
api = Api(app)
# e.g.: http://127.0.0.1:5000/test?ts=1467244800&lat=45.0&lon=-176.0
@api.route('/test')
class TestService(Resource):
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('ts', type=int, help='unix epoch seconds')
        parser.add_argument('lat', type=float, help='latitude in decimal degrees')
        parser.add_argument('lon', type=float, help='longitude in decimal degrees')
args = parser.parse_args()
dt = datetime.fromtimestamp(args["ts"])
result = {"response" : "HelloWorld"}
return result
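# e.g.: http://127.0.0.1:5000/get_image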
@api.route('/get_image')
class ImageService(Resource):
def get(self):
icon1="icons/circle-xl.png"
return send_file(icon1, mimetype='image/png')
|
[
"flask_restplus.Api",
"flask.Flask",
"flask_restplus.reqparse.RequestParser",
"datetime.datetime.fromtimestamp",
"flask.send_file"
] |
[((612, 627), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (617, 627), False, 'from flask import Flask\n'), ((634, 642), 'flask_restplus.Api', 'Api', (['app'], {}), '(app)\n', (637, 642), False, 'from flask_restplus import Resource, Api\n'), ((799, 823), 'flask_restplus.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (821, 823), False, 'from flask_restplus import reqparse\n'), ((1112, 1146), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (["args['ts']"], {}), "(args['ts'])\n", (1134, 1146), False, 'from datetime import datetime\n'), ((1340, 1378), 'flask.send_file', 'send_file', (['icon1'], {'mimetype': '"""image/png"""'}), "(icon1, mimetype='image/png')\n", (1349, 1378), False, 'from flask import send_file\n')]
|
#!/usr/bin/env python
# Copyright (c) 2018, 2019 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
"""
Oracle Cloud Infrastructure(OCI) Ansible Modules Uninstaller Script
===================================================================
This script deletes OCI Ansible modules, Oracle docs fragments and Oracle Ansible utility file from the ansible path.
To uninstall OCI Ansible modules, execute:
$ ./uninstall.py
To execute the script with debug messages, execute:
$ ./uninstall.py --debug
author: "<NAME> (@rohitChaware)"
"""
from __future__ import print_function
import argparse
import os.path
import shutil
import sys
try:
import ansible
ANSIBLE_IS_INSTALLED = True
except ImportError:
ANSIBLE_IS_INSTALLED = False
debug = False
def parse_cli_args():
parser = argparse.ArgumentParser(description="Script to uninstall oci-ansible-role")
parser.add_argument(
"--debug",
action="store_true",
default=False,
help="Send debug messages to STDERR",
)
return parser.parse_args()
def log(*args, **kwargs):
if debug:
print(*args, file=sys.stderr, **kwargs)
def main():
if not ANSIBLE_IS_INSTALLED:
print("Could not load ansible module.")
sys.exit(1)
global debug
args = parse_cli_args()
if args.debug:
debug = True
ansible_path = os.path.dirname(os.path.abspath(os.path.realpath(ansible.__file__)))
log("Ansible path: {}".format(ansible_path))
module_utils_path = os.path.join(ansible_path, "module_utils", "oracle")
log("Module utilities path: {}".format(module_utils_path))
document_fragments_path_old = os.path.join(
ansible_path, "utils", "module_docs_fragments"
)
document_fragments_path_new = os.path.join(ansible_path, "plugins", "doc_fragments")
if os.path.exists(document_fragments_path_new):
document_fragments_path = document_fragments_path_new
else:
document_fragments_path = document_fragments_path_old
log("Documentation fragments path: {}".format(document_fragments_path))
delete(module_utils_path)
oci_docs_fragments = []
for filename in os.listdir(document_fragments_path):
if filename.startswith("oracle"):
oci_docs_fragments.append(os.path.join(document_fragments_path, filename))
delete(oci_docs_fragments)
oracle_module_dir_path = os.path.join(ansible_path, "modules", "cloud", "oracle")
delete(oracle_module_dir_path)
print("Uninstalled OCI Ansible modules successfully.")
def delete(paths):
if type(paths) is not list:
paths = [paths]
for path in paths:
if os.path.isdir(path):
print("Deleting directory {}".format(path))
shutil.rmtree(path)
elif os.path.isfile(path):
print("Deleting {}".format(path))
os.remove(path)
if __name__ == "__main__":
main()
|
[
"shutil.rmtree",
"argparse.ArgumentParser",
"sys.exit"
] |
[((1025, 1100), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Script to uninstall oci-ansible-role"""'}), "(description='Script to uninstall oci-ansible-role')\n", (1048, 1100), False, 'import argparse\n'), ((1473, 1484), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1481, 1484), False, 'import sys\n'), ((2972, 2991), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (2985, 2991), False, 'import shutil\n')]
|
from geopy.distance import geodesic
from pprint import pprint
from itertools import permutations
from tqdm import tqdm
import gmplot
from math import cos, sin, atan2, sqrt
import time
import re
home = (43.077589, -89.414075)
with open('wisc-coords-local.txt', 'r') as f:
text = f.read()
def dist_between(coords, home=home):
return geodesic(coords, home).meters
coords = dict()
for i, x in enumerate(text.split('\n')):
if x == '':
continue
tmp = re.split(r'0{4},', x)
lat = float(tmp[0])
lon = float(tmp[1])
dist = dist_between((lat, lon))
if dist > 4700:
continue
name = tmp[2].lower()
coords[name] = dict()
coords[name]['coords'] = (lat, lon)
coords[name]['distance'] = dist
# print(len(coords))
# exit()
# pprint(coords.items())
for k,v in sorted(coords.items(), key = lambda k: k[1]['distance']):
print(f"{v['coords'][0]},{v['coords'][1]},{k}")
# exit()
print("43.074757,-89.380006,afk 4 stop")
print("43.076735,-89.413106,afk 2 stop 2 gym")
exit()
def center_geolocation(coords):
lats = []
lons = []
for k,v in coords.items():
lats.append(v['coords'][0])
lons.append(v['coords'][1])
return (sum(lats)/len(lats), sum(lons)/len(lons))
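# The cos/sin/atan2/sqrt imports above are unused, which suggests a spherical
# centroid was intended; a minimal sketch of that variant (an addition, not
# part of the original script):
def center_geolocation_spherical(coords):
    from math import radians, degrees
    x = y = z = 0.0
    for v in coords.values():
        lat, lon = map(radians, v['coords'])
        # project each point onto the unit sphere and sum in 3D
        x += cos(lat) * cos(lon)
        y += cos(lat) * sin(lon)
        z += sin(lat)
    n = len(coords)
    x, y, z = x / n, y / n, z / n
    # convert the mean vector back to latitude/longitude
    return (degrees(atan2(z, sqrt(x * x + y * y))), degrees(atan2(y, x)))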
def fac(n):
if n < 2:
return 1
return n * fac(n-1)
lats = [v['coords'][0] for v in coords.values()]
lons = [v['coords'][1] for v in coords.values()]
center = center_geolocation(coords)
gmap3 = gmplot.GoogleMapPlotter(center[0], center[1], 13)
gmap3.scatter(lats, lons, '#FF0000',
              size=40, marker=False)
# gmap3.plot(lats, lons,
# 'cornflowerblue', edge_width = 2.5)
gmap3.draw("/Users/Nicholas/Github/pokemon-go/wisc/map.html")
|
[
"geopy.distance.geodesic",
"re.split",
"gmplot.GoogleMapPlotter"
] |
[((1452, 1501), 'gmplot.GoogleMapPlotter', 'gmplot.GoogleMapPlotter', (['center[0]', 'center[1]', '(13)'], {}), '(center[0], center[1], 13)\n', (1475, 1501), False, 'import gmplot\n'), ((474, 494), 're.split', 're.split', (['"""0{4},"""', 'x'], {}), "('0{4},', x)\n", (482, 494), False, 'import re\n'), ((343, 365), 'geopy.distance.geodesic', 'geodesic', (['coords', 'home'], {}), '(coords, home)\n', (351, 365), False, 'from geopy.distance import geodesic\n')]
|
# -*- coding: utf-8 -*-
import pytest
from giraffez._teradata import RequestEnded, StatementEnded, StatementInfoEnded
import giraffez
from giraffez.constants import *
from giraffez.errors import *
from giraffez.types import *
class ResultsHelper:
"""
Helps to emulate how exceptions are raised when working with the CLIv2 so
that the control flow will be adequately represented.
"""
def __init__(self, rows):
self.first = True
self.index = 0
self.rows = rows
def get(self):
if self.first:
self.first = False
raise StatementInfoEnded
if self.index >= len(self.rows):
raise RequestEnded
row = self.rows[self.index]
self.index += 1
return row
def __call__(self):
return self.get()
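# Control-flow sketch: wired up as a fetchone() side effect (see the tests
# below), the first call raises StatementInfoEnded, each following call
# returns one row, and a final call raises RequestEnded once the rows are
# exhausted -- the same sequence the CLIv2 produces when draining results.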
@pytest.mark.usefixtures('config', 'context')
class TestCmd(object):
def test_results(self, mocker):
connect_mock = mocker.patch('giraffez.cmd.TeradataCmd._connect')
mock_columns = mocker.patch("giraffez.cmd.Cursor._columns")
cmd = giraffez.Cmd()
query = "select * from db1.info"
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns.return_value = columns
rows = [
["value1", "value2", "value3"],
["value1", "value2", "value3"],
["value1", "value2", "value3"],
]
expected_rows = [
{"col1": "value1", "col2": "value2", "col3": "value3"},
{"col1": "value1", "col2": "value2", "col3": "value3"},
{"col1": "value1", "col2": "value2", "col3": "value3"},
]
cmd.cmd = mocker.MagicMock()
cmd.cmd.fetchone.side_effect = ResultsHelper(rows)
result = list(cmd.execute(query))
assert [x.items() for x in result] == expected_rows
cmd._close()
# This ensures that the config was proper mocked
connect_mock.assert_called_with('db1', 'user123', '<PASSWORD>', None, None)
def test_invalid_credentials(self, mocker):
connect_mock = mocker.patch('giraffez.cmd.TeradataCmd._connect')
connect_mock.side_effect = InvalidCredentialsError("test")
with pytest.raises(InvalidCredentialsError):
cmd = giraffez.Cmd(protect=True)
cmd._close()
@pytest.mark.usefixtures('config', 'context', 'tmpfiles')
class TestInsert(object):
def test_insert_from_file(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(100):
rows.append("|".join(["value1", "value2", "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
assert result.get('count') == 100
def test_insert_from_file_quoted(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(99):
rows.append("|".join(["value1", "value2", "value3"]))
rows.append("|".join(["value1",'"value2|withpipe"', "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
assert result.get('count') == 100
def test_insert_from_file_single_quoted(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(99):
rows.append("|".join(["value1", "value2", "value3"]))
rows.append("|".join(["value1","'value2|withpipe'", "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|", quotechar="'")
assert result.get('count') == 100
def test_insert_from_file_nonstandard_quote(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(99):
rows.append("|".join(["value1", "value2", "value3"]))
rows.append("|".join(['va"lue1','$value2|withpipe"and"quote$', "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|", quotechar="$")
assert result.get('count') == 100
def test_insert_from_file_error(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3", "value4"]))
f.write("\n")
with giraffez.Cmd() as cmd:
cmd.panic = False
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
def test_insert_from_file_error_panic(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3", "value4"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeEncodeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
def test_insert_from_file_invalid_header(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
# Invalid column (blank string)
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3", "", ""]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
# Invalid column (wrong name)
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col4"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
# Too many columns (duplicate name)
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3", "col3"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeEncodeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
def test_insert_insert_no_specify_fields(self, mocker):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
rows = [
("value1", "value3"),
("value1", "value3"),
("value1", "value3"),
]
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeEncodeError):
cmd.insert("db1.test", rows)
|
[
"giraffez.Cmd",
"pytest.raises",
"pytest.mark.usefixtures"
] |
[((826, 870), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""config"""', '"""context"""'], {}), "('config', 'context')\n", (849, 870), False, 'import pytest\n'), ((2444, 2500), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""config"""', '"""context"""', '"""tmpfiles"""'], {}), "('config', 'context', 'tmpfiles')\n", (2467, 2500), False, 'import pytest\n'), ((1086, 1100), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (1098, 1100), False, 'import giraffez\n'), ((2331, 2369), 'pytest.raises', 'pytest.raises', (['InvalidCredentialsError'], {}), '(InvalidCredentialsError)\n', (2344, 2369), False, 'import pytest\n'), ((2389, 2415), 'giraffez.Cmd', 'giraffez.Cmd', ([], {'protect': '(True)'}), '(protect=True)\n', (2401, 2415), False, 'import giraffez\n'), ((3329, 3343), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (3341, 3343), False, 'import giraffez\n'), ((4358, 4372), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (4370, 4372), False, 'import giraffez\n'), ((5394, 5408), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (5406, 5408), False, 'import giraffez\n'), ((6461, 6475), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (6473, 6475), False, 'import giraffez\n'), ((7365, 7379), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (7377, 7379), False, 'import giraffez\n'), ((8248, 8262), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (8260, 8262), False, 'import giraffez\n'), ((9228, 9242), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (9240, 9242), False, 'import giraffez\n'), ((9682, 9696), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (9694, 9696), False, 'import giraffez\n'), ((10150, 10164), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (10162, 10164), False, 'import giraffez\n'), ((10982, 10996), 'giraffez.Cmd', 'giraffez.Cmd', ([], {}), '()\n', (10994, 10996), False, 'import giraffez\n'), ((8288, 8321), 'pytest.raises', 'pytest.raises', (['GiraffeEncodeError'], {}), '(GiraffeEncodeError)\n', (8301, 8321), False, 'import pytest\n'), ((9268, 9295), 'pytest.raises', 'pytest.raises', (['GiraffeError'], {}), '(GiraffeError)\n', (9281, 9295), False, 'import pytest\n'), ((9722, 9749), 'pytest.raises', 'pytest.raises', (['GiraffeError'], {}), '(GiraffeError)\n', (9735, 9749), False, 'import pytest\n'), ((10190, 10223), 'pytest.raises', 'pytest.raises', (['GiraffeEncodeError'], {}), '(GiraffeEncodeError)\n', (10203, 10223), False, 'import pytest\n'), ((11022, 11055), 'pytest.raises', 'pytest.raises', (['GiraffeEncodeError'], {}), '(GiraffeEncodeError)\n', (11035, 11055), False, 'import pytest\n')]
|
#!/usr/bin/env python3
"""Command line tasks to build and deploy the ACW Battle Data."""
import os
import shutil
from os import path
import logging
from invoke import task
LOGGER = logging.getLogger(__name__)
@task
def setup(ctx):
"""Setup directory structure."""
os.makedirs(ctx.dst, exist_ok=True)
@task(setup)
def unit_sizes(ctx):
"""Build unit size data."""
shutil.copy(path.join(ctx.src, 'rawdata', 'unit_sizes', 'unit_sizes.csv'),
ctx.dst)
shutil.copy(
path.join(ctx.src, 'rawdata', 'unit_sizes', 'eicher_units_table.csv'),
ctx.dst)
@task(setup)
def aad(ctx):
"""Build the AAD CWSAC initial data."""
ctx.run(f"{ctx.python} -m acwbattledata.aad {ctx.src} {ctx.dst}")
@task(setup)
def cwsac(ctx):
"""Build the CWSAC Report I data."""
ctx.run(f"{ctx.python} -m acwbattledata.cwsac {ctx.src} {ctx.dst}")
@task(setup)
def cws2(ctx):
"""Build the CWSAC Report II data."""
ctx.run(f"{ctx.python} -m acwbattledata.cws2 {ctx.src} {ctx.dst}")
@task
def download_cwss(ctx):
"""Download CWSS data."""
files = ('old/battle.xml',
'old/persons.xml',
'old/battleunitlink.xml',
'new/tsv/Regiments_Unitz.tsv',
'new/tsv/State_Name.tsv',
'new/tsv/Unititle.tsv',
'new/tsv/Contitle.tsv',
'new/tsv/Category.tsv')
for file_ in files:
basefilename = path.basename(file_)
dstfile = path.join(ctx.cwss.data_dir, basefilename)
if not os.path.exists(dstfile):
ctx.run(f"aws s3 cp --region {ctx.cwss.s3.region} "
f" s3://{ctx.cwss.s3.bucket}/{file_} "
f" {dstfile} ")
else:
print(f"{dstfile} exists")
@task(pre=[setup, download_cwss])
def cwss(ctx):
"""Build the CWSS data."""
ctx.run(f"{ctx.python} -m acwbattledata.cwss "
f" {ctx.src} {ctx.cwss.data_dir} {ctx.dst}")
@task(pre=[setup, aad, cwsac, cws2, cwss, unit_sizes])
def nps(ctx):
"""Build the NPS Combined Data."""
ctx.run(f"{ctx.Rscript} bin/build_nps_combined.R {ctx.src} {ctx.dst}")
@task(setup)
def bodart(ctx):
"""Build data from Bodart."""
ctx.run(f"{ctx.python} -m acwbattledata.bodart {ctx.src} {ctx.dst}")
@task(setup)
def dyer(ctx):
"""Build data from Dyer (1908)."""
ctx.run(f"{ctx.python} -m acwbattledata.dyer {ctx.src} {ctx.dst}")
@task(setup)
def fox(ctx):
"""Build data from Fox."""
ctx.run(f"{ctx.python} -m acwbattledata.fox {ctx.src} {ctx.dst}")
@task(setup)
def greer(ctx):
"""Build weekly casualty data from Greer."""
ctx.run(f"{ctx.python} -m acwbattledata.greer {ctx.src} {ctx.dst}")
@task(setup)
def kennedy(ctx):
"""Build casualty data from Kennedy."""
ctx.run(f"{ctx.python} -m acwbattledata.kennedy {ctx.src} {ctx.dst}")
@task(setup)
def livermore(ctx):
"""Build casualty data from Kennedy."""
ctx.run(f"{ctx.Rscript} bin/build_livermore.R "
f"{ctx.src} {ctx.dst}")
ctx.run(f"{ctx.python} -m acwbattledata.livermore_to_cwsac "
f"{ctx.src} {ctx.dst}")
@task(setup)
def thorpe(ctx):
"""Build Thorpe data."""
ctx.run(f"{ctx.python} -m acwbattledata.thorpe {ctx.src} {ctx.dst}")
@task(setup)
def nyt(ctx):
"""Build New York Times chronology data."""
shutil.copy(
path.join(ctx.src, "rawdata", "nytimes_civil_war_chronology",
"nytimes_civil_war_chronology.json"), ctx.dst)
@task(setup)
def phisterer(ctx):
"""Build phisterer data."""
ctx.run(f"{ctx.python} -m acwbattledata.phisterer {ctx.src} {ctx.dst}")
@task(setup)
def shenandoah(ctx):
"""Build the NPS Shenandoah Report Data."""
ctx.run(f"{ctx.python} -m acwbattledata.shenandoah {ctx.src} {ctx.dst}")
@task(pre=[setup, unit_sizes])
def clodfelter(ctx):
"""Build the Clodfelter data."""
ctx.run(f"{ctx.python} -m acwbattledata.clodfelter {ctx.src} {ctx.dst}")
ctx.run(f"{ctx.Rscript} bin/update_clodfelter_forces.R "
f"{ctx.src} {ctx.dst}")
@task(setup)
def cdb90(ctx):
"""Build the CDB90 data."""
ctx.run(f"{ctx.python} -m acwbattledata.cdb90 {ctx.src} {ctx.dst}")
@task(setup)
def civilwarorg(ctx):
"""Build the civilwar.org data."""
ctx.run(f"{ctx.python} -m acwbattledata.civilwarorg {ctx.src} {ctx.dst}")
@task(setup)
def misc(ctx):
"""Build some miscellaneous datasets."""
ctx.run(f"{ctx.python} -m acwbattledata.misc {ctx.src} {ctx.dst}")
@task(setup)
def battlemisc(ctx):
"""Build miscellaneous battle data."""
ctx.run(f"{ctx.python} -m acwbattledata.battlemisc {ctx.src} {ctx.dst}")
@task(setup)
def ships(ctx):
"""Build the dataset on ships."""
ctx.run(f"{ctx.python} -m acwbattledata.ships {ctx.src} {ctx.dst}")
@task(setup)
def wikipedia(ctx):
"""Build wikipedia data."""
ctx.run(f"{ctx.python} -m acwbattledata.wikipedia {ctx.src} {ctx.dst}")
@task(setup)
def eicher(ctx):
"""Build Eicher datasets."""
ctx.run(f"{ctx.python} -m acwbattledata.eicher {ctx.src} {ctx.dst}")
@task(setup)
def download_wikipedia(ctx):
"""Download wikipedia data."""
outdir = path.join(ctx.src, 'wikipedia')
ctx.run(f"{ctx.python} bin/download_wikipedia.py {ctx.src} {outdir}")
DATA_TASKS = [
unit_sizes, aad, shenandoah, cwsac, cws2, cwss, nps, bodart, dyer, fox,
greer, kennedy, livermore, thorpe, nyt, phisterer, clodfelter, cdb90,
ships, civilwarorg, wikipedia, eicher, misc, battlemisc]
"""Tasks to run before build."""
@task(setup)
def datapackage(ctx):
"""Build datapackage.json"""
ctx.run(f"{ctx.python} -m acwbattledata.datapackage {ctx.src} {ctx.dst}")
@task(pre=[*DATA_TASKS, datapackage])
def build(ctx):
"""Build all datasets."""
pass
|
[
"os.makedirs",
"os.path.basename",
"os.path.exists",
"invoke.task",
"os.path.join",
"logging.getLogger"
] |
[((184, 211), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (201, 211), False, 'import logging\n'), ((316, 327), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (320, 327), False, 'from invoke import task\n'), ((601, 612), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (605, 612), False, 'from invoke import task\n'), ((744, 755), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (748, 755), False, 'from invoke import task\n'), ((888, 899), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (892, 899), False, 'from invoke import task\n'), ((1770, 1802), 'invoke.task', 'task', ([], {'pre': '[setup, download_cwss]'}), '(pre=[setup, download_cwss])\n', (1774, 1802), False, 'from invoke import task\n'), ((1960, 2013), 'invoke.task', 'task', ([], {'pre': '[setup, aad, cwsac, cws2, cwss, unit_sizes]'}), '(pre=[setup, aad, cwsac, cws2, cwss, unit_sizes])\n', (1964, 2013), False, 'from invoke import task\n'), ((2145, 2156), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (2149, 2156), False, 'from invoke import task\n'), ((2284, 2295), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (2288, 2295), False, 'from invoke import task\n'), ((2424, 2435), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (2428, 2435), False, 'from invoke import task\n'), ((2554, 2565), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (2558, 2565), False, 'from invoke import task\n'), ((2706, 2717), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (2710, 2717), False, 'from invoke import task\n'), ((2857, 2868), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (2861, 2868), False, 'from invoke import task\n'), ((3125, 3136), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (3129, 3136), False, 'from invoke import task\n'), ((3259, 3270), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (3263, 3270), False, 'from invoke import task\n'), ((3488, 3499), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (3492, 3499), False, 'from invoke import task\n'), ((3631, 3642), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (3635, 3642), False, 'from invoke import task\n'), ((3792, 3821), 'invoke.task', 'task', ([], {'pre': '[setup, unit_sizes]'}), '(pre=[setup, unit_sizes])\n', (3796, 3821), False, 'from invoke import task\n'), ((4057, 4068), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4061, 4068), False, 'from invoke import task\n'), ((4192, 4203), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4196, 4203), False, 'from invoke import task\n'), ((4346, 4357), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4350, 4357), False, 'from invoke import task\n'), ((4492, 4503), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4496, 4503), False, 'from invoke import task\n'), ((4648, 4659), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4652, 4659), False, 'from invoke import task\n'), ((4789, 4800), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4793, 4800), False, 'from invoke import task\n'), ((4932, 4943), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (4936, 4943), False, 'from invoke import task\n'), ((5070, 5081), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (5074, 5081), False, 'from invoke import task\n'), ((5529, 5540), 'invoke.task', 'task', (['setup'], {}), '(setup)\n', (5533, 5540), False, 'from invoke import task\n'), ((5677, 5713), 'invoke.task', 'task', ([], {'pre': '[*DATA_TASKS, datapackage]'}), '(pre=[*DATA_TASKS, datapackage])\n', (5681, 5713), False, 'from invoke import task\n'), 
((277, 312), 'os.makedirs', 'os.makedirs', (['ctx.dst'], {'exist_ok': '(True)'}), '(ctx.dst, exist_ok=True)\n', (288, 312), False, 'import os\n'), ((5159, 5190), 'os.path.join', 'path.join', (['ctx.src', '"""wikipedia"""'], {}), "(ctx.src, 'wikipedia')\n", (5168, 5190), False, 'from os import path\n'), ((397, 458), 'os.path.join', 'path.join', (['ctx.src', '"""rawdata"""', '"""unit_sizes"""', '"""unit_sizes.csv"""'], {}), "(ctx.src, 'rawdata', 'unit_sizes', 'unit_sizes.csv')\n", (406, 458), False, 'from os import path\n'), ((510, 579), 'os.path.join', 'path.join', (['ctx.src', '"""rawdata"""', '"""unit_sizes"""', '"""eicher_units_table.csv"""'], {}), "(ctx.src, 'rawdata', 'unit_sizes', 'eicher_units_table.csv')\n", (519, 579), False, 'from os import path\n'), ((1433, 1453), 'os.path.basename', 'path.basename', (['file_'], {}), '(file_)\n', (1446, 1453), False, 'from os import path\n'), ((1472, 1514), 'os.path.join', 'path.join', (['ctx.cwss.data_dir', 'basefilename'], {}), '(ctx.cwss.data_dir, basefilename)\n', (1481, 1514), False, 'from os import path\n'), ((3358, 3460), 'os.path.join', 'path.join', (['ctx.src', '"""rawdata"""', '"""nytimes_civil_war_chronology"""', '"""nytimes_civil_war_chronology.json"""'], {}), "(ctx.src, 'rawdata', 'nytimes_civil_war_chronology',\n 'nytimes_civil_war_chronology.json')\n", (3367, 3460), False, 'from os import path\n'), ((1530, 1553), 'os.path.exists', 'os.path.exists', (['dstfile'], {}), '(dstfile)\n', (1544, 1553), False, 'import os\n')]
|
from Agent import Agent
from Color import Color
from State import State
class MiniMaxAgent(Agent):
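    """Agent that chooses army placement and attack moves via minimax search
    with alpha-beta pruning, evaluating a heuristic on the resulting board."""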
def make_decision(self, board):
self.receive_armies(board)
state = State(board, self.available_armies_count, 1)
        place_armies_result, _ = self.maximize_place_armies(state, -999999, 999999)
        if place_armies_result is None:  # no legal placement (e.g. terminal state)
            return
        attack_result, _ = self.maximize_attack(place_armies_result.parent, -999999, 999999)
        if attack_result is not None:
            board.bulk_update(attack_result.parent.board)
def maximize_place_armies(self, state, alpha, beta):
max_child, max_heuristic = (None, -999999)
if state.is_terminal(self.get_opponent_color()):
return max_child, self.evaluate_heuristic(state.board)
for child_node in self.get_place_armies_children(state):
self.receive_armies(child_node.state.board)
_, new_heuristic = self.minimize_place_armies(child_node.state, alpha, beta)
# Update heuristic
if new_heuristic > max_heuristic:
max_child, max_heuristic = child_node, new_heuristic
# Update alpha
if new_heuristic > alpha:
alpha = new_heuristic
# Break from loop to prune if no chance for better results
if alpha >= beta:
break
        return max_child, max_heuristic  # return max child
def minimize_place_armies(self, state, alpha, beta):
min_child, min_heuristic = (None, 999999)
if state.is_terminal(self.get_opponent_color()):
return min_child, self.evaluate_heuristic(state.board)
for child_node in self.get_place_armies_children(state):
self.receive_armies(child_node.state.board)
_, new_heuristic = self.maximize_place_armies(child_node.state, alpha, beta)
# Update heuristic
if new_heuristic < min_heuristic:
min_child, min_heuristic = child_node, new_heuristic
            # Update beta
if new_heuristic < beta:
beta = new_heuristic
# Break from loop to prune if no chance for better results
if alpha >= beta:
break
return min_child, min_heuristic
def maximize_attack(self, state, alpha, beta):
max_child, max_heuristic = (None, -999999)
if state.is_terminal(self.get_opponent_color()):
return max_child, self.evaluate_heuristic(state.board)
for child_node in self.get_attacking_children(state):
self.receive_armies(child_node.state.board)
_, new_heuristic = self.minimize_attack(child_node.state, alpha, beta)
# Update heuristic
if new_heuristic > max_heuristic:
max_child, max_heuristic = child_node, new_heuristic
# Update alpha
if new_heuristic > alpha:
alpha = new_heuristic
# Break from loop to prune if no chance for better results
if alpha >= beta:
break
        return max_child, max_heuristic  # return max child
def minimize_attack(self, state, alpha, beta):
min_child, min_heuristic = (None, 999999)
if state.is_terminal(self.get_opponent_color()):
return min_child, self.evaluate_heuristic(state.board)
for child_node in self.get_attacking_children(state):
self.receive_armies(child_node.state.board)
_, new_heuristic = self.maximize_attack(child_node.state, alpha, beta)
# Update heuristic
if new_heuristic < min_heuristic:
min_child, min_heuristic = child_node, new_heuristic
            # Update beta
if new_heuristic < beta:
beta = new_heuristic
# Break from loop to prune if no chance for better results
if alpha >= beta:
break
return min_child, min_heuristic
|
[
"State.State"
] |
[((189, 233), 'State.State', 'State', (['board', 'self.available_armies_count', '(1)'], {}), '(board, self.available_armies_count, 1)\n', (194, 233), False, 'from State import State\n')]
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.contrib.flatpages.forms import FlatpageForm
from django.contrib.flatpages.models import FlatPage
from django.contrib.auth.decorators import login_required
"""Views for editing legal pages."""
# While there is some code duplication here, this is intentional
# to keep flexibility for future changes to these parts of the app;
# consider this temporary.
@login_required
def imprint_edit(request):
"""Imprint and contact information page"""
if not request.user.is_superuser:
return HttpResponseRedirect(reverse('index'))
page_model = FlatPage.objects.get(url='/legal/imprint/')
if request.method == 'POST':
form = FlatpageForm(request.POST or None, instance=page_model)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse('imprint'))
else:
form = FlatpageForm(instance=page_model)
return render(request, 'om/legal/edit_imprint.html', {'form': form})
@login_required
def privacy_edit(request):
"""Privacy statements page"""
if not request.user.is_superuser:
return HttpResponseRedirect(reverse('index'))
page_model = FlatPage.objects.get(url='/legal/privacy/')
if request.method == 'POST':
form = FlatpageForm(request.POST or None, instance=page_model)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse('privacy'))
else:
form = FlatpageForm(instance=page_model)
return render(request, 'om/legal/edit_privacy.html', {'form': form})
|
[
"django.shortcuts.render",
"django.urls.reverse",
"django.contrib.flatpages.forms.FlatpageForm",
"django.contrib.flatpages.models.FlatPage.objects.get"
] |
[((680, 723), 'django.contrib.flatpages.models.FlatPage.objects.get', 'FlatPage.objects.get', ([], {'url': '"""/legal/imprint/"""'}), "(url='/legal/imprint/')\n", (700, 723), False, 'from django.contrib.flatpages.models import FlatPage\n'), ((1012, 1073), 'django.shortcuts.render', 'render', (['request', '"""om/legal/edit_imprint.html"""', "{'form': form}"], {}), "(request, 'om/legal/edit_imprint.html', {'form': form})\n", (1018, 1073), False, 'from django.shortcuts import render\n'), ((1263, 1306), 'django.contrib.flatpages.models.FlatPage.objects.get', 'FlatPage.objects.get', ([], {'url': '"""/legal/privacy/"""'}), "(url='/legal/privacy/')\n", (1283, 1306), False, 'from django.contrib.flatpages.models import FlatPage\n'), ((1595, 1656), 'django.shortcuts.render', 'render', (['request', '"""om/legal/edit_privacy.html"""', "{'form': form}"], {}), "(request, 'om/legal/edit_privacy.html', {'form': form})\n", (1601, 1656), False, 'from django.shortcuts import render\n'), ((773, 828), 'django.contrib.flatpages.forms.FlatpageForm', 'FlatpageForm', (['(request.POST or None)'], {'instance': 'page_model'}), '(request.POST or None, instance=page_model)\n', (785, 828), False, 'from django.contrib.flatpages.forms import FlatpageForm\n'), ((966, 999), 'django.contrib.flatpages.forms.FlatpageForm', 'FlatpageForm', ([], {'instance': 'page_model'}), '(instance=page_model)\n', (978, 999), False, 'from django.contrib.flatpages.forms import FlatpageForm\n'), ((1356, 1411), 'django.contrib.flatpages.forms.FlatpageForm', 'FlatpageForm', (['(request.POST or None)'], {'instance': 'page_model'}), '(request.POST or None, instance=page_model)\n', (1368, 1411), False, 'from django.contrib.flatpages.forms import FlatpageForm\n'), ((1549, 1582), 'django.contrib.flatpages.forms.FlatpageForm', 'FlatpageForm', ([], {'instance': 'page_model'}), '(instance=page_model)\n', (1561, 1582), False, 'from django.contrib.flatpages.forms import FlatpageForm\n'), ((644, 660), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (651, 660), False, 'from django.urls import reverse\n'), ((1227, 1243), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (1234, 1243), False, 'from django.urls import reverse\n'), ((921, 939), 'django.urls.reverse', 'reverse', (['"""imprint"""'], {}), "('imprint')\n", (928, 939), False, 'from django.urls import reverse\n'), ((1504, 1522), 'django.urls.reverse', 'reverse', (['"""privacy"""'], {}), "('privacy')\n", (1511, 1522), False, 'from django.urls import reverse\n')]
|
from types import FunctionType
from fullcontact import FullContact
from nose.tools import assert_equal, assert_true
class TestFullContact(object):
def test_init(self):
fc = FullContact('test_key')
assert_equal(fc.api_key, 'test_key')
def test__prepare_batch_url(self):
fc = FullContact('test_key')
assert_equal(
fc._prepare_batch_url(('person', {'email': '<EMAIL>'})),
'https://api.fullcontact.com/v2/person.json?email=test%40test.<EMAIL>'
)
def test_invalid_api_keys(self):
fc = FullContact('test_key')
r = fc.person(email='<EMAIL>')
assert_equal(r.status_code, 403)
test_batch = [
('person', {'email': '<EMAIL>'}),
('person', {'name': '<NAME>'})
]
r = fc.api_batch(test_batch)
assert_equal(r.status_code, 403)
def test_adds_endpoint_methods(self):
fc = FullContact('')
for endpoint in fc.get_endpoints:
assert_true(isinstance(getattr(fc, endpoint), FunctionType))
|
[
"fullcontact.FullContact",
"nose.tools.assert_equal"
] |
[((187, 210), 'fullcontact.FullContact', 'FullContact', (['"""test_key"""'], {}), "('test_key')\n", (198, 210), False, 'from fullcontact import FullContact\n'), ((219, 255), 'nose.tools.assert_equal', 'assert_equal', (['fc.api_key', '"""test_key"""'], {}), "(fc.api_key, 'test_key')\n", (231, 255), False, 'from nose.tools import assert_equal, assert_true\n'), ((309, 332), 'fullcontact.FullContact', 'FullContact', (['"""test_key"""'], {}), "('test_key')\n", (320, 332), False, 'from fullcontact import FullContact\n'), ((568, 591), 'fullcontact.FullContact', 'FullContact', (['"""test_key"""'], {}), "('test_key')\n", (579, 591), False, 'from fullcontact import FullContact\n'), ((639, 671), 'nose.tools.assert_equal', 'assert_equal', (['r.status_code', '(403)'], {}), '(r.status_code, 403)\n', (651, 671), False, 'from nose.tools import assert_equal, assert_true\n'), ((841, 873), 'nose.tools.assert_equal', 'assert_equal', (['r.status_code', '(403)'], {}), '(r.status_code, 403)\n', (853, 873), False, 'from nose.tools import assert_equal, assert_true\n'), ((930, 945), 'fullcontact.FullContact', 'FullContact', (['""""""'], {}), "('')\n", (941, 945), False, 'from fullcontact import FullContact\n')]
|
from time import time
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
# todo: Backend?
# from accounts.business.authentication import Backend
from accounts.business.fields import RecoverTypeField
from vaultier.test.tools import FileAccessMixin, VaultierAPIClient
from django.utils import timezone
def auth_api_call(email=None, date=None, signature=None):
url = reverse('auth-auth')
client = APIClient()
m = FileAccessMixin()
if not date:
date = timezone.now()
if not signature:
privkey = m.read_file('vaultier.key')
# todo: Backend?
# signature = Backend.sign(privkey, email, date)
signature = None
response = client.post(url, {'email': email,
'date': date,
'signature': signature}
)
return response
def register_api_call(*args, **kwargs):
m = FileAccessMixin()
pubkey = m.read_file('vaultier.pub')
kwargs['public_key'] = pubkey
url = reverse('user-list')
client = APIClient()
kwargs.update({'timestamp': int(time())})
response = client.post(url, kwargs)
return response
def invite_member_api_call(token, email=None, workspace=None, send=True,
resend=True):
url = reverse('member-list')
client = VaultierAPIClient()
client.token(token)
response = client.post(url, {
'email': email,
'workspace': workspace,
'send': send,
'resend': resend
})
return response
def list_members_api_call(token, workspace):
url = reverse('member-list')
client = VaultierAPIClient()
client.token(token)
response = client.get(url, {'workspace': workspace})
return response
def delete_member_api_call(token, member):
url = reverse('member-detail', args=(member,))
client = VaultierAPIClient()
client.token(token)
response = client.delete(url)
return response
def create_lost_keys_api_call(email, **kwargs):
"""
Call to lost_key create view
:param email: user email
:param kwargs:
:return: Response
"""
client = VaultierAPIClient()
kwargs['email'] = email
url = reverse('lost_keys-list')
response = client.post(url, data={'email': email}, kwargs=kwargs)
return response
def update_lost_key_api_rebuild_call(lost_key_id, auth_hash=None,
public_key=None):
"""
Call to update view with parameter recover_type set to
RecoverTypeField.REBUILD
:param lost_key_id: int
:param auth_hash: str
:param public_key: str
:return: Response
"""
client = VaultierAPIClient()
url = "{}?hash={}".format(reverse('lost_keys-detail', args=(lost_key_id,)),
auth_hash)
return client.put(url, data={
'public_key': public_key, 'recover_type': RecoverTypeField.REBUILD})
def update_lost_key_api_disable_call(lost_key_id, auth_hash=None,
public_key=None):
"""
Call to update view with parameter recover_type set to
RecoverTypeField.DISABLE
:param lost_key_id: int
:param auth_hash: str
:param public_key: str
:return: Response
"""
client = VaultierAPIClient()
url = "{}?hash={}".format(reverse('lost_keys-detail', args=(lost_key_id,)),
auth_hash)
return client.put(url, data={
'public_key': public_key, 'recover_type': RecoverTypeField.DISABLE})
def retrieve_lost_key_api_call(lost_key_id, auth_hash=None):
"""
Call to retrieve view
:param lost_key_id: int
:param auth_hash: str
:return: Response
"""
client = VaultierAPIClient()
url = "{}?hash={}".format(reverse('lost_keys-detail', args=(lost_key_id,)),
auth_hash)
return client.get(url)
|
[
"vaultier.test.tools.VaultierAPIClient",
"django.core.urlresolvers.reverse",
"django.utils.timezone.now",
"time.time",
"rest_framework.test.APIClient",
"vaultier.test.tools.FileAccessMixin"
] |
[((406, 426), 'django.core.urlresolvers.reverse', 'reverse', (['"""auth-auth"""'], {}), "('auth-auth')\n", (413, 426), False, 'from django.core.urlresolvers import reverse\n'), ((440, 451), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (449, 451), False, 'from rest_framework.test import APIClient\n'), ((460, 477), 'vaultier.test.tools.FileAccessMixin', 'FileAccessMixin', ([], {}), '()\n', (475, 477), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((955, 972), 'vaultier.test.tools.FileAccessMixin', 'FileAccessMixin', ([], {}), '()\n', (970, 972), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((1059, 1079), 'django.core.urlresolvers.reverse', 'reverse', (['"""user-list"""'], {}), "('user-list')\n", (1066, 1079), False, 'from django.core.urlresolvers import reverse\n'), ((1093, 1104), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1102, 1104), False, 'from rest_framework.test import APIClient\n'), ((1337, 1359), 'django.core.urlresolvers.reverse', 'reverse', (['"""member-list"""'], {}), "('member-list')\n", (1344, 1359), False, 'from django.core.urlresolvers import reverse\n'), ((1373, 1392), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (1390, 1392), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((1638, 1660), 'django.core.urlresolvers.reverse', 'reverse', (['"""member-list"""'], {}), "('member-list')\n", (1645, 1660), False, 'from django.core.urlresolvers import reverse\n'), ((1674, 1693), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (1691, 1693), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((1850, 1890), 'django.core.urlresolvers.reverse', 'reverse', (['"""member-detail"""'], {'args': '(member,)'}), "('member-detail', args=(member,))\n", (1857, 1890), False, 'from django.core.urlresolvers import reverse\n'), ((1904, 1923), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (1921, 1923), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((2184, 2203), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (2201, 2203), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((2242, 2267), 'django.core.urlresolvers.reverse', 'reverse', (['"""lost_keys-list"""'], {}), "('lost_keys-list')\n", (2249, 2267), False, 'from django.core.urlresolvers import reverse\n'), ((2701, 2720), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (2718, 2720), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((3296, 3315), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (3313, 3315), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((3742, 3761), 'vaultier.test.tools.VaultierAPIClient', 'VaultierAPIClient', ([], {}), '()\n', (3759, 3761), False, 'from vaultier.test.tools import FileAccessMixin, VaultierAPIClient\n'), ((511, 525), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (523, 525), False, 'from django.utils import timezone\n'), ((2751, 2799), 'django.core.urlresolvers.reverse', 'reverse', (['"""lost_keys-detail"""'], {'args': '(lost_key_id,)'}), "('lost_keys-detail', args=(lost_key_id,))\n", (2758, 2799), False, 'from django.core.urlresolvers import reverse\n'), ((3346, 3394), 'django.core.urlresolvers.reverse', 'reverse', 
(['"""lost_keys-detail"""'], {'args': '(lost_key_id,)'}), "('lost_keys-detail', args=(lost_key_id,))\n", (3353, 3394), False, 'from django.core.urlresolvers import reverse\n'), ((3792, 3840), 'django.core.urlresolvers.reverse', 'reverse', (['"""lost_keys-detail"""'], {'args': '(lost_key_id,)'}), "('lost_keys-detail', args=(lost_key_id,))\n", (3799, 3840), False, 'from django.core.urlresolvers import reverse\n'), ((1141, 1147), 'time.time', 'time', ([], {}), '()\n', (1145, 1147), False, 'from time import time\n')]
|
from glustercli2.parsers import parsed_pool_list
class Peer:
def __init__(self, cli, hostname):
self.cli = cli
self.hostname = hostname
@classmethod
def peer_cmd(cls, cli, cmd):
return cli.exec_gluster_command(
["peer"] + cmd
)
@classmethod
def list(cls, cli):
out = cli.exec_gluster_command(["pool", "list"])
peers = parsed_pool_list(out)
for peer in peers:
if peer.hostname == "localhost":
peer.hostname = cli.get_current_host()
return peers
@classmethod
def add(cls, cli, hostname):
cls.peer_cmd(cli, ["attach", hostname])
def detach(self):
"""
== Peer Delete/Detach
Delete or Detach a Peer from Cluster.
Example:
[source,python]
----
from glustercli2 import GlusterCLI
gcli = GlusterCLI()
        gcli.peer("server2.kadalu").detach()
----
"""
self.peer_cmd(self.cli, ["detach", self.hostname])
|
[
"glustercli2.parsers.parsed_pool_list"
] |
[((402, 423), 'glustercli2.parsers.parsed_pool_list', 'parsed_pool_list', (['out'], {}), '(out)\n', (418, 423), False, 'from glustercli2.parsers import parsed_pool_list\n')]
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import six
from .middleware import RedirectFallbackMiddleware
from .models import Redirect
@override_settings(
APPEND_SLASH=False,
MIDDLEWARE_CLASSES=list(settings.MIDDLEWARE_CLASSES) +
['apps.redirects.middleware.RedirectFallbackMiddleware'],
SITE_ID=1,
)
class RedirectTests(TestCase):
def test_model(self):
r1 = Redirect.objects.create(
old_path='/initial', new_path='/new_target')
self.assertEqual(six.text_type(r1), "/initial ---> /new_target")
def test_redirect(self):
Redirect.objects.create(
old_path='/initial', new_path='/new_target')
response = self.client.get('/initial')
self.assertRedirects(response,
'/en/new_target', status_code=301, target_status_code=404)
@override_settings(APPEND_SLASH=True)
def test_redirect_with_append_slash(self):
Redirect.objects.create(
old_path='/initial/', new_path='/new_target/')
response = self.client.get('/initial')
self.assertRedirects(response,
'/en/new_target/', status_code=301, target_status_code=404)
@override_settings(APPEND_SLASH=True)
def test_redirect_with_append_slash_and_query_string(self):
Redirect.objects.create(
old_path='/initial/?foo', new_path='/new_target/')
response = self.client.get('/initial?foo')
self.assertRedirects(response,
'/en/new_target/', status_code=301, target_status_code=404)
def test_regular_expression(self):
Redirect.objects.create(
            old_path=r'/news/index/(\d+)/(.*)/',
new_path='/my/news/$2/',
regular_expression=True)
response = self.client.get('/news/index/12345/foobar/')
self.assertRedirects(response,
'/en/my/news/foobar/',
status_code=301, target_status_code=404)
redirect = Redirect.objects.get(regular_expression=True)
self.assertEqual(redirect.nr_times_visited, 1)
def test_fallback_redirects(self):
"""
Ensure redirects with fallback_redirect set are the last evaluated
"""
Redirect.objects.create(
old_path='/project/foo',
new_path='/my/project/foo')
Redirect.objects.create(
old_path='/project/foo/(.*)',
new_path='/my/project/foo/$1',
regular_expression=True)
Redirect.objects.create(
old_path='/project/(.*)',
new_path='/projects',
regular_expression=True,
fallback_redirect=True)
Redirect.objects.create(
old_path='/project/bar/(.*)',
new_path='/my/project/bar/$1',
regular_expression=True)
Redirect.objects.create(
old_path='/project/bar',
new_path='/my/project/bar')
response = self.client.get('/project/foo')
self.assertRedirects(response,
'/en/my/project/foo',
status_code=301, target_status_code=404)
response = self.client.get('/project/bar')
self.assertRedirects(response,
'/en/my/project/bar',
status_code=301, target_status_code=404)
response = self.client.get('/project/bar/details')
self.assertRedirects(response,
'/en/my/project/bar/details',
status_code=301, target_status_code=404)
response = self.client.get('/project/foobar')
self.assertRedirects(response,
'/en/projects',
status_code=301, target_status_code=404)
response = self.client.get('/project/foo/details')
self.assertRedirects(response,
'/en/my/project/foo/details',
status_code=301, target_status_code=404)
|
[
"django.utils.six.text_type",
"django.test.utils.override_settings"
] |
[((977, 1013), 'django.test.utils.override_settings', 'override_settings', ([], {'APPEND_SLASH': '(True)'}), '(APPEND_SLASH=True)\n', (994, 1013), False, 'from django.test.utils import override_settings\n'), ((1317, 1353), 'django.test.utils.override_settings', 'override_settings', ([], {'APPEND_SLASH': '(True)'}), '(APPEND_SLASH=True)\n', (1334, 1353), False, 'from django.test.utils import override_settings\n'), ((646, 663), 'django.utils.six.text_type', 'six.text_type', (['r1'], {}), '(r1)\n', (659, 663), False, 'from django.utils import six\n')]
|
import argparse
import csv
import sys
from core_data_modules.cleaners import Codes
from core_data_modules.logging import Logger
from core_data_modules.traced_data.io import TracedDataJsonIO
from core_data_modules.util import PhoneNumberUuidTable
Logger.set_project_name("OCHA")
log = Logger(__name__)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Exports a list of phone numbers for the consenting participants "
"to REACH")
parser.add_argument("traced_data_path", metavar="traced-data-path",
help="Path to the REACH traced data file to extract phone numbers from")
parser.add_argument("phone_number_uuid_table_path", metavar="phone-number-uuid-table-path",
help="JSON file containing the phone number <-> UUID lookup table for the messages/surveys "
"datasets")
parser.add_argument("output_path", metavar="output-path",
help="CSV file to write the REACH contacts to")
args = parser.parse_args()
traced_data_path = args.traced_data_path
phone_number_uuid_table_path = args.phone_number_uuid_table_path
output_path = args.output_path
sys.setrecursionlimit(15000)
# Load the phone number <-> uuid table
log.info(f"Loading the phone number <-> uuid table from file '{phone_number_uuid_table_path}'...")
with open(phone_number_uuid_table_path, "r") as f:
phone_number_uuid_table = PhoneNumberUuidTable.load(f)
log.info(f"Loaded {len(phone_number_uuid_table.numbers())} contacts")
# Load the REACH traced data
log.info(f"Loading REACH traced data from file '{traced_data_path}'...")
with open(traced_data_path, "r") as f:
data = TracedDataJsonIO.import_json_to_traced_data_iterable(f)
log.info(f"Loaded {len(data)} traced data objects")
# Search the TracedData for consenting contacts
log.info("Searching for consenting uuids...")
consenting_uuids = set()
for td in data:
if td["withdrawn_consent"] == Codes.TRUE:
continue
consenting_uuids.add(td["UID"])
log.info(f"Found {len(consenting_uuids)} consenting uuids")
# Convert the uuids to phone numbers
log.info("Converting the uuids to phone numbers...")
phone_numbers = [f"+{phone_number_uuid_table.get_phone(uuid)}" for uuid in consenting_uuids]
log.warning(f"Exporting {len(phone_numbers)} phone numbers to {output_path}...")
with open(output_path, "w") as f:
writer = csv.DictWriter(f, fieldnames=["URN:Tel", "Name"], lineterminator="\n")
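        # "Name" is declared in fieldnames but never written below; csv.DictWriter
        # substitutes its restval (default "") for missing keys, so each row is
        # written as "<phone number>,".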
writer.writeheader()
for n in phone_numbers:
writer.writerow({
"URN:Tel": n
})
log.info(f"Wrote {len(phone_numbers)} contacts to {output_path}")
|
[
"core_data_modules.logging.Logger.set_project_name",
"sys.setrecursionlimit",
"argparse.ArgumentParser",
"core_data_modules.traced_data.io.TracedDataJsonIO.import_json_to_traced_data_iterable",
"core_data_modules.logging.Logger",
"core_data_modules.util.PhoneNumberUuidTable.load",
"csv.DictWriter"
] |
[((248, 279), 'core_data_modules.logging.Logger.set_project_name', 'Logger.set_project_name', (['"""OCHA"""'], {}), "('OCHA')\n", (271, 279), False, 'from core_data_modules.logging import Logger\n'), ((286, 302), 'core_data_modules.logging.Logger', 'Logger', (['__name__'], {}), '(__name__)\n', (292, 302), False, 'from core_data_modules.logging import Logger\n'), ((344, 460), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Exports a list of phone numbers for the consenting participants to REACH"""'}), "(description=\n 'Exports a list of phone numbers for the consenting participants to REACH')\n", (367, 460), False, 'import argparse\n'), ((1253, 1281), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(15000)'], {}), '(15000)\n', (1274, 1281), False, 'import sys\n'), ((1518, 1546), 'core_data_modules.util.PhoneNumberUuidTable.load', 'PhoneNumberUuidTable.load', (['f'], {}), '(f)\n', (1543, 1546), False, 'from core_data_modules.util import PhoneNumberUuidTable\n'), ((1794, 1849), 'core_data_modules.traced_data.io.TracedDataJsonIO.import_json_to_traced_data_iterable', 'TracedDataJsonIO.import_json_to_traced_data_iterable', (['f'], {}), '(f)\n', (1846, 1849), False, 'from core_data_modules.traced_data.io import TracedDataJsonIO\n'), ((2570, 2640), 'csv.DictWriter', 'csv.DictWriter', (['f'], {'fieldnames': "['URN:Tel', 'Name']", 'lineterminator': '"""\n"""'}), "(f, fieldnames=['URN:Tel', 'Name'], lineterminator='\\n')\n", (2584, 2640), False, 'import csv\n')]
|
from matplotlib.colors import hsv_to_rgb, to_hex
def get_n_colours(n, s=0.5, v=0.95):
return [hsv_to_rgb((i/n, s, v)) for i in range(n)]
def extend_colour_map(data, colour_map, date_colour):
missing_values = [x for x in data.dropna().unique() if x not in colour_map] # All events that don't have a specified colour
if date_colour is None: # If the default date square colour isn't specified, we should generate this too
new_colours = get_n_colours(len(missing_values)+1)
date_colour = new_colours.pop()
else:
new_colours = get_n_colours(len(missing_values))
new_colours_map = {x[0]: x[1] for x in zip(missing_values, new_colours)} # Match the events and newly generated colours
colour_map = {k: to_hex(c) for k, c in {**colour_map, **new_colours_map}.items()} # Concat dicts and convert all colours to hex
return colour_map, to_hex(date_colour)
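
# Hedged usage sketch (assumes `data` is a pandas Series, as implied by
# .dropna().unique(); the event names below are invented for illustration):
#
#     import pandas as pd
#     events = pd.Series(["meeting", "travel", None, "meeting"])
#     colour_map, date_colour = extend_colour_map(events, {"travel": "#ff0000"}, None)
#     # "meeting" gets a generated hex colour; date_colour is a generated hex default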
|
[
"matplotlib.colors.to_hex",
"matplotlib.colors.hsv_to_rgb"
] |
[((100, 125), 'matplotlib.colors.hsv_to_rgb', 'hsv_to_rgb', (['(i / n, s, v)'], {}), '((i / n, s, v))\n', (110, 125), False, 'from matplotlib.colors import hsv_to_rgb, to_hex\n'), ((752, 761), 'matplotlib.colors.to_hex', 'to_hex', (['c'], {}), '(c)\n', (758, 761), False, 'from matplotlib.colors import hsv_to_rgb, to_hex\n'), ((887, 906), 'matplotlib.colors.to_hex', 'to_hex', (['date_colour'], {}), '(date_colour)\n', (893, 906), False, 'from matplotlib.colors import hsv_to_rgb, to_hex\n')]
|
# Generated by Django 2.2.5 on 2019-10-27 02:27
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('helloWorldApp', '0010_auto_20191025_2008'),
]
operations = [
migrations.AddField(
model_name='suggestion',
name='upvote',
field=models.ManyToManyField(blank=True, related_name='sugg_upvote', to=settings.AUTH_USER_MODEL),
),
]
|
[
"django.db.migrations.swappable_dependency",
"django.db.models.ManyToManyField"
] |
[((194, 251), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (225, 251), False, 'from django.db import migrations, models\n'), ((444, 540), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""sugg_upvote"""', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, related_name='sugg_upvote', to=settings.\n AUTH_USER_MODEL)\n", (466, 540), False, 'from django.db import migrations, models\n')]
|
from sense_hat import SenseHat
import threading
import firebase_admin
from firebase_admin import credentials, firestore
# constants
COLLECTION = 'raspberry'
DOCUMENT = 'omgeving'
# firebase
cred = credentials.Certificate("../config/firebase_admin.json")
firebase_admin.initialize_app(cred)
# connect firestore
db = firestore.client()
pi_ref = db.collection(COLLECTION).document(DOCUMENT)
# sense
sense = SenseHat()
# sensors
temp = sense.get_temperature()
hum = sense.get_humidity()
temp_hum = sense.get_temperature_from_humidity()
temp_pres = sense.get_temperature_from_pressure()
pres = sense.get_pressure()
data = {
u'temperature' : temp,
u'humidity' : hum,
u'humidity temperature' : temp_hum,
u'pressure' : pres,
u'pressure temperature' : temp_pres,
}
# interval
def set_interval(func, sec):
def func_wrapper():
set_interval(func, sec)
func()
t = threading.Timer(sec, func_wrapper)
t.start()
return t
# firebase send data
def send_data():
pi_ref.set(data)
    print('updated')
timer = set_interval(send_data, 300)
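# Note: the sensor readings stored in `data` are taken once at start-up, so each
# 300 s (5 min) tick re-sends the same values; re-read the sensors inside send_data()
# for fresh measurements. Cancelling `timer` before the next tick stops the chain.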
|
[
"firebase_admin.credentials.Certificate",
"threading.Timer",
"sense_hat.SenseHat",
"firebase_admin.firestore.client",
"firebase_admin.initialize_app"
] |
[((199, 255), 'firebase_admin.credentials.Certificate', 'credentials.Certificate', (['"""../config/firebase_admin.json"""'], {}), "('../config/firebase_admin.json')\n", (222, 255), False, 'from firebase_admin import credentials, firestore\n'), ((256, 291), 'firebase_admin.initialize_app', 'firebase_admin.initialize_app', (['cred'], {}), '(cred)\n', (285, 291), False, 'import firebase_admin\n'), ((318, 336), 'firebase_admin.firestore.client', 'firestore.client', ([], {}), '()\n', (334, 336), False, 'from firebase_admin import credentials, firestore\n'), ((409, 419), 'sense_hat.SenseHat', 'SenseHat', ([], {}), '()\n', (417, 419), False, 'from sense_hat import SenseHat\n'), ((888, 922), 'threading.Timer', 'threading.Timer', (['sec', 'func_wrapper'], {}), '(sec, func_wrapper)\n', (903, 922), False, 'import threading\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 15 17:25:09 2018
@author: Heller
"""
import os
inde="1"
inde=int(inde)
flag=0
for f in os.listdir("notes/"):
flag=flag+1
if(flag==inde):
file="notes/"+f
with open(file) as fa:
content = fa.readlines()
content=[x.strip() for x in content]
rep=""
for i in content:
rep=rep+i+"<br>"
print(rep)
|
[
"os.listdir"
] |
[((135, 155), 'os.listdir', 'os.listdir', (['"""notes/"""'], {}), "('notes/')\n", (145, 155), False, 'import os\n')]
|
from setuptools import setup, find_packages
setup(name='pydukeenergy',
version='0.0.6',
description='Interface to the unofficial Duke Energy API',
url='http://github.com/w1ll1am23/pyduke-energy',
author='<NAME>',
license='MIT',
install_requires=['requests>=2.0', 'beautifulsoup4>=4.6.0'],
tests_require=['mock'],
test_suite='tests',
packages=find_packages(exclude=["dist", "*.test", "*.test.*", "test.*", "test"]),
zip_safe=True)
|
[
"setuptools.find_packages"
] |
[((397, 468), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['dist', '*.test', '*.test.*', 'test.*', 'test']"}), "(exclude=['dist', '*.test', '*.test.*', 'test.*', 'test'])\n", (410, 468), False, 'from setuptools import setup, find_packages\n')]
|
import collections
import random
def next_move_state(map, head_xy, direction):
switcher = {
'left': (-1, 0),
'right': (1, 0),
'up': (0, -1),
'down': (0, 1)
}
return map[head_xy[1] + switcher.get(direction)[1]][head_xy[0] + switcher.get(direction)[0]]
def next_direction(map, head_xy, move_xy):
switcher = {
(-1, 0): 'left',
(1, 0): 'right',
(0, -1): 'up',
(0, 1): 'down'
}
return switcher.get(((move_xy[0] - head_xy[0]), (move_xy[1] - head_xy[1])))
def shortest_path(map, starting, goal):
queue = collections.deque([[(starting[0], starting[1])]])
seen = set([starting])
tmp_map = map
if tmp_map[goal[1]][goal[0]] != 7:
tmp_map[goal[1]][goal[0]] = 1
while queue:
path = queue.popleft()
x, y = path[-1]
if (x,y) == goal:
            return path
for x2, y2 in ((x+1,y), (x-1,y), (x,y+1), (x,y-1)):
            if 0 <= x2 < len(map[0]) and 0 <= y2 < len(map) and map[y2][x2] < 2 and (x2,y2) not in seen:
queue.append(path + [(x2,y2)])
seen.add((x2,y2))
def random_move(map, head_xy):
directions = ['left', 'right', 'up', 'down']
    if head_xy[0] == 0:
        directions.remove('left')
    elif head_xy[0] == len(map) - 1:
        directions.remove('right')
    if head_xy[1] == 0:
        directions.remove('up')
    elif head_xy[1] == len(map) - 1:
        directions.remove('down')
    # iterate over a copy: removing items from the list being iterated skips elements
    for direction in list(directions):
        if next_move_state(map, head_xy, direction) > 1 and len(directions) > 1:
            directions.remove(direction)
return random.choice(directions)
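
# Worked sketch of shortest_path above (grid values invented for illustration):
# cells with value < 2 are walkable and coordinates are (x, y), indexing map[y][x].
#
#     grid = [[0, 0, 0],
#             [2, 2, 0],
#             [0, 0, 0]]
#     shortest_path(grid, (0, 0), (0, 2))
#     # -> [(0, 0), (1, 0), (2, 0), (2, 1), (2, 2), (1, 2), (0, 2)]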
|
[
"random.choice",
"collections.deque"
] |
[((592, 641), 'collections.deque', 'collections.deque', (['[[(starting[0], starting[1])]]'], {}), '([[(starting[0], starting[1])]])\n', (609, 641), False, 'import collections\n'), ((1637, 1662), 'random.choice', 'random.choice', (['directions'], {}), '(directions)\n', (1650, 1662), False, 'import random\n')]
|
from google_images_download import google_images_download
food_list = ['food with protein','unhealthy food','carbs','food with sugar','vegetables']
for food in food_list:
args = {"keywords":food, "format": "jpg", "limit":1000, "output_directory":"./tf_files/food_images"}
response = google_images_download.googleimagesdownload()
paths = response.download(args)
|
[
"google_images_download.google_images_download.googleimagesdownload"
] |
[((301, 346), 'google_images_download.google_images_download.googleimagesdownload', 'google_images_download.googleimagesdownload', ([], {}), '()\n', (344, 346), False, 'from google_images_download import google_images_download\n')]
|
"""@package MuSCADeT
"""
from scipy import signal as scp
import numpy as np
import matplotlib.pyplot as plt
import astropy.io.fits as pf
import scipy.ndimage.filters as med
import MuSCADeT.pca_ring_spectrum as pcas
import MuSCADeT.wave_transform as mw
NOISE_TAB = np.array([ 0.8907963 , 0.20066385, 0.08550751, 0.04121745, 0.02042497,
0.01018976, 0.00504662, 0.00368314])
NOISE_TAB_2G = np.array([ 0.94288346, 0.22998949, 0.10029194, 0.04860995, 0.02412084,
0.01498695])
def mMCA(img, A,kmax, niter,mode = 'PCA', PCA = [2,40], harder = 0, pos = False,threshmode = 'mom',lvl = 0, PSF = None,
soft = False, reweighting = 'none', alpha = [0,0], npca = 64, mask = [0,0], plot = False, noise_map = [0,0],
newwave=1):
"""
mMCA runs the MuSCADeT algorithm over a cube of multi-band images.
INPUTS:
img: multiband cube with size nbxn1xn2 where nb is the number of bands and n1xn2,
the size of the images
A: the mixing matrix. if mode is set to 'PCA', A will be ignored and can be set to 0
kmax: detection threshold in units of noise standard deviation usually chosen between 3 and 5
niter: number of iterations of the MuSCADeT algorithm
OUTPUTS:
S: extracted sources
        A: mixing matrix, either given by the user or estimated by PCA with option mode ='PCA'
alpha: angles in PCA space to identify pixels with same SEDs
OPTIONS:
mode: if set to 'PCA', the mixing matrix A will be estimated from PCA decomposition of the SEDs
PCA: parameters for PCA sensitivity. if mode is set to 'PCA', the PCA estimator will take PCA[0]
as the number of sources to be extracted and PCA[1] as a sensitivity parameter to discriminate between
            sources. Values between 5 and 30 are usually recommended
        harder: if set to 1, threshold levels are raised, which helps compensate for correlated noise
        pos: if set to True, the output of the hard thresholding procedure is constrained to be positive
        threshmode: if set to 'mom', adaptive method of moments is used at every iteration to decrease the threshold
        lvl: number of wavelet levels to use in the decompositions; if 0 (the default), log2(n1) levels are used
soft: if set to True, soft thresholding is used
alpha: angles in degrees to feed the PCA finder. If set, the PCA finder will use pixels along the directions pointed by these angles in PCA space to estimate SED
That option is particularly useful if automated PCA fails at clearly identifying different SEDs. This happens in case of high degrees of blending.
        mask: if parts of the band images are to be masked (e.g. stars in the FOV), the user can provide a mask with size n1xn2
            with all pixels at one except for the masked pixels that should be set to 0.
        npca: number of pixels in which images are downsampled to perform a fast PCA.
        plot: set to True to display PCA coefficients of the SEDs. Set to False for automated mode
EXAMPLE:
    S,A = wine.MCA.mMCA(cube, A, 5, 10, PCA=[2,80], mode='PCA', harder=1)
"""
nb, n1, n2 = np.shape(img)
if lvl == 0:
lvl = int(np.log2(n1))
print("using lvl (including coarse scale !)", lvl)
if np.sum(mask) == 0:
mask = np.ones((n1,n2))
img = np.multiply(img,mask)
print("mode", mode)
if mode == 'PCA':
Apca = PCA_initialise(img.T, PCA[0], angle = PCA[1], alpha = alpha, npca = npca, plot = plot, newwave=newwave)
Apca = np.multiply(Apca,[1./np.sum(Apca,0)])
A = Apca
nb,ns = np.shape(A)
X = np.zeros((ns,n1*n2))
A = np.multiply(A,[1./np.sum(A,0)])
AT = A.T
mu = 2. / linorm(A, 10)
Y = np.reshape(img,(nb,n1*n2))
Ri = np.dot(AT,Y)
sigma_y = np.zeros(nb)
for i in range(nb):
sigma_y[i] = MAD(np.reshape(Y[i,:],(n1,n2)))
if PSF is not None:
PSFT = np.copy(PSF)
for ind in range(nb):
PSFT[ind,:,:] = PSF[ind,:,:].T
def PSF_apply(x):
y = np.copy(x)*0
for i in range(nb):
y[i,:,:] = scp.fftconvolve(x[i,:,:],PSF[i,:,:],mode = 'same')
return y
def PSFT_apply(x):
y = np.copy(x)*0
for i in range(nb):
y[i,:,:] = scp.fftconvolve(x[i,:,:],PSFT[i,:,:],mode = 'same')
return y
for i in range(nb):
sigma_y[i] = sigma_y[i]*np.sqrt(np.sum(PSFT[i,:,:]**2))
sigma = np.zeros(ns)
for i in range(ns):
sigma[i] = np.sqrt(np.sum( (AT[i,:]**2)*(sigma_y**2)))
    kmas = MOM(np.reshape(Ri, (ns, n1, n2)), sigma, lvl, newwave)
print(kmas)
step = (kmas-kmax)/(niter-5)
k = kmas
################FOR PLOT#############
th = np.ones((lvl,n1,n2))
th0 = np.multiply(th.T, NOISE_TAB[:lvl]).T * sigma[0]
th1 = np.multiply(th.T, NOISE_TAB[:lvl]).T * sigma[1]
per= np.zeros((ns,niter))
w = np.zeros((ns,lvl,n1,n2))
wmap = np.zeros((ns,lvl,n1,n2))
S = np.zeros((ns,n1*n2))
thmap = np.zeros((ns,lvl,n1,n2))
ks = np.zeros(niter)
sub = 0
reweight = 0
weight2 = 1
if np.sum(noise_map) != 0:
sig_map = np.dot(AT,np.reshape(noise_map,(nb,n1*n2)))
sigma = np.reshape(sig_map,(ns,n1,n2))
for i in range(niter):
if i % 10 == 0:
print(i)
AX = np.dot(A,X)
if PSF is not None:
AX = PSF_apply(AX.reshape((nb,n1,n2))).reshape((nb,n1*n2))
R = mu*np.dot(AT, PSFT_apply(np.reshape(Y-AX,(nb,n1,n2))).reshape(nb,n1*n2))
else:
R = mu*np.dot(AT, Y-AX)
X = np.real(X+R)
S = X
if threshmode == 'mom':
kmas = MOM(np.reshape(R,(ns,n1,n2)),sigma,lvl=lvl)
threshmom = np.max([kmas,kmax])
if threshmom < k:
k = threshmom
step = ((k-kmax)/(niter-i-6))
print('threshold from MOM',threshmom)
for j in range(ns):
kthr = np.max([kmax, k])
Sj,wmap = mr_filter(np.reshape(S[j,:],(n1,n2)),20,kthr,sigma[j],harder = harder, lvl = lvl,pos = pos,soft = soft, newwave=newwave)
S[j,:] = np.reshape(Sj,(n1*n2))
X = np.multiply(S,np.reshape(mask,(n1*n2)))
a = 1
ks[i] = kthr
k = k-step
S = np.reshape(S,(ns,n1,n2))
plt.plot(ks, linewidth = 5)
plt.xlabel('Iterations', fontsize=30)
plt.ylabel('k', fontsize=30)
plt.title('k = f(it)', fontsize = 50)
plt.show()
return S,A
def MOM(R, sigma, lvl=6 , newwave=1):
"""
    Estimates the best value for a threshold from the method of moments
INPUTS:
R: multi-sources cube with size nsxn1xn2 where ns is the number of sources
and n1xn2, the size of an image
sigma: noise standard deviation
OUTPUTS:
k: threshold level
OPTIONS:
lvl: number of wavelet levels used in the decomposition, default is 6.
EXAMPLES
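        (illustrative sketch, not from the original source)
        k = MOM(residuals, sigma, lvl=6)  # residuals: (ns, n1, n2) cube, sigma: per-source noise levels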
"""
ns,n1,n2 = np.shape(R)
wmax = np.zeros((ns))
wm = np.zeros((ns,lvl))
w = np.zeros((ns,lvl,n1,n2))
for j in range(ns):
w[j,:,:,:], _ = mw.wave_transform(R[j,:,:],lvl, newwave=newwave, verbose=False)
for j in range(ns):
for l in range(lvl-1):
wm[j,l] = np.max(np.abs(w[j,l,:,:]))/NOISE_TAB[l]
wmax[j] = np.max(wm[j,:])
wmax[j] = wmax[j]/np.mean(sigma[j])
k = np.min(wmax)+(np.max(wmax)-np.min(wmax))/100
return k
def MM(R, sigma, lvl=6, newwave=1):
n1,n2 = np.shape(R)
wm = np.zeros((lvl))
w = np.zeros((lvl,n1,n2))
w[:,:,:], _ = mw.wave_transform(R,lvl, newwave=newwave, verbose=False)
for l in range(lvl-1):
wm[l] = np.max(np.abs(w[l,:,:]))/NOISE_TAB[l]
wmax = np.max(wm)/sigma
k = (wmax)-(wmax)/100
return k
def MAD(x):
"""
Estimates noise level in an image from Median Absolute Deviation
INPUTS:
x: image
OUTPUTS:
sigma: noise standard deviation
EXAMPLES
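        (illustrative sketch, not from the original source)
        sigma = MAD(image)  # image: 2D array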
"""
meda = med.median_filter(x,size = (3,3))
medfil = np.abs(x-meda)
sh = np.shape(x)
sigma = 1.48*np.median((medfil))
return sigma
def mr_filter(img, niter, k, sigma,lvl = 6, pos = False, harder = 0,mulweight = 1, subweight = 0, addweight = 0, soft = False, newwave=1):
"""
Computes wavelet iterative filtering on an image.
INPUTS:
img: image to be filtered
niter: number of iterations (10 is usually recommended)
k: threshold level in units of sigma
sigma: noise standard deviation
OUTPUTS:
imnew: filtered image
wmap: weight map
OPTIONS:
lvl: number of wavelet levels used in the decomposition, default is 6.
pos: if set to True, positivity constrain is applied to the output image
        harder: if set to one, threshold levels are raised. This is used to compensate for correlated noise
for instance
mulweight: multiplicative weight (default is 1)
subweight: weight map derived from other sources applied to diminish the impact of a given set of coefficient (default is 0)
addweight: weight map used to enhance previously detected features in an iterative process (default is 0)
soft: if set to True, soft thresholding is used
EXAMPLES
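        (illustrative sketch, reusing MAD above for the noise level)
        filtered, wmap = mr_filter(noisy_image, 10, 5, MAD(noisy_image))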
"""
shim = np.shape(img)
n1 = shim[0]
n2 = shim[1]
M = np.zeros((lvl,n1,n2))
M[-1,:,:] = 1
th = np.ones_like(M) * k
    ## keep this
th[0,:,:] = k+1
####################
th = np.multiply(th.T, NOISE_TAB[:lvl]).T * sigma
th[np.where(th<0)] = 0
th[-1,:,:] = 0
imnew = 0
i = 0
R = img
# here, always 1st gen transform (apparently better ?)
alpha, _ = mw.wave_transform(R, lvl, newwave=0, verbose=False)
if pos == True :
M[np.where(alpha-np.abs(addweight)+np.abs(subweight)-np.abs(th)*mulweight > 0)] = 1
else:
M[np.where(np.abs(alpha)-np.abs(addweight)+np.abs(subweight)-np.abs(th)*mulweight > 0)] = 1
while i < niter:
R = img-imnew
alpha, pysap_transform = mw.wave_transform(R,lvl, newwave=newwave, verbose=False)
if soft == True and i>0:
alpha= np.sign(alpha)*(np.abs(alpha)-np.abs(addweight)+np.abs(subweight)-(th*mulweight))
Rnew = mw.iuwt(M*alpha, newwave=newwave, convol2d=0,
pysap_transform=pysap_transform, verbose=False)
imnew = imnew+Rnew
imnew[(imnew < 0)] = 0
i = i+1
wmap, _ = mw.wave_transform(imnew,lvl, newwave=newwave, verbose=False)
return imnew,wmap
def linorm(A,nit):
"""
    Estimates the maximal eigenvalue of a matrix A
INPUTS:
A: matrix
nit: number of iterations
OUTPUTS:
        xn: maximal eigenvalue
EXAMPLES
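        (illustrative sketch, not from the original source)
        xn = linorm(A, 10)  # mMCA uses this to set the gradient step, mu = 2. / linorm(A, 10)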
"""
ns,nb = np.shape(A)
x0 = np.random.rand(nb)
x0 = x0/np.sqrt(np.sum(x0**2))
for i in range(nit):
x = np.dot(A,x0)
xn = np.sqrt(np.sum(x**2))
xp = x/xn
y = np.dot(A.T,xp)
yn = np.sqrt(np.sum(y**2))
if yn < np.dot(y.T,x0) :
break
x0 = y/yn
return xn
def PCA_initialise(cube, ns, angle = 15,npca = 32, alpha = [0,0], plot = 0, newwave=1):
"""
    Estimates the mixing matrix of two sources in a multi-band set of images
INPUTS:
cube: multi-band cube from which to extract mixing coefficients
ns: number of mixed sources
OUTPUTS:
A0: mixing matrix
OPTIONS:
angle: sensitivity parameter. The angular resolution at which the algorithm has to look for PCA coefficients clustering
        npca: square root of the number of pixels to be used. Since large images lead to long computation times,
            the image is downsampled so that the calculation time stays reasonable
EXAMPLES
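        (illustrative sketch, not from the original source)
        A0 = PCA_initialise(cube, 2, angle=80)  # cube: (n, n, nband) -> A0: (nband, 2) mixing matrix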
"""
n,n,nband = np.shape(cube)
cubep = cube+0.
lvl = int(np.log2(n))
s = np.zeros(nband)
for i in range(nband):
s[i] = MAD(cube[:,:,i])
cubep[:,:,i] = mr_filter(cube[:,:,i],10,3,s[i],harder = 0, lvl=lvl, newwave=newwave)[0]
cubepca = np.zeros((np.min([n,npca]),np.min([n,npca]),nband))
xk, yk = np.where(cubepca[:,:,0]==0)
cubepca[xk, yk, :] = cubep[xk*int(n/npca), yk*int(n/npca), :]
lines = np.reshape(cubep,(n**2, nband))
alphas, basis, sig= pcas.pca_ring_spectrum(cubepca[:,:,:].T,std = s)
ims0 = pcas.pca_lines(alphas,sig,angle, ns, alpha0 = alpha, plot = plot)
vals = np.array(list(set(np.reshape(ims0,(npca*npca)))))
vals = vals[np.where(vals>=0)]
nsp = np.size(vals)
spectras = np.ones([ns, nband])
rank = nsp
S_prior = np.zeros((n,n,np.size(vals)))
xs,ys = np.where(S_prior[:,:,0]==0)
count = 0
for k in vals:
x,y = np.where(ims0 == k)
im = np.zeros((npca, npca))
im[x,y] = 1
S_prior[xs,ys,count] = im[np.int_(xs*(npca/n)), np.int_(ys*(npca/n))]#/(k+1)
vecube = np.reshape(cubepca,(nband,npca*npca))
        ###### norm test #####
xcol,ycol=np.where(ims0==k)
specs = np.reshape(cubepca[xcol,ycol,:],(len(xcol),nband))
s1 =np.multiply(np.mean(specs,0),
1/np.sum(np.reshape(cubepca,(npca**2,nband),0)))
spectras[count,:]=s1/np.sum(s1,0)
S_prior[:,:,count] = S_prior[:,:,count]*np.dot(cube,spectras[count,:])
count = count+1
S0 = np.reshape(S_prior[:,:,::-1],(ns,n*n))
A0 = spectras.T
return A0
|
[
"matplotlib.pyplot.title",
"MuSCADeT.pca_ring_spectrum.pca_lines",
"numpy.abs",
"numpy.sum",
"numpy.ones",
"numpy.shape",
"numpy.mean",
"scipy.signal.fftconvolve",
"numpy.int_",
"numpy.multiply",
"numpy.copy",
"numpy.max",
"numpy.reshape",
"MuSCADeT.wave_transform.wave_transform",
"numpy.real",
"scipy.ndimage.filters.median_filter",
"numpy.size",
"matplotlib.pyplot.show",
"numpy.ones_like",
"numpy.median",
"numpy.log2",
"numpy.min",
"matplotlib.pyplot.ylabel",
"numpy.dot",
"MuSCADeT.wave_transform.iuwt",
"matplotlib.pyplot.plot",
"numpy.zeros",
"numpy.where",
"numpy.array",
"numpy.sign",
"numpy.random.rand",
"matplotlib.pyplot.xlabel",
"MuSCADeT.pca_ring_spectrum.pca_ring_spectrum"
] |
[((269, 379), 'numpy.array', 'np.array', (['[0.8907963, 0.20066385, 0.08550751, 0.04121745, 0.02042497, 0.01018976, \n 0.00504662, 0.00368314]'], {}), '([0.8907963, 0.20066385, 0.08550751, 0.04121745, 0.02042497, \n 0.01018976, 0.00504662, 0.00368314])\n', (277, 379), True, 'import numpy as np\n'), ((407, 494), 'numpy.array', 'np.array', (['[0.94288346, 0.22998949, 0.10029194, 0.04860995, 0.02412084, 0.01498695]'], {}), '([0.94288346, 0.22998949, 0.10029194, 0.04860995, 0.02412084, \n 0.01498695])\n', (415, 494), True, 'import numpy as np\n'), ((3108, 3121), 'numpy.shape', 'np.shape', (['img'], {}), '(img)\n', (3116, 3121), True, 'import numpy as np\n'), ((3299, 3321), 'numpy.multiply', 'np.multiply', (['img', 'mask'], {}), '(img, mask)\n', (3310, 3321), True, 'import numpy as np\n'), ((3576, 3587), 'numpy.shape', 'np.shape', (['A'], {}), '(A)\n', (3584, 3587), True, 'import numpy as np\n'), ((3596, 3619), 'numpy.zeros', 'np.zeros', (['(ns, n1 * n2)'], {}), '((ns, n1 * n2))\n', (3604, 3619), True, 'import numpy as np\n'), ((3709, 3739), 'numpy.reshape', 'np.reshape', (['img', '(nb, n1 * n2)'], {}), '(img, (nb, n1 * n2))\n', (3719, 3739), True, 'import numpy as np\n'), ((3746, 3759), 'numpy.dot', 'np.dot', (['AT', 'Y'], {}), '(AT, Y)\n', (3752, 3759), True, 'import numpy as np\n'), ((3773, 3785), 'numpy.zeros', 'np.zeros', (['nb'], {}), '(nb)\n', (3781, 3785), True, 'import numpy as np\n'), ((4478, 4490), 'numpy.zeros', 'np.zeros', (['ns'], {}), '(ns)\n', (4486, 4490), True, 'import numpy as np\n'), ((4809, 4831), 'numpy.ones', 'np.ones', (['(lvl, n1, n2)'], {}), '((lvl, n1, n2))\n', (4816, 4831), True, 'import numpy as np\n'), ((4956, 4977), 'numpy.zeros', 'np.zeros', (['(ns, niter)'], {}), '((ns, niter))\n', (4964, 4977), True, 'import numpy as np\n'), ((4985, 5012), 'numpy.zeros', 'np.zeros', (['(ns, lvl, n1, n2)'], {}), '((ns, lvl, n1, n2))\n', (4993, 5012), True, 'import numpy as np\n'), ((5021, 5048), 'numpy.zeros', 'np.zeros', (['(ns, lvl, n1, n2)'], {}), '((ns, lvl, n1, n2))\n', (5029, 5048), True, 'import numpy as np\n'), ((5054, 5077), 'numpy.zeros', 'np.zeros', (['(ns, n1 * n2)'], {}), '((ns, n1 * n2))\n', (5062, 5077), True, 'import numpy as np\n'), ((5087, 5114), 'numpy.zeros', 'np.zeros', (['(ns, lvl, n1, n2)'], {}), '((ns, lvl, n1, n2))\n', (5095, 5114), True, 'import numpy as np\n'), ((5121, 5136), 'numpy.zeros', 'np.zeros', (['niter'], {}), '(niter)\n', (5129, 5136), True, 'import numpy as np\n'), ((6387, 6414), 'numpy.reshape', 'np.reshape', (['S', '(ns, n1, n2)'], {}), '(S, (ns, n1, n2))\n', (6397, 6414), True, 'import numpy as np\n'), ((6416, 6441), 'matplotlib.pyplot.plot', 'plt.plot', (['ks'], {'linewidth': '(5)'}), '(ks, linewidth=5)\n', (6424, 6441), True, 'import matplotlib.pyplot as plt\n'), ((6448, 6485), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Iterations"""'], {'fontsize': '(30)'}), "('Iterations', fontsize=30)\n", (6458, 6485), True, 'import matplotlib.pyplot as plt\n'), ((6490, 6518), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""k"""'], {'fontsize': '(30)'}), "('k', fontsize=30)\n", (6500, 6518), True, 'import matplotlib.pyplot as plt\n'), ((6523, 6558), 'matplotlib.pyplot.title', 'plt.title', (['"""k = f(it)"""'], {'fontsize': '(50)'}), "('k = f(it)', fontsize=50)\n", (6532, 6558), True, 'import matplotlib.pyplot as plt\n'), ((6565, 6575), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6573, 6575), True, 'import matplotlib.pyplot as plt\n'), ((7072, 7083), 'numpy.shape', 'np.shape', (['R'], {}), '(R)\n', (7080, 7083), True, 'import numpy 
as np\n'), ((7100, 7112), 'numpy.zeros', 'np.zeros', (['ns'], {}), '(ns)\n', (7108, 7112), True, 'import numpy as np\n'), ((7124, 7143), 'numpy.zeros', 'np.zeros', (['(ns, lvl)'], {}), '((ns, lvl))\n', (7132, 7143), True, 'import numpy as np\n'), ((7151, 7178), 'numpy.zeros', 'np.zeros', (['(ns, lvl, n1, n2)'], {}), '((ns, lvl, n1, n2))\n', (7159, 7178), True, 'import numpy as np\n'), ((7620, 7631), 'numpy.shape', 'np.shape', (['R'], {}), '(R)\n', (7628, 7631), True, 'import numpy as np\n'), ((7650, 7663), 'numpy.zeros', 'np.zeros', (['lvl'], {}), '(lvl)\n', (7658, 7663), True, 'import numpy as np\n'), ((7674, 7697), 'numpy.zeros', 'np.zeros', (['(lvl, n1, n2)'], {}), '((lvl, n1, n2))\n', (7682, 7697), True, 'import numpy as np\n'), ((7740, 7797), 'MuSCADeT.wave_transform.wave_transform', 'mw.wave_transform', (['R', 'lvl'], {'newwave': 'newwave', 'verbose': '(False)'}), '(R, lvl, newwave=newwave, verbose=False)\n', (7757, 7797), True, 'import MuSCADeT.wave_transform as mw\n'), ((8171, 8204), 'scipy.ndimage.filters.median_filter', 'med.median_filter', (['x'], {'size': '(3, 3)'}), '(x, size=(3, 3))\n', (8188, 8204), True, 'import scipy.ndimage.filters as med\n'), ((8218, 8234), 'numpy.abs', 'np.abs', (['(x - meda)'], {}), '(x - meda)\n', (8224, 8234), True, 'import numpy as np\n'), ((8242, 8253), 'numpy.shape', 'np.shape', (['x'], {}), '(x)\n', (8250, 8253), True, 'import numpy as np\n'), ((9520, 9533), 'numpy.shape', 'np.shape', (['img'], {}), '(img)\n', (9528, 9533), True, 'import numpy as np\n'), ((9576, 9599), 'numpy.zeros', 'np.zeros', (['(lvl, n1, n2)'], {}), '((lvl, n1, n2))\n', (9584, 9599), True, 'import numpy as np\n'), ((9921, 9972), 'MuSCADeT.wave_transform.wave_transform', 'mw.wave_transform', (['R', 'lvl'], {'newwave': '(0)', 'verbose': '(False)'}), '(R, lvl, newwave=0, verbose=False)\n', (9938, 9972), True, 'import MuSCADeT.wave_transform as mw\n'), ((11051, 11062), 'numpy.shape', 'np.shape', (['A'], {}), '(A)\n', (11059, 11062), True, 'import numpy as np\n'), ((11072, 11090), 'numpy.random.rand', 'np.random.rand', (['nb'], {}), '(nb)\n', (11086, 11090), True, 'import numpy as np\n'), ((12129, 12143), 'numpy.shape', 'np.shape', (['cube'], {}), '(cube)\n', (12137, 12143), True, 'import numpy as np\n'), ((12198, 12213), 'numpy.zeros', 'np.zeros', (['nband'], {}), '(nband)\n', (12206, 12213), True, 'import numpy as np\n'), ((12453, 12484), 'numpy.where', 'np.where', (['(cubepca[:, :, 0] == 0)'], {}), '(cubepca[:, :, 0] == 0)\n', (12461, 12484), True, 'import numpy as np\n'), ((12559, 12593), 'numpy.reshape', 'np.reshape', (['cubep', '(n ** 2, nband)'], {}), '(cubep, (n ** 2, nband))\n', (12569, 12593), True, 'import numpy as np\n'), ((12620, 12669), 'MuSCADeT.pca_ring_spectrum.pca_ring_spectrum', 'pcas.pca_ring_spectrum', (['cubepca[:, :, :].T'], {'std': 's'}), '(cubepca[:, :, :].T, std=s)\n', (12642, 12669), True, 'import MuSCADeT.pca_ring_spectrum as pcas\n'), ((12684, 12747), 'MuSCADeT.pca_ring_spectrum.pca_lines', 'pcas.pca_lines', (['alphas', 'sig', 'angle', 'ns'], {'alpha0': 'alpha', 'plot': 'plot'}), '(alphas, sig, angle, ns, alpha0=alpha, plot=plot)\n', (12698, 12747), True, 'import MuSCADeT.pca_ring_spectrum as pcas\n'), ((12858, 12871), 'numpy.size', 'np.size', (['vals'], {}), '(vals)\n', (12865, 12871), True, 'import numpy as np\n'), ((12892, 12912), 'numpy.ones', 'np.ones', (['[ns, nband]'], {}), '([ns, nband])\n', (12899, 12912), True, 'import numpy as np\n'), ((12990, 13021), 'numpy.where', 'np.where', (['(S_prior[:, :, 0] == 0)'], {}), '(S_prior[:, :, 0] == 0)\n', 
(12998, 13021), True, 'import numpy as np\n'), ((13708, 13752), 'numpy.reshape', 'np.reshape', (['S_prior[:, :, ::-1]', '(ns, n * n)'], {}), '(S_prior[:, :, ::-1], (ns, n * n))\n', (13718, 13752), True, 'import numpy as np\n'), ((3238, 3250), 'numpy.sum', 'np.sum', (['mask'], {}), '(mask)\n', (3244, 3250), True, 'import numpy as np\n'), ((3272, 3289), 'numpy.ones', 'np.ones', (['(n1, n2)'], {}), '((n1, n2))\n', (3279, 3289), True, 'import numpy as np\n'), ((3906, 3918), 'numpy.copy', 'np.copy', (['PSF'], {}), '(PSF)\n', (3913, 3918), True, 'import numpy as np\n'), ((4596, 4624), 'numpy.reshape', 'np.reshape', (['Ri', '(ns, n1, n1)'], {}), '(Ri, (ns, n1, n1))\n', (4606, 4624), True, 'import numpy as np\n'), ((5189, 5206), 'numpy.sum', 'np.sum', (['noise_map'], {}), '(noise_map)\n', (5195, 5206), True, 'import numpy as np\n'), ((5291, 5324), 'numpy.reshape', 'np.reshape', (['sig_map', '(ns, n1, n2)'], {}), '(sig_map, (ns, n1, n2))\n', (5301, 5324), True, 'import numpy as np\n'), ((5408, 5420), 'numpy.dot', 'np.dot', (['A', 'X'], {}), '(A, X)\n', (5414, 5420), True, 'import numpy as np\n'), ((5672, 5686), 'numpy.real', 'np.real', (['(X + R)'], {}), '(X + R)\n', (5679, 5686), True, 'import numpy as np\n'), ((7229, 7295), 'MuSCADeT.wave_transform.wave_transform', 'mw.wave_transform', (['R[j, :, :]', 'lvl'], {'newwave': 'newwave', 'verbose': '(False)'}), '(R[j, :, :], lvl, newwave=newwave, verbose=False)\n', (7246, 7295), True, 'import MuSCADeT.wave_transform as mw\n'), ((7428, 7444), 'numpy.max', 'np.max', (['wm[j, :]'], {}), '(wm[j, :])\n', (7434, 7444), True, 'import numpy as np\n'), ((7513, 7525), 'numpy.min', 'np.min', (['wmax'], {}), '(wmax)\n', (7519, 7525), True, 'import numpy as np\n'), ((7889, 7899), 'numpy.max', 'np.max', (['wm'], {}), '(wm)\n', (7895, 7899), True, 'import numpy as np\n'), ((8271, 8288), 'numpy.median', 'np.median', (['medfil'], {}), '(medfil)\n', (8280, 8288), True, 'import numpy as np\n'), ((9626, 9641), 'numpy.ones_like', 'np.ones_like', (['M'], {}), '(M)\n', (9638, 9641), True, 'import numpy as np\n'), ((9769, 9785), 'numpy.where', 'np.where', (['(th < 0)'], {}), '(th < 0)\n', (9777, 9785), True, 'import numpy as np\n'), ((10280, 10337), 'MuSCADeT.wave_transform.wave_transform', 'mw.wave_transform', (['R', 'lvl'], {'newwave': 'newwave', 'verbose': '(False)'}), '(R, lvl, newwave=newwave, verbose=False)\n', (10297, 10337), True, 'import MuSCADeT.wave_transform as mw\n'), ((10494, 10594), 'MuSCADeT.wave_transform.iuwt', 'mw.iuwt', (['(M * alpha)'], {'newwave': 'newwave', 'convol2d': '(0)', 'pysap_transform': 'pysap_transform', 'verbose': '(False)'}), '(M * alpha, newwave=newwave, convol2d=0, pysap_transform=\n pysap_transform, verbose=False)\n', (10501, 10594), True, 'import MuSCADeT.wave_transform as mw\n'), ((10724, 10785), 'MuSCADeT.wave_transform.wave_transform', 'mw.wave_transform', (['imnew', 'lvl'], {'newwave': 'newwave', 'verbose': '(False)'}), '(imnew, lvl, newwave=newwave, verbose=False)\n', (10741, 10785), True, 'import MuSCADeT.wave_transform as mw\n'), ((11169, 11182), 'numpy.dot', 'np.dot', (['A', 'x0'], {}), '(A, x0)\n', (11175, 11182), True, 'import numpy as np\n'), ((11247, 11262), 'numpy.dot', 'np.dot', (['A.T', 'xp'], {}), '(A.T, xp)\n', (11253, 11262), True, 'import numpy as np\n'), ((12178, 12188), 'numpy.log2', 'np.log2', (['n'], {}), '(n)\n', (12185, 12188), True, 'import numpy as np\n'), ((12829, 12848), 'numpy.where', 'np.where', (['(vals >= 0)'], {}), '(vals >= 0)\n', (12837, 12848), True, 'import numpy as np\n'), ((13071, 13090), 
'numpy.where', 'np.where', (['(ims0 == k)'], {}), '(ims0 == k)\n', (13079, 13090), True, 'import numpy as np\n'), ((13104, 13126), 'numpy.zeros', 'np.zeros', (['(npca, npca)'], {}), '((npca, npca))\n', (13112, 13126), True, 'import numpy as np\n'), ((13251, 13292), 'numpy.reshape', 'np.reshape', (['cubepca', '(nband, npca * npca)'], {}), '(cubepca, (nband, npca * npca))\n', (13261, 13292), True, 'import numpy as np\n'), ((13338, 13357), 'numpy.where', 'np.where', (['(ims0 == k)'], {}), '(ims0 == k)\n', (13346, 13357), True, 'import numpy as np\n'), ((3158, 3169), 'numpy.log2', 'np.log2', (['n1'], {}), '(n1)\n', (3165, 3169), True, 'import numpy as np\n'), ((3835, 3864), 'numpy.reshape', 'np.reshape', (['Y[i, :]', '(n1, n2)'], {}), '(Y[i, :], (n1, n2))\n', (3845, 3864), True, 'import numpy as np\n'), ((4542, 4578), 'numpy.sum', 'np.sum', (['(AT[i, :] ** 2 * sigma_y ** 2)'], {}), '(AT[i, :] ** 2 * sigma_y ** 2)\n', (4548, 4578), True, 'import numpy as np\n'), ((4840, 4874), 'numpy.multiply', 'np.multiply', (['th.T', 'NOISE_TAB[:lvl]'], {}), '(th.T, NOISE_TAB[:lvl])\n', (4851, 4874), True, 'import numpy as np\n'), ((4898, 4932), 'numpy.multiply', 'np.multiply', (['th.T', 'NOISE_TAB[:lvl]'], {}), '(th.T, NOISE_TAB[:lvl])\n', (4909, 4932), True, 'import numpy as np\n'), ((5241, 5277), 'numpy.reshape', 'np.reshape', (['noise_map', '(nb, n1 * n2)'], {}), '(noise_map, (nb, n1 * n2))\n', (5251, 5277), True, 'import numpy as np\n'), ((5818, 5838), 'numpy.max', 'np.max', (['[kmas, kmax]'], {}), '([kmas, kmax])\n', (5824, 5838), True, 'import numpy as np\n'), ((6054, 6071), 'numpy.max', 'np.max', (['[kmax, k]'], {}), '([kmax, k])\n', (6060, 6071), True, 'import numpy as np\n'), ((6236, 6259), 'numpy.reshape', 'np.reshape', (['Sj', '(n1 * n2)'], {}), '(Sj, n1 * n2)\n', (6246, 6259), True, 'import numpy as np\n'), ((6287, 6312), 'numpy.reshape', 'np.reshape', (['mask', '(n1 * n2)'], {}), '(mask, n1 * n2)\n', (6297, 6312), True, 'import numpy as np\n'), ((7470, 7487), 'numpy.mean', 'np.mean', (['sigma[j]'], {}), '(sigma[j])\n', (7477, 7487), True, 'import numpy as np\n'), ((9717, 9751), 'numpy.multiply', 'np.multiply', (['th.T', 'NOISE_TAB[:lvl]'], {}), '(th.T, NOISE_TAB[:lvl])\n', (9728, 9751), True, 'import numpy as np\n'), ((11111, 11126), 'numpy.sum', 'np.sum', (['(x0 ** 2)'], {}), '(x0 ** 2)\n', (11117, 11126), True, 'import numpy as np\n'), ((11203, 11217), 'numpy.sum', 'np.sum', (['(x ** 2)'], {}), '(x ** 2)\n', (11209, 11217), True, 'import numpy as np\n'), ((11283, 11297), 'numpy.sum', 'np.sum', (['(y ** 2)'], {}), '(y ** 2)\n', (11289, 11297), True, 'import numpy as np\n'), ((11314, 11329), 'numpy.dot', 'np.dot', (['y.T', 'x0'], {}), '(y.T, x0)\n', (11320, 11329), True, 'import numpy as np\n'), ((12398, 12415), 'numpy.min', 'np.min', (['[n, npca]'], {}), '([n, npca])\n', (12404, 12415), True, 'import numpy as np\n'), ((12415, 12432), 'numpy.min', 'np.min', (['[n, npca]'], {}), '([n, npca])\n', (12421, 12432), True, 'import numpy as np\n'), ((12962, 12975), 'numpy.size', 'np.size', (['vals'], {}), '(vals)\n', (12969, 12975), True, 'import numpy as np\n'), ((13447, 13464), 'numpy.mean', 'np.mean', (['specs', '(0)'], {}), '(specs, 0)\n', (13454, 13464), True, 'import numpy as np\n'), ((13581, 13594), 'numpy.sum', 'np.sum', (['s1', '(0)'], {}), '(s1, 0)\n', (13587, 13594), True, 'import numpy as np\n'), ((13642, 13674), 'numpy.dot', 'np.dot', (['cube', 'spectras[count, :]'], {}), '(cube, spectras[count, :])\n', (13648, 13674), True, 'import numpy as np\n'), ((3644, 3656), 'numpy.sum', 'np.sum', 
(['A', '(0)'], {}), '(A, 0)\n', (3650, 3656), True, 'import numpy as np\n'), ((4035, 4045), 'numpy.copy', 'np.copy', (['x'], {}), '(x)\n', (4042, 4045), True, 'import numpy as np\n'), ((4107, 4161), 'scipy.signal.fftconvolve', 'scp.fftconvolve', (['x[i, :, :]', 'PSF[i, :, :]'], {'mode': '"""same"""'}), "(x[i, :, :], PSF[i, :, :], mode='same')\n", (4122, 4161), True, 'from scipy import signal as scp\n'), ((4222, 4232), 'numpy.copy', 'np.copy', (['x'], {}), '(x)\n', (4229, 4232), True, 'import numpy as np\n'), ((4294, 4349), 'scipy.signal.fftconvolve', 'scp.fftconvolve', (['x[i, :, :]', 'PSFT[i, :, :]'], {'mode': '"""same"""'}), "(x[i, :, :], PSFT[i, :, :], mode='same')\n", (4309, 4349), True, 'from scipy import signal as scp\n'), ((5643, 5661), 'numpy.dot', 'np.dot', (['AT', '(Y - AX)'], {}), '(AT, Y - AX)\n', (5649, 5661), True, 'import numpy as np\n'), ((5754, 5781), 'numpy.reshape', 'np.reshape', (['R', '(ns, n1, n2)'], {}), '(R, (ns, n1, n2))\n', (5764, 5781), True, 'import numpy as np\n'), ((6104, 6133), 'numpy.reshape', 'np.reshape', (['S[j, :]', '(n1, n2)'], {}), '(S[j, :], (n1, n2))\n', (6114, 6133), True, 'import numpy as np\n'), ((7527, 7539), 'numpy.max', 'np.max', (['wmax'], {}), '(wmax)\n', (7533, 7539), True, 'import numpy as np\n'), ((7540, 7552), 'numpy.min', 'np.min', (['wmax'], {}), '(wmax)\n', (7546, 7552), True, 'import numpy as np\n'), ((7847, 7865), 'numpy.abs', 'np.abs', (['w[l, :, :]'], {}), '(w[l, :, :])\n', (7853, 7865), True, 'import numpy as np\n'), ((10390, 10404), 'numpy.sign', 'np.sign', (['alpha'], {}), '(alpha)\n', (10397, 10404), True, 'import numpy as np\n'), ((12780, 12809), 'numpy.reshape', 'np.reshape', (['ims0', '(npca * npca)'], {}), '(ims0, npca * npca)\n', (12790, 12809), True, 'import numpy as np\n'), ((13182, 13206), 'numpy.int_', 'np.int_', (['(xs * (npca / n))'], {}), '(xs * (npca / n))\n', (13189, 13206), True, 'import numpy as np\n'), ((13204, 13228), 'numpy.int_', 'np.int_', (['(ys * (npca / n))'], {}), '(ys * (npca / n))\n', (13211, 13228), True, 'import numpy as np\n'), ((3523, 3538), 'numpy.sum', 'np.sum', (['Apca', '(0)'], {}), '(Apca, 0)\n', (3529, 3538), True, 'import numpy as np\n'), ((4440, 4466), 'numpy.sum', 'np.sum', (['(PSFT[i, :, :] ** 2)'], {}), '(PSFT[i, :, :] ** 2)\n', (4446, 4466), True, 'import numpy as np\n'), ((7377, 7398), 'numpy.abs', 'np.abs', (['w[j, l, :, :]'], {}), '(w[j, l, :, :])\n', (7383, 7398), True, 'import numpy as np\n'), ((13512, 13554), 'numpy.reshape', 'np.reshape', (['cubepca', '(npca ** 2, nband)', '(0)'], {}), '(cubepca, (npca ** 2, nband), 0)\n', (13522, 13554), True, 'import numpy as np\n'), ((10438, 10455), 'numpy.abs', 'np.abs', (['subweight'], {}), '(subweight)\n', (10444, 10455), True, 'import numpy as np\n'), ((10039, 10056), 'numpy.abs', 'np.abs', (['subweight'], {}), '(subweight)\n', (10045, 10056), True, 'import numpy as np\n'), ((10057, 10067), 'numpy.abs', 'np.abs', (['th'], {}), '(th)\n', (10063, 10067), True, 'import numpy as np\n'), ((10151, 10168), 'numpy.abs', 'np.abs', (['subweight'], {}), '(subweight)\n', (10157, 10168), True, 'import numpy as np\n'), ((10169, 10179), 'numpy.abs', 'np.abs', (['th'], {}), '(th)\n', (10175, 10179), True, 'import numpy as np\n'), ((10406, 10419), 'numpy.abs', 'np.abs', (['alpha'], {}), '(alpha)\n', (10412, 10419), True, 'import numpy as np\n'), ((10420, 10437), 'numpy.abs', 'np.abs', (['addweight'], {}), '(addweight)\n', (10426, 10437), True, 'import numpy as np\n'), ((5562, 5594), 'numpy.reshape', 'np.reshape', (['(Y - AX)', '(nb, n1, n2)'], {}), '(Y - 
AX, (nb, n1, n2))\n', (5572, 5594), True, 'import numpy as np\n'), ((10021, 10038), 'numpy.abs', 'np.abs', (['addweight'], {}), '(addweight)\n', (10027, 10038), True, 'import numpy as np\n'), ((10119, 10132), 'numpy.abs', 'np.abs', (['alpha'], {}), '(alpha)\n', (10125, 10132), True, 'import numpy as np\n'), ((10133, 10150), 'numpy.abs', 'np.abs', (['addweight'], {}), '(addweight)\n', (10139, 10150), True, 'import numpy as np\n')]
|
import os
import typing as T
import warnings
import fsspec # type: ignore
import numpy as np
import numpy.typing as NT
import pandas as pd # type: ignore
import rioxarray # type: ignore
import xarray as xr
from xarray_sentinel import conventions, esa_safe
def open_calibration_dataset(calibration: esa_safe.PathType) -> xr.Dataset:
calibration_vectors = esa_safe.parse_tag_list(
calibration, ".//calibrationVector", "calibration"
)
azimuth_time_list = []
pixel_list = []
line_list = []
sigmaNought_list = []
betaNought_list = []
gamma_list = []
dn_list = []
for vector in calibration_vectors:
azimuth_time_list.append(vector["azimuthTime"])
line_list.append(vector["line"])
pixel = np.fromstring(vector["pixel"]["$"], dtype=int, sep=" ") # type: ignore
pixel_list.append(pixel)
sigmaNought = np.fromstring(vector["sigmaNought"]["$"], dtype=float, sep=" ") # type: ignore
sigmaNought_list.append(sigmaNought)
betaNought = np.fromstring(vector["betaNought"]["$"], dtype=float, sep=" ") # type: ignore
betaNought_list.append(betaNought)
gamma = np.fromstring(vector["gamma"]["$"], dtype=float, sep=" ") # type: ignore
gamma_list.append(gamma)
dn = np.fromstring(vector["dn"]["$"], dtype=float, sep=" ") # type: ignore
dn_list.append(dn)
pixel = np.array(pixel_list)
if not np.allclose(pixel, pixel[0]):
raise ValueError(
"Unable to organise calibration vectors in a regular line-pixel grid"
)
data_vars = {
"azimuth_time": ("line", [np.datetime64(dt) for dt in azimuth_time_list]),
"sigmaNought": (("line", "pixel"), sigmaNought_list),
"betaNought": (("line", "pixel"), betaNought_list),
"gamma": (("line", "pixel"), gamma_list),
"dn": (("line", "pixel"), dn_list),
}
coords = {"line": line_list, "pixel": pixel_list[0]}
return xr.Dataset(data_vars=data_vars, coords=coords)
def open_coordinateConversion_dataset(annotation_path: esa_safe.PathType) -> xr.Dataset:
coordinate_conversion = esa_safe.parse_tag(
annotation_path, ".//coordinateConversionList"
)
gr0 = []
sr0 = []
azimuth_time = []
slant_range_time = []
srgrCoefficients: T.List[NT.NDArray[np.float_]] = []
grsrCoefficients: T.List[NT.NDArray[np.float_]] = []
for values in coordinate_conversion["coordinateConversion"]:
sr0.append(values["sr0"])
gr0.append(values["gr0"])
azimuth_time.append(values["azimuthTime"])
slant_range_time.append(values["slantRangeTime"])
srgrCoefficients.append(
np.fromstring(values["srgrCoefficients"]["$"], dtype=float, sep=" ")
)
grsrCoefficients.append(
np.fromstring(values["grsrCoefficients"]["$"], dtype=float, sep=" ")
)
coords = {
"azimuth_time": [np.datetime64(dt) for dt in azimuth_time],
"degree": list(range(len(srgrCoefficients[0]))),
}
data_vars = {
"gr0": ("azimuth_time", gr0),
"sr0": ("azimuth_time", sr0),
"slant_range_time": ("azimuth_time", slant_range_time),
"srgr_coefficients": (("azimuth_time", "degree"), srgrCoefficients),
"grsr_coefficients": (("azimuth_time", "degree"), grsrCoefficients),
}
return xr.Dataset(data_vars=data_vars, coords=coords)
def get_fs_path(
urlpath_or_path: esa_safe.PathType, fs: T.Optional[fsspec.AbstractFileSystem] = None
) -> T.Tuple[fsspec.AbstractFileSystem, str]:
if fs is None:
fs, _, paths = fsspec.get_fs_token_paths(urlpath_or_path)
if len(paths) == 0:
raise ValueError(f"file or object not found {urlpath_or_path!r}")
elif len(paths) > 1:
raise ValueError(f"multiple files or objects found {urlpath_or_path!r}")
path = paths[0]
else:
path = urlpath_or_path
return fs, path
def open_gcp_dataset(annotation: esa_safe.PathOrFileType) -> xr.Dataset:
geolocation_grid_points = esa_safe.parse_tag_list(
annotation, ".//geolocationGridPoint"
)
azimuth_time = []
slant_range_time = []
line_set = set()
pixel_set = set()
for ggp in geolocation_grid_points:
if ggp["line"] not in line_set:
azimuth_time.append(np.datetime64(ggp["azimuthTime"]))
line_set.add(ggp["line"])
if ggp["pixel"] not in pixel_set:
slant_range_time.append(ggp["slantRangeTime"])
pixel_set.add(ggp["pixel"])
shape = (len(azimuth_time), len(slant_range_time))
dims = ("azimuth_time", "slant_range_time")
data_vars = {
"latitude": (dims, np.full(shape, np.nan)),
"longitude": (dims, np.full(shape, np.nan)),
"height": (dims, np.full(shape, np.nan)),
"incidenceAngle": (dims, np.full(shape, np.nan)),
"elevationAngle": (dims, np.full(shape, np.nan)),
}
line = sorted(line_set)
pixel = sorted(pixel_set)
for ggp in geolocation_grid_points:
for var in data_vars:
j = line.index(ggp["line"])
i = pixel.index(ggp["pixel"])
data_vars[var][1][j, i] = ggp[var]
ds = xr.Dataset(
data_vars=data_vars,
coords={
"azimuth_time": [np.datetime64(dt) for dt in azimuth_time],
"slant_range_time": slant_range_time,
"line": ("azimuth_time", line),
"pixel": ("slant_range_time", pixel),
},
)
return ds
def open_attitude_dataset(annotation: esa_safe.PathOrFileType) -> xr.Dataset:
attitudes = esa_safe.parse_tag_list(annotation, ".//attitude")
variables = ["q0", "q1", "q2", "q3", "wx", "wy", "wz", "pitch", "roll", "yaw"]
azimuth_time: T.List[T.Any] = []
data_vars: T.Dict[str, T.Any] = {var: ("azimuth_time", []) for var in variables}
for attitude in attitudes:
azimuth_time.append(attitude["time"])
for var in variables:
data_vars[var][1].append(attitude[var])
ds = xr.Dataset(
data_vars=data_vars,
coords={"azimuth_time": [np.datetime64(dt) for dt in azimuth_time]},
)
return ds
def open_orbit_dataset(annotation: esa_safe.PathOrFileType) -> xr.Dataset:
orbits = esa_safe.parse_tag_list(annotation, ".//orbit")
reference_system = orbits[0]["frame"]
variables = ["position", "velocity"]
data: T.Dict[str, T.List[T.Any]] = {var: [[], [], []] for var in variables}
azimuth_time: T.List[T.Any] = []
for orbit in orbits:
azimuth_time.append(orbit["time"])
data["position"][0].append(orbit["position"]["x"])
data["position"][1].append(orbit["position"]["y"])
data["position"][2].append(orbit["position"]["z"])
data["velocity"][0].append(orbit["velocity"]["x"])
data["velocity"][1].append(orbit["velocity"]["y"])
data["velocity"][2].append(orbit["velocity"]["z"])
if orbit["frame"] != reference_system:
warnings.warn(
"reference_system is not consistent in all the state vectors. "
)
reference_system = None
position = xr.Variable(data=data["position"], dims=("axis", "azimuth_time")) # type: ignore
velocity = xr.Variable(data=data["velocity"], dims=("axis", "azimuth_time")) # type: ignore
attrs = {}
if reference_system is not None:
attrs.update({"reference_system": reference_system})
ds = xr.Dataset(
data_vars={"position": position, "velocity": velocity},
attrs=attrs,
coords={
"azimuth_time": [np.datetime64(dt) for dt in azimuth_time],
"axis": [0, 1, 2],
},
)
return ds
def open_dc_estimate_dataset(annotation: esa_safe.PathOrFileType) -> xr.Dataset:
dc_estimates = esa_safe.parse_dc_estimate(annotation)
azimuth_time = []
t0 = []
data_dc_poly = []
for dc_estimate in dc_estimates:
azimuth_time.append(dc_estimate["azimuthTime"])
t0.append(dc_estimate["t0"])
data_dc_poly.append(dc_estimate["dataDcPolynomial"])
ds = xr.Dataset(
data_vars={
"t0": ("azimuth_time", t0),
"data_dc_polynomial": (("azimuth_time", "degree"), data_dc_poly),
},
coords={
"azimuth_time": [np.datetime64(at) for at in azimuth_time],
"degree": list(range(len(data_dc_poly[0]))),
},
)
return ds
def open_azimuth_fm_rate_dataset(annotation: esa_safe.PathOrFileType) -> xr.Dataset:
azimuth_fm_rates = esa_safe.parse_azimuth_fm_rate(annotation)
azimuth_time = []
t0 = []
azimuth_fm_rate_poly = []
for azimuth_fm_rate in azimuth_fm_rates:
azimuth_time.append(azimuth_fm_rate["azimuthTime"])
t0.append(azimuth_fm_rate["t0"])
azimuth_fm_rate_poly.append(azimuth_fm_rate["azimuthFmRatePolynomial"])
ds = xr.Dataset(
data_vars={
"t0": ("azimuth_time", t0),
"azimuth_fm_rate_polynomial": (
("azimuth_time", "degree"),
azimuth_fm_rate_poly,
),
},
coords={
"azimuth_time": [np.datetime64(at) for at in azimuth_time],
"degree": list(range(len(azimuth_fm_rate_poly[0]))),
},
)
return ds
def find_avalable_groups(
ancillary_data_paths: T.Dict[str, T.Dict[str, T.Dict[str, str]]],
product_attrs: T.Dict[str, T.Any],
fs: fsspec.AbstractFileSystem = fsspec.filesystem("file"),
) -> T.Dict[str, str]:
groups: T.Dict[str, str] = {}
for subswath_id, subswath_data_path in ancillary_data_paths.items():
for pol_id, pol_data_paths in subswath_data_path.items():
try:
with fs.open(pol_data_paths["s1Level1ProductSchema"]):
pass
except FileNotFoundError:
continue
groups[subswath_id] = ""
groups[f"{subswath_id}/{pol_id}"] = pol_data_paths["s1Level1ProductSchema"]
for metadata_group in [
"gcp",
"orbit",
"attitude",
"dc_estimate",
"azimuth_fm_rate",
]:
groups[f"{subswath_id}/{pol_id}/{metadata_group}"] = pol_data_paths[
"s1Level1ProductSchema"
]
try:
with fs.open(pol_data_paths["s1Level1CalibrationSchema"]):
pass
except FileNotFoundError:
continue
groups[f"{subswath_id}/{pol_id}/calibration"] = pol_data_paths[
"s1Level1CalibrationSchema"
]
return groups
def open_pol_dataset(
measurement: esa_safe.PathType,
annotation: esa_safe.PathType,
chunks: T.Optional[T.Union[int, T.Dict[str, int]]] = None,
) -> xr.Dataset:
image_information = esa_safe.parse_tag(annotation, ".//imageInformation")
product_information = esa_safe.parse_tag(annotation, ".//productInformation")
swath_timing = esa_safe.parse_tag(annotation, ".//swathTiming")
number_of_samples = image_information["numberOfSamples"]
first_slant_range_time = image_information["slantRangeTime"]
slant_range_sampling = 1 / product_information["rangeSamplingRate"]
slant_range_time = np.linspace(
first_slant_range_time,
first_slant_range_time + slant_range_sampling * (number_of_samples - 1),
number_of_samples,
)
number_of_lines = image_information["numberOfLines"]
first_azimuth_time = image_information["productFirstLineUtcTime"]
azimuth_time_interval = image_information["azimuthTimeInterval"]
azimuth_time = pd.date_range(
start=first_azimuth_time,
periods=number_of_lines,
freq=pd.to_timedelta(azimuth_time_interval, "s"),
).values
attrs = {
"azimuth_steering_rate": product_information["azimuthSteeringRate"],
"sar:center_frequency": product_information["radarFrequency"] / 10 ** 9,
}
number_of_bursts = swath_timing["burstList"]["@count"]
if number_of_bursts:
lines_per_burst = swath_timing["linesPerBurst"]
attrs.update(
{
"number_of_bursts": number_of_bursts,
"lines_per_burst": lines_per_burst,
}
)
for burst_index, burst in enumerate(swath_timing["burstList"]["burst"]):
first_azimuth_time_burst = burst["azimuthTime"]
azimuth_time_burst = pd.date_range(
start=first_azimuth_time_burst,
periods=lines_per_burst,
freq=pd.to_timedelta(azimuth_time_interval, "s"),
)
azimuth_time[
lines_per_burst * burst_index : lines_per_burst * (burst_index + 1)
] = azimuth_time_burst
if chunks is None:
chunks = {"y": lines_per_burst}
arr = rioxarray.open_rasterio(measurement, chunks=chunks)
arr = arr.squeeze("band").drop_vars(["band", "spatial_ref"])
arr = arr.rename({"y": "line", "x": "pixel"})
arr = arr.assign_coords(
{
"pixel": np.arange(0, arr["pixel"].size, dtype=int),
"line": np.arange(0, arr["line"].size, dtype=int),
"slant_range_time": ("pixel", slant_range_time),
"azimuth_time": ("line", azimuth_time),
}
)
if number_of_bursts == 0:
arr = arr.swap_dims({"line": "azimuth_time", "pixel": "slant_range_time"})
return xr.Dataset(attrs=attrs, data_vars={"measurement": arr})
def crop_burst_dataset(pol_dataset: xr.Dataset, burst_index: int) -> xr.Dataset:
if burst_index < 0 or burst_index >= pol_dataset.attrs["number_of_bursts"]:
raise IndexError(f"{burst_index=} out of bounds")
lines_per_burst = pol_dataset.attrs["lines_per_burst"]
ds = pol_dataset.sel(
line=slice(
lines_per_burst * burst_index, lines_per_burst * (burst_index + 1) - 1
)
)
ds = ds.swap_dims({"line": "azimuth_time", "pixel": "slant_range_time"})
ds.attrs["burst_index"] = burst_index
return ds
def build_burst_id(lat: float, lon: float, relative_orbit: int) -> str:
lat = int(round(lat * 10))
lon = int(round(lon * 10))
n_or_s = "N" if lat >= 0 else "S"
e_or_w = "E" if lon >= 0 else "W"
burst_id = f"R{relative_orbit:03}" f"-{n_or_s}{lat:03}" f"-{e_or_w}{lon:04}"
return burst_id
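
# Worked sketch of the formatting above (coordinates invented for illustration):
#   build_burst_id(45.67, 11.23, 117) -> "R117-N457-E0112"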
def compute_burst_centres(
gcp: xr.Dataset,
) -> T.Tuple[NT.NDArray[np.float_], NT.NDArray[np.float_]]:
gcp_rolling = gcp.rolling(azimuth_time=2, min_periods=1)
gc_az_win = gcp_rolling.construct(azimuth_time="az_win")
centre = gc_az_win.mean(["az_win", "slant_range_time"])
centre = centre.isel(azimuth_time=slice(1, None))
return centre.latitude.values, centre.longitude.values
def normalise_group(group: T.Optional[str]) -> T.Tuple[str, T.Optional[int]]:
if group is None:
group = ""
if group.startswith("/"):
group = group[1:]
burst_index = None
parent_group, _, last_name = group.rpartition("/")
if parent_group.count("/") == 1 and last_name.isnumeric():
burst_index = int(last_name)
group = parent_group
return group, burst_index
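# e.g. normalise_group("/IW1/VV/3") -> ("IW1/VV", 3); normalise_group(None) -> ("", None)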
def open_dataset(
product_urlpath: esa_safe.PathType,
*,
drop_variables: T.Optional[T.Tuple[str]] = None,
group: T.Optional[str] = None,
chunks: T.Optional[T.Union[int, T.Dict[str, int]]] = None,
fs: T.Optional[fsspec.AbstractFileSystem] = None,
) -> xr.Dataset:
group, burst_index = normalise_group(group)
absgroup = f"/{group}"
fs, manifest_path = get_fs_path(product_urlpath, fs)
if fs.isdir(manifest_path):
manifest_path = os.path.join(manifest_path, "manifest.safe")
base_path = os.path.dirname(manifest_path)
with fs.open(manifest_path) as file:
product_attrs, product_files = esa_safe.parse_manifest_sentinel1(file)
ancillary_data_paths = esa_safe.get_ancillary_data_paths(base_path, product_files)
if drop_variables is not None:
warnings.warn("'drop_variables' is currently ignored")
groups = find_avalable_groups(ancillary_data_paths, product_attrs, fs=fs)
if group != "" and group not in groups:
raise ValueError(
f"Invalid group {group!r}, please select one of the following groups:"
f"\n{list(groups.keys())}"
)
metadata = ""
if group == "":
ds = xr.Dataset()
subgroups = list(groups)
else:
subgroups = [
g[len(group) + 1 :] for g in groups if g.startswith(group) and g != group
]
if "/" not in group:
ds = xr.Dataset()
elif group.count("/") == 1:
subswath, pol = group.split("/", 1)
ds = open_pol_dataset(
ancillary_data_paths[subswath][pol]["s1Level1MeasurementSchema"],
ancillary_data_paths[subswath][pol]["s1Level1ProductSchema"],
chunks=chunks,
)
if burst_index is not None:
ds = crop_burst_dataset(ds, burst_index=burst_index)
else:
subswath, pol, metadata = group.split("/", 2)
with fs.open(groups[group]) as file:
ds = METADATA_OPENERS[metadata](file)
product_attrs["group"] = absgroup
if len(subgroups):
product_attrs["subgroups"] = subgroups
ds.attrs.update(product_attrs) # type: ignore
conventions.update_attributes(ds, group=metadata)
return ds
class Sentinel1Backend(xr.backends.common.BackendEntrypoint):
def open_dataset( # type: ignore
self,
filename_or_obj: str,
drop_variables: T.Optional[T.Tuple[str]] = None,
group: T.Optional[str] = None,
) -> xr.Dataset:
return open_dataset(filename_or_obj, drop_variables=drop_variables, group=group)
def guess_can_open(self, filename_or_obj: T.Any) -> bool:
try:
_, ext = os.path.splitext(filename_or_obj)
except TypeError:
return False
return ext.lower() in {".safe", ".safe/"}
METADATA_OPENERS = {
"gcp": open_gcp_dataset,
"orbit": open_orbit_dataset,
"attitude": open_attitude_dataset,
"dc_estimate": open_dc_estimate_dataset,
"azimuth_fm_rate": open_azimuth_fm_rate_dataset,
"calibration": open_calibration_dataset,
}
|
[
"numpy.allclose",
"xarray.Variable",
"numpy.arange",
"os.path.join",
"numpy.full",
"rioxarray.open_rasterio",
"fsspec.get_fs_token_paths",
"os.path.dirname",
"numpy.linspace",
"numpy.fromstring",
"xarray_sentinel.esa_safe.get_ancillary_data_paths",
"xarray_sentinel.conventions.update_attributes",
"xarray_sentinel.esa_safe.parse_azimuth_fm_rate",
"xarray.Dataset",
"xarray_sentinel.esa_safe.parse_tag",
"pandas.to_timedelta",
"xarray_sentinel.esa_safe.parse_manifest_sentinel1",
"numpy.datetime64",
"xarray_sentinel.esa_safe.parse_dc_estimate",
"numpy.array",
"os.path.splitext",
"xarray_sentinel.esa_safe.parse_tag_list",
"warnings.warn",
"fsspec.filesystem"
] |
[((365, 440), 'xarray_sentinel.esa_safe.parse_tag_list', 'esa_safe.parse_tag_list', (['calibration', '""".//calibrationVector"""', '"""calibration"""'], {}), "(calibration, './/calibrationVector', 'calibration')\n", (388, 440), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((1404, 1424), 'numpy.array', 'np.array', (['pixel_list'], {}), '(pixel_list)\n', (1412, 1424), True, 'import numpy as np\n'), ((1976, 2022), 'xarray.Dataset', 'xr.Dataset', ([], {'data_vars': 'data_vars', 'coords': 'coords'}), '(data_vars=data_vars, coords=coords)\n', (1986, 2022), True, 'import xarray as xr\n'), ((2142, 2208), 'xarray_sentinel.esa_safe.parse_tag', 'esa_safe.parse_tag', (['annotation_path', '""".//coordinateConversionList"""'], {}), "(annotation_path, './/coordinateConversionList')\n", (2160, 2208), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((3379, 3425), 'xarray.Dataset', 'xr.Dataset', ([], {'data_vars': 'data_vars', 'coords': 'coords'}), '(data_vars=data_vars, coords=coords)\n', (3389, 3425), True, 'import xarray as xr\n'), ((4075, 4137), 'xarray_sentinel.esa_safe.parse_tag_list', 'esa_safe.parse_tag_list', (['annotation', '""".//geolocationGridPoint"""'], {}), "(annotation, './/geolocationGridPoint')\n", (4098, 4137), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((5636, 5686), 'xarray_sentinel.esa_safe.parse_tag_list', 'esa_safe.parse_tag_list', (['annotation', '""".//attitude"""'], {}), "(annotation, './/attitude')\n", (5659, 5686), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((6291, 6338), 'xarray_sentinel.esa_safe.parse_tag_list', 'esa_safe.parse_tag_list', (['annotation', '""".//orbit"""'], {}), "(annotation, './/orbit')\n", (6314, 6338), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((7182, 7247), 'xarray.Variable', 'xr.Variable', ([], {'data': "data['position']", 'dims': "('axis', 'azimuth_time')"}), "(data=data['position'], dims=('axis', 'azimuth_time'))\n", (7193, 7247), True, 'import xarray as xr\n'), ((7279, 7344), 'xarray.Variable', 'xr.Variable', ([], {'data': "data['velocity']", 'dims': "('axis', 'azimuth_time')"}), "(data=data['velocity'], dims=('axis', 'azimuth_time'))\n", (7290, 7344), True, 'import xarray as xr\n'), ((7836, 7874), 'xarray_sentinel.esa_safe.parse_dc_estimate', 'esa_safe.parse_dc_estimate', (['annotation'], {}), '(annotation)\n', (7862, 7874), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((8581, 8623), 'xarray_sentinel.esa_safe.parse_azimuth_fm_rate', 'esa_safe.parse_azimuth_fm_rate', (['annotation'], {}), '(annotation)\n', (8611, 8623), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((9507, 9532), 'fsspec.filesystem', 'fsspec.filesystem', (['"""file"""'], {}), "('file')\n", (9524, 9532), False, 'import fsspec\n'), ((10904, 10957), 'xarray_sentinel.esa_safe.parse_tag', 'esa_safe.parse_tag', (['annotation', '""".//imageInformation"""'], {}), "(annotation, './/imageInformation')\n", (10922, 10957), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((10984, 11039), 'xarray_sentinel.esa_safe.parse_tag', 'esa_safe.parse_tag', (['annotation', '""".//productInformation"""'], {}), "(annotation, './/productInformation')\n", (11002, 11039), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((11059, 11107), 'xarray_sentinel.esa_safe.parse_tag', 'esa_safe.parse_tag', (['annotation', '""".//swathTiming"""'], {}), "(annotation, './/swathTiming')\n", (11077, 11107), False, 'from xarray_sentinel import conventions, esa_safe\n'), 
((11330, 11462), 'numpy.linspace', 'np.linspace', (['first_slant_range_time', '(first_slant_range_time + slant_range_sampling * (number_of_samples - 1))', 'number_of_samples'], {}), '(first_slant_range_time, first_slant_range_time + \n slant_range_sampling * (number_of_samples - 1), number_of_samples)\n', (11341, 11462), True, 'import numpy as np\n'), ((12928, 12979), 'rioxarray.open_rasterio', 'rioxarray.open_rasterio', (['measurement'], {'chunks': 'chunks'}), '(measurement, chunks=chunks)\n', (12951, 12979), False, 'import rioxarray\n'), ((13517, 13572), 'xarray.Dataset', 'xr.Dataset', ([], {'attrs': 'attrs', 'data_vars': "{'measurement': arr}"}), "(attrs=attrs, data_vars={'measurement': arr})\n", (13527, 13572), True, 'import xarray as xr\n'), ((15807, 15837), 'os.path.dirname', 'os.path.dirname', (['manifest_path'], {}), '(manifest_path)\n', (15822, 15837), False, 'import os\n'), ((15987, 16046), 'xarray_sentinel.esa_safe.get_ancillary_data_paths', 'esa_safe.get_ancillary_data_paths', (['base_path', 'product_files'], {}), '(base_path, product_files)\n', (16020, 16046), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((17486, 17535), 'xarray_sentinel.conventions.update_attributes', 'conventions.update_attributes', (['ds'], {'group': 'metadata'}), '(ds, group=metadata)\n', (17515, 17535), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((762, 817), 'numpy.fromstring', 'np.fromstring', (["vector['pixel']['$']"], {'dtype': 'int', 'sep': '""" """'}), "(vector['pixel']['$'], dtype=int, sep=' ')\n", (775, 817), True, 'import numpy as np\n'), ((889, 952), 'numpy.fromstring', 'np.fromstring', (["vector['sigmaNought']['$']"], {'dtype': 'float', 'sep': '""" """'}), "(vector['sigmaNought']['$'], dtype=float, sep=' ')\n", (902, 952), True, 'import numpy as np\n'), ((1035, 1097), 'numpy.fromstring', 'np.fromstring', (["vector['betaNought']['$']"], {'dtype': 'float', 'sep': '""" """'}), "(vector['betaNought']['$'], dtype=float, sep=' ')\n", (1048, 1097), True, 'import numpy as np\n'), ((1173, 1230), 'numpy.fromstring', 'np.fromstring', (["vector['gamma']['$']"], {'dtype': 'float', 'sep': '""" """'}), "(vector['gamma']['$'], dtype=float, sep=' ')\n", (1186, 1230), True, 'import numpy as np\n'), ((1293, 1347), 'numpy.fromstring', 'np.fromstring', (["vector['dn']['$']"], {'dtype': 'float', 'sep': '""" """'}), "(vector['dn']['$'], dtype=float, sep=' ')\n", (1306, 1347), True, 'import numpy as np\n'), ((1436, 1464), 'numpy.allclose', 'np.allclose', (['pixel', 'pixel[0]'], {}), '(pixel, pixel[0])\n', (1447, 1464), True, 'import numpy as np\n'), ((3622, 3664), 'fsspec.get_fs_token_paths', 'fsspec.get_fs_token_paths', (['urlpath_or_path'], {}), '(urlpath_or_path)\n', (3647, 3664), False, 'import fsspec\n'), ((15745, 15789), 'os.path.join', 'os.path.join', (['manifest_path', '"""manifest.safe"""'], {}), "(manifest_path, 'manifest.safe')\n", (15757, 15789), False, 'import os\n'), ((15919, 15958), 'xarray_sentinel.esa_safe.parse_manifest_sentinel1', 'esa_safe.parse_manifest_sentinel1', (['file'], {}), '(file)\n', (15952, 15958), False, 'from xarray_sentinel import conventions, esa_safe\n'), ((16090, 16144), 'warnings.warn', 'warnings.warn', (['"""\'drop_variables\' is currently ignored"""'], {}), '("\'drop_variables\' is currently ignored")\n', (16103, 16144), False, 'import warnings\n'), ((16480, 16492), 'xarray.Dataset', 'xr.Dataset', ([], {}), '()\n', (16490, 16492), True, 'import xarray as xr\n'), ((2699, 2767), 'numpy.fromstring', 'np.fromstring', 
(["values['srgrCoefficients']['$']"], {'dtype': 'float', 'sep': '""" """'}), "(values['srgrCoefficients']['$'], dtype=float, sep=' ')\n", (2712, 2767), True, 'import numpy as np\n'), ((2823, 2891), 'numpy.fromstring', 'np.fromstring', (["values['grsrCoefficients']['$']"], {'dtype': 'float', 'sep': '""" """'}), "(values['grsrCoefficients']['$'], dtype=float, sep=' ')\n", (2836, 2891), True, 'import numpy as np\n'), ((2943, 2960), 'numpy.datetime64', 'np.datetime64', (['dt'], {}), '(dt)\n', (2956, 2960), True, 'import numpy as np\n'), ((4718, 4740), 'numpy.full', 'np.full', (['shape', 'np.nan'], {}), '(shape, np.nan)\n', (4725, 4740), True, 'import numpy as np\n'), ((4771, 4793), 'numpy.full', 'np.full', (['shape', 'np.nan'], {}), '(shape, np.nan)\n', (4778, 4793), True, 'import numpy as np\n'), ((4821, 4843), 'numpy.full', 'np.full', (['shape', 'np.nan'], {}), '(shape, np.nan)\n', (4828, 4843), True, 'import numpy as np\n'), ((4879, 4901), 'numpy.full', 'np.full', (['shape', 'np.nan'], {}), '(shape, np.nan)\n', (4886, 4901), True, 'import numpy as np\n'), ((4937, 4959), 'numpy.full', 'np.full', (['shape', 'np.nan'], {}), '(shape, np.nan)\n', (4944, 4959), True, 'import numpy as np\n'), ((7021, 7099), 'warnings.warn', 'warnings.warn', (['"""reference_system is not consistent in all the state vectors. """'], {}), "('reference_system is not consistent in all the state vectors. ')\n", (7034, 7099), False, 'import warnings\n'), ((13155, 13197), 'numpy.arange', 'np.arange', (['(0)', "arr['pixel'].size"], {'dtype': 'int'}), "(0, arr['pixel'].size, dtype=int)\n", (13164, 13197), True, 'import numpy as np\n'), ((13219, 13260), 'numpy.arange', 'np.arange', (['(0)', "arr['line'].size"], {'dtype': 'int'}), "(0, arr['line'].size, dtype=int)\n", (13228, 13260), True, 'import numpy as np\n'), ((16701, 16713), 'xarray.Dataset', 'xr.Dataset', ([], {}), '()\n', (16711, 16713), True, 'import xarray as xr\n'), ((18001, 18034), 'os.path.splitext', 'os.path.splitext', (['filename_or_obj'], {}), '(filename_or_obj)\n', (18017, 18034), False, 'import os\n'), ((1636, 1653), 'numpy.datetime64', 'np.datetime64', (['dt'], {}), '(dt)\n', (1649, 1653), True, 'import numpy as np\n'), ((4356, 4389), 'numpy.datetime64', 'np.datetime64', (["ggp['azimuthTime']"], {}), "(ggp['azimuthTime'])\n", (4369, 4389), True, 'import numpy as np\n'), ((11800, 11843), 'pandas.to_timedelta', 'pd.to_timedelta', (['azimuth_time_interval', '"""s"""'], {}), "(azimuth_time_interval, 's')\n", (11815, 11843), True, 'import pandas as pd\n'), ((5322, 5339), 'numpy.datetime64', 'np.datetime64', (['dt'], {}), '(dt)\n', (5335, 5339), True, 'import numpy as np\n'), ((6136, 6153), 'numpy.datetime64', 'np.datetime64', (['dt'], {}), '(dt)\n', (6149, 6153), True, 'import numpy as np\n'), ((7628, 7645), 'numpy.datetime64', 'np.datetime64', (['dt'], {}), '(dt)\n', (7641, 7645), True, 'import numpy as np\n'), ((8340, 8357), 'numpy.datetime64', 'np.datetime64', (['at'], {}), '(at)\n', (8353, 8357), True, 'import numpy as np\n'), ((9195, 9212), 'numpy.datetime64', 'np.datetime64', (['at'], {}), '(at)\n', (9208, 9212), True, 'import numpy as np\n'), ((12642, 12685), 'pandas.to_timedelta', 'pd.to_timedelta', (['azimuth_time_interval', '"""s"""'], {}), "(azimuth_time_interval, 's')\n", (12657, 12685), True, 'import pandas as pd\n')]
|
"""
Authors: <NAME>, <NAME>
Helper functions:
1. Overall scores combining explainability and performance, normalized to the 0-1 range (logaritmic_power, sigmoid_power).
2. An explainability objective to minimize (smaller is better), built as the error plus an additive penalty on the number of tree nodes, for the optimization.
3. Accuracy score.
"""
from sklearn.metrics import accuracy_score
from sklearn.tree import _tree
import numpy as np
import math
def get_score(y_true, y_pred):
return accuracy_score(y_true, y_pred)
def logaritmic_power(x, y):
'''
Parameters:
----------
input:
x: performance (scalar)
y: explainability (scalar)
output:
factor: Normalized overall score
----------
'''
z = 1-x
l = np.log2(y ** z)
factor = x ** l
return factor
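# Worked example: logaritmic_power(0.9, 4) gives z = 0.1, l = log2(4**0.1) = 0.2,
# and factor = 0.9**0.2 ~= 0.979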
def sigmoid_power(x, y):
'''
Parameters:
----------
input:
x: performance (scalar)
y: explainability (scalar)
output:
factor: Normalized overall score
----------
'''
sigmoid = 1/(1 + math.exp(-y))
factor = x ** sigmoid
return factor
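# Worked example: sigmoid_power(0.9, 0) gives sigmoid = 1/(1 + e**0) = 0.5,
# and factor = 0.9**0.5 ~= 0.949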
def explainability_metric(clf, x):
'''
Parameters:
----------
input:
        clf: fitted decision tree object
        x: performance (scalar)
output:
        minimize: explainability penalty (scalar; smaller is better)
----------
'''
size_leaf = clf.tree_.n_leaves
size_node = len([z for z in clf.tree_.feature if z != _tree.TREE_UNDEFINED])
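    # note: size_leaf is computed above, but the additive penalty below counts
    # internal (split) nodes only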
_lambda = 1
error = (1.0 - x)
minimize = error + _lambda * size_node
return minimize
|
[
"numpy.log2",
"sklearn.metrics.accuracy_score",
"math.exp"
] |
[((500, 530), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (514, 530), False, 'from sklearn.metrics import accuracy_score\n'), ((796, 811), 'numpy.log2', 'np.log2', (['(y ** z)'], {}), '(y ** z)\n', (803, 811), True, 'import numpy as np\n'), ((1104, 1116), 'math.exp', 'math.exp', (['(-y)'], {}), '(-y)\n', (1112, 1116), False, 'import math\n')]
|
# Copyright 2016, AT&T
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from mock import patch
import datetime
import wsme
from gluon.api import baseObject
from gluon.tests.api import base
class APIBaseTestCase(base.APITestCase):
def setUp(self):
super(APIBaseTestCase, self).setUp()
pass
"""
test get_fields
"""
    # each APIBase object should always have two fields:
# created_at and updated_at
def test_get_fields(self):
api_base = baseObject.APIBase()
fields = api_base.get_fields()
self.assertIn("created_at", fields)
self.assertIn("updated_at", fields)
"""
test as_dict
"""
    # each APIBase object should always have two fields:
# created_at and updated_at
def test_as_dict(self):
api_base = baseObject.APIBase()
# set the created_at and updated_at fields
now = datetime.datetime.now()
api_base.created_at = now
api_base.updated_at = now
fields_dict = api_base.as_dict()
self.assertIn("created_at", fields_dict)
self.assertIn("updated_at", fields_dict)
self.assertEqual(fields_dict["created_at"], now)
self.assertEqual(fields_dict["updated_at"], now)
"""
test unset_fields_except
"""
# unset created_at and keep updated_at
def test_unset_fields_except(self):
api_base = baseObject.APIBase()
# set the created_at and updated_at fields
today = datetime.datetime.today()
api_base.created_at = today
api_base.updated_at = today
except_list = ["updated_at"]
api_base.unset_fields_except(except_list)
self.assertEqual(api_base.created_at, wsme.Unset)
self.assertEqual(api_base.updated_at, today)
class APIBaseObjectTestCase(base.APITestCase):
def setUp(self):
super(APIBaseObjectTestCase, self).setUp()
pass
"""
test class_builder
"""
    # new_class should have __name__ and db_model attributes
    # new_class object has attribute attr_foo which only accepts str values
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_class_builder(self, mock_get_instance):
new_class_name = 'new_class'
_db_model = mock.Mock()
attributes = {"attr_foo": str}
new_class = baseObject.APIBaseObject.class_builder(
new_class_name, _db_model, attributes)
self.assertEqual(new_class.db_model, _db_model)
self.assertEqual(new_class.__name__, new_class_name)
mock_get_instance.assert_called_once()
# create new_class object and assign value to it
new_obj = new_class()
try:
# should throw wsme.exc.InvalidInput since attr_foo is str type
new_obj.attr_foo = 123
except wsme.exc.InvalidInput as e:
self.assertIsNotNone(e)
"""
test build
"""
# create new_class which has attribute "attr_foo"
# create mock_db_obj has attribute "attr_foo" with value "123"
# build() should return object whose attr_foo attribute is "123"
def test_build(self):
new_class_name = 'new_class'
_db_model = mock.Mock()
attributes = {"attr_foo": str}
new_class = baseObject.APIBaseObject.class_builder(
new_class_name, _db_model, attributes)
mock_db_obj = mock.Mock()
mock_db_obj.as_dict.return_value = {"attr_foo": "123"}
new_obj = new_class.build(mock_db_obj)
observed = new_obj.attr_foo
expected = "123"
self.assertEqual(observed, expected)
"""
test get_from_db
"""
# mock db and mock db_obj
    # create a mock_db_obj that has attribute "attr_foo" with value "123"
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_get_from_db(self, mock_get_instance):
# mock_db_obj has as_dict function that returns a dict
mock_db_obj = mock.Mock()
mock_db_obj.as_dict.return_value = {"attr_foo": "123"}
# mock_db has get_by_primary_key function that returns mock_db_obj
mock_db = mock.Mock()
mock_db.get_by_primary_key.return_value = mock_db_obj
# set the dbapi.get_instance to return mock_db
mock_get_instance.return_value = mock_db
# create new_class with db field pointing to mock_db
new_class_name = 'new_class'
_db_model = mock.Mock()
attributes = {"attr_foo": str}
new_class = baseObject.APIBaseObject.class_builder(
new_class_name, _db_model, attributes)
new_obj = new_class.get_from_db("any_key")
observed = new_obj.attr_foo
expected = "123"
self.assertEqual(observed, expected)
"""
test create_in_db
"""
# mock db and mock db_obj
    # create a mock_db_obj that has attribute "attr_foo" with value "123"
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_create_in_db(self, mock_get_instance):
# mock_db_obj has as_dict function that returns a dict
mock_db_obj = mock.Mock()
mock_db_obj.as_dict.return_value = {"attr_foo": "123"}
# mock_db has create function that returns mock_db_obj
mock_db = mock.Mock()
mock_db.create.return_value = mock_db_obj
# set the dbapi.get_instance to return mock_db
mock_get_instance.return_value = mock_db
# create new_class with db field pointing to mock_db
new_class_name = 'new_class'
_db_model = mock.Mock()
attributes = {"attr_foo": str}
new_class = baseObject.APIBaseObject.class_builder(
new_class_name, _db_model, attributes)
new_obj = new_class.create_in_db({"attr_foo": "123"})
observed = new_obj.attr_foo
expected = "123"
self.assertEqual(observed, expected)
        # also assert create() has been called once
mock_db.create.assert_called_once()
"""
test update_in_db
"""
# mock db and mock db_obj
    # create a mock_db_obj that has attribute "attr_foo" with value "123"
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_update_in_db(self, mock_get_instance):
# mock_db_obj has as_dict function that returns a dict
mock_db_obj = mock.Mock()
mock_db_obj.as_dict.return_value = {"attr_foo": "123"}
# mock_db has get_by_primary_key function that returns mock_db_obj
mock_db = mock.Mock()
mock_db.get_by_primary_key.return_value = mock_db_obj
# set the dbapi.get_instance to return mock_db
mock_get_instance.return_value = mock_db
# create new_class with db field pointing to mock_db
new_class_name = 'new_class'
_db_model = mock.Mock()
attributes = {"attr_foo": str}
new_class = baseObject.APIBaseObject.class_builder(
new_class_name, _db_model, attributes)
new_obj = new_class.update_in_db("any_key", {"attr_foo": "123"})
observed = new_obj.attr_foo
expected = "123"
self.assertEqual(observed, expected)
        # also assert get_by_primary_key(), save(), and update() have each been called once
mock_db.get_by_primary_key.assert_called_once()
mock_db_obj.update.assert_called_once()
mock_db_obj.save.assert_called_once()
"""
test delete_from_db
"""
# mock db and mock db_obj
    # create a mock_db_obj that has attribute "attr_foo" with value "123"
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_delete_from_db(self, mock_get_instance):
# mock_db_obj
mock_db_obj = mock.Mock()
# mock_db has get_by_primary_key function that returns mock_db_obj
mock_db = mock.Mock()
mock_db.get_by_primary_key.return_value = mock_db_obj
# set the dbapi.get_instance to return mock_db
mock_get_instance.return_value = mock_db
# create new_class with db field pointing to mock_db
new_class_name = 'new_class'
_db_model = mock.Mock()
attributes = {"attr_foo": str}
new_class = baseObject.APIBaseObject.class_builder(
new_class_name, _db_model, attributes)
new_class.delete_from_db("any_key")
        # assert get_by_primary_key() and delete() have each been called once
mock_db.get_by_primary_key.assert_called_once()
mock_db_obj.delete.assert_called_once()
class APIBaseListTestCase(base.APITestCase):
def setUp(self):
super(APIBaseListTestCase, self).setUp()
pass
"""
test class_builder
"""
    # create api_object_class whose instances will be elements of the list
# create new_class by calling class_builder with api_object_class
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_class_builder(self, mock_get_instance):
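        # build a throwaway db-model class dynamically; only its attribute
        # layout matters for this test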
_db_model = type("FooDb", (object, ), {"foo": str})
api_object_class = baseObject.APIBaseObject.class_builder(
"FooAPI", _db_model, {"foo": str})
class_name = 'FooListAPI'
list_name = "listOfFoo"
new_class = baseObject.APIBaseList.class_builder(
class_name, list_name, api_object_class)
self.assertEqual(new_class.list_name, "listOfFoo")
self.assertEqual(new_class.api_object_class, api_object_class)
        # test assigning values to the new_api_list
        # its listOfFoo should only take a list of FooClass objects
new_api_list = new_class()
        # case 1: raises an error when assigning a list of strings
expected_exception = None
try:
new_api_list.listOfFoo = ["foo"]
except Exception as e:
expected_exception = e
self.assertIsNotNone(expected_exception)
        # case 2: NO error when assigning a list of FooClass objects
expected_exception = None
try:
new_api_list.listOfFoo = [api_object_class()]
except Exception as e:
expected_exception = e
self.assertIsNone(expected_exception)
"""
test build
"""
    # create api_object_class whose instances will be elements of the list
# create new_class by calling class_builder with this api_object_class
# mock the db to return a list of db_objs
# call new_class.build() with the mock db data to generate api_obj_list
@patch('gluon.api.baseObject.dbapi.get_instance')
def test_build(self, mock_get_instance):
_db_model = type("FooDb", (object, ), {"foo": str})
api_object_class = baseObject.APIBaseObject.class_builder(
"FoodAPI", _db_model, {"foo": str})
class_name = 'FooListAPI'
list_name = "listOfFoo"
new_class = baseObject.APIBaseList.class_builder(
class_name, list_name, api_object_class)
# mock db setups that will return a list of db_obj
mock_db_obj = mock.Mock()
mock_db_obj.as_dict.return_value = {"foo": "123"}
mock_db = mock.Mock()
mock_db.get_list.return_value = [mock_db_obj]
mock_get_instance.return_value = mock_db
# start testing by calling build() on new_class
api_obj_list = new_class.build()
# listOfFoo should contain one element
self.assertEqual(len(api_obj_list.listOfFoo), 1)
# this element should have foo field with value "123"
api_obj = api_obj_list.listOfFoo[0]
self.assertEqual(api_obj.foo, "123")
class RootObjectControllerTestCase(base.APITestCase):
def setUp(self):
super(RootObjectControllerTestCase, self).setUp()
pass
"""
test class_builder
"""
# FIXME write test cases for this function
# have trouble with @wsme_pecan.wsexpose
@patch('gluon.managers.manager_base.get_api_manager')
@patch('gluon.api.baseObject.APIBaseList.class_builder')
@patch('gluon.api.baseObject.wsme_pecan.wsexpose')
def test_class_builder(self,
mock_wsexpose,
mock_APIBaseList_class_builder,
mock_get_api_manager):
api_object_class = mock.Mock()
name = "FooController"
primary_key_type = str
api_name = "foo"
list_object_class = mock.Mock()
mock_APIBaseList_class_builder.return_value = list_object_class
baseObject.RootObjectController.class_builder(
name, api_object_class, primary_key_type, api_name)
pass
class RootSubObjectControllerTestCase(base.APITestCase):
def setUp(self):
super(RootSubObjectControllerTestCase, self).setUp()
pass
"""
test class_builder
"""
# FIXME write test cases for this function
# have trouble with @wsme_pecan.wsexpose
def test_class_builder(self):
pass
|
[
"gluon.api.baseObject.APIBase",
"datetime.datetime.today",
"gluon.api.baseObject.RootObjectController.class_builder",
"mock.patch",
"gluon.api.baseObject.APIBaseObject.class_builder",
"gluon.api.baseObject.APIBaseList.class_builder",
"mock.Mock",
"datetime.datetime.now"
] |
[((2628, 2676), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (2633, 2676), False, 'from mock import patch\n'), ((4263, 4311), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (4268, 4311), False, 'from mock import patch\n'), ((5376, 5424), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (5381, 5424), False, 'from mock import patch\n'), ((6568, 6616), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (6573, 6616), False, 'from mock import patch\n'), ((7933, 7981), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (7938, 7981), False, 'from mock import patch\n'), ((9176, 9224), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (9181, 9224), False, 'from mock import patch\n'), ((10762, 10810), 'mock.patch', 'patch', (['"""gluon.api.baseObject.dbapi.get_instance"""'], {}), "('gluon.api.baseObject.dbapi.get_instance')\n", (10767, 10810), False, 'from mock import patch\n'), ((12133, 12185), 'mock.patch', 'patch', (['"""gluon.managers.manager_base.get_api_manager"""'], {}), "('gluon.managers.manager_base.get_api_manager')\n", (12138, 12185), False, 'from mock import patch\n'), ((12191, 12246), 'mock.patch', 'patch', (['"""gluon.api.baseObject.APIBaseList.class_builder"""'], {}), "('gluon.api.baseObject.APIBaseList.class_builder')\n", (12196, 12246), False, 'from mock import patch\n'), ((12252, 12301), 'mock.patch', 'patch', (['"""gluon.api.baseObject.wsme_pecan.wsexpose"""'], {}), "('gluon.api.baseObject.wsme_pecan.wsexpose')\n", (12257, 12301), False, 'from mock import patch\n'), ((1030, 1050), 'gluon.api.baseObject.APIBase', 'baseObject.APIBase', ([], {}), '()\n', (1048, 1050), False, 'from gluon.api import baseObject\n'), ((1347, 1367), 'gluon.api.baseObject.APIBase', 'baseObject.APIBase', ([], {}), '()\n', (1365, 1367), False, 'from gluon.api import baseObject\n'), ((1433, 1456), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1454, 1456), False, 'import datetime\n'), ((1928, 1948), 'gluon.api.baseObject.APIBase', 'baseObject.APIBase', ([], {}), '()\n', (1946, 1948), False, 'from gluon.api import baseObject\n'), ((2016, 2041), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (2039, 2041), False, 'import datetime\n'), ((2787, 2798), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2796, 2798), False, 'import mock\n'), ((2859, 2936), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['new_class_name', '_db_model', 'attributes'], {}), '(new_class_name, _db_model, attributes)\n', (2897, 2936), False, 'from gluon.api import baseObject\n'), ((3710, 3721), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3719, 3721), False, 'import mock\n'), ((3781, 3858), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['new_class_name', '_db_model', 'attributes'], {}), '(new_class_name, _db_model, attributes)\n', (3819, 3858), False, 'from gluon.api import baseObject\n'), ((3894, 3905), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3903, 3905), False, 'import mock\n'), ((4448, 4459), 'mock.Mock', 'mock.Mock', ([], 
{}), '()\n', (4457, 4459), False, 'import mock\n'), ((4617, 4628), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4626, 4628), False, 'import mock\n'), ((4915, 4926), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4924, 4926), False, 'import mock\n'), ((4986, 5063), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['new_class_name', '_db_model', 'attributes'], {}), '(new_class_name, _db_model, attributes)\n', (5024, 5063), False, 'from gluon.api import baseObject\n'), ((5562, 5573), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (5571, 5573), False, 'import mock\n'), ((5719, 5730), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (5728, 5730), False, 'import mock\n'), ((6005, 6016), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (6014, 6016), False, 'import mock\n'), ((6076, 6153), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['new_class_name', '_db_model', 'attributes'], {}), '(new_class_name, _db_model, attributes)\n', (6114, 6153), False, 'from gluon.api import baseObject\n'), ((6754, 6765), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (6763, 6765), False, 'import mock\n'), ((6923, 6934), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (6932, 6934), False, 'import mock\n'), ((7221, 7232), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (7230, 7232), False, 'import mock\n'), ((7292, 7369), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['new_class_name', '_db_model', 'attributes'], {}), '(new_class_name, _db_model, attributes)\n', (7330, 7369), False, 'from gluon.api import baseObject\n'), ((8080, 8091), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (8089, 8091), False, 'import mock\n'), ((8186, 8197), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (8195, 8197), False, 'import mock\n'), ((8484, 8495), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (8493, 8495), False, 'import mock\n'), ((8555, 8632), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['new_class_name', '_db_model', 'attributes'], {}), '(new_class_name, _db_model, attributes)\n', (8593, 8632), False, 'from gluon.api import baseObject\n'), ((9365, 9438), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['"""FooAPI"""', '_db_model', "{'foo': str}"], {}), "('FooAPI', _db_model, {'foo': str})\n", (9403, 9438), False, 'from gluon.api import baseObject\n'), ((9539, 9616), 'gluon.api.baseObject.APIBaseList.class_builder', 'baseObject.APIBaseList.class_builder', (['class_name', 'list_name', 'api_object_class'], {}), '(class_name, list_name, api_object_class)\n', (9575, 9616), False, 'from gluon.api import baseObject\n'), ((10943, 11017), 'gluon.api.baseObject.APIBaseObject.class_builder', 'baseObject.APIBaseObject.class_builder', (['"""FoodAPI"""', '_db_model', "{'foo': str}"], {}), "('FoodAPI', _db_model, {'foo': str})\n", (10981, 11017), False, 'from gluon.api import baseObject\n'), ((11118, 11195), 'gluon.api.baseObject.APIBaseList.class_builder', 'baseObject.APIBaseList.class_builder', (['class_name', 'list_name', 'api_object_class'], {}), '(class_name, list_name, api_object_class)\n', (11154, 11195), False, 'from gluon.api import baseObject\n'), ((11291, 11302), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11300, 11302), False, 'import mock\n'), ((11379, 11390), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11388, 11390), False, 'import mock\n'), ((12513, 12524), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (12522, 12524), 
False, 'import mock\n'), ((12640, 12651), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (12649, 12651), False, 'import mock\n'), ((12733, 12834), 'gluon.api.baseObject.RootObjectController.class_builder', 'baseObject.RootObjectController.class_builder', (['name', 'api_object_class', 'primary_key_type', 'api_name'], {}), '(name, api_object_class,\n primary_key_type, api_name)\n', (12778, 12834), False, 'from gluon.api import baseObject\n')]
|
from SenseCells.tts import tts
def go_to_sleep():
tts('Goodbye! Have a great day!')
quit()
|
[
"SenseCells.tts.tts"
] |
[((55, 88), 'SenseCells.tts.tts', 'tts', (['"""Goodbye! Have a great day!"""'], {}), "('Goodbye! Have a great day!')\n", (58, 88), False, 'from SenseCells.tts import tts\n')]
|
"""Replicate win32 time.clock() behavior for all platforms"""
import time
import sys
_MAXFORWARD = 100
_FUDGE = 1
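# _MAXFORWARD bounds how many seconds time.time() may jump forward before the
# clock is clamped; _FUDGE is the small step returned while re-synchronising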
class RelativeTime:
def __init__(self):
self.time = time.time()
self.offset = 0
def get_time(self):
t = time.time() + self.offset
if t < self.time or t > self.time + _MAXFORWARD:
self.time += _FUDGE
self.offset += self.time - t
return self.time
self.time = t
return t
if sys.platform != 'win32':
_RTIME = RelativeTime()
def clock():
return _RTIME.get_time()
|
[
"time.time"
] |
[((182, 193), 'time.time', 'time.time', ([], {}), '()\n', (191, 193), False, 'import time\n'), ((255, 266), 'time.time', 'time.time', ([], {}), '()\n', (264, 266), False, 'import time\n')]
|
# Author : <NAME>
# Contact : <EMAIL>
# Date : Feb 16, 2020
import random
import time
import numpy as np
try:
from CS5313_Localization_Env import maze
except:
print(
'Problem finding CS5313_Localization_Env.maze... Trying to "import maze" only...'
)
try:
import maze
print("Successfully imported maze")
except Exception as ex:
print("Could not import maze")
print("----->LOOK HERE FOR EXCEPTION MESSAGE<-----")
print(ex)
print("----->LOOK HERE FOR EXCEPTION MESSAGE<-----")
try:
from CS5313_Localization_Env import RobotLocalization as viz
except:
print(
'Problem finding CS5313_Localization_Env.RobotLocalization... Trying to "import RobotLocalization" only...'
)
try:
import RobotLocalization as viz
print("Successfully imported RobotLocalization")
except Exception as ex:
print("Could not import RobotLocalization")
print("----->LOOK HERE FOR EXCEPTION MESSAGE<-----")
print(ex)
print("----->LOOK HERE FOR EXCEPTION MESSAGE<-----")
try:
from CS5313_Localization_Env import localization_env as le
except:
print(
'Problem finding CS5313_Localization_Env.localization_env... Trying to "import localization_env" only...'
)
try:
import localization_env as le
print("Successfully imported localization_env")
except Exception as ex:
print("Could not import localization_env")
print("----->LOOK HERE FOR EXCEPTION MESSAGE<-----")
print(ex)
print("----->LOOK HERE FOR EXCEPTION MESSAGE<-----")
from enum import Enum
# Change this to true to print out information on the robot location and heading
printouts = True
# Change this to true in order to print out the map as a dataframe to the console every time move() is called, as well as the Transition Tables to csv files named "heading.csv" and "location.csv". Won't do anything if printouts is false, except importing pandas
df = False
if df:
from pandas import DataFrame
class Directions(Enum):
"""An Enum containing the directions S, E, N, W, and St (stationary) and their respective (x, y) movement tuples. Ex. S = (0, 1) meaning down one row, and stationary in the columns."""
S = (0, 1)
E = (1, 0)
N = (0, -1)
W = (-1, 0)
St = (0, 0)
def get_ortho(self, value):
""" Return the Direction Enums orthogonal to the given direction
        Arguments:\n
value -- The given direction for which the orthogonal directions will be based on.\n
Returns:\n
A list of directions orthogonal to the given direction.
"""
if value in [self.N, self.S]:
return [self.W, self.E]
return [self.N, self.S]
class Headings(Enum):
"""An enum containing the headings S, E, N, W and their respective (x, y) movement tuples"""
S = (0, 1)
E = (1, 0)
N = (0, -1)
W = (-1, 0)
def get_ortho(self, value):
""" Return the Headings Enums orthogonal to the given heading
        Arguments:\n
value -- The given heading for which the orthogonal heading will be based on.\n
Returns:\n
A list of headings orthogonal to the given heading.
"""
if value in [self.N, self.S]:
return [self.W, self.E]
return [self.N, self.S]
class Environment:
""" An environment for testing a randomly moving robot around a maze.
Important Class Variables\n
    map -- The map of the maze. A 2d list of lists in the form list[x][y] where a value of 1 signifies there is a wall, 0 signifies the cell is traversable, and 'x' denotes the robot location.\n
location_transitions -- The table of transition probabilities for each cell. Format is [x][y][heading][direction] which will return the probabilities of moving the direction, given the robot's current x, y, and heading.\n
heading_transitions -- The table of transition probabilities for the headings given each cell. Format is [x][y][heading][heading] which will return the probabilities of each heading for the next time step given the robot's current x, y, and heading.\n
    robot_location -- The current location of the robot, given as a tuple in the form (x, y).
robot_heading -- The current heading of the robot, given as a Headings enum.
"""
def __init__(
self,
action_bias,
observation_noise,
action_noise,
dimensions,
seed=None,
window_size=[750, 750],
):
"""Initializes the environment. The robot starts in a random traversable cell.
        Arguments:\n
        action_bias -- Provides a bias for the robot's actions. Positive values increase the likelihood of South and East movements, and negative values favor North and West. (float in range -1-1)\n
observation_noise -- The probability that any given observation value will flip values erroneously. (float in range 0-1)\n
        action_noise -- The probability that an action will move in either direction perpendicular to the intended direction. (float in range 0-1)\n
dimensions -- The dimensions of the map, given in the form (x,y). (tuple in range (1+, 1+))\n
        seed (optional) -- The random seed value. (int) default=None, in which case a random seed is generated\n
window_size(optional) -- The [x, y] size of the display. Default is [750, 750]. Should be the same aspect ratio as the maze to avoid strange looking graphics.
Return:\n
No return
"""
# the pygame state
self.running = True
# Step counter
self.steps = 0
        # save the bias, noise, and map size parameters
self.action_bias = action_bias
self.observation_noise = observation_noise
self.action_noise = action_noise
self.dimensions = dimensions
# set the random seed and display it
        self.seed = seed if seed is not None else random.randint(1, 10000)
random.seed(self.seed)
        # create the map and the list of free cells
self.map = maze.make_maze(dimensions[0], dimensions[1], seed)
self.free_cells = [
(x, y)
for x in range(dimensions[0])
for y in range(dimensions[1])
if self.map[x][y] == 0
]
        # create the transition tables
self.location_transitions = self.create_locations_table()
self.headings_transitions = self.create_headings_table()
if df:
DataFrame(self.location_transitions).transpose().to_csv("location.csv")
DataFrame(self.headings_transitions).transpose().to_csv("heading.csv")
# set the robot location and print
self.robot_location = self.free_cells[
random.randint(0, len(self.free_cells) - 1)
]
self.location_priors, self.heading_priors = self.compute_prior_probabilities()
self.observation_tables = self.create_observation_tables()
self.map[self.robot_location[0]][self.robot_location[1]] = "x"
# Set the robot heading
self.robot_heading = random.choice(
[
h
for h in Headings
if self.traversable(self.robot_location[0], self.robot_location[1], h)
]
)
# gen initial headings probs
probs = {}
# prob_sum = 0
for h in le.Headings:
# num = random.random()
probs[h] = 1
# prob_sum += num
# for h in le.Headings:
# probs[h] /= prob_sum
# init viz
self.window_size = window_size
self.game = viz.Game()
self.game.init_pygame(self.window_size)
self.game.update(
self.map,
self.robot_location,
self.robot_heading,
[[0] * self.dimensions[1]] * self.dimensions[0],
probs,
)
self.game.display()
if printouts:
print("Random seed:", self.seed)
print("Robot starting location:", self.robot_location)
print("Robot starting heading:", self.robot_heading)
if df:
print(DataFrame(self.map).transpose())
def compute_prior_probabilities(self):
location_priors = {}
for cell in self.free_cells:
location_priors[cell] = 1 / len(self.free_cells)
heading_priors = {}
for heading in Headings:
heading_priors[heading] = 0
for cell in self.free_cells:
for heading2 in Headings:
heading_priors[heading] += self.headings_transitions[cell[0]][
cell[1]
][heading2][heading]
heading_priors[heading] /= len(self.free_cells) * 4
return location_priors, heading_priors
def random_dictionary_sample(self, probs):
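        # inverse-CDF sampling: accumulate probability mass over the keys until
        # the running sum exceeds a uniform random draw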
sample = random.random()
prob_sum = 0
for key in probs.keys():
prob_sum += probs[key]
if prob_sum > sample:
return key
def move(self):
"""Updates the robots heading and moves the robot to a new position based off of the transistion table and its current location and new heading.
Return:\n
A list of the observations modified by the observation noise, where 1 signifies a wall and 0 signifies an empty cell. The order of the list is [S, E, N, W]
"""
# get the new location
self.map[self.robot_location[0]][self.robot_location[1]] = 0
probs = self.location_transitions[self.robot_location[0]][
self.robot_location[1]
][self.robot_heading]
direction = self.random_dictionary_sample(probs)
self.robot_location = (
self.robot_location[0] + direction.value[0],
self.robot_location[1] + direction.value[1],
)
self.map[self.robot_location[0]][self.robot_location[1]] = "x"
# Get the new heading
h_probs = self.headings_transitions[self.robot_location[0]][
self.robot_location[1]
][self.robot_heading]
self.robot_heading = self.random_dictionary_sample(h_probs)
# # get the new location
# self.map[self.robot_location[0]][self.robot_location[1]] = 0
# probs = self.location_transitions[self.robot_location[0]][
# self.robot_location[1]
# ][self.robot_heading]
self.steps += 1
# return the new observation
if printouts:
print()
print(
"---------------------------Steps: "
+ str(self.steps)
+ " ---------------------------------"
)
print(self.robot_location)
print(self.robot_heading)
print(direction)
if df:
print(DataFrame(self.map).transpose())
# if self.running:
# self.game.update(
# self.map,
# self.robot_location,
# self.robot_heading,
# location_probs,
# headings_probs,
# )
# self.running = self.game.display()
# else:
# print("Pygame closed. Quiting...")
# self.game.quit()
return self.observe()
def update(self, location_probs, headings_probs):
"""Updates the visualizer to represent where your filtering method estimates the robot to be, and where it estimates the robot is heading.
Arguments:\n
location_probs: The probability of the robot being in any (x, y) cell in the map. Created from your project code. Format list[x][y] = float\n
headings_probs: The probability of the robot's current heading being any given heading. Created from your project code. Format dict{<Headings enum> : float, <Headings enum> : float,... }\n
"""
if self.running:
self.game.update(
self.map,
self.robot_location,
self.robot_heading,
location_probs,
headings_probs,
)
self.running = self.game.display()
else:
print("Pygame closed. Quiting...")
self.game.quit()
def observe(self):
"""Observes the walls at the current robot location
Return:\n
A list of the observations modified by the observation noise, where 1 signifies a wall and 0 signifies an empty cell. The order of the list is [S, E, N, W]
"""
# get the neighboring walls to create the true observation table
observations = [
0
if self.traversable(
self.robot_location[0], self.robot_location[1], direction
)
else 1
for direction in Directions
if direction != Directions.St
]
# apply observation noise
observations = [
1 - x if random.random() < self.observation_noise else x
for x in observations
]
return observations
def create_observation_tables(self):
observation_table = []
for x in range(self.dimensions[0]):
observation_table.append({})
for y in range(self.dimensions[1]):
if self.map[x][y] == 1:
observation_table[x][y] = -1
continue
observation_table[x][y] = {}
observations = [
0
if self.traversable(
x, y, direction
)
else 1
for direction in Directions
if direction != Directions.St
]
for a in [0, 1]:
for b in [0, 1]:
for c in [0, 1]:
for d in [0, 1]:
potential_obs = (a, b, c, d)
num_wrong = 0
for i in range(len(observations)):
if observations[i] != potential_obs[i]:
num_wrong += 1
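                                # each of the four wall readings flips
                                # independently with probability
                                # observation_noise, hence this binomial weight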
                                prob = (1 - self.observation_noise) ** (
                                    len(observations) - num_wrong
                                ) * self.observation_noise ** num_wrong
observation_table[x][y][potential_obs] = prob
return observation_table
def create_locations_table(self):
temp = []
# loop through the x dim
for x in range(self.dimensions[0]):
temp.append([])
# loop through the y dim
for y in range(self.dimensions[1]):
                # If the cell is not traversable, then set its value in the transition table to -1
if self.map[x][y] == 1:
temp[x].append(-1)
continue
temp[x].append({})
for heading in list(Headings):
probs = {}
# Compute Transistion probabilities ignoring walls
for direction in Directions:
if direction.name == heading.name:
probs[direction] = 1 - self.action_noise
elif direction in Directions.get_ortho(
Directions, Directions[heading.name]
):
probs[direction] = self.action_noise / 2
else:
probs[direction] = 0
# init stationary probability
probs[Directions.St] = 0
                    # account for walls. If there is a wall in one of the transition directions, add its probability to the stationary probability and set the transition probability to 0
for direction in Directions:
if not self.traversable(x, y, direction):
probs[Directions.St] += probs[direction]
probs[direction] = 0
                    # add the new transition probabilities
temp[x][y].update({heading: probs})
return temp
def create_headings_table(self):
temp = []
# loop through the x dim
for x in range(self.dimensions[0]):
temp.append([])
# loop through the y dim
for y in range(self.dimensions[1]):
                # If the cell is not traversable, then set its value in the transition table to -1
if self.map[x][y] == 1:
temp[x].append(-1)
continue
temp[x].append({})
for heading in Headings:
probs = {}
# Handle case when the current heading is traversable
if self.traversable(x, y, heading):
for new_heading in Headings:
if heading == new_heading:
probs[new_heading] = 1
else:
probs[new_heading] = 0
temp[x][y].update({heading: probs})
continue
# If the current heading is not traversable
# Find which headings are available
headings_traversablity = {}
for new_heading in Headings:
if self.traversable(x, y, new_heading):
headings_traversablity[new_heading] = 1
else:
headings_traversablity[new_heading] = 0
# Sum these values for later arithmetic
total_traversable = sum(list(headings_traversablity.values()))
se_traversable = (
headings_traversablity[Headings.S]
+ headings_traversablity[Headings.E]
)
nw_traversable = (
headings_traversablity[Headings.N]
+ headings_traversablity[Headings.W]
)
# Compute the heading probabilities for traversable headings
for new_heading in Headings:
if self.traversable(x, y, new_heading):
if new_heading in [Headings.S, Headings.E]:
probs[new_heading] = (
1 / total_traversable
+ self.action_bias / se_traversable
)
else:
probs[new_heading] = (
1 / total_traversable
- self.action_bias / nw_traversable
)
else:
probs[new_heading] = 0
# normalize heading probabilities
probs_sum = sum([probs[x] for x in Headings])
for h in Headings:
probs[h] /= probs_sum
                    # add the new transition probabilities
temp[x][y].update({heading: probs})
return temp
def traversable(self, x, y, direction):
"""
Returns true if the cell to the given direction of (x,y) is traversable, otherwise returns false.
        Arguments:\n
        x -- the x coordinate of the initial cell\n
        y -- the y coordinate of the initial cell\n
        direction -- the direction of the cell to check for traversability. Type: localization_env.Directions enum or localization_env.Headings\n
Return:\n
A boolean signifying whether the cell to the given direction is traversable or not
"""
# see if the cell in the direction is traversable. If statement to handle out of bounds errors
if (
x + direction.value[0] >= 0
and x + direction.value[0] < self.dimensions[0]
            and y + direction.value[1] >= 0
            and y + direction.value[1] < self.dimensions[1]
):
if self.map[x + direction.value[0]][y + direction.value[1]] == 0:
return True
return False
def dummy_location_and_heading_probs(self):
"""
Returns a dummy location probability table and a dummy heading probability dictionary for testing purposes
Returns:\n
location probability table: Format is list[x][y] = float between (0-1)\n
Headings probability table: Format is dict{<Heading enum> : float between (0-1)}
"""
loc_probs = list()
sum_probs = 0
for x in range(self.dimensions[0]):
loc_probs.append([])
for y in range(self.dimensions[1]):
if self.map[x][y] == 1:
loc_probs[x].append(0.0)
else:
num = random.random()
loc_probs[x].append(num)
sum_probs += num
for x in range(self.dimensions[0]):
for y in range(self.dimensions[1]):
loc_probs[x][y] /= sum_probs
hed_probs = {}
sample = np.random.rand(4)
sample = (sample / np.sum(sample)).tolist()
i = 0
for heading in le.Headings:
hed_probs[heading] = sample[i]
i += 1
return loc_probs, hed_probs
if __name__ == "__main__":
env = Environment(0.1, 0.1, 0.2, (10, 10), window_size=[1000, 1000])
# print("Starting test. Press <enter> to make move")
location, heading = env.dummy_location_and_heading_probs()
done = False
while env.running:
        observation = env.move()
        env.update(location, heading)
if printouts:
print(observation)
time.sleep(0.25)
|
[
"pandas.DataFrame",
"numpy.sum",
"random.randint",
"RobotLocalization.Game",
"maze.make_maze",
"time.sleep",
"random.random",
"random.seed",
"numpy.random.rand"
] |
[((6124, 6146), 'random.seed', 'random.seed', (['self.seed'], {}), '(self.seed)\n', (6135, 6146), False, 'import random\n'), ((6214, 6264), 'maze.make_maze', 'maze.make_maze', (['dimensions[0]', 'dimensions[1]', 'seed'], {}), '(dimensions[0], dimensions[1], seed)\n', (6228, 6264), False, 'import maze\n'), ((7779, 7789), 'RobotLocalization.Game', 'viz.Game', ([], {}), '()\n', (7787, 7789), True, 'import RobotLocalization as viz\n'), ((9033, 9048), 'random.random', 'random.random', ([], {}), '()\n', (9046, 9048), False, 'import random\n'), ((21621, 21638), 'numpy.random.rand', 'np.random.rand', (['(4)'], {}), '(4)\n', (21635, 21638), True, 'import numpy as np\n'), ((22216, 22232), 'time.sleep', 'time.sleep', (['(0.25)'], {}), '(0.25)\n', (22226, 22232), False, 'import time\n'), ((6091, 6115), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (6105, 6115), False, 'import random\n'), ((13103, 13118), 'random.random', 'random.random', ([], {}), '()\n', (13116, 13118), False, 'import random\n'), ((21345, 21360), 'random.random', 'random.random', ([], {}), '()\n', (21358, 21360), False, 'import random\n'), ((21666, 21680), 'numpy.sum', 'np.sum', (['sample'], {}), '(sample)\n', (21672, 21680), True, 'import numpy as np\n'), ((6640, 6676), 'pandas.DataFrame', 'DataFrame', (['self.location_transitions'], {}), '(self.location_transitions)\n', (6649, 6676), False, 'from pandas import DataFrame\n'), ((6724, 6760), 'pandas.DataFrame', 'DataFrame', (['self.headings_transitions'], {}), '(self.headings_transitions)\n', (6733, 6760), False, 'from pandas import DataFrame\n'), ((8311, 8330), 'pandas.DataFrame', 'DataFrame', (['self.map'], {}), '(self.map)\n', (8320, 8330), False, 'from pandas import DataFrame\n'), ((10990, 11009), 'pandas.DataFrame', 'DataFrame', (['self.map'], {}), '(self.map)\n', (10999, 11009), False, 'from pandas import DataFrame\n')]
|
# -*- coding: utf-8 -*-
"""Simple authenticaton backend based on HTTP basic authentication.
:copyright: (c) 2016-2019 by <NAME>
:license: Apache 2.0, see LICENSE
"""
import logging
import requests
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.conf import settings
LOGGER = logging.getLogger(__name__)
def is_insensitive():
"""Check if user name should be treated case-insensitive."""
case_spec = getattr(settings, 'HTTP_BASIC_AUTH_CASE', "y")
if isinstance(case_spec, str):
return case_spec[:1].lower() in ("0", "f", "n")
return not bool(case_spec)
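# e.g. HTTP_BASIC_AUTH_CASE = "No" -> case-insensitive; the default "y" keeps
# user names case-sensitive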
def canonical_username(username):
"""Return the canonical user name.
    If user names should be treated case-insensitively, return the lower-case
    form. Otherwise return the name unchanged.
"""
if is_insensitive():
return username.lower()
return username
class HttpBasicAuthBackend(ModelBackend):
"""Authentication backend that uses HTTP basic authentication.
In all other aspects this backend should behave like the default
model-based backend from Django.
"""
@staticmethod
def checkpw_basic_auth(url, username, password):
"""Check authentication via HTTP basic authentication."""
LOGGER.debug("Basic-auth URL=%s, user=%s", url, username)
if url is None:
return True
try:
response = requests.head(url, auth=(username, password))
status_code = response.status_code
LOGGER.debug('Basic-auth Status-Code=%d', status_code)
return 200 <= status_code <= 299
except requests.RequestException:
LOGGER.exception(
"Unable to get authentication from '%s' for user '%s':",
url, username)
return None
def authenticate(
self, request=None, username=None, password=None, **kwargs):
"""Authenticate with an user name and a password.
Requires a setting HTTP_BASIC_AUTH_URL for specifying the URL endpoint
for checking user name / password. URL can be set to None for testing
purposes. In this case, no HTTP request is done, all checks are
successful.
        The optional setting HTTP_BASIC_AUTH_CASE specifies whether the user
        name is treated case-sensitively or case-insensitively. Any value that
        starts with a '0', 'f', or 'n' results in case-insensitive handling.
        The value of HTTP_BASIC_AUTH_CASE is itself case-insensitive, of
        course.
"""
user_model = get_user_model()
if username is None:
username = kwargs.get(user_model.USERNAME_FIELD)
url = getattr(settings, 'HTTP_BASIC_AUTH_URL', '')
if url == '':
LOGGER.error("No HTTP_BASIC_AUTH_URL")
return None
username = canonical_username(username)
        if not self.checkpw_basic_auth(url, username, password):
return None
user, _ = user_model.objects.get_or_create(**{
user_model.USERNAME_FIELD: username,
})
return user if self.user_can_authenticate(user) else None
|
[
"requests.head",
"django.contrib.auth.get_user_model",
"logging.getLogger"
] |
[((347, 374), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (364, 374), False, 'import logging\n'), ((2610, 2626), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2624, 2626), False, 'from django.contrib.auth import get_user_model\n'), ((1438, 1483), 'requests.head', 'requests.head', (['url'], {'auth': '(username, password)'}), '(url, auth=(username, password))\n', (1451, 1483), False, 'import requests\n')]
|
#############################START LICENSE##########################################
# Copyright (C) 2019 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#############################END LICENSE##########################################
###########################################################################################
#
# Script name: qc-lightrad
#
# Description: This script performs automated EPID QC of the QC-3 phantom developed in Manitoba.
# There are other tools out there that do this but generally the ROIs are fixed, whereas this script
# aims to dynamically identify them using machine vision and the bibs in the phantom.
#
# Example usage: python qc-lightrad "/file/"
#
# Using MED-TEC MT-IAD-1 phantom
#
# Author: <NAME>
# <EMAIL>
# 5877000722
# Date:2019-04-09
#
###########################################################################################
import argparse
import os
from datetime import datetime
from tqdm import tqdm
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from PIL import Image
from skimage.feature import blob_log
import pydicom
from pymedphys.labs.pedromartinez.utils import utils as u
def point_detect(imcirclist):
k = 0
detCenterXRegion = []
detCenterYRegion = []
print("Finding bibs in phantom...")
for img in tqdm(imcirclist):
        grey_img = np.array(img, dtype=np.uint8)  # convert the PIL image to an 8-bit numpy array
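        # Laplacian-of-Gaussian blob detection proposes candidate bib locations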
blobs_log = blob_log(
grey_img, min_sigma=15, max_sigma=40, num_sigma=10, threshold=0.05
)
centerXRegion = []
centerYRegion = []
centerRRegion = []
grey_ampRegion = []
for blob in blobs_log:
y, x, r = blob
# center = (int(x), int(y))
centerXRegion.append(x)
centerYRegion.append(y)
centerRRegion.append(r)
grey_ampRegion.append(grey_img[int(y), int(x)])
# radius = int(r)
# print('center=', center, 'radius=', radius, 'value=', img[center], grey_img[center])
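        # the bib is the darkest blob in the crop: keep the candidate with the lowest grey value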
xindx = int(centerXRegion[np.argmin(grey_ampRegion)])
yindx = int(centerYRegion[np.argmin(grey_ampRegion)])
# rindx = int(centerRRegion[np.argmin(grey_ampRegion)])
detCenterXRegion.append(xindx)
detCenterYRegion.append(yindx)
k = k + 1
return detCenterXRegion, detCenterYRegion
def read_dicom(filenm, ioptn):
dataset = pydicom.dcmread(filenm)
now = datetime.now()
    # read the pixel data directly; pre-allocating an array is unnecessary
    ArrayDicom = dataset.pixel_array
SID = dataset.RTImageSID
print("array_shape=", np.shape(ArrayDicom))
height = np.shape(ArrayDicom)[0]
width = np.shape(ArrayDicom)[1]
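    # pixel size projected to isocentre: detector spacing scaled by 1000 / SID (SID in mm)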
dx = 1 / (SID * (1 / dataset.ImagePlanePixelSpacing[0]) / 1000)
dy = 1 / (SID * (1 / dataset.ImagePlanePixelSpacing[1]) / 1000)
print("pixel spacing row [mm]=", dx)
print("pixel spacing col [mm]=", dy)
# creating the figure extent based on the image dimensions, we divide by 10 to get the units in cm
extent = (
0,
0 + (ArrayDicom.shape[1] * dx / 10),
0 + (ArrayDicom.shape[0] * dy / 10),
0,
)
# creating the figure extent list for the bib images
list_extent = []
# plt.figure()
# plt.imshow(ArrayDicom, extent=extent, origin='upper')
# plt.imshow(ArrayDicom)
# plt.xlabel('x distance [cm]')
# plt.ylabel('y distance [cm]')
# plt.show()
if ioptn.startswith(("y", "yeah", "yes")):
height, width = ArrayDicom.shape
ArrayDicom_mod = ArrayDicom[
:, width // 2 - height // 2 : width // 2 + height // 2
]
else:
ArrayDicom_mod = ArrayDicom
# we take a diagonal profile to avoid phantom artifacts
# im_profile = ArrayDicom_mod.diagonal()
# test to make sure image is displayed correctly bibs are high amplitude against dark background
ctr_pixel = ArrayDicom_mod[height // 2, width // 2]
corner_pixel = ArrayDicom_mod[0, 0]
if ctr_pixel > corner_pixel:
ArrayDicom = u.range_invert(ArrayDicom)
ArrayDicom = u.norm01(ArrayDicom)
    # the full image is now inverted (if needed) and normalised to [0, 1]; define the bib ROIs next
if ioptn.startswith(("y", "yeah", "yes")):
ROI1 = {"edge_top": 70, "edge_bottom": 130, "edge_left": 270, "edge_right": 350}
ROI2 = {"edge_top": 70, "edge_bottom": 130, "edge_left": 680, "edge_right": 760}
ROI3 = {
"edge_top": 150,
"edge_bottom": 210,
"edge_left": 760,
"edge_right": 830,
}
ROI4 = {
"edge_top": 560,
"edge_bottom": 620,
"edge_left": 760,
"edge_right": 830,
}
ROI5 = {
"edge_top": 640,
"edge_bottom": 700,
"edge_left": 680,
"edge_right": 760,
}
ROI6 = {
"edge_top": 640,
"edge_bottom": 700,
"edge_left": 270,
"edge_right": 350,
}
ROI7 = {
"edge_top": 560,
"edge_bottom": 620,
"edge_left": 200,
"edge_right": 270,
}
ROI8 = {
"edge_top": 150,
"edge_bottom": 210,
"edge_left": 200,
"edge_right": 270,
}
else:
ROI1 = {
"edge_top": 280,
"edge_bottom": 360,
"edge_left": 360,
"edge_right": 440,
}
ROI2 = {
"edge_top": 280,
"edge_bottom": 360,
"edge_left": 830,
"edge_right": 910,
}
ROI3 = {
"edge_top": 360,
"edge_bottom": 440,
"edge_left": 940,
"edge_right": 1020,
}
ROI4 = {
"edge_top": 840,
"edge_bottom": 920,
"edge_left": 940,
"edge_right": 1020,
}
ROI5 = {
"edge_top": 930,
"edge_bottom": 1000,
"edge_left": 830,
"edge_right": 910,
}
ROI6 = {
"edge_top": 930,
"edge_bottom": 1000,
"edge_left": 360,
"edge_right": 440,
}
ROI7 = {
"edge_top": 840,
"edge_bottom": 920,
"edge_left": 280,
"edge_right": 360,
}
ROI8 = {
"edge_top": 360,
"edge_bottom": 440,
"edge_left": 280,
"edge_right": 360,
}
    # images for object detection: crop each bib region, upscale it 10x for
    # sub-pixel profile resolution, and record its physical extent in cm
    imcirclist = []
    for roi in (ROI1, ROI2, ROI3, ROI4, ROI5, ROI6, ROI7, ROI8):
        imcirc = Image.fromarray(
            255
            * ArrayDicom[
                roi["edge_top"] : roi["edge_bottom"],
                roi["edge_left"] : roi["edge_right"],
            ]
        )
        imcirc = imcirc.resize((imcirc.width * 10, imcirc.height * 10), Image.LANCZOS)
        imcirclist.append(imcirc)
        list_extent.append(
            (
                (roi["edge_left"] * dx / 10),
                (roi["edge_right"] * dx / 10),
                (roi["edge_bottom"] * dy / 10),
                (roi["edge_top"] * dy / 10),
            )
        )
    xdet, ydet = point_detect(imcirclist)
    # bibs 1, 2, 5 and 6 sit near the horizontal field edges, so a vertical
    # profile crosses the light/radiation edge; the other bibs need a
    # horizontal profile
    profiles = []
    for k, imcirc in enumerate(imcirclist):
        arr = np.array(imcirc, dtype=np.uint8) / 255
        if k in (0, 1, 4, 5):
            profiles.append(arr[:, xdet[k]])
        else:
            profiles.append(arr[ydet[k], :])
k = 0
fig = plt.figure(figsize=(8, 12)) # this figure will hold the bibs
plt.subplots_adjust(hspace=0.35)
# creating the page to write the results
dirname = os.path.dirname(filenm)
# tolerance levels to change at will
    tol = 1.0  # tolerance level
act = 2.0 # action level
phantom_distance = 3.0 # distance from the bib to the edge of the phantom
with PdfPages(
dirname
+ "/"
+ now.strftime("%d-%m-%Y_%H:%M_")
+ dataset[0x0008, 0x1010].value
+ "_Lightrad_report.pdf"
) as pdf:
Page = plt.figure(figsize=(4, 5))
Page.text(0.45, 0.9, "Report", size=18)
kk = 0 # counter for data points
for profile in profiles:
_, index = u.find_nearest(profile, 0.5) # find the 50% amplitude point
# value_near, index = find_nearest(profile, 0.5) # find the 50% amplitude point
if ( # pylint: disable = consider-using-in
k == 0 or k == 1 or k == 4 or k == 5
): # there are the bibs in the horizontal
offset_value_y = round(
abs((ydet[k] - index) * (dy / 10)) - phantom_distance, 2
)
txt = str(offset_value_y)
# print('offset_value_y=', offset_value_y)
if abs(offset_value_y) <= tol:
Page.text(
0.1,
0.8 - kk / 10,
"Point" + str(kk + 1) + " offset=" + txt + " mm",
color="g",
)
elif abs(offset_value_y) > tol and abs(offset_value_y) <= act:
Page.text(
0.1,
0.8 - kk / 10,
"Point" + str(kk + 1) + " offset=" + txt + " mm",
color="y",
)
else:
Page.text(
0.1,
0.8 - kk / 10,
"Point" + str(kk + 1) + " offset=" + txt + " mm",
color="r",
)
kk = kk + 1
ax = fig.add_subplot(
4, 2, k + 1
) # plotting all the figures in a single plot
ax.imshow(
np.array(imcirclist[k], dtype=np.uint8) / 255,
extent=list_extent[k],
origin="upper",
)
ax.scatter(
list_extent[k][0] + xdet[k] * dx / 100,
list_extent[k][3] + ydet[k] * dy / 100,
s=30,
marker="P",
color="y",
)
ax.set_title("Bib=" + str(k + 1))
ax.axhline(
list_extent[k][3] + index * dy / 100, color="r", linestyle="--"
)
ax.set_xlabel("x distance [cm]")
ax.set_ylabel("y distance [cm]")
else:
offset_value_x = round(
abs((xdet[k] - index) * (dx / 10)) - phantom_distance, 2
)
txt = str(offset_value_x)
if abs(offset_value_x) <= tol:
# print('1')
Page.text(
0.1,
0.8 - kk / 10,
"Point" + str(kk + 1) + " offset=" + txt + " mm",
color="g",
)
elif abs(offset_value_x) > tol and abs(offset_value_x) <= act:
# print('2')
Page.text(
0.1,
0.8 - kk / 10,
"Point" + str(kk + 1) + " offset=" + txt + " mm",
color="y",
)
else:
# print('3')
Page.text(
0.1,
0.8 - kk / 10,
"Point" + str(kk + 1) + " offset=" + txt + " mm",
color="r",
)
kk = kk + 1
ax = fig.add_subplot(
4, 2, k + 1
) # plotting all the figures in a single plot
ax.imshow(
np.array(imcirclist[k], dtype=np.uint8) / 255,
extent=list_extent[k],
origin="upper",
)
ax.scatter(
list_extent[k][0] + xdet[k] * dx / 100,
list_extent[k][3] + ydet[k] * dy / 100,
s=30,
marker="P",
color="y",
)
ax.set_title("Bib=" + str(k + 1))
ax.axvline(
list_extent[k][0] + index * dx / 100, color="r", linestyle="--"
)
ax.set_xlabel("x distance [cm]")
ax.set_ylabel("y distance [cm]")
k = k + 1
        pdf.savefig(Page)  # first page: the text summary
        pdf.savefig(fig)  # second page: the bib location plots
# we now need to select a horizontal and a vertical profile to find the edge of the field from an image
# for the field size calculation
im = Image.fromarray(255 * ArrayDicom)
if ioptn.startswith(("y", "yeah", "yes")):
PROFILE = {
"horizontal": 270,
"vertical": 430,
} # location to extract the horizontal and vertical profiles if this is a linac
else:
PROFILE = {
"horizontal": 470,
"vertical": 510,
} # location to extract the horizontal and vertical profiles if this is a true beam
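        # field size: locate the 50% (half-maximum) crossings on one horizontal and one vertical profile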
profilehorz = (
np.array(im, dtype=np.uint8)[PROFILE["horizontal"], :] / 255
        )  # TODO: pick these profile locations by a less image-specific criterion
profilevert = np.array(im, dtype=np.uint8)[:, PROFILE["vertical"]] / 255
# top_edge, index_top = find_nearest(profilevert[0:height//2], 0.5) # finding the edge of the field on the top
# bot_edge, index_bot = find_nearest(profilevert[height//2:height], 0.5) # finding the edge of the field on the bottom
_, index_top = u.find_nearest(
profilevert[0 : height // 2], 0.5
) # finding the edge of the field on the top
_, index_bot = u.find_nearest(
profilevert[height // 2 : height], 0.5
) # finding the edge of the field on the bottom
# l_edge, index_l = find_nearest(profilehorz[0:width//2], 0.5) #finding the edge of the field on the bottom
# r_edge, index_r = find_nearest(profilehorz[width//2:width], 0.5) #finding the edge of the field on the right
_, index_l = u.find_nearest(
profilehorz[0 : width // 2], 0.5
) # finding the edge of the field on the bottom
_, index_r = u.find_nearest(
profilehorz[width // 2 : width], 0.5
) # finding the edge of the field on the right
fig2 = plt.figure(
figsize=(7, 5)
) # this figure will show the vertical and horizontal calculated field size
ax = fig2.subplots()
ax.imshow(ArrayDicom, extent=extent, origin="upper")
ax.set_xlabel("x distance [cm]")
ax.set_ylabel("y distance [cm]")
# adding a vertical arrow
        ax.annotate(
            "",
            xy=(PROFILE["vertical"] * dx / 10, index_top * dy / 10),
            xytext=(PROFILE["vertical"] * dx / 10, (height // 2 + index_bot) * dy / 10),
            arrowprops=dict(arrowstyle="<->", color="r"),
        )  # a double-headed arrow marking the vertical field size
ax.text(
(PROFILE["vertical"] + 10) * dx / 10,
(height // 1.25) * dy / 10,
"Vfs="
+ str(round((height // 2 + index_bot - index_top) * dy / 10, 2))
+ "cm",
rotation=90,
fontsize=14,
color="r",
)
# adding a horizontal arrow
# print(index_l*dx, index_l, PROFILE['horizontal']*dy, PROFILE['horizontal'])
        ax.annotate(
            "",
            xy=(index_l * dx / 10, PROFILE["horizontal"] * dy / 10),
            xytext=((width // 2 + index_r) * dx / 10, PROFILE["horizontal"] * dy / 10),
            arrowprops=dict(arrowstyle="<->", color="r"),
        )  # a double-headed arrow marking the horizontal field size
ax.text(
(width // 2) * dx / 10,
(PROFILE["horizontal"] - 10) * dy / 10,
"Hfs=" + str(round((width // 2 + index_r - index_l) * dx / 10, 2)) + "cm",
rotation=0,
fontsize=14,
color="r",
)
pdf.savefig(fig2)
if __name__ == "__main__":
    while True:  # keep asking until a valid yes/no answer is given
        line = input("Are these files from a clinac [yes(y)/no(n)]> ")
        ioption = line.lower()
        if ioption.startswith(("y", "yeah", "yes", "n", "no", "nope")):
            break
        print("Please enter a valid option:")
parser = argparse.ArgumentParser()
parser.add_argument("file", type=str, help="Input the Light/Rad file")
args = parser.parse_args()
filename = args.file
read_dicom(filename, ioption)
|
[
"pymedphys.labs.pedromartinez.utils.utils.range_invert",
"tqdm.tqdm",
"pydicom.dcmread",
"argparse.ArgumentParser",
"os.path.dirname",
"numpy.zeros",
"skimage.feature.blob_log",
"numpy.argmin",
"pymedphys.labs.pedromartinez.utils.utils.norm01",
"numpy.shape",
"matplotlib.pyplot.figure",
"numpy.array",
"PIL.Image.fromarray",
"pymedphys.labs.pedromartinez.utils.utils.find_nearest",
"datetime.datetime.now",
"matplotlib.pyplot.subplots_adjust"
] |
[((1874, 1890), 'tqdm.tqdm', 'tqdm', (['imcirclist'], {}), '(imcirclist)\n', (1878, 1890), False, 'from tqdm import tqdm\n'), ((2983, 3006), 'pydicom.dcmread', 'pydicom.dcmread', (['filenm'], {}), '(filenm)\n', (2998, 3006), False, 'import pydicom\n'), ((3017, 3031), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3029, 3031), False, 'from datetime import datetime\n'), ((3050, 3124), 'numpy.zeros', 'np.zeros', (['(dataset.Rows, dataset.Columns)'], {'dtype': 'dataset.pixel_array.dtype'}), '((dataset.Rows, dataset.Columns), dtype=dataset.pixel_array.dtype)\n', (3058, 3124), True, 'import numpy as np\n'), ((4711, 4731), 'pymedphys.labs.pedromartinez.utils.utils.norm01', 'u.norm01', (['ArrayDicom'], {}), '(ArrayDicom)\n', (4719, 4731), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((7211, 7325), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI1['edge_top']:ROI1['edge_bottom'], ROI1['edge_left']:\n ROI1['edge_right']])"], {}), "(255 * ArrayDicom[ROI1['edge_top']:ROI1['edge_bottom'], ROI1\n ['edge_left']:ROI1['edge_right']])\n", (7226, 7325), False, 'from PIL import Image\n'), ((7709, 7823), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI2['edge_top']:ROI2['edge_bottom'], ROI2['edge_left']:\n ROI2['edge_right']])"], {}), "(255 * ArrayDicom[ROI2['edge_top']:ROI2['edge_bottom'], ROI2\n ['edge_left']:ROI2['edge_right']])\n", (7724, 7823), False, 'from PIL import Image\n'), ((8207, 8321), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI3['edge_top']:ROI3['edge_bottom'], ROI3['edge_left']:\n ROI3['edge_right']])"], {}), "(255 * ArrayDicom[ROI3['edge_top']:ROI3['edge_bottom'], ROI3\n ['edge_left']:ROI3['edge_right']])\n", (8222, 8321), False, 'from PIL import Image\n'), ((8704, 8818), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI4['edge_top']:ROI4['edge_bottom'], ROI4['edge_left']:\n ROI4['edge_right']])"], {}), "(255 * ArrayDicom[ROI4['edge_top']:ROI4['edge_bottom'], ROI4\n ['edge_left']:ROI4['edge_right']])\n", (8719, 8818), False, 'from PIL import Image\n'), ((9202, 9316), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI5['edge_top']:ROI5['edge_bottom'], ROI5['edge_left']:\n ROI5['edge_right']])"], {}), "(255 * ArrayDicom[ROI5['edge_top']:ROI5['edge_bottom'], ROI5\n ['edge_left']:ROI5['edge_right']])\n", (9217, 9316), False, 'from PIL import Image\n'), ((9700, 9814), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI6['edge_top']:ROI6['edge_bottom'], ROI6['edge_left']:\n ROI6['edge_right']])"], {}), "(255 * ArrayDicom[ROI6['edge_top']:ROI6['edge_bottom'], ROI6\n ['edge_left']:ROI6['edge_right']])\n", (9715, 9814), False, 'from PIL import Image\n'), ((10198, 10312), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI7['edge_top']:ROI7['edge_bottom'], ROI7['edge_left']:\n ROI7['edge_right']])"], {}), "(255 * ArrayDicom[ROI7['edge_top']:ROI7['edge_bottom'], ROI7\n ['edge_left']:ROI7['edge_right']])\n", (10213, 10312), False, 'from PIL import Image\n'), ((10696, 10810), 'PIL.Image.fromarray', 'Image.fromarray', (["(255 * ArrayDicom[ROI8['edge_top']:ROI8['edge_bottom'], ROI8['edge_left']:\n ROI8['edge_right']])"], {}), "(255 * ArrayDicom[ROI8['edge_top']:ROI8['edge_bottom'], ROI8\n ['edge_left']:ROI8['edge_right']])\n", (10711, 10810), False, 'from PIL import Image\n'), ((12288, 12315), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 12)'}), '(figsize=(8, 12))\n', (12298, 12315), True, 'import matplotlib.pyplot as plt\n'), 
((12354, 12386), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.35)'}), '(hspace=0.35)\n', (12373, 12386), True, 'import matplotlib.pyplot as plt\n'), ((12447, 12470), 'os.path.dirname', 'os.path.dirname', (['filenm'], {}), '(filenm)\n', (12462, 12470), False, 'import os\n'), ((21564, 21589), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (21587, 21589), False, 'import argparse\n'), ((1911, 1940), 'numpy.array', 'np.array', (['img'], {'dtype': 'np.uint8'}), '(img, dtype=np.uint8)\n', (1919, 1940), True, 'import numpy as np\n'), ((1998, 2074), 'skimage.feature.blob_log', 'blob_log', (['grey_img'], {'min_sigma': '(15)', 'max_sigma': '(40)', 'num_sigma': '(10)', 'threshold': '(0.05)'}), '(grey_img, min_sigma=15, max_sigma=40, num_sigma=10, threshold=0.05)\n', (2006, 2074), False, 'from skimage.feature import blob_log\n'), ((3231, 3251), 'numpy.shape', 'np.shape', (['ArrayDicom'], {}), '(ArrayDicom)\n', (3239, 3251), True, 'import numpy as np\n'), ((3266, 3286), 'numpy.shape', 'np.shape', (['ArrayDicom'], {}), '(ArrayDicom)\n', (3274, 3286), True, 'import numpy as np\n'), ((3302, 3322), 'numpy.shape', 'np.shape', (['ArrayDicom'], {}), '(ArrayDicom)\n', (3310, 3322), True, 'import numpy as np\n'), ((4666, 4692), 'pymedphys.labs.pedromartinez.utils.utils.range_invert', 'u.range_invert', (['ArrayDicom'], {}), '(ArrayDicom)\n', (4680, 4692), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((12850, 12876), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 5)'}), '(figsize=(4, 5))\n', (12860, 12876), True, 'import matplotlib.pyplot as plt\n'), ((17561, 17594), 'PIL.Image.fromarray', 'Image.fromarray', (['(255 * ArrayDicom)'], {}), '(255 * ArrayDicom)\n', (17576, 17594), False, 'from PIL import Image\n'), ((18556, 18603), 'pymedphys.labs.pedromartinez.utils.utils.find_nearest', 'u.find_nearest', (['profilevert[0:height // 2]', '(0.5)'], {}), '(profilevert[0:height // 2], 0.5)\n', (18570, 18603), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((18695, 18747), 'pymedphys.labs.pedromartinez.utils.utils.find_nearest', 'u.find_nearest', (['profilevert[height // 2:height]', '(0.5)'], {}), '(profilevert[height // 2:height], 0.5)\n', (18709, 18747), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((19076, 19122), 'pymedphys.labs.pedromartinez.utils.utils.find_nearest', 'u.find_nearest', (['profilehorz[0:width // 2]', '(0.5)'], {}), '(profilehorz[0:width // 2], 0.5)\n', (19090, 19122), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((19215, 19265), 'pymedphys.labs.pedromartinez.utils.utils.find_nearest', 'u.find_nearest', (['profilehorz[width // 2:width]', '(0.5)'], {}), '(profilehorz[width // 2:width], 0.5)\n', (19229, 19265), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((19352, 19378), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(7, 5)'}), '(figsize=(7, 5))\n', (19362, 19378), True, 'import matplotlib.pyplot as plt\n'), ((11505, 11538), 'numpy.array', 'np.array', (['imcirc1'], {'dtype': 'np.uint8'}), '(imcirc1, dtype=np.uint8)\n', (11513, 11538), True, 'import numpy as np\n'), ((11572, 11605), 'numpy.array', 'np.array', (['imcirc2'], {'dtype': 'np.uint8'}), '(imcirc2, dtype=np.uint8)\n', (11580, 11605), True, 'import numpy as np\n'), ((11639, 11672), 'numpy.array', 'np.array', (['imcirc3'], {'dtype': 'np.uint8'}), '(imcirc3, dtype=np.uint8)\n', (11647, 11672), True, 'import numpy as np\n'), ((11706, 11739), 
'numpy.array', 'np.array', (['imcirc4'], {'dtype': 'np.uint8'}), '(imcirc4, dtype=np.uint8)\n', (11714, 11739), True, 'import numpy as np\n'), ((11773, 11806), 'numpy.array', 'np.array', (['imcirc5'], {'dtype': 'np.uint8'}), '(imcirc5, dtype=np.uint8)\n', (11781, 11806), True, 'import numpy as np\n'), ((11840, 11873), 'numpy.array', 'np.array', (['imcirc6'], {'dtype': 'np.uint8'}), '(imcirc6, dtype=np.uint8)\n', (11848, 11873), True, 'import numpy as np\n'), ((11907, 11940), 'numpy.array', 'np.array', (['imcirc7'], {'dtype': 'np.uint8'}), '(imcirc7, dtype=np.uint8)\n', (11915, 11940), True, 'import numpy as np\n'), ((11974, 12007), 'numpy.array', 'np.array', (['imcirc8'], {'dtype': 'np.uint8'}), '(imcirc8, dtype=np.uint8)\n', (11982, 12007), True, 'import numpy as np\n'), ((13023, 13051), 'pymedphys.labs.pedromartinez.utils.utils.find_nearest', 'u.find_nearest', (['profile', '(0.5)'], {}), '(profile, 0.5)\n', (13037, 13051), True, 'from pymedphys.labs.pedromartinez.utils import utils as u\n'), ((2637, 2662), 'numpy.argmin', 'np.argmin', (['grey_ampRegion'], {}), '(grey_ampRegion)\n', (2646, 2662), True, 'import numpy as np\n'), ((2699, 2724), 'numpy.argmin', 'np.argmin', (['grey_ampRegion'], {}), '(grey_ampRegion)\n', (2708, 2724), True, 'import numpy as np\n'), ((18072, 18100), 'numpy.array', 'np.array', (['im'], {'dtype': 'np.uint8'}), '(im, dtype=np.uint8)\n', (18080, 18100), True, 'import numpy as np\n'), ((18227, 18255), 'numpy.array', 'np.array', (['im'], {'dtype': 'np.uint8'}), '(im, dtype=np.uint8)\n', (18235, 18255), True, 'import numpy as np\n'), ((14626, 14665), 'numpy.array', 'np.array', (['imcirclist[k]'], {'dtype': 'np.uint8'}), '(imcirclist[k], dtype=np.uint8)\n', (14634, 14665), True, 'import numpy as np\n'), ((16646, 16685), 'numpy.array', 'np.array', (['imcirclist[k]'], {'dtype': 'np.uint8'}), '(imcirclist[k], dtype=np.uint8)\n', (16654, 16685), True, 'import numpy as np\n')]
|
import nltk
import json
import numpy as np
from nltk import word_tokenize
import triton_python_backend_utils as pb_utils
class TritonPythonModel:
"""Your Python model must use the same class name. Every Python model
that is created must have "TritonPythonModel" as the class name.
"""
def initialize(self, args):
"""`initialize` is called only once when the model is being loaded.
Implementing `initialize` function is optional. This function allows
        the model to initialize any state associated with this model.
Parameters
----------
args : dict
Both keys and values are strings. The dictionary keys and values are:
* model_config: A JSON string containing the model configuration
* model_instance_kind: A string containing model instance kind
* model_instance_device_id: A string containing model instance device ID
* model_repository: Model repository path
* model_version: Model version
* model_name: Model name
"""
# You must parse model_config. JSON string is not parsed here
self.model_config = model_config = json.loads(args["model_config"])
# Get OUTPUT0 configuration
output0_config = pb_utils.get_output_config_by_name(model_config, "OUTPUT0")
# Get OUTPUT1 configuration
output1_config = pb_utils.get_output_config_by_name(model_config, "OUTPUT1")
# Get OUTPUT2 configuration
output2_config = pb_utils.get_output_config_by_name(model_config, "OUTPUT2")
# Get OUTPUT3 configuration
output3_config = pb_utils.get_output_config_by_name(model_config, "OUTPUT3")
# Convert Triton types to numpy types
self.output0_dtype = pb_utils.triton_string_to_numpy(
output0_config["data_type"]
)
self.output1_dtype = pb_utils.triton_string_to_numpy(
output1_config["data_type"]
)
self.output2_dtype = pb_utils.triton_string_to_numpy(
output2_config["data_type"]
)
self.output3_dtype = pb_utils.triton_string_to_numpy(
output3_config["data_type"]
)
# Get model repository path to read labels
self.model_repository = model_repository = args["model_repository"]
print(model_repository)
# Initialize tokenizer
nltk.download("punkt")
def tokenize(self, text):
tokens = word_tokenize(text)
# split into lower-case word tokens, in numpy array with shape of (seq, 1)
words = np.array([w.lower() for w in tokens], dtype=np.object_).reshape(-1, 1)
# split words into chars, in numpy array with shape of (seq, 1, 1, 16)
chars = [[c for c in t][:16] for t in tokens]
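        # right-pad each token's character list with empty strings up to length 16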
chars = [cs + [""] * (16 - len(cs)) for cs in chars]
chars = np.array(chars, dtype=np.object_).reshape(-1, 1, 1, 16)
return words, chars
def execute(self, requests):
"""
Parameters
----------
requests : list
A list of pb_utils.InferenceRequest
Returns
-------
list
A list of pb_utils.InferenceResponse. The length of this list must
be the same as `requests`
"""
output0_dtype = self.output0_dtype
output1_dtype = self.output1_dtype
output2_dtype = self.output2_dtype
output3_dtype = self.output3_dtype
responses = []
# Every Python backend must iterate over everyone of the requests
# and create a pb_utils.InferenceResponse for each of them.
for request in requests:
# Get INPUT0
in_0 = pb_utils.get_input_tensor_by_name(request, "INPUT0")
context = in_0.as_numpy().astype(str)
print(context)
            # Get INPUT1
            in_1 = pb_utils.get_input_tensor_by_name(request, "INPUT1")
            query = in_1.as_numpy().astype(str)
print(query)
cw, cc = self.tokenize(context[0])
qw, qc = self.tokenize(query[0])
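            # the context/query words and characters become the four output tensors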
out_0 = np.array(qw, dtype=output0_dtype)
out_1 = np.array(cc, dtype=output1_dtype)
out_2 = np.array(qc, dtype=output2_dtype)
out_3 = np.array(cw, dtype=output3_dtype)
# Create output tensors. You need pb_utils.Tensor objects to create pb_utils.InferenceResponse.
out_tensor_0 = pb_utils.Tensor("OUTPUT0", out_0)
out_tensor_1 = pb_utils.Tensor("OUTPUT1", out_1)
out_tensor_2 = pb_utils.Tensor("OUTPUT2", out_2)
out_tensor_3 = pb_utils.Tensor("OUTPUT3", out_3)
inference_response = pb_utils.InferenceResponse(
output_tensors=[out_tensor_0, out_tensor_1, out_tensor_2, out_tensor_3]
)
responses.append(inference_response)
return responses
def finalize(self):
"""`finalize` is called only once when the model is being unloaded.
Implementing `finalize` function is OPTIONAL. This function allows
the model to perform any necessary clean ups before exit.
"""
print("Cleaning up...")
|
[
"triton_python_backend_utils.get_output_config_by_name",
"json.loads",
"triton_python_backend_utils.Tensor",
"triton_python_backend_utils.get_input_tensor_by_name",
"numpy.array",
"triton_python_backend_utils.InferenceResponse",
"triton_python_backend_utils.triton_string_to_numpy",
"nltk.download",
"nltk.word_tokenize"
] |
[((1180, 1212), 'json.loads', 'json.loads', (["args['model_config']"], {}), "(args['model_config'])\n", (1190, 1212), False, 'import json\n'), ((1275, 1334), 'triton_python_backend_utils.get_output_config_by_name', 'pb_utils.get_output_config_by_name', (['model_config', '"""OUTPUT0"""'], {}), "(model_config, 'OUTPUT0')\n", (1309, 1334), True, 'import triton_python_backend_utils as pb_utils\n'), ((1397, 1456), 'triton_python_backend_utils.get_output_config_by_name', 'pb_utils.get_output_config_by_name', (['model_config', '"""OUTPUT1"""'], {}), "(model_config, 'OUTPUT1')\n", (1431, 1456), True, 'import triton_python_backend_utils as pb_utils\n'), ((1519, 1578), 'triton_python_backend_utils.get_output_config_by_name', 'pb_utils.get_output_config_by_name', (['model_config', '"""OUTPUT2"""'], {}), "(model_config, 'OUTPUT2')\n", (1553, 1578), True, 'import triton_python_backend_utils as pb_utils\n'), ((1641, 1700), 'triton_python_backend_utils.get_output_config_by_name', 'pb_utils.get_output_config_by_name', (['model_config', '"""OUTPUT3"""'], {}), "(model_config, 'OUTPUT3')\n", (1675, 1700), True, 'import triton_python_backend_utils as pb_utils\n'), ((1777, 1837), 'triton_python_backend_utils.triton_string_to_numpy', 'pb_utils.triton_string_to_numpy', (["output0_config['data_type']"], {}), "(output0_config['data_type'])\n", (1808, 1837), True, 'import triton_python_backend_utils as pb_utils\n'), ((1889, 1949), 'triton_python_backend_utils.triton_string_to_numpy', 'pb_utils.triton_string_to_numpy', (["output1_config['data_type']"], {}), "(output1_config['data_type'])\n", (1920, 1949), True, 'import triton_python_backend_utils as pb_utils\n'), ((2001, 2061), 'triton_python_backend_utils.triton_string_to_numpy', 'pb_utils.triton_string_to_numpy', (["output2_config['data_type']"], {}), "(output2_config['data_type'])\n", (2032, 2061), True, 'import triton_python_backend_utils as pb_utils\n'), ((2113, 2173), 'triton_python_backend_utils.triton_string_to_numpy', 'pb_utils.triton_string_to_numpy', (["output3_config['data_type']"], {}), "(output3_config['data_type'])\n", (2144, 2173), True, 'import triton_python_backend_utils as pb_utils\n'), ((2396, 2418), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (2409, 2418), False, 'import nltk\n'), ((2468, 2487), 'nltk.word_tokenize', 'word_tokenize', (['text'], {}), '(text)\n', (2481, 2487), False, 'from nltk import word_tokenize\n'), ((3697, 3749), 'triton_python_backend_utils.get_input_tensor_by_name', 'pb_utils.get_input_tensor_by_name', (['request', '"""INPUT0"""'], {}), "(request, 'INPUT0')\n", (3730, 3749), True, 'import triton_python_backend_utils as pb_utils\n'), ((3872, 3924), 'triton_python_backend_utils.get_input_tensor_by_name', 'pb_utils.get_input_tensor_by_name', (['request', '"""INPUT1"""'], {}), "(request, 'INPUT1')\n", (3905, 3924), True, 'import triton_python_backend_utils as pb_utils\n'), ((4112, 4145), 'numpy.array', 'np.array', (['qw'], {'dtype': 'output0_dtype'}), '(qw, dtype=output0_dtype)\n', (4120, 4145), True, 'import numpy as np\n'), ((4166, 4199), 'numpy.array', 'np.array', (['cc'], {'dtype': 'output1_dtype'}), '(cc, dtype=output1_dtype)\n', (4174, 4199), True, 'import numpy as np\n'), ((4220, 4253), 'numpy.array', 'np.array', (['qc'], {'dtype': 'output2_dtype'}), '(qc, dtype=output2_dtype)\n', (4228, 4253), True, 'import numpy as np\n'), ((4274, 4307), 'numpy.array', 'np.array', (['cw'], {'dtype': 'output3_dtype'}), '(cw, dtype=output3_dtype)\n', (4282, 4307), True, 'import numpy as np\n'), ((4444, 4477), 
'triton_python_backend_utils.Tensor', 'pb_utils.Tensor', (['"""OUTPUT0"""', 'out_0'], {}), "('OUTPUT0', out_0)\n", (4459, 4477), True, 'import triton_python_backend_utils as pb_utils\n'), ((4505, 4538), 'triton_python_backend_utils.Tensor', 'pb_utils.Tensor', (['"""OUTPUT1"""', 'out_1'], {}), "('OUTPUT1', out_1)\n", (4520, 4538), True, 'import triton_python_backend_utils as pb_utils\n'), ((4566, 4599), 'triton_python_backend_utils.Tensor', 'pb_utils.Tensor', (['"""OUTPUT2"""', 'out_2'], {}), "('OUTPUT2', out_2)\n", (4581, 4599), True, 'import triton_python_backend_utils as pb_utils\n'), ((4627, 4660), 'triton_python_backend_utils.Tensor', 'pb_utils.Tensor', (['"""OUTPUT3"""', 'out_3'], {}), "('OUTPUT3', out_3)\n", (4642, 4660), True, 'import triton_python_backend_utils as pb_utils\n'), ((4695, 4798), 'triton_python_backend_utils.InferenceResponse', 'pb_utils.InferenceResponse', ([], {'output_tensors': '[out_tensor_0, out_tensor_1, out_tensor_2, out_tensor_3]'}), '(output_tensors=[out_tensor_0, out_tensor_1,\n out_tensor_2, out_tensor_3])\n', (4721, 4798), True, 'import triton_python_backend_utils as pb_utils\n'), ((2870, 2903), 'numpy.array', 'np.array', (['chars'], {'dtype': 'np.object_'}), '(chars, dtype=np.object_)\n', (2878, 2903), True, 'import numpy as np\n')]
|
from arlo import Arlo
from datetime import timedelta, date
import datetime
import sys
import platform
import os.path
from os import path
USERNAME = ''
PASSWORD = ''
try:
# Instantiating the Arlo object automatically calls Login(), which returns an oAuth token that gets cached.
# Subsequent successful calls to login will update the oAuth token.
arlo = Arlo(USERNAME, PASSWORD)
# At this point you're logged into Arlo.
    today = date.today().strftime("%Y%m%d")
seven_days_ago = (date.today() - timedelta(days=6)).strftime("%Y%m%d")
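    # a seven-day window (inclusive) ending today, formatted as YYYYMMDD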
# Get all of the recordings for a date range.
library = arlo.GetLibrary(seven_days_ago, today)
    # Choose the OneDrive sync location according to the machine this runs on.
if platform.node() == 'surface':
storage = 'E:/OneDrive/Video/arlo/Video/'
elif platform.node() == 'home':
storage = 'D:/OneDrive/Video/arlo/Video/'
elif platform.node() == 'DESKTOP-F4EOHEL':
storage = 'D:/OneDrive/Video/arlo/Video/'
else:
storage = 'E:/OneDrive/Video/arlo/Video/'
print('platform.node() = ' + platform.node())
    # Decide whether each recording needs to be downloaded.
doDownload = False
# Iterate through the recordings in the library.
for recording in library:
videofilename = datetime.datetime.fromtimestamp(int(recording['name'])//1000).strftime('%Y-%m-%d %H-%M-%S') + ' ' + recording['uniqueId'] + '.mp4'
        # Download if the file is missing, or if it exists but is empty (0 bytes).
        doDownload = not (
            path.exists(storage + videofilename)
            and os.path.getsize(storage + videofilename) > 0
        )
        # Perform the download.
        if doDownload:
stream = arlo.StreamRecording(recording['presignedContentUrl'])
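            # stream the recording in chunks straight to disk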
with open(storage + videofilename, 'wb') as f:
for chunk in stream:
f.write(chunk)
print('Downloaded: '+videofilename+' from '+recording['createdDate']+'.')
# else:
# print('Skipped: '+videofilename+' from '+recording['createdDate']+'.')
# Delete all of the videos you just downloaded from the Arlo library.
# Notice that you can pass the "library" object we got back from the GetLibrary() call.
# result = arlo.BatchDeleteRecordings(library)
# If we made it here without an exception, then the videos were successfully deleted.
# print('Batch deletion of videos completed successfully.')
except Exception as e:
print(e)
|
[
"platform.node",
"arlo.Arlo",
"os.path.exists",
"datetime.date.today",
"datetime.timedelta"
] |
[((361, 385), 'arlo.Arlo', 'Arlo', (['USERNAME', 'PASSWORD'], {}), '(USERNAME, PASSWORD)\n', (365, 385), False, 'from arlo import Arlo\n'), ((696, 711), 'platform.node', 'platform.node', ([], {}), '()\n', (709, 711), False, 'import platform\n'), ((777, 792), 'platform.node', 'platform.node', ([], {}), '()\n', (790, 792), False, 'import platform\n'), ((1019, 1034), 'platform.node', 'platform.node', ([], {}), '()\n', (1032, 1034), False, 'import platform\n'), ((1362, 1398), 'os.path.exists', 'path.exists', (['(storage + videofilename)'], {}), '(storage + videofilename)\n', (1373, 1398), False, 'from os import path\n'), ((439, 451), 'datetime.date.today', 'date.today', ([], {}), '()\n', (449, 451), False, 'from datetime import timedelta, date\n'), ((452, 469), 'datetime.timedelta', 'timedelta', ([], {'days': '(0)'}), '(days=0)\n', (461, 469), False, 'from datetime import timedelta, date\n'), ((509, 521), 'datetime.date.today', 'date.today', ([], {}), '()\n', (519, 521), False, 'from datetime import timedelta, date\n'), ((524, 541), 'datetime.timedelta', 'timedelta', ([], {'days': '(6)'}), '(days=6)\n', (533, 541), False, 'from datetime import timedelta, date\n'), ((855, 870), 'platform.node', 'platform.node', ([], {}), '()\n', (868, 870), False, 'import platform\n'), ((1438, 1474), 'os.path.exists', 'path.exists', (['(storage + videofilename)'], {}), '(storage + videofilename)\n', (1449, 1474), False, 'from os import path\n')]
|
# SPDX-FileCopyrightText: 2022 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import re
from allauth.account.models import EmailAddress
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.exceptions import ValidationError
from django.db import models
def find_user_from_address(address):
try:
emailaddress = EmailAddress.objects.get(email=address, verified=True)
return emailaddress.user
except EmailAddress.DoesNotExist:
return None
class MailingList(models.Model):
NONE = 0
CONFIRM = 1
REQUIRE_APPROVAL = 2
CONFIRM_AND_APPROVE = 3
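    # note: CONFIRM_AND_APPROVE combines the two flags (1 | 2 == 3)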
SUBSCRIBE_POLICY_CHOICES = [
(NONE, "None"),
(CONFIRM, "Confirm"),
(REQUIRE_APPROVAL, "Require approval"),
(CONFIRM_AND_APPROVE, "Confirm and approve"),
]
name = models.CharField(max_length=64, unique=True)
description = models.CharField(max_length=255, blank=True)
info = models.TextField(blank=True)
advertised = models.BooleanField()
subscribe_policy = models.SmallIntegerField(choices=SUBSCRIBE_POLICY_CHOICES)
archive_private = models.BooleanField()
subscribe_auto_approval = models.TextField(blank=True)
auto_unsubscribe = models.BooleanField(
default=False,
help_text="Should non-group members be automatically unsubscribed?",
)
def __str__(self):
return self.name
def check_subscribe_auto_approval(self, address):
for pattern in self.subscribe_auto_approval.split("\n"):
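            # patterns beginning with '^' are regular expressions; anything else is an exact, case-insensitive match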
if pattern.startswith("^"):
if re.match(pattern, address):
return True
elif pattern.lower() == address.lower():
return True
return False
def user_can_see(self, user):
if self.advertised:
return True
if self.user_can_subscribe(user):
return True
return False
def user_can_subscribe(self, user):
if self.subscribe_policy in [self.NONE, self.CONFIRM]:
return True
for group in user.groups.all():
if self.group_policies.filter(group=group).exists():
return True
# if self.check_subscribe_auto_approval(user.email):
# return True
return False
def user_recommend(self, user):
for group in user.groups.all():
if self.group_policies.filter(
group=group, policy__gte=GroupPolicy.RECOMMEND
).exists():
return True
def user_prompt(self, user):
for group in user.groups.all():
try:
return self.group_policies.get(
group=group, policy=GroupPolicy.PROMPT
).prompt
except GroupPolicy.DoesNotExist:
pass
def user_subscribe_policy(self, user):
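        # scan policies from strictest to most permissive; the first one whose group contains the user wins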
for policy in self.group_policies.order_by("-policy"):
if user.groups.contains(policy.group):
return policy
def address_can_remain(self, address):
if not self.auto_unsubscribe:
return True
if self.check_subscribe_auto_approval(address):
return True
user = find_user_from_address(address)
if user:
if self.user_can_subscribe(user):
return True
return False
class Meta:
permissions = [("audit_list", "Can audit the subscribers of a mailing list")]
class GroupPolicy(models.Model):
ALLOW = 0
RECOMMEND = 1
PROMPT = 2
FORCE = 3
POLICY_CHOICES = [
(ALLOW, "Allow"),
(RECOMMEND, "Recommend"),
(PROMPT, "Prompt"),
(FORCE, "Force"),
]
mailing_list = models.ForeignKey(
MailingList, on_delete=models.CASCADE, related_name="group_policies"
)
group = models.ForeignKey(
Group, on_delete=models.CASCADE, related_name="mailinglist_policies"
)
policy = models.SmallIntegerField(choices=POLICY_CHOICES, default=ALLOW)
prompt = models.TextField(blank=True)
def __str__(self):
return f"{self.mailing_list}:{self.group}:{self.get_policy_display()}"
def clean(self):
if self.policy == self.PROMPT:
if not self.prompt:
raise ValidationError("Must supply a message for a prompt policy.")
class Meta:
verbose_name_plural = "Group policies"
unique_together = ("mailing_list", "group")
class ChangeOfAddress(models.Model):
created = models.DateTimeField(null=False, blank=False, auto_now_add=True)
user = models.ForeignKey(get_user_model(), on_delete=models.PROTECT)
old_email = models.EmailField()
new_email = models.EmailField()
class Meta:
verbose_name_plural = "Changes of address"
class MailmanUser(models.Model):
user = models.OneToOneField(get_user_model(), on_delete=models.CASCADE)
advanced_mode = models.BooleanField(default=False)
|
[
"django.db.models.TextField",
"django.core.exceptions.ValidationError",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.contrib.auth.get_user_model",
"re.match",
"django.db.models.BooleanField",
"django.db.models.EmailField",
"allauth.account.models.EmailAddress.objects.get",
"django.db.models.SmallIntegerField",
"django.db.models.DateTimeField"
] |
[((854, 898), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'unique': '(True)'}), '(max_length=64, unique=True)\n', (870, 898), False, 'from django.db import models\n'), ((917, 961), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)'}), '(max_length=255, blank=True)\n', (933, 961), False, 'from django.db import models\n'), ((973, 1001), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (989, 1001), False, 'from django.db import models\n'), ((1019, 1040), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1038, 1040), False, 'from django.db import models\n'), ((1064, 1122), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'choices': 'SUBSCRIBE_POLICY_CHOICES'}), '(choices=SUBSCRIBE_POLICY_CHOICES)\n', (1088, 1122), False, 'from django.db import models\n'), ((1145, 1166), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1164, 1166), False, 'from django.db import models\n'), ((1197, 1225), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (1213, 1225), False, 'from django.db import models\n'), ((1249, 1357), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Should non-group members be automatically unsubscribed?"""'}), "(default=False, help_text=\n 'Should non-group members be automatically unsubscribed?')\n", (1268, 1357), False, 'from django.db import models\n'), ((3727, 3819), 'django.db.models.ForeignKey', 'models.ForeignKey', (['MailingList'], {'on_delete': 'models.CASCADE', 'related_name': '"""group_policies"""'}), "(MailingList, on_delete=models.CASCADE, related_name=\n 'group_policies')\n", (3744, 3819), False, 'from django.db import models\n'), ((3841, 3933), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Group'], {'on_delete': 'models.CASCADE', 'related_name': '"""mailinglist_policies"""'}), "(Group, on_delete=models.CASCADE, related_name=\n 'mailinglist_policies')\n", (3858, 3933), False, 'from django.db import models\n'), ((3956, 4019), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'choices': 'POLICY_CHOICES', 'default': 'ALLOW'}), '(choices=POLICY_CHOICES, default=ALLOW)\n', (3980, 4019), False, 'from django.db import models\n'), ((4033, 4061), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (4049, 4061), False, 'from django.db import models\n'), ((4511, 4575), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(False)', 'blank': '(False)', 'auto_now_add': '(True)'}), '(null=False, blank=False, auto_now_add=True)\n', (4531, 4575), False, 'from django.db import models\n'), ((4665, 4684), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (4682, 4684), False, 'from django.db import models\n'), ((4701, 4720), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (4718, 4720), False, 'from django.db import models\n'), ((4920, 4954), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (4939, 4954), False, 'from django.db import models\n'), ((384, 438), 'allauth.account.models.EmailAddress.objects.get', 'EmailAddress.objects.get', ([], {'email': 'address', 'verified': '(True)'}), '(email=address, verified=True)\n', (408, 438), False, 'from allauth.account.models import EmailAddress\n'), ((4605, 4621), 
'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (4619, 4621), False, 'from django.contrib.auth import get_user_model\n'), ((4856, 4872), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (4870, 4872), False, 'from django.contrib.auth import get_user_model\n'), ((1604, 1630), 're.match', 're.match', (['pattern', 'address'], {}), '(pattern, address)\n', (1612, 1630), False, 'import re\n'), ((4280, 4341), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Must supply a message for a prompt policy."""'], {}), "('Must supply a message for a prompt policy.')\n", (4295, 4341), False, 'from django.core.exceptions import ValidationError\n')]
|
from __future__ import absolute_import, division, print_function, unicode_literals
from divvy.ledger.Args import ARGS
from divvy.ledger import SearchLedgers
SAFE = True
HELP = """print
Print the ledgers to stdout. The default command."""
def run_print(server):
ARGS.display(print, server, SearchLedgers.search(server))
|
[
"divvy.ledger.SearchLedgers.search"
] |
[((312, 340), 'divvy.ledger.SearchLedgers.search', 'SearchLedgers.search', (['server'], {}), '(server)\n', (332, 340), False, 'from divvy.ledger import SearchLedgers\n')]
|
#!/usr/bin/python3
from PIL import Image
import math, sys
figure=" "
regular=" "
em = " "
en = " "
scale=[
"\x1b[30m█",
"\x1b[3{}m░",
"\x1b[9{}m░",
"\x1b[3{}m▒",
"\x1b[9{}m▒",
"\x1b[3{}m▓",
"\x1b[9{}m▓",
"\x1b[3{}m█",
"\x1b[9{}m█"
]
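# nine brightness steps from black through dim and bright shaded blocks; '{}' receives the ANSI colour digit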
def hue_raw(fullpixel):
global threshhold_global
hue_arr = [False,False,False]
if (fullpixel[0] >=threshhold_global ):
hue_arr[0] = True
if (fullpixel[1] >= threshhold_global):
hue_arr[1] = True
if (fullpixel[2] >= threshhold_global ):
hue_arr[2] = True
return hue_arr
def hue(fullpixel):
global threshhold_global
hue_arr=hue_raw(fullpixel)
return hue_arr[0]*1+hue_arr[1]*2+hue_arr[2]*4
def weighted_average(fullpixel):
global threshhold_global
hue_arr=hue_raw(fullpixel)
count=0
pixsum=0
for i in range(3):
if (hue_arr[i]):
count+=1
pixsum+=fullpixel[i]
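    # if no channel clears the threshold, average all three channels to avoid dividing by zero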
if count==0:
count=3
return pixsum//count
threshhold_global=127
def image_2_block_ansi(filename, output="", dither=True, double_flag = False):
global threshhold_global
if double_flag:
pixelWidth=2
else:
pixelWidth=1
image = Image.open(filename).convert("RGB")
imgdump = image.load()
if output != "":
output_file = open(output,"wb")
for y in range(image.height):
string = "\x1b[40m"
#output_file.write(bytes([9]))
for x in range(image.width):
            pixel = list(imgdump[x,y])  # a mutable copy so channels can be clamped
            for i in range(3):
                if (pixel[i] > 255):
                    pixel[i] = 255
hue_arr=hue_raw(pixel)
color=hue(pixel)
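            # map 0-255 brightness onto the nine-step character scale (255 // 29 == 8)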
brightness=weighted_average(pixel) // 29
string += scale[brightness].format(color) * pixelWidth
            if dither:
                # Floyd-Steinberg error diffusion: push the quantisation error
                # onto the four unprocessed neighbours (list() is needed here;
                # the original [].extend(...) always returned None, so no
                # dithering ever happened)
                pixels_i_care_about=[None,None,None,None]
                if x!=image.width-1:
                    pixels_i_care_about[0]=list(imgdump[x+1,y])
                if y!=image.height-1 and x!=image.width-1:
                    pixels_i_care_about[1]=list(imgdump[x+1,y+1])
                if y!=image.height-1:
                    pixels_i_care_about[2]=list(imgdump[x,y+1])
                if x!=0 and y!=image.height-1:
                    pixels_i_care_about[3]=list(imgdump[x-1,y+1])
                for i in range(3):
                    if (hue_arr[i]):
                        error=pixel[i]%29
                    else:
                        error=0
                    if pixels_i_care_about[0]!=None:
                        pixels_i_care_about[0][i] += math.floor((7/16)*error)
                    if pixels_i_care_about[1]!=None:
                        pixels_i_care_about[1][i] += math.floor((1/16)*error)
                    if pixels_i_care_about[2]!=None:
                        pixels_i_care_about[2][i] += math.floor((5/16)*error)
                    if pixels_i_care_about[3]!=None:
                        pixels_i_care_about[3][i] += math.floor((3/16)*error)
                # clamp before writing back so channel values stay valid bytes
                if pixels_i_care_about[0]!=None:
                    imgdump[x+1,y] = tuple(min(v, 255) for v in pixels_i_care_about[0])
                if pixels_i_care_about[1]!=None:
                    imgdump[x+1,y+1] = tuple(min(v, 255) for v in pixels_i_care_about[1])
                if pixels_i_care_about[2]!=None:
                    imgdump[x,y+1] = tuple(min(v, 255) for v in pixels_i_care_about[2])
                if pixels_i_care_about[3]!=None:
                    imgdump[x-1,y+1] = tuple(min(v, 255) for v in pixels_i_care_about[3])
if output == "":
print(string)
else:
output_file.write(string.encode('utf_8'))
output_file.write(bytes([13,10]))
if output != "":
output_file.close()
help_text="""
Usage: printBlockAnsi.py image [-h] [-d] [-w] [-t #n]
-h: Print this
-d: Disable Dithering
-w: print every character twice
-t: use a custom threshold for hue selection
"""
if __name__ == "__main__":
argc=len(sys.argv)
    if (argc==1):
        print("No input image given.")
        print(help_text)
        sys.exit(1)
in_file=sys.argv[1]
dither_flag=True
pixel_width_flag=False
while (argc>2 and sys.argv[2][0]=="-"):
if sys.argv[2]=="-h":
print(help_text)
sys.exit(0)
elif sys.argv[2]=="-d":
dither_flag=False
elif sys.argv[2]=="-w":
pixel_width_flag=True
elif sys.argv[2]=="-t":
if argc>3 and sys.argv[3].isnumeric() and int(sys.argv[3])>=0:
threshhold_global=int(sys.argv[3])
for i in range(3,len(sys.argv)-1):
sys.argv[i]=sys.argv[i+1]
argc=argc-1
for i in range(2,len(sys.argv)-1):
sys.argv[i]=sys.argv[i+1]
argc=argc-1
if (argc>2):
out_file=sys.argv[2]
else:
out_file=""
    image_2_block_ansi(in_file, output=out_file, dither=dither_flag, double_flag=pixel_width_flag)
|
[
"math.floor",
"sys.exit",
"PIL.Image.open"
] |
[((3302, 3313), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3310, 3313), False, 'import math, sys\n'), ((1076, 1096), 'PIL.Image.open', 'Image.open', (['filename'], {}), '(filename)\n', (1086, 1096), False, 'from PIL import Image\n'), ((3465, 3476), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3473, 3476), False, 'import math, sys\n'), ((2180, 2206), 'math.floor', 'math.floor', (['(7 / 16 * error)'], {}), '(7 / 16 * error)\n', (2190, 2206), False, 'import math, sys\n'), ((2278, 2304), 'math.floor', 'math.floor', (['(1 / 16 * error)'], {}), '(1 / 16 * error)\n', (2288, 2304), False, 'import math, sys\n'), ((2376, 2402), 'math.floor', 'math.floor', (['(5 / 16 * error)'], {}), '(5 / 16 * error)\n', (2386, 2402), False, 'import math, sys\n'), ((2474, 2500), 'math.floor', 'math.floor', (['(3 / 16 * error)'], {}), '(3 / 16 * error)\n', (2484, 2500), False, 'import math, sys\n')]
|