code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import datetime
import functools
import io
import os
import zipfile
import httpx
import pytest
from coverage_comment import coverage as coverage_module
from coverage_comment import github_client, settings
@pytest.fixture
def base_config():
    """Return a factory building a settings.Config with test defaults.

    Keyword arguments passed to the factory override the defaults.
    """
    def make_config(**overrides):
        params = {
            # GitHub stuff
            "GITHUB_TOKEN": "foo",
            "GITHUB_PR_RUN_ID": 123,
            "GITHUB_REPOSITORY": "ewjoachim/foobar",
            # Action settings
            "MERGE_COVERAGE_FILES": True,
            "VERBOSE": False,
        }
        params.update(overrides)
        return settings.Config(**params)
    return make_config
@pytest.fixture
def push_config(base_config):
    """Return a factory building a Config as seen on a ``push`` event."""
    def make_config(**overrides):
        params = {
            # GitHub stuff
            "GITHUB_BASE_REF": "",
            "GITHUB_REF": "refs/heads/main",
            "GITHUB_EVENT_NAME": "push",
        }
        params.update(overrides)
        return base_config(**params)
    return make_config
@pytest.fixture
def pull_request_config(base_config):
    """Return a factory building a Config as seen on a ``pull_request`` event."""
    def make_config(**overrides):
        params = {
            # GitHub stuff
            "GITHUB_BASE_REF": "main",
            "GITHUB_REF": "refs/pull/2/merge",
            "GITHUB_EVENT_NAME": "pull_request",
        }
        params.update(overrides)
        return base_config(**params)
    return make_config
@pytest.fixture
def workflow_run_config(base_config):
    """Return a factory building a Config as seen on a ``workflow_run`` event."""
    def make_config(**overrides):
        params = {
            # GitHub stuff
            "GITHUB_BASE_REF": "",
            "GITHUB_REF": "refs/heads/main",
            "GITHUB_EVENT_NAME": "workflow_run",
        }
        params.update(overrides)
        return base_config(**params)
    return make_config
@pytest.fixture
def coverage_json():
    """Raw coverage JSON report fixture (``meta`` / ``files`` / ``totals``).

    Percentages are expressed on a 0-100 scale (75.0), unlike the parsed
    ``coverage_obj`` fixture which stores ratios (0.75).
    """
    return {
        "meta": {
            "version": "1.2.3",
            "timestamp": "2000-01-01T00:00:00",
            "branch_coverage": True,
            "show_contexts": False,
        },
        "files": {
            "codebase/code.py": {
                "executed_lines": [1, 2, 5, 6, 9],
                "summary": {
                    "covered_lines": 5,
                    "num_statements": 6,
                    "percent_covered": 75.0,
                    "missing_lines": 1,
                    "excluded_lines": 0,
                    "num_branches": 2,
                    "num_partial_branches": 1,
                    "covered_branches": 1,
                    "missing_branches": 1,
                },
                "missing_lines": [7, 9],
                "excluded_lines": [],
            }
        },
        "totals": {
            "covered_lines": 5,
            "num_statements": 6,
            "percent_covered": 75.0,
            "missing_lines": 1,
            "excluded_lines": 0,
            "num_branches": 2,
            "num_partial_branches": 1,
            "covered_branches": 1,
            "missing_branches": 1,
        },
    }
@pytest.fixture
def diff_coverage_json():
    """Raw diff-cover JSON report fixture: per-file stats plus totals."""
    return {
        "report_name": "XML",
        "diff_name": "master...HEAD, staged and unstaged changes",
        "src_stats": {
            "codebase/code.py": {
                "percent_covered": 80.0,
                "violation_lines": [9],
                "violations": [[9, None]],
            }
        },
        "total_num_lines": 5,
        "total_num_violations": 1,
        "total_percent_covered": 80,
        "num_changed_lines": 39,
    }
@pytest.fixture
def coverage_obj():
    """Parsed Coverage object with branch coverage enabled.

    Values mirror the raw ``coverage_json`` fixture, except that
    percentages are stored as ratios (0.75, not 75.0).
    """
    return coverage_module.Coverage(
        meta=coverage_module.CoverageMetadata(
            version="1.2.3",
            timestamp=datetime.datetime(2000, 1, 1),
            branch_coverage=True,
            show_contexts=False,
        ),
        info=coverage_module.CoverageInfo(
            covered_lines=5,
            num_statements=6,
            percent_covered=0.75,
            missing_lines=1,
            excluded_lines=0,
            num_branches=2,
            num_partial_branches=1,
            covered_branches=1,
            missing_branches=1,
        ),
        files={
            "codebase/code.py": coverage_module.FileCoverage(
                path="codebase/code.py",
                executed_lines=[1, 2, 5, 6, 9],
                missing_lines=[7, 9],
                excluded_lines=[],
                info=coverage_module.CoverageInfo(
                    covered_lines=5,
                    num_statements=6,
                    percent_covered=0.75,
                    missing_lines=1,
                    excluded_lines=0,
                    num_branches=2,
                    num_partial_branches=1,
                    covered_branches=1,
                    missing_branches=1,
                ),
            )
        },
    )
@pytest.fixture
def coverage_obj_no_branch():
    """Parsed Coverage object with branch coverage disabled.

    All branch-related counters are ``None``; only line 7 is missing.
    """
    return coverage_module.Coverage(
        meta=coverage_module.CoverageMetadata(
            version="1.2.3",
            timestamp=datetime.datetime(2000, 1, 1),
            branch_coverage=False,
            show_contexts=False,
        ),
        info=coverage_module.CoverageInfo(
            covered_lines=5,
            num_statements=6,
            percent_covered=0.75,
            missing_lines=1,
            excluded_lines=0,
            num_branches=None,
            num_partial_branches=None,
            covered_branches=None,
            missing_branches=None,
        ),
        files={
            "codebase/code.py": coverage_module.FileCoverage(
                path="codebase/code.py",
                executed_lines=[1, 2, 5, 6, 9],
                missing_lines=[7],
                excluded_lines=[],
                info=coverage_module.CoverageInfo(
                    covered_lines=5,
                    num_statements=6,
                    percent_covered=0.75,
                    missing_lines=1,
                    excluded_lines=0,
                    num_branches=None,
                    num_partial_branches=None,
                    covered_branches=None,
                    missing_branches=None,
                ),
            )
        },
    )
@pytest.fixture
def diff_coverage_obj():
    """Parsed DiffCoverage object (ratios on a 0-1 scale, e.g. 0.8)."""
    return coverage_module.DiffCoverage(
        total_num_lines=5,
        total_num_violations=1,
        total_percent_covered=0.8,
        num_changed_lines=39,
        files={
            "codebase/code.py": coverage_module.FileDiffCoverage(
                path="codebase/code.py",
                percent_covered=0.8,
                violation_lines=[7, 9],
            )
        },
    )
@pytest.fixture
def session():
    """
    You get a session object. Register responses on it:
        session.register(method="GET", path="/a/b")(status_code=200)
    or
        session.register(method="GET", path="/a/b", json=checker)(status_code=200)
    (where checker is a function receiving the json value, and returning True if it
    matches)
    if session.request(method="GET", path="/a/b") is called, it will return a response
    with status_code 200. Also, if not called by the end of the test, it will raise.
    """
    class Session:
        def __init__(self):
            # Fix: `responses` used to be a mutable class attribute, which
            # would be shared by every instance of the class. Make it a
            # per-instance attribute instead.
            self.responses = []  # List[Tuple[request kwargs, response kwargs]]
        def request(self, method, path, **kwargs):
            # Find the first registered response whose match kwargs all
            # agree with the request; consume it and build the response.
            request_kwargs = {"method": method, "path": path} | kwargs
            for i, (match_kwargs, response_kwargs) in enumerate(self.responses):
                match = True
                for key, match_value in match_kwargs.items():
                    if key not in request_kwargs:
                        match = False
                        break
                    request_value = request_kwargs[key]
                    # Callable matchers are predicates on the request value.
                    if hasattr(match_value, "__call__"):
                        try:
                            assert match_value(request_value)
                        except Exception:
                            match = False
                            break
                    else:
                        if not match_value == request_value:
                            match = False
                            break
                if match:
                    self.responses.pop(i)
                    return httpx.Response(
                        **response_kwargs,
                        request=httpx.Request(method=method, url=path),
                    )
            assert (
                False
            ), f"No response found for kwargs {request_kwargs}\nExpected answers are {self.responses}"
        def __getattr__(self, value):
            # Expose get/post/patch/delete/put as shortcuts to request().
            if value in ["get", "post", "patch", "delete", "put"]:
                return functools.partial(self.request, value.upper())
            raise AttributeError(value)
        def register(self, method, path, **request_kwargs):
            """Return a callable registering a canned response for a request."""
            request_kwargs = {"method": method, "path": path} | request_kwargs
            def _(**response_kwargs):
                response_kwargs.setdefault("status_code", 200)
                self.responses.append((request_kwargs, response_kwargs))
            return _
    session = Session()
    yield session
    # Every registered response must have been consumed by the test.
    assert not session.responses
@pytest.fixture
def gh(session):
    """Return a GitHub client wired to the fake ``session`` fixture."""
    client = github_client.GitHub(session=session)
    return client
@pytest.fixture
def get_logs(caplog):
    """Return a helper filtering captured log messages by level and substring."""
    caplog.set_level("DEBUG")
    def get_logs(level=None, match=None):
        messages = []
        for record in caplog.records:
            # Skip records failing either filter (None means "no filter").
            if level is not None and level != record.levelname:
                continue
            if match is not None and match not in record.message:
                continue
            messages.append(record.message)
        return messages
    return get_logs
@pytest.fixture
def in_tmp_path(tmp_path):
    """Run the test with ``tmp_path`` as the current working directory.

    The previous working directory is restored in a ``finally`` block so
    it is put back even if fixture finalization is interrupted by an
    exception.
    """
    curdir = os.getcwd()
    os.chdir(tmp_path)
    try:
        yield tmp_path
    finally:
        os.chdir(curdir)
@pytest.fixture
def zip_bytes():
    """Return a factory building an in-memory zip archive holding one file."""
    def make_zip(filename, content):
        buffer = io.BytesIO()
        with zipfile.ZipFile(buffer, mode="w") as archive:
            with archive.open(filename, "w") as member:
                member.write(content.encode("utf-8"))
        payload = buffer.getvalue()
        # Sanity check: zip archives start with the "PK" magic bytes.
        assert payload.startswith(b"PK")
        return payload
    return make_zip
| [
"datetime.datetime",
"zipfile.ZipFile",
"coverage_comment.github_client.GitHub",
"io.BytesIO",
"coverage_comment.coverage.FileDiffCoverage",
"os.getcwd",
"os.chdir",
"coverage_comment.settings.Config",
"httpx.Request",
"coverage_comment.coverage.CoverageInfo"
] | [((8990, 9027), 'coverage_comment.github_client.GitHub', 'github_client.GitHub', ([], {'session': 'session'}), '(session=session)\n', (9010, 9027), False, 'from coverage_comment import github_client, settings\n'), ((9422, 9433), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9431, 9433), False, 'import os\n'), ((9438, 9456), 'os.chdir', 'os.chdir', (['tmp_path'], {}), '(tmp_path)\n', (9446, 9456), False, 'import os\n'), ((9480, 9496), 'os.chdir', 'os.chdir', (['curdir'], {}), '(curdir)\n', (9488, 9496), False, 'import os\n'), ((565, 601), 'coverage_comment.settings.Config', 'settings.Config', ([], {}), '(**defaults | kwargs)\n', (580, 601), False, 'from coverage_comment import github_client, settings\n'), ((9577, 9589), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (9587, 9589), False, 'import io\n'), ((3601, 3809), 'coverage_comment.coverage.CoverageInfo', 'coverage_module.CoverageInfo', ([], {'covered_lines': '(5)', 'num_statements': '(6)', 'percent_covered': '(0.75)', 'missing_lines': '(1)', 'excluded_lines': '(0)', 'num_branches': '(2)', 'num_partial_branches': '(1)', 'covered_branches': '(1)', 'missing_branches': '(1)'}), '(covered_lines=5, num_statements=6,\n percent_covered=0.75, missing_lines=1, excluded_lines=0, num_branches=2,\n num_partial_branches=1, covered_branches=1, missing_branches=1)\n', (3629, 3809), True, 'from coverage_comment import coverage as coverage_module\n'), ((4921, 5146), 'coverage_comment.coverage.CoverageInfo', 'coverage_module.CoverageInfo', ([], {'covered_lines': '(5)', 'num_statements': '(6)', 'percent_covered': '(0.75)', 'missing_lines': '(1)', 'excluded_lines': '(0)', 'num_branches': 'None', 'num_partial_branches': 'None', 'covered_branches': 'None', 'missing_branches': 'None'}), '(covered_lines=5, num_statements=6,\n percent_covered=0.75, missing_lines=1, excluded_lines=0, num_branches=\n None, num_partial_branches=None, covered_branches=None,\n missing_branches=None)\n', (4949, 5146), True, 'from coverage_comment import 
coverage as coverage_module\n'), ((9603, 9634), 'zipfile.ZipFile', 'zipfile.ZipFile', (['file'], {'mode': '"""w"""'}), "(file, mode='w')\n", (9618, 9634), False, 'import zipfile\n'), ((6212, 6319), 'coverage_comment.coverage.FileDiffCoverage', 'coverage_module.FileDiffCoverage', ([], {'path': '"""codebase/code.py"""', 'percent_covered': '(0.8)', 'violation_lines': '[7, 9]'}), "(path='codebase/code.py', percent_covered=\n 0.8, violation_lines=[7, 9])\n", (6244, 6319), True, 'from coverage_comment import coverage as coverage_module\n'), ((3479, 3508), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (3496, 3508), False, 'import datetime\n'), ((4798, 4827), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (4815, 4827), False, 'import datetime\n'), ((4183, 4391), 'coverage_comment.coverage.CoverageInfo', 'coverage_module.CoverageInfo', ([], {'covered_lines': '(5)', 'num_statements': '(6)', 'percent_covered': '(0.75)', 'missing_lines': '(1)', 'excluded_lines': '(0)', 'num_branches': '(2)', 'num_partial_branches': '(1)', 'covered_branches': '(1)', 'missing_branches': '(1)'}), '(covered_lines=5, num_statements=6,\n percent_covered=0.75, missing_lines=1, excluded_lines=0, num_branches=2,\n num_partial_branches=1, covered_branches=1, missing_branches=1)\n', (4211, 4391), True, 'from coverage_comment import coverage as coverage_module\n'), ((5512, 5737), 'coverage_comment.coverage.CoverageInfo', 'coverage_module.CoverageInfo', ([], {'covered_lines': '(5)', 'num_statements': '(6)', 'percent_covered': '(0.75)', 'missing_lines': '(1)', 'excluded_lines': '(0)', 'num_branches': 'None', 'num_partial_branches': 'None', 'covered_branches': 'None', 'missing_branches': 'None'}), '(covered_lines=5, num_statements=6,\n percent_covered=0.75, missing_lines=1, excluded_lines=0, num_branches=\n None, num_partial_branches=None, covered_branches=None,\n missing_branches=None)\n', (5540, 5737), True, 'from 
coverage_comment import coverage as coverage_module\n'), ((8107, 8145), 'httpx.Request', 'httpx.Request', ([], {'method': 'method', 'url': 'path'}), '(method=method, url=path)\n', (8120, 8145), False, 'import httpx\n')] |
from colorama import Fore, Style
from Player import Player
class InputService:
    """Console I/O helpers for a two-player tic-tac-toe style game.

    Holds the board list (index 0 is a '#' placeholder; tiles 1-9 map to
    playable positions) and the list of tiles still available.
    """
    def __init__(self):
        # Index 0 is unused; positions 1-9 hold player markers.
        self.game_board = ['#', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
        self.available_tiles = list(range(1, 10))
    @staticmethod
    def get_player_names():
        """Prompt both players for their names; return them title-cased."""
        player_1_name_input = input("Player 1 - Please enter you name\n")
        player_2_name_input = input("Player 2 - Please enter you name\n")
        return player_1_name_input.title(), player_2_name_input.title()
    @staticmethod
    def get_player_move_choice(player_1):
        """Ask player 1 to pick 'X' or 'O'; player 2 gets the remaining marker.

        Re-prompts on invalid input. Returns (player1_marker, player2_marker).
        """
        input_styles = ['X', 'O']
        player_style_input = input(f"{player_1}, please pick a marker 'X' or 'O'\n")
        while player_style_input.upper() not in input_styles:
            print(Fore.RED + f'Invalid input : {player_style_input}' + Style.RESET_ALL)
            player_style_input = input(
                f"{player_1}, please choose a marker only between" + Fore.GREEN + f" {input_styles[0]} " + Style.RESET_ALL +
                "or " + Fore.GREEN + f"{input_styles[1]}" + Style.RESET_ALL)
        player1_choice = player_style_input.upper()
        # Remove player 1's marker so player 2 gets the other one.
        input_styles.pop(input_styles.index(player_style_input.upper()))
        player2_choice = input_styles[0]
        return player1_choice, player2_choice
    def create_players(self):
        """Build both Player objects from the console prompts above."""
        player_1_name_input, player_2_name_input = self.get_player_names()
        player_1_move, player_2_move = self.get_player_move_choice(player_1_name_input)
        player_1 = Player.player_from_input(player_1_name_input, player_1_move)
        player_2 = Player.player_from_input(player_2_name_input, player_2_move)
        return player_1, player_2
    def play_moves(self, selected_player):
        """Prompt the player for a free tile and write their marker to the board.

        Re-prompts until the input is a digit among the available tiles,
        then marks the board and removes the tile from availability.
        """
        print(selected_player.color)
        player_tile_input = input(f'{selected_player.name}, please choose a tile to play.\n')
        print(Style.RESET_ALL)
        while not player_tile_input.isdigit() or int(player_tile_input) not in self.available_tiles:
            print(Fore.RED + f'Invalid input : {player_tile_input}' + Style.RESET_ALL)
            print(
                'Please choose a tile to play among values : ' + Fore.YELLOW + f'{self.available_tiles} \n' + Style.RESET_ALL)
            player_tile_input = input()
        print(f'Player Tile input is : {player_tile_input}')
        self.game_board[int(player_tile_input)] = selected_player.move_style
        self.available_tiles.pop(self.available_tiles.index(int(player_tile_input)))
    @staticmethod
    def ask_play_again():
        """Ask whether to play again; return the raw answer ('Y'/'N', any case)."""
        options = ('Y', 'N')
        while True:
            play_again_input = input('Do You want to play another game ? Y/N\n')
            if play_again_input.isdigit() or play_again_input.upper() not in options:
                print(f'Incorrect input, please choose an option from : {options}')
                continue
            else:
                break
        return play_again_input
    def reset_board(self):
        """Clear the board and restore all nine tiles as available."""
        self.game_board = ['#', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
        self.available_tiles = list(range(1, 10))
| [
"Player.Player.player_from_input"
] | [((1501, 1561), 'Player.Player.player_from_input', 'Player.player_from_input', (['player_1_name_input', 'player_1_move'], {}), '(player_1_name_input, player_1_move)\n', (1525, 1561), False, 'from Player import Player\n'), ((1581, 1641), 'Player.Player.player_from_input', 'Player.player_from_input', (['player_2_name_input', 'player_2_move'], {}), '(player_2_name_input, player_2_move)\n', (1605, 1641), False, 'from Player import Player\n')] |
"""Provides the blueprint for the fulltext API."""
from typing import Optional, Callable, Any, List
from flask import request, Blueprint, Response, make_response
from werkzeug.exceptions import NotAcceptable, BadRequest, NotFound
from flask.json import jsonify
from arxiv import status
from arxiv.users.domain import Session, Scope
from arxiv.users.auth import scopes
from arxiv.users.auth.decorators import scoped
from arxiv.base import logging
from fulltext import controllers
from .domain import SupportedBuckets, SupportedFormats
logger = logging.getLogger(__name__)
# URL prefixes: published arXiv papers vs. submission sources.
ARXIV_PREFIX = '/<id_type>/<arxiv:identifier>'
SUBMISSION_PREFIX = '/<id_type>/<source:identifier>'
blueprint = Blueprint('fulltext', __name__, url_prefix='')
# An Authorizer maps (identifier, owner_id) to an authorization decision.
Authorizer = Callable[[str, Optional[str]], bool]
def make_authorizer(scope: Scope) -> Authorizer:
    """Make an authorizer function for injection into a controller."""
    def inner(identifier: str, owner_id: Optional[str]) -> bool:
        """Check whether the session is authorized for a specific resource."""
        logger.debug('Authorize for %s owned by %s', identifier, owner_id)
        logger.debug('Client user id is %s', request.auth.user.user_id)
        # The identifier is expected to be ``<source_id>/<checksum>``.
        try:
            source_id, checksum = identifier.split('/', 1)
        except ValueError as e:
            logger.debug('Bad identifier? %s', e)
            raise NotFound('Unsupported identifier') from e
        authorized = request.auth.is_authorized(scope, source_id)
        if authorized:
            return authorized
        # Fall back to ownership: the authenticated user owns the resource.
        user = request.auth.user
        return user and str(user.user_id) == owner_id
    return inner
def resource_id(id_type: str, identifier: str, *args: Any, **kw: Any) -> str:
    """Get the resource ID for an endpoint."""
    if id_type != SupportedBuckets.SUBMISSION:
        return identifier
    # Submission identifiers look like ``<source_id>/<checksum>``; the
    # resource is the source ID alone.
    source_id, _, _ = identifier.partition('/')
    return source_id
def best_match(available: List[str], default: str) -> str:
    """Determine best content type given Accept header and available types."""
    if 'Accept' in request.headers:
        chosen: str = request.accept_mimetypes.best_match(available)
        return chosen
    # No Accept header: fall back to the caller-provided default.
    return default
@blueprint.route('/status')
def ok() -> Response:
    """Provide current integration status information for health checks."""
    body, status_code, headers = controllers.service_status()
    resp: Response = make_response(jsonify(body), status_code, headers)
    return resp
@blueprint.route(ARXIV_PREFIX, methods=['POST'])
@blueprint.route(SUBMISSION_PREFIX, methods=['POST'])
@scoped(scopes.CREATE_FULLTEXT, resource=resource_id)
def start_extraction(id_type: str, identifier: str) -> Response:
    """Handle requests for fulltext extraction.

    Reads an optional JSON payload; ``force`` (default False) retriggers
    extraction. Returns the controller's JSON response.
    """
    payload: Optional[dict] = request.get_json()
    force: bool = payload.get('force', False) if payload is not None else False
    token = request.environ['token']
    # Authorization is required to work with submissions.
    authorizer: Optional[Authorizer] = None
    if id_type == SupportedBuckets.SUBMISSION:
        authorizer = make_authorizer(scopes.READ_COMPILE)
    data, code, headers = \
        controllers.start_extraction(id_type, identifier, token, force=force,
                                     authorizer=authorizer)
    response: Response = make_response(jsonify(data), code, headers)
    return response
@blueprint.route(ARXIV_PREFIX + '/version/<version>/format/<content_fmt>')
@blueprint.route(ARXIV_PREFIX + '/version/<version>')
@blueprint.route(ARXIV_PREFIX + '/format/<content_fmt>')
@blueprint.route(ARXIV_PREFIX)
@blueprint.route(SUBMISSION_PREFIX + '/version/<version>/format/<content_fmt>')
@blueprint.route(SUBMISSION_PREFIX + '/version/<version>')
@blueprint.route(SUBMISSION_PREFIX + '/format/<content_fmt>')
@blueprint.route(SUBMISSION_PREFIX)
@scoped(scopes.READ_FULLTEXT, resource=resource_id)
def retrieve(id_type: str, identifier: str, version: Optional[str] = None,
             content_fmt: str = SupportedFormats.PLAIN) -> Response:
    """Retrieve full-text content for an arXiv paper.

    Content negotiation selects plain text or JSON from the Accept header.

    Raises
    ------
    BadRequest
        If no identifier was provided.
    NotAcceptable
        If negotiation yields an unsupported content type.
    """
    if identifier is None:
        raise BadRequest('identifier missing in request')
    available = ['application/json', 'text/plain']
    content_type = best_match(available, 'application/json')
    # Authorization is required to work with submissions.
    authorizer: Optional[Authorizer] = None
    if id_type == SupportedBuckets.SUBMISSION:
        authorizer = make_authorizer(scopes.READ_COMPILE)
    data, code, headers = controllers.retrieve(identifier, id_type, version,
                                              content_fmt=content_fmt,
                                              authorizer=authorizer)
    if content_type == 'text/plain':
        response_data = Response(data['content'], content_type='text/plain')
    elif content_type == 'application/json':
        # Removed a leftover no-op: ``data['content'] = data['content']``.
        response_data = jsonify(data)
    else:
        raise NotAcceptable('unsupported content type')
    response: Response = make_response(response_data, code, headers)
    return response
@blueprint.route(ARXIV_PREFIX + '/version/<version>/status')
@blueprint.route(ARXIV_PREFIX + '/status')
@blueprint.route(SUBMISSION_PREFIX + '/version/<version>/status')
@blueprint.route(SUBMISSION_PREFIX + '/status')
@scoped(scopes.READ_FULLTEXT, resource=resource_id)
def task_status(id_type: str, identifier: str,
                version: Optional[str] = None) -> Response:
    """Get the status of a text extraction task."""
    # Authorization is required to work with submissions.
    if id_type == SupportedBuckets.SUBMISSION:
        authorizer: Optional[Authorizer] = make_authorizer(scopes.READ_COMPILE)
    else:
        authorizer = None
    body, code, headers = controllers.get_task_status(identifier, id_type,
                                                version=version,
                                                authorizer=authorizer)
    response: Response = make_response(jsonify(body), code, headers)
    return response
| [
"werkzeug.exceptions.BadRequest",
"flask.request.auth.is_authorized",
"flask.json.jsonify",
"werkzeug.exceptions.NotAcceptable",
"arxiv.base.logging.getLogger",
"werkzeug.exceptions.NotFound",
"fulltext.controllers.get_task_status",
"fulltext.controllers.start_extraction",
"fulltext.controllers.retr... | [((545, 572), 'arxiv.base.logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (562, 572), False, 'from arxiv.base import logging\n'), ((687, 733), 'flask.Blueprint', 'Blueprint', (['"""fulltext"""', '__name__'], {'url_prefix': '""""""'}), "('fulltext', __name__, url_prefix='')\n", (696, 733), False, 'from flask import request, Blueprint, Response, make_response\n'), ((2499, 2551), 'arxiv.users.auth.decorators.scoped', 'scoped', (['scopes.CREATE_FULLTEXT'], {'resource': 'resource_id'}), '(scopes.CREATE_FULLTEXT, resource=resource_id)\n', (2505, 2551), False, 'from arxiv.users.auth.decorators import scoped\n'), ((3755, 3805), 'arxiv.users.auth.decorators.scoped', 'scoped', (['scopes.READ_FULLTEXT'], {'resource': 'resource_id'}), '(scopes.READ_FULLTEXT, resource=resource_id)\n', (3761, 3805), False, 'from arxiv.users.auth.decorators import scoped\n'), ((5281, 5331), 'arxiv.users.auth.decorators.scoped', 'scoped', (['scopes.READ_FULLTEXT'], {'resource': 'resource_id'}), '(scopes.READ_FULLTEXT, resource=resource_id)\n', (5287, 5331), False, 'from arxiv.users.auth.decorators import scoped\n'), ((2056, 2102), 'flask.request.accept_mimetypes.best_match', 'request.accept_mimetypes.best_match', (['available'], {}), '(available)\n', (2091, 2102), False, 'from flask import request, Blueprint, Response, make_response\n'), ((2274, 2302), 'fulltext.controllers.service_status', 'controllers.service_status', ([], {}), '()\n', (2300, 2302), False, 'from fulltext import controllers\n'), ((2698, 2716), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2714, 2716), False, 'from flask import request, Blueprint, Response, make_response\n'), ((3079, 3175), 'fulltext.controllers.start_extraction', 'controllers.start_extraction', (['id_type', 'identifier', 'token'], {'force': 'force', 'authorizer': 'authorizer'}), '(id_type, identifier, token, force=force,\n authorizer=authorizer)\n', (3107, 3175), False, 'from fulltext 
import controllers\n'), ((4439, 4541), 'fulltext.controllers.retrieve', 'controllers.retrieve', (['identifier', 'id_type', 'version'], {'content_fmt': 'content_fmt', 'authorizer': 'authorizer'}), '(identifier, id_type, version, content_fmt=content_fmt,\n authorizer=authorizer)\n', (4459, 4541), False, 'from fulltext import controllers\n'), ((4996, 5039), 'flask.make_response', 'make_response', (['response_data', 'code', 'headers'], {}), '(response_data, code, headers)\n', (5009, 5039), False, 'from flask import request, Blueprint, Response, make_response\n'), ((5725, 5817), 'fulltext.controllers.get_task_status', 'controllers.get_task_status', (['identifier', 'id_type'], {'version': 'version', 'authorizer': 'authorizer'}), '(identifier, id_type, version=version,\n authorizer=authorizer)\n', (5752, 5817), False, 'from fulltext import controllers\n'), ((2342, 2355), 'flask.json.jsonify', 'jsonify', (['data'], {}), '(data)\n', (2349, 2355), False, 'from flask.json import jsonify\n'), ((3248, 3261), 'flask.json.jsonify', 'jsonify', (['data'], {}), '(data)\n', (3255, 3261), False, 'from flask.json import jsonify\n'), ((4048, 4091), 'werkzeug.exceptions.BadRequest', 'BadRequest', (['"""identifier missing in request"""'], {}), "('identifier missing in request')\n", (4058, 4091), False, 'from werkzeug.exceptions import NotAcceptable, BadRequest, NotFound\n'), ((4693, 4745), 'flask.Response', 'Response', (["data['content']"], {'content_type': '"""text/plain"""'}), "(data['content'], content_type='text/plain')\n", (4701, 4745), False, 'from flask import request, Blueprint, Response, make_response\n'), ((5961, 5974), 'flask.json.jsonify', 'jsonify', (['data'], {}), '(data)\n', (5968, 5974), False, 'from flask.json import jsonify\n'), ((1428, 1472), 'flask.request.auth.is_authorized', 'request.auth.is_authorized', (['scope', 'source_id'], {}), '(scope, source_id)\n', (1454, 1472), False, 'from flask import request, Blueprint, Response, make_response\n'), ((4891, 4904), 
'flask.json.jsonify', 'jsonify', (['data'], {}), '(data)\n', (4898, 4904), False, 'from flask.json import jsonify\n'), ((4929, 4970), 'werkzeug.exceptions.NotAcceptable', 'NotAcceptable', (['"""unsupported content type"""'], {}), "('unsupported content type')\n", (4942, 4970), False, 'from werkzeug.exceptions import NotAcceptable, BadRequest, NotFound\n'), ((1370, 1404), 'werkzeug.exceptions.NotFound', 'NotFound', (['"""Unsupported identifier"""'], {}), "('Unsupported identifier')\n", (1378, 1404), False, 'from werkzeug.exceptions import NotAcceptable, BadRequest, NotFound\n')] |
from math import exp, log
from random import random
from pandas import DataFrame
from BaseBanditAlgorithm import BaseBanditAlgorithm
class Softmax(BaseBanditAlgorithm):
    """
    Implementation of the Softmax algorithm for Multi-Armed Bandit.

    Arms are sampled with probability proportional to
    exp(average_reward / temperature); annealing shrinks the temperature
    as total iterations grow.
    """
    def __init__(self, temperature=0.1, annealing=False, counts=None, values=None):
        """
        Constructor for both standard and annealing Softmax.
        Inputs:
            temperature: float -- controls the exploration phase
            annealing: bool -- If True, temperature changes with time.
            counts: List[int] -- Initial counts for each arm
            values: List[float] -- Initial average reward for each arm
        """
        # Fix: avoid mutable default arguments (previously counts=[] and
        # values=[] were shared across calls). None means "empty".
        counts = [] if counts is None else counts
        values = [] if values is None else values
        self.arms = DataFrame({'Iteration': counts, 'Reward': values})
        self.arms.index.name = 'Arm'
        self.annealing = annealing
        if annealing:
            self.update_temperature()
        else:
            self.temperature = temperature
    def initialize(self, n_arms):
        """Initiates n_arms arms as blank slates."""
        self.arms = DataFrame({'Iteration': [0], 'Reward': [0.0]}, range(n_arms))
        self.arms.index.name = 'Arm'
    def select_arm(self):
        """Draw an arm at random, weighted by the softmax of average rewards."""
        if self.annealing:
            self.update_temperature()
        probs = self.arms['Reward'].map(lambda x: exp(x / self.temperature))
        probs /= float(probs.sum())
        z = random()
        # Inverse-transform sampling over the cumulative probabilities.
        cum_prob = probs.cumsum()
        return cum_prob[cum_prob > z].index[0]
    def update(self, chosen_arm, reward):
        """Update the running average reward of the chosen arm."""
        arm = int(chosen_arm)
        # Fix: the deprecated `.ix` indexer (removed in pandas 1.0) is
        # replaced with the equivalent label-based `.loc`.
        n = self.arms.loc[arm, 'Iteration'] + 1
        self.arms.loc[arm, 'Iteration'] = n
        # Incremental mean: new_avg = old_avg*(n-1)/n + reward/n.
        self.arms.loc[arm, 'Reward'] *= (n - 1) / float(n)
        self.arms.loc[arm, 'Reward'] += reward / float(n)
    def update_temperature(self):
        """Anneal: temperature decreases as total iterations grow."""
        t = 1 + self.arms['Iteration'].sum()
        self.temperature = 1 / log(t + 0.0000001)
| [
"pandas.DataFrame",
"math.exp",
"random.random",
"math.log"
] | [((716, 766), 'pandas.DataFrame', 'DataFrame', (["{'Iteration': counts, 'Reward': values}"], {}), "({'Iteration': counts, 'Reward': values})\n", (725, 766), False, 'from pandas import DataFrame\n'), ((1419, 1427), 'random.random', 'random', ([], {}), '()\n', (1425, 1427), False, 'from random import random\n'), ((1909, 1923), 'math.log', 'log', (['(t + 1e-07)'], {}), '(t + 1e-07)\n', (1912, 1923), False, 'from math import exp, log\n'), ((1345, 1370), 'math.exp', 'exp', (['(x / self.temperature)'], {}), '(x / self.temperature)\n', (1348, 1370), False, 'from math import exp, log\n')] |
# ABC066a: read the bell values from stdin and print the sum of all
# but the largest one.
import sys
input = sys.stdin.readline
sys.setrecursionlimit(10**6)
bells = [int(token) for token in input().split()]
print(sum(bells) - max(bells))
| [
"sys.setrecursionlimit"
] | [((48, 78), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 6)'], {}), '(10 ** 6)\n', (69, 78), False, 'import sys\n')] |
# Generated by Django 2.2.12 on 2020-05-21 03:10
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates RoomRequest, Room and Message."""
    initial = True
    dependencies = [
        ('accounts', '0002_auto_20200501_0524'),
        ('classifications', '0001_initial'),
    ]
    operations = [
        # RoomRequest: an invite/inquiry exchanged between two Professionals.
        migrations.CreateModel(
            name='RoomRequest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(blank=True, default=uuid.uuid1, unique=True)),
                ('status', models.BooleanField(default=False)),
                ('type', models.CharField(blank=True, choices=[('Invite', 'Invite'), ('Inquiry', 'Inquiry')], max_length=9)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('receiver', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='room_requests_received', to='accounts.Professional')),
                ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='room_requests_sent', to='accounts.Professional')),
            ],
        ),
        # Room: a chat room hosted by a Professional, with members, tags,
        # a category and a black list of forbidden users.
        migrations.CreateModel(
            name='Room',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(blank=True, max_length=80, unique=True)),
                ('name', models.CharField(blank=True, max_length=60, unique=True)),
                ('access', models.CharField(blank=True, choices=[('Public', 'Public'), ('Private', 'Private')], max_length=9)),
                ('is_active', models.BooleanField(default=True)),
                ('black_list', models.ManyToManyField(blank=True, related_name='rooms_forbidden', to='accounts.Professional')),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='rooms', to='classifications.Category')),
                ('host', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='rooms', to='accounts.Professional')),
                ('members', models.ManyToManyField(blank=True, related_name='room_memberships', to='accounts.Professional')),
                ('tags', models.ManyToManyField(blank=True, related_name='rooms', to='classifications.Tag')),
            ],
        ),
        # Message: a timestamped text message posted in a Room by a Professional.
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(blank=True, default=uuid.uuid1, unique=True)),
                ('message', models.TextField()),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('room', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='messages', to='conversations.Room')),
                ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='messages', to='accounts.Professional')),
            ],
        ),
    ]
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField",
"django.db.models.SlugField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((447, 540), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (463, 540), False, 'from django.db import migrations, models\n'), ((564, 625), 'django.db.models.SlugField', 'models.SlugField', ([], {'blank': '(True)', 'default': 'uuid.uuid1', 'unique': '(True)'}), '(blank=True, default=uuid.uuid1, unique=True)\n', (580, 625), False, 'from django.db import migrations, models\n'), ((655, 689), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (674, 689), False, 'from django.db import migrations, models\n'), ((717, 819), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('Invite', 'Invite'), ('Inquiry', 'Inquiry')]", 'max_length': '(9)'}), "(blank=True, choices=[('Invite', 'Invite'), ('Inquiry',\n 'Inquiry')], max_length=9)\n", (733, 819), False, 'from django.db import migrations, models\n'), ((849, 888), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (869, 888), False, 'from django.db import migrations, models\n'), ((922, 957), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (942, 957), False, 'from django.db import migrations, models\n'), ((989, 1123), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""room_requests_received"""', 'to': '"""accounts.Professional"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='room_requests_received', to='accounts.Professional')\n", (1006, 1123), False, 'from django.db import migrations, models\n'), ((1148, 1278), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 
'related_name': '"""room_requests_sent"""', 'to': '"""accounts.Professional"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='room_requests_sent', to='accounts.Professional')\n", (1165, 1278), False, 'from django.db import migrations, models\n'), ((1403, 1496), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1419, 1496), False, 'from django.db import migrations, models\n'), ((1520, 1576), 'django.db.models.SlugField', 'models.SlugField', ([], {'blank': '(True)', 'max_length': '(80)', 'unique': '(True)'}), '(blank=True, max_length=80, unique=True)\n', (1536, 1576), False, 'from django.db import migrations, models\n'), ((1604, 1660), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(60)', 'unique': '(True)'}), '(blank=True, max_length=60, unique=True)\n', (1620, 1660), False, 'from django.db import migrations, models\n'), ((1690, 1792), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('Public', 'Public'), ('Private', 'Private')]", 'max_length': '(9)'}), "(blank=True, choices=[('Public', 'Public'), ('Private',\n 'Private')], max_length=9)\n", (1706, 1792), False, 'from django.db import migrations, models\n'), ((1821, 1854), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1840, 1854), False, 'from django.db import migrations, models\n'), ((1888, 1987), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""rooms_forbidden"""', 'to': '"""accounts.Professional"""'}), "(blank=True, related_name='rooms_forbidden', to=\n 'accounts.Professional')\n", (1910, 1987), False, 'from django.db import migrations, models\n'), ((2014, 2158), 'django.db.models.ForeignKey', 
'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""rooms"""', 'to': '"""classifications.Category"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, related_name='rooms', to='classifications.Category')\n", (2031, 2158), False, 'from django.db import migrations, models\n'), ((2181, 2309), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""rooms"""', 'to': '"""accounts.Professional"""'}), "(blank=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='rooms', to='accounts.Professional')\n", (2198, 2309), False, 'from django.db import migrations, models\n'), ((2336, 2436), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""room_memberships"""', 'to': '"""accounts.Professional"""'}), "(blank=True, related_name='room_memberships', to=\n 'accounts.Professional')\n", (2358, 2436), False, 'from django.db import migrations, models\n'), ((2459, 2546), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""rooms"""', 'to': '"""classifications.Tag"""'}), "(blank=True, related_name='rooms', to=\n 'classifications.Tag')\n", (2481, 2546), False, 'from django.db import migrations, models\n'), ((2674, 2767), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2690, 2767), False, 'from django.db import migrations, models\n'), ((2791, 2852), 'django.db.models.SlugField', 'models.SlugField', ([], {'blank': '(True)', 'default': 'uuid.uuid1', 'unique': '(True)'}), '(blank=True, default=uuid.uuid1, unique=True)\n', (2807, 2852), False, 'from django.db import migrations, 
models\n'), ((2883, 2901), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2899, 2901), False, 'from django.db import migrations, models\n'), ((2934, 2973), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2954, 2973), False, 'from django.db import migrations, models\n'), ((3001, 3129), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""messages"""', 'to': '"""conversations.Room"""'}), "(blank=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='messages', to='conversations.Room')\n", (3018, 3129), False, 'from django.db import migrations, models\n'), ((3155, 3275), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""messages"""', 'to': '"""accounts.Professional"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='messages', to='accounts.Professional')\n", (3172, 3275), False, 'from django.db import migrations, models\n')] |
# ============================================================================
# FILE: junkfile.py
# AUTHOR: <NAME> <Shougo.Matsu at gmail.<EMAIL>>
# License: MIT license
# ============================================================================
from .base import Base
from time import strftime
from denite.util import expand
import os
class Source(Base):
    """Denite source that lists junkfiles, newest-modified first.

    When invoked with the argument 'new', it instead offers a single
    candidate for a fresh, timestamp-named junkfile.
    """

    def __init__(self, vim):
        super().__init__(vim)
        self.name = 'junkfile'
        self.kind = 'file'

    def gather_candidates(self, context):
        """Return candidate dicts sorted by mtime (most recent first)."""
        self.vim.call('junkfile#init')
        root_dir = expand(self.vim.vars['junkfile#directory'])

        args = context['args']
        if args and args[0] == 'new':
            context['is_interactive'] = True
            new_name = strftime('%Y/%m/%Y-%m-%d-%H%M%S.') + context['input']
            short = os.path.basename(new_name)
            candidates = [{
                'word': short,
                'abbr': '[new] ' + short,
                'action__path': os.path.join(root_dir, new_name),
            }]
        else:
            candidates = [
                {'word': fname, 'action__path': os.path.join(dirpath, fname)}
                for dirpath, _dirs, fnames in os.walk(root_dir)
                for fname in fnames
            ]

        return sorted(candidates,
                      key=lambda c: os.path.getmtime(c['action__path']),
                      reverse=True)
| [
"time.strftime",
"os.path.join",
"denite.util.expand",
"os.path.basename",
"os.path.getmtime",
"os.walk"
] | [((579, 622), 'denite.util.expand', 'expand', (["self.vim.vars['junkfile#directory']"], {}), "(self.vim.vars['junkfile#directory'])\n", (585, 622), False, 'from denite.util import expand\n'), ((1105, 1118), 'os.walk', 'os.walk', (['base'], {}), '(base)\n', (1112, 1118), False, 'import os\n'), ((776, 810), 'time.strftime', 'strftime', (['"""%Y/%m/%Y-%m-%d-%H%M%S."""'], {}), "('%Y/%m/%Y-%m-%d-%H%M%S.')\n", (784, 810), False, 'from time import strftime\n'), ((886, 912), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (902, 912), False, 'import os\n'), ((1009, 1037), 'os.path.join', 'os.path.join', (['base', 'filename'], {}), '(base, filename)\n', (1021, 1037), False, 'import os\n'), ((949, 975), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (965, 975), False, 'import os\n'), ((1403, 1438), 'os.path.getmtime', 'os.path.getmtime', (["x['action__path']"], {}), "(x['action__path'])\n", (1419, 1438), False, 'import os\n'), ((1267, 1288), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (1279, 1288), False, 'import os\n')] |
# Generated by Django 3.1.5 on 2021-01-29 09:54
import api.models
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the User model (id + 8-char user_code) and switches Room.code
    # to an auto-generated, unique 8-char code.

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_code', models.CharField(max_length=8)),
            ],
        ),
        migrations.AlterField(
            model_name='room',
            name='code',
            # default is a callable: evaluated per-row, so each existing
            # Room receives a distinct generated code.
            field=models.CharField(default=api.models.generate_unique_code, max_length=8, unique=True),
        ),
    ]
| [
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((617, 705), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'api.models.generate_unique_code', 'max_length': '(8)', 'unique': '(True)'}), '(default=api.models.generate_unique_code, max_length=8,\n unique=True)\n', (633, 705), False, 'from django.db import migrations, models\n'), ((331, 424), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (347, 424), False, 'from django.db import migrations, models\n'), ((453, 483), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(8)'}), '(max_length=8)\n', (469, 483), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
# pylint: disable=wrong-import-position
# -*- coding: utf-8 -*-
"""
To run this script uncomment the following lines in the
[options.entry_points] section in setup.cfg:
console_scripts =
fibonacci = dbupdater.skeleton:run
Then run `python setup.py install` which will install the command `fibonacci`
inside your current environment.
"""
import argparse
import sys
import logging
import twitter
import pickle
import json
from inspect import getframeinfo, currentframe
from pathlib import Path
import psycopg2
from psycopg2.extras import execute_values
import geocoder
from os.path import expanduser
from time import sleep
import pyrebase
import firebase_admin
from firebase_admin import credentials
from firebase_admin import db
# Ensure filepath for data files is in module directory regardless of cwd.
FILENAME = getframeinfo(currentframe()).filename
PARENT = Path(FILENAME).resolve().parent
INCIDENT_TYPE_FILE = PARENT / 'incidentTypes.json'
DB_FILE = PARENT / 'db.json'
CONFIG_FILE = PARENT / 'config.json'
JSON_SERVER_FILE = expanduser("~/Projects/twitter_map_react/db.json")
FIREBASE_ADMIN = PARENT / 'incident-report-map-firebase-adminsdk-rx0ey-6ec9058686.json'
incident_types = {}
tweets = {}
config = {}
fb_admin_config = {}
def create_conn():
    """Open a psycopg2 connection using the already-loaded module config.

    Prints an error and returns None when load_config() has not run yet.
    """
    if not config:
        print("Error: Config has not been loaded.")
        return
    dsn = (f"dbname='{config['dbName']}' "
           f"user='{config['dbUser']}' "
           f"password='{config['dbPassword']}'")
    return psycopg2.connect(dsn)
def search_for_category():
    """Run one Twitter search per configured incident category.

    Results accumulate in the module-level `tweets` dict via search_twitter.
    """
    print("Searching for new Tweets...")
    for incident_key, spec in incident_types.items():
        search_twitter(incident_key, spec['searchString'])
    print(f'{len(tweets)} Tweets found')
def search_twitter(incident_type, search_string):
    """Search verified tweets matching *search_string* and merge them into
    the module-level `tweets` dict, tagging each with *incident_type*.
    """
    global tweets
    api = twitter.Api(config["CONSUMER_KEY"], config["CONSUMER_SECRET"],
                config["ACCESS_TOKEN"], config["ACCESS_TOKEN_SECRET"])
    # Restrict to verified accounts; spaces are stripped from the query.
    raw_query = search_string.replace(' ', '') + r"filter:verified"
    for tweet in api.GetSearch(raw_query):
        entry = tweet._json
        entry['incidentType'] = incident_type
        tweets[tweet.id] = entry
def load_json(file):
    """Parse the JSON document at *file* and return the resulting object."""
    with open(file, 'r') as handle:
        return json.load(handle)
def save_to_json_server():
    """Overwrite the local json-server fixture file with the current tweets.

    The extra "comments"/"profile" keys satisfy json-server's expected
    top-level layout.
    """
    print(f"Replacing JSON Server file at {JSON_SERVER_FILE}")
    payload = {
        "posts": tweets,
        "comments": [
            {
                "id": 1,
                "body": "some comment",
                "postId": 1
            }
        ],
        "profile": {
            "name": "typicode"
        }
    }
    with open(JSON_SERVER_FILE, 'w') as handle:
        json.dump(payload, handle)
def save_id_to_dab():
    """Bulk-insert every incident type into the `incidenttypes` table.

    Uses psycopg2's execute_values(), which expands a single ``VALUES %s``
    placeholder into one row tuple per list entry. Errors are printed and
    swallowed (best-effort), and the connection is always closed.
    """
    # Bug fix: execute_values() requires the bare "VALUES %s" template;
    # the previous "VALUES(%s)" form is rejected by psycopg2.
    sql = """
    INSERT INTO incidenttypes (id, displayname, searchstring, crisistype, regex)
    VALUES %s;
    """
    conn = None
    try:
        # connect to the PostgreSQL database
        conn = psycopg2.connect(
            f"dbname='{config['dbName']}' user='{config['dbUser']}' password='{config['dbPassword']}'")
        # create a new cursor
        cur = conn.cursor()
        # create values list
        values_list = []
        for key, value in incident_types.items():
            # NOTE(review): search_for_category() reads 'searchString'
            # (camelCase) from the same JSON; confirm these lower-case key
            # names actually exist in incidentTypes.json before relying on
            # this insert.
            values_list.append(
                (key, value['displayname'], value['searchstring'],
                 value['crisistype'], value['regex']))
        # execute the INSERT statement
        psycopg2.extras.execute_values(cur, sql, values_list)
        # commit the changes to the database
        conn.commit()
        # close communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
def db_load():
    """Re-populate the module-level `tweets` dict from the tweets table.

    Rows stored without coordinates get them reconstructed from the
    latitude/longitude columns.
    """
    print("Loading Tweets from database...")
    global tweets
    tweets = {}
    conn = create_conn()
    cur = conn.cursor()
    cur.execute(
        "SELECT id, incidenttype, latitude, longitude, serialized FROM tweets;")
    for _row_id, incident_type, lat, lon, tweet in cur.fetchall():
        if tweet["coordinates"] is None:
            tweet["coordinates"] = {
                "Latitude": lat,
                "Longitude": lon
            }
        tweet["incidentType"] = incident_type
        tweets[tweet["id"]] = tweet
def saveToPosgres():
    """Geocode tweets lacking coordinates and bulk-insert all tweets into
    public.tweets, silently skipping ids that already exist.

    Tweets with neither coordinates nor a user location are dropped.
    """
    print("Saving Tweets to database...")
    # For use with psycopg2.extras.execute_many.
    sql = """
    INSERT INTO public.tweets(id, incidenttype, latitude, longitude, serialized)
    VALUES %s
    ON CONFLICT DO NOTHING;
    """
    # For use with default execute.
    sql2 = '''
    INSERT INTO tweets (id, incidenttype, latitude, longitude, serialized)
    VALUES (%s, %s, %s, %s, %s)
    ON CONFLICT DO NOTHING;
    '''
    conn = None
    try:
        # connect to the PostgreSQL database
        conn = create_conn()
        # create a new cursor
        cur = conn.cursor()
        # test
        # cur.execute("SELECT * FROM incidenttypes;")
        # res = cur.fetchone(fetchone)
        # print(f'First row: {res}')
        # create values list
        values_list = []
        for key, value in tweets.items():
            lat = lng = None
            coords = value.get('coordinates')
            if coords:
                lat = coords['Latitude']
                lng = coords['Longitude']
            else:
                # Fall back to geocoding the free-text user location.
                # One Google Maps API call per coordinate-less tweet.
                location = value['user']['location']
                if (location):
                    g = geocoder.google(location, key=config['googleMapsApiKey'])
                    lat = g.lat
                    lng = g.lng
                else:
                    # don't add this tweet if we can't get coordinates
                    continue
            values = (key, value['incidentType'],
                      lat, lng, json.dumps(value))
            values_list.append(values)
            # cur.execute(sql2, values)
            # conn.commit()
        # execute the INSERT statement
        print("Executing SQL")
        execute_values(cur, sql, values_list)
        # commit the changes to the database
        conn.commit()
        # close communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as e:
        print(f"Error: {e}")
    finally:
        if conn is not None:
            conn.close()
def countdown(seconds):
    """Block while printing a live restart countdown from *seconds* to 0.

    Sleeps one second per tick; a negative input returns immediately.
    """
    for remaining in range(seconds, -1, -1):
        print(f"Restarting in {remaining} seconds; Press Ctrl-C to abort",
              end='\r', flush=True)
        sleep(1)
def update_firebase(data):
    """Push *data* (a dict of tweets keyed by id) to the Firebase RTDB.

    Initializes the admin app on first use; firebase_admin raises
    ValueError when the app already exists, which we treat as "connected".
    """
    print("Pushing to Firebase...")
    # Fetch the service account key JSON file contents
    try:
        cred = credentials.Certificate(fb_admin_config)
        # Initialize the app with a service account, granting admin privileges
        firebase_admin.initialize_app(cred, {
            'databaseURL': "https://incident-report-map.firebaseio.com"
        })
    except ValueError:
        # The connection has already been initialized
        pass
    # As an admin, the app has access to read and write all data,
    # regardless of Security Rules.
    ref = db.reference('tweets')
    print(ref.get())
    # Bug fix: previously pushed the module-level `tweets` global and
    # ignored the `data` argument entirely. (Also dropped the redundant
    # function-local re-imports of firebase_admin/credentials, which
    # shadowed the module-level imports.)
    ref.update(data)
def load_config():
    """Load all JSON configuration files into the module-level globals."""
    global config, incident_types, fb_admin_config
    config = load_json(CONFIG_FILE)
    # The pyrebase config needs the service-account path injected at runtime.
    config['firebase']['serviceAccount'] = FIREBASE_ADMIN
    incident_types = load_json(INCIDENT_TYPE_FILE)
    fb_admin_config = load_json(FIREBASE_ADMIN)
def main(args):
    """Run the scrape/store/publish cycle forever, pausing 120s per round.

    *args* is accepted for the console-script entry point but unused.
    """
    while True:
        print('Starting main')
        load_config()
        search_for_category()
        saveToPosgres()
        print('Done')
        db_load()
        save_to_json_server()
        update_firebase(tweets)
        countdown(120)
def run():
    """Entry point for console_scripts."""
    main(sys.argv[1:])


if __name__ == "__main__":
    run()
| [
"firebase_admin.db.reference",
"psycopg2.connect",
"firebase_admin.initialize_app",
"pathlib.Path",
"inspect.currentframe",
"json.dump",
"json.dumps",
"geocoder.google",
"time.sleep",
"firebase_admin.credentials.Certificate",
"twitter.Api",
"json.load",
"psycopg2.extras.execute_values",
"o... | [((1068, 1118), 'os.path.expanduser', 'expanduser', (['"""~/Projects/twitter_map_react/db.json"""'], {}), "('~/Projects/twitter_map_react/db.json')\n", (1078, 1118), False, 'from os.path import expanduser\n'), ((1913, 2035), 'twitter.Api', 'twitter.Api', (["config['CONSUMER_KEY']", "config['CONSUMER_SECRET']", "config['ACCESS_TOKEN']", "config['ACCESS_TOKEN_SECRET']"], {}), "(config['CONSUMER_KEY'], config['CONSUMER_SECRET'], config[\n 'ACCESS_TOKEN'], config['ACCESS_TOKEN_SECRET'])\n", (1924, 2035), False, 'import twitter\n'), ((7302, 7324), 'firebase_admin.db.reference', 'db.reference', (['"""tweets"""'], {}), "('tweets')\n", (7314, 7324), False, 'from firebase_admin import db\n'), ((866, 880), 'inspect.currentframe', 'currentframe', ([], {}), '()\n', (878, 880), False, 'from inspect import getframeinfo, currentframe\n'), ((1409, 1527), 'psycopg2.connect', 'psycopg2.connect', (['f"""dbname=\'{config[\'dbName\']}\' user=\'{config[\'dbUser\']}\' password=\'{config[\'dbPassword\']}\'"""'], {}), '(\n f"dbname=\'{config[\'dbName\']}\' user=\'{config[\'dbUser\']}\' password=\'{config[\'dbPassword\']}\'"\n )\n', (1425, 1527), False, 'import psycopg2\n'), ((2375, 2387), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2384, 2387), False, 'import json\n'), ((2804, 2834), 'json.dump', 'json.dump', (['json_server_data', 'f'], {}), '(json_server_data, f)\n', (2813, 2834), False, 'import json\n'), ((3064, 3182), 'psycopg2.connect', 'psycopg2.connect', (['f"""dbname=\'{config[\'dbName\']}\' user=\'{config[\'dbUser\']}\' password=\'{config[\'dbPassword\']}\'"""'], {}), '(\n f"dbname=\'{config[\'dbName\']}\' user=\'{config[\'dbUser\']}\' password=\'{config[\'dbPassword\']}\'"\n )\n', (3080, 3182), False, 'import psycopg2\n'), ((3532, 3585), 'psycopg2.extras.execute_values', 'psycopg2.extras.execute_values', (['cur', 'sql', 'values_list'], {}), '(cur, sql, values_list)\n', (3562, 3585), False, 'import psycopg2\n'), ((6121, 6158), 'psycopg2.extras.execute_values', 
'execute_values', (['cur', 'sql', 'values_list'], {}), '(cur, sql, values_list)\n', (6135, 6158), False, 'from psycopg2.extras import execute_values\n'), ((6633, 6641), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (6638, 6641), False, 'from time import sleep\n'), ((6856, 6896), 'firebase_admin.credentials.Certificate', 'credentials.Certificate', (['fb_admin_config'], {}), '(fb_admin_config)\n', (6879, 6896), False, 'from firebase_admin import credentials\n'), ((6985, 7087), 'firebase_admin.initialize_app', 'firebase_admin.initialize_app', (['cred', "{'databaseURL': 'https://incident-report-map.firebaseio.com'}"], {}), "(cred, {'databaseURL':\n 'https://incident-report-map.firebaseio.com'})\n", (7014, 7087), False, 'import firebase_admin\n'), ((900, 914), 'pathlib.Path', 'Path', (['FILENAME'], {}), '(FILENAME)\n', (904, 914), False, 'from pathlib import Path\n'), ((5917, 5934), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (5927, 5934), False, 'import json\n'), ((5591, 5648), 'geocoder.google', 'geocoder.google', (['location'], {'key': "config['googleMapsApiKey']"}), "(location, key=config['googleMapsApiKey'])\n", (5606, 5648), False, 'import geocoder\n')] |
# Make the framework library importable regardless of the caller's cwd.
import sys, os

_here = os.path.dirname(os.path.abspath(__file__))
for _rel in [
    'rindow/framework/lib',
]:
    sys.path.append(os.path.abspath(os.path.join(_here, _rel)))
| [
"os.path.abspath"
] | [((118, 143), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (133, 143), False, 'import sys, os\n')] |
"""
Generate download locations within a country and download them.
Written by <NAME>.
5/2020
"""
import os
import configparser
import math
import pandas as pd
import numpy as np
import random
import geopandas as gpd
from shapely.geometry import Point
import requests
import matplotlib.pyplot as plt
from PIL import Image
from io import BytesIO
from tqdm import tqdm
import logging
import time
BASE_DIR = '.'
# repo imports
import sys
sys.path.append(BASE_DIR)
from utils import PlanetDownloader
from config import VIS_CONFIG, RANDOM_SEED
COUNTRY_ABBRV = VIS_CONFIG['COUNTRY_ABBRV']
COUNTRIES_DIR = os.path.join(BASE_DIR, 'data', 'countries')
GRID_DIR = os.path.join(COUNTRIES_DIR, COUNTRY_ABBRV, 'grid')
IMAGE_DIR = os.path.join(COUNTRIES_DIR, COUNTRY_ABBRV, 'images')
ACCESS_TOKEN_DIR = os.path.join(BASE_DIR, 'planet_api_key.txt')
ACCESS_TOKEN = None
with open(ACCESS_TOKEN_DIR, 'r') as f:
ACCESS_TOKEN = f.readlines()[0]
assert ACCESS_TOKEN is not None, print("Access token is not valid")
def create_folders():
    """
    Function to create new folders.

    Ensures IMAGE_DIR (and its parents) exists; a no-op when it already does.
    """
    os.makedirs(IMAGE_DIR, exist_ok=True)
def get_polygon_download_locations(polygon, number, seed=7):
    """Sample NUMBER [lat, lon] points from POLYGON, evenly where possible.

    Lays a sqrt(number) x sqrt(number) lattice over the polygon's bounding
    box and keeps lattice points falling inside the polygon; any shortfall
    is filled with uniform random draws. SEED fixes the random fill for
    reproducibility.
    """
    random.seed(seed)
    min_x, min_y, max_x, max_y = polygon.bounds

    side = math.floor(math.sqrt(number))
    lat_axis = np.linspace(min_y, max_y, side)
    lon_axis = np.linspace(min_x, max_x, side)
    # Cartesian product of the two axes -> rows of (lat, lon).
    lattice = np.transpose(
        [np.tile(lat_axis, len(lon_axis)), np.repeat(lon_axis, len(lat_axis))])
    assert len(lattice) <= number

    points = [[lat, lon] for lat, lon in lattice
              if polygon.contains(Point(lon, lat))]

    # Top up with random draws until the quota is met.
    while len(points) < number:
        candidate = Point(random.uniform(min_x, max_x),
                          random.uniform(min_y, max_y))
        if polygon.contains(candidate):
            points.append([candidate.y, candidate.x])
    return points  # returns list of lat/lon pairs
def generate_country_download_locations(min_population=100, num_per_grid=4):
    """Write NUM_PER_GRID image download locations per populated grid cell
    to GRID_DIR/image_download_locs.csv.

    Only grid cells with at least MIN_POPULATION residents are considered.
    """
    grid = gpd.read_file(os.path.join(GRID_DIR, 'grid.shp'))
    grid = grid[grid['population'] >= min_population]

    lat_lon_pairs = grid['geometry'].apply(
        lambda polygon: get_polygon_download_locations(
            polygon, number=num_per_grid))
    centroids = grid['geometry'].centroid

    out_path = os.path.join(GRID_DIR, 'image_download_locs.csv')
    header = [
        'centroid_lat', 'centroid_lon', 'image_lat', 'image_lon', 'image_name'
    ]
    with open(out_path, 'w') as out:
        out.write(','.join(header) + '\n')
        for cell_points, centroid in zip(lat_lon_pairs, centroids):
            for lat, lon in cell_points:
                image_name = f'{lat}_{lon}.png'
                row = [str(centroid.y), str(centroid.x),
                       str(lat), str(lon), image_name]
                out.write(','.join(row) + '\n')
    print('Generated image download locations and saved at {}'.format(out_path))
def download_images(df):
    """
    Download images using a pandas DataFrame that has "image_lat", "image_lon",
    "image_name" as columns.

    Images already present in IMAGE_DIR are skipped. A failed download is
    retried up to NUM_RETRIES times with a short wait; locations that still
    fail are skipped so one bad point cannot abort the whole run.
    """
    imd = PlanetDownloader(ACCESS_TOKEN)
    zoom = 16  # NOTE(review): unused here; presumably consumed by PlanetDownloader -- confirm
    NUM_RETRIES = 20
    WAIT_TIME = 0.1  # seconds
    # drops what is already downloaded
    already_downloaded = os.listdir(IMAGE_DIR)
    print('Already downloaded ' + str(len(already_downloaded)))
    df = df.set_index('image_name').drop(already_downloaded).reset_index()
    print('Need to download ' + str(len(df)))
    # use three years of images to find one that matches search criteria
    min_year = 2014
    min_month = 1
    max_year = 2016
    max_month = 12
    for _, r in tqdm(df.iterrows(), total=df.shape[0]):
        lat = r.image_lat
        lon = r.image_lon
        name = r.image_name
        # save according to the naming convention
        image_save_path = os.path.join(IMAGE_DIR, name)
        try:
            im = imd.download_image(lat, lon, min_year, min_month, max_year, max_month)
            if im is None:
                # Bug fix: the retry loop previously referenced undefined
                # lower-case names (num_retries/wait_time) and an undefined
                # image_save_path, raising NameError on every retry.
                for _ in range(NUM_RETRIES):
                    time.sleep(WAIT_TIME)
                    im = imd.download_image(lat, lon, min_year, min_month,
                                            max_year, max_month)
                    if im is not None:
                        plt.imsave(image_save_path, im)
                        break
                # whether or not a retry succeeded, move on to the next row
                continue
            plt.imsave(image_save_path, im)
        except Exception:
            # best-effort: skip locations that keep failing
            continue
    return
if __name__ == '__main__':
    create_folders()
    # Default to running the full pipeline when no CLI argument is given.
    arg = '--all'
    if len(sys.argv) >= 2:
        arg = sys.argv[1]
    assert arg in ['--all', '--generate-download-locations', '--download-images']
    if arg == '--all':
        # Generate the CSV of download locations, then fetch every image.
        print('Generating download locations...')
        generate_country_download_locations()
        df_download = pd.read_csv(os.path.join(GRID_DIR, 'image_download_locs.csv'))
        print('Downloading images. Might take a while...')
        download_images(df_download)
    elif arg == '--generate-download-locations':
        print('Generating download locations...')
        generate_country_download_locations()
    elif arg == '--download-images':
        # Reuse a previously generated locations CSV.
        df_download = pd.read_csv(os.path.join(GRID_DIR, 'image_download_locs.csv'))
        print('Downloading images. Might take a while...')
        download_images(df_download)
    else:
        raise ValueError('Args not handled correctly')
| [
"random.uniform",
"os.listdir",
"os.makedirs",
"matplotlib.pyplot.imsave",
"os.path.join",
"math.sqrt",
"random.seed",
"shapely.geometry.Point",
"time.sleep",
"numpy.linspace",
"utils.PlanetDownloader",
"sys.path.append"
] | [((437, 462), 'sys.path.append', 'sys.path.append', (['BASE_DIR'], {}), '(BASE_DIR)\n', (452, 462), False, 'import sys\n'), ((602, 645), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data"""', '"""countries"""'], {}), "(BASE_DIR, 'data', 'countries')\n", (614, 645), False, 'import os\n'), ((657, 707), 'os.path.join', 'os.path.join', (['COUNTRIES_DIR', 'COUNTRY_ABBRV', '"""grid"""'], {}), "(COUNTRIES_DIR, COUNTRY_ABBRV, 'grid')\n", (669, 707), False, 'import os\n'), ((720, 772), 'os.path.join', 'os.path.join', (['COUNTRIES_DIR', 'COUNTRY_ABBRV', '"""images"""'], {}), "(COUNTRIES_DIR, COUNTRY_ABBRV, 'images')\n", (732, 772), False, 'import os\n'), ((793, 837), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""planet_api_key.txt"""'], {}), "(BASE_DIR, 'planet_api_key.txt')\n", (805, 837), False, 'import os\n'), ((1081, 1118), 'os.makedirs', 'os.makedirs', (['IMAGE_DIR'], {'exist_ok': '(True)'}), '(IMAGE_DIR, exist_ok=True)\n', (1092, 1118), False, 'import os\n'), ((1618, 1635), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1629, 1635), False, 'import random\n'), ((1741, 1776), 'numpy.linspace', 'np.linspace', (['min_y', 'max_y', 'edge_num'], {}), '(min_y, max_y, edge_num)\n', (1752, 1776), True, 'import numpy as np\n'), ((1788, 1823), 'numpy.linspace', 'np.linspace', (['min_x', 'max_x', 'edge_num'], {}), '(min_x, max_x, edge_num)\n', (1799, 1823), True, 'import numpy as np\n'), ((3975, 4005), 'utils.PlanetDownloader', 'PlanetDownloader', (['ACCESS_TOKEN'], {}), '(ACCESS_TOKEN)\n', (3991, 4005), False, 'from utils import PlanetDownloader\n'), ((4136, 4157), 'os.listdir', 'os.listdir', (['IMAGE_DIR'], {}), '(IMAGE_DIR)\n', (4146, 4157), False, 'import os\n'), ((1711, 1728), 'math.sqrt', 'math.sqrt', (['number'], {}), '(number)\n', (1720, 1728), False, 'import math\n'), ((2142, 2175), 'shapely.geometry.Point', 'Point', (['proposed_lon', 'proposed_lat'], {}), '(proposed_lon, proposed_lat)\n', (2147, 2175), False, 'from shapely.geometry import 
Point\n'), ((2861, 2895), 'os.path.join', 'os.path.join', (['GRID_DIR', '"""grid.shp"""'], {}), "(GRID_DIR, 'grid.shp')\n", (2873, 2895), False, 'import os\n'), ((2369, 2397), 'random.uniform', 'random.uniform', (['min_x', 'max_x'], {}), '(min_x, max_x)\n', (2383, 2397), False, 'import random\n'), ((2411, 2439), 'random.uniform', 'random.uniform', (['min_y', 'max_y'], {}), '(min_y, max_y)\n', (2425, 2439), False, 'import random\n'), ((3255, 3304), 'os.path.join', 'os.path.join', (['GRID_DIR', '"""image_download_locs.csv"""'], {}), "(GRID_DIR, 'image_download_locs.csv')\n", (3267, 3304), False, 'import os\n'), ((3761, 3810), 'os.path.join', 'os.path.join', (['GRID_DIR', '"""image_download_locs.csv"""'], {}), "(GRID_DIR, 'image_download_locs.csv')\n", (3773, 3810), False, 'import os\n'), ((6061, 6110), 'os.path.join', 'os.path.join', (['GRID_DIR', '"""image_download_locs.csv"""'], {}), "(GRID_DIR, 'image_download_locs.csv')\n", (6073, 6110), False, 'import os\n'), ((4857, 4878), 'time.sleep', 'time.sleep', (['wait_time'], {}), '(wait_time)\n', (4867, 4878), False, 'import time\n'), ((5514, 5543), 'os.path.join', 'os.path.join', (['IMAGE_DIR', 'name'], {}), '(IMAGE_DIR, name)\n', (5526, 5543), False, 'import os\n'), ((6427, 6476), 'os.path.join', 'os.path.join', (['GRID_DIR', '"""image_download_locs.csv"""'], {}), "(GRID_DIR, 'image_download_locs.csv')\n", (6439, 6476), False, 'import os\n'), ((5093, 5124), 'matplotlib.pyplot.imsave', 'plt.imsave', (['image_save_path', 'im'], {}), '(image_save_path, im)\n', (5103, 5124), True, 'import matplotlib.pyplot as plt\n')] |
# Enter script code
# AutoKey snippet: in a terminal window send Ctrl+Shift+T (new tab);
# everywhere else send plain Ctrl+T. `window` and `keyboard` are injected
# by AutoKey at runtime.
import re

active_class = window.get_active_class()
in_terminal = bool(re.search("konsole\\.konsole", active_class)
                   or re.search("x+terminal.*", active_class))
if in_terminal:
    keyboard.send_keys("<ctrl>+<shift>+t")
else:
    keyboard.send_keys("<ctrl>+t")
| [
"re.search"
] | [((84, 124), 're.search', 're.search', (['"""konsole\\\\.konsole"""', 'winClass'], {}), "('konsole\\\\.konsole', winClass)\n", (93, 124), False, 'import re\n'), ((142, 177), 're.search', 're.search', (['"""x+terminal.*"""', 'winClass'], {}), "('x+terminal.*', winClass)\n", (151, 177), False, 'import re\n')] |
# -*- coding: utf-8 -*-
""" The Neural Network classifier for IRIS. """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import urllib
import numpy as np
import tensorflow as tf
# Data sets
IRIS_TRAINING = "IRIS_data/iris_training.csv"
IRIS_TRAINING_URL = "http://download.tensorflow.org/data/iris_training.csv"
IRIS_TEST = "IRIS_data/iris_test.csv"
IRIS_TEST_URL = "http://download.tensorflow.org/data/iris_test.csv"
def main():
    """Train a 3-layer DNN on the IRIS data set, evaluate its accuracy, and
    classify two new flower samples."""
    # Bug fix: `import urllib` alone does not guarantee the `urllib.request`
    # submodule is loaded, so import it explicitly before using urlopen.
    import urllib.request

    # If the training and test sets aren't stored locally, download them.
    if not os.path.exists(IRIS_TRAINING):
        raw = urllib.request.urlopen(IRIS_TRAINING_URL).read().decode()
        with open(IRIS_TRAINING, "w") as f:
            f.write(raw)
    if not os.path.exists(IRIS_TEST):
        raw = urllib.request.urlopen(IRIS_TEST_URL).read().decode()
        with open(IRIS_TEST, "w") as f:
            f.write(raw)

    # Load datasets. (np.float / np.int were removed in NumPy 1.24;
    # np.float64 / np.int64 are the concrete dtypes they resolved to here.)
    training_set = tf.contrib.learn.datasets.base.load_csv_with_header(
        filename=IRIS_TRAINING,
        target_dtype=np.int64,
        features_dtype=np.float64)
    test_set = tf.contrib.learn.datasets.base.load_csv_with_header(
        filename=IRIS_TEST,
        target_dtype=np.int64,
        features_dtype=np.float64)

    # Specify that all features have real-value data
    feature_columns = [tf.feature_column.numeric_column("x", shape=[4])]

    # Build 3 layers DNN with 10,20,10 units respectively
    classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
                                            hidden_units=[10, 20, 10],
                                            n_classes=3,
                                            model_dir="IRIS/iris_model")
    # Define the training inputs
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": np.array(training_set.data)},
        y=np.array(training_set.target),
        num_epochs=None,
        shuffle=True
    )

    # Train model
    classifier.train(input_fn=train_input_fn, steps=2000)

    # Define the test inputs
    test_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": np.array(test_set.data)},
        y=np.array(test_set.target),
        num_epochs=1,
        shuffle=False
    )

    # Evaluate accuracy
    accuracy_score = classifier.evaluate(input_fn=test_input_fn)["accuracy"]
    print("\nTest Accuracy: {0:f}\n".format(accuracy_score))

    # Classify two new flower samples.
    new_samples = np.array(
        [[6.4, 3.2, 4.5, 1.5],
         [5.8, 3.1, 5.0, 1.7]], dtype=np.float64
    )
    predict_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": new_samples},
        num_epochs=1,
        shuffle=False
    )
    predictions = list(classifier.predict(input_fn=predict_input_fn))
    predicted_classes = [p["classes"] for p in predictions]
    print(
        "New samples, Class Predictions: {}\n"
        .format(predicted_classes)
    )


if __name__ == "__main__":
    main()
| [
"os.path.exists",
"tensorflow.estimator.DNNClassifier",
"tensorflow.contrib.learn.datasets.base.load_csv_with_header",
"tensorflow.estimator.inputs.numpy_input_fn",
"tensorflow.feature_column.numeric_column",
"numpy.array",
"urllib.request.urlopen"
] | [((982, 1107), 'tensorflow.contrib.learn.datasets.base.load_csv_with_header', 'tf.contrib.learn.datasets.base.load_csv_with_header', ([], {'filename': 'IRIS_TRAINING', 'target_dtype': 'np.int', 'features_dtype': 'np.float'}), '(filename=IRIS_TRAINING,\n target_dtype=np.int, features_dtype=np.float)\n', (1033, 1107), True, 'import tensorflow as tf\n'), ((1144, 1265), 'tensorflow.contrib.learn.datasets.base.load_csv_with_header', 'tf.contrib.learn.datasets.base.load_csv_with_header', ([], {'filename': 'IRIS_TEST', 'target_dtype': 'np.int', 'features_dtype': 'np.float'}), '(filename=IRIS_TEST,\n target_dtype=np.int, features_dtype=np.float)\n', (1195, 1265), True, 'import tensorflow as tf\n'), ((1490, 1623), 'tensorflow.estimator.DNNClassifier', 'tf.estimator.DNNClassifier', ([], {'feature_columns': 'feature_columns', 'hidden_units': '[10, 20, 10]', 'n_classes': '(3)', 'model_dir': '"""IRIS/iris_model"""'}), "(feature_columns=feature_columns, hidden_units=[\n 10, 20, 10], n_classes=3, model_dir='IRIS/iris_model')\n", (1516, 1623), True, 'import tensorflow as tf\n'), ((2492, 2562), 'numpy.array', 'np.array', (['[[6.4, 3.2, 4.5, 1.5], [5.8, 3.1, 5.0, 1.7]]'], {'dtype': 'np.float'}), '([[6.4, 3.2, 4.5, 1.5], [5.8, 3.1, 5.0, 1.7]], dtype=np.float)\n', (2500, 2562), True, 'import numpy as np\n'), ((2608, 2697), 'tensorflow.estimator.inputs.numpy_input_fn', 'tf.estimator.inputs.numpy_input_fn', ([], {'x': "{'x': new_samples}", 'num_epochs': '(1)', 'shuffle': '(False)'}), "(x={'x': new_samples}, num_epochs=1,\n shuffle=False)\n", (2642, 2697), True, 'import tensorflow as tf\n'), ((590, 619), 'os.path.exists', 'os.path.exists', (['IRIS_TRAINING'], {}), '(IRIS_TRAINING)\n', (604, 619), False, 'import os\n'), ((778, 803), 'os.path.exists', 'os.path.exists', (['IRIS_TEST'], {}), '(IRIS_TEST)\n', (792, 803), False, 'import os\n'), ((1364, 1412), 'tensorflow.feature_column.numeric_column', 'tf.feature_column.numeric_column', (['"""x"""'], {'shape': '[4]'}), "('x', 
shape=[4])\n", (1396, 1412), True, 'import tensorflow as tf\n'), ((1895, 1924), 'numpy.array', 'np.array', (['training_set.target'], {}), '(training_set.target)\n', (1903, 1924), True, 'import numpy as np\n'), ((2193, 2218), 'numpy.array', 'np.array', (['test_set.target'], {}), '(test_set.target)\n', (2201, 2218), True, 'import numpy as np\n'), ((1855, 1882), 'numpy.array', 'np.array', (['training_set.data'], {}), '(training_set.data)\n', (1863, 1882), True, 'import numpy as np\n'), ((2157, 2180), 'numpy.array', 'np.array', (['test_set.data'], {}), '(test_set.data)\n', (2165, 2180), True, 'import numpy as np\n'), ((635, 676), 'urllib.request.urlopen', 'urllib.request.urlopen', (['IRIS_TRAINING_URL'], {}), '(IRIS_TRAINING_URL)\n', (657, 676), False, 'import urllib\n'), ((819, 856), 'urllib.request.urlopen', 'urllib.request.urlopen', (['IRIS_TEST_URL'], {}), '(IRIS_TEST_URL)\n', (841, 856), False, 'import urllib\n')] |
from drift import *
from hard_edge_transport import *
from hard_edge_sol import *
from accel import *
import sys
class Stage(HardEdgeTransport):
    """
    A final cooling stage comprises:
    HardEdgeTransport with transport field comprising:
    (1) Drift (d1)
    (2) HardEdgeSol
    (3) Drift (d2)
    (4) Accel (Model 1 for now)
    (5) Drift (d3)
    """
    num_params = 3
    # NOTE(review): several 'desc'/'doc' strings below (e.g. 'freq', 'grad',
    # 'phase', 'rect_cyn', 'mode', 'hard_edge_sol', 'drift1'..'drift3') look
    # copy-pasted from the 'outstep' entry -- confirm the intended wording.
    # Fixed here: 'type' was misspelled 'FLoat' for the three drift lengths,
    # which is inconsistent with every other 'Float' entry and would defeat a
    # case-sensitive type check in check_command_params_init.
    command_params_ext = {
        'd1_len': {'desc': 'Length of drift 1',
                   'doc': 'Initial drift region of stage length from entrance of stage to HardEdgeSol',
                   'type': 'Float',
                   'req': True,
                   'pos': None},
        'd2_len': {'desc': 'Length of drift 2',
                   'doc': 'Drift region between HardEdgeSol and Accel',
                   'type': 'Float',
                   'req': True,
                   'pos': None},
        'd3_len': {'desc': 'Length of drift 3',
                   'doc': 'Drift region between Accel and exit of stage',
                   'type': 'Float',
                   'req': True,
                   'pos': None},
        'transport_field': {'desc': 'Transport field strength (Tesla)',
                            'doc': '',
                            'type': 'Float',
                            'req': True,
                            'pos': None},
        'absorber_field': {'desc': 'Field strength (Tesla)',
                           'doc': '',
                           'type': 'Float',
                           'req': True,
                           'pos': None},
        'absorber_length': {'desc': 'Length of absorber region',
                            'doc': '',
                            'type': 'Float',
                            'req': True,
                            'pos': None},
        'rf_length': {'desc': 'Length of rf region',
                      'doc': '',
                      'type': 'Float',
                      'req': True,
                      'pos': None},
        'zstep': {'desc': 'Z step',
                  'doc': '',
                  'type': 'Float',
                  'req': True,
                  'pos': None},
        'outstep': {'desc': 'Output stepping (Meter)',
                    'doc': 'Increment for output steps for constant B Field region',
                    'type': 'Float',
                    'req': True,
                    'pos': None},
        'rhigh': {'desc': 'R high',
                  'doc': '',
                  'type': 'Float',
                  'req': True,
                  'pos': None},
        'hard_edge_sol': {'desc': 'Output stepping (Meter)',
                          'doc': 'Increment for output steps for constant B Field region',
                          'type': 'HardEdgeSol',
                          'req': False,
                          'pos': None},
        'drift1': {'desc': 'Output stepping (Meter)',
                   'doc': 'Increment for output steps for constant B Field region',
                   'type': 'Drift',
                   'req': False,
                   'pos': None},
        'drift2': {'desc': 'Output stepping (Meter)',
                   'doc': 'Increment for output steps for constant B Field region',
                   'type': 'Drift',
                   'req': False,
                   'pos': None},
        'drift3': {'desc': 'Output stepping (Meter)',
                   'doc': 'Increment for output steps for constant B Field region',
                   'type': 'Drift',
                   'req': False,
                   'pos': None},
        'freq': {'desc': 'Frequency in MHz',
                 'doc': 'Increment for output steps for constant B Field region',
                 'type': 'Float',
                 'req': True,
                 'pos': None},
        'grad': {'desc': 'Gradient on-axis at center of gap [MV/m]',
                 'doc': 'Increment for output steps for constant B Field region',
                 'type': 'Float',
                 'req': True,
                 'pos': None},
        'phase': {'desc': 'Phase shift [deg] {0-360}.',
                  'doc': 'Increment for output steps for constant B Field region',
                  'type': 'Float',
                  'req': True,
                  'pos': None},
        'rect_cyn': {'desc': 'Phase shift [deg] {0-360}.',
                     'doc': 'Increment for output steps for constant B Field region',
                     'type': 'Float',
                     'req': True,
                     'pos': None},
        'mode': {'desc': 'Phase shift [deg] {0-360}.',
                 'doc': 'Increment for output steps for constant B Field region',
                 'type': 'Float',
                 'req': True,
                 'pos': None}}

    def __init__(self, **kwargs):
        """
        Validate kwargs against command_params_ext, then assemble the stage's
        enclosed command sequence: drift1, absorber (HardEdgeSol), drift2,
        rf region (Accel inside an SRegion), drift3.
        """
        if ICoolObject.check_command_params_init(self, Stage.command_params_ext, **kwargs) is False:
            # NOTE(review): exiting with status 0 on a validation failure looks
            # unintentional -- a non-zero exit code would signal the error.
            sys.exit(0)
        # The stage itself is a transport region with a constant solenoid field.
        HardEdgeTransport.__init__(self, flip=False, bs=self.transport_field)
        drift1 = Drift(slen=self.d1_len, zstep=self.zstep, rhigh=self.rhigh, outstep=self.outstep)
        drift2 = Drift(slen=self.d2_len, zstep=self.zstep, rhigh=self.rhigh, outstep=self.outstep)
        drift3 = Drift(slen=self.d3_len, zstep=self.zstep, rhigh=self.rhigh, outstep=self.outstep)
        rf = Accel(model='ez', freq=self.freq, phase=self.phase, grad=self.grad, rect_cyn=self.rect_cyn, mode=self.mode)
        # Liquid-hydrogen ('LH') absorber inside its own solenoid field.
        hard_edge_sol = HardEdgeSol(slen=self.absorber_length, outstep=self.outstep, mtag='LH', geom='CBLOCK', zstep=self.zstep, bs=self.absorber_field, rhigh=self.rhigh)
        self.add_enclosed_command(drift1)
        self.add_enclosed_command(hard_edge_sol)
        self.add_enclosed_command(drift2)
        # The rf cavity lives in a vacuum subregion of a dedicated SRegion.
        rf_region = SRegion(slen=self.rf_length, nrreg=1, zstep=self.zstep)
        material = Material(mtag='VAC', geom='CBLOCK')
        rf_subregion = SubRegion(irreg=1, rlow=0, rhigh=self.rhigh, field=rf, material=material)
        rf_region.add_enclosed_command(rf_subregion)
        self.add_enclosed_command(rf_region)
        self.add_enclosed_command(drift3)

    def __call__(self, **kwargs):
        # Intentionally a no-op; construction happens entirely in __init__.
        pass

    def __setattr__(self, name, value):
        # Route attribute writes through the ICOOL validation machinery.
        self.__icool_setattr__(name, value)

    def __str__(self):
        return 'Stage'

    def gen_for001(self, file):
        """Emit this stage's for001 input by delegating to the transport."""
        HardEdgeTransport.gen_for001(self, file)
| [
"sys.exit"
] | [((4802, 4813), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4810, 4813), False, 'import sys\n')] |
import math
class Vec3:
    """A three-component vector of floats with basic arithmetic support."""

    def __init__(self, x=.0, y=.0, z=.0):
        # Coerce all components up front so later arithmetic is purely float-based.
        self.x, self.y, self.z = float(x), float(y), float(z)

    def __str__(self):
        return "Vector(%.4f, %.4f, %.4f)" % (self.x, self.y, self.z)

    def __add__(self, vec):
        """Component-wise sum with another Vec3."""
        return Vec3(vec.x + self.x, vec.y + self.y, vec.z + self.z)

    def __sub__(self, vec):
        """Component-wise difference with another Vec3."""
        return Vec3(self.x - vec.x, self.y - vec.y, self.z - vec.z)

    def __mul__(self, num):
        """Scale every component by the scalar `num`."""
        return Vec3(num * self.x, num * self.y, num * self.z)

    def __rmul__(self, num):
        # Scalar-on-the-left multiplication delegates to __mul__.
        return self * num

    def __truediv__(self, num):
        """Divide every component by the scalar `num`."""
        return Vec3(self.x / num, self.y / num, self.z / num)

    def dotproduct(self, vec):
        """Scalar (dot) product with another Vec3."""
        return self.x * vec.x + self.y * vec.y + self.z * vec.z

    def magnitude(self):
        """Euclidean length of the vector."""
        return math.sqrt(self.dotproduct(self))

    def normalize(self):
        """Return a unit-length copy (ZeroDivisionError on the zero vector)."""
        return self / self.magnitude()
| [
"math.sqrt"
] | [((830, 880), 'math.sqrt', 'math.sqrt', (['(self.x ** 2 + self.y ** 2 + self.z ** 2)'], {}), '(self.x ** 2 + self.y ** 2 + self.z ** 2)\n', (839, 880), False, 'import math\n')] |
import os
import re
import shutil
from ._base import DanubeCloudCommand, CommandOption, CommandError, lcd
class Command(DanubeCloudCommand):
    """Build the API docs and User Guide that the GUI serves as static files."""
    help = 'Generate documentation files displayed in GUI.'
    DOC_REPO = 'https://github.com/erigones/esdc-docs.git'
    DOC_TMP_DIR = '/var/tmp/esdc-docs'
    options = (
        CommandOption('--api', '--api-only', action='store_true', dest='api_only', default=False,
                      help='Generate only the API documentation.'),
        CommandOption('--user-guide', '--user-guide-only', action='store_true', dest='user_guide_only', default=False,
                      help='Generate only the User Guide.'),
    )
    def gendoc_api(self):
        """Generate API documentation: sphinx docs, a downloadable `es` client
        script, and its bash completion file, all under api/static/api/."""
        with lcd(self.PROJECT_DIR):
            doc_dir = self._path(self.PROJECT_DIR, 'doc', 'api')
            doc_dst = self._path(self.PROJECT_DIR, 'api', 'static', 'api', 'doc')
            bin_dst = self._path(self.PROJECT_DIR, 'api', 'static', 'api', 'bin')
            # Build sphinx docs
            with lcd(doc_dir):
                self.local('make esdc-clean; make esdc ESDOCDIR="%s"' % doc_dst)
            # Create es script suitable for download
            es_src = self._path(self.PROJECT_DIR, 'bin', 'es')
            es_dst = self._path(bin_dst, 'es')
            es_current = os.path.join(self.settings.PROJECT_DIR, 'var', 'www', 'static', 'api', 'bin', 'es')
            api_url = "API_URL = '%s'" % (self.settings.SITE_LINK + '/api')
            # Preserve an operator-customized API_URL from the already-deployed
            # copy of `es`, if one exists, instead of the SITE_LINK default.
            if os.path.isfile(es_current):
                with open(es_current, 'r') as es0:
                    for line in es0:
                        if line.startswith("API_URL = '"):
                            api_url = line
                            break
            # Rewrite the development default URL with the chosen one (0o644 so
            # the web server can read the published script).
            with open(es_src) as es1:
                with os.fdopen(os.open(es_dst, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o644), 'w') as es2:
                    es2.write(es1.read().replace("API_URL = 'http://127.0.0.1:8000/api'", api_url))
            # Copy es_bash_completion.sh to download location
            es_bc_src = self._path(doc_dir, 'es_bash_completion.sh')
            self.local('cp %s %s' % (es_bc_src, bin_dst))
            self.display('API documentation built successfully.', color='green')
    def gendoc_user_guide(self, fallback_branch='master'):
        """Generate the User Guide: clone/update the esdc-docs repo into
        DOC_TMP_DIR, check out the branch/tag matching this checkout (falling
        back to `fallback_branch`), and build sphinx docs into gui/static."""
        doc_dst = self._path(self.PROJECT_DIR, 'gui', 'static', 'user-guide')
        with lcd(self.PROJECT_DIR):
            try:
                branch = self.get_git_version()[0]  # Git tag or branch name
            except CommandError:
                self.display('Could not determine our branch or tag', color='yellow')
                branch = fallback_branch
                self.display('Falling back to "%s" branch' % branch, color='yellow')
            else:
                self.display('We are on branch "%s"' % branch)
        # A conf.py inside the checkout is the marker of a usable existing repo.
        if self._path_exists(self.DOC_TMP_DIR, 'user-guide', 'conf.py'):
            existing_repo = True
            self.display('%s already exists in %s' % (self.DOC_REPO, self.DOC_TMP_DIR), color='yellow')
            with lcd(self.DOC_TMP_DIR):
                self.local('git fetch')
                self.display('%s has been successfully updated.' % self.DOC_REPO, color='green')
        else:
            if self._path_exists(self.DOC_TMP_DIR):
                # NOTE(review): the '%s' placeholder is never substituted here --
                # DOC_TMP_DIR is passed as a separate positional argument; confirm
                # display() %-formats its args, otherwise the path is not shown.
                self.display('Removing stale %s', self.DOC_TMP_DIR, color='yellow')
                shutil.rmtree(self.DOC_TMP_DIR)
            existing_repo = False
            self.local('git clone %s %s' % (self.DOC_REPO, self.DOC_TMP_DIR))
            self.display('%s has been successfully cloned.' % self.DOC_TMP_DIR, color='green')
        with lcd(self.DOC_TMP_DIR):
            # raise_on_error=False: a failed checkout triggers the fallback branch.
            if self.local('git checkout %s' % branch, raise_on_error=False) != 0:
                self.display('Could not checkout esdc-docs branch "%s"' % branch, color='yellow')
                branch = fallback_branch
                self.display('Falling back to "%s" branch' % branch, color='yellow')
                self.local('git checkout %s' % branch)
            self.display('Checked out esdc-docs branch "%s"' % branch, color='green')
            # If the branch is not a tag name (tags look like "vN..."), then we
            # need to merge/pull to bring the existing repo up to date.
            if existing_repo and not re.search('^v[0-9]', branch):
                self.local('git merge --ff-only origin/%s' % branch)
                self.display('Merged esdc-docs branch "%s"' % branch, color='green')
        # Build sphinx docs
        with lcd(self._path(self.DOC_TMP_DIR, 'user-guide')):
            self.local('make esdc-clean; make esdc ESDOCDIR="%s"' % doc_dst)
        self.display('User guide built successfully.', color='green')
    def handle(self, api_only=False, user_guide_only=False, **options):
        """Build the selected documentation set(s); with no flags (or both
        flags) build everything."""
        # Both flags together fall through to the build-everything path below.
        if api_only and user_guide_only:
            pass
        elif api_only:
            self.gendoc_api()
            return
        elif user_guide_only:
            self.gendoc_user_guide()
            return
        self.gendoc_api()
        self.display('\n\n', stderr=True)
        self.gendoc_user_guide()
| [
"os.open",
"os.path.join",
"os.path.isfile",
"shutil.rmtree",
"re.search"
] | [((1337, 1424), 'os.path.join', 'os.path.join', (['self.settings.PROJECT_DIR', '"""var"""', '"""www"""', '"""static"""', '"""api"""', '"""bin"""', '"""es"""'], {}), "(self.settings.PROJECT_DIR, 'var', 'www', 'static', 'api',\n 'bin', 'es')\n", (1349, 1424), False, 'import os\n'), ((1513, 1539), 'os.path.isfile', 'os.path.isfile', (['es_current'], {}), '(es_current)\n', (1527, 1539), False, 'import os\n'), ((3458, 3489), 'shutil.rmtree', 'shutil.rmtree', (['self.DOC_TMP_DIR'], {}), '(self.DOC_TMP_DIR)\n', (3471, 3489), False, 'import shutil\n'), ((4293, 4321), 're.search', 're.search', (['"""^v[0-9]"""', 'branch'], {}), "('^v[0-9]', branch)\n", (4302, 4321), False, 'import re\n'), ((1835, 1894), 'os.open', 'os.open', (['es_dst', '(os.O_WRONLY | os.O_CREAT | os.O_TRUNC)', '(420)'], {}), '(es_dst, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 420)\n', (1842, 1894), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
Created on 09 Nov 2020 22:25:38
@author: jiahuei
cd caption_vae
python -m scripts.plot_nonzero_weights_kde --log_dir x --id y
/home/jiahuei/Documents/1_TF_files/prune/mscoco_v3
word_w256_LSTM_r512_h1_ind_xu_REG_1.0e+02_init_5.0_L1_wg_60.0_ann_sps_0.975_dec_prune_cnnFT/run_01_sparse
/home/jiahuei/Documents/1_TF_files/relation_trans/mscoco_v1
UpDownLSTM__supermask__0.991__wg_120.0
RTrans__supermask__0.991__wg_120.0
"""
import os
import logging
import torch
import numpy as np
import seaborn as sns
from scipy.stats import mstats
from matplotlib import pyplot as plt
from argparse import ArgumentParser, Namespace, ArgumentDefaultsHelpFormatter
from pruning import prune
from utils.model_utils import densify_state_dict
from utils.misc import replace_from_right, configure_logging
from utils.file import list_dir
from utils.config import Config
logger = logging.getLogger(__name__)
# Pre-built color palettes for the figures, created once at import time.
# Not all of them are used in this file -- presumably shared scratch choices
# kept around for quick experimentation.
gray3 = sns.color_palette("gray_r", n_colors=3)
crest3 = sns.color_palette("crest_r", n_colors=3)
summer3 = sns.color_palette("summer_r", n_colors=4)[1:]
mako3 = sns.color_palette("mako_r", n_colors=3)
flare3 = sns.color_palette("flare", n_colors=3)
blue3 = sns.cubehelix_palette(3, start=.5, rot=-.5)
cranberry3 = sns.dark_palette("#b2124d", n_colors=3, reverse=True)[:3]
coffee3 = sns.dark_palette("#a6814c", n_colors=4, reverse=True)[:3]
# sns.set_theme(style="darkgrid", rc={"legend.loc": "lower left", "legend.framealpha": 0.7})
# Global seaborn theme; is_white_style() below keys off the facecolor this sets.
sns.set_theme(
    style="whitegrid",
    rc={
        "axes.edgecolor": ".3", "grid.color": "0.9", # "axes.grid.axis": "y",
        "legend.loc": "lower left", "legend.framealpha": "0.6"
    }
)
def is_white_style():
    """Return True when the active matplotlib theme uses a white axes background."""
    facecolor = plt.rcParams["axes.facecolor"]
    return facecolor == "white"
def despine_white(fig):
    """Keep all four spines drawn on `fig` when the whitegrid theme is active.

    Does nothing for non-white (dark) styles.
    """
    if not is_white_style():
        return
    # Despine whitegrid: every side explicitly kept, nothing trimmed or offset.
    sns.despine(fig=fig, top=False, right=False, left=False, bottom=False, offset=None, trim=False)
def process_output_path(output_path):
    """Tag `output_path` with the active theme: " (w)" for white, " (d)" for dark.

    The tag is inserted between the file stem and its extension.
    """
    root, ext = os.path.splitext(output_path)
    theme_tag = " (w)" if is_white_style() else " (d)"
    return root + theme_tag + ext
class KDE:
    """Plot kernel-density estimates of the nonzero weights of pruned models."""
    # Seaborn context name passed to sns.set_context() before plotting.
    CONTEXT = "paper"
    FIG_SCALE = 1.5
    FIG_DPI = 600
    # Human-readable figure titles for each pruning-type constant.
    PRUNE_TYPE_TITLE = {
        prune.REGULAR: "Proposed",
        prune.MAG_GRAD_BLIND: "Gradual (blind)",
        prune.MAG_GRAD_UNIFORM: "Gradual (uniform)",
        prune.MAG_GRAD_DIST: "Gradual (distribution)",
        prune.LOTTERY_MASK_FREEZE: "Lottery (gradual)", # For now, we only pair this with MAG_GRAD_UNIFORM
        prune.LOTTERY_MAG_BLIND: "Lottery (hard-blind)",
        prune.LOTTERY_MAG_UNIFORM: "Lottery (hard-uniform)",
        prune.LOTTERY_MAG_DIST: "Lottery (hard-distribution)",
        prune.MAG_BLIND: "Hard-blind",
        prune.MAG_UNIFORM: "Hard-uniform",
        prune.MAG_DIST: "Hard-distribution",
        prune.SNIP: "SNIP",
    }
    def __init__(self):
        self.config = self.parse_opt()
        # model_file is a comma-separated priority list of checkpoint filenames.
        self.config.model_file = self.config.model_file.split(",")
    def __call__(self, model_dir, visualise_weights_only=True):
        """Load weights from `model_dir` and save winsorized KDE plots there.

        Tries the checkpoints listed in config.model_file first; on
        FileNotFoundError falls back to a cached `nonzero_weights_flat.npy`
        dump with a hard-coded model_config. Returns None early when no
        checkpoint exists or the pruning type is unsupported.
        """
        print(f"Processing `{model_dir}`")
        try:
            model_config = Config.load_config_json(os.path.join(model_dir, "config.json"))
            ckpt_path = [os.path.join(model_dir, _) for _ in self.config.model_file]
            # Keep only checkpoints that actually exist, preserving priority order.
            ckpt_path = list(filter(os.path.isfile, ckpt_path))
            if len(ckpt_path) > 0:
                ckpt_path = ckpt_path[0]
            else:
                return None
            state_dict = densify_state_dict(torch.load(ckpt_path, map_location=torch.device("cpu")))
            print(f"Model weights loaded from `{ckpt_path}`")
            if visualise_weights_only:
                # Drop biases / norm params; keep only tensors named "*weight*".
                state_dict = {k: v for k, v in state_dict.items() if "weight" in k}
            flat_weights_np = np.concatenate([_.view(-1).numpy() for _ in state_dict.values()])
        except FileNotFoundError:
            # No config.json: reuse the flat-weight cache written on a prior run.
            flat_weights_np = np.load(os.path.join(model_dir, "nonzero_weights_flat.npy"))
            model_config = {
                # Just hard-code this for now
                "caption_model": "Soft-Attention LSTM",
                "prune_type": prune.REGULAR if "REG" in model_dir else prune.MAG_GRAD_BLIND,
                "prune_sparsity_target": 0.975
            }
        nonzero_weights = flat_weights_np[flat_weights_np != 0]
        # Cache the flat nonzero weights for the FileNotFoundError path above.
        np.save(os.path.join(model_dir, "nonzero_weights_flat.npy"), nonzero_weights)
        # Output Naming
        # NOTE(review): if "caption_model" is missing, net_name is None and
        # .endswith raises -- presumably every config.json sets it; confirm.
        net_name = model_config.get("caption_model", None)
        if net_name.endswith("_prune"):
            net_name = replace_from_right(net_name, "_prune", "", 1)
        # net_name = net_name.replace("net", "Net")
        output_suffix = net_name
        fig_title = ""
        pruning_type = model_config.get("prune_type", "")
        if pruning_type:
            if pruning_type == prune.MASK_FREEZE:
                logger.warning(f"Mask type = {prune.MASK_FREEZE} not supported")
                return None
            try:
                fig_title = f"{self.PRUNE_TYPE_TITLE[pruning_type]}, "
            except KeyError:
                raise ValueError(f"Invalid pruning type: `{pruning_type}`")
            sparsity = model_config.get("prune_sparsity_target", 0) * 100
            fig_title += f"{sparsity:.1f}% sparse, "
            # TexStudio cannot accept filename with dot
            output_suffix += f"_{int(sparsity)}_{pruning_type}"
        fig_title += " ".join(_.title() for _ in net_name.split("_"))
        fig_title = fig_title.replace("Lstm", "LSTM")
        # TexStudio will annoyingly highlight underscores in filenames
        output_suffix = output_suffix.replace("_", "-")
        # Histogram and KDE
        # Two plots with different winsorization limits to tame the tails.
        for i, clip_pct in enumerate([0.005, 0.001]):
            # noinspection PyTypeChecker
            self.plot_kde(
                data=mstats.winsorize(nonzero_weights, limits=clip_pct),
                # TexStudio will annoyingly highlight underscores in filenames
                output_fig_path=process_output_path(os.path.join(model_dir, f"KDE-{i}-{output_suffix}.png")),
                fig_title="",
                fig_footnote=f"* {clip_pct * 100:.1f}% winsorization",
            )
            logger.info(f"Saved graph: clip percent = {clip_pct} (as float between 0. and 1.)")
        print("")
    def plot_kde(self, data, output_fig_path, fig_title, fig_footnote=None):
        """Render a filled KDE of `data` and save the figure to `output_fig_path`.

        `fig_title` is optional (empty string skips it); `fig_footnote`, when a
        string, is printed small in the bottom-right corner.
        """
        sns.set_context(self.CONTEXT)
        # colours = ("goldenrod", "sandybrown", "chocolate", "peru")
        # colours = ("c", "cadetblue", "lightseagreen", "skyblue")
        fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(4. * self.FIG_SCALE, 3. * self.FIG_SCALE))
        ax = sns.kdeplot(
            data,
            fill=True, common_norm=False, # palette="crest",
            alpha=.5, linewidth=0,
            color="c",
            ax=ax,
        )
        if fig_title:
            ax.set_title(fig_title, pad=plt.rcParams["font.size"] * 1.5)
        if isinstance(fig_footnote, str):
            plt.figtext(
                0.90, 0.025,
                fig_footnote,
                horizontalalignment="right",
                fontsize="xx-small",
            )
        despine_white(fig)
        # Adjust margins and layout
        plt.tight_layout(pad=1.5)
        plt.savefig(output_fig_path, dpi=self.FIG_DPI)
        print(f"Saved figure: `{output_fig_path}`")
        # Release figure memory; this script may plot many directories in a row.
        plt.clf()
        plt.close("all")
    @staticmethod
    def parse_opt() -> Namespace:
        """Parse this script's command-line options."""
        # fmt: off
        # noinspection PyTypeChecker
        parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
        parser.add_argument(
            "--log_dir", type=str, default="",
            help="str: Logging / Saving directory."
        )
        parser.add_argument(
            "--id", type=str, default="",
            help="An id identifying this run/job."
        )
        parser.add_argument(
            "--model_file", type=str, default="model_best_pruned_sparse.pth,model_best.pth",
            help="str: Model checkpoint file."
        )
        parser.add_argument(
            "--logging_level",
            type=str,
            default="INFO",
            choices=["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"],
            help="str: Logging level.",
        )
        args = parser.parse_args()
        return args
if __name__ == '__main__':
    # Keep library logging quiet; progress is reported via print() in KDE.
    configure_logging("WARNING")
    kde = KDE()
    # With --id, plot just that run directory; otherwise every subdirectory
    # of --log_dir is treated as a run.
    if kde.config.id:
        dirs = [os.path.join(kde.config.log_dir, kde.config.id)]
    else:
        dirs = list(filter(os.path.isdir, list_dir(kde.config.log_dir)))
    for d in dirs:
        kde(d)
| [
"logging.getLogger",
"seaborn.cubehelix_palette",
"utils.misc.configure_logging",
"scipy.stats.mstats.winsorize",
"seaborn.color_palette",
"seaborn.despine",
"argparse.ArgumentParser",
"matplotlib.pyplot.figtext",
"matplotlib.pyplot.close",
"matplotlib.pyplot.savefig",
"seaborn.set_context",
"... | [((887, 914), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (904, 914), False, 'import logging\n'), ((923, 962), 'seaborn.color_palette', 'sns.color_palette', (['"""gray_r"""'], {'n_colors': '(3)'}), "('gray_r', n_colors=3)\n", (940, 962), True, 'import seaborn as sns\n'), ((972, 1012), 'seaborn.color_palette', 'sns.color_palette', (['"""crest_r"""'], {'n_colors': '(3)'}), "('crest_r', n_colors=3)\n", (989, 1012), True, 'import seaborn as sns\n'), ((1077, 1116), 'seaborn.color_palette', 'sns.color_palette', (['"""mako_r"""'], {'n_colors': '(3)'}), "('mako_r', n_colors=3)\n", (1094, 1116), True, 'import seaborn as sns\n'), ((1126, 1164), 'seaborn.color_palette', 'sns.color_palette', (['"""flare"""'], {'n_colors': '(3)'}), "('flare', n_colors=3)\n", (1143, 1164), True, 'import seaborn as sns\n'), ((1173, 1218), 'seaborn.cubehelix_palette', 'sns.cubehelix_palette', (['(3)'], {'start': '(0.5)', 'rot': '(-0.5)'}), '(3, start=0.5, rot=-0.5)\n', (1194, 1218), True, 'import seaborn as sns\n'), ((1450, 1592), 'seaborn.set_theme', 'sns.set_theme', ([], {'style': '"""whitegrid"""', 'rc': "{'axes.edgecolor': '.3', 'grid.color': '0.9', 'legend.loc': 'lower left',\n 'legend.framealpha': '0.6'}"}), "(style='whitegrid', rc={'axes.edgecolor': '.3', 'grid.color':\n '0.9', 'legend.loc': 'lower left', 'legend.framealpha': '0.6'})\n", (1463, 1592), True, 'import seaborn as sns\n'), ((1023, 1064), 'seaborn.color_palette', 'sns.color_palette', (['"""summer_r"""'], {'n_colors': '(4)'}), "('summer_r', n_colors=4)\n", (1040, 1064), True, 'import seaborn as sns\n'), ((1230, 1283), 'seaborn.dark_palette', 'sns.dark_palette', (['"""#b2124d"""'], {'n_colors': '(3)', 'reverse': '(True)'}), "('#b2124d', n_colors=3, reverse=True)\n", (1246, 1283), True, 'import seaborn as sns\n'), ((1298, 1351), 'seaborn.dark_palette', 'sns.dark_palette', (['"""#a6814c"""'], {'n_colors': '(4)', 'reverse': '(True)'}), "('#a6814c', n_colors=4, reverse=True)\n", (1314, 1351), 
True, 'import seaborn as sns\n'), ((1973, 2002), 'os.path.splitext', 'os.path.splitext', (['output_path'], {}), '(output_path)\n', (1989, 2002), False, 'import os\n'), ((8406, 8434), 'utils.misc.configure_logging', 'configure_logging', (['"""WARNING"""'], {}), "('WARNING')\n", (8423, 8434), False, 'from utils.misc import replace_from_right, configure_logging\n'), ((1807, 1906), 'seaborn.despine', 'sns.despine', ([], {'fig': 'fig', 'top': '(False)', 'right': '(False)', 'left': '(False)', 'bottom': '(False)', 'offset': 'None', 'trim': '(False)'}), '(fig=fig, top=False, right=False, left=False, bottom=False,\n offset=None, trim=False)\n', (1818, 1906), True, 'import seaborn as sns\n'), ((6426, 6455), 'seaborn.set_context', 'sns.set_context', (['self.CONTEXT'], {}), '(self.CONTEXT)\n', (6441, 6455), True, 'import seaborn as sns\n'), ((6610, 6699), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)', 'figsize': '(4.0 * self.FIG_SCALE, 3.0 * self.FIG_SCALE)'}), '(nrows=1, ncols=1, figsize=(4.0 * self.FIG_SCALE, 3.0 * self.\n FIG_SCALE))\n', (6622, 6699), True, 'from matplotlib import pyplot as plt\n'), ((6706, 6799), 'seaborn.kdeplot', 'sns.kdeplot', (['data'], {'fill': '(True)', 'common_norm': '(False)', 'alpha': '(0.5)', 'linewidth': '(0)', 'color': '"""c"""', 'ax': 'ax'}), "(data, fill=True, common_norm=False, alpha=0.5, linewidth=0,\n color='c', ax=ax)\n", (6717, 6799), True, 'import seaborn as sns\n'), ((7274, 7299), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {'pad': '(1.5)'}), '(pad=1.5)\n', (7290, 7299), True, 'from matplotlib import pyplot as plt\n'), ((7308, 7354), 'matplotlib.pyplot.savefig', 'plt.savefig', (['output_fig_path'], {'dpi': 'self.FIG_DPI'}), '(output_fig_path, dpi=self.FIG_DPI)\n', (7319, 7354), True, 'from matplotlib import pyplot as plt\n'), ((7415, 7424), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (7422, 7424), True, 'from matplotlib import pyplot as plt\n'), ((7433, 7449), 
'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (7442, 7449), True, 'from matplotlib import pyplot as plt\n'), ((7576, 7637), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'formatter_class': 'ArgumentDefaultsHelpFormatter'}), '(formatter_class=ArgumentDefaultsHelpFormatter)\n', (7590, 7637), False, 'from argparse import ArgumentParser, Namespace, ArgumentDefaultsHelpFormatter\n'), ((4392, 4443), 'os.path.join', 'os.path.join', (['model_dir', '"""nonzero_weights_flat.npy"""'], {}), "(model_dir, 'nonzero_weights_flat.npy')\n", (4404, 4443), False, 'import os\n'), ((4609, 4654), 'utils.misc.replace_from_right', 'replace_from_right', (['net_name', '"""_prune"""', '""""""', '(1)'], {}), "(net_name, '_prune', '', 1)\n", (4627, 4654), False, 'from utils.misc import replace_from_right, configure_logging\n'), ((7035, 7127), 'matplotlib.pyplot.figtext', 'plt.figtext', (['(0.9)', '(0.025)', 'fig_footnote'], {'horizontalalignment': '"""right"""', 'fontsize': '"""xx-small"""'}), "(0.9, 0.025, fig_footnote, horizontalalignment='right', fontsize\n ='xx-small')\n", (7046, 7127), True, 'from matplotlib import pyplot as plt\n'), ((8489, 8536), 'os.path.join', 'os.path.join', (['kde.config.log_dir', 'kde.config.id'], {}), '(kde.config.log_dir, kde.config.id)\n', (8501, 8536), False, 'import os\n'), ((3207, 3245), 'os.path.join', 'os.path.join', (['model_dir', '"""config.json"""'], {}), "(model_dir, 'config.json')\n", (3219, 3245), False, 'import os\n'), ((3272, 3298), 'os.path.join', 'os.path.join', (['model_dir', '_'], {}), '(model_dir, _)\n', (3284, 3298), False, 'import os\n'), ((8590, 8618), 'utils.file.list_dir', 'list_dir', (['kde.config.log_dir'], {}), '(kde.config.log_dir)\n', (8598, 8618), False, 'from utils.file import list_dir\n'), ((3973, 4024), 'os.path.join', 'os.path.join', (['model_dir', '"""nonzero_weights_flat.npy"""'], {}), "(model_dir, 'nonzero_weights_flat.npy')\n", (3985, 4024), False, 'import os\n'), ((5870, 5920), 
'scipy.stats.mstats.winsorize', 'mstats.winsorize', (['nonzero_weights'], {'limits': 'clip_pct'}), '(nonzero_weights, limits=clip_pct)\n', (5886, 5920), False, 'from scipy.stats import mstats\n'), ((3597, 3616), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3609, 3616), False, 'import torch\n'), ((6053, 6108), 'os.path.join', 'os.path.join', (['model_dir', 'f"""KDE-{i}-{output_suffix}.png"""'], {}), "(model_dir, f'KDE-{i}-{output_suffix}.png')\n", (6065, 6108), False, 'import os\n')] |
from django.contrib.auth import get_user_model
from django.db import models
class Log(models.Model):
    """
    Model that describe a Log object, it contains information about daily
    executions.
    """
    # Total execution time of the daily run (units not stated here -- confirm).
    execution_time = models.FloatField()
    # Number of users processed by the run.
    users = models.IntegerField()
    # Number of lessons processed by the run.
    lessons = models.IntegerField()
    # Set automatically when the log row is first created.
    date = models.DateField(auto_now_add=True)

    def __str__(self):
        return f"{self.date}"

    @property
    def average_user_execution_time(self):
        """Average execution time per user; 0 when no users were processed."""
        return self.execution_time / self.users if self.users > 0 else 0

    @property
    def average_lesson_execution_time(self):
        """
        This property returns a useful data about the average execution time of
        a lesson.
        Returns:
            float: average time resulted
        """
        return self.execution_time / self.lessons if self.lessons > 0 else 0
class Feedback(models.Model):
    """
    Model that describe a user feedback of the daily reservations.
    """
    # Author of the feedback; the row survives user deletion (SET_NULL).
    user = models.ForeignKey(get_user_model(), on_delete=models.SET_NULL, related_name='feedbacks', null=True)
    # True when the user judged the daily reservations to be OK.
    ok = models.BooleanField()
    # Set automatically when the feedback is first created.
    date = models.DateField(auto_now_add=True)

    def __str__(self):
        # NOTE(review): raises AttributeError once user has been set to NULL
        # by SET_NULL -- confirm whether orphaned feedback is ever rendered.
        return f'{self.user.username} {self.ok}'
class Stats(models.Model):
    """Aggregate counters (currently only unsubscribed users)."""
    # Running count of unsubscribed users; presumably incremented by the
    # daily job -- confirm against the writer of this model.
    unsubscribed_users = models.IntegerField(default=0)
| [
"django.contrib.auth.get_user_model",
"django.db.models.DateField",
"django.db.models.FloatField",
"django.db.models.IntegerField",
"django.db.models.BooleanField"
] | [((230, 249), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (247, 249), False, 'from django.db import models\n'), ((262, 283), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (281, 283), False, 'from django.db import models\n'), ((298, 319), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (317, 319), False, 'from django.db import models\n'), ((331, 366), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (347, 366), False, 'from django.db import models\n'), ((1105, 1126), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1124, 1126), False, 'from django.db import models\n'), ((1138, 1173), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1154, 1173), False, 'from django.db import models\n'), ((1301, 1331), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1320, 1331), False, 'from django.db import models\n'), ((1014, 1030), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1028, 1030), False, 'from django.contrib.auth import get_user_model\n')] |
import logging
from typing import Any, Dict, List
from unittest import TestCase
import pytest
from grapl_analyzerlib.nodes.lens import LensQuery, LensView
from grapl_tests_common.clients.engagement_edge_client import EngagementEdgeClient
from grapl_tests_common.clients.graphql_endpoint_client import GraphqlEndpointClient
from grapl_tests_common.wait import WaitForCondition, WaitForQuery, wait_for_one
# Lens name the e2e pipeline is expected to create (presumably the test
# asset's hostname -- confirm against the ingested test data).
LENS_NAME = "DESKTOP-FVSHABR"
# Loose alias for the lens dict returned by the GraphQL endpoint.
GqlLensDict = Dict[str, Any]
@pytest.mark.integration_test
class TestEndToEnd(TestCase):
    def test_expected_data_in_dgraph(self) -> None:
        """The pipeline should produce a LENS_NAME lens whose scope reaches
        3-5 nodes, and the GraphQL endpoint should report a sane scope."""
        # There is some unidentified, nondeterministic failure with e2e.
        # We fall into one of three buckets:
        # - No lens
        # - Lens with 3 scope
        # - Lens with 4 scope
        # - Lens with 5 scope (correct)

        query = LensQuery().with_lens_name(LENS_NAME)
        # Poll up to 120s for the lens to appear at all.
        lens: LensView = wait_for_one(WaitForQuery(query), timeout_secs=120)
        assert lens.get_lens_name() == LENS_NAME
        # lens scope is not atomic

        def condition() -> bool:
            length = len(lens.get_scope())
            logging.info(f"Expected 3-5 nodes in scope, currently is {length}")

            # The correct answer for this is 5.
            # We are temp 'allowing' below that because it means the pipeline is, _mostly_, working.
            return length in (
                3,
                4,
                5,
            )

        # Poll up to 240s for the scope to fill in.
        wait_for_one(WaitForCondition(condition), timeout_secs=240)

        gql_client = GraphqlEndpointClient(jwt=EngagementEdgeClient().get_jwt())
        ensure_graphql_lens_scope_no_errors(gql_client, LENS_NAME)
def ensure_graphql_lens_scope_no_errors(
    gql_client: GraphqlEndpointClient,
    lens_name: str,
) -> None:
    """
    Eventually we'd want more-robust checks here, but this is an acceptable
    smoke test in the mean time.
    """
    scope = gql_client.query_for_scope(lens_name=lens_name)["scope"]
    # The e2e pipeline nondeterministically lands on 3-5 scoped nodes (5 is correct).
    assert len(scope) in (3, 4, 5)
    # Fold every node's dgraph_type list into one set of observed type names.
    observed_types = set()
    for node in scope:
        observed_types.update(node["dgraph_type"])
    assert observed_types == {"Asset", "Process"}
| [
"grapl_analyzerlib.nodes.lens.LensQuery",
"grapl_tests_common.wait.WaitForQuery",
"grapl_tests_common.wait.WaitForCondition",
"logging.info",
"grapl_tests_common.clients.engagement_edge_client.EngagementEdgeClient"
] | [((911, 930), 'grapl_tests_common.wait.WaitForQuery', 'WaitForQuery', (['query'], {}), '(query)\n', (923, 930), False, 'from grapl_tests_common.wait import WaitForCondition, WaitForQuery, wait_for_one\n'), ((1123, 1190), 'logging.info', 'logging.info', (['f"""Expected 3-5 nodes in scope, currently is {length}"""'], {}), "(f'Expected 3-5 nodes in scope, currently is {length}')\n", (1135, 1190), False, 'import logging\n'), ((1465, 1492), 'grapl_tests_common.wait.WaitForCondition', 'WaitForCondition', (['condition'], {}), '(condition)\n', (1481, 1492), False, 'from grapl_tests_common.wait import WaitForCondition, WaitForQuery, wait_for_one\n'), ((835, 846), 'grapl_analyzerlib.nodes.lens.LensQuery', 'LensQuery', ([], {}), '()\n', (844, 846), False, 'from grapl_analyzerlib.nodes.lens import LensQuery, LensView\n'), ((1560, 1582), 'grapl_tests_common.clients.engagement_edge_client.EngagementEdgeClient', 'EngagementEdgeClient', ([], {}), '()\n', (1580, 1582), False, 'from grapl_tests_common.clients.engagement_edge_client import EngagementEdgeClient\n')] |
import json
import os
from eg import config
from eg import substitute
from eg import util
from mock import Mock
from mock import patch
# Markdown fixture pair -- presumably the same document before and after the
# blank-line "squeeze" transformation; confirm against the tests that use them.
PATH_UNSQUEEZED_FILE = os.path.join(
    'test',
    'assets',
    'pwd_unsqueezed.md'
)
PATH_SQUEEZED_FILE = os.path.join(
    'test',
    'assets',
    'pwd_squeezed.md'
)
def _create_config(
    examples_dir=None,
    custom_dir=None,
    color_config=None,
    use_color=True,
    pager_cmd=None,
    editor_cmd=None,
    squeeze=False,
    subs=None
):
    """
    Create a config.Config object with default values for expediency in
    testing.
    """
    # locals() here holds exactly the eight parameters above, which map
    # one-to-one onto config.Config's keyword arguments.
    return config.Config(**locals())
@patch('os.walk')
def test_get_file_paths_for_program_with_single(mock_walk):
    """A lone matching example file in the root directory is found."""
    target = 'cp'
    root_dir = '/Users/tyrion'
    example_file = target + util.EXAMPLE_FILE_SUFFIX
    # Only the exact `<program>.md` name should match, not cp.txt etc.
    mock_walk.return_value = [
        [root_dir, [], [example_file, 'cp.txt', 'other_file.md']],
    ]
    result = util.get_file_paths_for_program(target, root_dir)
    assert result == ['/Users/tyrion/cp.md']
    mock_walk.assert_called_once_with(root_dir)
@patch('os.walk')
def test_get_file_paths_for_program_with_nested(mock_walk):
    """Matching example files are collected from every nested directory."""
    target = 'cp'
    root_dir = '/Users/tyrion'
    match = 'cp.md'
    # One match per directory level, surrounded by decoys that must be ignored.
    mock_walk.return_value = [
        [root_dir, ['dirA', 'dirB'], [match, 'cp.txt', 'other_file.md']],
        [root_dir + '/dirA', ['dirA-child'], [match, 'bad.md']],
        [root_dir + '/dirA/dirA-child', [], ['bad.md', match, 'wtf.md']],
        [root_dir + '/dirB', [], ['foo.md', match]],
    ]
    result = util.get_file_paths_for_program(target, root_dir)
    assert result == [
        '/Users/tyrion/cp.md',
        '/Users/tyrion/dirA/cp.md',
        '/Users/tyrion/dirA/dirA-child/cp.md',
        '/Users/tyrion/dirB/cp.md',
    ]
    mock_walk.assert_called_once_with(root_dir)
@patch('os.walk')
def test_get_file_paths_for_program_with_none(mock_walk):
    """An empty walk should yield no paths at all."""
    mock_walk.return_value = []
    assert util.get_file_paths_for_program('cp', '/Users/tyrion') == []
    mock_walk.assert_called_once_with('/Users/tyrion')
@patch('os.walk')
def test_get_file_paths_for_program_with_no_dir(mock_walk):
    """A None examples dir should simply produce an empty list."""
    actual = util.get_file_paths_for_program('cp', None)
    assert actual == []
@patch('eg.util.page_string')
@patch('eg.util.get_formatted_contents')
@patch('eg.util.get_contents_from_files')
@patch('eg.util.get_resolved_program')
def test_handle_program_no_entries(
    mock_resolve_program,
    mock_get_contents,
    mock_format,
    mock_page_string,
):
    """
    We should do the right thing if there are no entries for a given program.
    """
    program = 'cp'
    test_config = _create_config()
    mock_resolve_program.return_value = program
    util.handle_program(program, test_config)
    mock_resolve_program.assert_called_once_with(program, test_config)
    # handle_program should bail out early: nothing is fetched, formatted,
    # or paged when no entries exist.
    for untouched_mock in (mock_get_contents, mock_format, mock_page_string):
        assert untouched_mock.call_count == 0
@patch('eg.util.get_resolved_program')
@patch('eg.util.get_contents_from_files')
@patch('eg.util.get_file_paths_for_program')
@patch('eg.util.get_formatted_contents')
@patch('eg.util.page_string')
def test_handle_program_finds_paths_and_calls_open_pager_no_alias(
    mock_page,
    mock_format,
    mock_get_paths,
    mock_get_contents,
    mock_resolve,
):
    """
    If there are entries for the program, handle_program needs to get the
    paths, get the contents, format the contents, and page the resulting
    string.
    """
    program = 'mv'
    examples_dir = 'test-eg-dir'
    custom_dir = 'test-custom-dir'
    color_config = None
    use_color = False
    pager_cmd = 'foo bar'
    squeeze = False
    subs = ['foo', 'bar']
    file_contents = 'I am the contents of mv.md.'
    formatted_contents = 'and I am the formatted contents of mv.md.'
    test_config = _create_config(
        examples_dir=examples_dir,
        custom_dir=custom_dir,
        color_config=color_config,
        use_color=use_color,
        pager_cmd=pager_cmd,
        squeeze=squeeze,
        subs=subs
    )
    default_paths = ['test-eg-dir/mv.md', 'test-eg-dir/foo/mv.md']
    custom_paths = ['test-custom-dir/mv.md', 'test-custom-dir/bar.md']
    # Side effect for get_file_paths_for_program: return the path list that
    # matches the queried directory, and fail loudly if handle_program asks
    # for an unexpected program or directory.
    def return_correct_path(*args, **kwargs):
        program_param = args[0]
        dir_param = args[1]
        if program_param != program:
            raise NameError('expected ' + program + ', got ' + program_param)
        if dir_param == examples_dir:
            return default_paths
        elif dir_param == custom_dir:
            return custom_paths
        else:
            raise NameError(
                'got ' +
                dir_param +
                ', expected ' +
                examples_dir +
                ' or ' +
                custom_dir)
    mock_format.return_value = formatted_contents
    mock_get_paths.side_effect=return_correct_path
    mock_get_contents.return_value = file_contents
    mock_resolve.return_value = program
    util.handle_program(program, test_config)
    mock_resolve.assert_called_once_with(
        program,
        test_config
    )
    # Both the default and custom dirs must be searched.
    mock_get_paths.assert_any_call(
        program,
        examples_dir
    )
    mock_get_paths.assert_any_call(
        program,
        custom_dir,
    )
    # Custom paths are passed before default paths, so custom content wins.
    mock_get_contents.assert_called_once_with(
        custom_paths[0],
        custom_paths[1],
        default_paths[0],
        default_paths[1],
    )
    # Formatting options must come straight from the config object.
    mock_format.assert_called_once_with(
        file_contents,
        use_color=test_config.use_color,
        color_config=test_config.color_config,
        squeeze=test_config.squeeze,
        subs=test_config.subs
    )
    mock_page.assert_called_once_with(
        formatted_contents,
        test_config.pager_cmd
    )
@patch('eg.util.get_resolved_program')
@patch('eg.util.get_contents_from_files')
@patch('eg.util.get_file_paths_for_program')
@patch('eg.util.get_formatted_contents')
@patch('eg.util.page_string')
def test_handle_program_finds_paths_and_calls_open_pager_with_alias(
    mock_page,
    mock_format,
    mock_get_paths,
    mock_get_contents,
    mock_resolve,
):
    """
    If there are entries for the program, handle_program needs to get the
    paths, get the contents, format the contents, and page the resulting
    string.

    Unlike the no-alias variant above, the requested program here is an
    alias, so path lookups must use the resolved program name.
    """
    alias_for_program = 'link'
    resolved_program = 'ln'
    examples_dir = 'test-eg-dir'
    custom_dir = 'test-custom-dir'
    color_config = None
    use_color = False
    pager_cmd = 'foo bar'
    squeeze = False
    subs = ['foo', 'bar']
    file_contents = 'I am the contents of ln.md.'
    formatted_contents = 'and I am the formatted contents of ln.md.'
    test_config = _create_config(
        examples_dir=examples_dir,
        custom_dir=custom_dir,
        color_config=color_config,
        use_color=use_color,
        pager_cmd=pager_cmd,
        squeeze=squeeze,
        subs=subs
    )
    default_paths = ['test-eg-dir/ln.md']
    custom_paths = ['test-custom-dir/ln.md']
    # Side effect for get_file_paths_for_program: must be queried with the
    # RESOLVED program ('ln'), never the alias ('link').
    def return_correct_path(*args, **kwargs):
        program_param = args[0]
        dir_param = args[1]
        if program_param != resolved_program:
            raise NameError(
                'expected ' +
                resolved_program +
                ', got ' +
                program_param
            )
        if dir_param == examples_dir:
            return default_paths
        elif dir_param == custom_dir:
            return custom_paths
        else:
            raise NameError(
                'got ' +
                dir_param +
                ', expected ' +
                examples_dir +
                ' or ' +
                custom_dir)
    mock_format.return_value = formatted_contents
    mock_get_paths.side_effect = return_correct_path
    mock_get_contents.return_value = file_contents
    mock_resolve.return_value = resolved_program
    util.handle_program(
        alias_for_program,
        test_config
    )
    # Resolution receives the alias; everything downstream the resolved name.
    mock_resolve.assert_called_once_with(
        alias_for_program,
        test_config
    )
    mock_get_paths.assert_any_call(
        resolved_program,
        examples_dir
    )
    mock_get_paths.assert_any_call(
        resolved_program,
        custom_dir,
    )
    # Custom path is passed before the default path.
    mock_get_contents.assert_called_once_with(
        custom_paths[0],
        default_paths[0]
    )
    mock_format.assert_called_once_with(
        file_contents,
        use_color=test_config.use_color,
        color_config=test_config.color_config,
        squeeze=test_config.squeeze,
        subs=test_config.subs
    )
    mock_page.assert_called_once_with(
        formatted_contents,
        test_config.pager_cmd
    )
def test_get_list_of_all_supported_commands(tmpdir):
    """
    Programs from both trees are listed: '*' marks both dirs, '+' marks
    custom-only, and aliases are shown with an arrow to their target.
    """
    dir_example = tmpdir.mkdir('examples')
    dir_custom = tmpdir.mkdir('custom')
    test_config = _create_config(
        examples_dir=str(dir_example),
        custom_dir=str(dir_custom),
    )
    expected = [
        'a-only-default',
        'b-both *',
        'c-only-custom +',
        'd-only-custom-nested +',
        'e-only-default-nested',
        'f-default-custom-nested',
        'g-both-different-levels *',
        't-a-only-default-alias -> a-only-default',
        'u-b-both-alias -> b-both *',
        'v-c-only-custom-alias -> c-only-custom +'
    ]
    aliases = {
        't-a-only-default-alias': 'a-only-default',
        'u-b-both-alias': 'b-both',
        'v-c-only-custom-alias': 'c-only-custom'
    }
    dir_example_nested = dir_example.mkdir('default-nested')
    dir_custom_nested = dir_custom.mkdir('custom-nested')
    # Lay out the example files across the two trees, table-driven.
    layout = (
        (dir_example, 'a-only-default.md'),
        (dir_example, 'b-both.md'),
        (dir_custom, 'b-both.md'),
        (dir_custom, 'c-only-custom.md'),
        (dir_custom_nested, 'd-only-custom-nested.md'),
        (dir_example_nested, 'e-only-default-nested.md'),
        (dir_example_nested, 'f-default-custom-nested.md'),
        (dir_example, 'g-both-different-levels.md'),
        (dir_custom_nested, 'g-both-different-levels.md'),
    )
    for directory, file_name in layout:
        directory.join(file_name).write('foo')
    # Use the 'with' context manager rather than the @decorator, because the
    # tmpdir fixture doesn't play nice with the decorator.
    with patch('eg.util.get_alias_dict') as mock_get_alias:
        mock_get_alias.return_value = aliases
        actual = util.get_list_of_all_supported_commands(test_config)
        assert actual == expected
        mock_get_alias.assert_called_once_with(test_config)
def test_list_supported_programs_fails_gracefully_if_no_dirs():
    """With neither examples nor custom dir configured, the list is empty."""
    assert util.get_list_of_all_supported_commands(_create_config()) == []
def test_calls_pipepager_if_not_less():
    """
    less is special-cased as the default, so any custom pager command that
    is NOT less must go straight to pipepager.
    """
    _helper_assert_about_pager('page me plz', 'cat', False)
def test_calls_fallback_pager_if_none():
    """A pager_cmd of None must route to the fallback pager."""
    _helper_assert_about_pager('page me plz', None, True)
def test_calls_pipepager_if_less():
    """
    Asking for less explicitly should use pipepager (less being installed
    on the machine is assumed here).
    """
    _helper_assert_about_pager('a fancy value to page', 'less -R', False)
def test_calls_fallback_if_cmd_is_flag_string():
    """
    The FLAG_FALLBACK sentinel string explicitly requests the fallback
    pager.
    """
    _helper_assert_about_pager(
        'page via fallback',
        util.FLAG_FALLBACK,
        True
    )
@patch('pydoc.pager')
@patch('pydoc.pipepager')
def _helper_assert_about_pager(
    str_to_page,
    pager_cmd,
    use_fallback,
    pipepager,
    default_pager,
):
    """
    Page ``str_to_page`` via ``pager_cmd`` and assert which pydoc pager
    was used.

    str_to_page: what you're paging
    pager_cmd: the string passed to pipepager (or None)
    use_fallback: False if pydoc.pipepager should have been used, True if
        we instead expect the fallback pydoc.pager
    """
    util.page_string(str_to_page, pager_cmd)
    if use_fallback:
        assert pipepager.call_count == 0
        default_pager.assert_called_once_with(str_to_page)
    else:
        assert default_pager.call_count == 0
        pipepager.assert_called_once_with(str_to_page, cmd=pager_cmd)
@patch('eg.util.pydoc.pipepager', side_effect=KeyboardInterrupt)
def test_page_string_excepts_keyboard_interrupt_if_not_less(pipepager_mock):
    """
    A ctrl-c delivered while the pipepager is open must not propagate.
    """
    interrupted = False
    try:
        util.page_string('page me plz', 'cat')
    except KeyboardInterrupt:
        interrupted = True
    assert not interrupted, 'Should not have got this far'
    pipepager_mock.assert_called_once_with('page me plz', cmd='cat')
@patch('eg.util.pydoc.pager', side_effect=KeyboardInterrupt)
def test_page_string_excepts_keyboard_interrupt_if_none(pager_mock):
    """
    A ctrl-c delivered while the fallback pager is open must not propagate.
    """
    interrupted = False
    try:
        util.page_string('page me plz', None)
    except KeyboardInterrupt:
        interrupted = True
    assert not interrupted, 'Should not have got this far'
    pager_mock.assert_called_once_with('page me plz')
def test_get_contents_from_files_handles_none():
    """No files at all should produce the empty string."""
    _helper_assert_file_contents([], '')
def test_get_contents_from_files_handles_one():
    """A single file's contents should pass through unchanged."""
    infos = [
        {'path': 'test/path', 'contents': 'contents of file'},
    ]
    _helper_assert_file_contents(infos, 'contents of file')
def test_get_contents_from_files_handles_multiple():
    """Multiple files should be concatenated in call order."""
    infos = [
        {'path': 'path/1', 'contents': 'foo\n'},
        {'path': 'path/2/foo', 'contents': 'bar\n'},
        {'path': 'another/path', 'contents': 'baz'},
    ]
    _helper_assert_file_contents(infos, 'foo\nbar\nbaz')
@patch('eg.util._get_contents_of_file')
def _helper_assert_file_contents(
    file_infos,
    target_contents,
    get_contents_mock,
):
    """
    Assert that get_contents_from_files combines the given files into
    ``target_contents``, without touching the disk.

    file_infos: array of { path, contents } dicts representing files, in
        the order the paths should be passed.
    target_contents: the combined contents get_contents_from_files should
        return.
    """
    contents_by_path = {
        info['path']: info['contents'] for info in file_infos
    }
    # Stand-in for _get_contents_of_file keyed on the requested path.
    def fake_get_contents(*args, **kwargs):
        if args[0] not in contents_by_path:
            raise TypeError('did not find path in test obj')
        return contents_by_path[args[0]]
    get_contents_mock.side_effect = fake_get_contents
    actual = util.get_contents_from_files(
        *[info['path'] for info in file_infos]
    )
    assert actual == target_contents
@patch('eg.util.get_colorized_contents')
@patch('eg.util.get_squeezed_contents')
@patch('eg.util.get_substituted_contents')
def _helper_assert_formatted_contents(
    starting_contents,
    use_color,
    color_config,
    squeeze,
    subs,
    colorized_contents,
    squeezed_contents,
    subbed_contents,
    formatted_result,
    sub_method,
    squeeze_method,
    color_method,
):
    """
    Helper method to assist in asserting things about the
    get_formatted_contents method.

    starting_contents: the starting string that we are working with
    use_color: True if we should use color
    color_config: the color config to be passed to get_colorized_contents
    squeeze: True if we should squeeze
    subs: the list of Substitutions that we should pass to
        get_substituted_contents
    colored_contents: the result of get_colorized_contents
    squeezed_contents: the result of get_squeezed_contents
    subbed_contents: the result of subbed_contents
    formatted_result: the final, formatted string that should be returned
    """
    sub_method.return_value = subbed_contents
    squeeze_method.return_value = squeezed_contents
    color_method.return_value = colorized_contents
    actual = util.get_formatted_contents(
        starting_contents,
        use_color,
        color_config,
        squeeze,
        subs
    )
    # We'll update the contents as they get formatted to make sure
    # we pass the right thing to the various methods.
    # The expected pipeline order is: colorize, then squeeze, then
    # substitute — each stage consuming the previous stage's output.
    contents_thus_far = starting_contents
    if use_color:
        color_method.assert_called_once_with(
            contents_thus_far,
            color_config
        )
        contents_thus_far = colorized_contents
    else:
        assert color_method.call_count == 0
    if squeeze:
        squeeze_method.assert_called_once_with(contents_thus_far)
        contents_thus_far = squeezed_contents
    else:
        assert squeeze_method.call_count == 0
    if subs:
        sub_method.assert_called_once_with(
            contents_thus_far,
            subs
        )
        contents_thus_far = subbed_contents
    else:
        assert sub_method.call_count == 0
    assert actual == formatted_result
def test_get_formatted_contents_does_not_format_methods_if_all_falsey():
    """
    No formatter should run when color and squeeze are off and subs is
    falsey: the starting string comes back untouched.
    """
    original = 'this is where we start'
    _helper_assert_formatted_contents(
        original,
        False,
        'some color config',
        False,
        None,
        'this was colored',
        'this was squeezed',
        'these contents were subbed',
        original
    )
def test_get_formatted_contents_calls_colorize_if_use_color():
    """With use_color=True, only the colorize step should apply."""
    original = 'this is where we start'
    after_color = 'COLORIZED: this is where we start'
    _helper_assert_formatted_contents(
        original,
        True,
        'some color config',
        False,
        None,
        after_color,
        'this was squeezed',
        'these contents were subbed',
        after_color
    )
def test_get_formatted_contents_squeezes():
    """With squeeze=True, only the squeeze step should apply."""
    original = 'this is where we start'
    after_squeeze = 'this is the result of a squeezing'
    _helper_assert_formatted_contents(
        original,
        False,
        'some color config',
        True,
        None,
        'this was colored',
        after_squeeze,
        'these contents were subbed',
        after_squeeze
    )
def test_get_formatted_contents_subsitutes():
    """With a truthy subs list, only the substitution step should apply."""
    # NOTE(review): the name has a typo ('subsitutes'); left as-is because
    # pytest collects tests by name.
    original = 'this is where we start'
    after_subs = 'substituted like a teacher'
    _helper_assert_formatted_contents(
        original,
        False,
        'some color config',
        False,
        ['truthy', 'list'],
        'this was colored',
        'this was squeezed',
        after_subs,
        after_subs
    )
def test_perform_all_formatting():
    """
    When use_color, squeeze, and subs are all truthy, every formatter runs
    in order; substitution is last, so its output is the final result.
    """
    original = 'the starting point for grand formatting'
    after_subs = 'subbed is the last thing called so should be the result'
    _helper_assert_formatted_contents(
        original,
        True,
        'some color config',
        True,
        ['truthy', 'list'],
        'this was colored',
        'this was squeezed',
        after_subs,
        after_subs
    )
def _get_file_as_string(path):
    """Return the entire contents of the file at ``path`` as a string."""
    with open(path, 'r') as f:
        return f.read()
def test_get_squeezed_contents_correctly_squeezes():
    """
    Squeezing should follow our convention: drop the blank line between a
    description and its example, keep two blank lines between sections,
    and otherwise allow only single blank lines.
    """
    raw = _get_file_as_string(PATH_UNSQUEEZED_FILE)
    # pwd_squeezed.md is the reference implementation of the squeeze rules.
    expected = _get_file_as_string(PATH_SQUEEZED_FILE)
    assert util.get_squeezed_contents(raw) == expected
def test_get_substituted_contents_handles_empty_subs():
    """Nothing should be formatted if there are no substitutions."""
    raw = 'this should not be subbed'
    assert util.get_substituted_contents(raw, []) == raw
def test_get_substituted_contents_substitutes_calls_correct_methods():
    """
    Substitutions are applied in order, each stage consuming the previous
    stage's result; the last result is returned.
    """
    first_sub = Mock(auto_spec=substitute.Substitution)
    first_result = 'result of sub one'
    first_sub.apply_and_get_result.return_value = first_result
    second_sub = Mock(auto_spec=substitute.Substitution)
    second_result = 'result of sub two'
    second_sub.apply_and_get_result.return_value = second_result
    starting_contents = 'the string we should be substituting into'
    actual = util.get_substituted_contents(
        starting_contents,
        [first_sub, second_sub]
    )
    first_sub.apply_and_get_result.assert_called_once_with(starting_contents)
    second_sub.apply_and_get_result.assert_called_once_with(first_result)
    assert actual == second_result
def test_get_substituted_contents_substitutes_correctly():
    """
    End-to-end check that real Substitution objects are applied correctly.
    """
    subs = [
        substitute.Substitution('foo', 'bar', False),
        substitute.Substitution('bar\n\n', 'baz\n', True),
    ]
    start = 'foo\n\n something else\n\n bar\n\n'
    actual = util.get_substituted_contents(start, subs)
    assert actual == 'baz\n something else\n\n baz\n'
@patch('eg.color.EgColorizer')
def test_get_colorized_contents_calls_methods(patched_colorizer_class):
    """
    Coloring should instantiate an EgColorizer and run the text through
    its colorize_text method.
    """
    raw_contents = 'these are uncolored contents'
    colored_contents = 'COLORED: ' + raw_contents
    # The instance the code under test creates is the mock's return_value.
    colorizer = patched_colorizer_class.return_value
    colorizer.colorize_text.return_value = colored_contents
    actual = util.get_colorized_contents(raw_contents, 'some color config')
    assert actual == colored_contents
    colorizer.colorize_text.assert_called_once_with(raw_contents)
@patch('eg.util.get_alias_dict')
def _helper_assert_get_resolved_program(
    program,
    resolved_program,
    config_obj,
    alias_dict,
    mock_dict,
):
    """
    Assert that resolving ``program`` against ``alias_dict`` yields
    ``resolved_program``.

    program: the program to resolve as an alias
    resolved_program: the expected result of the resolution
    config_obj: the config used to locate the alias file
    alias_dict: the dict of aliases the mocked get_alias_dict returns
    """
    mock_dict.return_value = alias_dict
    assert util.get_resolved_program(program, config_obj) == resolved_program
    mock_dict.assert_called_once_with(config_obj)
def test_get_resolved_program_no_alias():
    """
    A program that is not an alias should return itself.
    """
    alias_dict = {
        'link': 'ln',
        'nc': 'netcat'
    }
    config_obj = 'a config'
    # Fix: the original asserted 'link' -> 'ln', which exercises an actual
    # alias resolution and belongs in the is_alias test. 'cp' is absent
    # from alias_dict, so it must resolve to itself — matching this test's
    # name and docstring.
    _helper_assert_get_resolved_program('cp', 'cp', config_obj, alias_dict)
def test_get_resolved_program_is_alias():
    """
    A program that is an alias should return the resolved value.
    """
    alias_dict = {
        'link': 'ln',
        'nc': 'netcat'
    }
    config_obj = 'some new config'
    # Fix: the original asserted 'cp' -> 'cp', but 'cp' is not in
    # alias_dict, so that never exercised alias resolution. 'link' IS an
    # alias and must resolve to 'ln' — matching this test's name and
    # docstring.
    _helper_assert_get_resolved_program('link', 'ln', config_obj, alias_dict)
def test_get_alias_dict_returns_contents_of_correct_file():
    """
    get_alias_dict should parse the JSON found at the default alias file
    path.
    """
    alias_dict = {
        'link': 'ln',
        'nc': 'netcat'
    }
    config_obj = _create_config(
        examples_dir='path/to/examples/dir',
    )
    _helper_assert_get_alias_dict(
        json.dumps(alias_dict),
        alias_dict,
        config_obj,
        'path/to/alias/file',
        True
    )
def test_get_alias_dict_fails_gracefully_if_not_file():
    """
    A user-supplied examples dir may lack an aliases file; in that case we
    should get an empty dict rather than an error, and the file contents
    should never even be read.
    """
    config_obj = _create_config(
        examples_dir='path/to/examples/dir',
    )
    _helper_assert_get_alias_dict(
        'should never be reached',
        {},
        config_obj,
        'path/to/the/alias/file',
        False
    )
@patch('eg.util._get_contents_of_file')
@patch('eg.util._get_alias_file_path')
@patch('os.path.isfile')
def _helper_assert_get_alias_dict(
    contents_of_alias_dict_file,
    target_alias_dict,
    config_obj,
    alias_file_path,
    alias_file_path_is_file,
    mock_is_file,
    mock_get_alias_file_path,
    mock_get_contents,
):
    """
    contents_of_alias_dict_file: the string contents of the file storing the
        dictionary of aliases
    target_alias_dict: the target result of get_alias_dict
    config_obj: the Config object
    alias_file_path: the path to be returned by _get_alias_file_path
    alias_file_path_is_file: True if the alias path is a file, else False
    """
    mock_is_file.return_value = alias_file_path_is_file
    mock_get_alias_file_path.return_value = alias_file_path
    mock_get_contents.return_value = contents_of_alias_dict_file
    actual = util.get_alias_dict(config_obj)
    assert actual == target_alias_dict
    mock_get_alias_file_path.assert_called_once_with(config_obj)
    mock_is_file.assert_called_once_with(alias_file_path)
    # The file must only be read when it actually exists; otherwise
    # get_alias_dict should short-circuit without touching the disk.
    if alias_file_path_is_file:
        mock_get_contents.assert_called_once_with(alias_file_path)
    else:
        assert mock_get_contents.call_count == 0
@patch('os.path.join')
def test_get_alias_file_path(mock_join):
    """
    _get_alias_file_path should simply join the examples dir and the alias
    file name, so we look in the right place for the file.
    """
    config_obj = _create_config(
        examples_dir='handy/dandy/examples/dir',
    )
    mock_join.return_value = 'joined path'
    actual = util._get_alias_file_path(config_obj)
    assert actual == 'joined path'
    mock_join.assert_called_once_with(
        config_obj.examples_dir,
        util.ALIAS_FILE_NAME
    )
def test_is_example_file_true_if_has_suffix():
    """
    Should be true if ends in EXAMPLE_FILE_SUFFIX.
    """
    file_name = 'find.md'
    actual = util._is_example_file(file_name)
    # Fix: was `actual == True` (flake8 E712). `is True` additionally
    # verifies the function returns a real boolean, not merely a truthy
    # value.
    assert actual is True
def test_is_example_file_true_if_not_suffix():
    """
    Should be false if the file does not end in EXAMPLE_FILE_SUFFIX.
    """
    # NOTE(review): the name says 'true_if_not_suffix' but the assertion is
    # False; left unrenamed since pytest collects tests by name.
    file_name = 'aliases.json'
    actual = util._is_example_file(file_name)
    # Fix: was `actual == False` (flake8 E712). `is False` additionally
    # verifies a real boolean is returned.
    assert actual is False
def test_can_parse_alias_file():
    """
    Make sure the shipped aliases.json file can be parsed, guarding
    against an edit accidentally corrupting it.
    """
    # We have to hardcode the location of the shipped file.
    alias_file_path = os.path.join(
        config.DEFAULT_EXAMPLES_DIR,
        util.ALIAS_FILE_NAME
    )
    alias_dict = json.loads(util._get_contents_of_file(alias_file_path))
    # 'link' -> 'ln' is a known entry that should always be present.
    assert alias_dict['link'] == 'ln'
@patch('os.path.exists')
@patch('eg.util._inform_cannot_edit_no_custom_dir')
@patch('eg.util.get_resolved_program')
@patch('eg.util.get_file_paths_for_program')
@patch('subprocess.call')
def test_edit_custom_examples_correct_with_custom_dir(
    mock_call,
    mock_get_paths,
    mock_get_program,
    mock_inform,
    mock_exists,
):
    """
    Editing should resolve aliases, look up the custom file paths, and
    open the first match in the configured editor via subprocess.
    """
    target = 'du'
    resolved = 'alias for du'
    test_config = _create_config(custom_dir='path/to/custom', editor_cmd='nano')
    found_paths = ['path/to/custom/du.md', 'foo.md']
    mock_get_program.return_value = resolved
    mock_get_paths.return_value = found_paths
    mock_exists.return_value = True
    util.edit_custom_examples(target, test_config)
    mock_get_program.assert_called_once_with(target, test_config)
    mock_get_paths.assert_called_once_with(resolved, test_config.custom_dir)
    # Only the first matching file is opened, and no warning is issued.
    mock_call.assert_called_once_with([test_config.editor_cmd, found_paths[0]])
    assert mock_inform.call_count == 0
@patch('os.path.exists')
@patch('eg.util._inform_cannot_edit_no_custom_dir')
@patch('eg.util.get_resolved_program')
@patch('eg.util.get_file_paths_for_program')
@patch('subprocess.call')
def test_edit_custom_examples_creates_file_if_none_exist(
    mock_call,
    mock_get_paths,
    mock_get_program,
    mock_inform,
    mock_exists,
):
    """
    When no custom file exists yet, the editor should be opened on a new
    file named after the resolved program inside the custom dir.
    """
    target = 'du'
    resolved = 'alias-for-du'
    test_config = _create_config(custom_dir='path/to/custom', editor_cmd='nano')
    mock_get_program.return_value = resolved
    mock_get_paths.return_value = []
    mock_exists.return_value = True
    util.edit_custom_examples(target, test_config)
    mock_get_program.assert_called_once_with(target, test_config)
    mock_get_paths.assert_called_once_with(resolved, test_config.custom_dir)
    mock_call.assert_called_once_with(
        [test_config.editor_cmd, 'path/to/custom/alias-for-du.md'])
    assert mock_inform.call_count == 0
@patch('os.path.exists')
@patch('eg.util._inform_cannot_edit_no_custom_dir')
@patch('eg.util.get_resolved_program')
@patch('eg.util.get_file_paths_for_program')
@patch('subprocess.call')
def test_edit_custom_examples_informs_if_no_custom_dir(
    mock_call,
    mock_get_paths,
    mock_get_program,
    mock_inform,
    mock_exists,
):
    """
    The user should be informed — and nothing edited — when no custom dir
    is usable: either unset, or set to a path that does not exist.
    """
    target = 'awk'
    # Case 1: no custom dir configured at all.
    test_config = _create_config(editor_cmd='vi -e')
    mock_exists.return_value = True
    util.edit_custom_examples(target, test_config)
    assert mock_inform.call_count == 1
    # Case 2: a custom dir is configured but the path does not exist.
    test_config = _create_config(custom_dir='/path/to/custom', editor_cmd='vi -e')
    mock_exists.return_value = False
    util.edit_custom_examples(target, test_config)
    assert mock_inform.call_count == 2
    # In neither case should anything be looked up or opened.
    assert mock_call.call_count == 0
    assert mock_get_paths.call_count == 0
    assert mock_get_program.call_count == 0
| [
"mock.Mock",
"eg.util.get_file_paths_for_program",
"eg.util._is_example_file",
"eg.util.handle_program",
"eg.util.get_list_of_all_supported_commands",
"eg.util._get_alias_file_path",
"eg.substitute.Substitution",
"mock.patch",
"eg.util.page_string",
"eg.util.get_squeezed_contents",
"json.dumps",... | [((160, 211), 'os.path.join', 'os.path.join', (['"""test"""', '"""assets"""', '"""pwd_unsqueezed.md"""'], {}), "('test', 'assets', 'pwd_unsqueezed.md')\n", (172, 211), False, 'import os\n'), ((247, 296), 'os.path.join', 'os.path.join', (['"""test"""', '"""assets"""', '"""pwd_squeezed.md"""'], {}), "('test', 'assets', 'pwd_squeezed.md')\n", (259, 296), False, 'import os\n'), ((866, 882), 'mock.patch', 'patch', (['"""os.walk"""'], {}), "('os.walk')\n", (871, 882), False, 'from mock import patch\n'), ((1353, 1369), 'mock.patch', 'patch', (['"""os.walk"""'], {}), "('os.walk')\n", (1358, 1369), False, 'from mock import patch\n'), ((2375, 2391), 'mock.patch', 'patch', (['"""os.walk"""'], {}), "('os.walk')\n", (2380, 2391), False, 'from mock import patch\n'), ((2657, 2673), 'mock.patch', 'patch', (['"""os.walk"""'], {}), "('os.walk')\n", (2662, 2673), False, 'from mock import patch\n'), ((2798, 2826), 'mock.patch', 'patch', (['"""eg.util.page_string"""'], {}), "('eg.util.page_string')\n", (2803, 2826), False, 'from mock import patch\n'), ((2828, 2867), 'mock.patch', 'patch', (['"""eg.util.get_formatted_contents"""'], {}), "('eg.util.get_formatted_contents')\n", (2833, 2867), False, 'from mock import patch\n'), ((2869, 2909), 'mock.patch', 'patch', (['"""eg.util.get_contents_from_files"""'], {}), "('eg.util.get_contents_from_files')\n", (2874, 2909), False, 'from mock import patch\n'), ((2911, 2948), 'mock.patch', 'patch', (['"""eg.util.get_resolved_program"""'], {}), "('eg.util.get_resolved_program')\n", (2916, 2948), False, 'from mock import patch\n'), ((3622, 3659), 'mock.patch', 'patch', (['"""eg.util.get_resolved_program"""'], {}), "('eg.util.get_resolved_program')\n", (3627, 3659), False, 'from mock import patch\n'), ((3661, 3701), 'mock.patch', 'patch', (['"""eg.util.get_contents_from_files"""'], {}), "('eg.util.get_contents_from_files')\n", (3666, 3701), False, 'from mock import patch\n'), ((3703, 3746), 'mock.patch', 'patch', 
(['"""eg.util.get_file_paths_for_program"""'], {}), "('eg.util.get_file_paths_for_program')\n", (3708, 3746), False, 'from mock import patch\n'), ((3748, 3787), 'mock.patch', 'patch', (['"""eg.util.get_formatted_contents"""'], {}), "('eg.util.get_formatted_contents')\n", (3753, 3787), False, 'from mock import patch\n'), ((3789, 3817), 'mock.patch', 'patch', (['"""eg.util.page_string"""'], {}), "('eg.util.page_string')\n", (3794, 3817), False, 'from mock import patch\n'), ((6414, 6451), 'mock.patch', 'patch', (['"""eg.util.get_resolved_program"""'], {}), "('eg.util.get_resolved_program')\n", (6419, 6451), False, 'from mock import patch\n'), ((6453, 6493), 'mock.patch', 'patch', (['"""eg.util.get_contents_from_files"""'], {}), "('eg.util.get_contents_from_files')\n", (6458, 6493), False, 'from mock import patch\n'), ((6495, 6538), 'mock.patch', 'patch', (['"""eg.util.get_file_paths_for_program"""'], {}), "('eg.util.get_file_paths_for_program')\n", (6500, 6538), False, 'from mock import patch\n'), ((6540, 6579), 'mock.patch', 'patch', (['"""eg.util.get_formatted_contents"""'], {}), "('eg.util.get_formatted_contents')\n", (6545, 6579), False, 'from mock import patch\n'), ((6581, 6609), 'mock.patch', 'patch', (['"""eg.util.page_string"""'], {}), "('eg.util.page_string')\n", (6586, 6609), False, 'from mock import patch\n'), ((12366, 12386), 'mock.patch', 'patch', (['"""pydoc.pager"""'], {}), "('pydoc.pager')\n", (12371, 12386), False, 'from mock import patch\n'), ((12388, 12412), 'mock.patch', 'patch', (['"""pydoc.pipepager"""'], {}), "('pydoc.pipepager')\n", (12393, 12412), False, 'from mock import patch\n'), ((13145, 13208), 'mock.patch', 'patch', (['"""eg.util.pydoc.pipepager"""'], {'side_effect': 'KeyboardInterrupt'}), "('eg.util.pydoc.pipepager', side_effect=KeyboardInterrupt)\n", (13150, 13208), False, 'from mock import patch\n'), ((13575, 13634), 'mock.patch', 'patch', (['"""eg.util.pydoc.pager"""'], {'side_effect': 'KeyboardInterrupt'}), "('eg.util.pydoc.pager', 
side_effect=KeyboardInterrupt)\n", (13580, 13634), False, 'from mock import patch\n'), ((14905, 14943), 'mock.patch', 'patch', (['"""eg.util._get_contents_of_file"""'], {}), "('eg.util._get_contents_of_file')\n", (14910, 14943), False, 'from mock import patch\n'), ((15961, 16000), 'mock.patch', 'patch', (['"""eg.util.get_colorized_contents"""'], {}), "('eg.util.get_colorized_contents')\n", (15966, 16000), False, 'from mock import patch\n'), ((16002, 16040), 'mock.patch', 'patch', (['"""eg.util.get_squeezed_contents"""'], {}), "('eg.util.get_squeezed_contents')\n", (16007, 16040), False, 'from mock import patch\n'), ((16042, 16083), 'mock.patch', 'patch', (['"""eg.util.get_substituted_contents"""'], {}), "('eg.util.get_substituted_contents')\n", (16047, 16083), False, 'from mock import patch\n'), ((23018, 23047), 'mock.patch', 'patch', (['"""eg.color.EgColorizer"""'], {}), "('eg.color.EgColorizer')\n", (23023, 23047), False, 'from mock import patch\n'), ((23762, 23793), 'mock.patch', 'patch', (['"""eg.util.get_alias_dict"""'], {}), "('eg.util.get_alias_dict')\n", (23767, 23793), False, 'from mock import patch\n'), ((26048, 26086), 'mock.patch', 'patch', (['"""eg.util._get_contents_of_file"""'], {}), "('eg.util._get_contents_of_file')\n", (26053, 26086), False, 'from mock import patch\n'), ((26088, 26125), 'mock.patch', 'patch', (['"""eg.util._get_alias_file_path"""'], {}), "('eg.util._get_alias_file_path')\n", (26093, 26125), False, 'from mock import patch\n'), ((26127, 26150), 'mock.patch', 'patch', (['"""os.path.isfile"""'], {}), "('os.path.isfile')\n", (26132, 26150), False, 'from mock import patch\n'), ((27294, 27315), 'mock.patch', 'patch', (['"""os.path.join"""'], {}), "('os.path.join')\n", (27299, 27315), False, 'from mock import patch\n'), ((28865, 28888), 'mock.patch', 'patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (28870, 28888), False, 'from mock import patch\n'), ((28890, 28940), 'mock.patch', 'patch', 
(['"""eg.util._inform_cannot_edit_no_custom_dir"""'], {}), "('eg.util._inform_cannot_edit_no_custom_dir')\n", (28895, 28940), False, 'from mock import patch\n'), ((28942, 28979), 'mock.patch', 'patch', (['"""eg.util.get_resolved_program"""'], {}), "('eg.util.get_resolved_program')\n", (28947, 28979), False, 'from mock import patch\n'), ((28981, 29024), 'mock.patch', 'patch', (['"""eg.util.get_file_paths_for_program"""'], {}), "('eg.util.get_file_paths_for_program')\n", (28986, 29024), False, 'from mock import patch\n'), ((29026, 29050), 'mock.patch', 'patch', (['"""subprocess.call"""'], {}), "('subprocess.call')\n", (29031, 29050), False, 'from mock import patch\n'), ((29906, 29929), 'mock.patch', 'patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (29911, 29929), False, 'from mock import patch\n'), ((29931, 29981), 'mock.patch', 'patch', (['"""eg.util._inform_cannot_edit_no_custom_dir"""'], {}), "('eg.util._inform_cannot_edit_no_custom_dir')\n", (29936, 29981), False, 'from mock import patch\n'), ((29983, 30020), 'mock.patch', 'patch', (['"""eg.util.get_resolved_program"""'], {}), "('eg.util.get_resolved_program')\n", (29988, 30020), False, 'from mock import patch\n'), ((30022, 30065), 'mock.patch', 'patch', (['"""eg.util.get_file_paths_for_program"""'], {}), "('eg.util.get_file_paths_for_program')\n", (30027, 30065), False, 'from mock import patch\n'), ((30067, 30091), 'mock.patch', 'patch', (['"""subprocess.call"""'], {}), "('subprocess.call')\n", (30072, 30091), False, 'from mock import patch\n'), ((30858, 30881), 'mock.patch', 'patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (30863, 30881), False, 'from mock import patch\n'), ((30883, 30933), 'mock.patch', 'patch', (['"""eg.util._inform_cannot_edit_no_custom_dir"""'], {}), "('eg.util._inform_cannot_edit_no_custom_dir')\n", (30888, 30933), False, 'from mock import patch\n'), ((30935, 30972), 'mock.patch', 'patch', (['"""eg.util.get_resolved_program"""'], {}), 
"('eg.util.get_resolved_program')\n", (30940, 30972), False, 'from mock import patch\n'), ((30974, 31017), 'mock.patch', 'patch', (['"""eg.util.get_file_paths_for_program"""'], {}), "('eg.util.get_file_paths_for_program')\n", (30979, 31017), False, 'from mock import patch\n'), ((31019, 31043), 'mock.patch', 'patch', (['"""subprocess.call"""'], {}), "('subprocess.call')\n", (31024, 31043), False, 'from mock import patch\n'), ((609, 800), 'eg.config.Config', 'config.Config', ([], {'examples_dir': 'examples_dir', 'custom_dir': 'custom_dir', 'color_config': 'color_config', 'use_color': 'use_color', 'pager_cmd': 'pager_cmd', 'editor_cmd': 'editor_cmd', 'squeeze': 'squeeze', 'subs': 'subs'}), '(examples_dir=examples_dir, custom_dir=custom_dir,\n color_config=color_config, use_color=use_color, pager_cmd=pager_cmd,\n editor_cmd=editor_cmd, squeeze=squeeze, subs=subs)\n', (622, 800), False, 'from eg import config\n'), ((1213, 1267), 'eg.util.get_file_paths_for_program', 'util.get_file_paths_for_program', (['program', 'examples_dir'], {}), '(program, examples_dir)\n', (1244, 1267), False, 'from eg import util\n'), ((2235, 2289), 'eg.util.get_file_paths_for_program', 'util.get_file_paths_for_program', (['program', 'examples_dir'], {}), '(program, examples_dir)\n', (2266, 2289), False, 'from eg import util\n'), ((2514, 2568), 'eg.util.get_file_paths_for_program', 'util.get_file_paths_for_program', (['"""cp"""', '"""/Users/tyrion"""'], {}), "('cp', '/Users/tyrion')\n", (2545, 2568), False, 'from eg import util\n'), ((3278, 3319), 'eg.util.handle_program', 'util.handle_program', (['program', 'test_config'], {}), '(program, test_config)\n', (3297, 3319), False, 'from eg import util\n'), ((5637, 5678), 'eg.util.handle_program', 'util.handle_program', (['program', 'test_config'], {}), '(program, test_config)\n', (5656, 5678), False, 'from eg import util\n'), ((8527, 8578), 'eg.util.handle_program', 'util.handle_program', (['alias_for_program', 'test_config'], {}), 
'(alias_for_program, test_config)\n', (8546, 8578), False, 'from eg import util\n'), ((11322, 11374), 'eg.util.get_list_of_all_supported_commands', 'util.get_list_of_all_supported_commands', (['test_config'], {}), '(test_config)\n', (11361, 11374), False, 'from eg import util\n'), ((12820, 12860), 'eg.util.page_string', 'util.page_string', (['str_to_page', 'pager_cmd'], {}), '(str_to_page, pager_cmd)\n', (12836, 12860), False, 'from eg import util\n'), ((15885, 15921), 'eg.util.get_contents_from_files', 'util.get_contents_from_files', (['*paths'], {}), '(*paths)\n', (15913, 15921), False, 'from eg import util\n'), ((17181, 17271), 'eg.util.get_formatted_contents', 'util.get_formatted_contents', (['starting_contents', 'use_color', 'color_config', 'squeeze', 'subs'], {}), '(starting_contents, use_color, color_config,\n squeeze, subs)\n', (17208, 17271), False, 'from eg import util\n'), ((21335, 21373), 'eg.util.get_squeezed_contents', 'util.get_squeezed_contents', (['unsqueezed'], {}), '(unsqueezed)\n', (21361, 21373), False, 'from eg import util\n'), ((21590, 21637), 'eg.util.get_substituted_contents', 'util.get_substituted_contents', (['raw_contents', '[]'], {}), '(raw_contents, [])\n', (21619, 21637), False, 'from eg import util\n'), ((21850, 21889), 'mock.Mock', 'Mock', ([], {'auto_spec': 'substitute.Substitution'}), '(auto_spec=substitute.Substitution)\n', (21854, 21889), False, 'from mock import Mock\n'), ((22009, 22048), 'mock.Mock', 'Mock', ([], {'auto_spec': 'substitute.Substitution'}), '(auto_spec=substitute.Substitution)\n', (22013, 22048), False, 'from mock import Mock\n'), ((22294, 22348), 'eg.util.get_substituted_contents', 'util.get_substituted_contents', (['starting_contents', 'subs'], {}), '(starting_contents, subs)\n', (22323, 22348), False, 'from eg import util\n'), ((22688, 22732), 'eg.substitute.Substitution', 'substitute.Substitution', (['"""foo"""', '"""bar"""', '(False)'], {}), "('foo', 'bar', False)\n", (22711, 22732), False, 'from eg import 
substitute\n'), ((22747, 22796), 'eg.substitute.Substitution', 'substitute.Substitution', (['"""bar\n\n"""', '"""baz\n"""', '(True)'], {}), "('bar\\n\\n', 'baz\\n', True)\n", (22770, 22796), False, 'from eg import substitute\n'), ((22943, 22985), 'eg.util.get_substituted_contents', 'util.get_substituted_contents', (['start', 'subs'], {}), '(start, subs)\n', (22972, 22985), False, 'from eg import util\n'), ((23589, 23644), 'eg.util.get_colorized_contents', 'util.get_colorized_contents', (['raw_contents', 'color_config'], {}), '(raw_contents, color_config)\n', (23616, 23644), False, 'from eg import util\n'), ((24211, 24257), 'eg.util.get_resolved_program', 'util.get_resolved_program', (['program', 'config_obj'], {}), '(program, config_obj)\n', (24236, 24257), False, 'from eg import util\n'), ((25313, 25335), 'json.dumps', 'json.dumps', (['alias_dict'], {}), '(alias_dict)\n', (25323, 25335), False, 'import json\n'), ((26936, 26967), 'eg.util.get_alias_dict', 'util.get_alias_dict', (['config_obj'], {}), '(config_obj)\n', (26955, 26967), False, 'from eg import util\n'), ((27690, 27727), 'eg.util._get_alias_file_path', 'util._get_alias_file_path', (['config_obj'], {}), '(config_obj)\n', (27715, 27727), False, 'from eg import util\n'), ((28023, 28055), 'eg.util._is_example_file', 'util._is_example_file', (['file_name'], {}), '(file_name)\n', (28044, 28055), False, 'from eg import util\n'), ((28260, 28292), 'eg.util._is_example_file', 'util._is_example_file', (['file_name'], {}), '(file_name)\n', (28281, 28292), False, 'from eg import util\n'), ((28542, 28605), 'os.path.join', 'os.path.join', (['config.DEFAULT_EXAMPLES_DIR', 'util.ALIAS_FILE_NAME'], {}), '(config.DEFAULT_EXAMPLES_DIR, util.ALIAS_FILE_NAME)\n', (28554, 28605), False, 'import os\n'), ((28654, 28697), 'eg.util._get_contents_of_file', 'util._get_contents_of_file', (['alias_file_path'], {}), '(alias_file_path)\n', (28680, 28697), False, 'from eg import util\n'), ((28715, 28746), 'json.loads', 'json.loads', 
(['alias_file_contents'], {}), '(alias_file_contents)\n', (28725, 28746), False, 'import json\n'), ((29609, 29651), 'eg.util.edit_custom_examples', 'util.edit_custom_examples', (['program', 'config'], {}), '(program, config)\n', (29634, 29651), False, 'from eg import util\n'), ((30527, 30569), 'eg.util.edit_custom_examples', 'util.edit_custom_examples', (['program', 'config'], {}), '(program, config)\n', (30552, 30569), False, 'from eg import util\n'), ((31506, 31548), 'eg.util.edit_custom_examples', 'util.edit_custom_examples', (['program', 'config'], {}), '(program, config)\n', (31531, 31548), False, 'from eg import util\n'), ((31758, 31800), 'eg.util.edit_custom_examples', 'util.edit_custom_examples', (['program', 'config'], {}), '(program, config)\n', (31783, 31800), False, 'from eg import util\n'), ((2745, 2788), 'eg.util.get_file_paths_for_program', 'util.get_file_paths_for_program', (['"""cp"""', 'None'], {}), "('cp', None)\n", (2776, 2788), False, 'from eg import util\n'), ((10956, 10987), 'mock.patch', 'patch', (['"""eg.util.get_alias_dict"""'], {}), "('eg.util.get_alias_dict')\n", (10961, 10987), False, 'from mock import patch\n'), ((11070, 11117), 'eg.util.get_list_of_all_supported_commands', 'util.get_list_of_all_supported_commands', (['config'], {}), '(config)\n', (11109, 11117), False, 'from eg import util\n'), ((13373, 13411), 'eg.util.page_string', 'util.page_string', (['"""page me plz"""', '"""cat"""'], {}), "('page me plz', 'cat')\n", (13389, 13411), False, 'from eg import util\n'), ((13795, 13832), 'eg.util.page_string', 'util.page_string', (['"""page me plz"""', 'None'], {}), "('page me plz', None)\n", (13811, 13832), False, 'from eg import util\n')] |
def format_card(card_num):
    """
    Normalize a card number by stripping spaces, dashes and any other
    non-digit characters.
    Input: Card number, integer or string
    Output: Correctly formatted card number, string
    """
    import re
    # \D matches every non-digit character; drop them all in one pass.
    digits_only = re.sub(r"\D", "", str(card_num))
    return digits_only
def validate_card(card_num):
    """
    Check if a credit card number is valid using the Luhn algorithm.
    Input: Card number, integer or string
    Output: Valid?, boolean
    """
    digit_sum = 0
    multiplier = 1  # alternates 1, 2, 1, 2, ... starting at the rightmost digit
    for ch in reversed(str(card_num)):
        # Multiply the digit, then add the digits of the product
        # (e.g. 2 * 8 = 16 contributes 1 + 6 = 7).
        digit_sum += sum(int(d) for d in str(multiplier * int(ch)))
        multiplier = 3 - multiplier
    return digit_sum % 10 == 0
| [
"re.sub"
] | [((313, 340), 're.sub', 're.sub', (['"""\\\\D"""', '""""""', 'card_num'], {}), "('\\\\D', '', card_num)\n", (319, 340), False, 'import re\n')] |
from abc import ABC, abstractmethod
from oop_di import ContainerDefinition, Extension
# #############Mailer bounded context###############
class MailerInterface(ABC):
    """Port through which other bounded contexts send mail."""

    @abstractmethod
    def send_mail(self):
        """Send one mail; concrete mailers must implement this."""
class Mailer(MailerInterface):
    """Concrete mailer for the mailer bounded context."""

    def __init__(self, from_email):
        # Sender address; injected by the container via the "from_email" param.
        self.from_email = from_email

    def send_mail(self):
        # Demo implementation: just reports the configured sender on stdout.
        print(f"Sending from {self.from_email}...")
        print("Sent")
class MailExtension(Extension):
    """DI wiring for the mailer bounded context."""

    def define(self):
        # Register the sender-address parameter, then bind the interface to
        # its concrete implementation so other contexts can depend on the port.
        self.add_param("from_email", "<EMAIL>")
        self.add_named_service(MailerInterface, Mailer)
# ############Product bounded context###########
class ProductService:
    """Application service of the product bounded context."""

    def __init__(self, mailer: MailerInterface):
        # Cross-context dependency: the mailer port, injected by the container.
        self.mailer = mailer

    def process_product(self):
        """Process a product, then notify via mail."""
        print("processing product")
        self.mailer.send_mail()
class ProductExtension(Extension):
    """DI wiring for the product bounded context."""

    def define(self):
        # ProductService's constructor dependencies are resolved from its
        # type hints by the container.
        self.add_service(ProductService)
# #################Application
# Compose the container from both bounded contexts and compile it once
# at import time.
container_definition = ContainerDefinition()
container_definition.add_extension(ProductExtension())
container_definition.add_extension(MailExtension())
container = container_definition.compile()
# inject() fills keyword-only parameters from the container; positional
# arguments still come from the caller.
@container.inject()
def process_product_endpoint(something, *, product_service: ProductService):
    print(something)
    product_service.process_product()
process_product_endpoint("doing something before calling product service")
| [
"oop_di.ContainerDefinition"
] | [((1005, 1026), 'oop_di.ContainerDefinition', 'ContainerDefinition', ([], {}), '()\n', (1024, 1026), False, 'from oop_di import ContainerDefinition, Extension\n')] |
#!/usr/bin/env python
"""
nearest_cloud.py - Version 1.0 2013-07-28
Compute the COG of the nearest object in x-y-z space and publish as a PoseStamped message.
Relies on PCL ROS nodelets in the launch file to pre-filter the
cloud on the x, y and z dimensions.
Based on the follower application by <NAME> at:
http://ros.org/wiki/turtlebot_follower
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2013 <NAME>. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy
from roslib import message
from sensor_msgs.msg import PointCloud2
from sensor_msgs import point_cloud2
from geometry_msgs.msg import Point, PoseStamped, Quaternion
from tf.transformations import quaternion_from_euler
import numpy as np
import cv2
from math import pi, radians
class NearestCloud():
    """ROS node that computes the COG of the visible point cloud and
    publishes it as a PoseStamped target."""

    def __init__(self):
        rospy.init_node("nearest_cloud")

        # Tunable parameters.
        # NOTE(review): min_points / z_percentile are read but never used in
        # this class -- presumably consumed elsewhere or leftover; verify.
        self.min_points = rospy.get_param("~min_points", 25)
        self.z_percentile = rospy.get_param("~z_percentile", 100)

        # Define the target publisher
        self.target_pub = rospy.Publisher('target_pose', PoseStamped)

        rospy.Subscriber('point_cloud', PointCloud2, self.get_nearest_cloud)

        # Wait for the pointcloud topic to become available
        rospy.wait_for_message('point_cloud', PointCloud2)

    def get_nearest_cloud(self, msg):
        """Point cloud callback: publish the cloud's COG and an orientation.

        The cloud may be pre-filtered on x/y/z by other nodes (see module
        docstring).
        """
        points = list()
        points_xy = list()

        # Collect every (x, y, z) point and its (x, y) projection.
        for point in point_cloud2.read_points(msg, skip_nans=True):
            points.append(point[:3])
            points_xy.append(point[:2])

        # ROBUSTNESS FIX: an empty cloud would make reshape/mean fail below.
        if not points:
            return

        # Convert to a numpy array and compute the COG.
        points_arr = np.float32(points).reshape(-1, 1, 3)
        cog = np.mean(points_arr, 0)

        # Convert to a Point message.
        cog_point = Point()
        cog_point.x = cog[0][0]
        cog_point.y = cog[0][1]
        cog_point.z = cog[0][2]

        # Abort if we get an NaN in any component
        if np.isnan(np.sum(cog)):
            return

        # Fit a shape around the x-y footprint to estimate an orientation.
        # BUG FIX: points_xy_arr used to be defined only inside the
        # len(points_xy) > 6 branch, so the boundingRect fallback always
        # raised NameError (silently swallowed by the bare except below).
        points_xy_arr = np.float32(points_xy).reshape(-1, 1, 2)
        try:
            if len(points_xy) > 6:
                # Enough points for a best-fit ellipse.
                track_box = cv2.fitEllipse(points_xy_arr)
            else:
                # Otherwise, fall back to a bounding rectangle.
                # NOTE(review): boundingRect returns (x, y, w, h), so index 2
                # is a width, not an angle -- kept as in the original; verify.
                track_box = cv2.boundingRect(points_xy_arr)
            angle = pi - radians(track_box[2])
        except Exception:
            # Shape fitting can fail on degenerate clouds; skip this frame.
            # (Narrowed from a bare except so Ctrl-C still works.)
            return

        # Convert the rotation angle to a quaternion.
        q_angle = quaternion_from_euler(0, angle, 0, axes='sxyz')
        q = Quaternion(*q_angle)
        # NOTE(review): the fitted orientation is then overwritten with a
        # fixed 90-degree pitch quaternion -- q_angle is effectively unused.
        # Kept as-is since downstream consumers may rely on it; confirm intent.
        q.x = 0.707
        q.y = 0
        q.z = 0.707
        q.w = 0

        # Publish the COG and orientation.
        target = PoseStamped()
        target.header.stamp = rospy.Time.now()
        target.header.frame_id = msg.header.frame_id
        target.pose.position = cog_point
        target.pose.orientation = q

        self.target_pub.publish(target)
if __name__ == '__main__':
    try:
        # Constructing the node registers the subscriber; spin until shutdown.
        NearestCloud()
        rospy.spin()
    except rospy.ROSInterruptException:
        rospy.loginfo("Nearest cloud node terminated.")
| [
"rospy.init_node",
"cv2.fitEllipse",
"sensor_msgs.point_cloud2.read_points",
"numpy.mean",
"geometry_msgs.msg.Quaternion",
"rospy.spin",
"rospy.Subscriber",
"rospy.get_param",
"math.radians",
"rospy.Time.now",
"geometry_msgs.msg.Point",
"rospy.Publisher",
"rospy.loginfo",
"rospy.wait_for_m... | [((1422, 1454), 'rospy.init_node', 'rospy.init_node', (['"""nearest_cloud"""'], {}), "('nearest_cloud')\n", (1437, 1454), False, 'import rospy\n'), ((1490, 1524), 'rospy.get_param', 'rospy.get_param', (['"""~min_points"""', '(25)'], {}), "('~min_points', 25)\n", (1505, 1524), False, 'import rospy\n'), ((1553, 1590), 'rospy.get_param', 'rospy.get_param', (['"""~z_percentile"""', '(100)'], {}), "('~z_percentile', 100)\n", (1568, 1590), False, 'import rospy\n'), ((1657, 1700), 'rospy.Publisher', 'rospy.Publisher', (['"""target_pose"""', 'PoseStamped'], {}), "('target_pose', PoseStamped)\n", (1672, 1700), False, 'import rospy\n'), ((1718, 1786), 'rospy.Subscriber', 'rospy.Subscriber', (['"""point_cloud"""', 'PointCloud2', 'self.get_nearest_cloud'], {}), "('point_cloud', PointCloud2, self.get_nearest_cloud)\n", (1734, 1786), False, 'import rospy\n'), ((1864, 1914), 'rospy.wait_for_message', 'rospy.wait_for_message', (['"""point_cloud"""', 'PointCloud2'], {}), "('point_cloud', PointCloud2)\n", (1886, 1914), False, 'import rospy\n'), ((2129, 2174), 'sensor_msgs.point_cloud2.read_points', 'point_cloud2.read_points', (['msg'], {'skip_nans': '(True)'}), '(msg, skip_nans=True)\n', (2153, 2174), False, 'from sensor_msgs import point_cloud2\n'), ((2423, 2445), 'numpy.mean', 'np.mean', (['points_arr', '(0)'], {}), '(points_arr, 0)\n', (2430, 2445), True, 'import numpy as np\n'), ((2504, 2511), 'geometry_msgs.msg.Point', 'Point', ([], {}), '()\n', (2509, 2511), False, 'from geometry_msgs.msg import Point, PoseStamped, Quaternion\n'), ((3368, 3415), 'tf.transformations.quaternion_from_euler', 'quaternion_from_euler', (['(0)', 'angle', '(0)'], {'axes': '"""sxyz"""'}), "(0, angle, 0, axes='sxyz')\n", (3389, 3415), False, 'from tf.transformations import quaternion_from_euler\n'), ((3428, 3448), 'geometry_msgs.msg.Quaternion', 'Quaternion', (['*q_angle'], {}), '(*q_angle)\n', (3438, 3448), False, 'from geometry_msgs.msg import Point, PoseStamped, Quaternion\n'), 
((3590, 3603), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (3601, 3603), False, 'from geometry_msgs.msg import Point, PoseStamped, Quaternion\n'), ((3634, 3650), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (3648, 3650), False, 'import rospy\n'), ((3968, 3980), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3978, 3980), False, 'import rospy\n'), ((2715, 2726), 'numpy.sum', 'np.sum', (['cog'], {}), '(cog)\n', (2721, 2726), True, 'import numpy as np\n'), ((4029, 4076), 'rospy.loginfo', 'rospy.loginfo', (['"""Nearest cloud node terminated."""'], {}), "('Nearest cloud node terminated.')\n", (4042, 4076), False, 'import rospy\n'), ((2322, 2353), 'numpy.float32', 'np.float32', (['[p for p in points]'], {}), '([p for p in points])\n', (2332, 2353), True, 'import numpy as np\n'), ((2994, 3023), 'cv2.fitEllipse', 'cv2.fitEllipse', (['points_xy_arr'], {}), '(points_xy_arr)\n', (3008, 3023), False, 'import cv2\n'), ((3131, 3162), 'cv2.boundingRect', 'cv2.boundingRect', (['points_xy_arr'], {}), '(points_xy_arr)\n', (3147, 3162), False, 'import cv2\n'), ((3201, 3222), 'math.radians', 'radians', (['track_box[2]'], {}), '(track_box[2])\n', (3208, 3222), False, 'from math import pi, radians\n'), ((2911, 2945), 'numpy.float32', 'np.float32', (['[p for p in points_xy]'], {}), '([p for p in points_xy])\n', (2921, 2945), True, 'import numpy as np\n')] |
import lvgl as lv
import utime
# RESOURCES_ROOT = "S:/Users/liujuncheng/workspace/iot/esp32/solution/HaaSPython/solutions/smart_panel/"
RESOURCES_ROOT = "S:/data/pyamp/"
def drawOver(e):
    """DRAW_POST_END callback: log how long the page took to build/render,
    once per page creation (g_clickTime is reset afterwards)."""
    global g_clickTime
    if g_clickTime == 0:
        return
    now = utime.ticks_ms()
    print("create Environment page use: %dms" % int(now - g_clickTime))
    g_clickTime = 0
environment_alive = False
def environment_back_click_callback(e, win):
    """Back-arrow click: return to the smart panel home page (once)."""
    global environment_alive
    if not environment_alive:
        return
    from smart_panel import load_smart_panel
    load_smart_panel()
    environment_alive = False
def environment_back_press_callback(e, back_image):
    """Pressed-state feedback: enlarge the back arrow slightly."""
    pressed_zoom = 280
    back_image.set_zoom(pressed_zoom)
def environment_back_release_callback(e, back_image):
    """Release feedback: restore the back arrow to its resting zoom."""
    resting_zoom = 250
    back_image.set_zoom(resting_zoom)
class Environment:
    """lvgl page showing temperature and humidity readings, with a back
    button returning to the smart panel home screen."""

    def createPage(self):
        """Build the page widgets, load the screen and log build time."""
        global environment_alive
        global g_clickTime
        # Timestamp used by drawOver() to report total page-creation time.
        g_clickTime = utime.ticks_ms()

        # init scr
        scr = lv.obj()

        # Full-screen black window hosting all page content.
        win = lv.obj(scr)
        win.set_size(scr.get_width(), scr.get_height())
        win.set_style_border_opa(0, 0)
        win.set_style_bg_color(lv.color_black(), 0)
        win.set_style_radius(0, 0)
        win.clear_flag(lv.obj.FLAG.SCROLLABLE)
        # Fires once rendering finishes so drawOver() can log the duration.
        win.add_event_cb(drawOver, lv.EVENT.DRAW_POST_END, None)

        # Back arrow with press/release zoom feedback and a click handler
        # that navigates back to the smart panel.
        backImg=lv.img(win)
        backImg.set_src(RESOURCES_ROOT + "images/back.png")
        backImg.set_style_align(lv.ALIGN.LEFT_MID, 0)
        backImg.add_flag(lv.obj.FLAG.CLICKABLE)
        backImg.add_event_cb(lambda e: environment_back_click_callback(e, win), lv.EVENT.CLICKED, None)
        backImg.add_event_cb(lambda e: environment_back_press_callback(e, backImg), lv.EVENT.PRESSED, None)
        backImg.add_event_cb(lambda e: environment_back_release_callback(e, backImg), lv.EVENT.RELEASED, None)
        backImg.set_ext_click_area(20)

        # Centered vertical flex container for the measurement rows.
        container = lv.obj(win)
        container.set_style_bg_opa(0, 0)
        container.set_style_border_opa(0, 0)
        container.set_size(lv.SIZE.CONTENT, lv.SIZE.CONTENT)
        container.set_flex_flow(lv.FLEX_FLOW.COLUMN)
        container.set_style_align(lv.ALIGN.CENTER, 0)
        container.set_style_pad_left(0, 0)

        # NOTE(review): readings are hard-coded ("25", "41 %") -- presumably
        # placeholders until sensor data is wired in; verify.
        self.createItem(container, RESOURCES_ROOT + "images/temperature.png", "25",
                        RESOURCES_ROOT + "images/centigrade_l.png", "Temperature")
        self.createInterval(container, 25)
        self.createItem(container, RESOURCES_ROOT + "images/humidity.png", "41 %", "", "Humidity")

        # Slide-in animation only when the panel has it enabled.
        from smart_panel import needAnimation
        if (needAnimation):
            lv.scr_load_anim(scr, lv.SCR_LOAD_ANIM.MOVE_LEFT, 500, 0, True)
        else:
            lv.scr_load_anim(scr, lv.SCR_LOAD_ANIM.NONE, 0, 0, True)

        environment_alive = True

        currentTime = utime.ticks_ms()
        print("run python code use: %dms" % int((currentTime - g_clickTime)))

    def createItem(self, parent, iconPath, value, unityPath, tips):
        """Add one measurement row: icon | value [unit] over a caption.

        unityPath may be "" to omit the unit image (e.g. for humidity).
        """
        # Grid: icon | 5px gap | value | optional unit; two rows
        # (value on top, caption below spanning the value/unit columns).
        col_dsc = [lv.GRID.CONTENT, 5, lv.GRID.CONTENT, lv.GRID.CONTENT, lv.GRID_TEMPLATE.LAST]
        row_dsc = [lv.GRID.CONTENT, lv.GRID.CONTENT, lv.GRID_TEMPLATE.LAST]

        cont = lv.obj(parent)
        cont.set_style_bg_opa(0, 0)
        cont.set_style_border_opa(0, 0)
        cont.set_style_pad_all(0, 0)
        cont.set_size(lv.SIZE.CONTENT, lv.SIZE.CONTENT)
        cont.set_style_grid_column_dsc_array(col_dsc, 0)
        cont.set_style_grid_row_dsc_array(row_dsc, 0)
        cont.set_layout(lv.LAYOUT_GRID.value)

        # Measurement icon spans both rows in the first column.
        img = lv.img(cont)
        img.set_src(iconPath)
        img.set_grid_cell(lv.GRID_ALIGN.START, 0, 1, lv.GRID_ALIGN.CENTER, 0, 2)

        # Large white value label.
        label = lv.label(cont)
        label.set_text(value)
        label.set_style_text_color(lv.color_white(), 0)
        label.set_style_text_font(lv.font_montserrat_48, 0)
        label.set_style_pad_all(0, 0)
        label.set_grid_cell(lv.GRID_ALIGN.START, 2, 1, lv.GRID_ALIGN.CENTER, 0, 1)

        # Optional unit image (skipped when unityPath is blank).
        if (unityPath.strip()):
            iconImg = lv.img(cont)
            iconImg.set_src(unityPath)
            iconImg.set_zoom(205)
            iconImg.set_style_pad_bottom(0, 0)
            iconImg.set_grid_cell(lv.GRID_ALIGN.START, 3, 1, lv.GRID_ALIGN.CENTER, 0, 1)

        # Grey caption under the value, spanning value + unit columns.
        tip = lv.label(cont)
        tip.set_text(tips)
        tip.set_style_text_color(lv.color_make(0xCC, 0xCC, 0xCC), 0)
        tip.set_grid_cell(lv.GRID_ALIGN.START, 2, 2, lv.GRID_ALIGN.START, 1, 1)

    def createInterval(self, parent, size):
        """Insert an invisible vertical spacer of `size` pixels."""
        interval = lv.obj(parent)
        interval.set_style_bg_opa(0, 0)
        interval.set_style_border_opa(0, 0)
        interval.set_height(size)
        interval.set_width(0)
| [
"lvgl.color_make",
"lvgl.scr_load_anim",
"lvgl.label",
"lvgl.img",
"lvgl.color_black",
"smart_panel.load_smart_panel",
"utime.ticks_ms",
"lvgl.obj",
"lvgl.color_white"
] | [((262, 278), 'utime.ticks_ms', 'utime.ticks_ms', ([], {}), '()\n', (276, 278), False, 'import utime\n'), ((577, 595), 'smart_panel.load_smart_panel', 'load_smart_panel', ([], {}), '()\n', (593, 595), False, 'from smart_panel import load_smart_panel\n'), ((925, 941), 'utime.ticks_ms', 'utime.ticks_ms', ([], {}), '()\n', (939, 941), False, 'import utime\n'), ((976, 984), 'lvgl.obj', 'lv.obj', ([], {}), '()\n', (982, 984), True, 'import lvgl as lv\n'), ((1000, 1011), 'lvgl.obj', 'lv.obj', (['scr'], {}), '(scr)\n', (1006, 1011), True, 'import lvgl as lv\n'), ((1323, 1334), 'lvgl.img', 'lv.img', (['win'], {}), '(win)\n', (1329, 1334), True, 'import lvgl as lv\n'), ((1880, 1891), 'lvgl.obj', 'lv.obj', (['win'], {}), '(win)\n', (1886, 1891), True, 'import lvgl as lv\n'), ((2781, 2797), 'utime.ticks_ms', 'utime.ticks_ms', ([], {}), '()\n', (2795, 2797), False, 'import utime\n'), ((3133, 3147), 'lvgl.obj', 'lv.obj', (['parent'], {}), '(parent)\n', (3139, 3147), True, 'import lvgl as lv\n'), ((3489, 3501), 'lvgl.img', 'lv.img', (['cont'], {}), '(cont)\n', (3495, 3501), True, 'import lvgl as lv\n'), ((3630, 3644), 'lvgl.label', 'lv.label', (['cont'], {}), '(cont)\n', (3638, 3644), True, 'import lvgl as lv\n'), ((4204, 4218), 'lvgl.label', 'lv.label', (['cont'], {}), '(cont)\n', (4212, 4218), True, 'import lvgl as lv\n'), ((4460, 4474), 'lvgl.obj', 'lv.obj', (['parent'], {}), '(parent)\n', (4466, 4474), True, 'import lvgl as lv\n'), ((1138, 1154), 'lvgl.color_black', 'lv.color_black', ([], {}), '()\n', (1152, 1154), True, 'import lvgl as lv\n'), ((2578, 2641), 'lvgl.scr_load_anim', 'lv.scr_load_anim', (['scr', 'lv.SCR_LOAD_ANIM.MOVE_LEFT', '(500)', '(0)', '(True)'], {}), '(scr, lv.SCR_LOAD_ANIM.MOVE_LEFT, 500, 0, True)\n', (2594, 2641), True, 'import lvgl as lv\n'), ((2668, 2724), 'lvgl.scr_load_anim', 'lv.scr_load_anim', (['scr', 'lv.SCR_LOAD_ANIM.NONE', '(0)', '(0)', '(True)'], {}), '(scr, lv.SCR_LOAD_ANIM.NONE, 0, 0, True)\n', (2684, 2724), True, 'import lvgl as 
lv\n'), ((3710, 3726), 'lvgl.color_white', 'lv.color_white', ([], {}), '()\n', (3724, 3726), True, 'import lvgl as lv\n'), ((3967, 3979), 'lvgl.img', 'lv.img', (['cont'], {}), '(cont)\n', (3973, 3979), True, 'import lvgl as lv\n'), ((4279, 4307), 'lvgl.color_make', 'lv.color_make', (['(204)', '(204)', '(204)'], {}), '(204, 204, 204)\n', (4292, 4307), True, 'import lvgl as lv\n')] |
import logging
import os
import threading
from xml.etree import ElementTree # nosec
import xbmc
from lib import routes # noqa
from lib.httpserver import threaded_http_server
from lib.kodi import ADDON_PATH, get_repository_port, set_logger
def update_repository_port(port, xml_path=os.path.join(ADDON_PATH, "addon.xml")):
    """Rewrite the repository URLs inside addon.xml so they point at the
    local HTTP server listening on `port`.

    NOTE(review): the .find() path strings below appear corrupted by an
    anonymization pass ("<EMAIL>" placeholders); they were presumably XPath
    expressions selecting the repository extension's info / checksum /
    datadir elements -- verify against the original addon.xml schema.
    NOTE: the default xml_path is evaluated once at import time.
    """
    base_url = "http://127.0.0.1:{}/".format(port)
    tree = ElementTree.parse(xml_path)
    tree.find("<EMAIL>='<EMAIL>']/info").text = base_url + "addons.xml"
    tree.find("<EMAIL>='<EMAIL>']/checksum").text = base_url + "addons.xml.md5"
    tree.find("<EMAIL>='<EMAIL>']/datadir").text = base_url
    tree.write(xml_path, encoding="UTF-8", xml_declaration=True)
class ServiceMonitor(xbmc.Monitor):
    """Watches addon settings and rewrites addon.xml when the configured
    repository port changes."""

    def __init__(self, port):
        super(ServiceMonitor, self).__init__()
        self._port = port

    def onSettingsChanged(self):
        # Only touch addon.xml when the port actually moved.
        new_port = get_repository_port()
        if new_port == self._port:
            return
        update_repository_port(new_port)
        self._port = new_port
class HTTPServerRunner(threading.Thread):
    """Runs the threaded HTTP server on a background thread.

    Usable as a context manager: entering starts the thread, exiting shuts
    the server down and joins the thread.
    """

    def __init__(self, port):
        self._port = port
        self._server = None
        super(HTTPServerRunner, self).__init__()

    def run(self):
        self._server = server = threaded_http_server("", self._port)
        logging.debug("Server started at port %d", self._port)
        # Blocks until shutdown() is called from another thread.
        server.serve_forever()
        logging.debug("Closing server")
        server.server_close()
        logging.debug("Server terminated")

    def stop(self):
        # Idempotent: safe to call when the server never started.
        server, self._server = self._server, None
        if server is not None:
            server.shutdown()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop()
        self.join()
        return False
def run():
    """Service entry point: configure logging, serve the repository over
    HTTP, and block until Kodi requests shutdown."""
    set_logger()
    port = get_repository_port()
    with HTTPServerRunner(port):
        # waitForAbort() blocks until Kodi aborts; settings changes are
        # handled by ServiceMonitor callbacks meanwhile.
        ServiceMonitor(port).waitForAbort()
| [
"xml.etree.ElementTree.parse",
"logging.debug",
"lib.kodi.get_repository_port",
"os.path.join",
"lib.kodi.set_logger",
"lib.httpserver.threaded_http_server"
] | [((287, 324), 'os.path.join', 'os.path.join', (['ADDON_PATH', '"""addon.xml"""'], {}), "(ADDON_PATH, 'addon.xml')\n", (299, 324), False, 'import os\n'), ((389, 416), 'xml.etree.ElementTree.parse', 'ElementTree.parse', (['xml_path'], {}), '(xml_path)\n', (406, 416), False, 'from xml.etree import ElementTree\n'), ((1804, 1816), 'lib.kodi.set_logger', 'set_logger', ([], {}), '()\n', (1814, 1816), False, 'from lib.kodi import ADDON_PATH, get_repository_port, set_logger\n'), ((1828, 1849), 'lib.kodi.get_repository_port', 'get_repository_port', ([], {}), '()\n', (1847, 1849), False, 'from lib.kodi import ADDON_PATH, get_repository_port, set_logger\n'), ((884, 905), 'lib.kodi.get_repository_port', 'get_repository_port', ([], {}), '()\n', (903, 905), False, 'from lib.kodi import ADDON_PATH, get_repository_port, set_logger\n'), ((1237, 1273), 'lib.httpserver.threaded_http_server', 'threaded_http_server', (['""""""', 'self._port'], {}), "('', self._port)\n", (1257, 1273), False, 'from lib.httpserver import threaded_http_server\n'), ((1282, 1336), 'logging.debug', 'logging.debug', (['"""Server started at port %d"""', 'self._port'], {}), "('Server started at port %d', self._port)\n", (1295, 1336), False, 'import logging\n'), ((1376, 1407), 'logging.debug', 'logging.debug', (['"""Closing server"""'], {}), "('Closing server')\n", (1389, 1407), False, 'import logging\n'), ((1446, 1480), 'logging.debug', 'logging.debug', (['"""Server terminated"""'], {}), "('Server terminated')\n", (1459, 1480), False, 'import logging\n')] |
import torch
from data import get_diff
import editdistance
import re
from data import chars
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import matplotlib.pyplot as plt
from matplotlib import pylab
import numpy as np
# Inverse vocabulary lookup: character -> index (chars comes from data.py).
char_to_idx = {ch: i for i, ch in enumerate(chars)}
# All tensors in this module are moved to this device.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
torch.manual_seed(0)  # for reproducibility
torch.backends.cudnn.deterministic = True  # for reproducibility
torch.backends.cudnn.benchmark = False  # disable cudnn autotuner (determinism)
def bashRun(args):
    """
    When bash running, convert string arguments to an appropriate type.

    Bash passes every option as a string, so numeric options are cast to
    float/int (only when they are still strings, matching the original
    per-field checks) and flag options are normalized to real booleans:
    exactly the string "True" (or the value True, since str(True) == 'True')
    maps to True, everything else to False.

    `args` is mutated in place and also returned for convenience.
    """
    # Learning-rate-style options: floats.
    for name in ("lr", "gru_lr_down", "step_lr"):
        value = getattr(args, name)
        if type(value) is str:
            setattr(args, name, float(value))
    # Count-style options: ints.
    for name in ("seed", "test_seed", "epoch", "case", "nhid", "nhead",
                 "nlayers", "batch_size"):
        value = getattr(args, name)
        if type(value) is str:
            setattr(args, name, int(value))
    # Flag options: always normalized, regardless of current type.
    for name in ("excessive_output", "intermediate_loss", "augment",
                 "multi_gpu"):
        setattr(args, name, str(getattr(args, name)) == 'True')
    return args
def evaluate(dataloader, predictor, criterion, args, state='val', pretrained=None, print_result=False):
    """
    compute and print loss and accuracy
    If long_term or semantic, print (input, prediction, label) pair.

    Returns (avg_loss, accuracy). Accuracy is only meaningful when
    args.various_length is True; otherwise `total` stays 0 and the
    `accuracy` division below would fail -- NOTE(review): verify callers
    always set various_length.
    """
    if pretrained is not None:
        # Evaluate a copy of the pretrained weights instead of `predictor`'s own.
        predictor.load_state_dict(pretrained.state_dict())
    predictor.eval()
    total = 0.0
    correct = 0
    step = 0
    total_loss = 0.
    accuracy = 0
    semantic = True  # NOTE(review): always True here; gates the sample printout below
    with torch.no_grad():
        for x_batch, y_batch, input_len in dataloader:
            step += 1
            x_batch = x_batch.to(device)
            y_batch = y_batch.to(device)
            if args.sa_ncd:
                # Model additionally returns statistics and hidden states.
                output_stat, output, all_hidden_states = predictor(x_batch, input_len)
            else:
                output = predictor(x_batch, input_len)
            if args.ikeyboard:
                # ikeyboard models return a pair of outputs shaped
                # [batch, length, vocab]; loss sums both heads, accuracy
                # uses the second. permute -> [batch, vocab, length] for CE.
                loss = criterion(output[1].permute(0, 2, 1), y_batch)
                loss += criterion(output[0].permute(0, 2, 1), y_batch)
                _, top_k = torch.topk(output[1].permute(0, 2, 1), 3, dim=1)
                top1_predicted = top_k[:, 0, :]
                top2_predicted = top_k[:, 1, :]
                top3_predicted = top_k[:, 2, :]
            else:
                # Single-head models already emit [batch, vocab, length].
                loss = criterion(output, y_batch)
                _, top_k = torch.topk(output, 3, dim=1)
                top1_predicted = top_k[:, 0, :]
                top2_predicted = top_k[:, 1, :]
                top3_predicted = top_k[:, 2, :]
            if args.various_length:
                # Count correct characters only over the true (unpadded)
                # lengths by packing the elementwise-correct mask.
                total += sum(input_len)
                correct_packed = pack_padded_sequence(top1_predicted == y_batch, input_len, batch_first=True,
                                                     enforce_sorted=False)
                correct += correct_packed.data.sum()
            else:
                pass
            accuracy = 100 * correct / total
            total_loss += loss
    avg_loss = total_loss / step
    if print_result:
        if semantic:
            # Print one (input, label, top-k prediction) sample from the
            # last batch; x_batch may carry extra feature dims (rank 3).
            if len(x_batch.size()) == 3:
                print("Input typo:" + idx2chars(x_batch[0, :, 0].long()))
            else:
                print("Input typo:" + idx2chars(x_batch[0, :].long()))
            pred_idx = top1_predicted[0]
            label = y_batch[0]
            print("original sentence:" + idx2chars(label))
            print("top1_predicted sentence:" + idx2chars(pred_idx))
            print("top2_predicted sentence:" + idx2chars(top2_predicted[0]))
            print("top3_predicted sentence:" + idx2chars(top3_predicted[0]) + '\n')
    if print_result:
        print('Accuracy on the ' + state + ' data: {:2.2f} % \n'.format(accuracy))
    return avg_loss, accuracy
def three_hot_encoder(output, cls=False):
    """Encode each position's top-3 predictions as a 3-hot tensor.

    Args:
        output: logit tensor of shape [batch_size, vocab_size, length].
        cls: when True, drop the first position along the sequence axis
            (e.g. a leading CLS token) before encoding.

    Returns:
        LongTensor of shape [batch_size, length, vocab_size]: the sum of the
        one-hot encodings of the top-1/2/3 predicted vocabulary indices, so
        each position holds 1 at exactly three slots.
    """
    if cls:
        # Skip the leading token on the sequence axis.
        output = output[:, :, 1:]
    # Indices of the three largest logits per position: [batch, 3, length].
    _, top_indices = torch.topk(output, 3, dim=1)
    # Broadcasting (vocab,) against [batch, length, 1] yields a one-hot
    # tensor of shape [batch, length, vocab] per rank; summing the three
    # gives the 3-hot encoding. (An unused zeros tensor allocated by the
    # original implementation has been removed.)
    vocab_range = torch.arange(output.shape[1])
    return sum(
        (vocab_range == top_indices[:, k, :][..., None]).long() for k in range(3)
    )
def save_model(state_dict_model, path):
    """Persist a state_dict to `path` using the legacy (non-zipfile) format
    so the checkpoint stays loadable by older PyTorch versions."""
    torch.save(state_dict_model, path, _use_new_zipfile_serialization=False)
def load_model(init_model, path, evaluate_mode=False):
    """Load weights from `path` into `init_model` and return the model.

    On CUDA machines tensors are restored to their saved device; otherwise
    everything is remapped onto the CPU. With evaluate_mode=True the model
    is switched to eval() (freezes dropout / batch-norm behavior).
    """
    if torch.cuda.is_available():
        state = torch.load(path)
    else:
        state = torch.load(path, map_location=torch.device('cpu'))
    init_model.load_state_dict(state)
    if evaluate_mode:
        init_model.eval()
    return init_model
def idx2chars(indices):
    """Map a sequence of vocabulary indices back to its character string
    (inverse of char_to_idx, using the module-level `chars` table)."""
    return ''.join(chars[idx] for idx in indices)
def remove_non_silence_noises(input_text):
    """
    Removes non_silence noises from a transcript

    Filler tokens (um, ah, ...) and boundary markers are deleted in place;
    surrounding whitespace is left untouched.
    """
    noise_words = ["noise", "um", "ah", "er", "umm", "uh", "mm", "mn", "mhm", "mnh", "<START>", "<END>"]
    # Word boundaries keep e.g. "umbrella" intact while matching bare "um".
    pattern = re.compile(r"\b(" + "|".join(noise_words) + r")\b")
    return pattern.sub('', input_text)
def wer(ref, hyp, remove_nsns=False):
    """Word-level edit distance between two transcripts.

    Strips a leading ``<START>`` / trailing ``<EOS>`` marker and,
    optionally, non-silence noise words before comparing.

    Parameters
    ----------
    ref, hyp : str
        Reference and hypothesis transcripts.
    remove_nsns : bool
        If True, strip non-silence noise words first.

    Returns
    -------
    tuple(int, int)
        ``(word edit distance, number of words in the reference)`` so the
        caller can compute WER as ``100 * distance / length``.
        (The original docstring showed a doctest returning ``25.0``, which
        this function never did -- it has always returned a tuple.)
    """
    ref = re.sub('^<START>|<EOS>$', '', ref)
    hyp = re.sub('^<START>|<EOS>$', '', hyp)
    if remove_nsns:
        ref = remove_non_silence_noises(ref)
        hyp = remove_non_silence_noises(hyp)
    # Split once; the original split the reference twice.
    ref_words = ref.split(' ')
    hyp_words = hyp.split(' ')
    return editdistance.eval(ref_words, hyp_words), len(ref_words)
def cer(ref, hyp, remove_nsns=False):
    """Character-level edit distance between two transcripts.

    Strips a leading ``<START>`` / trailing ``<EOS>`` marker and,
    optionally, non-silence noise words before comparing.

    Returns
    -------
    tuple(int, int)
        ``(character edit distance, number of characters in the
        reference)`` so the caller can compute CER as
        ``100 * distance / length``.
        (The original docstring showed a doctest returning ``25.0``, which
        this function never did -- it has always returned a tuple.)
    """
    ref = re.sub('^<START>|<EOS>$', '', ref)
    hyp = re.sub('^<START>|<EOS>$', '', hyp)
    if remove_nsns:
        ref = remove_non_silence_noises(ref)
        hyp = remove_non_silence_noises(hyp)
    return editdistance.eval(ref, hyp), len(ref)
def test_plot(x_batch, label, save_path):
    """Scatter-plot trajectory points grouped by character and save the figure.

    Assumes ``x_batch[0, i]`` rows carry (.., x, y, width, height) features
    at indices 1-4 -- TODO confirm feature layout with the data loader.
    """
    xs_by_char = {}
    ys_by_char = {}
    width = x_batch[0, 0, 3]
    height = x_batch[0, 0, 4]
    for pos, char in enumerate(idx2chars(label)):
        xs_by_char.setdefault(char, []).append(float(x_batch[0][pos][1] * width * 100))
        ys_by_char.setdefault(char, []).append(-float(x_batch[0][pos][2] * height * 100))
    fig, ax = plt.subplots()
    # One small-dot scatter series per character...
    for char in xs_by_char:
        plt.scatter(xs_by_char[char], ys_by_char[char], s=2)
    # ...then mark each character's centroid in black...
    for char in xs_by_char:
        plt.scatter(np.mean(xs_by_char[char]), np.mean(ys_by_char[char]), s=10, c='black')
    # ...and label the centroid with the character itself.
    for char in xs_by_char:
        plt.text(np.mean(xs_by_char[char]) + 5, np.mean(ys_by_char[char]) + 5, char, weight='bold')
    fig.savefig(save_path)
    plt.show()
    plt.close(fig)
if __name__ == "__main__":
    # Quick manual smoke check of the word-error-rate helper.
    edit_dist, ref_len = wer('I am a boy but you are a girl.', 'I as d d s a a asdfg fga sd you are s girl.', True)
    print(edit_dist)
    print(ref_len)
| [
"torch.manual_seed",
"numpy.mean",
"torch.device",
"torch.topk",
"torch.load",
"matplotlib.pyplot.close",
"torch.no_grad",
"torch.cuda.is_available",
"matplotlib.pyplot.subplots",
"torch.save",
"matplotlib.pyplot.scatter",
"torch.nn.utils.rnn.pack_padded_sequence",
"re.sub",
"editdistance.... | [((371, 391), 'torch.manual_seed', 'torch.manual_seed', (['(0)'], {}), '(0)\n', (388, 391), False, 'import torch\n'), ((4898, 4923), 'torch.zeros', 'torch.zeros', (['output.shape'], {}), '(output.shape)\n', (4909, 4923), False, 'import torch\n'), ((4939, 4967), 'torch.topk', 'torch.topk', (['output', '(3)'], {'dim': '(1)'}), '(output, 3, dim=1)\n', (4949, 4967), False, 'import torch\n'), ((5412, 5484), 'torch.save', 'torch.save', (['state_dict_model', 'path'], {'_use_new_zipfile_serialization': '(False)'}), '(state_dict_model, path, _use_new_zipfile_serialization=False)\n', (5422, 5484), False, 'import torch\n'), ((5549, 5574), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5572, 5574), False, 'import torch\n'), ((6372, 6417), 're.sub', 're.sub', (['re_non_silence_noises', '""""""', 'input_text'], {}), "(re_non_silence_noises, '', input_text)\n", (6378, 6417), False, 'import re\n'), ((6744, 6778), 're.sub', 're.sub', (['"""^<START>|<EOS>$"""', '""""""', 'ref'], {}), "('^<START>|<EOS>$', '', ref)\n", (6750, 6778), False, 'import re\n'), ((6789, 6823), 're.sub', 're.sub', (['"""^<START>|<EOS>$"""', '""""""', 'hyp'], {}), "('^<START>|<EOS>$', '', hyp)\n", (6795, 6823), False, 'import re\n'), ((7366, 7400), 're.sub', 're.sub', (['"""^<START>|<EOS>$"""', '""""""', 'ref'], {}), "('^<START>|<EOS>$', '', ref)\n", (7372, 7400), False, 'import re\n'), ((7411, 7445), 're.sub', 're.sub', (['"""^<START>|<EOS>$"""', '""""""', 'hyp'], {}), "('^<START>|<EOS>$', '', hyp)\n", (7417, 7445), False, 'import re\n'), ((8133, 8147), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (8145, 8147), True, 'import matplotlib.pyplot as plt\n'), ((9452, 9462), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9460, 9462), True, 'import matplotlib.pyplot as plt\n'), ((9467, 9481), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (9476, 9481), True, 'import matplotlib.pyplot as plt\n'), ((331, 356), 
'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (354, 356), False, 'import torch\n'), ((2423, 2438), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2436, 2438), False, 'import torch\n'), ((7651, 7678), 'editdistance.eval', 'editdistance.eval', (['ref', 'hyp'], {}), '(ref, hyp)\n', (7668, 7678), False, 'import editdistance\n'), ((8748, 8784), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x_dic[c]', 'y_dic[c]'], {'s': '(2)'}), '(x_dic[c], y_dic[c], s=2)\n', (8759, 8784), True, 'import matplotlib.pyplot as plt\n'), ((5611, 5627), 'torch.load', 'torch.load', (['path'], {}), '(path)\n', (5621, 5627), False, 'import torch\n'), ((8867, 8884), 'numpy.mean', 'np.mean', (['x_dic[d]'], {}), '(x_dic[d])\n', (8874, 8884), True, 'import numpy as np\n'), ((8886, 8903), 'numpy.mean', 'np.mean', (['y_dic[d]'], {}), '(y_dic[d])\n', (8893, 8903), True, 'import numpy as np\n'), ((3274, 3302), 'torch.topk', 'torch.topk', (['output', '(3)'], {'dim': '(1)'}), '(output, 3, dim=1)\n', (3284, 3302), False, 'import torch\n'), ((3557, 3659), 'torch.nn.utils.rnn.pack_padded_sequence', 'pack_padded_sequence', (['(top1_predicted == y_batch)', 'input_len'], {'batch_first': '(True)', 'enforce_sorted': '(False)'}), '(top1_predicted == y_batch, input_len, batch_first=True,\n enforce_sorted=False)\n', (3577, 3659), False, 'from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n'), ((5090, 5119), 'torch.arange', 'torch.arange', (['output.shape[1]'], {}), '(output.shape[1])\n', (5102, 5119), False, 'import torch\n'), ((5173, 5202), 'torch.arange', 'torch.arange', (['output.shape[1]'], {}), '(output.shape[1])\n', (5185, 5202), False, 'import torch\n'), ((5256, 5285), 'torch.arange', 'torch.arange', (['output.shape[1]'], {}), '(output.shape[1])\n', (5268, 5285), False, 'import torch\n'), ((9191, 9211), 'numpy.mean', 'np.mean', (['x_dic[char]'], {}), '(x_dic[char])\n', (9198, 9211), True, 'import numpy as np\n'), ((9217, 9237), 'numpy.mean', 'np.mean', 
(['y_dic[char]'], {}), '(y_dic[char])\n', (9224, 9237), True, 'import numpy as np\n'), ((5704, 5723), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5716, 5723), False, 'import torch\n')] |
"""
A simple stream constructor that constructs a Stream by evaluating
parameter substitutions from a dictionary parameters. Finds tokens of the form
\{\{([a-zA-Z_-][\w-]*)\}\}
and replaces {{var}} with the contents of gettattr(parameters, var) in
the new stream.
"""
import re
from StringIO import StringIO
class TemplatedStream(StringIO):
    """
    StringIO stream that expands template parameters of the form {{var}}
    """
    dbrace_re = re.compile(r'\{\{([a-zA-Z_][\w-]*)\}\}')

    def __init__(self, stream, parameters):
        """
        Build the stream by expanding every ``{{var}}`` token found in
        *stream* with ``str(parameters[var])``.  Tokens whose variable is
        not defined in *parameters* are silently replaced with the empty
        string.
        """
        StringIO.__init__(self)

        def expand(match):
            name = match.group(1)
            if name not in parameters:
                return ''
            # Values may occasionally be non-string objects; stringify
            # them -- they get re-run through the YAML parser anyway.
            return str(parameters[name])

        for line in stream:
            self.write(self.dbrace_re.sub(expand, line))
        self.seek(0)
| [
"StringIO.StringIO.__init__",
"re.compile"
] | [((451, 495), 're.compile', 're.compile', (['"""\\\\{\\\\{([a-zA-Z_][\\\\w-]*)\\\\}\\\\}"""'], {}), "('\\\\{\\\\{([a-zA-Z_][\\\\w-]*)\\\\}\\\\}')\n", (461, 495), False, 'import re\n'), ((787, 810), 'StringIO.StringIO.__init__', 'StringIO.__init__', (['self'], {}), '(self)\n', (804, 810), False, 'from StringIO import StringIO\n')] |
import pyxel
import constants as c
import random
class Gachi:
    """A sprite with a jittered draw routine and a four-segment HP bar."""

    def __init__(self, x, y):
        self.x = x
        self.y = y
        # Candidate blit widths/heights; one of each is sampled per frame
        # so the sprite occasionally flips/flickers.
        self.x_side = [-16, 16, 16, 16, 16, 16]
        self.y_side = [16, -16, 16, 16, 16, 16, 16, 16, 16, 16]
        self.hp = c.gachi_hp

    def draw(self):
        w = self.x_side[random.randint(0, len(self.x_side) - 1)]
        h = self.y_side[random.randint(0, len(self.y_side) - 1)]
        pyxel.blt(self.x, self.y, 0, 16, 48, w, h, 0)

    def draw_hp_bar(self):
        pyxel.text(17, 10, self.get_hp(), 8)
        # Four 16px bar segments drawn right-to-left from x=-2.
        for i, u in enumerate((48, 64, 80, 96)):
            pyxel.blt(-2 - 16 * i, 10, 0, u, 48, 16, 16, 0)

    def get_hp(self):
        return str(self.hp)
"pyxel.blt"
] | [((559, 598), 'pyxel.blt', 'pyxel.blt', (['(-2)', '(10)', '(0)', '(48)', '(48)', '(16)', '(16)', '(0)'], {}), '(-2, 10, 0, 48, 48, 16, 16, 0)\n', (568, 598), False, 'import pyxel\n'), ((607, 652), 'pyxel.blt', 'pyxel.blt', (['(-18)', '(10)', '(0)', '(48 + 16)', '(48)', '(16)', '(16)', '(0)'], {}), '(-18, 10, 0, 48 + 16, 48, 16, 16, 0)\n', (616, 652), False, 'import pyxel\n'), ((659, 704), 'pyxel.blt', 'pyxel.blt', (['(-34)', '(10)', '(0)', '(48 + 32)', '(48)', '(16)', '(16)', '(0)'], {}), '(-34, 10, 0, 48 + 32, 48, 16, 16, 0)\n', (668, 704), False, 'import pyxel\n'), ((711, 756), 'pyxel.blt', 'pyxel.blt', (['(-50)', '(10)', '(0)', '(48 + 48)', '(48)', '(16)', '(16)', '(0)'], {}), '(-50, 10, 0, 48 + 48, 48, 16, 16, 0)\n', (720, 756), False, 'import pyxel\n')] |
import numpy as np
import pydicom as dicom
def read_dicom(filename):
    """Read a DICOM file and return its pixel data as a uint16 image.

    Parameters
    ----------
    filename: str
        Existing DICOM file filename.

    Returns
    -------
    tuple(numpy.ndarray, float) or None
        ``(image of shape (Rows, Columns), pixel spacing)`` on success,
        or ``None`` if the file cannot be read or lacks the required
        attributes.
    """
    try:
        data = dicom.read_file(filename)
        img = np.frombuffer(data.PixelData, dtype=np.uint16).copy()
        # MONOCHROME1 stores inverted intensities; flip so higher = brighter.
        if data.PhotometricInterpretation == 'MONOCHROME1':
            img = img.max() - img
        img = img.reshape((data.Rows, data.Columns))
        return img, data.ImagerPixelSpacing[0]
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any parsing failure still yields None.
        return None
def preprocess_xray(img, cut_min=5., cut_max=99.):
    """Preprocess an X-ray image: clip the histogram and rescale to uint8.

    Parameters
    ----------
    img : numpy.ndarray
        Input image (any numeric dtype).
    cut_min: int
        Lowest percentile which is used to cut the image histogram.
    cut_max: int
        Highest percentile.

    Returns
    -------
    numpy.ndarray
        uint8 image scaled to the [0, 255] range.
    """
    img = img.astype(np.float64)
    lim1, lim2 = np.percentile(img, [cut_min, cut_max])
    # In-place clip replaces the two masked assignments of the original.
    np.clip(img, lim1, lim2, out=img)
    img -= lim1
    max_val = img.max()
    # Guard against constant images: dividing by zero would fill the
    # result with NaNs; such images now come back all zeros instead.
    if max_val > 0:
        img /= max_val
    img *= 255
    return img.astype(np.uint8, casting='unsafe')
def get_joint_y_proposals(img, av_points=11, margin=0.25):
    """Return Y-coordinates of the joint approximate locations.

    Sums intensities over the central third of each row (ignoring a
    vertical *margin* at top and bottom), smooths the absolute derivative
    of that profile with an *av_points*-wide moving average, then returns
    every 10th of the strongest ~10% of rows, shifted back to absolute
    image coordinates.
    """
    n_rows, n_cols = img.shape
    # Row-wise intensity profile over the central third of the image,
    # assuming the leg runs along the X-axis.
    central = img[int(n_rows * margin):int(n_rows * (1 - margin)),
                  int(n_cols / 3):int(n_cols - n_cols / 3)]
    profile = np.sum(central, axis=1)
    # Moving-average-smoothed absolute derivative of the profile.
    kernel = np.ones((av_points, )) / av_points
    profile = np.abs(np.convolve(np.diff(profile), kernel)[(av_points - 1):])
    # Keep the top ~10% strongest rows, subsampled every 10th.
    peaks = np.argsort(profile)[::-1][:int(0.1 * n_rows * (1 - 2 * margin))]
    return peaks[::10] + int(n_rows * margin)
| [
"numpy.frombuffer",
"numpy.ones",
"numpy.diff",
"numpy.argsort",
"pydicom.read_file",
"numpy.percentile"
] | [((950, 988), 'numpy.percentile', 'np.percentile', (['img', '[cut_min, cut_max]'], {}), '(img, [cut_min, cut_max])\n', (963, 988), True, 'import numpy as np\n'), ((261, 286), 'pydicom.read_file', 'dicom.read_file', (['filename'], {}), '(filename)\n', (276, 286), True, 'import pydicom as dicom\n'), ((1719, 1740), 'numpy.argsort', 'np.argsort', (['segm_line'], {}), '(segm_line)\n', (1729, 1740), True, 'import numpy as np\n'), ((301, 347), 'numpy.frombuffer', 'np.frombuffer', (['data.PixelData'], {'dtype': 'np.uint16'}), '(data.PixelData, dtype=np.uint16)\n', (314, 347), True, 'import numpy as np\n'), ((1600, 1618), 'numpy.diff', 'np.diff', (['segm_line'], {}), '(segm_line)\n', (1607, 1618), True, 'import numpy as np\n'), ((1620, 1641), 'numpy.ones', 'np.ones', (['(av_points,)'], {}), '((av_points,))\n', (1627, 1641), True, 'import numpy as np\n')] |
# desafio 21
import pygame

pygame.mixer.init()
pygame.mixer.music.load('Deutschland.mp3')
pygame.mixer.music.play()
# Poll playback every 100 ms instead of busy-spinning: the original
# ``while get_busy(): pass`` pinned a CPU core for the whole track.
while pygame.mixer.music.get_busy():
    pygame.time.wait(100)
"pygame.mixer.music.play",
"pygame.mixer.music.get_busy",
"pygame.mixer.init",
"pygame.mixer.music.load"
] | [((28, 47), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (45, 47), False, 'import pygame\n'), ((48, 90), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['"""Deutschland.mp3"""'], {}), "('Deutschland.mp3')\n", (71, 90), False, 'import pygame\n'), ((91, 116), 'pygame.mixer.music.play', 'pygame.mixer.music.play', ([], {}), '()\n', (114, 116), False, 'import pygame\n'), ((124, 153), 'pygame.mixer.music.get_busy', 'pygame.mixer.music.get_busy', ([], {}), '()\n', (151, 153), False, 'import pygame\n')] |
import datetime
import appdaemon.plugins.hass.hassapi as hass
import calendar
# Time-of-use window boundaries (hours, local time).
SHOULDER_START_HOUR = 13
PEAK_START_HOUR = 15
PEAK_END_HOUR = 19
# SHOULDER_END_HOUR = 21
# Months billed on the summer rate schedule.
SUMMER_MONTHS = [6, 7, 8, 9]
# Canonical state strings published for the TOU sensor entity.
ON_PEAK = 'on-peak'
SHOULDER = 'shoulder'
OFF_PEAK = 'off-peak'
# Rider/adjustment charges -- apparently already folded into the flat
# rates below (see the commented-out sum in get_rate).
# PCCA = 0.00401
# DSMCA = 0.00159
# TCA = 0.00203
# CACJA = 0.00301
# ECA_ON_PEAK = 0.04170
# ECA_OFF_PEAK = 0.02574
# Energy rates ($/kWh) by season and TOU period.
RATE_SUMMER_ON_PEAK = 0.18527
RATE_SUMMER_SHOULDER = 0.13025
RATE_SUMMER_OFF_PEAK = 0.08018
RATE_WINTER_ON_PEAK = 0.19178
RATE_WINTER_SHOULDER = 0.13676
RATE_WINTER_OFF_PEAK = 0.08458
class Holiday(object):
    """A utility holiday resolved to a concrete date in the current year.

    A holiday is specified either as a fixed day of the month (*day*) or
    as the *wom*-th occurrence of weekday *dow* within *month*.  The
    resolved date is stored in ``self.dt``.
    """

    def __init__(self, name, month, dow=None, wom=None, day=None):
        '''
        :param name: the name of the holiday
        :param month: month of the holiday (1 = January)
        :param dow: day of the week of the holiday (0-indexed, starting with Monday)
        :param wom: week of the month of the holiday (0-indexed, use -1 for last)
        :param day: day of the month (if dow and wom are not used)
        '''
        self.name = name
        self.month = month
        self.dow = dow
        self.wom = wom
        self.day = day
        self.dt = None
        self.year = datetime.datetime.now().year
        self.__parse_holiday()

    def __parse_holiday(self):
        """Resolve the holiday spec to a datetime stored in ``self.dt``."""
        if self.day is not None:
            observed = datetime.datetime(self.year, self.month, self.day)
            # Fixed-date holidays on a weekend are observed on the nearest
            # weekday: Saturday -> Friday, Sunday -> Monday.
            if observed.weekday() == 5:
                observed = observed - datetime.timedelta(days=1)
            elif observed.weekday() == 6:
                observed = observed + datetime.timedelta(days=1)
            self.dt = observed
        elif self.dow is not None and self.wom is not None:
            # Weeks of the month that actually contain the target weekday.
            weeks_with_dow = [week
                              for week in calendar.monthcalendar(self.year, self.month)
                              if week[self.dow] != 0]
            day = weeks_with_dow[self.wom][self.dow]
            self.dt = datetime.datetime(self.year, self.month, day)
class StateManagerXcelColorado(hass.Hass):
    """AppDaemon app publishing the current Xcel Colorado TOU period.

    Maintains a sensor entity (the ``device`` app argument) whose state is
    one of ON_PEAK / SHOULDER / OFF_PEAK, with the matching $/kWh rate and
    a LaMetric icon id as attributes.
    """

    # Seasonal rate per TOU period: (summer rate, winter rate).
    _RATES = {
        ON_PEAK: (RATE_SUMMER_ON_PEAK, RATE_WINTER_ON_PEAK),
        SHOULDER: (RATE_SUMMER_SHOULDER, RATE_WINTER_SHOULDER),
        OFF_PEAK: (RATE_SUMMER_OFF_PEAK, RATE_WINTER_OFF_PEAK),
    }

    def initialize(self):
        """AppDaemon entry point: schedule an hourly state refresh."""
        self.log("initialize()", level="DEBUG")
        self.log("args: {0}".format(self.args), level="INFO")
        self.device = None
        self.state = None
        if "device" in self.args:
            self.device = self.args["device"]
            # TOU windows change on hour boundaries: refresh at the top of
            # every hour, plus once immediately at startup.
            self.run_hourly(self.update_state, datetime.time())
            self.update_state(None)
        else:
            self.log("No device specified. Doing nothing.", level="ERROR")

    def is_holiday(self, dt):
        """Return True if *dt* falls on an Xcel TOU holiday (observed date)."""
        self.log("is_holiday({0})".format(dt), level="DEBUG")
        tou_holidays = [
            Holiday("New Year's Day", 1, day=1),
            Holiday("Memorial Day", 5, dow=0, wom=-1),
            Holiday("Independence Day", 7, day=4),
            Holiday("Labor Day", 9, dow=0, wom=0),
            Holiday("Thanksgiving Day", 11, dow=3, wom=3),
            Holiday("Christmas Day", 12, day=25)
        ]
        return dt.date() in [h.dt.date() for h in tou_holidays]

    def update_state(self, kwargs):
        """Compute the current TOU period and publish it if it changed."""
        self.log("update_state({0})".format(kwargs), level="DEBUG")
        now = datetime.datetime.now()
        tou_mode = OFF_PEAK
        lametric_icon = "a11218"
        # Weekends (5=Sat, 6=Sun) and holidays are always off-peak.
        if now.weekday() not in [5, 6] \
                and not self.is_holiday(now):
            if PEAK_END_HOUR > now.hour >= PEAK_START_HOUR:
                tou_mode = ON_PEAK
                lametric_icon = "a11217"
            elif PEAK_START_HOUR > now.hour >= SHOULDER_START_HOUR:
                tou_mode = SHOULDER
                lametric_icon = "a11219"
        attributes = {
            "lametric_icon": lametric_icon,
            "rate": self.get_rate(tou_mode)
        }
        if tou_mode != self.state:
            self.log("{0} is now {1}...".format(self.device, tou_mode), level="INFO")
            self.log("...with attributes: {0}".format(attributes), level="DEBUG")
            self.state = tou_mode
            self.set_state(self.device, state=tou_mode, attributes=attributes)

    def get_rate(self, tou_mode):
        """Return the $/kWh rate for *tou_mode* in the current season.

        Unknown modes yield 0.0, matching the previous behavior.
        """
        self.log("get_rate({0})".format(tou_mode), level="DEBUG")
        # Evaluate the season exactly once so every branch agrees even if
        # the call straddles a month boundary (the original re-evaluated
        # datetime.now() inside each branch).
        is_summer = datetime.datetime.now().month in SUMMER_MONTHS
        summer_rate, winter_rate = self._RATES.get(tou_mode, (0.0, 0.0))
        return summer_rate if is_summer else winter_rate
| [
"datetime.datetime",
"datetime.time",
"datetime.datetime.now",
"calendar.monthcalendar",
"datetime.timedelta"
] | [((3141, 3164), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3162, 3164), False, 'import datetime\n'), ((1186, 1209), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1207, 1209), False, 'import datetime\n'), ((1333, 1383), 'datetime.datetime', 'datetime.datetime', (['self.year', 'self.month', 'self.day'], {}), '(self.year, self.month, self.day)\n', (1350, 1383), False, 'import datetime\n'), ((1692, 1737), 'calendar.monthcalendar', 'calendar.monthcalendar', (['self.year', 'self.month'], {}), '(self.year, self.month)\n', (1714, 1737), False, 'import calendar\n'), ((1961, 2006), 'datetime.datetime', 'datetime.datetime', (['self.year', 'self.month', 'day'], {}), '(self.year, self.month, day)\n', (1978, 2006), False, 'import datetime\n'), ((2371, 2386), 'datetime.time', 'datetime.time', ([], {}), '()\n', (2384, 2386), False, 'import datetime\n'), ((1459, 1485), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1477, 1485), False, 'import datetime\n'), ((4265, 4288), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4286, 4288), False, 'import datetime\n'), ((1563, 1589), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1581, 1589), False, 'import datetime\n'), ((4467, 4490), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4488, 4490), False, 'import datetime\n'), ((4671, 4694), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4692, 4694), False, 'import datetime\n')] |
import sys
from itertools import permutations

read = sys.stdin.readline

n = int(read())
arr = list(map(int, read().split()))
# Try every ordering of the numbers and keep the largest total of
# adjacent absolute differences.  Iterating permutations() lazily avoids
# materializing all n! orderings in memory at once, unlike the original
# ``list(permutations(arr))``.
result = 0
for card in permutations(arr):
    total = sum(abs(card[i] - card[i + 1]) for i in range(n - 1))
    result = max(result, total)
print(result)
| [
"itertools.permutations"
] | [((154, 171), 'itertools.permutations', 'permutations', (['arr'], {}), '(arr)\n', (166, 171), False, 'from itertools import permutations\n')] |
"""
It contains customadmin's models. It's used to customize admin's interface
"""
from upy.contrib.tree.models import _
from django.db import models
from upy.contrib.colors.fields import ColorField
from upy.contrib.sortable.models import PositionModel
from django.conf import settings
from imagekit.models import ImageSpecField, ProcessedImageField
from pilkit.processors import ResizeToFit
from upy.fields import NullTrueField
def verifyApp(app):
    """Return True when *app* is a framework/support application that must
    be excluded from the custom-admin listings."""
    excluded = {
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.messages',
        'django.contrib.admin',
        'django.contrib.sitemaps',
        'mptt',
        'imagekit',
        'upy',
        'south',
        'upy.contrib.inspect',
        'modeltranslation',
        'upy.contrib.tabbed_translation',
        'upy.contrib.cked',
        'upy.contrib.colors',
        'upy.contrib.rosetta',
    }
    return app in excluded
def all_apps():
    """
    Return ``[dotted_path, Title]`` pairs for every installed application
    that is not excluded by ``verifyApp``.
    """
    return [[app, app.split(".")[-1].title()]
            for app in settings.INSTALLED_APPS
            if not verifyApp(app)]
def list_apps():
    """
    Return ``[Title, Title]`` choice pairs for installed applications that
    do not yet have a :class:`CustomApp` record.
    """
    choices = []
    for app in settings.INSTALLED_APPS:
        if verifyApp(app):
            continue
        title = app.split(".")[-1].title()
        try:
            CustomApp.objects.get(application=title)
        except CustomApp.DoesNotExist:
            # Narrowed from a bare ``except:``: only "no record yet" means
            # the app should be offered as a choice; real DB errors now
            # propagate instead of being silently swallowed.
            choices.append([title] * 2)
    return choices
def list_models():
    """
    Return ``(AppTitle, [[verbose_name, verbose_name], ...])`` tuples for
    every model in the installed applications that does not yet have a
    :class:`CustomModel` record.
    """
    result = []
    for app in settings.INSTALLED_APPS:
        if verifyApp(app):
            continue
        try:
            app_models = models.get_models(models.get_app(app.split(".")[-1]))
        except Exception:
            # App has no models.py module: it contributes no models.
            # (Fixes a real bug: the original left its loop variable unset
            # here, silently reusing the previous app's model list -- or
            # raising NameError on the first app.)
            app_models = []
        remaining = []
        for m in app_models:
            try:
                CustomModel.objects.get(app=app, model=m.__name__)
            except CustomModel.DoesNotExist:
                remaining.append([m._meta.verbose_name_plural] * 2)
        result.append((app.split(".")[-1].title(), remaining))
    return result
class CustomAdmin(models.Model):
    """
    This object define parameters to customize admin layout. It has sense if you use only a record
    of this class. Infact base template use the first occurence find in the database
    """
    # --- Branding: text, link and logo shown in the admin header. ---
    branding = models.CharField(max_length=200, null=True, blank=True,
                                default=u"upyproject.com",
                                help_text=_(u"Set branding"),
                                verbose_name=_(u"Branding"))
    branding_link = models.CharField(max_length=200, null=True, blank=True,
                                     default=u"www.upyproject.com",
                                     help_text=_(u"Set branding's link"),
                                     verbose_name=_(u"Branding link"))
    branding_image = models.FilePathField(path=settings.RELATIVE_STATIC_ROOT, null=True, blank=True,
                                          match="\.jpg|\.jpeg|.png|\.gif", recursive=True,
                                          help_text=_(u"Set brand's image."),
                                          verbose_name=_(u"Branding image"))
    # NullTrueField + unique=True: at most one row can be marked default.
    default = NullTrueField(_(u"Default"), help_text=_(u"Select it if you want use this as default customization."),
                            unique=True)
    # --- Fallback icons used when an app/model lacks a specific icon. ---
    default_app_image = ProcessedImageField(verbose_name=_(u"Default app image"),
                                            help_text=_(u"Insert a default application image"), null=True, blank=True,
                                            upload_to='customadmin')
    default_model_image = ProcessedImageField(verbose_name=_(u"Default model image"),
                                              help_text=_(u"Insert a default model image"), null=True, blank=True,
                                              upload_to='customadmin')
    # Resized renditions of the default icons above.
    app_image = ImageSpecField([ResizeToFit(128, 128)], source='default_app_image',
                               options={'quality': 90})  #format='JPEG',
    model_image = ImageSpecField([ResizeToFit(50, 50)], source='default_model_image', options={'quality': 90})
    # --- Theme: colors, fonts and sizes injected into the admin CSS. ---
    bg_header = ColorField(max_length=200, null=True, blank=True,
                           help_text=_(u"Set header's background color."),
                           verbose_name=_(u"BG Header"))
    sitename_font = models.CharField(max_length=200, null=True, blank=True,
                                     help_text=_(u"Set sitename font."),
                                     verbose_name=_(u"Sitename font"))
    sitename_font_size = models.CharField(max_length=200, null=True, blank=True,
                                          help_text=_(u"Set sitename font size."),
                                          verbose_name=_(u"Sitename font size"))
    sitename_font_weight = models.CharField(max_length=200, null=True, blank=True,
                                            help_text=_(u"Set sitename font weight."),
                                            verbose_name=_(u"Sitename font weight"))
    table_title_bg = ColorField(max_length=200, null=True, blank=True,
                                help_text=_(u"Set the background of title in tables."),
                                verbose_name=_(u"BG table title "))
    table_title_color = ColorField(max_length=200, null=True, blank=True,
                                   help_text=_(u"Set the color of title in tables."),
                                   verbose_name=_(u"Table title color"))
    h2_color = ColorField(max_length=200, null=True, blank=True,
                          help_text=_(u"Set h2 color."), verbose_name=_(u"H2 color"))
    h2_size = models.CharField(max_length=200, null=True, blank=True,
                               help_text=_(u"Set h2 size."), verbose_name=_(u"H2 size"))
    h3_color = ColorField(max_length=200, null=True, blank=True,
                          help_text=_(u"Set h3 color."), verbose_name=_(u"H3 color"))
    h3_size = models.CharField(max_length=200, null=True, blank=True,
                               help_text=_(u"Set h3 size."), verbose_name=_(u"H3 size"))
    link_color = ColorField(max_length=200, null=True, blank=True,
                            help_text=_(u"Set link's color"), verbose_name=_(u"Link color"))
    link_hover_color = ColorField(max_length=200, null=True, blank=True,
                                  help_text=_(u"Set link's color when hover"),
                                  verbose_name=_(u"Link hover color"))
    # --- Raw head/CSS overrides appended to the admin templates. ---
    html_head = models.TextField(null=True, blank=True,
                                 help_text=_(u"Set other html code to put in HEAD section. "),
                                 verbose_name=_(u"Html head"))
    css_code = models.TextField(null=True, blank=True,
                                help_text=_(u"Set the css code. "),
                                verbose_name=_(u"Css code"))
    use_css_code = models.BooleanField(help_text=_(u"Check it if you want use css code to extends style."),
                                       verbose_name=_(u"Use css code"), default=False)
    use_log_sidebar = models.BooleanField(default=False,
                                          help_text=_(u"Check it if you want use log sidebar in index template."),
                                          verbose_name=_(u"Use log sidebar"))
    # How the admin index groups/renders apps and models (icon vs list views).
    view_mode = models.CharField(max_length=250, null=True, blank=True,
                                 choices=(('use_custom_app', _('Use custom app system')),
                                          ('use_app_icons', _("Use apps' icons system")),
                                          ('use_app_and_model_icons', _("Use apps and models icons system")),
                                          ('use_model_icons',
                                           _("Use models' icons system in index group models by app")),
                                          ('use_total_model_icons',
                                           _("Use models' icons system in index ungroup models by app"))),
                                 help_text=_(u"Choose the view mode"),
                                 verbose_name=_(u"View mode"))
    autocomplete_app_list = models.BooleanField(default=True,
                                                help_text=_(
                                                    u"Check it if you want complete the custom app list with the default app list."),
                                                verbose_name=_(u"Autocomplete App"))
    autocomplete_models_list = models.BooleanField(default=True,
                                                   help_text=_(
                                                       u"Check it if you want complete the custom models list with the default models list."),
                                                   verbose_name=_(u"Autocomplete model"))

    @property
    def customization(self):
        """
        It returns branding if defined, else image, else only his primary key.
        """
        if self.branding:
            return self.branding
        elif self.branding_image:
            res = self.branding_image.split("/")[-1]
            return res
        else:
            return self.pk

    @property
    def branding_image_url(self):
        # Map the stored filesystem path back to a servable STATIC_URL path.
        return self.branding_image.replace(settings.RELATIVE_STATIC_ROOT, settings.STATIC_URL).replace("//", "/")

    def save(self, *args, **kwargs):
        # When switching to the app-icons view with no CustomApp rows yet,
        # seed one CustomApp per installed (non-framework) application.
        appicons = CustomApp.objects.all()
        if self.view_mode == "use_app_icons" and not appicons:
            for app in list_apps():
                new_app = CustomApp(application=app[0], verbose_app_name=app[1])
                new_app.save()
        super(CustomAdmin, self).save(*args, **kwargs)

    def __unicode__(self):
        return u"%s" % (self.branding)

    class Meta:
        verbose_name = _(u"Custom Admin")
        verbose_name_plural = _(u"Custom Admin")
        ordering = ['branding']
class CustomApp(PositionModel):
    """
    This object links the installed_apps with an icon to use if CustomAdmin.use_app_icons is True
    """
    # Title-cased last segment of the app's dotted path (see list_apps()).
    application = models.CharField(max_length=250,
                                   unique=True, help_text=_(u"Select the application"),
                                   verbose_name=_(u"Application"))
    verbose_app_name = models.CharField(max_length=250, unique=True,
                                        help_text=_(u"Write the verbose name to show"),
                                        verbose_name=_(u"Verbose app name"))
    image = models.ImageField(_(u'Image'), null=True, blank=True, upload_to='upyimage')
    # 80x80 rendition of the uploaded icon, generated by imagekit.
    thumb = ImageSpecField([ResizeToFit(80, 80)],
                           source='image',
                           format='png')
    show_models = models.BooleanField(
        default=True,
        help_text=_(u"If use_app_icons is False in Customadmin, you can choose wheter or not show the model list."),
        verbose_name=_(u"Show models")
    )

    def __unicode__(self):
        return self.application

    class Meta:
        verbose_name = _(u"Custom App")
        verbose_name_plural = _(u"Custom Apps")
        ordering = ['position']
class CustomLink(PositionModel):
    """
    This object links the installed_apps with an icon to use
    if CustomAdmin.use_app_icons is True
    """
    link_url = models.CharField(max_length=250, default="/admin/",
                                help_text=_(u"Select the url you want to link"),
                                verbose_name=_(u"Link Url"))
    verbose_url_name = models.CharField(max_length=250, unique=True,
                                        help_text=_(u"Write the verbose name to show"),
                                        verbose_name=_(u"Verbose url name"))
    image = models.ImageField(_(u'Image'), null=True, blank=True, upload_to='upyimage')
    # 80x80 rendition of the uploaded icon, generated by imagekit.
    thumb = ImageSpecField([ResizeToFit(80, 80)], source='image', format='png')

    def __unicode__(self):
        return self.link_url

    class Meta:
        verbose_name = _(u"Custom Link")
        verbose_name_plural = _(u"Custom Link")
        ordering = ['position']
class CustomModel(PositionModel):
    """
    This object links models in installed_apps with an icon to use
    if CustomAdmin.view_mode == "use_model_icons" or CustomAdmin.view_mode == "use_inner_model_icons"
    """
    # Dotted app path and model class name (see list_models()).
    app = models.CharField(max_length=250,
                          help_text=_(u"Select an appplication"),
                          verbose_name=_(u"App"))
    model = models.CharField(max_length=250,
                            help_text=_(u"Select a model"),
                            verbose_name=_(u"Model"))
    image = models.ImageField(_(u'Image'), null=True, blank=True, upload_to='upyimage')
    # 50x50 rendition of the uploaded icon, generated by imagekit.
    thumb = ImageSpecField([ResizeToFit(50, 50)],
                           source='image',
                           format='png')

    def __unicode__(self):
        return self.model

    class Meta:
        verbose_name = _(u"Custom Model")
        verbose_name_plural = _(u"Custom Models")
        unique_together = ('app', 'model')
        ordering = ['position']
| [
"upy.contrib.tree.models._",
"pilkit.processors.ResizeToFit"
] | [((3671, 3684), 'upy.contrib.tree.models._', '_', (['u"""Default"""'], {}), "(u'Default')\n", (3672, 3684), False, 'from upy.contrib.tree.models import _\n'), ((10352, 10370), 'upy.contrib.tree.models._', '_', (['u"""Custom Admin"""'], {}), "(u'Custom Admin')\n", (10353, 10370), False, 'from upy.contrib.tree.models import _\n'), ((10401, 10419), 'upy.contrib.tree.models._', '_', (['u"""Custom Admin"""'], {}), "(u'Custom Admin')\n", (10402, 10419), False, 'from upy.contrib.tree.models import _\n'), ((11070, 11081), 'upy.contrib.tree.models._', '_', (['u"""Image"""'], {}), "(u'Image')\n", (11071, 11081), False, 'from upy.contrib.tree.models import _\n'), ((11585, 11601), 'upy.contrib.tree.models._', '_', (['u"""Custom App"""'], {}), "(u'Custom App')\n", (11586, 11601), False, 'from upy.contrib.tree.models import _\n'), ((11632, 11649), 'upy.contrib.tree.models._', '_', (['u"""Custom Apps"""'], {}), "(u'Custom Apps')\n", (11633, 11649), False, 'from upy.contrib.tree.models import _\n'), ((12309, 12320), 'upy.contrib.tree.models._', '_', (['u"""Image"""'], {}), "(u'Image')\n", (12310, 12320), False, 'from upy.contrib.tree.models import _\n'), ((12544, 12561), 'upy.contrib.tree.models._', '_', (['u"""Custom Link"""'], {}), "(u'Custom Link')\n", (12545, 12561), False, 'from upy.contrib.tree.models import _\n'), ((12592, 12609), 'upy.contrib.tree.models._', '_', (['u"""Custom Link"""'], {}), "(u'Custom Link')\n", (12593, 12609), False, 'from upy.contrib.tree.models import _\n'), ((13215, 13226), 'upy.contrib.tree.models._', '_', (['u"""Image"""'], {}), "(u'Image')\n", (13216, 13226), False, 'from upy.contrib.tree.models import _\n'), ((13501, 13519), 'upy.contrib.tree.models._', '_', (['u"""Custom Model"""'], {}), "(u'Custom Model')\n", (13502, 13519), False, 'from upy.contrib.tree.models import _\n'), ((13550, 13569), 'upy.contrib.tree.models._', '_', (['u"""Custom Models"""'], {}), "(u'Custom Models')\n", (13551, 13569), False, 'from upy.contrib.tree.models import 
_\n'), ((2926, 2944), 'upy.contrib.tree.models._', '_', (['u"""Set branding"""'], {}), "(u'Set branding')\n", (2927, 2944), False, 'from upy.contrib.tree.models import _\n'), ((2991, 3005), 'upy.contrib.tree.models._', '_', (['u"""Branding"""'], {}), "(u'Branding')\n", (2992, 3005), False, 'from upy.contrib.tree.models import _\n'), ((3198, 3223), 'upy.contrib.tree.models._', '_', (['u"""Set branding\'s link"""'], {}), '(u"Set branding\'s link")\n', (3199, 3223), False, 'from upy.contrib.tree.models import _\n'), ((3275, 3294), 'upy.contrib.tree.models._', '_', (['u"""Branding link"""'], {}), "(u'Branding link')\n", (3276, 3294), False, 'from upy.contrib.tree.models import _\n'), ((3540, 3564), 'upy.contrib.tree.models._', '_', (['u"""Set brand\'s image."""'], {}), '(u"Set brand\'s image.")\n', (3541, 3564), False, 'from upy.contrib.tree.models import _\n'), ((3621, 3641), 'upy.contrib.tree.models._', '_', (['u"""Branding image"""'], {}), "(u'Branding image')\n", (3622, 3641), False, 'from upy.contrib.tree.models import _\n'), ((3696, 3758), 'upy.contrib.tree.models._', '_', (['u"""Select it if you want use this as default customization."""'], {}), "(u'Select it if you want use this as default customization.')\n", (3697, 3758), False, 'from upy.contrib.tree.models import _\n'), ((3858, 3881), 'upy.contrib.tree.models._', '_', (['u"""Default app image"""'], {}), "(u'Default app image')\n", (3859, 3881), False, 'from upy.contrib.tree.models import _\n'), ((3937, 3977), 'upy.contrib.tree.models._', '_', (['u"""Insert a default application image"""'], {}), "(u'Insert a default application image')\n", (3938, 3977), False, 'from upy.contrib.tree.models import _\n'), ((4130, 4155), 'upy.contrib.tree.models._', '_', (['u"""Default model image"""'], {}), "(u'Default model image')\n", (4131, 4155), False, 'from upy.contrib.tree.models import _\n'), ((4213, 4247), 'upy.contrib.tree.models._', '_', (['u"""Insert a default model image"""'], {}), "(u'Insert a default model 
image')\n", (4214, 4247), False, 'from upy.contrib.tree.models import _\n'), ((4376, 4397), 'pilkit.processors.ResizeToFit', 'ResizeToFit', (['(128)', '(128)'], {}), '(128, 128)\n', (4387, 4397), False, 'from pilkit.processors import ResizeToFit\n'), ((4535, 4554), 'pilkit.processors.ResizeToFit', 'ResizeToFit', (['(50)', '(50)'], {}), '(50, 50)\n', (4546, 4554), False, 'from pilkit.processors import ResizeToFit\n'), ((4716, 4752), 'upy.contrib.tree.models._', '_', (['u"""Set header\'s background color."""'], {}), '(u"Set header\'s background color.")\n', (4717, 4752), False, 'from upy.contrib.tree.models import _\n'), ((4794, 4809), 'upy.contrib.tree.models._', '_', (['u"""BG Header"""'], {}), "(u'BG Header')\n", (4795, 4809), False, 'from upy.contrib.tree.models import _\n'), ((4934, 4958), 'upy.contrib.tree.models._', '_', (['u"""Set sitename font."""'], {}), "(u'Set sitename font.')\n", (4935, 4958), False, 'from upy.contrib.tree.models import _\n'), ((5010, 5029), 'upy.contrib.tree.models._', '_', (['u"""Sitename font"""'], {}), "(u'Sitename font')\n", (5011, 5029), False, 'from upy.contrib.tree.models import _\n'), ((5164, 5193), 'upy.contrib.tree.models._', '_', (['u"""Set sitename font size."""'], {}), "(u'Set sitename font size.')\n", (5165, 5193), False, 'from upy.contrib.tree.models import _\n'), ((5250, 5274), 'upy.contrib.tree.models._', '_', (['u"""Sitename font size"""'], {}), "(u'Sitename font size')\n", (5251, 5274), False, 'from upy.contrib.tree.models import _\n'), ((5413, 5444), 'upy.contrib.tree.models._', '_', (['u"""Set sitename font weight."""'], {}), "(u'Set sitename font weight.')\n", (5414, 5444), False, 'from upy.contrib.tree.models import _\n'), ((5503, 5529), 'upy.contrib.tree.models._', '_', (['u"""Sitename font weight"""'], {}), "(u'Sitename font weight')\n", (5504, 5529), False, 'from upy.contrib.tree.models import _\n'), ((5644, 5688), 'upy.contrib.tree.models._', '_', (['u"""Set the background of title in tables."""'], {}), 
"(u'Set the background of title in tables.')\n", (5645, 5688), False, 'from upy.contrib.tree.models import _\n'), ((5735, 5756), 'upy.contrib.tree.models._', '_', (['u"""BG table title """'], {}), "(u'BG table title ')\n", (5736, 5756), False, 'from upy.contrib.tree.models import _\n'), ((5877, 5916), 'upy.contrib.tree.models._', '_', (['u"""Set the color of title in tables."""'], {}), "(u'Set the color of title in tables.')\n", (5878, 5916), False, 'from upy.contrib.tree.models import _\n'), ((5966, 5989), 'upy.contrib.tree.models._', '_', (['u"""Table title color"""'], {}), "(u'Table title color')\n", (5967, 5989), False, 'from upy.contrib.tree.models import _\n'), ((6092, 6111), 'upy.contrib.tree.models._', '_', (['u"""Set h2 color."""'], {}), "(u'Set h2 color.')\n", (6093, 6111), False, 'from upy.contrib.tree.models import _\n'), ((6126, 6140), 'upy.contrib.tree.models._', '_', (['u"""H2 color"""'], {}), "(u'H2 color')\n", (6127, 6140), False, 'from upy.contrib.tree.models import _\n'), ((6253, 6271), 'upy.contrib.tree.models._', '_', (['u"""Set h2 size."""'], {}), "(u'Set h2 size.')\n", (6254, 6271), False, 'from upy.contrib.tree.models import _\n'), ((6286, 6299), 'upy.contrib.tree.models._', '_', (['u"""H2 size"""'], {}), "(u'H2 size')\n", (6287, 6299), False, 'from upy.contrib.tree.models import _\n'), ((6402, 6421), 'upy.contrib.tree.models._', '_', (['u"""Set h3 color."""'], {}), "(u'Set h3 color.')\n", (6403, 6421), False, 'from upy.contrib.tree.models import _\n'), ((6436, 6450), 'upy.contrib.tree.models._', '_', (['u"""H3 color"""'], {}), "(u'H3 color')\n", (6437, 6450), False, 'from upy.contrib.tree.models import _\n'), ((6563, 6581), 'upy.contrib.tree.models._', '_', (['u"""Set h3 size."""'], {}), "(u'Set h3 size.')\n", (6564, 6581), False, 'from upy.contrib.tree.models import _\n'), ((6596, 6609), 'upy.contrib.tree.models._', '_', (['u"""H3 size"""'], {}), "(u'H3 size')\n", (6597, 6609), False, 'from upy.contrib.tree.models import _\n'), ((6716, 
6738), 'upy.contrib.tree.models._', '_', (['u"""Set link\'s color"""'], {}), '(u"Set link\'s color")\n', (6717, 6738), False, 'from upy.contrib.tree.models import _\n'), ((6753, 6769), 'upy.contrib.tree.models._', '_', (['u"""Link color"""'], {}), "(u'Link color')\n", (6754, 6769), False, 'from upy.contrib.tree.models import _\n'), ((6888, 6921), 'upy.contrib.tree.models._', '_', (['u"""Set link\'s color when hover"""'], {}), '(u"Set link\'s color when hover")\n', (6889, 6921), False, 'from upy.contrib.tree.models import _\n'), ((6970, 6992), 'upy.contrib.tree.models._', '_', (['u"""Link hover color"""'], {}), "(u'Link hover color')\n", (6971, 6992), False, 'from upy.contrib.tree.models import _\n'), ((7093, 7143), 'upy.contrib.tree.models._', '_', (['u"""Set other html code to put in HEAD section. """'], {}), "(u'Set other html code to put in HEAD section. ')\n", (7094, 7143), False, 'from upy.contrib.tree.models import _\n'), ((7191, 7206), 'upy.contrib.tree.models._', '_', (['u"""Html head"""'], {}), "(u'Html head')\n", (7192, 7206), False, 'from upy.contrib.tree.models import _\n'), ((7305, 7329), 'upy.contrib.tree.models._', '_', (['u"""Set the css code. """'], {}), "(u'Set the css code. 
')\n", (7306, 7329), False, 'from upy.contrib.tree.models import _\n'), ((7376, 7390), 'upy.contrib.tree.models._', '_', (['u"""Css code"""'], {}), "(u'Css code')\n", (7377, 7390), False, 'from upy.contrib.tree.models import _\n'), ((7441, 7498), 'upy.contrib.tree.models._', '_', (['u"""Check it if you want use css code to extends style."""'], {}), "(u'Check it if you want use css code to extends style.')\n", (7442, 7498), False, 'from upy.contrib.tree.models import _\n'), ((7552, 7570), 'upy.contrib.tree.models._', '_', (['u"""Use css code"""'], {}), "(u'Use css code')\n", (7553, 7570), False, 'from upy.contrib.tree.models import _\n'), ((7696, 7757), 'upy.contrib.tree.models._', '_', (['u"""Check it if you want use log sidebar in index template."""'], {}), "(u'Check it if you want use log sidebar in index template.')\n", (7697, 7757), False, 'from upy.contrib.tree.models import _\n'), ((7814, 7835), 'upy.contrib.tree.models._', '_', (['u"""Use log sidebar"""'], {}), "(u'Use log sidebar')\n", (7815, 7835), False, 'from upy.contrib.tree.models import _\n'), ((8583, 8609), 'upy.contrib.tree.models._', '_', (['u"""Choose the view mode"""'], {}), "(u'Choose the view mode')\n", (8584, 8609), False, 'from upy.contrib.tree.models import _\n'), ((8657, 8672), 'upy.contrib.tree.models._', '_', (['u"""View mode"""'], {}), "(u'View mode')\n", (8658, 8672), False, 'from upy.contrib.tree.models import _\n'), ((8794, 8881), 'upy.contrib.tree.models._', '_', (['u"""Check it if you want complete the custom app list with the default app list."""'], {}), "(u'Check it if you want complete the custom app list with the default app list.'\n )\n", (8795, 8881), False, 'from upy.contrib.tree.models import _\n'), ((8992, 9014), 'upy.contrib.tree.models._', '_', (['u"""Autocomplete App"""'], {}), "(u'Autocomplete App')\n", (8993, 9014), False, 'from upy.contrib.tree.models import _\n'), ((9142, 9235), 'upy.contrib.tree.models._', '_', (['u"""Check it if you want complete the custom models 
list with the default models list."""'], {}), "(u'Check it if you want complete the custom models list with the default models list.'\n )\n", (9143, 9235), False, 'from upy.contrib.tree.models import _\n'), ((9352, 9376), 'upy.contrib.tree.models._', '_', (['u"""Autocomplete model"""'], {}), "(u'Autocomplete model')\n", (9353, 9376), False, 'from upy.contrib.tree.models import _\n'), ((10709, 10737), 'upy.contrib.tree.models._', '_', (['u"""Select the application"""'], {}), "(u'Select the application')\n", (10710, 10737), False, 'from upy.contrib.tree.models import _\n'), ((10787, 10804), 'upy.contrib.tree.models._', '_', (['u"""Application"""'], {}), "(u'Application')\n", (10788, 10804), False, 'from upy.contrib.tree.models import _\n'), ((10925, 10961), 'upy.contrib.tree.models._', '_', (['u"""Write the verbose name to show"""'], {}), "(u'Write the verbose name to show')\n", (10926, 10961), False, 'from upy.contrib.tree.models import _\n'), ((11016, 11038), 'upy.contrib.tree.models._', '_', (['u"""Verbose app name"""'], {}), "(u'Verbose app name')\n", (11017, 11038), False, 'from upy.contrib.tree.models import _\n'), ((11156, 11175), 'pilkit.processors.ResizeToFit', 'ResizeToFit', (['(80)', '(80)'], {}), '(80, 80)\n', (11167, 11175), False, 'from pilkit.processors import ResizeToFit\n'), ((11341, 11443), 'upy.contrib.tree.models._', '_', (['u"""If use_app_icons is False in Customadmin, you can choose wheter or not show the model list."""'], {}), "(u'If use_app_icons is False in Customadmin, you can choose wheter or not show the model list.'\n )\n", (11342, 11443), False, 'from upy.contrib.tree.models import _\n'), ((11461, 11478), 'upy.contrib.tree.models._', '_', (['u"""Show models"""'], {}), "(u'Show models')\n", (11462, 11478), False, 'from upy.contrib.tree.models import _\n'), ((11945, 11982), 'upy.contrib.tree.models._', '_', (['u"""Select the url you want to link"""'], {}), "(u'Select the url you want to link')\n", (11946, 11982), False, 'from 
upy.contrib.tree.models import _\n'), ((12029, 12043), 'upy.contrib.tree.models._', '_', (['u"""Link Url"""'], {}), "(u'Link Url')\n", (12030, 12043), False, 'from upy.contrib.tree.models import _\n'), ((12164, 12200), 'upy.contrib.tree.models._', '_', (['u"""Write the verbose name to show"""'], {}), "(u'Write the verbose name to show')\n", (12165, 12200), False, 'from upy.contrib.tree.models import _\n'), ((12255, 12277), 'upy.contrib.tree.models._', '_', (['u"""Verbose url name"""'], {}), "(u'Verbose url name')\n", (12256, 12277), False, 'from upy.contrib.tree.models import _\n'), ((12395, 12414), 'pilkit.processors.ResizeToFit', 'ResizeToFit', (['(80)', '(80)'], {}), '(80, 80)\n', (12406, 12414), False, 'from pilkit.processors import ResizeToFit\n'), ((12943, 12971), 'upy.contrib.tree.models._', '_', (['u"""Select an appplication"""'], {}), "(u'Select an appplication')\n", (12944, 12971), False, 'from upy.contrib.tree.models import _\n'), ((13013, 13022), 'upy.contrib.tree.models._', '_', (['u"""App"""'], {}), "(u'App')\n", (13014, 13022), False, 'from upy.contrib.tree.models import _\n'), ((13108, 13128), 'upy.contrib.tree.models._', '_', (['u"""Select a model"""'], {}), "(u'Select a model')\n", (13109, 13128), False, 'from upy.contrib.tree.models import _\n'), ((13172, 13183), 'upy.contrib.tree.models._', '_', (['u"""Model"""'], {}), "(u'Model')\n", (13173, 13183), False, 'from upy.contrib.tree.models import _\n'), ((13301, 13320), 'pilkit.processors.ResizeToFit', 'ResizeToFit', (['(50)', '(50)'], {}), '(50, 50)\n', (13312, 13320), False, 'from pilkit.processors import ResizeToFit\n'), ((7970, 7996), 'upy.contrib.tree.models._', '_', (['"""Use custom app system"""'], {}), "('Use custom app system')\n", (7971, 7996), False, 'from upy.contrib.tree.models import _\n'), ((8059, 8086), 'upy.contrib.tree.models._', '_', (['"""Use apps\' icons system"""'], {}), '("Use apps\' icons system")\n', (8060, 8086), False, 'from upy.contrib.tree.models import _\n'), ((8159, 
8196), 'upy.contrib.tree.models._', '_', (['"""Use apps and models icons system"""'], {}), "('Use apps and models icons system')\n", (8160, 8196), False, 'from upy.contrib.tree.models import _\n'), ((8304, 8362), 'upy.contrib.tree.models._', '_', (['"""Use models\' icons system in index group models by app"""'], {}), '("Use models\' icons system in index group models by app")\n', (8305, 8362), False, 'from upy.contrib.tree.models import _\n'), ((8476, 8536), 'upy.contrib.tree.models._', '_', (['"""Use models\' icons system in index ungroup models by app"""'], {}), '("Use models\' icons system in index ungroup models by app")\n', (8477, 8536), False, 'from upy.contrib.tree.models import _\n')] |
#! /usr/bin/env python
import os
import shutil
import sys
import gdal
import wetland_id_defaults as default
"""
Folder structure for pyGeoNet is as follows
geoNetHomeDir : defines where files will be written
e.g.
geoNetHomeDir = "C:\\Mystuff\\IO_Data\\"
--- \\data (input lidar files will be read from this folder)
--- \\results (outputs from pygeonet will be written to this folder)
--- \\basinTiffs (intermediate GRASS GIS files will be written
                 and deleted from this location. Sometimes these
                 files can be huge, so have enough space)
pmGrassGISfileName -- this is an important intermediate GRASS GIS file name.
# Skfmm parameters
numBasinsElements = 6
# Some used demFileNames
#ikawa_roi1_nutm54_clipped
#dem_2012_mission_v1
#PLEASE DO NOT CHANGE VARIABLES, UNLESS YOU KNOW WHAT YOU ARE DOING
"""
# Prepare GeoNet parameters just prior to main code execution
currentWorkingDir = os.getcwd()
# Root folder for pyGeoNet I/O (see the folder-structure note in the module docstring).
geoNetHomeDir = r"D:\2ndStudy_ONeil\Scripts\wetland_identification"
# Input DEM file name and the input/output folders, taken from wetland_id_defaults.
demFileName = default.input_dem
demDataFilePath = default.roi_dems
geonetResultsDir = default.roi_geonet
# NOTE(review): results dir and basin dir point at the same folder here — confirm intended.
geonetResultsBasinDir = default.roi_geonet
#channelheadFileName = "channelhead.shp"
channelheadFileName = "Hou_weights.tif"
channeljunctionFileName = "junction.shp"
# Make GDAL raise Python exceptions instead of returning error codes.
gdal.UseExceptions()
# Open the input DEM read-only and cache its driver, geotransform and pixel array.
ds = gdal.Open(os.path.join(demDataFilePath, demFileName), gdal.GA_ReadOnly)
driver = ds.GetDriver()
geotransform = ds.GetGeoTransform()
ary = ds.GetRasterBand(1).ReadAsArray()
# GDAL geotransform layout: [0] = origin x, [1] = pixel width, [3] = origin y.
demPixelScale = float(geotransform[1])
xLowerLeftCoord = float(geotransform[0])
# NOTE(review): geotransform[3] is conventionally the *top*-left y; the variable
# name says lower-left — confirm against downstream use.
yLowerLeftCoord = float(geotransform[3])
inputwktInfo = ds.GetProjection()
#GRASS GIS parameters
#set grass7bin variable based on OS, enter correct path to grass72.bat file, or add path to file to PATH
def set_grassbin():
    """Return the platform-specific path to the GRASS GIS 7.2 startup binary.

    Returns:
        str: Path to ``grass72.bat`` on Windows or the GRASS app bundle on macOS.

    Raises:
        OSError: On any other platform, for which no GRASS path is configured.
            (The original code fell through and raised ``UnboundLocalError``.)
    """
    if sys.platform.startswith('win'):
        # MS Windows
        grass7bin = r'C:\Program Files\GRASS GIS 7.2.2\grass72.bat'
        # uncomment when using standalone WinGRASS installer
        # grass7bin = r'C:\Program Files (x86)\GRASS GIS 7.2.0\grass72.bat'
        # this can be avoided if GRASS executable is added to PATH
    elif sys.platform == 'darwin':
        # Mac OS X
        # TODO: this have to be checked, maybe unix way is good enough
        grass7bin = '/Applications/GRASS/GRASS-7.2.app/'
    else:
        # Fail loudly with a clear message instead of an UnboundLocalError.
        raise OSError(
            'No GRASS GIS 7 binary path configured for platform %r' % sys.platform)
    return grass7bin
# GRASS GIS location/mapset names, taken from wetland_id_defaults.
location = default.location
mapset = default.mapset
# Write shapefile file paths
shapefilepath = default.roi_geonet
driverName = 'ESRI Shapefile'
# Output names are derived from the DEM file name minus its 4-char extension.
pointshapefileName = demFileName[:-4]+"_channelHeads"
pointFileName = os.path.join(shapefilepath, pointshapefileName+".shp")
drainagelinefileName = demFileName[:-4]+"_channelNetwork"
drainagelineFileName = os.path.join(shapefilepath, drainagelinefileName+".shp")
junctionshapefileName = demFileName[:-4]+"_channelJunctions"
junctionFileName = os.path.join(shapefilepath, junctionshapefileName+".shp")
streamcellFileName = os.path.join(geonetResultsDir,
                                  demFileName[:-4]+"_streamcell.csv")
xsshapefileName = demFileName[:-4]+"_crossSections"
xsFileName = os.path.join(shapefilepath, xsshapefileName+".shp")
banklinefileName = demFileName[:-4]+"_bankLines"
banklineFileName = os.path.join(shapefilepath, banklinefileName+".shp")
"""Things to be changed"""
# PM Filtered DEM to be used in GRASS GIS for flow accumulation
pmGrassGISfileName = os.path.join(geonetResultsDir, "PM_filtered_grassgis.tif")
#pmGrassGISfileName = os.path.join(demDataFilePath,demFileName)
# Skfmm parameters
numBasinsElements = 2
## Clean up previous results and recreate output folders
#if os.path.exists(geonetResultsBasinDir):
#    print "Cleaning old basinTiffs"
#    shutil.rmtree(geonetResultsBasinDir)
#
#if os.path.exists(geonetResultsDir):
#    print "Cleaning old results"
#    shutil.rmtree(geonetResultsDir)
###
#print "Making basinTiffs"
#os.mkdir(geonetResultsBasinDir)
####
#print "Making results"
#if not os.path.exists(geonetResultsDir):
#    os.mkdir(geonetResultsDir)
| [
"gdal.UseExceptions",
"os.path.join",
"sys.platform.startswith",
"os.getcwd"
] | [((999, 1010), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1008, 1010), False, 'import os\n'), ((1372, 1392), 'gdal.UseExceptions', 'gdal.UseExceptions', ([], {}), '()\n', (1390, 1392), False, 'import gdal\n'), ((2616, 2672), 'os.path.join', 'os.path.join', (['shapefilepath', "(pointshapefileName + '.shp')"], {}), "(shapefilepath, pointshapefileName + '.shp')\n", (2628, 2672), False, 'import os\n'), ((2756, 2814), 'os.path.join', 'os.path.join', (['shapefilepath', "(drainagelinefileName + '.shp')"], {}), "(shapefilepath, drainagelinefileName + '.shp')\n", (2768, 2814), False, 'import os\n'), ((2897, 2956), 'os.path.join', 'os.path.join', (['shapefilepath', "(junctionshapefileName + '.shp')"], {}), "(shapefilepath, junctionshapefileName + '.shp')\n", (2909, 2956), False, 'import os\n'), ((2979, 3047), 'os.path.join', 'os.path.join', (['geonetResultsDir', "(demFileName[:-4] + '_streamcell.csv')"], {}), "(geonetResultsDir, demFileName[:-4] + '_streamcell.csv')\n", (2991, 3047), False, 'import os\n'), ((3150, 3203), 'os.path.join', 'os.path.join', (['shapefilepath', "(xsshapefileName + '.shp')"], {}), "(shapefilepath, xsshapefileName + '.shp')\n", (3162, 3203), False, 'import os\n'), ((3274, 3328), 'os.path.join', 'os.path.join', (['shapefilepath', "(banklinefileName + '.shp')"], {}), "(shapefilepath, banklinefileName + '.shp')\n", (3286, 3328), False, 'import os\n'), ((3446, 3504), 'os.path.join', 'os.path.join', (['geonetResultsDir', '"""PM_filtered_grassgis.tif"""'], {}), "(geonetResultsDir, 'PM_filtered_grassgis.tif')\n", (3458, 3504), False, 'import os\n'), ((1409, 1451), 'os.path.join', 'os.path.join', (['demDataFilePath', 'demFileName'], {}), '(demDataFilePath, demFileName)\n', (1421, 1451), False, 'import os\n'), ((1892, 1922), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (1915, 1922), False, 'import sys\n')] |
# AUTOGENERATED! DO NOT EDIT! File to edit: ttbarzp.ipynb (unless otherwise specified).
__all__ = ['get_elijah_ttbarzp_cs', 'get_manuel_ttbarzp_cs', 'import47Ddata', 'get47Dfeatures']
# Cell
import numpy as np
import tensorflow as tf
# Cell
def get_elijah_ttbarzp_cs():
    r"""Cross sections produced by Elijah for $pp \to t\overline{t} \; Z'$ phenomenology.

    Returns a three-element list: signal $Z'$ masses (GeV), the matching
    signal cross sections (pb), and the background cross sections (pb).
    """
    masses_gev = [10, 50, 100, 200, 350, 500, 1000, 2000, 5000]
    signal_cs_pb = [9.801, 0.5445, 0.1442, 0.03622, 0.009998,
                    0.003802, 0.0003936, 2.034e-05, 2.748e-08]
    background_cs_pb = [0.106, 0.0117, 5.58]
    return [masses_gev, signal_cs_pb, background_cs_pb]
# Cell
def get_manuel_ttbarzp_cs():
    r"""Cross sections produced through MadGraph by Manuel for the
    semihadronic, semileptonic $pp \to t\overline{t} \; Z', Z' \to b\overline{b}$
    channel.

    Returns a three-element list: signal $Z'$ masses (GeV), signal cross
    sections (pb), and background cross sections (pb).
    """
    masses_gev = [350, 500, 750, 1000, 2000, 3000, 4000]
    signal_cs_pb = [0.001395, 0.0007823, 0.0003429, 0.0001692,
                    1.808e-05, 1.325e-06, 4.456e-07]
    background_cs_pb = [0.1339, 0.01187, 5.603]
    return [masses_gev, signal_cs_pb, background_cs_pb]
# Cell
def import47Ddata(name):
    r"""
    Imports `name.npy` file containing 47-dimensional data for training.
    Returns the loaded numpy array, or ``None`` if the download/load fails.

    Available files:
    - bgh.npy (Standard Model background 1, $pp \to t\overline{t}h$)
    - bg4t.npy (Standard Model background 2, $pp \to t\overline{t}t\overline{t}$)
    - bgnoh.npy (Standard Model background 3, $pp \to t\overline{t} \; \setminus \; h$)
    - sig350G.npy ($Z'$ signal, $m_{Z'} = 350$ GeV)
    - sig500G.npy ($Z'$ signal, $m_{Z'} = 500$ GeV)
    - sig1T.npy ($Z'$ signal, $m_{Z'} = 1$ TeV)
    - sig2T.npy ($Z'$ signal, $m_{Z'} = 2$ TeV)
    - sig4T.npy ($Z'$ signal, $m_{Z'} = 4$ TeV)
    """
    # Accept names with or without the .npy extension.
    if name.endswith('.npy'):
        name = name[:-4]
    url = 'https://storage.googleapis.com/ttbarzp/47dim/'
    try:
        # get_file caches the download locally and returns the cached path.
        path = tf.keras.utils.get_file(f'{name}.npy', url + name + '.npy')
        return np.load(path)
    except Exception:
        # Narrowed from a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; return None explicitly on failure.
        print(f"{name}.npy doesn't appear to exist")
        return None
# Cell
def get47Dfeatures():
    """
    Returns list containing the names of the 47 features found in the data
    accessible through `ttbarzp.import47Ddata()`.
    """
    # b-jet pair labels in the fixed ordering used by the dataset.
    pairs = ['b1 b2', 'b1 b3', 'b1 b4', 'b2 b3', 'b2 b4', 'b3 b4']
    names = ['pT b%d' % i for i in range(1, 5)]
    names += ['sdEta ' + p for p in pairs]
    names += ['sdPhi ' + p for p in pairs]
    names += ['dR ' + p for p in pairs]
    names += ['MET', 'pT l', 'MT l MET']
    names += ['M ' + p for p in pairs]
    names += ['MT b%d l MET' % i for i in range(1, 5)]
    names += ['M j1 j2', 'pT j1', 'pT j2', 'dR j1 j2']
    names += ['dR b%d l' % i for i in range(1, 5)]
    names += ['sdPhi b%d l' % i for i in range(1, 5)]
    return names
"numpy.load",
"tensorflow.keras.utils.get_file"
] | [((2383, 2442), 'tensorflow.keras.utils.get_file', 'tf.keras.utils.get_file', (['f"""{name}.npy"""', "(url + name + '.npy')"], {}), "(f'{name}.npy', url + name + '.npy')\n", (2406, 2442), True, 'import tensorflow as tf\n'), ((2458, 2471), 'numpy.load', 'np.load', (['path'], {}), '(path)\n', (2465, 2471), True, 'import numpy as np\n')] |
# SPDX-FileCopyrightText: 2021 iteratec GmbH
#
# SPDX-License-Identifier: Apache-2.0
import argparse
import logging
import sys
from zapv2 import ZAPv2
from .zap_automation import ZapAutomation
# set up logging to file - see previous section for more details
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(name)-12s %(levelname)-8s: %(message)s',
    datefmt='%Y-%m-%d %H:%M')
# NOTE(review): this rebinds the name ``logging`` from the stdlib module to a
# Logger instance. The rest of the file relies on it (Logger also exposes
# .info/.exception), so the name must not be changed, but it shadows the module.
logging = logging.getLogger('zapclient')
def main():
    """CLI entry point: parse arguments, run the ZAP scan, exit 1 on a missing target."""
    args = get_parser_args()
    # Guard clause: an absent or empty target aborts the run.
    if not args.target:
        logging.info('Argument error: No target specified!')
        sys.exit(1)
    process(args)
    logging.info('Finished :-) !')
def process(args):
    """Run a full ZAP scan against ``args.target`` and write the report.

    Builds the proxy configuration, connects the ZAP API client, drives the
    ZapAutomation framework, generates the report and shuts ZAP down.
    Exits with status 1 on argument errors and 3 on unexpected failures.
    """
    # Only pass an API key through when one was actually supplied.
    api_key = None
    if args.api_key is not None and len(args.api_key) > 0:
        api_key = args.api_key
    # MANDATORY. Define the listening address of ZAP instance
    zap_proxy = {
        "http": "http://127.0.0.1:8080",
        "https": "http://127.0.0.1:8080"
    }
    # Override the default local proxy when a ZAP URL was provided.
    if args.zap_url is not None and len(args.zap_url) > 0:
        zap_proxy = {
            "http": "http://" + args.zap_url,
            "https": "http://" + args.zap_url
        }
    logging.info(':: Configuring ZAP Instance with %s', zap_proxy)
    # Connect ZAP API client to the listening address of ZAP instance
    zap = ZAPv2(proxies=zap_proxy, apikey=api_key)
    logging.info(':: Starting SCB ZAP Automation Framework with config %s', args.config_folder)
    zap_automation = ZapAutomation(zap=zap, config_dir=args.config_folder)
    try:
        logging.info(':: Starting SCB ZAP Scan with target %s', args.target)
        zap_automation.scan_target(target=args.target)
        # Collect and log all alerts found for the target.
        alerts = zap_automation.get_zap_scanner.get_alerts(args.target, [], [])
        logging.info(':: Found ZAP Alerts: %s', str(len(alerts)))
        summary = zap.alert.alerts_summary(baseurl=args.target)
        logging.info(':: ZAP Alerts Summary: %s', str(summary))
        zap_automation.generate_report_file(file_path=args.output_folder, report_type=args.report_type)
        zap_automation.zap_shutdown()
        logging.info(':: Finished !')
    except argparse.ArgumentError as e:
        # NOTE(review): ZAP shutdown is NOT attempted on this path — confirm intended.
        logging.exception(f'Argument error: {e}')
        sys.exit(1)
    except Exception as e:
        logging.exception(f'Unexpected error: {e}')
        # Best-effort shutdown of the ZAP instance before exiting.
        zap_automation.zap_shutdown()
        sys.exit(3)
def get_parser_args(args=None):
    """Build the zap-client argument parser and parse *args* (sys.argv when None)."""
    parser = argparse.ArgumentParser(
        prog='zap-client',
        description='OWASP secureCodeBox OWASP ZAP Client (can be used to automate OWASP ZAP instances based on YAML configuration files.)')
    # Connection settings.
    parser.add_argument(
        '-z', '--zap-url', default=None, required=True,
        help='The ZAP API Url used to call the ZAP API.')
    parser.add_argument(
        '-a', '--api-key', default=None, required=False,
        help='The ZAP API Key used to call the ZAP API.')
    # Scan configuration.
    parser.add_argument(
        '-c', '--config-folder', default='/home/securecodebox/configs/', required=False,
        help='The path to a local folder containing the additional ZAP configuration YAMLs used to configure OWASP ZAP.')
    parser.add_argument(
        '-t', '--target', default=None, required=True,
        help='The target to scan with OWASP ZAP.')
    # Output settings.
    parser.add_argument(
        '-o', '--output-folder', default='./', required=False,
        help='The path to a local folder used to store the output files, eg. the ZAP Report or logfiles.')
    parser.add_argument(
        '-r', '--report-type', default=None, required=False,
        choices=['XML', 'JSON', 'HTML', 'MD'],
        help='The OWASP ZAP Report Type.')
    return parser.parse_args(args)
# Standard entry guard: run the CLI only when executed directly as a script.
if __name__ == '__main__':
    main()
| [
"logging.basicConfig",
"logging.getLogger",
"argparse.ArgumentParser",
"logging.exception",
"sys.exit",
"zapv2.ZAPv2",
"logging.info"
] | [((262, 400), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(name)-12s %(levelname)-8s: %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M"""'}), "(level=logging.INFO, format=\n '%(asctime)s %(name)-12s %(levelname)-8s: %(message)s', datefmt=\n '%Y-%m-%d %H:%M')\n", (281, 400), False, 'import logging\n'), ((415, 445), 'logging.getLogger', 'logging.getLogger', (['"""zapclient"""'], {}), "('zapclient')\n", (432, 445), False, 'import logging\n'), ((754, 784), 'logging.info', 'logging.info', (['"""Finished :-) !"""'], {}), "('Finished :-) !')\n", (766, 784), False, 'import logging\n'), ((1276, 1338), 'logging.info', 'logging.info', (['""":: Configuring ZAP Instance with %s"""', 'zap_proxy'], {}), "(':: Configuring ZAP Instance with %s', zap_proxy)\n", (1288, 1338), False, 'import logging\n'), ((1419, 1459), 'zapv2.ZAPv2', 'ZAPv2', ([], {'proxies': 'zap_proxy', 'apikey': 'api_key'}), '(proxies=zap_proxy, apikey=api_key)\n', (1424, 1459), False, 'from zapv2 import ZAPv2\n'), ((1465, 1560), 'logging.info', 'logging.info', (['""":: Starting SCB ZAP Automation Framework with config %s"""', 'args.config_folder'], {}), "(':: Starting SCB ZAP Automation Framework with config %s',\n args.config_folder)\n", (1477, 1560), False, 'import logging\n'), ((2530, 2715), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""zap-client"""', 'description': '"""OWASP secureCodeBox OWASP ZAP Client (can be used to automate OWASP ZAP instances based on YAML configuration files.)"""'}), "(prog='zap-client', description=\n 'OWASP secureCodeBox OWASP ZAP Client (can be used to automate OWASP ZAP instances based on YAML configuration files.)'\n )\n", (2553, 2715), False, 'import argparse\n'), ((550, 602), 'logging.info', 'logging.info', (['"""Argument error: No target specified!"""'], {}), "('Argument error: No target specified!')\n", (562, 602), False, 'import logging\n'), ((611, 622), 'sys.exit', 'sys.exit', (['(1)'], 
{}), '(1)\n', (619, 622), False, 'import sys\n'), ((1654, 1722), 'logging.info', 'logging.info', (['""":: Starting SCB ZAP Scan with target %s"""', 'args.target'], {}), "(':: Starting SCB ZAP Scan with target %s', args.target)\n", (1666, 1722), False, 'import logging\n'), ((2206, 2235), 'logging.info', 'logging.info', (['""":: Finished !"""'], {}), "(':: Finished !')\n", (2218, 2235), False, 'import logging\n'), ((2285, 2326), 'logging.exception', 'logging.exception', (['f"""Argument error: {e}"""'], {}), "(f'Argument error: {e}')\n", (2302, 2326), False, 'import logging\n'), ((2335, 2346), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2343, 2346), False, 'import sys\n'), ((2382, 2425), 'logging.exception', 'logging.exception', (['f"""Unexpected error: {e}"""'], {}), "(f'Unexpected error: {e}')\n", (2399, 2425), False, 'import logging\n'), ((2472, 2483), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (2480, 2483), False, 'import sys\n')] |
# ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM. If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================
"""
Created on 6 Dec 2012
@author: Ioannis
"""
"""
Class that acts as an abstract. All the compound Objects (i.e. compoundMachine) should inherit from it
The information about the objects that will constitute the compound object must be provided in the form of dictionaries
sample Dictionary argument:
{
'type':'Machine',
'processingTime':{
'distributionType':'Fixed',
'mean':'0',
'stdev':'0',
'min':'0',
'max':'0'
},
'failures':{
'failureDistribution','Fixed',
'MTTF':'0',
'MTTR':'0',
'availability':'0'
}
}
The Compound object must also define the routing among the components.
If the routing type is 'Series' then the sequence objects provided is the sequence used to set successors
e.g. if the sequence given is -queue - machine then a queue must be placed in front of the machine
"""
from SimPy.Simulation import (
Process,
Resource,
now,
activate,
passivate,
waituntil,
hold,
)
from .CoreObject import CoreObject
# from SimpleOperatedMachine2 import OperatedMachine
from OperatedMachine import OperatedMachine
from .Queue import Queue
from .Operator import Operator
class RoutingTypeError(Exception):
    """Raised when a compound object is given an unsupported routing type."""

    def __init__(self, routingError):
        # Delegate to the Exception base so str()/args behave as usual.
        super(RoutingTypeError, self).__init__(routingError)
class ReceiverObjectError(Exception):
    """Raised when no valid receiver object can be resolved."""

    def __init__(self, receiverError):
        # Delegate to the Exception base so str()/args behave as usual.
        super(ReceiverObjectError, self).__init__(receiverError)
class NoneCallerObjectError(Exception):
    """Raised when an operation is attempted with no caller object set."""

    def __init__(self, callerError):
        # Delegate to the Exception base so str()/args behave as usual.
        super(NoneCallerObjectError, self).__init__(callerError)
# ===========================================================================
# the compound object
# ===========================================================================
class CompoundObject(CoreObject, Queue):
# object arguments may provide information on the type of the object, and the arguments needed to initiate it
def __init__(self, id, name, capacity, routing="Series", *objects):
CoreObject.__init__(self, id, name)
# it would be a good idea to have the arguments provided as dictionary
self.type = "CompoundObject"
# variable that can hold according to this implementation two different values
# 'Parallel'
# 'Series'
self.routing = routing
# assert that there are arguments provided, and that the type of the arguments provided is dictionary
assert (len(objects)) > 0, "the number of objects provided is 0"
assert type(objects) is dict, "the given arguments are not dictionaries"
self.numberOfObjects = len(objects)
# the capacity of the compound object
# have to be careful, the capacity of the compound object should not exceed the
# combined capacity of the internal objects
self.capacity = capacity
# a tuple that holds the arguments provided
self.objects = objects
# list of the objects that the compoundObject consists of
self.coreObjects = []
# list of the objects' IDs that constitute the compooundObject
self.coreObjectIds = []
# list of machines
self.machines = []
# list of queues
self.queues = []
# list with the resources assigned to the object
self.resources = []
# list with the repairmen assigned to the object
self.repairmen = []
# list with the operators assigned to the object
self.operators = []
# list with the inner objects that can receive from the compound object
self.innerNext = []
# list with the inner objects that can deliver to the compound object
self.innerPrevious = []
# # variable that shows if the entity received is to be processed internally
# # or if the internal processing is concluded and can now be handed in to the successor of the compoundOjbect
# self.entityToBeProcessedInternally = False
# variable which informs that a new entity was just received
self.newEntityWillBeReceived = False
# variables used to define the sorting of the entities in the internal queue
self.schedulingRule = (
schedulingRule # the scheduling rule that the Queue follows
)
self.multipleCriterionList = (
[]
) # list with the criteria used to sort the Entities in the Queue
if schedulingRule.startswith(
"MC"
): # if the first criterion is MC aka multiple criteria
SRlist = schedulingRule.split(
"-"
) # split the string of the criteria (delimiter -)
self.schedulingRule = SRlist.pop(0) # take the first criterion of the list
self.multipleCriterionList = (
SRlist # hold the criteria list in the property multipleCriterionList
)
# ===================================================================
# first assign the operators to the compoundObject
# if the operators are described as dictionaries
# in the arguments then create them
# ===================================================================
objectIndex = 0
for (
object
) in self.objects: # check if there are repairmen or operators provided
try:
if object.type == "Operator":
object.coreObjectsIds.append(self.id)
object.coreObjects.append(self)
self.resources.append(object)
self.operators.append(object)
# currently only one repairman can work on a machine (the capacity may vary)
elif object.type == "Repairman":
object.coreObjectsIds.append(self.id)
object.coreObjects.append(self)
self.resources.append(object)
self.repairmen.append(object)
except:
type = object.get("type", "not found")
if type == "Repairman":
capacity = object.get("capacity", "1")
componentName = (
self.name + str(self.id) + "_" + type + "_" + str(objecIndex)
)
compoentId = str(self.id) + str(objectIndex)
r = Repairman(id=componentId, name=componentName, capacity=capacity)
r.coreObjectIds.append(self.id)
r.coreObjects.append(self)
self.resources.append(r)
self.repairmen.append(r)
elif type == "Operator":
capacity = object.get("capacity", "1")
componentName = (
self.name + str(self.id) + "_" + type + "_" + str(objecIndex)
)
compoentId = str(self.id) + str(objectIndex)
o = Operator(id=componentId, name=componentName, capacity=capacity)
o.coreObjectIds.append(self.id)
o.coreObjects.append(self)
self.resources.append(o)
self.operators.append(o)
objectIndex += 1
# ===================================================================
# walk through the objects of type Machine and Queue and initiate them
# the simple objects making up the compoundOjbect
# can only be queues and machines for the moment
# ===================================================================
objectIndex = 0
for object in self.objects:
# if the inner-objects are created out of the compound object then
# they will be passed to it ass they are
try:
if object.type == "Machine":
self.machines.append(object)
elif object.type == "Queue":
self.queues.append(object)
self.coreObjectIds.append(object.id)
self.coreObjects.append(object)
# if they are not created out of the composite object then they should
# be created in the object
except:
type = object.get("type", "not found")
# object type machine
if type == "Machine":
componentName = (
self.name + str(self.id) + "_" + type + "_" + str(objectIndex)
)
componentId = str(self.id) + str(objectIndex)
processingTime = object.get("processingTime", "not found")
distributionType = processingTime.get("distributionType", "Fixed")
mean = float(processingTime.get("mean", "0"))
stdev = float(processingTime.get("stdev", "0"))
min = float(processingTime.get("min", "0"))
max = float(processingTime.get("max", "0"))
failures = object.get("failures", "not found")
failureDistribution = failures.get("failureDistribution", "Fixed")
MTTF = float(failures.get("MTTF", "0"))
MTTR = float(failures.get("MTTR", "0"))
availability = float(failures.get("availability", "0"))
for repairman in self.repairmen:
if self.id in repairman.coreObjectIds:
R = repairman
O = []
for operator in self.operators:
if self.id in operator.coreObjectIds:
O.append(operator)
# there must be an implementation of a machine where the failure is passed as argument
# this way it will be possible to have the same failure-interruption for all the inner objects
M = OperatedMachine(
id,
name,
1,
distribution=distributionType,
failureDistribution=failureDistribution,
MTTF=MTTF,
MTTR=MTTR,
availability=availability,
repairman=R,
mean=mean,
stdev=stdev,
min=min,
max=max,
operatorPool=O,
)
self.coreObjectIds.append(M.id)
self.coreObjects.append(M)
self.machines.append(M)
# object type Queue
if type == "Queue":
componentName = (
self.name + str(self.id) + "_" + type + "_" + str(objectIndex)
)
componentId = str(self.id) + str(objectIndex)
capacity = int(object.get("capacity", "1"))
isDummy = bool(object.get("isDummy", "0"))
schedulingRule = object.get("schedulingRule", "FIFO")
Q = Queue(
id=componentId,
name=componentName,
capacity=capacity,
isDummy=isDummy,
schedulingRule=schedulingRule,
)
self.coreObjectIds.append(Q.id)
self.coreObjects.append(Q)
self.queues.append(Q)
objectIndex += 1
# the total time the machine has been waiting for the operator
self.totalTimeWaitingForOperator = 0
# =======================================================================
# sets the routing in and out elements for the Object as well as
# the inner routing (routing in and out for the constituting objects)
# =======================================================================
def defineRouting(self, predecessorList=[], successorList=[]):
self.next = successorList
self.previous = predecessorList
# define the routing of each constituting object and initialize it
try:
objectIndex = 0
for object in self.coreObjects:
objectIndex += 1
if self.routing == "Series":
if objectIndex == 1:
object.defineRouting([], [self.coreObjects[objectIndex]])
self.innerNext.append(object)
elif objectIndex == len(self.coreObjectIds):
object.defineRouting(
[self.coreObjects[objectIndex - 2]], [self]
)
self.innerPrevious.append(object)
else:
object.defineRouting(
[self.coreObjects[objectIndex - 2]],
[self.coreObjects[objectIndex]],
)
elif self.routing == "Parallel":
object.defineRouting([], [self])
self.innerNext.append(object)
self.innerPrevious.append(object)
else:
raise RoutingTypeError(
"The type of the routing is neither Parallel or Series"
)
except RoutingTypeError as routingError:
print(("Routing type error: {0}".format(routingError)))
# =======================================================================
# initialize the compound object
# =======================================================================
def initialize(self):
CoreObject.initialize(self)
# queue to hold the entities all through the stay of an entity in the composite object
self.Res = Resource(self.capacity)
# the inner Giver that will feed the compound object receiver
self.innerGiver = None
# the inner object that will receive the object most recently added to the self.Res.activeQ
self.innerReceiver = None
# entity that will passed on to the innerReceiver
self.entityForInternalProc = None
# # inner queues that buffer the entities before they are handed in to the inner objects
# # and receive them again after the internal processing
# self.entryRes = Resource(self.capacity)
# self.exitRes = Resource(self.capacity)
# initialize all resources
# will have to reconsider as some of the resources may have been already initialized
for resource in self.resources:
if not resource.isInitialized():
resource.initialize()
# initialize all objects - and the entrance object
# self.firstObject.initialize()
for object in self.coreObjects:
object.initialize()
# =======================================================================
# the main process of the composite object object
# it's main function is to activate the inner objects
# =======================================================================
def run(self):
# activate every object in the coreOjbects list
for object in self.coreObjects:
activate(object, object.run())
while 1:
yield waituntil, self, self.canAcceptAndIsRequested
# =======================================================================
# get the entity from the previous object
# has to update the inner receiver object
# and update its internal queue in a way
# that its canAcceptAndIsRequested method
# will be updated
# =======================================================================
def getEntity(self):
activeEntity = coreObject.getEntity(self)
self.entityForInternalProc = activeEntity
return activeEntity
# =======================================================================
# removes an entity from the Object
# it must be run on the last object if the routing is 'Series'
# or on the object that has a kind of priority and is waiting to dispose
# if the routing is 'Parallel'
# =======================================================================
def removeEntity(self):
actibeObject = self.getActiveObject()
activeObjectQueue = self.getActiveObjectQueue()
try:
receiverObject = self.getReceiverObject()
# internal logic ================================================
# the activeEntity is not removed, the removeEntity method just returns the activeEntity
# the entity remains in the activeObjectQueue till it is removed by the external logic
# the entity's flag 'internal' is changed to True to signify the start of the internal processing
if receiverObject in self.innerNext:
activeEntity = next(
entity == self.entityForInternalProc for entity in activeObjectQueue
)
# activeIndex = activeObjectQueue.index(activeEntity)
activeEntity.internal = True
# empty the previous list of the innerReceiver so that its
# canAcceptAndIsRequested method can be controlled via
# the compoundObject's canAcceptAndIsRequested method
self.innerReceiver.previous = []
# external logic ================================================
elif receiverObject in self.next:
activeEntity = self.innerGiver.getActiveObjectQueue[0]
# find the index of the entity to be removed in the activeObjectQueue
activeIndex = activeObjectQueue.index(activeEntity)
# remove the entity from the internal queue and from the innerGiver queue
self.innerGiver.getActiveObjectQueue().pop(0)
activeObjectQueue.pop(activeIndex)
# update the time the last entity left
self.timeLastEntityLeft = now()
else:
raise ReceiverObjectError(
"""the receiver has not been defined, the composite object needs info on the receiver\
in order to decide where to deliver"""
)
except ReceiverObjectError as receiverError:
print(("Receiver object error: {0}".format(receiverError)))
try:
self.outputTrace(activeEntity.name, activeEntity.id, "released " + self.objName)
except TypeError:
pass
return activeEntity
# =======================================================================
# checks if the Object can dispose an entity to the following object
# =======================================================================
def haveToDispose(self, callerObject=None):
# get the active object
activeObject = self.getActiveOject()
activeObjectQueue = activeObject.getActiveObjectQueue()
theCaller = callerObject
assert theCaller != None, "the caller object of a compound cannot be None"
try:
if theCaller != None:
# internal logic ============================================
if theCaller in self.innerNext:
if len(activeObject.innerNext) == 1:
return len(activeObjectQueue) > 0 and any(
entity == self.entityForInternalProc
for entity in activeObjectQueue
)
maxTimeWaiting = 0
for object in activeObject.innerNext:
if object.canAccept(self): # if the object can accept
timeWaiting = (
now() - object.timeLastEntityLeft
) # compare the time that it has been waiting
if (
timeWaiting > maxTimeWaiting or maxTimeWaiting == 0
): # with the others'
maxTimeWaiting = timeWaiting
self.receiver = object
receiverObject = activeObject.getReceiverObject()
return (
len(activeObjectQueue) > 0
and any(
entity == self.entityForInternalProc
for entity in activeObjectQueue
)
and (theCaller is receiverObject)
and self.innerReceiver == receiverObject
)
# external logic ============================================
elif theCaller in self.next:
innerObjectHaveToDispose = False
maxTimeWaiting = 0
# find which object from those have something to dispose
# is waiting the most
for object in self.innerPrevious:
if object.haveToDispose(activeObject):
innerObjectHaveToDispose = True
if object.downTimeInTryingToReleaseCurrentEntity > 0:
timeWaiting = now() - object.timeLastFailureEnded
else:
timeWaiting = now() - object.timeLastFailureEnded
if timeWaiting >= maxTimeWaiting:
activeObject.innerGiver = object
maxTimeWaiting = timeWaiting
# if there is only one successor then check if there is something to be moved on
# and if the entity to be moved on is in the object's activeObjectQueue
if len(activeObject.Next) == 1:
return (
(len(activeObjectQueue) > 0)
and innerObjectHaveToDispose
and (
self.innerGiver.getActiveObjectQueue()[0]
in activeObjectQueue
)
)
# if there are more than one successors then find the one waiting the most and
# assign it as receiver
maxTimeWaiting = 0
for object in activeObject.next:
if object.canAccept(self): # if the object can accept
timeWaiting = (
now() - object.timeLastEntityLeft
) # compare the time that it has been waiting
if (
timeWaiting > maxTimeWaiting or maxTimeWaiting == 0
): # with the others'
maxTimeWaiting = timeWaiting
self.receiver = object # and update the receiver to the index of this object
# return true only to the predecessor from which the queue will take
receiverObject = activeObject.getReceiverObject()
return (
len(activeObjectQueue) > 0
and innerObjectHaveToDispose
and (
self.innerGiver.getActiveObjectQueue()[0]
in activeObjectQueue
)
and (thecaller is receiverObject)
)
else:
raise NoneCallerObjectError(
"The caller of a CompoundObject method have to dispose cannot be None"
)
except NoneCallerObjectError as callerError:
print(("None-Caller object error: {0}".format(callerError)))
# =======================================================================
# the canAcceptAndIsRequested method that implements the
# logic for both internal and external objects
# =======================================================================
# def canAcceptAndIsRequested(self):
# get the active object
# activeObject = self.getActiveObject()
# # dummy boolean variable to check if any predecessor has something to hand in
# isRequested=False
# # the variable that checks weather there is an external entity to be received is reset each
# # time the canAcceptAndIsRequested method is invoked
# self.newEntityWillBeReceived = False
# # dummy boolean to check if the compound is requested internally
# isRequestedInternally = False
# # dummy timer to check which predecessor has been waiting the most
# maxTimeWaiting=0
# # check first if there are inner objects that have to dispose
# for object in self.innerPrevious:
# if(object.haveToDispose(activeObject) and object.receiver==self):
# isRequested=True
# isRequestedInternally = True
# if(object.downTimeInTryingToReleaseCurrentEntity>0):
# timeWaiting=now()-object.timeLastFailureEnded
# else:
# timeWaiting=now()-object.timeLastEntityEnded
# if (timeWaiting>=maxTimeWaiting):
# activeObject.giver=object
# maxTimeWaiting=timeWaiting
# # if it is not requested internally then receive from the external predecessors
# if not isRequestedInternally:
# #loop through the predecessors to see which have to dispose and which is the one blocked for longer
# for object in self.previous:
# if(object.haveToDispose(activeObject) and object.receiver==self): # if they have something to dispose off
# isRequested=True
# if(object.downTimeInTryingToReleaseCurrentEntity>0):# if the predecessor has failed wile waiting
# timeWaiting=now()-object.timeLastFailureEnded # then update according the timeWaiting to be compared with the ones
# else: # of the other machines
# timeWaiting=now()-object.timeLastEntityEnded
# #if more than one predecessor have to dispose take the part from the one that is blocked longer
# if(timeWaiting>=maxTimeWaiting):
# activeObject.giver=object # pick the predecessor waiting the more
# maxTimeWaiting=timeWaiting
# # have to find out if the logic to be implemented is internal or external
# if giverObject in self.previous:
# self.newEntry()
# # now that we now if it is a new entry we can implement the corresponding logic for the getActiveObjectQueue
# activeObjectQueue=self.getActiveObjectQueue()
# # if it is a new entry then perform the normal logic
# if self.isNewEntry():
# # return true when the Queue is not fully occupied and a predecessor is requesting it
# return len(activeObjectQueue)<self.capacity and isRequested
# # otherwise, if the object is returned from the internal logic
# elif not self.isNewEntry():
# assert ((giverObjectQueue[0] in self.Res.activeQ)\
# and isRequested),\
# "The entity to be returned from the internal logic is not present in the external queue/res"
# return (giverObjectQueue[0] in self.Res.activeQ)\
# and isRequested
# ---------------------------------------------------------------------------
def canAcceptAndIsRequested(self):
activeObject = self.getActiveObject()
giverObject = self.getGiverObject()
giverObjectQueue = self.getGiverObjectQueue()
# find the inner object that will get the entity ====================
innerCanAccept = False
maxTimeWaiting = 0
for object in activeObject.innerNext:
if object.canAccept():
innerCanAccept = True
timeWaiting = now() - object.timeLastEntityLeft
if timeWaiting > maxTimeWaiting or maxTimeWaiting == 0:
maxTimeWaiting = timeWaiting
activeObject.innerReceiver = object
activeObject.innerReceiver.previous.append(activeObject)
# if there is only one predecessor assign it as giver ===============
if len(activeObject.previous) == 1:
return (
giverObject.haveToDispose(activeObject)
and (len(activeObjectQueue) < activeObject.capacity)
and innerCanAccept
)
# find the giver object among the predecessors ======================
maxTimeWaiting = 0
isRequested = False
for object in activeObject.previous:
if object.haveToDispose(activeObject) and object.receiver == activeObject:
isRequested = True
if (
object.downTimeInTryingToReleaseCurrentEntity > 0
): # if the predecessor has failed wile waiting
timeWaiting = (
now() - object.timeLastFailureEnded
) # then update according the timeWaiting to be compared with the ones
else: # of the other machines
timeWaiting = now() - object.timeLastEntityEnded
# if more than one predecessor have to dispose take the part from the one that is blocked longer
if timeWaiting >= maxTimeWaiting:
activeObject.giver = object # pick the predecessor waiting the more
maxTimeWaiting = timeWaiting
return (
isRequested
and innerCanAccept
and len(activeObjectQueue) < activeObject.capacity
)
# =======================================================================
# checks if the Object can accept an entity
# have to re-assess the invocation of this method
# many times this method is called without callerObject argument
# =======================================================================
# def canAccept(self, callerObject=None):
# # get the active object
# activeObject = self.getActiveObject()
# giverObject = self.getGiverObject()
# theCaller = callerObject
# try:
# if(theCaller!=None):
# # dummy boolean to check if the compound is requested internally
# isRequestedInternally = any(object is theCaller for object in self.innerPrevious)
# if isRequestedInternally:
# return len(self.extRes.activeQ)<activeObject.capacity
# else:
# return len(self.Res.activeQ)<activeObject.capacity
# else:
# raise NoneCallerObjectError("The caller of a CompoundObject method have to dispose cannot be None")
# except NoneCallerObjectError as callerError:
# print "None-CallerObject error: {0}".format(callerError)
# ---------------------------------------------------------------------------
def canAccept(self, callerObject=None):
activeObject = self.getActiveObject()
activeObjectQueue = activeObject.getActiveObjectQueue()
giverObject = activeObject.getGiverObject()
theCaller = callerObject
innerCanAccept = False
maxTimeWaiting = 0
if len(activeObject.previous) == 1 or callerObject == None:
return (
any(object.canAccept() for object in activeObject.innerNext)
and len(activeObjectQueue) < activeObject.capacity
)
return (
any(object.canAccept() for object in activeObject.innerNext)
and len(activeObjectQueue) < activeObject.capacity
and giverObject == theCaller
)
# # =======================================================================
# # check if the object has an entity to dispose externally
# # =======================================================================
# def isNewEntry(self):
# return self.newEntityWillBeReceived
#
# # =======================================================================
# # define that a new entity was just received
# # =======================================================================
# def newEntry(self):
# self.newEntityWillBeReceived = True
| [
"SimPy.Simulation.now",
"OperatedMachine.OperatedMachine",
"SimPy.Simulation.Resource"
] | [((14945, 14968), 'SimPy.Simulation.Resource', 'Resource', (['self.capacity'], {}), '(self.capacity)\n', (14953, 14968), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((19250, 19255), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (19253, 19255), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((29828, 29833), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (29831, 29833), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((10962, 11194), 'OperatedMachine.OperatedMachine', 'OperatedMachine', (['id', 'name', '(1)'], {'distribution': 'distributionType', 'failureDistribution': 'failureDistribution', 'MTTF': 'MTTF', 'MTTR': 'MTTR', 'availability': 'availability', 'repairman': 'R', 'mean': 'mean', 'stdev': 'stdev', 'min': 'min', 'max': 'max', 'operatorPool': 'O'}), '(id, name, 1, distribution=distributionType,\n failureDistribution=failureDistribution, MTTF=MTTF, MTTR=MTTR,\n availability=availability, repairman=R, mean=mean, stdev=stdev, min=min,\n max=max, operatorPool=O)\n', (10977, 11194), False, 'from OperatedMachine import OperatedMachine\n'), ((30937, 30942), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (30940, 30942), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((31146, 31151), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (31149, 31151), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((21063, 21068), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (21066, 21068), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((23913, 23918), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (23916, 23918), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((22574, 22579), 
'SimPy.Simulation.now', 'now', ([], {}), '()\n', (22577, 22579), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n'), ((22690, 22695), 'SimPy.Simulation.now', 'now', ([], {}), '()\n', (22693, 22695), False, 'from SimPy.Simulation import Process, Resource, now, activate, passivate, waituntil, hold\n')] |
''' An example of playing randomly in RLCard
'''
import argparse
import pprint
import rlcard
from rlcard.agents import RandomAgent
from rlcard.utils import set_seed
def run(args):
    ''' Build the requested RLCard environment, attach a random agent to
    every seat and play a single (non-training) episode, printing the
    resulting trajectories and a sample observation.
    '''
    # Make environment
    env = rlcard.make(args.env, config={'seed': 42})

    # Seed numpy, torch, random
    set_seed(42)

    # Every seat is controlled by the same random agent instance
    agent = RandomAgent(num_actions=env.num_actions)
    env.set_agents([agent] * env.num_players)

    # Generate data from the environment
    trajectories, player_wins = env.run(is_training=False)

    # Print out the trajectories
    print('\nTrajectories:')
    print(trajectories)
    print('\nSample raw observation:')
    pprint.pprint(trajectories[0][0]['raw_obs'])
    print('\nSample raw legal_actions:')
    pprint.pprint(trajectories[0][0]['raw_legal_actions'])
if __name__ == '__main__':
    # Environments shipped with RLCard that this example can drive
    supported_envs = ['blackjack', 'leduc-holdem', 'limit-holdem', 'doudizhu', 'mahjong', 'no-limit-holdem', 'uno', 'gin-rummy', 'bridge', 'dummy']

    arg_parser = argparse.ArgumentParser("Random example in RLCard")
    arg_parser.add_argument('--env', type=str, default='leduc-holdem',
                            choices=supported_envs)
    run(arg_parser.parse_args())
| [
"rlcard.make",
"argparse.ArgumentParser",
"rlcard.utils.set_seed",
"rlcard.agents.RandomAgent",
"pprint.pprint"
] | [((215, 257), 'rlcard.make', 'rlcard.make', (['args.env'], {'config': "{'seed': 42}"}), "(args.env, config={'seed': 42})\n", (226, 257), False, 'import rlcard\n'), ((295, 307), 'rlcard.utils.set_seed', 'set_seed', (['(42)'], {}), '(42)\n', (303, 307), False, 'from rlcard.utils import set_seed\n'), ((338, 378), 'rlcard.agents.RandomAgent', 'RandomAgent', ([], {'num_actions': 'env.num_actions'}), '(num_actions=env.num_actions)\n', (349, 378), False, 'from rlcard.agents import RandomAgent\n'), ((669, 713), 'pprint.pprint', 'pprint.pprint', (["trajectories[0][0]['raw_obs']"], {}), "(trajectories[0][0]['raw_obs'])\n", (682, 713), False, 'import pprint\n'), ((759, 813), 'pprint.pprint', 'pprint.pprint', (["trajectories[0][0]['raw_legal_actions']"], {}), "(trajectories[0][0]['raw_legal_actions'])\n", (772, 813), False, 'import pprint\n'), ((855, 906), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Random example in RLCard"""'], {}), "('Random example in RLCard')\n", (878, 906), False, 'import argparse\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 4 11:01:16 2015
@author: hehu
"""
import matplotlib.pyplot as plt
import numpy as np
from sklearn.neighbors import KNeighborsClassifier
from sklearn.lda import LDA
from sklearn.svm import SVC, LinearSVC
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from scipy.linalg import eig
def gaussian(x, mu, sig):
    """Unnormalized Gaussian bell curve exp(-(x-mu)^2 / (2*sig^2)).

    Works element-wise for array inputs; the peak value is 1 at x == mu.
    """
    deviation = x - mu
    exponent = -(deviation ** 2) / (2.0 * sig ** 2)
    return np.exp(exponent)
def visualize(X, y, clf):
    """Plot the two-class 2-D data set and, when a classifier is given,
    its decision regions; for a linear-kernel SVM the separating
    hyperplane, the margins and the support vectors are annotated too.

    Parameters
    ----------
    X : (N, 2) array of sample coordinates.
    y : (N,) array of binary labels (1 -> red circles, 0 -> blue crosses).
    clf : fitted sklearn SVC, or None to draw the raw data only.

    Fix: the linear-kernel annotation block dereferenced ``clf.kernel``
    even when ``clf`` was None (the script calls ``visualize(X, y, None)``
    to plot the raw data), raising AttributeError; the check is now
    guarded with ``clf is not None``.
    """
    fig, ax = plt.subplots(figsize=[6,6])
    plt.axis('equal')

    # create a mesh to plot in
    #x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    #y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    x_min, x_max = -9, 3
    y_min, y_max = -7, 5

    if clf is not None:
        h = .01  # step size in the mesh

        xx, yy = np.meshgrid(np.arange(x_min-1, x_max+1, h),
                             np.arange(y_min-1, y_max+1, h))

        Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])

        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        ax.contourf(xx, yy, Z, cmap='bwr', alpha=0.5) #plt.cm.Paired cmap='bwr',

    ymin, ymax = ax.get_ylim()
    xmin, xmax = ax.get_xlim()

    # guard added: clf may legitimately be None (raw-data plot)
    if clf is not None and clf.kernel == "linear":
        # get the separating hyperplane
        w = clf.coef_[0]
        a = -w[0] / w[1]
        xx = np.linspace(-10, 5, 500)
        yy = a * xx - (clf.intercept_[0]) / w[1]

        # plot the parallels to the separating hyperplane that pass through the
        # support vectors
        margin = 1 / np.sqrt(np.sum(clf.coef_ ** 2))
        yy_down = yy + a * margin
        yy_up = yy - a * margin

        ax.plot(xx, yy, 'k-')
        ax.plot(xx, yy_down, 'k--')
        ax.plot(xx, yy_up, 'k--')

        for svIdx in range(clf.support_vectors_.shape[0]):
            sv = [clf.support_vectors_[svIdx, 0], clf.support_vectors_[svIdx, 1]]
            ax.annotate("Support Vectors",
                        sv,
                        xytext=(-6, 3),
                        size=13,
                        bbox=dict(boxstyle="round4", fc="w", ec = "g"),
                        arrowprops=dict(arrowstyle="simple",
                                        connectionstyle="arc3,rad=0.2",
                                        shrinkA = 0,
                                        shrinkB = 8,
                                        fc = "g",
                                        ec = "g"),
                        horizontalalignment='center',
                        verticalalignment='middle')

        # Plot margin: drop a point (x0, y0) on the hyperplane and connect it
        # to the nearest point of each margin line
        x0 = -0.5
        y0 = a * x0 - (clf.intercept_[0]) / w[1]

        distances = np.hypot(x0 - xx, y0 - yy_down)
        minIdx = np.argmin(distances)
        x1 = xx[minIdx]
        y1 = yy_down[minIdx]

        ax.annotate("",
                    xy=(x0, y0), xycoords='data',
                    xytext=(x1, y1), textcoords='data',
                    arrowprops=dict(arrowstyle="<->",
                                    connectionstyle="arc3"),
                    )

        distances = np.hypot(x0 - xx, y0 - yy_up)
        minIdx = np.argmin(distances)
        x2 = xx[minIdx]
        y2 = yy_up[minIdx]

        ax.annotate("",
                    xy=(x0, y0), xycoords='data',
                    xytext=(x2, y2), textcoords='data',
                    arrowprops=dict(arrowstyle="<->",
                                    connectionstyle="arc3"),
                    )

        ax.annotate("Margin",
                    (0.5*(x0+x1), 0.5*(y0+y1)),
                    xytext=(1.5, -6.7),
                    size=13,
                    bbox=dict(boxstyle="round4", fc="w", ec = "g"),
                    arrowprops=dict(arrowstyle="simple",
                                    connectionstyle="arc3,rad=-0.2",
                                    shrinkA = 0,
                                    shrinkB = 8,
                                    fc = "g",
                                    ec = "g"),
                    horizontalalignment='center',
                    verticalalignment='middle')

        ax.annotate("Margin",
                    (0.5*(x0+x2), 0.5*(y0+y2)),
                    xytext=(1.5, -6.7),
                    size=13,
                    bbox=dict(boxstyle="round4", fc="w", ec = "g"),
                    arrowprops=dict(arrowstyle="simple",
                                    connectionstyle="arc3,rad=-0.2",
                                    shrinkA = 0,
                                    shrinkB = 8,
                                    fc = "g",
                                    ec = "g"),
                    horizontalalignment='center',
                    verticalalignment='middle')

        ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=80,
                   facecolors='none', zorder=10)

    #ax.scatter(X[:, 0], X[:, 1], c=y, zorder=10, cmap=plt.cm.Paired)
    ax.set_ylim(y_min, y_max)
    ax.set_xlim(x_min, x_max)

    X1 = X[y==1, :]
    X2 = X[y==0, :]

    ax.plot(X1[:, 0], X1[:, 1], 'ro', zorder = 1, alpha = 0.6)
    ax.plot(X2[:, 0], X2[:, 1], 'bx', zorder = 1)
def generate_data(N):
X1 = np.random.randn(2,N)
X2 = np.random.randn(2,N)
M1 = 0.7*np.array([[1.5151, -0.1129], [0.1399, 0.6287]])
M2 = 0.7*np.array([[0.8602, 1.2461], [-0.0737, -1.5240]])
T1 = np.array([-1, 1]).reshape((2,1))
T2 = np.array([-2, -5]).reshape((2,1))
X1 = np.dot(M1, X1) + np.tile(T1, [1,N])
X2 = np.dot(M2, X2) + np.tile(T2, [1,N])
X1 = X1[::-1,:]
X2 = X2[::-1,:]
return X1, X2
if __name__ == "__main__":
plt.close("all")
# Generate random training data
N = 200
np.random.seed(2014)
X1, X2 = generate_data(N)
X = np.concatenate((X1.T, X2.T))
y = np.concatenate((np.ones(N), np.zeros(N)))
# Generate test sample
np.random.seed(2016)
X1_test, X2_test = generate_data(N)
X_test = np.concatenate((X1_test.T, X2_test.T))
y_test = np.concatenate((np.ones(N), np.zeros(N)))
clf = SVC(kernel = 'linear', C = 100)
clf.fit(X, y)
visualize(X, y, None)
plt.savefig("../images/SVM_data.pdf", bbox_inches = "tight", transparent = True)
visualize(X, y, clf)
plt.savefig("../images/SVM_boundary.pdf", bbox_inches = "tight", transparent = True)
clf = SVC(kernel = 'poly', degree = 2, C = 1)
clf.fit(X, y)
visualize(X, y, clf)
plt.title("SVM with 2nd order Polynomial Kernel")
plt.savefig("../images/SVM_boundary_poly2.pdf", bbox_inches = "tight", transparent = True)
clf = SVC(kernel = 'rbf', C = 1)
clf.fit(X, y)
visualize(X, y, clf)
plt.title("SVM with the RBF Kernel")
plt.savefig("../images/SVM_boundary_RBF.pdf", bbox_inches = "tight", transparent = True)
| [
"numpy.array",
"numpy.arange",
"matplotlib.pyplot.close",
"numpy.dot",
"numpy.linspace",
"numpy.random.seed",
"numpy.concatenate",
"numpy.argmin",
"numpy.hypot",
"matplotlib.pyplot.axis",
"numpy.tile",
"matplotlib.pyplot.savefig",
"numpy.ones",
"matplotlib.pyplot.title",
"numpy.random.ra... | [((788, 816), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '[6, 6]'}), '(figsize=[6, 6])\n', (800, 816), True, 'import matplotlib.pyplot as plt\n'), ((820, 837), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (828, 837), True, 'import matplotlib.pyplot as plt\n'), ((5852, 5873), 'numpy.random.randn', 'np.random.randn', (['(2)', 'N'], {}), '(2, N)\n', (5867, 5873), True, 'import numpy as np\n'), ((5882, 5903), 'numpy.random.randn', 'np.random.randn', (['(2)', 'N'], {}), '(2, N)\n', (5897, 5903), True, 'import numpy as np\n'), ((6325, 6341), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (6334, 6341), True, 'import matplotlib.pyplot as plt\n'), ((6414, 6434), 'numpy.random.seed', 'np.random.seed', (['(2014)'], {}), '(2014)\n', (6428, 6434), True, 'import numpy as np\n'), ((6483, 6511), 'numpy.concatenate', 'np.concatenate', (['(X1.T, X2.T)'], {}), '((X1.T, X2.T))\n', (6497, 6511), True, 'import numpy as np\n'), ((6607, 6627), 'numpy.random.seed', 'np.random.seed', (['(2016)'], {}), '(2016)\n', (6621, 6627), True, 'import numpy as np\n'), ((6691, 6729), 'numpy.concatenate', 'np.concatenate', (['(X1_test.T, X2_test.T)'], {}), '((X1_test.T, X2_test.T))\n', (6705, 6729), True, 'import numpy as np\n'), ((6800, 6827), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""', 'C': '(100)'}), "(kernel='linear', C=100)\n", (6803, 6827), False, 'from sklearn.svm import SVC, LinearSVC\n'), ((6885, 6961), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../images/SVM_data.pdf"""'], {'bbox_inches': '"""tight"""', 'transparent': '(True)'}), "('../images/SVM_data.pdf', bbox_inches='tight', transparent=True)\n", (6896, 6961), True, 'import matplotlib.pyplot as plt\n'), ((7000, 7085), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../images/SVM_boundary.pdf"""'], {'bbox_inches': '"""tight"""', 'transparent': '(True)'}), "('../images/SVM_boundary.pdf', bbox_inches='tight', 
transparent=True\n )\n", (7011, 7085), True, 'import matplotlib.pyplot as plt\n'), ((7104, 7137), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""poly"""', 'degree': '(2)', 'C': '(1)'}), "(kernel='poly', degree=2, C=1)\n", (7107, 7137), False, 'from sklearn.svm import SVC, LinearSVC\n'), ((7196, 7245), 'matplotlib.pyplot.title', 'plt.title', (['"""SVM with 2nd order Polynomial Kernel"""'], {}), "('SVM with 2nd order Polynomial Kernel')\n", (7205, 7245), True, 'import matplotlib.pyplot as plt\n'), ((7250, 7340), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../images/SVM_boundary_poly2.pdf"""'], {'bbox_inches': '"""tight"""', 'transparent': '(True)'}), "('../images/SVM_boundary_poly2.pdf', bbox_inches='tight',\n transparent=True)\n", (7261, 7340), True, 'import matplotlib.pyplot as plt\n'), ((7356, 7378), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""rbf"""', 'C': '(1)'}), "(kernel='rbf', C=1)\n", (7359, 7378), False, 'from sklearn.svm import SVC, LinearSVC\n'), ((7435, 7471), 'matplotlib.pyplot.title', 'plt.title', (['"""SVM with the RBF Kernel"""'], {}), "('SVM with the RBF Kernel')\n", (7444, 7471), True, 'import matplotlib.pyplot as plt\n'), ((7476, 7564), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../images/SVM_boundary_RBF.pdf"""'], {'bbox_inches': '"""tight"""', 'transparent': '(True)'}), "('../images/SVM_boundary_RBF.pdf', bbox_inches='tight',\n transparent=True)\n", (7487, 7564), True, 'import matplotlib.pyplot as plt\n'), ((5921, 5968), 'numpy.array', 'np.array', (['[[1.5151, -0.1129], [0.1399, 0.6287]]'], {}), '([[1.5151, -0.1129], [0.1399, 0.6287]])\n', (5929, 5968), True, 'import numpy as np\n'), ((5982, 6029), 'numpy.array', 'np.array', (['[[0.8602, 1.2461], [-0.0737, -1.524]]'], {}), '([[0.8602, 1.2461], [-0.0737, -1.524]])\n', (5990, 6029), True, 'import numpy as np\n'), ((6135, 6149), 'numpy.dot', 'np.dot', (['M1', 'X1'], {}), '(M1, X1)\n', (6141, 6149), True, 'import numpy as np\n'), ((6152, 6171), 'numpy.tile', 'np.tile', (['T1', '[1, 
N]'], {}), '(T1, [1, N])\n', (6159, 6171), True, 'import numpy as np\n'), ((6180, 6194), 'numpy.dot', 'np.dot', (['M2', 'X2'], {}), '(M2, X2)\n', (6186, 6194), True, 'import numpy as np\n'), ((6197, 6216), 'numpy.tile', 'np.tile', (['T2', '[1, N]'], {}), '(T2, [1, N])\n', (6204, 6216), True, 'import numpy as np\n'), ((1152, 1186), 'numpy.arange', 'np.arange', (['(x_min - 1)', '(x_max + 1)', 'h'], {}), '(x_min - 1, x_max + 1, h)\n', (1161, 1186), True, 'import numpy as np\n'), ((1213, 1247), 'numpy.arange', 'np.arange', (['(y_min - 1)', '(y_max + 1)', 'h'], {}), '(y_min - 1, y_max + 1, h)\n', (1222, 1247), True, 'import numpy as np\n'), ((1709, 1733), 'numpy.linspace', 'np.linspace', (['(-10)', '(5)', '(500)'], {}), '(-10, 5, 500)\n', (1720, 1733), True, 'import numpy as np\n'), ((3158, 3189), 'numpy.hypot', 'np.hypot', (['(x0 - xx)', '(y0 - yy_down)'], {}), '(x0 - xx, y0 - yy_down)\n', (3166, 3189), True, 'import numpy as np\n'), ((3211, 3231), 'numpy.argmin', 'np.argmin', (['distances'], {}), '(distances)\n', (3220, 3231), True, 'import numpy as np\n'), ((3573, 3602), 'numpy.hypot', 'np.hypot', (['(x0 - xx)', '(y0 - yy_up)'], {}), '(x0 - xx, y0 - yy_up)\n', (3581, 3602), True, 'import numpy as np\n'), ((3624, 3644), 'numpy.argmin', 'np.argmin', (['distances'], {}), '(distances)\n', (3633, 3644), True, 'import numpy as np\n'), ((6045, 6062), 'numpy.array', 'np.array', (['[-1, 1]'], {}), '([-1, 1])\n', (6053, 6062), True, 'import numpy as np\n'), ((6087, 6105), 'numpy.array', 'np.array', (['[-2, -5]'], {}), '([-2, -5])\n', (6095, 6105), True, 'import numpy as np\n'), ((6536, 6546), 'numpy.ones', 'np.ones', (['N'], {}), '(N)\n', (6543, 6546), True, 'import numpy as np\n'), ((6548, 6559), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (6556, 6559), True, 'import numpy as np\n'), ((6759, 6769), 'numpy.ones', 'np.ones', (['N'], {}), '(N)\n', (6766, 6769), True, 'import numpy as np\n'), ((6771, 6782), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (6779, 6782), True, 
'import numpy as np\n'), ((690, 711), 'numpy.power', 'np.power', (['(x - mu)', '(2.0)'], {}), '(x - mu, 2.0)\n', (698, 711), True, 'import numpy as np\n'), ((718, 736), 'numpy.power', 'np.power', (['sig', '(2.0)'], {}), '(sig, 2.0)\n', (726, 736), True, 'import numpy as np\n'), ((1943, 1965), 'numpy.sum', 'np.sum', (['(clf.coef_ ** 2)'], {}), '(clf.coef_ ** 2)\n', (1949, 1965), True, 'import numpy as np\n')] |
import argparse
import os
import random
import pandas as pd
class DatasetSplitter:
    """Creates a reproducible random split of a dataset into train/validation/test sets."""

    def split_annotations_into_training_validation_and_test_set(self, dataset_directory: str,
                                                                train_fraction: float = 0.6,
                                                                validation_fraction: float = 0.2,
                                                                test_fraction: float = 0.2,
                                                                seed: int = 0) -> None:
        """Split annotations.csv in *dataset_directory* into training/validation/test csv files.

        The split is performed on the unique image paths, so all annotations of
        one image end up in the same subset, and it is deterministic for a
        fixed seed.

        :param dataset_directory: directory containing annotations.csv; the
            training.csv, validation.csv and test.csv outputs are written there
        :param train_fraction: fraction of images assigned to the training set
        :param validation_fraction: fraction of images assigned to the validation set
        :param test_fraction: fraction of images assigned to the test set
        :param seed: seed for the random number generator (reproducibility)
        :raises ValueError: if the three fractions do not sum to 1.0
        """
        # Exact float equality ("!= 1.0") spuriously rejected valid splits such
        # as 0.7/0.2/0.1, and the original silently returned without writing
        # anything; compare with a tolerance and fail loudly instead.
        if abs((train_fraction + validation_fraction + test_fraction) - 1.0) > 1e-9:
            raise ValueError("Invalid split requested. Fractions should sum up to 1.0")

        annotations = pd.read_csv(os.path.join(dataset_directory, "annotations.csv"))
        all_file_paths = annotations['path_to_image'].unique()
        all_file_paths.sort()  # deterministic order before sampling
        dataset_size = all_file_paths.shape[0]

        random.seed(seed)
        all_indices = list(range(0, dataset_size))
        validation_sample_size = int(dataset_size * validation_fraction)
        test_sample_size = int(dataset_size * test_fraction)
        validation_sample_indices = random.sample(all_indices, validation_sample_size)
        # random.sample() requires a sequence (sampling from a set raises
        # TypeError on Python >= 3.11); sorting also keeps the draw deterministic.
        remaining_indices = sorted(set(all_indices) - set(validation_sample_indices))
        test_sample_indices = random.sample(remaining_indices, test_sample_size)
        training_sample_indices = list(set(all_indices) - set(validation_sample_indices) - set(test_sample_indices))
        print("Splitting annotations for {0} training, {1} validation and {2} test images..."
              .format(len(training_sample_indices), len(validation_sample_indices), len(test_sample_indices)))

        training_file_paths = all_file_paths[training_sample_indices]
        validation_file_paths = all_file_paths[validation_sample_indices]
        test_file_paths = all_file_paths[test_sample_indices]

        training_annotations = annotations.loc[annotations['path_to_image'].isin(training_file_paths)]
        validation_annotations = annotations.loc[annotations['path_to_image'].isin(validation_file_paths)]
        test_annotations = annotations.loc[annotations['path_to_image'].isin(test_file_paths)]

        training_annotations.to_csv(os.path.join(dataset_directory, "training.csv"), index=False, float_format="%.0f")
        validation_annotations.to_csv(os.path.join(dataset_directory, "validation.csv"), index=False,
                                      float_format="%.0f")
        test_annotations.to_csv(os.path.join(dataset_directory, "test.csv"), index=False, float_format="%.0f")
if __name__ == "__main__":
    # Command-line entry point: split the given dataset directory with the
    # default 60/20/20 fractions.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--dataset_directory", type=str,
                            default="../data/normalized/deepscores",
                            help="The base directory for the normalized dataset")
    arguments, _unused = arg_parser.parse_known_args()
    splitter = DatasetSplitter()
    splitter.split_annotations_into_training_validation_and_test_set(arguments.dataset_directory)
| [
"random.sample",
"os.path.join",
"argparse.ArgumentParser",
"random.seed"
] | [((2740, 2765), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2763, 2765), False, 'import argparse\n'), ((1065, 1082), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1076, 1082), False, 'import random\n'), ((1304, 1354), 'random.sample', 'random.sample', (['all_indices', 'validation_sample_size'], {}), '(all_indices, validation_sample_size)\n', (1317, 1354), False, 'import random\n'), ((864, 914), 'os.path.join', 'os.path.join', (['dataset_directory', '"""annotations.csv"""'], {}), "(dataset_directory, 'annotations.csv')\n", (876, 914), False, 'import os\n'), ((2343, 2390), 'os.path.join', 'os.path.join', (['dataset_directory', '"""training.csv"""'], {}), "(dataset_directory, 'training.csv')\n", (2355, 2390), False, 'import os\n'), ((2464, 2513), 'os.path.join', 'os.path.join', (['dataset_directory', '"""validation.csv"""'], {}), "(dataset_directory, 'validation.csv')\n", (2476, 2513), False, 'import os\n'), ((2619, 2662), 'os.path.join', 'os.path.join', (['dataset_directory', '"""test.csv"""'], {}), "(dataset_directory, 'test.csv')\n", (2631, 2662), False, 'import os\n')] |
# coding: utf-8
"""
Translator Knowledge Beacon API
This is the Translator Knowledge Beacon web service application programming interface (API). # noqa: E501
The version of the OpenAPI document: 1.3.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import tkbeacon
from tkbeacon.api.beacon_api import BeaconApi # noqa: E501
from tkbeacon.rest import ApiException
class TestBeaconApi(unittest.TestCase):
    """Unit-test stubs for BeaconApi; one placeholder per API operation."""

    def setUp(self):
        # A fresh client instance for every test case.
        self.api = tkbeacon.api.beacon_api.BeaconApi()  # noqa: E501

    def tearDown(self):
        # The stub client holds no resources that need releasing.
        pass

    def test_get_concept_categories(self):
        """Placeholder for get_concept_categories."""
        pass

    def test_get_concept_details(self):
        """Placeholder for get_concept_details."""
        pass

    def test_get_concepts(self):
        """Placeholder for get_concepts."""
        pass

    def test_get_exact_matches_to_concept_list(self):
        """Placeholder for get_exact_matches_to_concept_list."""
        pass

    def test_get_knowledge_map(self):
        """Placeholder for get_knowledge_map."""
        pass

    def test_get_predicates(self):
        """Placeholder for get_predicates."""
        pass

    def test_get_statement_details(self):
        """Placeholder for get_statement_details."""
        pass

    def test_get_statements(self):
        """Placeholder for get_statements."""
        pass
if __name__ == '__main__':
    # Run the test suite when this module is executed directly.
    unittest.main()
| [
"unittest.main",
"tkbeacon.api.beacon_api.BeaconApi"
] | [((1602, 1617), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1615, 1617), False, 'import unittest\n'), ((585, 620), 'tkbeacon.api.beacon_api.BeaconApi', 'tkbeacon.api.beacon_api.BeaconApi', ([], {}), '()\n', (618, 620), False, 'import tkbeacon\n')] |
import FWCore.ParameterSet.Config as cms
# B-tag working points for which a BtagPerformanceESProducer is configured.
# Component, payload and working-point record names all follow the same
# "BTag<WP>table_v8_offline" / "BTag<WP>wp_v8_offline" pattern.
_btag_working_points = (
    'TTBARWPBTAGCSVL', 'TTBARWPBTAGCSVM', 'TTBARWPBTAGCSVT',
    'TTBARWPBTAGJPL', 'TTBARWPBTAGJPM', 'TTBARWPBTAGJPT',
    'TTBARWPBTAGJBPL', 'TTBARWPBTAGJBPM', 'TTBARWPBTAGJBPT',
    'TTBARWPBTAGSSVHEM', 'TTBARWPBTAGSSVHET', 'TTBARWPBTAGSSVHPT',
    'TTBARWPBTAGTCHEL', 'TTBARWPBTAGTCHEM', 'TTBARWPBTAGTCHET',
    'TTBARWPBTAGTCHPL', 'TTBARWPBTAGTCHPM', 'TTBARWPBTAGTCHPT',
)


def _make_btag_performance_producer(working_point):
    """Build the ES producer for one b-tag working point."""
    return cms.ESProducer(
        "BtagPerformanceESProducer",
        # this is what it makes available
        ComponentName=cms.string(working_point),
        # this is where it gets the payload from
        PayloadName=cms.string('BTag' + working_point + 'table_v8_offline'),
        WorkingPointName=cms.string('BTag' + working_point + 'wp_v8_offline'),
    )


# NOTE(review): the original file defined the JBPL/JBPM/JBPT producers twice
# with byte-identical settings; the duplicates are collapsed here.  The loop
# creates exactly the same module-level attributes as the original explicit
# assignments (e.g. BtagPerformanceESProducer_TTBARWPBTAGCSVL).
for _wp in _btag_working_points:
    globals()['BtagPerformanceESProducer_' + _wp] = _make_btag_performance_producer(_wp)
| [
"FWCore.ParameterSet.Config.string"
] | [((183, 212), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGCSVL"""'], {}), "('TTBARWPBTAGCSVL')\n", (193, 212), True, 'import FWCore.ParameterSet.Config as cms\n'), ((321, 370), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGCSVLtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGCSVLtable_v8_offline')\n", (331, 370), True, 'import FWCore.ParameterSet.Config as cms\n'), ((395, 441), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGCSVLwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGCSVLwp_v8_offline')\n", (405, 441), True, 'import FWCore.ParameterSet.Config as cms\n'), ((586, 615), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGCSVM"""'], {}), "('TTBARWPBTAGCSVM')\n", (596, 615), True, 'import FWCore.ParameterSet.Config as cms\n'), ((724, 773), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGCSVMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGCSVMtable_v8_offline')\n", (734, 773), True, 'import FWCore.ParameterSet.Config as cms\n'), ((798, 844), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGCSVMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGCSVMwp_v8_offline')\n", (808, 844), True, 'import FWCore.ParameterSet.Config as cms\n'), ((989, 1018), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGCSVT"""'], {}), "('TTBARWPBTAGCSVT')\n", (999, 1018), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1127, 1176), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGCSVTtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGCSVTtable_v8_offline')\n", (1137, 1176), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1201, 1247), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGCSVTwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGCSVTwp_v8_offline')\n", (1211, 1247), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1391, 1419), 'FWCore.ParameterSet.Config.string', 'cms.string', 
(['"""TTBARWPBTAGJPL"""'], {}), "('TTBARWPBTAGJPL')\n", (1401, 1419), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1528, 1576), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJPLtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJPLtable_v8_offline')\n", (1538, 1576), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1601, 1646), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJPLwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJPLwp_v8_offline')\n", (1611, 1646), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1790, 1818), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJPM"""'], {}), "('TTBARWPBTAGJPM')\n", (1800, 1818), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1927, 1975), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJPMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJPMtable_v8_offline')\n", (1937, 1975), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2000, 2045), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJPMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJPMwp_v8_offline')\n", (2010, 2045), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2189, 2217), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJPT"""'], {}), "('TTBARWPBTAGJPT')\n", (2199, 2217), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2326, 2374), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJPTtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJPTtable_v8_offline')\n", (2336, 2374), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2399, 2444), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJPTwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJPTwp_v8_offline')\n", (2409, 2444), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2589, 2618), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJBPL"""'], {}), "('TTBARWPBTAGJBPL')\n", (2599, 2618), 
True, 'import FWCore.ParameterSet.Config as cms\n'), ((2727, 2776), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPLtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPLtable_v8_offline')\n", (2737, 2776), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2801, 2847), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPLwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPLwp_v8_offline')\n", (2811, 2847), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2992, 3021), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJBPM"""'], {}), "('TTBARWPBTAGJBPM')\n", (3002, 3021), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3130, 3179), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPMtable_v8_offline')\n", (3140, 3179), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3204, 3250), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPMwp_v8_offline')\n", (3214, 3250), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3395, 3424), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJBPT"""'], {}), "('TTBARWPBTAGJBPT')\n", (3405, 3424), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3533, 3582), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPTtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPTtable_v8_offline')\n", (3543, 3582), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3607, 3653), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPTwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPTwp_v8_offline')\n", (3617, 3653), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3798, 3827), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJBPL"""'], {}), "('TTBARWPBTAGJBPL')\n", (3808, 3827), True, 'import FWCore.ParameterSet.Config as cms\n'), 
((3936, 3985), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPLtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPLtable_v8_offline')\n", (3946, 3985), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4010, 4056), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPLwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPLwp_v8_offline')\n", (4020, 4056), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4201, 4230), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJBPM"""'], {}), "('TTBARWPBTAGJBPM')\n", (4211, 4230), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4339, 4388), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPMtable_v8_offline')\n", (4349, 4388), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4413, 4459), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPMwp_v8_offline')\n", (4423, 4459), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4604, 4633), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGJBPT"""'], {}), "('TTBARWPBTAGJBPT')\n", (4614, 4633), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4742, 4791), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPTtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPTtable_v8_offline')\n", (4752, 4791), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4816, 4862), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGJBPTwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGJBPTwp_v8_offline')\n", (4826, 4862), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5009, 5040), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGSSVHEM"""'], {}), "('TTBARWPBTAGSSVHEM')\n", (5019, 5040), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5149, 5200), 'FWCore.ParameterSet.Config.string', 
'cms.string', (['"""BTagTTBARWPBTAGSSVHEMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGSSVHEMtable_v8_offline')\n", (5159, 5200), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5225, 5273), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGSSVHEMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGSSVHEMwp_v8_offline')\n", (5235, 5273), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5420, 5451), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGSSVHET"""'], {}), "('TTBARWPBTAGSSVHET')\n", (5430, 5451), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5560, 5611), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGSSVHETtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGSSVHETtable_v8_offline')\n", (5570, 5611), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5636, 5684), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGSSVHETwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGSSVHETwp_v8_offline')\n", (5646, 5684), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5831, 5862), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGSSVHPT"""'], {}), "('TTBARWPBTAGSSVHPT')\n", (5841, 5862), True, 'import FWCore.ParameterSet.Config as cms\n'), ((5971, 6022), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGSSVHPTtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGSSVHPTtable_v8_offline')\n", (5981, 6022), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6047, 6095), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGSSVHPTwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGSSVHPTwp_v8_offline')\n", (6057, 6095), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6241, 6271), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGTCHEL"""'], {}), "('TTBARWPBTAGTCHEL')\n", (6251, 6271), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6380, 6430), 'FWCore.ParameterSet.Config.string', 'cms.string', 
(['"""BTagTTBARWPBTAGTCHELtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHELtable_v8_offline')\n", (6390, 6430), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6455, 6502), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHELwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHELwp_v8_offline')\n", (6465, 6502), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6648, 6678), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGTCHEM"""'], {}), "('TTBARWPBTAGTCHEM')\n", (6658, 6678), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6787, 6837), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHEMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHEMtable_v8_offline')\n", (6797, 6837), True, 'import FWCore.ParameterSet.Config as cms\n'), ((6862, 6909), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHEMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHEMwp_v8_offline')\n", (6872, 6909), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7055, 7085), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGTCHET"""'], {}), "('TTBARWPBTAGTCHET')\n", (7065, 7085), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7194, 7244), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHETtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHETtable_v8_offline')\n", (7204, 7244), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7269, 7316), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHETwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHETwp_v8_offline')\n", (7279, 7316), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7462, 7492), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGTCHPL"""'], {}), "('TTBARWPBTAGTCHPL')\n", (7472, 7492), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7601, 7651), 'FWCore.ParameterSet.Config.string', 'cms.string', 
(['"""BTagTTBARWPBTAGTCHPLtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHPLtable_v8_offline')\n", (7611, 7651), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7676, 7723), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHPLwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHPLwp_v8_offline')\n", (7686, 7723), True, 'import FWCore.ParameterSet.Config as cms\n'), ((7869, 7899), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGTCHPM"""'], {}), "('TTBARWPBTAGTCHPM')\n", (7879, 7899), True, 'import FWCore.ParameterSet.Config as cms\n'), ((8008, 8058), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHPMtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHPMtable_v8_offline')\n", (8018, 8058), True, 'import FWCore.ParameterSet.Config as cms\n'), ((8083, 8130), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHPMwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHPMwp_v8_offline')\n", (8093, 8130), True, 'import FWCore.ParameterSet.Config as cms\n'), ((8276, 8306), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TTBARWPBTAGTCHPT"""'], {}), "('TTBARWPBTAGTCHPT')\n", (8286, 8306), True, 'import FWCore.ParameterSet.Config as cms\n'), ((8415, 8465), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHPTtable_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHPTtable_v8_offline')\n", (8425, 8465), True, 'import FWCore.ParameterSet.Config as cms\n'), ((8490, 8537), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BTagTTBARWPBTAGTCHPTwp_v8_offline"""'], {}), "('BTagTTBARWPBTAGTCHPTwp_v8_offline')\n", (8500, 8537), True, 'import FWCore.ParameterSet.Config as cms\n')] |
import numpy as np
import os
import os.path as op
import cv2
from tqdm import tqdm
import multiprocessing
from FeatureExtractor import get_gist_C_implementation
from utils import ensure_dir, info
input_dir = "./dataset/raw_image"

# Image catalog: dense index -> metadata dict.  Built at import time on
# purpose: worker processes re-import this module and need the same catalog.
catalog = {}
paths = []
feats = []
for (root, dirs, files) in os.walk(input_dir):
    for f in files:
        if f.split('.')[-1].lower() in ['jpg', 'bmp', 'png']:
            catalog[len(catalog)] = {
                'path': op.join(root, f)  # For possible further metadata
            }


def getfeat(start, end, use_tqdm=False):
    """Extract GIST descriptors for catalog entries in [start, end).

    Kept at module top level so it can be pickled for multiprocessing
    workers.  Returns two parallel lists: (image paths, feature vectors).
    """
    subpaths, subfeats = [], []
    index_range = range(start, end)
    for i in (tqdm(index_range) if use_tqdm else index_range):
        img = cv2.imread(catalog[i]['path'])
        vec = get_gist_C_implementation(img)
        subpaths.append(catalog[i]['path'])
        subfeats.append(vec)
    return subpaths, subfeats


def _extract_all(processnum=6):
    """Fan the extraction out over *processnum* slices and return (paths, feats)."""
    info("Extracting GIST descriptor", domain=__file__)
    unit = len(catalog) // processnum + 1
    pool = multiprocessing.Pool()
    info("Starting worker processes", domain=__file__)
    pending = []
    for pid in tqdm(range(1, processnum)):
        end = min((pid + 1) * unit, len(catalog))
        pending.append(pool.apply_async(getfeat, args=(pid * unit, end)))
    # The main process handles the first slice itself, with a progress bar.
    all_paths, all_feats = getfeat(0, unit, use_tqdm=True)
    info("Joining worker processes", domain=__file__)
    for task in tqdm(pending):
        subpath, subfeat = task.get()
        all_paths += subpath
        all_feats += subfeat
    return all_paths, all_feats


if __name__ == "__main__":
    # Guarding the driver is required: creating a multiprocessing.Pool at
    # import time makes spawn-mode workers re-run the whole pipeline.
    paths, feats = _extract_all()
    np.savez("./dataset/feature.npz", Path=paths, Feat=feats)
    info("Preprocess completed! {} images loaded into dataset".format(len(paths)), domain=__file__)
| [
"utils.info",
"numpy.savez",
"FeatureExtractor.get_gist_C_implementation",
"os.path.join",
"multiprocessing.Pool",
"cv2.imread",
"os.walk"
] | [((294, 312), 'os.walk', 'os.walk', (['input_dir'], {}), '(input_dir)\n', (301, 312), False, 'import os\n'), ((547, 598), 'utils.info', 'info', (['"""Extracting GIST descriptor"""'], {'domain': '__file__'}), "('Extracting GIST descriptor', domain=__file__)\n", (551, 598), False, 'from utils import ensure_dir, info\n'), ((1006, 1028), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {}), '()\n', (1026, 1028), False, 'import multiprocessing\n'), ((1045, 1095), 'utils.info', 'info', (['"""Starting worker processes"""'], {'domain': '__file__'}), "('Starting worker processes', domain=__file__)\n", (1049, 1095), False, 'from utils import ensure_dir, info\n'), ((1324, 1373), 'utils.info', 'info', (['"""Joining worker processes"""'], {'domain': '__file__'}), "('Joining worker processes', domain=__file__)\n", (1328, 1373), False, 'from utils import ensure_dir, info\n'), ((1501, 1558), 'numpy.savez', 'np.savez', (['"""./dataset/feature.npz"""'], {'Path': 'paths', 'Feat': 'feats'}), "('./dataset/feature.npz', Path=paths, Feat=feats)\n", (1509, 1558), True, 'import numpy as np\n'), ((819, 849), 'cv2.imread', 'cv2.imread', (["catalog[i]['path']"], {}), "(catalog[i]['path'])\n", (829, 849), False, 'import cv2\n'), ((864, 894), 'FeatureExtractor.get_gist_C_implementation', 'get_gist_C_implementation', (['img'], {}), '(img)\n', (889, 894), False, 'from FeatureExtractor import get_gist_C_implementation\n'), ((415, 431), 'os.path.join', 'op.join', (['root', 'f'], {}), '(root, f)\n', (422, 431), True, 'import os.path as op\n')] |
# -*- coding: utf-8 -*-
# Natural Language Toolkit: Interface to the TreeTagger POS-tagger
#
# Copyright (C) <NAME>
# Author: <NAME> <<EMAIL>>
"""
A Python module for interfacing with the Treetagger by <NAME>.
"""
import os
from subprocess import Popen, PIPE
from nltk.internals import find_binary, find_file
from nltk.tag.api import TaggerI
def tUoB3(obj, encoding='utf-8'):
    """Return *obj* as text, decoding bytes-like input with *encoding*.

    str input (and any non bytes-like object) is returned unchanged.  The
    original also fed str through str(obj, encoding), which raises
    ``TypeError: decoding str is not supported``.

    :param obj: value to normalize to text
    :param encoding: charset used to decode bytes/bytearray input
    :return: the decoded text, or *obj* unchanged
    """
    if isinstance(obj, (bytes, bytearray)):
        obj = str(obj, encoding)
    return obj
# URL shown to the user when the TreeTagger binary cannot be located.
_treetagger_url = 'http://www.cis.uni-muenchen.de/~schmid/tools/TreeTagger/'

# Language models grouped by the encoding of their training data.
_treetagger_languages = {
'latin-1':['latin', 'latinIT', 'mongolian', 'swahili'],
'utf-8' : ['bulgarian', 'dutch', 'english', 'estonian', 'finnish', 'french', 'galician', 'german', 'italian', 'polish', 'russian', 'slovak', 'slovak2', 'spanish']}
"""The default encoding used by TreeTagger: utf-8. u'' means latin-1; ISO-8859-1"""
# Encodings accepted by the TreeTagger wrapper.
_treetagger_charset = ['utf-8', 'latin-1']
class TreeTagger(TaggerI):
    r"""
    A class for pos tagging with TreeTagger. The input is the paths to:
    - a language trained on training data
    - (optionally) the path to the TreeTagger binary
    - (optionally) the encoding of the training data (default: utf-8)
    This class communicates with the TreeTagger binary via pipes.
    Example:
    .. doctest::
        :options: +SKIP
        >>> from treetagger3 import TreeTagger
        >>> tt = TreeTagger(encoding='utf-8',language='english')
        >>> tt.tag('What is the airspeed of an unladen swallow ?')
        [['What', 'WP', 'What'],
         ['is', 'VBZ', 'be'],
         ['the', 'DT', 'the'],
         ['airspeed', 'NN', 'airspeed'],
         ['of', 'IN', 'of'],
         ['an', 'DT', 'an'],
         ['unladen', 'JJ', '<unknown>'],
         ['swallow', 'NN', 'swallow'],
         ['?', 'SENT', '?']]
    .. doctest::
        :options: +SKIP
        >>> from treetagger3 import TreeTagger
        >>> tt = TreeTagger()
        >>> tt.tag('Das Haus ist sehr schön und groß. Es hat auch einen hübschen Garten.')
        [['Das', 'ART', 'd'],
         ['Haus', 'NN', 'Haus'],
         ['ist', 'VAFIN', 'sein'],
         ['sehr', 'ADV', 'sehr'],
         ['schön', 'ADJD', 'schön'],
         ['und', 'KON', 'und'],
         ['groß', 'ADJD', 'groß'],
         ['.', '$.', '.'],
         ['Es', 'PPER', 'es'],
         ['hat', 'VAFIN', 'haben'],
         ['auch', 'ADV', 'auch'],
         ['einen', 'ART', 'ein'],
         ['hübschen', 'ADJA', 'hübsch'],
         ['Garten', 'NN', 'Garten'],
         ['.', '$.', '.']]
    """
    def __init__(self, path_to_home=None, language='german',
                 encoding='utf-8', verbose=False, abbreviation_list=None):
        """
        Initialize the TreeTagger.
        :param path_to_home: The TreeTagger binary.
        :param language: Default language is german.
        :param encoding: The encoding used by the model. Unicode tokens
            passed to the tag() and batch_tag() methods are converted to
            this charset when they are sent to TreeTagger.
            The default is utf-8.
            This parameter is ignored for str tokens, which are sent as-is.
            The caller must ensure that tokens are encoded in the right charset.
        :param abbreviation_list: optional path to an abbreviation file,
            passed to the binary via its ``-a`` flag.
        """
        # Candidate installation directories searched for the binary.
        treetagger_paths = ['.', '/usr/bin', '/usr/local/bin', '/opt/local/bin',
                            '/Applications/bin', '~/bin', '~/Applications/bin',
                            '~/work/TreeTagger/cmd', '~/tree-tagger/cmd']
        # Expand '~' so per-user installs are found too.
        treetagger_paths = list(map(os.path.expanduser, treetagger_paths))
        self._abbr_list = abbreviation_list
        try:
            # Validate that the requested language ships in the requested
            # charset before deriving the binary name.
            if encoding in _treetagger_languages.keys() and language in _treetagger_languages[encoding]:
                if encoding == 'latin-1':
                    self._encoding = 'latin-1'
                else:
                    self._encoding = encoding
                treetagger_bin_name = 'tree-tagger-' + language
            else:
                raise LookupError('NLTK was unable to find the TreeTagger bin!')
        except KeyError as e:
            # NOTE(review): this branch looks unreachable -- the membership
            # test above guards the dict lookup, so no KeyError can escape.
            raise LookupError('NLTK was unable to find the TreeTagger bin!')
        self._treetagger_bin = find_binary(
            treetagger_bin_name, path_to_home,
            env_vars=('TREETAGGER', 'TREETAGGER_HOME'),
            searchpath=treetagger_paths,
            url=_treetagger_url,
            verbose=verbose)
    def tag(self, sentences):
        """Tags a single sentence: a list of words.
        The tokens should not contain any newline characters.

        Returns a list of ``[token, tag, lemma]`` lists, one per output
        line produced by the TreeTagger binary.
        """
        encoding = self._encoding
        # Write the actual sentences to the temporary input file
        if isinstance(sentences, list):
            _input = '\n'.join((x for x in sentences))
        else:
            _input = sentences
        # The subprocess pipe expects bytes, so encode str input up front.
        if isinstance(_input, str) and encoding:
            _input = _input.encode(encoding)
        # Run the tagger and get the output
        if(self._abbr_list is None):
            p = Popen([self._treetagger_bin],
                      shell=False, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        elif(self._abbr_list is not None):
            # "-a <file>" supplies a custom abbreviation list to the tagger.
            p = Popen([self._treetagger_bin,"-a",self._abbr_list],
                      shell=False, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = p.communicate(_input)
        # Check the return code.
        if p.returncode != 0:
            print(stderr)
            raise OSError('TreeTagger command failed!')
        # NOTE(review): with binary pipes communicate() returns bytes, so
        # the str branch should never fire and tUoB3 does the decoding --
        # note that tUoB3 decodes with utf-8 even when the model charset is
        # latin-1; confirm this is intended.
        if isinstance(stdout, str) and encoding:
            treetagger_output = stdout.decode(encoding)
        else:
            treetagger_output = tUoB3(stdout)
        # Output the tagged sentences
        tagged_sentences = []
        for tagged_word in treetagger_output.strip().split('\n'):
            # Each output line is "token<TAB>tag<TAB>lemma".
            tagged_word_split = tagged_word.split('\t')
            tagged_sentences.append(tagged_word_split)
        return tagged_sentences
# Run this module's doctests when executed directly.  Whitespace in the
# expected output is normalized so the aligned doctest tables compare.
if __name__ == "__main__":
    import doctest
    doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
| [
"nltk.internals.find_binary",
"subprocess.Popen",
"doctest.testmod"
] | [((6023, 6080), 'doctest.testmod', 'doctest.testmod', ([], {'optionflags': 'doctest.NORMALIZE_WHITESPACE'}), '(optionflags=doctest.NORMALIZE_WHITESPACE)\n', (6038, 6080), False, 'import doctest\n'), ((4201, 4366), 'nltk.internals.find_binary', 'find_binary', (['treetagger_bin_name', 'path_to_home'], {'env_vars': "('TREETAGGER', 'TREETAGGER_HOME')", 'searchpath': 'treetagger_paths', 'url': '_treetagger_url', 'verbose': 'verbose'}), "(treetagger_bin_name, path_to_home, env_vars=('TREETAGGER',\n 'TREETAGGER_HOME'), searchpath=treetagger_paths, url=_treetagger_url,\n verbose=verbose)\n", (4212, 4366), False, 'from nltk.internals import find_binary, find_file\n'), ((5030, 5115), 'subprocess.Popen', 'Popen', (['[self._treetagger_bin]'], {'shell': '(False)', 'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), '([self._treetagger_bin], shell=False, stdin=PIPE, stdout=PIPE, stderr=PIPE\n )\n', (5035, 5115), False, 'from subprocess import Popen, PIPE\n'), ((5195, 5303), 'subprocess.Popen', 'Popen', (["[self._treetagger_bin, '-a', self._abbr_list]"], {'shell': '(False)', 'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), "([self._treetagger_bin, '-a', self._abbr_list], shell=False, stdin=\n PIPE, stdout=PIPE, stderr=PIPE)\n", (5200, 5303), False, 'from subprocess import Popen, PIPE\n')] |
from glob import glob
import os
from unittest import TestCase
from nose.plugins.attrib import attr
from ..client import RaftClient
class RaftClientTest(TestCase):
    """Integration tests for RaftClient against a locally running node."""

    @classmethod
    def tearDownClass(cls):
        # Bug fix: ``map`` is lazy in Python 3, so the original
        # ``map(os.remove, glob(...))`` built an iterator that was never
        # consumed and the temporary test files were never deleted.
        # Iterate explicitly so the cleanup actually happens.
        for leftover in glob('./~test_file*'):
            os.remove(leftover)

    @attr("integration")
    def test_client_init(self):
        # Constructing and registering against the local node must not raise.
        client = RaftClient(('127.0.0.1', 4000))
        client.register()
| [
"glob.glob",
"nose.plugins.attrib.attr"
] | [((265, 284), 'nose.plugins.attrib.attr', 'attr', (['"""integration"""'], {}), "('integration')\n", (269, 284), False, 'from nose.plugins.attrib import attr\n'), ((236, 257), 'glob.glob', 'glob', (['"""./~test_file*"""'], {}), "('./~test_file*')\n", (240, 257), False, 'from glob import glob\n')] |
from datetime import datetime, timezone
from ..exceptions import *
class Orders(object):
    """Builds and submits capital orders through the GBM homebroker API."""

    def __init__(self, session, trading_types):
        super(Orders, self).__init__()
        # Authenticated session object exposing ``post(metadata, resource, payload)``.
        self._session = session
        # Enum-like object with the supported trading types (e.g. ``Limited``).
        self._trading_types = trading_types
    def generateOrderObject(self, legacy_contract_id, issuer, quantity, order_type, trading_type, instrument_type, price=None):
        """
        **Generate Order Object**
        - legacy_contract_id (string): (required)
        - issuer (string): (required)
        - quantity (int): (required)
        - order_type (enum): (required)
        - trading_type (enum): (required)
        - instrument_type (required)
        - price (float): (optional, but required when trading_type is Limited)

        :raises OrderFormatError: if trading_type is Limited and no price
            was supplied.
        """
        # NOTE(review): ``not price`` also rejects a price of 0; use
        # ``price is None`` if 0 is ever a legal limit price.
        if trading_type == self._trading_types.Limited and not price:
            raise OrderFormatError("If trading type is Limited, the price argument is required")
        order_object = {
            "algoTradingTypeId": trading_type.value,
            "capitalOrderTypeId": order_type.value,
            "instrumentType": instrument_type.value,
            "issueId": issuer,
            "quantity": quantity,
            # Client-side tracking hash; see __generateHash below.
            "hash": self.__generateHash(legacy_contract_id, issuer, quantity, instrument_type)
        }
        # Falsy prices (None or 0) are omitted from the payload.
        if price:
            order_object["price"] = price
        return order_object
    def __generateHash(self, legacy_contract_id, issuer, quantity, instrument_type):
        """Build the order hash: epoch-millis + contract + ticker + qty + instrument."""
        # Current UTC time, stripped of tzinfo so the epoch subtraction
        # below works against a naive datetime.
        Now = datetime.now(timezone.utc)
        Now = Now.replace(tzinfo=None)
        # Milliseconds since the Unix epoch.
        Millis = int((Now - datetime(year=1970, day=1, month=1)).total_seconds() * 1000)
        # The ticker is embedded with its spaces removed.
        TickerName = issuer.replace(" ", "")
        return f"{str(Millis)}{legacy_contract_id}{TickerName}{str(quantity)}{str(instrument_type.value)}"
    def submitOrder(self, legacy_contract_id, duration, order):
        """
        **Submit one order**
        https://homebroker-api.gbm.com/GBMP/api/Operation/RegisterCapitalOrder
        - legacy_contract_id (string): (required)
        - duration (int): (required)
        - order (object): (required) -- as built by generateOrderObject.
        """
        metadata = {
            'tags': ['order', 'generate order'],
            'operation': 'submitOrder'
        }
        resource = "https://homebroker-api.gbm.com/GBMP/api/Operation/RegisterCapitalOrder"
        payload = {
            "contractId": legacy_contract_id,
            "duration": duration,
            # Trading type is lifted from the order itself.
            "algoTradingTypeId": order.get("algoTradingTypeId"),
            "orders": [order]
        }
        return self._session.post(metadata, resource, payload)
    def getOrders(self, legacy_contract_id):
        """
        **Get submitted Orders**
        https://homebroker-api.gbm.com/GBMP/api/Operation/GetBlotterOrders
        - legacy_contract_id (string): (required)
        """
        metadata = {
            'tags': ['Get Orders'],
            'operation': 'getOrders'
        }
        resource = "https://homebroker-api.gbm.com/GBMP/api/Operation/GetBlotterOrders"
        payload = {
            "contractId": legacy_contract_id,
            "accountId": legacy_contract_id,
            # NOTE(review): magic instrument-type filter values 0 and 2 --
            # confirm their meaning against the instrument type enum.
            "instrumentTypes": [0, 2],
            # Today's UTC date with the time pinned to 06:00:00.000Z.
            "processDate": datetime.utcnow().strftime('%Y-%m-%dT06:00:00.000Z')
        }
        return self._session.post(metadata, resource, payload)
| [
"datetime.datetime",
"datetime.datetime.now",
"datetime.datetime.utcnow"
] | [((1497, 1523), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (1509, 1523), False, 'from datetime import datetime, timezone\n'), ((3327, 3344), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3342, 3344), False, 'from datetime import datetime, timezone\n'), ((1591, 1626), 'datetime.datetime', 'datetime', ([], {'year': '(1970)', 'day': '(1)', 'month': '(1)'}), '(year=1970, day=1, month=1)\n', (1599, 1626), False, 'from datetime import datetime, timezone\n')] |
import collections
import numpy
import pandas
import scipy
from tqdm import tqdm
from .common import say, reconstruct_antigen_sequences
def compute_coverage(antigen, sequence, blast_df):
    """
    Build a per-clone alignment indicator matrix for a single antigen.

    Parameters
    ----------
    antigen : str
    sequence : str
        Antigen amino acid sequence
    blast_df : pandas.DataFrame
        Blast results for the clones to consider.

    Returns
    -------
    pandas.DataFrame of int
        Columns are 0-based positions in the antigen, index are clone
        names.  A cell is 1 when the clone has at least one HSP matching
        that position and 0 otherwise; multiple HSPs for the same clone
        are unioned (never double counted).
    """
    hits = blast_df.loc[blast_df.title == antigen]
    assert len(hits) > 0, "No hits for antigen %s" % antigen
    clone_names = []
    coverage_rows = []
    for clone_name, clone_hits in hits.groupby("clone"):
        # One indicator row per clone, unioned over all of its HSPs.
        coverage = numpy.zeros(len(sequence), dtype=int)
        for _, hsp in clone_hits.iterrows():
            position = hsp.hit_from - 1
            for offset, hit_char in enumerate(hsp.hseq):
                is_match = (hit_char not in ["-", " ", "+"]
                            and hit_char == hsp.qseq[offset])
                # The blast midline must agree with our match definition.
                assert is_match == bool(hsp.midline[offset] not in ("+", " "))
                if is_match:
                    # Matching residues must equal the antigen sequence.
                    assert hsp.qseq[offset] == sequence[position]
                    assert hit_char == sequence[position]
                    coverage[position] = 1
                if hit_char != "-":
                    # Gaps in the hit do not advance the antigen position.
                    position += 1
        coverage_rows.append(coverage)
        clone_names.append(clone_name)
    return pandas.DataFrame(coverage_rows, index=clone_names)
class AntigenAnalysis(object):
    """Aggregates per-clone blast hits per sample and plots antigen coverage.

    Builds a clone-by-sample 0/1 hit matrix from ``sample_to_hit_clones``
    at construction time; ``plot_antigen`` renders the per-antigen panels.
    """
    def __init__(self, blast_df, antigens_df, sample_to_hit_clones, sample_to_kind=None):
        self.blast_df = blast_df
        self.antigens_df = antigens_df
        self.sample_to_kind = sample_to_kind
        # Materialize values as lists so repeated iteration is safe.
        self.sample_to_hit_clones = dict(
            (k, list(v)) for (k, v) in sample_to_hit_clones.items())
        all_clones = set()
        for clones in self.sample_to_hit_clones.values():
            all_clones.update(clones)
        # Indicator matrix: rows = clones, columns = samples, 1 = hit.
        self.clone_by_sample_hits_matrix = pandas.DataFrame(
            index=sorted(all_clones),
            columns=list(self.sample_to_hit_clones),
            dtype=int)
        self.clone_by_sample_hits_matrix[:] = 0
        for (sample, clones) in self.sample_to_hit_clones.items():
            self.clone_by_sample_hits_matrix.loc[clones, sample] = 1
        say("Done.")
    def plot_antigen(
            self,
            antigen,
            heatmap=True,
            special_features={},  # NOTE(review): mutable default -- only read here, never mutated
            max_len_to_show_sequence=100):
        """Plot sequence track, clone coverage, per-sample hit fraction and
        (optionally) a sample-by-position heatmap for one antigen.

        :param antigen: antigen title as it appears in ``blast_df.title``.
        :param heatmap: whether to include the per-sample heatmap panel.
        :param special_features: dict mapping antigen -> feature list for
            the dna_features_viewer sequence track.
        :param max_len_to_show_sequence: draw individual residues only for
            antigens at most this long.
        :return: the matplotlib Figure.
        """
        import dna_features_viewer
        from matplotlib import pyplot
        import seaborn
        sequence = reconstruct_antigen_sequences(
            self.blast_df.loc[self.blast_df.title == antigen])[antigen]
        all_coverage_by_clone = compute_coverage(
            antigen,
            sequence,
            self.blast_df)
        # Number of clones aligning at each position.
        all_coverage_total = (all_coverage_by_clone > 0).sum()
        all_samples = list(self.sample_to_hit_clones)
        # Columns are positions, rows are samples. 1 = hit at that position.
        # We build it transposed then flip it.
        hits_by_sample_and_position = pandas.DataFrame(
            index=all_coverage_by_clone.columns, dtype=int)
        for sample in all_samples:
            # Mask each clone's coverage by whether that clone hit this
            # sample, then sum over clones.
            hits_by_sample_and_position[sample] = all_coverage_by_clone.multiply(
                self.clone_by_sample_hits_matrix.reindex(
                    all_coverage_by_clone.index).fillna(0)[sample],
                axis=0).sum()
        hits_by_sample_and_position = hits_by_sample_and_position.T
        # Panel layout: sequence, coverage, hit fraction, optional heatmap.
        fig, axes = pyplot.subplots(
            3 + (1 if heatmap else 0), 1,
            figsize=(10, (len(hits_by_sample_and_position) * 0.15 + 5) if heatmap else 10),
            sharex=True,
            gridspec_kw={
                "height_ratios": [0.3, 0.22, 0.22] + ([0.6] if heatmap else [])
            },
        )
        axes = list(axes)
        features = special_features.get(antigen, [])
        # Sequence
        ax = axes.pop(0)
        pyplot.sca(ax)
        record = dna_features_viewer.GraphicRecord(
            sequence=sequence, features=features)
        record.plot(ax=ax)
        if len(sequence) <= max_len_to_show_sequence:
            record.plot_sequence(ax=ax, fontdict={'size': 6})
        # Coverage
        ax = axes.pop(0)
        pyplot.sca(ax)
        pyplot.plot(
            all_coverage_total.index, all_coverage_total.values, color='black')
        pyplot.fill_between(
            all_coverage_total.index,
            all_coverage_total.values,
            color='lightblue',
            alpha=0.3)
        pyplot.title(" Coverage", loc="left", pad=0)
        pyplot.ylim(ymin=0)
        pyplot.ylabel("Clones")
        # Fraction of samples with a hit
        ax = axes.pop(0)
        pyplot.sca(ax)
        plot_series = (hits_by_sample_and_position > 0).mean(0) * 100.0
        pyplot.plot(
            plot_series.index, plot_series.values, color='black')
        pyplot.fill_between(
            plot_series.index,
            plot_series.values,
            color='red',
            alpha=0.3)
        pyplot.title(" Hits", loc="left", pad=0)
        pyplot.ylim(ymin=0)
        pyplot.ylabel("Samples (%)")
        # Heatmap
        if heatmap:
            ax = axes.pop(0)
            pyplot.sca(ax)
            plot_df = hits_by_sample_and_position.copy()
            plot_df = plot_df.astype(float)
            seaborn.heatmap(plot_df, cbar=False, ax=ax)
            # Switch to one-based numbering
            ticks = numpy.arange(19, len(sequence), 20)
            # Thin the ticks so at most 20 labels are drawn.
            while len(ticks) > 20:
                ticks = ticks[::2]
            pyplot.xticks(ticks, ticks + 1, rotation=0)
            yticks = plot_df.index
            pyplot.yticks(numpy.arange(len(yticks)), yticks, fontsize=6)
            pyplot.xlabel("Position")
            pyplot.ylabel("Sample")
        seaborn.despine()
        title = antigen.split("OS")[0].strip()
        title += "\nHits in %d of %d clones" % (
            self.clone_by_sample_hits_matrix.reindex(
                all_coverage_by_clone.index).fillna(0)[all_samples].any(1).sum(),
            len(all_coverage_by_clone))
        pyplot.suptitle(title)
        pyplot.tight_layout()
        return fig
| [
"dna_features_viewer.GraphicRecord",
"seaborn.despine",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.sca",
"seaborn.heatmap",
"matplotlib.pyplot.fill_between",
"matplotlib.pyplot.tight_layout",
"pandas.DataFrame"... | [((2003, 2046), 'pandas.DataFrame', 'pandas.DataFrame', (['result_rows'], {'index': 'clones'}), '(result_rows, index=clones)\n', (2019, 2046), False, 'import pandas\n'), ((3708, 3772), 'pandas.DataFrame', 'pandas.DataFrame', ([], {'index': 'all_coverage_by_clone.columns', 'dtype': 'int'}), '(index=all_coverage_by_clone.columns, dtype=int)\n', (3724, 3772), False, 'import pandas\n'), ((4587, 4601), 'matplotlib.pyplot.sca', 'pyplot.sca', (['ax'], {}), '(ax)\n', (4597, 4601), False, 'from matplotlib import pyplot\n'), ((4619, 4690), 'dna_features_viewer.GraphicRecord', 'dna_features_viewer.GraphicRecord', ([], {'sequence': 'sequence', 'features': 'features'}), '(sequence=sequence, features=features)\n', (4652, 4690), False, 'import dna_features_viewer\n'), ((4900, 4914), 'matplotlib.pyplot.sca', 'pyplot.sca', (['ax'], {}), '(ax)\n', (4910, 4914), False, 'from matplotlib import pyplot\n'), ((4923, 5002), 'matplotlib.pyplot.plot', 'pyplot.plot', (['all_coverage_total.index', 'all_coverage_total.values'], {'color': '"""black"""'}), "(all_coverage_total.index, all_coverage_total.values, color='black')\n", (4934, 5002), False, 'from matplotlib import pyplot\n'), ((5024, 5130), 'matplotlib.pyplot.fill_between', 'pyplot.fill_between', (['all_coverage_total.index', 'all_coverage_total.values'], {'color': '"""lightblue"""', 'alpha': '(0.3)'}), "(all_coverage_total.index, all_coverage_total.values,\n color='lightblue', alpha=0.3)\n", (5043, 5130), False, 'from matplotlib import pyplot\n'), ((5184, 5230), 'matplotlib.pyplot.title', 'pyplot.title', (['""" Coverage"""'], {'loc': '"""left"""', 'pad': '(0)'}), "(' Coverage', loc='left', pad=0)\n", (5196, 5230), False, 'from matplotlib import pyplot\n'), ((5239, 5258), 'matplotlib.pyplot.ylim', 'pyplot.ylim', ([], {'ymin': '(0)'}), '(ymin=0)\n', (5250, 5258), False, 'from matplotlib import pyplot\n'), ((5267, 5290), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Clones"""'], {}), "('Clones')\n", (5280, 
5290), False, 'from matplotlib import pyplot\n'), ((5366, 5380), 'matplotlib.pyplot.sca', 'pyplot.sca', (['ax'], {}), '(ax)\n', (5376, 5380), False, 'from matplotlib import pyplot\n'), ((5461, 5526), 'matplotlib.pyplot.plot', 'pyplot.plot', (['plot_series.index', 'plot_series.values'], {'color': '"""black"""'}), "(plot_series.index, plot_series.values, color='black')\n", (5472, 5526), False, 'from matplotlib import pyplot\n'), ((5548, 5634), 'matplotlib.pyplot.fill_between', 'pyplot.fill_between', (['plot_series.index', 'plot_series.values'], {'color': '"""red"""', 'alpha': '(0.3)'}), "(plot_series.index, plot_series.values, color='red',\n alpha=0.3)\n", (5567, 5634), False, 'from matplotlib import pyplot\n'), ((5688, 5730), 'matplotlib.pyplot.title', 'pyplot.title', (['""" Hits"""'], {'loc': '"""left"""', 'pad': '(0)'}), "(' Hits', loc='left', pad=0)\n", (5700, 5730), False, 'from matplotlib import pyplot\n'), ((5739, 5758), 'matplotlib.pyplot.ylim', 'pyplot.ylim', ([], {'ymin': '(0)'}), '(ymin=0)\n', (5750, 5758), False, 'from matplotlib import pyplot\n'), ((5767, 5795), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Samples (%)"""'], {}), "('Samples (%)')\n", (5780, 5795), False, 'from matplotlib import pyplot\n'), ((6469, 6486), 'seaborn.despine', 'seaborn.despine', ([], {}), '()\n', (6484, 6486), False, 'import seaborn\n'), ((6769, 6791), 'matplotlib.pyplot.suptitle', 'pyplot.suptitle', (['title'], {}), '(title)\n', (6784, 6791), False, 'from matplotlib import pyplot\n'), ((6801, 6822), 'matplotlib.pyplot.tight_layout', 'pyplot.tight_layout', ([], {}), '()\n', (6820, 6822), False, 'from matplotlib import pyplot\n'), ((5876, 5890), 'matplotlib.pyplot.sca', 'pyplot.sca', (['ax'], {}), '(ax)\n', (5886, 5890), False, 'from matplotlib import pyplot\n'), ((6005, 6048), 'seaborn.heatmap', 'seaborn.heatmap', (['plot_df'], {'cbar': '(False)', 'ax': 'ax'}), '(plot_df, cbar=False, ax=ax)\n', (6020, 6048), False, 'import seaborn\n'), ((6232, 6275), 
'matplotlib.pyplot.xticks', 'pyplot.xticks', (['ticks', '(ticks + 1)'], {'rotation': '(0)'}), '(ticks, ticks + 1, rotation=0)\n', (6245, 6275), False, 'from matplotlib import pyplot\n'), ((6398, 6423), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Position"""'], {}), "('Position')\n", (6411, 6423), False, 'from matplotlib import pyplot\n'), ((6436, 6459), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Sample"""'], {}), "('Sample')\n", (6449, 6459), False, 'from matplotlib import pyplot\n')] |
from go.contacts import tasks, utils
from go.contacts.parsers import ContactFileParser
class ContactImportException(Exception):
    """
    Raised when an import handler determines that an import cannot
    succeed (e.g. a required column mapping is missing).
    """
def dispatch_import_task(import_task, request, group, check_fields=None):
    """Parse the uploaded contact file from the session and queue *import_task*.

    :param import_task: task object exposing ``.delay`` (presumably a celery
        task from ``go.contacts.tasks``).
    :param request: HTTP request holding the POSTed column mapping and the
        session with the uploaded file hints.
    :param group: contact group the imported contacts are added to.
    :param check_fields: optional callable receiving the field-name ->
        normalizer dict; may raise to veto the import.
    """
    file_name, file_path = utils.get_file_hints_from_session(request)
    file_type, parser = ContactFileParser.get_parser(file_name)
    has_header, _, sample_row = parser.guess_headers_and_row(file_path)
    # Grab the selected field names from the submitted form
    # by looping over the expect n number of `column-n` keys being
    # posted
    field_names = [request.POST.get('column-%s' % i) for i in
                   range(len(sample_row))]
    normalizers = [request.POST.get('normalize-%s' % i, '')
                   for i in range(len(sample_row))]
    # Bug fix: ``zip`` is lazy in Python 3.  The original handed the zip
    # iterator to ``dict()`` (exhausting it) and then passed the now-empty
    # iterator to the task.  Materialize it once so both uses see the data.
    fields = list(zip(field_names, normalizers))
    if check_fields is not None:
        check_fields(dict(fields))
    import_task.delay(
        request.user_api.user_account_key, group.key, file_name,
        file_path, fields, has_header)
    utils.clear_file_hints_from_session(request)
utils.clear_file_hints_from_session(request)
def check_import_new_contacts_fields(fields):
    """Validate the field mapping for a new-contact import.

    :param fields: mapping of field name -> normalizer.
    :raises ContactImportException: when no ``msisdn`` column was mapped.
    """
    if u'msisdn' in fields:
        return
    raise ContactImportException("Please specify a Contact Number field.")
def handle_import_new_contacts(request, group):
    # Creating brand-new contacts requires an msisdn column, so the field
    # mapping is validated before the task is queued.
    return dispatch_import_task(
        tasks.import_new_contacts_file, request, group,
        check_import_new_contacts_fields)
def handle_import_upload_is_truth(request, group):
    # Queues the upload-is-truth import task; presumably the uploaded file
    # wins on conflicts (per the task name) -- confirm in go.contacts.tasks.
    return dispatch_import_task(
        tasks.import_upload_is_truth_contacts_file, request, group)
def handle_import_existing_is_truth(request, group):
    # Queues the existing-is-truth import task; presumably existing contact
    # data wins on conflicts (per the task name) -- confirm in go.contacts.tasks.
    return dispatch_import_task(
        tasks.import_existing_is_truth_contacts_file, request, group)
| [
"go.contacts.parsers.ContactFileParser.get_parser",
"go.contacts.utils.get_file_hints_from_session",
"go.contacts.utils.clear_file_hints_from_session"
] | [((339, 381), 'go.contacts.utils.get_file_hints_from_session', 'utils.get_file_hints_from_session', (['request'], {}), '(request)\n', (372, 381), False, 'from go.contacts import tasks, utils\n'), ((406, 445), 'go.contacts.parsers.ContactFileParser.get_parser', 'ContactFileParser.get_parser', (['file_name'], {}), '(file_name)\n', (434, 445), False, 'from go.contacts.parsers import ContactFileParser\n'), ((1121, 1165), 'go.contacts.utils.clear_file_hints_from_session', 'utils.clear_file_hints_from_session', (['request'], {}), '(request)\n', (1156, 1165), False, 'from go.contacts import tasks, utils\n')] |
import os
import pandas as pd
# Absolute path of the project root: two directory levels above this file.
base_project_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def make_table(df):
    """Render one HTML fragment (heading + table) per documentation section.

    ``DocText`` values look like ``"Section: description"``; each row is
    split on its *last* colon into a section and a test description, and
    every section's rows become an ``<h4>`` heading plus an HTML table.

    Note: mutates *df* in place (rewrites ``DocText`` and adds a
    ``DocSection`` column), matching the original behavior.

    :param df: frame with ``DocText``, ``TimeStamp`` and ``FunctionStatus``
        columns.
    :return: dict mapping stripped section name -> HTML fragment.
    """
    html_tables = {}
    # ``n`` must be passed by keyword: it became keyword-only in pandas 2.0,
    # so the original positional ``rsplit(":", 1, ...)`` raises TypeError.
    df[['DocSection', 'DocText']] = df["DocText"].str.rsplit(":", n=1, expand=True)
    # Group on the column name (not a one-element list) so ``section`` is a
    # plain string in every pandas version (list groupers yield tuple keys
    # in pandas 2.x).
    for section, sub_df in df.groupby("DocSection"):
        # Avoid inplace ops on a groupby slice (SettingWithCopyWarning).
        sub_df = sub_df.drop(['DocSection'], axis=1)
        html_text = '<h4>' + section.replace(":", " : ") + "</h4>"
        sub_df = sub_df.rename(columns={
            "DocText": "Test",
            "TimeStamp": "Last Check",
            "FunctionStatus": "Status"
        })
        html_text += sub_df.to_html(index=False) + '\n'
        html_tables[section.strip()] = html_text
    return html_tables
def main():
    """Build the HTML test/simulation report pages from the test log.

    Reads ``logs/test.log``, keeps only the latest entry per test, and
    writes two pages into ``docs/_static``: one for regular test sections
    and one for the ``Simulation*`` sections.
    """
    df = pd.read_csv(
        os.path.join(base_project_path, "logs", "test.log"),
        header=None,
        names=["TimeStamp", "DocText", "FileName", "FunctionName", "FunctionStatus"])
    # Get latest function return
    df = df\
        .sort_values(['TimeStamp'], ascending=True)\
        .groupby(['DocText', 'FileName', 'FunctionName'])\
        .last()\
        .reset_index()
    html_text = make_table(df)
    sections = sorted([i for i in html_text.keys()])
    # Push the general section to the top
    if "General" in sections:
        sections.remove("General")
        sections.insert(0, "General")
    # create html file and save to docs static folder.
    # write out all testing sections
    # NOTE(review): the emitted markup closes </head> *after* the table
    # content and never closes <body>/<html> -- confirm the docs build
    # tolerates this before tidying it up.
    with open(os.path.join(base_project_path, "docs", "_static", "testing_output.html"), 'w') as f:
        f.write("""
        <!DOCTYPE html>
        <html>
        <head>
        <title>Unittest Results</title>
        """)
        with open(os.path.join(base_project_path, "docs", "_static", "testing_base.html"), 'r') as f2:
            notes_data = f2.read()
        f.write(notes_data)
        # Non-simulation sections only on this page.
        for section in sections:
            if not section.startswith('Simulation'):
                f.write(html_text[section])
        f.write("""
        </head>
        <body>
        """)
    with open(os.path.join(base_project_path, "docs", "_static", "simulation_output.html"), 'w') as f:
        # write out all full simulation sections
        f.write("""
        <!DOCTYPE html>
        <html>
        <head>
        <title>Simulation Test Results</title>
        """)
        with open(os.path.join(base_project_path, "docs", "_static", "testing_base.html"), 'r') as f2:
            notes_data = f2.read()
        f.write(notes_data)
        # Simulation sections only on this page.
        for section in sections:
            if section.startswith('Simulation'):
                f.write(html_text[section])
        f.write("""
        </head>
        <body>
        """)
    return
# Regenerate the report pages when invoked as a script.
if __name__ == "__main__":
    main()
| [
"os.path.abspath",
"os.path.join"
] | [((97, 122), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (112, 122), False, 'import os\n'), ((798, 849), 'os.path.join', 'os.path.join', (['base_project_path', '"""logs"""', '"""test.log"""'], {}), "(base_project_path, 'logs', 'test.log')\n", (810, 849), False, 'import os\n'), ((1491, 1564), 'os.path.join', 'os.path.join', (['base_project_path', '"""docs"""', '"""_static"""', '"""testing_output.html"""'], {}), "(base_project_path, 'docs', '_static', 'testing_output.html')\n", (1503, 1564), False, 'import os\n'), ((2102, 2178), 'os.path.join', 'os.path.join', (['base_project_path', '"""docs"""', '"""_static"""', '"""simulation_output.html"""'], {}), "(base_project_path, 'docs', '_static', 'simulation_output.html')\n", (2114, 2178), False, 'import os\n'), ((1738, 1809), 'os.path.join', 'os.path.join', (['base_project_path', '"""docs"""', '"""_static"""', '"""testing_base.html"""'], {}), "(base_project_path, 'docs', '_static', 'testing_base.html')\n", (1750, 1809), False, 'import os\n'), ((2408, 2479), 'os.path.join', 'os.path.join', (['base_project_path', '"""docs"""', '"""_static"""', '"""testing_base.html"""'], {}), "(base_project_path, 'docs', '_static', 'testing_base.html')\n", (2420, 2479), False, 'import os\n')] |
#
# Copyright (c) 2020 <NAME>.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
import ast
import os
import importlib
import copy
from pycg import utils
def get_custom_loader(ig_obj):
    """Return a SourceLoader subclass whose instantiation records the
    imported module in *ig_obj* (an ImportManager).

    Instantiating the loader for a module creates an edge from the module
    currently being analyzed, ensures a graph node exists for the imported
    module, and remembers the module's source path.  The loader never
    hands any real source back to the interpreter.
    """
    class CustomLoader(importlib.abc.SourceLoader):
        def __init__(self, fullname, path):
            self.fullname = fullname
            self.path = path
            self._record()

        def _record(self):
            # The side effect of "loading": update the import graph
            # instead of executing any module code.
            ig_obj.create_edge(self.fullname)
            if not ig_obj.get_node(self.fullname):
                ig_obj.create_node(self.fullname)
            ig_obj.set_filepath(self.fullname, self.path)

        def get_filename(self, fullname):
            # Always the path this loader instance was built with.
            return self.path

        def get_data(self, filename):
            # Empty source: nothing gets compiled or executed.
            return ""

    return CustomLoader
class ImportManager(object):
    """Tracks the import graph of an analyzed package.

    ``install_hooks`` patches ``sys.path_hooks``/``sys.path`` so that
    imports performed while analyzing a module are recorded as edges in
    ``self.import_graph`` (via the loader from ``get_custom_loader``).
    """
    def __init__(self):
        # module name -> {"filename": str, "imports": set of module names}
        self.import_graph = dict()
        self.current_module = ""
        self.input_file = ""
        self.mod_dir = None
        # Interpreter state saved by install_hooks, restored by remove_hooks.
        self.old_path_hooks = None
        self.old_path = None
    def set_pkg(self, input_pkg):
        # Root directory of the package under analysis.
        self.mod_dir = input_pkg
    def get_mod_dir(self):
        return self.mod_dir
    def get_node(self, name):
        # Returns the node dict, or None when the module is unknown.
        if name in self.import_graph:
            return self.import_graph[name]
    def create_node(self, name):
        """Add an empty graph node for *name*; error on duplicates."""
        if not name or not isinstance(name, str):
            raise ImportManagerError("Invalid node name")
        if self.get_node(name):
            raise ImportManagerError("Can't create a node a second time")
        self.import_graph[name] = {"filename": "", "imports": set()}
        return self.import_graph[name]
    def create_edge(self, dest):
        """Record that the currently analyzed module imports *dest*."""
        if not dest or not isinstance(dest, str):
            raise ImportManagerError("Invalid node name")
        node = self.get_node(self._get_module_path())
        if not node:
            raise ImportManagerError("Can't add edge to a non existing node")
        node["imports"].add(dest)
    def _clear_caches(self):
        """Drop importlib/sys caches so our hooks see every import fresh."""
        importlib.invalidate_caches()
        sys.path_importer_cache.clear()
        # TODO: maybe not do that since it empties the whole cache
        for name in self.import_graph:
            if name in sys.modules:
                del sys.modules[name]
    def _get_module_path(self):
        return self.current_module
    def set_current_mod(self, name, fname):
        # Remember which module is currently being analyzed.
        self.current_module = name
        self.input_file = os.path.abspath(fname)
    def get_filepath(self, modname):
        if modname in self.import_graph:
            return self.import_graph[modname]["filename"]
    def set_filepath(self, node_name, filename):
        """Store the absolute source path on an existing node."""
        if not filename or not isinstance(filename, str):
            raise ImportManagerError("Invalid node name")
        node = self.get_node(node_name)
        if not node:
            raise ImportManagerError("Node does not exist")
        node["filename"] = os.path.abspath(filename)
    def get_imports(self, modname):
        if not modname in self.import_graph:
            return []
        return self.import_graph[modname]["imports"]
    def _is_init_file(self):
        return self.input_file.endswith("__init__.py")
    def _handle_import_level(self, name, level):
        """Resolve a (possibly relative) import into (mod_name, package)."""
        # add a dot for each level
        package = self._get_module_path().split(".")
        if level > len(package):
            raise ImportError("Attempting import beyond top level package")
        mod_name = ("." * level) + name
        # When an __init__ file is analyzed, then the module name doesn't contain
        # the __init__ part in it, so special care must be taken for levels.
        if self._is_init_file() and level >= 1:
            if level != 1:
                level -= 1
                package = package[:-level]
        else:
            package = package[:-level]
        return mod_name, ".".join(package)
    def _do_import(self, mod_name, package):
        """Import *mod_name*, recording only an edge for cached modules."""
        if mod_name in sys.modules:
            self.create_edge(mod_name)
            return sys.modules[mod_name]
        return importlib.import_module(mod_name, package=package)
    def handle_import(self, name, level):
        """Resolve and import *name*; return its module path relative to the
        package root, or None when it cannot be resolved in-package."""
        # We currently don't support builtin modules because they're frozen.
        # Add an edge and continue.
        # TODO: identify a way to include frozen modules
        root = name.split(".")[0]
        if root in sys.builtin_module_names:
            self.create_edge(root)
            return
        # Import the module
        try:
            mod_name, package = self._handle_import_level(name, level)
        except ImportError:
            return
        parent = ".".join(mod_name.split(".")[:-1])
        parent_name = ".".join(name.split(".")[:-1])
        # Try several (module, package) resolutions until one imports.
        combos = [(mod_name, package),
                  (parent, package),
                  (utils.join_ns(package, name), ""),
                  (utils.join_ns(package, parent_name), "")]
        mod = None
        for mn, pkg in combos:
            try:
                mod = self._do_import(mn, pkg)
                break
            except:
                continue
        if not mod:
            return
        if not hasattr(mod, "__file__") or not mod.__file__:
            return
        # Only modules inside the analyzed package are of interest.
        if self.mod_dir not in mod.__file__:
            return
        fname = mod.__file__
        if fname.endswith("__init__.py"):
            fname = os.path.split(fname)[0]
        return utils.to_mod_name(
            os.path.relpath(fname, self.mod_dir))
    def get_import_graph(self):
        return self.import_graph
    def install_hooks(self):
        """Install the graph-recording loader as the preferred path hook."""
        loader = get_custom_loader(self)
        self.old_path_hooks = copy.deepcopy(sys.path_hooks)
        self.old_path = copy.deepcopy(sys.path)
        loader_details = loader, importlib.machinery.all_suffixes()
        sys.path_hooks.insert(0, importlib.machinery.FileFinder.path_hook(loader_details))
        # Make the analyzed package importable first.
        sys.path.insert(0, os.path.abspath(self.mod_dir))
        self._clear_caches()
    def remove_hooks(self):
        """Restore the interpreter state saved by install_hooks."""
        sys.path_hooks = self.old_path_hooks
        sys.path = self.old_path
        self._clear_caches()
class ImportManagerError(Exception):
    """Raised by ImportManager for invalid graph operations."""
    pass
| [
"importlib.invalidate_caches",
"importlib.import_module",
"importlib.machinery.FileFinder.path_hook",
"pycg.utils.join_ns",
"os.path.split",
"sys.path_importer_cache.clear",
"copy.deepcopy",
"os.path.abspath",
"importlib.machinery.all_suffixes",
"os.path.relpath"
] | [((2774, 2803), 'importlib.invalidate_caches', 'importlib.invalidate_caches', ([], {}), '()\n', (2801, 2803), False, 'import importlib\n'), ((2812, 2843), 'sys.path_importer_cache.clear', 'sys.path_importer_cache.clear', ([], {}), '()\n', (2841, 2843), False, 'import sys\n'), ((3198, 3220), 'os.path.abspath', 'os.path.abspath', (['fname'], {}), '(fname)\n', (3213, 3220), False, 'import os\n'), ((3674, 3699), 'os.path.abspath', 'os.path.abspath', (['filename'], {}), '(filename)\n', (3689, 3699), False, 'import os\n'), ((4810, 4860), 'importlib.import_module', 'importlib.import_module', (['mod_name'], {'package': 'package'}), '(mod_name, package=package)\n', (4833, 4860), False, 'import importlib\n'), ((6392, 6421), 'copy.deepcopy', 'copy.deepcopy', (['sys.path_hooks'], {}), '(sys.path_hooks)\n', (6405, 6421), False, 'import copy\n'), ((6446, 6469), 'copy.deepcopy', 'copy.deepcopy', (['sys.path'], {}), '(sys.path)\n', (6459, 6469), False, 'import copy\n'), ((6187, 6223), 'os.path.relpath', 'os.path.relpath', (['fname', 'self.mod_dir'], {}), '(fname, self.mod_dir)\n', (6202, 6223), False, 'import os\n'), ((6504, 6538), 'importlib.machinery.all_suffixes', 'importlib.machinery.all_suffixes', ([], {}), '()\n', (6536, 6538), False, 'import importlib\n'), ((6572, 6628), 'importlib.machinery.FileFinder.path_hook', 'importlib.machinery.FileFinder.path_hook', (['loader_details'], {}), '(loader_details)\n', (6612, 6628), False, 'import importlib\n'), ((6657, 6686), 'os.path.abspath', 'os.path.abspath', (['self.mod_dir'], {}), '(self.mod_dir)\n', (6672, 6686), False, 'import os\n'), ((5564, 5592), 'pycg.utils.join_ns', 'utils.join_ns', (['package', 'name'], {}), '(package, name)\n', (5577, 5592), False, 'from pycg import utils\n'), ((5616, 5651), 'pycg.utils.join_ns', 'utils.join_ns', (['package', 'parent_name'], {}), '(package, parent_name)\n', (5629, 5651), False, 'from pycg import utils\n'), ((6116, 6136), 'os.path.split', 'os.path.split', (['fname'], {}), '(fname)\n', 
(6129, 6136), False, 'import os\n')] |
"""
A toy example of playing against defined set of bots on Mocsár
Using env "mocsar"-cfg Using 'human_mode'
"""
import rlcard3
# Make environment and enable human mode
env = rlcard3.make('mocsar-cfg', config={'human_mode': True})
# Register agents
agents = {"mocsar_random": 2, "mocsar_min": 2}
env.model.create_agents(agents)
# Reset environment
state = env.reset()
while not env.is_over():
legal_actions = state['legal_actions']
legal_actions.insert(0, 0)
action = input('>> You choose action (integer): ')
if action == '-1':
print('Break the game...')
break
while not action.isdigit() \
or int(action) not in legal_actions:
print('Action illegal...')
action = input('>> Re-choose action (integer): ')
state, reward, done = env.step(int(action))
| [
"rlcard3.make"
] | [((177, 232), 'rlcard3.make', 'rlcard3.make', (['"""mocsar-cfg"""'], {'config': "{'human_mode': True}"}), "('mocsar-cfg', config={'human_mode': True})\n", (189, 232), False, 'import rlcard3\n')] |
"""A convenience function to rename BCP images
"""
import os
import re
from krcg.parser import _CLAN
def prepare_bcp(path):
for (dirpath, _dirnames, filenames) in os.walk(path):
for name in filenames:
clan_prefix = re.match(r"({})_".format(_CLAN), name.lower())
if clan_prefix:
os.rename(
os.path.join(dirpath, name),
os.path.join(dirpath, name[clan_prefix.end(0) :]),
)
| [
"os.path.join",
"os.walk"
] | [((170, 183), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (177, 183), False, 'import os\n'), ((364, 391), 'os.path.join', 'os.path.join', (['dirpath', 'name'], {}), '(dirpath, name)\n', (376, 391), False, 'import os\n')] |
import hou
import husdoutputprocessors.base as base
import os
class StagingDirOutputProcessor(base.OutputProcessorBase):
"""Output all USD Rop file nodes into the Staging Directory
Ignore any folders and paths set in the Configured Layers
and USD Rop node, just take the filename and save into a
single directory.
"""
theParameters = None
parameter_prefix = "stagingdiroutputprocessor_"
stagingdir_parm_name = parameter_prefix + "stagingDir"
def __init__(self):
self.staging_dir = None
def displayName(self):
return 'StagingDir Output Processor'
def parameters(self):
if not self.theParameters:
parameters = hou.ParmTemplateGroup()
rootdirparm = hou.StringParmTemplate(
self.stagingdir_parm_name,
'Staging Directory', 1,
string_type=hou.stringParmType.FileReference,
file_type=hou.fileType.Directory
)
parameters.append(rootdirparm)
self.theParameters = parameters.asDialogScript()
return self.theParameters
def beginSave(self, config_node, t):
# Use the Root Directory parameter if it is set.
root_dir_parm = config_node.parm(self.stagingdir_parm_name)
if root_dir_parm:
self.staging_dir = root_dir_parm.evalAtTime(t)
if not self.staging_dir:
out_file_parm = config_node.parm('lopoutput')
if out_file_parm:
self.staging_dir = out_file_parm.evalAtTime(t)
if self.staging_dir:
(self.staging_dir, filename) = os.path.split(self.staging_dir)
def endSave(self):
self.staging_dir = None
def processAsset(self, asset_path,
asset_path_for_save,
referencing_layer_path,
asset_is_layer,
for_save):
"""
Args:
asset_path (str): The incoming file path you want to alter or not.
asset_path_for_save (bool): Whether the current path is a
referenced path in the USD file. When True, return the path
you want inside USD file.
referencing_layer_path (str): ???
asset_is_layer (bool): Whether this asset is a USD layer file.
If this is False, the asset is something else (for example,
a texture or volume file).
for_save (bool): Whether the asset path is for a file to be saved
out. If so, then return actual written filepath.
Returns:
The refactored asset path.
"""
# Treat save paths as being relative to the output path.
if for_save and self.staging_dir:
# Whenever we're processing a Save Path make sure to
# resolve it to the Staging Directory
filename = os.path.basename(asset_path)
return os.path.join(self.staging_dir, filename)
return asset_path
output_processor = StagingDirOutputProcessor()
def usdOutputProcessor():
return output_processor
| [
"hou.StringParmTemplate",
"os.path.join",
"os.path.split",
"os.path.basename",
"hou.ParmTemplateGroup"
] | [((695, 718), 'hou.ParmTemplateGroup', 'hou.ParmTemplateGroup', ([], {}), '()\n', (716, 718), False, 'import hou\n'), ((745, 907), 'hou.StringParmTemplate', 'hou.StringParmTemplate', (['self.stagingdir_parm_name', '"""Staging Directory"""', '(1)'], {'string_type': 'hou.stringParmType.FileReference', 'file_type': 'hou.fileType.Directory'}), "(self.stagingdir_parm_name, 'Staging Directory', 1,\n string_type=hou.stringParmType.FileReference, file_type=hou.fileType.\n Directory)\n", (767, 907), False, 'import hou\n'), ((2873, 2901), 'os.path.basename', 'os.path.basename', (['asset_path'], {}), '(asset_path)\n', (2889, 2901), False, 'import os\n'), ((2921, 2961), 'os.path.join', 'os.path.join', (['self.staging_dir', 'filename'], {}), '(self.staging_dir, filename)\n', (2933, 2961), False, 'import os\n'), ((1633, 1664), 'os.path.split', 'os.path.split', (['self.staging_dir'], {}), '(self.staging_dir)\n', (1646, 1664), False, 'import os\n')] |
#!~/.virtualenvs/cv420/bin/python
# -*- coding: utf-8 -*-
"""
Author: <NAME>
Created: 4-May-2020
"""
import serial
import math
from threading import Thread
import rospy
import time
import numpy as np
from std_msgs.msg import String
ser = serial.Serial('/dev/ttyACM1',9600, timeout=5) # Ti MSP430
def inverse_Kinematics(ix,iy,EfH) :
# values in cm
l1=1.12
l2=12.5
l3=9.0
Base=5.5
base_li=6.62#l1+Base
try:
iz=((EfH))-Base
J1H=base_li-(EfH)
s=math.sqrt(pow(ix,2) +pow(iy,2))
r=math.sqrt(pow(s,2) +pow(J1H,2))
alpha2=math.acos((pow(r,2)+pow(l3,2)-pow(l2,2))/(2*r*l3)) #cosine rule
psi=math.atan2(J1H,s)
Efangle=psi+alpha2
Efangle=2*math.pi-Efangle # orientation of the end effector
iphi = Efangle
Sth2=(iz-l1-(l3*math.sin(iphi)))/l2
Cth2=math.sqrt(1-pow(Sth2,2))
ith2=math.atan2(Sth2,Cth2)*(180/math.pi)
Sth1=iy/(l3*math.cos(iphi)+l2*Cth2)
Cth1=math.sqrt(1-pow(Sth1,2))
ith1=math.atan2(Sth1,Cth1)*(180/math.pi)
if ix<0:
if ith1<180 and ith1>0:
ith1=180-ith1 #to give equivalent of ith1 in obtuse form, since since x is negative
if ith1==0: #because ith1 calculation will compute 180 as 0 even when x is negative because they are equivalent
ith1=180
ith3=(iphi*(180/math.pi))-ith2
if ith1>180:
ith1=360-ith1
if ith2>180:
ith2=360-ith2
if ith3>180:
ith3=360-ith3
return int(ith1 ),int(ith2),int(ith3)
except:
pass
class sensorData:
def __init__(self):
self.pose = " 0 \t 0 \t 0 \t 1 \t 1 \t 1 "
def position(self, data):
self.pose = data.data
return self.pose
def listener():
# current arm position
th1=90
th2=90
th3 =0
#object positions
x1,y1,z1 = 0,10,10
x2,y2,z2 = 0,10,10
y1old, y2old = 100,100
# arm home initial angles for objects 23 and 50
i23th1 ,i23th2,i23th3 = 0,90,90
i50th1 ,i50th2,i50th3 = 180,90,90
sensorData1 = sensorData()
rospy.init_node('listener', anonymous=True)
rospy.Subscriber("chatter", String, sensorData1.position)
while not rospy.is_shutdown():
jointData = sensorData1.pose
js= jointData.split("\t")
x1 = -float(js[0])
y1 = 21 - float(js[1]) #21 is length of L2+L3
z1 = float(js[2]) + 1
x2 = -float(js[3])
y2 = 21 - float(js[4])
z2 = float(js[5]) + 1
try:
if abs(abs(y1old)- abs(y1)) > 0.3: # do only is change is significant
th1,th2,th3 = inverse_Kinematics(x1,y1,z1)
if i23th1 != th1 and i23th2 != th2 and i23th3 != th3 and th3 <= 90:
# time.sleep(0.5)
i23th1,i23th2,i23th3 = th1,th2,th3
rospy.loginfo("Object id 23, Joint angles in deg.: "+ str(i23th1)+" " +str(i23th2)+" " +str(i23th3))
rospy.loginfo("Object id 23 position in cm: "+ str(x1) +" " +str(y1)+" " +str(z1))
ser.write(bytearray([251,i23th1])) #loctate object 23 arm1 first
time.sleep(0.1)
ser.write(bytearray([252,i23th2,253,i23th3])) #loctate object 23
time.sleep(2)
ser.write(bytearray([251,0,252,90,253,90])) # rotate base to 0 to indicate id 23
time.sleep(1)
y1old = y1
except:
print ("Error on object id 23 ")
try:
if abs(abs(y2old) - abs(y2)) > 0.3: # do only is change is significant
th1,th2,th3 = inverse_Kinematics(x2,y2,z2)
if i50th1 != th1 and i50th2 != th2 and i50th3 != th3 and th3 <= 90:
# time.sleep(0.5)
i50th1,i50th2,i50th3 = th1,th2,th3
rospy.loginfo("Object id 50, Joint angles in deg.: "+ str(i50th1)+" " +str(i50th2)+" " +str(i50th3))
rospy.loginfo("Object id 50, position in cm "+ str(x2)+" " +str(y2)+" " +str(z2))
ser.write(bytearray([251,i50th1])) #locate object 50 arm1 first
time.sleep(0.1)
ser.write(bytearray([252,i50th2,253,i50th3])) #locate object 50
time.sleep(1)
ser.write(bytearray([251,180,252,90,253,90])) # rotate to base to 180 to indicate id 50
time.sleep(1)
y2old = y2
except:
print ("Error on object id 50")
if __name__ == '__main__':
try:
listener()
ser.close()
print("Ros shutdown, serial port closed")
except rospy.ROSInterruptException:
ser.close()
print("Ros interrupeted, serial port closed")
except KeyboardInterrupt:
ser.close()
print("KeyboardInterrupt, serial port closed")
| [
"rospy.is_shutdown",
"rospy.init_node",
"time.sleep",
"math.sin",
"math.cos",
"serial.Serial",
"math.atan2",
"rospy.Subscriber"
] | [((243, 289), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyACM1"""', '(9600)'], {'timeout': '(5)'}), "('/dev/ttyACM1', 9600, timeout=5)\n", (256, 289), False, 'import serial\n'), ((2261, 2304), 'rospy.init_node', 'rospy.init_node', (['"""listener"""'], {'anonymous': '(True)'}), "('listener', anonymous=True)\n", (2276, 2304), False, 'import rospy\n'), ((2309, 2366), 'rospy.Subscriber', 'rospy.Subscriber', (['"""chatter"""', 'String', 'sensorData1.position'], {}), "('chatter', String, sensorData1.position)\n", (2325, 2366), False, 'import rospy\n'), ((667, 685), 'math.atan2', 'math.atan2', (['J1H', 's'], {}), '(J1H, s)\n', (677, 685), False, 'import math\n'), ((2386, 2405), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2403, 2405), False, 'import rospy\n'), ((899, 921), 'math.atan2', 'math.atan2', (['Sth2', 'Cth2'], {}), '(Sth2, Cth2)\n', (909, 921), False, 'import math\n'), ((1048, 1070), 'math.atan2', 'math.atan2', (['Sth1', 'Cth1'], {}), '(Sth1, Cth1)\n', (1058, 1070), False, 'import math\n'), ((828, 842), 'math.sin', 'math.sin', (['iphi'], {}), '(iphi)\n', (836, 842), False, 'import math\n'), ((964, 978), 'math.cos', 'math.cos', (['iphi'], {}), '(iphi)\n', (972, 978), False, 'import math\n'), ((3353, 3368), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3363, 3368), False, 'import time\n'), ((3474, 3487), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3484, 3487), False, 'import time\n'), ((3609, 3622), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3619, 3622), False, 'import time\n'), ((4401, 4416), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (4411, 4416), False, 'import time\n'), ((4521, 4534), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4531, 4534), False, 'import time\n'), ((4663, 4676), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4673, 4676), False, 'import time\n')] |
#!/usr/bin/env python3
"""TPatrick | Alta3 Research
Creating a simple dice program utilizing classes."""
from random import randint
class Player:
def __init__(self):
self.dice = []
def roll(self):
self.dice = []
for i in range(3):
self.dice.append(randint(1,6))
def get_dice(self):
return self.dice
def main():
"""Called at run time"""
player1 = Player()
player2 = Player()
player1.roll()
player2.roll()
print(f"Player 1 rolled {player1.get_dice()}")
print(f"Player 2 rolled {player2.get_dice()}")
if sum(player1.get_dice()) == sum(player2.get_dice()):
print("Draw!")
elif sum(player1.get_dice()) > sum(player2.get_dice()):
print("Player 1 wins!")
else:
print("Player 2 wins!")
if __name__ == "__main__":
main()
| [
"random.randint"
] | [((299, 312), 'random.randint', 'randint', (['(1)', '(6)'], {}), '(1, 6)\n', (306, 312), False, 'from random import randint\n')] |
# coding=utf-8
from dynamo.api.serializers import DynamicModelSerializer, DynamicModelFieldSerializer
from dynamo.models import DynamicModel, DynamicModelField
from rest_framework import viewsets
from rest_framework import generics
from rest_framework.renderers import TemplateHTMLRenderer, JSONRenderer, HTMLFormRenderer
from rest_framework.response import Response
class DynamicModelViewSet(viewsets.ModelViewSet):
model = DynamicModel
serializer_class = DynamicModelSerializer
class DynamicModelFieldViewSet(viewsets.ModelViewSet):
queryset = DynamicModelField.objects.all()
serializer_class = DynamicModelFieldSerializer
# permission_classes = (permissions.IsAuthenticatedOrReadOnly,
# IsOwnerOrReadOnly,)
| [
"dynamo.models.DynamicModelField.objects.all"
] | [((563, 594), 'dynamo.models.DynamicModelField.objects.all', 'DynamicModelField.objects.all', ([], {}), '()\n', (592, 594), False, 'from dynamo.models import DynamicModel, DynamicModelField\n')] |
from __future__ import unicode_literals
from django.db import models
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
class TableMap(models.Model):
"""
Combines local tables with google fusion tables via
fusiontable table id and local name created from app_label
and model name.
"""
table_name = models.CharField(
max_length=255,
default='',
verbose_name=_("Local table name (<app_label>;<model__name>)")
)
ft_id = models.CharField(
max_length=255,
default='',
verbose_name=_("Fusiontable table id")
)
class KFTQuerySet(models.query.QuerySet):
"""
KFT Query Set. Contains overwritten update methods.
Update call post_save signal and pass to them required data.
Moreover, added '__raw' flag which works like a 'raw' flag from
base_save method.
"""
def update(self, **kwargs):
"""
Convert custom '__raw' flag to 'raw' flag from base_save.
Call post_save signal on update.
"""
raw = kwargs.get('__raw', False)
if raw:
del kwargs['__raw']
super(KFTQuerySet, self).update(**kwargs)
for instance in self._clone():
post_save.send(
sender=self.model,
instance=instance,
raw=raw
)
class KFTManager(models.Manager):
"""
KFT Manager. Required for modify update method from queryset.
Check out KFTQuerySet class.
"""
def get_queryset(self):
return KFTQuerySet(self.model, using=self._db)
class KFTModel(models.Model):
"""
Abstract base KFTModel. Add some required fields. Auto-synchronize
tables must inherit from this abstract class.
"""
class Meta:
abstract = True
objects = KFTManager()
_fusiontablesync = True
_ft_synced_at = models.DateTimeField(
blank=True,
null=True,
verbose_name=_("Date of the last synchronization")
)
_updated_at = models.DateTimeField(
blank=False,
null=False,
auto_now=True,
verbose_name=_("Date of the last update")
)
_ft_id = models.CharField(
max_length=255,
blank=True,
verbose_name=_("Google fusiontable row id")
)
# class TestModel1(KFTModel):
# test_field11 = models.CharField(max_length=255)
# test_field12 = models.CharField(max_length=255)
# class TestModel2(KFTModel):
# test_field21 = models.CharField(max_length=255)
# test_field22 = models.CharField(max_length=255)
| [
"django.utils.translation.ugettext_lazy",
"django.db.models.signals.post_save.send"
] | [((460, 509), 'django.utils.translation.ugettext_lazy', '_', (['"""Local table name (<app_label>;<model__name>)"""'], {}), "('Local table name (<app_label>;<model__name>)')\n", (461, 509), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((611, 636), 'django.utils.translation.ugettext_lazy', '_', (['"""Fusiontable table id"""'], {}), "('Fusiontable table id')\n", (612, 636), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1267, 1328), 'django.db.models.signals.post_save.send', 'post_save.send', ([], {'sender': 'self.model', 'instance': 'instance', 'raw': 'raw'}), '(sender=self.model, instance=instance, raw=raw)\n', (1281, 1328), False, 'from django.db.models.signals import post_save\n'), ((1993, 2030), 'django.utils.translation.ugettext_lazy', '_', (['"""Date of the last synchronization"""'], {}), "('Date of the last synchronization')\n", (1994, 2030), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2162, 2190), 'django.utils.translation.ugettext_lazy', '_', (['"""Date of the last update"""'], {}), "('Date of the last update')\n", (2163, 2190), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2293, 2323), 'django.utils.translation.ugettext_lazy', '_', (['"""Google fusiontable row id"""'], {}), "('Google fusiontable row id')\n", (2294, 2323), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
from flask import Blueprint
order_api_blueprint = Blueprint('order_api', __name__)
from . import routes | [
"flask.Blueprint"
] | [((51, 83), 'flask.Blueprint', 'Blueprint', (['"""order_api"""', '__name__'], {}), "('order_api', __name__)\n", (60, 83), False, 'from flask import Blueprint\n')] |
# -*- coding: utf-8 -*-
import requests
import os
from lxml import etree
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
try:
from .xmlns import strip_xmlns
from .service import Service
from .embedded_device import EmbeddedDevice
from .instance_singleton import InstanceSingleton
except ImportError:
from xmlns import strip_xmlns
from service import Service
from embedded_device import EmbeddedDevice
from instance_singleton import InstanceSingleton
class UPNPObject(object):
def __init__(self, ip, locations, dump=''):
self.ip_address = ip
self._devices = {}
self._services = {}
for location in locations:
parsed_url = urlparse(location)
url = parsed_url.scheme + '://' + parsed_url.netloc
response = requests.get(location)
content = response.content.decode('utf-8')
if dump:
path = location
if path.startswith('/'):
path = path[1:]
if '/' in path:
path, file_name = path.rsplit('/', 1)
path = os.path.join(dump, path)
else:
file_name = path
path = dump
if not os.path.exists(path):
os.makedirs(path)
if not file_name.endswith('.xml'):
file_name += '.xml'
with open(os.path.join(path, file_name), 'w') as f:
f.write(content)
try:
root = etree.fromstring(content)
except etree.XMLSyntaxError:
continue
root = strip_xmlns(root)
node = root.find('device')
services = node.find('serviceList')
if services is None:
services = []
devices = node.find('deviceList')
if devices is None:
devices = []
for service in services:
scpdurl = service.find('SCPDURL').text.replace(url, '')
control_url = service.find('controlURL').text
if control_url is None:
if scpdurl.endswith('.xml'):
control_url = scpdurl.rsplit('/', 1)[0]
if control_url == scpdurl:
control_url = ''
else:
control_url = scpdurl
else:
control_url = control_url.replace(url, '')
service_id = service.find('serviceId').text
service_type = service.find('serviceType').text
service = Service(
self,
url,
scpdurl,
service_type,
control_url,
node,
dump=dump
)
name = service_id.split(':')[-1]
service.__name__ = name
self._services[name] = service
for device in devices:
device = EmbeddedDevice(
url,
node=device,
parent=self,
dump=dump
)
self._devices[device.__name__] = device
def __getattr__(self, item):
if item in self.__dict__:
return self.__dict__[item]
if item in self._devices:
return self._devices[item]
if item in self._services:
return self._services[item]
if item in self.__class__.__dict__:
if hasattr(self.__class__.__dict__[item], 'fget'):
return self.__class__.__dict__[item].fget(self)
raise AttributeError(item)
@property
def as_dict(self):
res = dict(
services=list(service.as_dict for service in self.services),
devices=list(device.as_dict for device in self.devices)
)
return res
@property
def access_point(self):
return self.__class__.__name__
@property
def services(self):
return list(self._services.values())[:]
@property
def devices(self):
return list(self._devices.values())[:]
def __str__(self):
output = '\n\n' + str(self.__name__) + '\n'
output += 'IP Address: ' + self.ip_address + '\n'
output += '==============================================\n'
if self.services:
output += 'Services:\n'
for cls in self.services:
output += cls.__str__(indent=' ').rstrip() + '\n'
else:
output += 'Services: None\n'
if self.devices:
output += 'Devices:\n'
for cls in self.devices:
output += cls.__str__(indent=' ').rstrip() + '\n'
else:
output += 'Devices: None\n'
return output
| [
"os.path.exists",
"service.Service",
"urllib.parse.urlparse",
"os.makedirs",
"os.path.join",
"requests.get",
"lxml.etree.fromstring",
"xmlns.strip_xmlns",
"embedded_device.EmbeddedDevice"
] | [((755, 773), 'urllib.parse.urlparse', 'urlparse', (['location'], {}), '(location)\n', (763, 773), False, 'from urllib.parse import urlparse\n'), ((861, 883), 'requests.get', 'requests.get', (['location'], {}), '(location)\n', (873, 883), False, 'import requests\n'), ((1740, 1757), 'xmlns.strip_xmlns', 'strip_xmlns', (['root'], {}), '(root)\n', (1751, 1757), False, 'from xmlns import strip_xmlns\n'), ((1628, 1653), 'lxml.etree.fromstring', 'etree.fromstring', (['content'], {}), '(content)\n', (1644, 1653), False, 'from lxml import etree\n'), ((2748, 2819), 'service.Service', 'Service', (['self', 'url', 'scpdurl', 'service_type', 'control_url', 'node'], {'dump': 'dump'}), '(self, url, scpdurl, service_type, control_url, node, dump=dump)\n', (2755, 2819), False, 'from service import Service\n'), ((3175, 3231), 'embedded_device.EmbeddedDevice', 'EmbeddedDevice', (['url'], {'node': 'device', 'parent': 'self', 'dump': 'dump'}), '(url, node=device, parent=self, dump=dump)\n', (3189, 3231), False, 'from embedded_device import EmbeddedDevice\n'), ((1189, 1213), 'os.path.join', 'os.path.join', (['dump', 'path'], {}), '(dump, path)\n', (1201, 1213), False, 'import os\n'), ((1329, 1349), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (1343, 1349), False, 'import os\n'), ((1371, 1388), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (1382, 1388), False, 'import os\n'), ((1508, 1537), 'os.path.join', 'os.path.join', (['path', 'file_name'], {}), '(path, file_name)\n', (1520, 1537), False, 'import os\n')] |
import plotly.offline as py
import plotly.graph_objs as go
import plotly.figure_factory as ff
import pandas as pd
import numpy as np
def plotlinechart(data_list, countries, plot_name):
data_list.index = data_list.index.strftime("%Y-%m-%d")
fig = go.Figure()
if not countries:
countries = data_list['country'].unique()
for country in countries:
df = data_list['count'].loc[data_list['country'] == country]
x = df.index
values = df.values
fig.add_trace(go.Scatter(
x=x,
y=values,
name=country, # Style name/legend entry with html tags
connectgaps=True # override default to connect the gaps
))
fig.update_layout(yaxis_type="log")
chart = py.plot(
fig,
show_link=False,
output_type='div',
include_plotlyjs=False,
auto_open=False,
)
return chart
def curva_evolucao_confirmados(base,lista_paises):
confirmed_cases_t2 = base.reset_index()
confirmed_cases_t2.columns = ['variable','Country/Region','value']
first_date = confirmed_cases_t2.loc[confirmed_cases_t2['value']>=50].sort_values(by=['Country/Region','variable']).groupby('Country/Region').head(1)
first_date = first_date[['Country/Region', 'variable']]
first_date.columns = ['Country/Region', 'first_date']
confirmed_cases_t3 = confirmed_cases_t2.merge(first_date,on='Country/Region', how='left')
confirmed_cases_t4 = confirmed_cases_t3.loc[~confirmed_cases_t3['first_date'].isna()]
confirmed_cases_t4['var_dates'] = (confirmed_cases_t4['variable'] - confirmed_cases_t4['first_date']).dt.days
confirmed_cases_t5 = confirmed_cases_t4.loc[confirmed_cases_t4['var_dates']>=0].sort_values(by=['Country/Region','var_dates']).groupby(['Country/Region','var_dates']).agg({'value':sum}).reset_index()
paises_show = confirmed_cases_t5.groupby('Country/Region')['var_dates'].count().reset_index()
confirmed_cases_t5 = confirmed_cases_t5.loc[confirmed_cases_t5['Country/Region'].isin(list(paises_show.loc[paises_show['var_dates']>=5,'Country/Region']))]
confirmed_cases_t6 = confirmed_cases_t5.loc[confirmed_cases_t5['Country/Region'].isin(lista_paises)]
curva_evolucao_confirmados = confirmed_cases_t6
return curva_evolucao_confirmados
def plot_curva_evolucao_confirmados(dados_graf,dados_paises,plot_china=0):
fig = go.Figure()
dados_graf = dados_graf.merge(dados_paises[['Country/Region','Order']].drop_duplicates(),on='Country/Region').sort_values(by=['Order','var_dates'])
dados_graf = dados_graf.loc[dados_graf['var_dates']<=dados_graf.loc[dados_graf['Country/Region']!='China','var_dates'].max()]
if plot_china==0:
dados_graf = dados_graf.loc[~dados_graf['Country/Region'].isin(['China'])]
for i in list(dados_graf['Country/Region'].unique()):
fig.add_trace(go.Scatter(x=list(dados_graf.loc[dados_graf['Country/Region']==i,'var_dates']),
y=list(dados_graf.loc[dados_graf['Country/Region']==i,'value']), name=i,mode='lines',line_shape='spline',
line=dict(color=dados_paises.loc[dados_paises['Country/Region']==i,'Color'].values[0],width=dados_paises.loc[dados_paises['Country/Region']==i,'Width'].values[0])))
fig.update_layout(yaxis_type="log",paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)',title='Evolução de casos confirmados para países selecionados')
fig.update_xaxes(title_text='# Dias desde a confirmação de 50 casos', showline=True, linewidth=1, linecolor='rgb(128,128,128)',showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
fig.update_yaxes(title_text='# casos (k)', showline=True, linewidth=1, linecolor='rgb(128,128,128)',showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
chart = py.plot(
fig,
show_link=False,
output_type='div',
include_plotlyjs=False,
auto_open=False,
)
return chart
def progressao_confirmados(base,lista_paises):
base_analise = curva_evolucao_confirmados(base,lista_paises)
base_agrega = pd.DataFrame()
for j in list(base_analise['Country/Region'].unique()):
base_pais = base_analise.loc[base_analise['Country/Region']==j].sort_values(by='var_dates')
for i in list(base_pais['var_dates']):
try:
qtde_dias = int(base_pais.loc[base_pais['value']>=int(base_pais.loc[base_pais['var_dates']==i,'value'])*2].head(1)['var_dates'])-i
except:
qtde_dias = np.nan
#print(i,j,qtde_dias)
base_valores = pd.DataFrame(data={'pais':j,'data':i,'qtde_dias':qtde_dias},index={0})
base_agrega = pd.concat([base_agrega,base_valores])
base_agrega = base_agrega.reset_index(drop=True)
base_agrega = base_agrega.loc[~base_agrega['qtde_dias'].isna()]
base_agrega.rename(columns={'pais':'Country/Region','data':'var_dates','qtde_dias':'value'},inplace=True)
base_agrega = base_agrega.loc[base_agrega['Country/Region'].isin(lista_paises)]
return base_agrega
def plot_progressao_confirmados(dados_graf,dados_paises,plot_china=0):
fig = go.Figure()
dados_graf = dados_graf.merge(dados_paises[['Country/Region','Order']].drop_duplicates(),on='Country/Region').sort_values(by=['Order','var_dates'])
if plot_china==0:
dados_graf = dados_graf.loc[~dados_graf['Country/Region'].isin(['China'])]
for i in list(dados_graf['Country/Region'].unique()):
fig.add_trace(go.Scatter(x=list(dados_graf.loc[dados_graf['Country/Region']==i,'var_dates']),
y=list(dados_graf.loc[dados_graf['Country/Region']==i,'value']), name=i,mode='lines',line_shape='spline',
line=dict(color=dados_paises.loc[dados_paises['Country/Region']==i,'Color'].values[0],width=dados_paises.loc[dados_paises['Country/Region']==i,'Width'].values[0])))
fig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', title='Variação da velocidade de progressão de contaminação para países selecionados')
fig.update_xaxes(title_text='# Dias desde a confirmação de 50 casos', showline=True, linewidth=1, linecolor='rgb(128,128,128)',showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
fig.update_yaxes(title_text='# dias para dobrar número de casos', showline=True, linewidth=1, linecolor='rgb(128,128,128)',showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
chart = py.plot(
fig,
show_link=False,
output_type='div',
include_plotlyjs=False,
auto_open=False,
)
return chart
def acumulo_progressao_confirmados(base,lista_paises):
base_analise = curva_evolucao_confirmados(base,lista_paises)
m = int((5 * round(base_analise['var_dates'].max()/5))/5) + 1
dias = [0] + list(range(5, (m * 5)+1, 5))
base_análise_2 = base_analise.loc[base_analise['var_dates'].isin(dias)]
base_analise_3 = base_análise_2.pivot(index='Country/Region',columns='var_dates',values='value')
base_analise_3.columns = ['casos_'+str(i) for i in list(base_analise_3.columns)]
cols_antes = len(base_analise_3.columns)
for i in range(0,len(list(base_analise_3.columns))):
#base_analise_3[list(base_analise_3.columns)[i+1] + '_' + str(dias[i])] = base_analise_3.iloc[:,i+1] / base_analise_3.iloc[:,i]
base_analise_3[str(dias[i+1])] = base_analise_3.iloc[:,i+1] / base_analise_3.iloc[:,i]
#base_analise_3['0'] = 1
base_analise_3 = base_analise_3[list(base_analise_3.columns[cols_antes:])].reset_index().melt(id_vars=['Country/Region'])
base_analise_3['variable'] = base_analise_3['variable'].astype(int)
base_analise_3 = base_analise_3.sort_values(by=['Country/Region','variable'])
base_analise_3['value'] = base_analise_3['value'].fillna(0)
base_analise_3 = base_analise_3.groupby(by=['Country/Region','variable']).sum().groupby(level=[0]).cumprod().reset_index()
maximo_valor = base_analise_3.groupby('Country/Region')['value'].max().reset_index()
maximo_valor.rename(columns={'value':'value_max'},inplace=True)
aux_max = base_analise_3.merge(maximo_valor,on='Country/Region',how='left')
aux_max1 = aux_max.loc[(aux_max['value']<aux_max['value_max'])&(aux_max['value']!=0)]
aux_max2 = aux_max.loc[aux_max['value']>=aux_max['value_max']]
base_analise_4 = pd.concat([aux_max1,aux_max2.groupby('Country/Region').head(1)]).sort_values(by=['Country/Region','variable'])
base_analise_4.drop(columns='value_max',inplace=True)
coloca_zero = pd.DataFrame(data={'Country/Region':list(base_analise_4['Country/Region'].unique())})
coloca_zero['variable'] = 0
coloca_zero['value'] = 1
base_analise_4 = pd.concat([base_analise_4,coloca_zero])
base_analise_4 = base_analise_4.sort_values(by=['Country/Region','variable'])
base_analise_4.columns = ['Country/Region', 'var_dates', 'value']
return base_analise_4
def plot_acumulo_progressao_confirmados(dados_graf,dados_paises,plot_china=0):
fig = go.Figure()
dados_graf = dados_graf.merge(dados_paises[['Country/Region','Order']].drop_duplicates(),on='Country/Region').sort_values(by=['Order','var_dates'])
dados_graf = dados_graf.loc[dados_graf['var_dates']<=dados_graf.loc[dados_graf['Country/Region']!='China','var_dates'].max()]
if plot_china==0:
dados_graf = dados_graf.loc[~dados_graf['Country/Region'].isin(['China'])]
for i in list(dados_graf['Country/Region'].unique()):
fig.add_trace(go.Scatter(x=list(dados_graf.loc[dados_graf['Country/Region']==i,'var_dates']),
y=list(dados_graf.loc[dados_graf['Country/Region']==i,'value']), name=i,mode='lines',line_shape='spline',
line=dict(color=dados_paises.loc[dados_paises['Country/Region']==i,'Color'].values[0],width=dados_paises.loc[dados_paises['Country/Region']==i,'Width'].values[0])))
fig.update_layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', title='Progressão acumulada de contaminação em relação ao ponto 0 para países selecionados')
fig.update_xaxes(title_text='# Dias desde a confirmação de 50 casos', showline=True, linewidth=1, linecolor='rgb(128,128,128)',showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
fig.update_yaxes(title_text='Número de vezes em relação ao ponto 0, acumulado', showline=True, linewidth=1, linecolor='rgb(128,128,128)',showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
chart = py.plot(
fig,
show_link=False,
output_type='div',
include_plotlyjs=False,
auto_open=False,
)
return chart
def projecao_brasil(base,lista_paises,lista_paises2):
    """Project Brazil's case counts using other countries' progression rates.

    base          -- raw case data passed to the curve helpers
    lista_paises  -- countries used to build the evolution/progression curves
    lista_paises2 -- countries whose progression rates drive the projection

    Returns a long-format frame with columns
    ['Country/Region', 'var_dates', 'value'].
    """
    base_analise = curva_evolucao_confirmados(base,lista_paises)
    base_acumulo = acumulo_progressao_confirmados(base,lista_paises)
    # Observed Brazilian series, and the progression of Brazil plus the
    # reference countries only.
    real_brasil = base_analise.loc[base_analise['Country/Region']=='Brazil']
    base_acumulo = base_acumulo.loc[base_acumulo['Country/Region'].isin(['Brazil']+lista_paises2)]
    aux_acumulo = base_acumulo.pivot(index='var_dates',columns='Country/Region',values='value').reset_index()
    lista_pontos = list(aux_acumulo['var_dates'].unique())
    # 'ponteiro' indexes the first day with no Brazilian data yet
    # (assumes such a day exists -- raises IndexError otherwise; TODO confirm).
    ponteiro = [i for i,x in enumerate(lista_pontos) if x == aux_acumulo.loc[aux_acumulo['Brazil'].isna()].head(1)['var_dates'].values[0]][0]
    # Rebase every country's curve by its value on the last observed
    # Brazilian day, so all curves pass through 1.0 at that point.
    aux_ajusta_previsao = base_acumulo.loc[base_acumulo['var_dates']==(lista_pontos[ponteiro-1]),['Country/Region','value']]
    aux_ajusta_previsao.columns=['Country/Region','value_ajust']
    base_acumulo_2 = base_acumulo.merge(aux_ajusta_previsao,on=['Country/Region'])
    base_acumulo_2['value_new'] = base_acumulo_2['value']/base_acumulo_2['value_ajust']
    # Scale the rebased curves by Brazil's actual case count on that day.
    base_projecao = real_brasil.loc[real_brasil['var_dates']==lista_pontos[ponteiro-1],'value']
    base_acumulo_2['casos_projecao'] = base_acumulo_2['value_new'] * int(base_projecao)
    base_acumulo_3 = base_acumulo_2.pivot(index='var_dates',columns='Country/Region',values='casos_projecao').reset_index()
    # Limit the projection horizon to day 30 after the 50-case mark.
    base_acumulo_3 = base_acumulo_3.loc[base_acumulo_3['var_dates']<=30]
    # Union of projected and observed dates, then left-join both series on it.
    aux_final = pd.DataFrame(pd.concat([base_acumulo_3[['var_dates']],real_brasil[['var_dates']]])['var_dates'].unique())
    aux_final.columns=['var_dates']
    aux_final = aux_final.sort_values(by='var_dates').merge(base_acumulo_3[['var_dates']+lista_paises2],on='var_dates',how='left').merge(real_brasil[['var_dates','value']],on='var_dates',how='left')
    aux_final.rename(columns={'value':'Brazil'},inplace=True)
    # Blank out projected values before the projection start date.
    aux_final.loc[aux_final['var_dates']<lista_pontos[ponteiro-1],lista_paises2]= np.nan
    # Back to long format for plotting.
    aux_final = aux_final.melt(id_vars='var_dates')[['variable','var_dates','value']]
    aux_final.rename(columns={'variable':'Country/Region'},inplace=True)
    return aux_final
def plot_projecao_brasil(dados_graf, dados_paises, plot_china=0):
    """Plot the projected Brazilian case counts on a log-scaled y axis.

    dados_graf  -- long-format frame with 'Country/Region', 'var_dates', 'value'
    dados_paises -- per-country styling ('Order', 'Color', 'Width')
    plot_china  -- 0 (default) omits China from the chart

    Returns the chart as an embeddable HTML <div> string.
    """
    # Attach each country's plotting order and sort accordingly.
    ordem = dados_paises[['Country/Region', 'Order']].drop_duplicates()
    dados_graf = dados_graf.merge(ordem, on='Country/Region').sort_values(by=['Order', 'var_dates'])
    # Truncate every series at the newest day observed outside China.
    limite = dados_graf.loc[dados_graf['Country/Region'] != 'China', 'var_dates'].max()
    dados_graf = dados_graf.loc[dados_graf['var_dates'] <= limite]
    if plot_china == 0:
        dados_graf = dados_graf.loc[~dados_graf['Country/Region'].isin(['China'])]
    fig = go.Figure()
    for country in dados_graf['Country/Region'].unique():
        serie = dados_graf.loc[dados_graf['Country/Region'] == country]
        estilo = dados_paises.loc[dados_paises['Country/Region'] == country]
        fig.add_trace(go.Scatter(x=list(serie['var_dates']),
                                 y=list(serie['value']), name=country, mode='lines', line_shape='spline',
                                 line=dict(color=estilo['Color'].values[0], width=estilo['Width'].values[0])))
    fig.update_layout(yaxis_type="log", paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', title='Estimativa de casos Brasil com base na taxa de progressão da contaminação de outros países')
    fig.update_xaxes(title_text='# Dias desde a confirmação de 50 casos', showline=True, linewidth=1, linecolor='rgb(128,128,128)', showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
    fig.update_yaxes(title_text='# casos (k)', showline=True, linewidth=1, linecolor='rgb(128,128,128)', showgrid=True, gridwidth=0.5, gridcolor='rgb(240,240,240)')
    return py.plot(
        fig,
        show_link=False,
        output_type='div',
        include_plotlyjs=False,
        auto_open=False,
    )
"plotly.offline.plot",
"plotly.graph_objs.Scatter",
"pandas.DataFrame",
"plotly.graph_objs.Figure",
"pandas.concat"
] | [((258, 269), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (267, 269), True, 'import plotly.graph_objs as go\n'), ((764, 857), 'plotly.offline.plot', 'py.plot', (['fig'], {'show_link': '(False)', 'output_type': '"""div"""', 'include_plotlyjs': '(False)', 'auto_open': '(False)'}), "(fig, show_link=False, output_type='div', include_plotlyjs=False,\n auto_open=False)\n", (771, 857), True, 'import plotly.offline as py\n'), ((2455, 2466), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (2464, 2466), True, 'import plotly.graph_objs as go\n'), ((3893, 3986), 'plotly.offline.plot', 'py.plot', (['fig'], {'show_link': '(False)', 'output_type': '"""div"""', 'include_plotlyjs': '(False)', 'auto_open': '(False)'}), "(fig, show_link=False, output_type='div', include_plotlyjs=False,\n auto_open=False)\n", (3900, 3986), True, 'import plotly.offline as py\n'), ((4197, 4211), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (4209, 4211), True, 'import pandas as pd\n'), ((5320, 5331), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (5329, 5331), True, 'import plotly.graph_objs as go\n'), ((6653, 6746), 'plotly.offline.plot', 'py.plot', (['fig'], {'show_link': '(False)', 'output_type': '"""div"""', 'include_plotlyjs': '(False)', 'auto_open': '(False)'}), "(fig, show_link=False, output_type='div', include_plotlyjs=False,\n auto_open=False)\n", (6660, 6746), True, 'import plotly.offline as py\n'), ((8962, 9002), 'pandas.concat', 'pd.concat', (['[base_analise_4, coloca_zero]'], {}), '([base_analise_4, coloca_zero])\n', (8971, 9002), True, 'import pandas as pd\n'), ((9281, 9292), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (9290, 9292), True, 'import plotly.graph_objs as go\n'), ((10769, 10862), 'plotly.offline.plot', 'py.plot', (['fig'], {'show_link': '(False)', 'output_type': '"""div"""', 'include_plotlyjs': '(False)', 'auto_open': '(False)'}), "(fig, show_link=False, output_type='div', include_plotlyjs=False,\n 
auto_open=False)\n", (10776, 10862), True, 'import plotly.offline as py\n'), ((13170, 13181), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (13179, 13181), True, 'import plotly.graph_objs as go\n'), ((14646, 14739), 'plotly.offline.plot', 'py.plot', (['fig'], {'show_link': '(False)', 'output_type': '"""div"""', 'include_plotlyjs': '(False)', 'auto_open': '(False)'}), "(fig, show_link=False, output_type='div', include_plotlyjs=False,\n auto_open=False)\n", (14653, 14739), True, 'import plotly.offline as py\n'), ((513, 570), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'x': 'x', 'y': 'values', 'name': 'country', 'connectgaps': '(True)'}), '(x=x, y=values, name=country, connectgaps=True)\n', (523, 570), True, 'import plotly.graph_objs as go\n'), ((4736, 4812), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{'pais': j, 'data': i, 'qtde_dias': qtde_dias}", 'index': '{0}'}), "(data={'pais': j, 'data': i, 'qtde_dias': qtde_dias}, index={0})\n", (4748, 4812), True, 'import pandas as pd\n'), ((4837, 4875), 'pandas.concat', 'pd.concat', (['[base_agrega, base_valores]'], {}), '([base_agrega, base_valores])\n', (4846, 4875), True, 'import pandas as pd\n'), ((12417, 12487), 'pandas.concat', 'pd.concat', (["[base_acumulo_3[['var_dates']], real_brasil[['var_dates']]]"], {}), "([base_acumulo_3[['var_dates']], real_brasil[['var_dates']]])\n", (12426, 12487), True, 'import pandas as pd\n')] |
# -*- coding: utf-8 -*-
# PyGlobi
# TODO:
"""
PyGlobi Library
~~~~~~~~~~~~~~~~~~~~~
Python API for the Global Biotic Interactions (GloBI) dataset.
Basic usage:
>>> import pyglobi
>>> ...
"""
import os
import json
import warnings
from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__
from .models import *
# Module-level data
TEST = 10
# Registry of known interaction types, keyed by interaction name.
interactions = {}
# TODO: Load from local user directory cache
# Populate the registry from the bundled data/interactions.json file,
# resolved relative to this package's install location.
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data', "interactions.json"), "r") as fptr:
    data = json.load(fptr)
    for _name, _values in data.items():
        # Interaction comes from .models (star-imported above).
        interactions[_name] = Interaction(
            name=_name,
            source=_values.get("source"),
            target=_values.get("target"),
            term_iri=_values.get("termIRI"),
            neo_type=_values.get("neo_type")
        )
from . import config
from . import utils
from .models import *
from .api import *
from .sessions import *
from .status_codes import *
from .exceptions import *
from .queries import *
# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
| [
"json.load",
"os.path.abspath",
"logging.getLogger",
"logging.NullHandler"
] | [((705, 720), 'json.load', 'json.load', (['fptr'], {}), '(fptr)\n', (714, 720), False, 'import json\n'), ((1334, 1347), 'logging.NullHandler', 'NullHandler', ([], {}), '()\n', (1345, 1347), False, 'from logging import NullHandler\n'), ((1295, 1322), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1312, 1322), False, 'import logging\n'), ((623, 648), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (638, 648), False, 'import os\n')] |
from guifw.abstractparameters import *
from geometry import *
from solids import *
import multiprocessing as mp
import time
import pyclipper
from polygons import *
from gcode import *
from collections import OrderedDict
class LatheThreadingTool(ItemWithParameters):
    """Lathe threading operation.

    Builds the parameter set for cutting external or internal threads and
    generates the corresponding G-code path.  For visualisation, x is the
    long (spindle) axis and y the cross axis; the emitted G-code remaps
    these to Z and X (with X scaled to diameter, see axis_scaling).
    """

    def __init__(self, model=None, tools=None, viewUpdater=None, **kwargs):
        """Set up all tool parameters and load the default thread preset.

        tools       -- list of available tools (must be non-empty); default
                       None avoids the mutable-default-argument pitfall.
        viewUpdater -- optional callback invoked with the freshly generated
                       path after every parameter change.
        """
        ItemWithParameters.__init__(self, **kwargs)
        if tools is None:
            tools = []
        self.model = None
        self.patterns = []
        self.path = None
        # Remap lathe axes for output: visualisation uses x as the long axis
        # and y as the cross axis; G-code output uses Z as long axis, X as cross.
        self.axis_mapping = ["Z", "X", "Y"]
        # Output scaling: factor -2.0 on the cross axis converts the negative
        # y radius used in the visualisation into a positive diameter.
        self.axis_scaling = [1.0, -2.0, 0.0]
        # Preset name -> [outer diameter, inner diameter, pitch, cone angle (deg)]
        self.presets = OrderedDict([("M4 x 0.7", [ 4, 3.3 , 0.7, 0]),
                        ("M5 x 0.8", [ 5, 4.2 , 0.8, 0]),
                        ("M6 x 1", [ 6, 5.0 , 1.0 , 0]),
                        ("M8 x 1.25", [ 8, 6.75, 1.25, 0]),
                        ("M10 x 1.5",[10, 8.5 , 1.5 , 0]),
                        ("M12 x 1.5",[12, 10.5, 1.5, 0]),
                        ("M12 x 1.75",[12, 10.25, 1.75, 0]),
                        ("M14 x 2", [14, 11.8, 2.0 , 0]),
                        ("M16 x 2", [16, 14 , 2.0 , 0]),
                        ("M20 x 2.5", [20, 17.5, 2.5, 0]),
                        ("NPT 1/8",[0.38*25.4, 0.339*25.4, 1.0/27.0*25.4, 1.7899]),
                        ("NPT 1/4", [13.6, 11.113, 1.0 / 18.0 * 25.4, 1.7899])])
        self.presetParameter = ChoiceParameter(parent=self, name="Presets", choices=self.presets.keys(), value="M10 x 1.5", callback=self.loadPreset)
        self.tool = ChoiceParameter(parent=self, name="Tool", choices=tools, value=tools[0])
        self.viewUpdater = viewUpdater
        self.leftBound = NumericalParameter(parent=self, name="left boundary", value=-10, step=0.1, callback=self.generatePath)
        self.rightBound = NumericalParameter(parent=self, name="right boundary", value=0, step=0.1, callback=self.generatePath)
        self.toolSide = ChoiceParameter(parent=self, name="Tool side", choices=["external", "internal"], value="external", callback=self.generatePath)
        self.direction = ChoiceParameter(parent=self, name="Direction", choices=["right to left", "left to right"], value="right to left", callback=self.generatePath)
        self.model = model.object
        self.pitch = NumericalParameter(parent=self, name="pitch", value=1.0, min=0.0001, step=0.01, callback=self.generatePath)
        self.start_diameter = NumericalParameter(parent=self, name="start diameter", value=10.0, min=0.1, step=0.1, callback=self.generatePath)
        self.end_diameter = NumericalParameter(parent=self, name="end diameter", value=10.0, min=0.1, step=0.1, callback=self.generatePath)
        self.coneAngle = NumericalParameter(parent=self, name='cone angle', value=0.0, min=-89.9, max=89.9, step=0.01, callback=self.generatePath)
        self.stepover = NumericalParameter(parent=self, name="stepover", value=0.2, min=0.0001, step=0.01, callback=self.generatePath)
        self.retract = NumericalParameter(parent=self, name="retract", value=1.0, min=0.0001, step=0.1, callback=self.generatePath)
        self.parameters = [self.presetParameter, [self.leftBound, self.rightBound], self.toolSide, self.direction, self.retract,
                           self.pitch, self.start_diameter, self.end_diameter, self.coneAngle, self.stepover]
        self.patterns = None
        self.loadPreset(self.presetParameter)

    def setThread(self, outer_diameter, inner_diameter, pitch, angle):
        """Apply a thread geometry; start/end diameters depend on the tool side."""
        if self.toolSide.getValue() == "external":
            self.start_diameter.updateValue(outer_diameter)
            self.end_diameter.updateValue(inner_diameter)
        else:
            self.start_diameter.updateValue(inner_diameter)
            self.end_diameter.updateValue(outer_diameter)
        self.pitch.updateValue(pitch)
        self.coneAngle.updateValue(angle)
        self.generatePath(None)

    def loadPreset(self, parameter):
        """Load the thread geometry of the currently selected preset."""
        params = self.presets[parameter.value]
        self.setThread(*params)

    def _thread_passes(self, sign):
        """Generate the threading passes shared by both thread sides.

        sign is +1.0 for external threads (stepping toward less-negative y,
        retracting away from the work) and -1.0 for internal threads.
        Emits G95 (feed per revolution) before the passes and restores G94
        afterwards.
        """
        offset_path = []
        y = -self.start_diameter.getValue() / 2.0
        retract_y = y - sign * self.retract.getValue()
        stepover = self.stepover.getValue()
        start_x = self.rightBound.getValue()
        end_x = self.leftBound.getValue()
        if self.direction.getValue() == "left to right":
            start_x, end_x = end_x, start_x
        cone_angle = self.coneAngle.getValue() / 180.0 * PI
        cone_offset = abs(start_x - end_x) * sin(cone_angle)
        target_y = -self.end_diameter.getValue() / 2.0
        finish_passes = 2
        # Switch to feed-per-revolution mode so the axial feed tracks the spindle.
        offset_path.append(GCommand("G95"))
        while finish_passes > 0:
            y += sign * stepover
            if sign * (y - target_y) > 0:
                # Never cut past the target diameter; repeat spring passes there.
                y = target_y
                finish_passes -= 1
            offset_path.append(GPoint(position=(start_x, retract_y, 0), rapid=True))
            offset_path.append(GPoint(position=(start_x, y + sign * cone_offset, 0), rapid=True))
            # G4 dwell before the threading move -- presumably to settle/sync
            # with the spindle index; TODO confirm against controller docs.
            offset_path.append(GCommand("G4 P1"))
            offset_path.append(GPoint(position=(end_x, y, 0), rapid=False, feedrate=self.pitch.getValue()))
            offset_path.append(GPoint(position=(end_x, retract_y, 0), rapid=True))
            offset_path.append(GPoint(position=(start_x, retract_y, 0), rapid=True))
        # Restore feed-per-minute mode (previously missing for internal threads).
        offset_path.append(GCommand("G94"))
        return offset_path

    def external_thread(self):
        """Passes for an external thread (cutting from outside the part)."""
        return self._thread_passes(1.0)

    def internal_thread(self):
        """Passes for an internal thread (boring from inside the part)."""
        return self._thread_passes(-1.0)

    def generatePath(self, parameter=None):
        """(Re)generate the G-code path from the current parameter values.

        Also notifies the viewUpdater callback, if one was supplied.
        """
        if self.toolSide.getValue() == "external":
            offset_path = self.external_thread()
        else:
            offset_path = self.internal_thread()
        self.path = GCode(offset_path)
        self.path.default_feedrate = 50
        self.path.applyAxisMapping(self.axis_mapping)
        self.path.applyAxisScaling(self.axis_scaling)
        if self.viewUpdater is not None:
            self.viewUpdater(self.path)
        return self.path

    def calcPath(self):
        """Recompute and return the current path."""
        self.generatePath()
        return self.path

    def getCompletePath(self):
        """Return the complete (recomputed) path for this operation."""
        return self.calcPath()
| [
"collections.OrderedDict"
] | [((859, 1361), 'collections.OrderedDict', 'OrderedDict', (["[('M4 x 0.7', [4, 3.3, 0.7, 0]), ('M5 x 0.8', [5, 4.2, 0.8, 0]), ('M6 x 1',\n [6, 5.0, 1.0, 0]), ('M8 x 1.25', [8, 6.75, 1.25, 0]), ('M10 x 1.5', [10,\n 8.5, 1.5, 0]), ('M12 x 1.5', [12, 10.5, 1.5, 0]), ('M12 x 1.75', [12, \n 10.25, 1.75, 0]), ('M14 x 2', [14, 11.8, 2.0, 0]), ('M16 x 2', [16, 14,\n 2.0, 0]), ('M20 x 2.5', [20, 17.5, 2.5, 0]), ('NPT 1/8', [0.38 * 25.4, \n 0.339 * 25.4, 1.0 / 27.0 * 25.4, 1.7899]), ('NPT 1/4', [13.6, 11.113, \n 1.0 / 18.0 * 25.4, 1.7899])]"], {}), "([('M4 x 0.7', [4, 3.3, 0.7, 0]), ('M5 x 0.8', [5, 4.2, 0.8, 0]),\n ('M6 x 1', [6, 5.0, 1.0, 0]), ('M8 x 1.25', [8, 6.75, 1.25, 0]), (\n 'M10 x 1.5', [10, 8.5, 1.5, 0]), ('M12 x 1.5', [12, 10.5, 1.5, 0]), (\n 'M12 x 1.75', [12, 10.25, 1.75, 0]), ('M14 x 2', [14, 11.8, 2.0, 0]), (\n 'M16 x 2', [16, 14, 2.0, 0]), ('M20 x 2.5', [20, 17.5, 2.5, 0]), (\n 'NPT 1/8', [0.38 * 25.4, 0.339 * 25.4, 1.0 / 27.0 * 25.4, 1.7899]), (\n 'NPT 1/4', [13.6, 11.113, 1.0 / 18.0 * 25.4, 1.7899])])\n", (870, 1361), False, 'from collections import OrderedDict\n')] |
"""Spectrum module"""
import numpy as np
from scipy.stats import norm
def pow2db(power: np.ndarray) -> np.ndarray:
    """
    Convert power to decibels (10*log10).

    Note: ``np.ndarray`` is the correct annotation; ``np.array`` is a factory
    function, not a type.
    https://de.mathworks.com/help/signal/ref/pow2db.html
    """
    return 10.0 * np.log10(power)
def db2pow(decibel: np.ndarray) -> np.ndarray:
    """
    Convert decibels to power (10**(dB/10)).

    Inverse of :func:`pow2db`.
    https://de.mathworks.com/help/signal/ref/db2pow.html
    """
    return np.power(10.0, decibel / 10.0)
def mag2db(power: np.ndarray) -> np.ndarray:
    """
    Convert magnitude to decibels (20*log10).

    Equivalent to ``2 * pow2db(power)`` (scaling by 2 is exact in binary
    floating point), written self-contained.
    https://de.mathworks.com/help/signal/ref/mag2db.html
    """
    return 20.0 * np.log10(power)
def signal_energy(signal: np.ndarray) -> np.float64:
    """Return the signal energy: the sum of squared samples (as float64 scalar)."""
    return np.sum(np.square(signal, dtype=np.float64))
def compute_normalized_spectral_difference(
    reference_spectrum: np.ndarray, distorted_spectrum: np.ndarray
) -> np.ndarray:
    """Compute the normalized difference of two dB spectra, per row.

    Both inputs are 2-D dB spectra; the absolute power difference is summed
    over axis 1 (assumed to index frequency bins -- TODO confirm) and
    normalized by the reference power of the same row.  Returns one dB value
    per row.
    """
    difference = np.sum(
        np.abs(db2pow(reference_spectrum) - db2pow(distorted_spectrum)), axis=1
    )
    # eps guards against division by zero for all-silent reference rows.
    return pow2db(
        difference
        / (np.sum(np.abs(db2pow(reference_spectrum)), axis=1) + np.finfo(float).eps)
    )
def compute_spectral_support(spectrum: np.ndarray, scale: float = 12) -> np.ndarray:
    """Compute the spectral support of a perceptual spectrum.

    Applies a zero-mean normal CDF with standard deviation ``scale`` along
    axis 1, mapping each value into (0, 1); output has the input's shape.
    """
    return np.apply_along_axis(norm.cdf, 1, spectrum, scale=scale)
| [
"numpy.log10",
"numpy.power",
"numpy.square",
"numpy.apply_along_axis",
"numpy.finfo"
] | [((409, 439), 'numpy.power', 'np.power', (['(10.0)', '(decibel / 10.0)'], {}), '(10.0, decibel / 10.0)\n', (417, 439), True, 'import numpy as np\n'), ((1374, 1429), 'numpy.apply_along_axis', 'np.apply_along_axis', (['norm.cdf', '(1)', 'spectrum'], {'scale': 'scale'}), '(norm.cdf, 1, spectrum, scale=scale)\n', (1393, 1429), True, 'import numpy as np\n'), ((235, 250), 'numpy.log10', 'np.log10', (['power'], {}), '(power)\n', (243, 250), True, 'import numpy as np\n'), ((726, 761), 'numpy.square', 'np.square', (['signal'], {'dtype': 'np.float64'}), '(signal, dtype=np.float64)\n', (735, 761), True, 'import numpy as np\n'), ((1159, 1174), 'numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (1167, 1174), True, 'import numpy as np\n')] |
import pytest
from deepctr.models import FiBiNET
from ..utils import check_model, SAMPLE_SIZE, get_test_data, get_test_data_estimator, check_estimator, TEST_Estimator
@pytest.mark.parametrize('bilinear_type', ["each", "all", "interaction"])
def test_FiBiNET(bilinear_type):
    """Smoke-test FiBiNET model construction and fitting for each bilinear type."""
    x, y, feature_columns = get_test_data(SAMPLE_SIZE, sparse_feature_num=2, dense_feature_num=2)
    model = FiBiNET(feature_columns, feature_columns, bilinear_type=bilinear_type,
                    dnn_hidden_units=[4, ], dnn_dropout=0.5, )
    check_model(model, "FiBiNET", x, y)
@pytest.mark.parametrize('bilinear_type', ["interaction"])
def test_FiBiNETEstimator(bilinear_type):
    """Smoke-test the FiBiNET estimator; no-op unless TEST_Estimator is enabled."""
    if not TEST_Estimator:
        return
    from deepctr.estimator import FiBiNETEstimator
    linear_cols, dnn_cols, input_fn = get_test_data_estimator(SAMPLE_SIZE, sparse_feature_num=2,
                                                            dense_feature_num=2)
    model = FiBiNETEstimator(linear_cols, dnn_cols, bilinear_type=bilinear_type,
                             dnn_hidden_units=[4, ], dnn_dropout=0.5, )
    check_estimator(model, input_fn)
if __name__ == "__main__":
    # No standalone behaviour; the tests are collected and run by pytest.
    pass
| [
"pytest.mark.parametrize",
"deepctr.models.FiBiNET",
"deepctr.estimator.FiBiNETEstimator"
] | [((171, 243), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""bilinear_type"""', "['each', 'all', 'interaction']"], {}), "('bilinear_type', ['each', 'all', 'interaction'])\n", (194, 243), False, 'import pytest\n'), ((638, 695), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""bilinear_type"""', "['interaction']"], {}), "('bilinear_type', ['interaction'])\n", (661, 695), False, 'import pytest\n'), ((460, 573), 'deepctr.models.FiBiNET', 'FiBiNET', (['feature_columns', 'feature_columns'], {'bilinear_type': 'bilinear_type', 'dnn_hidden_units': '[4]', 'dnn_dropout': '(0.5)'}), '(feature_columns, feature_columns, bilinear_type=bilinear_type,\n dnn_hidden_units=[4], dnn_dropout=0.5)\n', (467, 573), False, 'from deepctr.models import FiBiNET\n'), ((1109, 1243), 'deepctr.estimator.FiBiNETEstimator', 'FiBiNETEstimator', (['linear_feature_columns', 'dnn_feature_columns'], {'bilinear_type': 'bilinear_type', 'dnn_hidden_units': '[4]', 'dnn_dropout': '(0.5)'}), '(linear_feature_columns, dnn_feature_columns, bilinear_type\n =bilinear_type, dnn_hidden_units=[4], dnn_dropout=0.5)\n', (1125, 1243), False, 'from deepctr.estimator import FiBiNETEstimator\n')] |
from __future__ import absolute_import
from unittest import TestCase
from plotly import exceptions
from plotly.graph_objs import Bar, Frames
class FramesTest(TestCase):
    """Tests for the legacy plotly ``Frames`` graph object."""

    def test_instantiation(self):
        """Frames accepts dicts, strings, and an empty constructor."""
        native_frames = [
            {},
            {'data': []},
            'foo',
            {'data': [], 'group': 'baz', 'layout': {}, 'name': 'hoopla'}
        ]
        Frames(native_frames)
        Frames()

    def test_string_frame(self):
        """Plain strings are kept as-is and rendered verbatim by to_string."""
        frames = Frames()
        frames.append({'group': 'baz', 'data': []})
        frames.append('foobar')
        self.assertEqual(frames[1], 'foobar')
        self.assertEqual(frames.to_string(),
                         "Frames([\n"
                         "    dict(\n"
                         "        data=Data(),\n"
                         "    group='baz'\n"
                         "    ),\n"
                         "    'foobar'\n"
                         "])")

    def test_non_string_frame(self):
        """Only dicts and strings are valid frame entries."""
        frames = Frames()
        frames.append({})

        with self.assertRaises(exceptions.PlotlyListEntryError):
            frames.append([])

        with self.assertRaises(exceptions.PlotlyListEntryError):
            frames.append(0)

    def test_deeply_nested_layout_attributes(self):
        """Layout attributes nested inside a frame stay fully navigable."""
        frames = Frames()
        frames.append({})
        frames[0].layout.xaxis.showexponent = 'all'

        # It's OK if this needs to change, but we should check *something*.
        self.assertEqual(
            frames[0].layout.font._get_valid_attributes(),
            {'color', 'family', 'size'}
        )

    def test_deeply_nested_data_attributes(self):
        """Trace attributes nested inside a frame stay fully navigable."""
        frames = Frames()
        frames.append({})
        frames[0].data = [Bar()]
        frames[0].data[0].marker.color = 'red'

        # It's OK if this needs to change, but we should check *something*.
        self.assertEqual(
            frames[0].data[0].marker.line._get_valid_attributes(),
            {'colorsrc', 'autocolorscale', 'cmin', 'colorscale', 'color',
             'reversescale', 'width', 'cauto', 'widthsrc', 'cmax'}
        )

    def test_frame_only_attrs(self):
        """Frame entries expose only the frame-level attribute set."""
        frames = Frames()
        frames.append({})

        # It's OK if this needs to change, but we should check *something*.
        self.assertEqual(
            frames[0]._get_valid_attributes(),
            {'group', 'name', 'data', 'layout', 'baseframe', 'traces'}
        )
| [
"plotly.graph_objs.Frames",
"plotly.graph_objs.Bar"
] | [((388, 409), 'plotly.graph_objs.Frames', 'Frames', (['native_frames'], {}), '(native_frames)\n', (394, 409), False, 'from plotly.graph_objs import Bar, Frames\n'), ((418, 426), 'plotly.graph_objs.Frames', 'Frames', ([], {}), '()\n', (424, 426), False, 'from plotly.graph_objs import Bar, Frames\n'), ((478, 486), 'plotly.graph_objs.Frames', 'Frames', ([], {}), '()\n', (484, 486), False, 'from plotly.graph_objs import Bar, Frames\n'), ((1002, 1010), 'plotly.graph_objs.Frames', 'Frames', ([], {}), '()\n', (1008, 1010), False, 'from plotly.graph_objs import Bar, Frames\n'), ((1298, 1306), 'plotly.graph_objs.Frames', 'Frames', ([], {}), '()\n', (1304, 1306), False, 'from plotly.graph_objs import Bar, Frames\n'), ((1665, 1673), 'plotly.graph_objs.Frames', 'Frames', ([], {}), '()\n', (1671, 1673), False, 'from plotly.graph_objs import Bar, Frames\n'), ((2156, 2164), 'plotly.graph_objs.Frames', 'Frames', ([], {}), '()\n', (2162, 2164), False, 'from plotly.graph_objs import Bar, Frames\n'), ((1726, 1731), 'plotly.graph_objs.Bar', 'Bar', ([], {}), '()\n', (1729, 1731), False, 'from plotly.graph_objs import Bar, Frames\n')] |
"""Downloads the RAVDESS Video Dataset."""
import os
import sys
import zipfile
import argparse
import urllib.request
NUM_ACTORS = 24
DATA_TYPES = ['song', 'speech']
BASE_DOWNLOAD_URL = 'https://zenodo.org/record/1188976/files/{0}?download=1'
def main(dest):
    """Download and extract every RAVDESS video archive into *dest*.

    Already-downloaded archives and already-extracted actors are skipped;
    individual download/extract failures are logged without aborting the run.
    """
    for datatype in DATA_TYPES:
        base_filename = 'Video_' + datatype.capitalize() + '_Actor_{0}.zip'
        for i in range(1, NUM_ACTORS + 1):
            actor_id = str(i).zfill(2)  # Actor ids are zero-padded to two digits
            filename = base_filename.format(actor_id)
            download_url = BASE_DOWNLOAD_URL.format(filename)
            filepath = os.path.join(dest, filename)
            if not os.path.exists(filepath):
                try:
                    print('Downloading ', download_url, 'to', filepath)
                    urllib.request.urlretrieve(download_url, filepath)
                # Best effort, but narrowed from a bare except so that
                # KeyboardInterrupt/SystemExit still abort the run.
                except Exception:
                    print('Failed to download. Skipping ', download_url)

            extract_dirpath = os.path.join(dest, datatype)
            extract_filepath = os.path.join(extract_dirpath, 'Actor_' + actor_id)
            os.makedirs(extract_dirpath, exist_ok=True)
            if not os.path.exists(extract_filepath):
                try:
                    print('Extracting ', filepath, 'to', extract_filepath)
                    # Context manager guarantees the zip is closed even if
                    # extractall raises (the original leaked the handle then).
                    with zipfile.ZipFile(filepath, 'r') as zip_ref:
                        zip_ref.extractall(extract_dirpath)
                except Exception:
                    print('Failed to extract. Skipping ', filepath)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Command line options')
    parser.add_argument('--dest', type=str, dest='dest', default='../data')
    args = parser.parse_args(sys.argv[1:])
    # Forward only the non-None parsed options to main() as keyword arguments.
    main(**{k: v for (k, v) in vars(args).items() if v is not None})
| [
"os.path.exists",
"os.makedirs",
"zipfile.ZipFile",
"argparse.ArgumentParser",
"os.path.join"
] | [((1503, 1562), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Command line options"""'}), "(description='Command line options')\n", (1526, 1562), False, 'import argparse\n'), ((580, 608), 'os.path.join', 'os.path.join', (['dest', 'filename'], {}), '(dest, filename)\n', (592, 608), False, 'import os\n'), ((920, 948), 'os.path.join', 'os.path.join', (['dest', 'datatype'], {}), '(dest, datatype)\n', (932, 948), False, 'import os\n'), ((974, 1024), 'os.path.join', 'os.path.join', (['extract_dirpath', "('Actor_' + actor_id)"], {}), "(extract_dirpath, 'Actor_' + actor_id)\n", (986, 1024), False, 'import os\n'), ((623, 647), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (637, 647), False, 'import os\n'), ((1039, 1070), 'os.path.exists', 'os.path.exists', (['extract_dirpath'], {}), '(extract_dirpath)\n', (1053, 1070), False, 'import os\n'), ((1080, 1108), 'os.makedirs', 'os.makedirs', (['extract_dirpath'], {}), '(extract_dirpath)\n', (1091, 1108), False, 'import os\n'), ((1123, 1155), 'os.path.exists', 'os.path.exists', (['extract_filepath'], {}), '(extract_filepath)\n', (1137, 1155), False, 'import os\n'), ((1255, 1285), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filepath', '"""r"""'], {}), "(filepath, 'r')\n", (1270, 1285), False, 'import zipfile\n')] |
import random
import xml.etree.ElementTree
import requests
def get_data(host, parameters, timeout=None):
    """Query a softliq water-softener device over its mux_http endpoint.

    host       -- device host name or IP address
    parameters -- iterable of parameter codes to request (e.g. 'D_Y_2_1')
    timeout    -- optional requests timeout in seconds; the default None
                  keeps the previous wait-forever behaviour, but passing a
                  value prevents the call from hanging on an unreachable device

    Returns a dict mapping each XML tag in the response (except 'code')
    to its text value.  Raises ValueError when the response is not valid XML.
    """
    result_data = {}
    url = f'http://{host}/mux_http'
    # The device protocol appears to expect a random request id in this
    # range -- TODO confirm against device documentation.
    request_id = random.randint(4000, 6000)
    payload_header = f'id={request_id}&show='
    data = '|'.join(parameters)
    payload = f'{payload_header}{data}~'
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    response_data = requests.post(url, data=payload, headers=headers, timeout=timeout)
    try:
        root = xml.etree.ElementTree.fromstring(response_data.text)
    except xml.etree.ElementTree.ParseError as err:
        raise ValueError(f"[-] failed to parse xml: {err}") from err
    for child in root:
        # 'code' is a status element, not a data parameter.
        if child.tag == 'code':
            continue
        result_data[child.tag] = child.text
    return result_data
if __name__ == '__main__':
    host_device = 'softliq-sc-ae-85-48'
    # Parameter catalogue: daily water-consumption counters for the last
    # 14 days.  Only the dict KEYS are sent to the device; the values are
    # human-readable metadata (the 'note' texts are German: 'gestern' =
    # yesterday, 'vor N Tagen' = N days ago).
    input_data = {
        'D_Y_2_1': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'gestern'},
        'D_Y_2_2': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 2 Tagen'},
        'D_Y_2_3': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 3 Tagen'},
        'D_Y_2_4': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 4 Tagen'},
        'D_Y_2_5': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 5 Tagen'},
        'D_Y_2_6': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 6 Tagen'},
        'D_Y_2_7': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 7 Tagen'},
        'D_Y_2_8': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 8 Tagen'},
        'D_Y_2_9': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 9 Tagen'},
        'D_Y_2_10': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 10 Tagen'},
        'D_Y_2_11': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 11 Tagen'},
        'D_Y_2_12': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 12 Tagen'},
        'D_Y_2_13': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 13 Tagen'},
        'D_Y_2_14': {'access': 'read', 'device': '', 'value': 'Int', 'unit': '[l]', 'code': '', 'note': 'vor 14 Tagen'},
    }
    result = get_data(host_device, input_data)
    print(result)
| [
"requests.post",
"random.randint"
] | [((170, 196), 'random.randint', 'random.randint', (['(4000)', '(6000)'], {}), '(4000, 6000)\n', (184, 196), False, 'import random\n'), ((406, 455), 'requests.post', 'requests.post', (['url'], {'data': 'payload', 'headers': 'headers'}), '(url, data=payload, headers=headers)\n', (419, 455), False, 'import requests\n')] |
"""
Tests for python modules
"""
import unittest
# testing two different functions in my package
from lambdata_trevorjames.things import Character, Wizard
# import from other modules for testing
class UnitTests(unittest.TestCase):
    """Unit tests for the Character and Wizard classes."""

    def test_character(self):
        """Character stores the fields given to the constructor."""
        x = Character('trevor', 200, 100)
        self.assertEqual(x.name, 'trevor')
        self.assertEqual(x.weight, 200)

    def test_weight(self):
        """add_weight increases the weight attribute by the given amount."""
        x = Character('trevor', 200, 100)
        x.add_weight(10)
        self.assertEqual(x.weight, 210)

    def test_wizard(self):
        """Wizard inherits Character and can cast spells."""
        x = Wizard('trevor', 200, 100, 'expeliamous')
        # Fixed: the original called cast_spell() once and discarded the
        # result before asserting on a second call.
        self.assertEqual(x.cast_spell(), 'Abrakadabra')
if __name__ == '__main__':
    # Run the test suite when this module is executed directly.
    unittest.main()
| [
"unittest.main",
"lambdata_trevorjames.things.Character",
"lambdata_trevorjames.things.Wizard"
] | [((964, 979), 'unittest.main', 'unittest.main', ([], {}), '()\n', (977, 979), False, 'import unittest\n'), ((323, 352), 'lambdata_trevorjames.things.Character', 'Character', (['"""trevor"""', '(200)', '(100)'], {}), "('trevor', 200, 100)\n", (332, 352), False, 'from lambdata_trevorjames.things import Character, Wizard\n'), ((572, 601), 'lambdata_trevorjames.things.Character', 'Character', (['"""trevor"""', '(200)', '(100)'], {}), "('trevor', 200, 100)\n", (581, 601), False, 'from lambdata_trevorjames.things import Character, Wizard\n'), ((765, 806), 'lambdata_trevorjames.things.Wizard', 'Wizard', (['"""trevor"""', '(200)', '(100)', '"""expeliamous"""'], {}), "('trevor', 200, 100, 'expeliamous')\n", (771, 806), False, 'from lambdata_trevorjames.things import Character, Wizard\n')] |
import boto3
import os
import uuid
from urllib.parse import unquote_plus
from PIL import Image
s3_client = boto3.client('s3')
def resize_image(picture_file_path, crop_dimensions=None):
    """Optionally crop, then resize an image, writing a full-size JPEG and a
    thumbnail JPEG alongside the source file.

    Args:
        picture_file_path: path of the image to process.
        crop_dimensions: optional 4-tuple box passed to PIL's Image.crop.

    Returns:
        Tuple of (cropped_filename, thumbnail_filename) for the written files.
    """
    image = Image.open(picture_file_path)
    if crop_dimensions:
        image = image.crop(crop_dimensions)

    # Target sizes are taken from the environment.
    resize_width = int(os.environ.get('RESIZE_WIDTH'))
    resize_height = int(os.environ.get('RESIZE_HEIGHT'))
    image = image.resize((resize_width, resize_height))

    # NOTE(review): the format argument is the *full* source path, so the
    # second os.path.join component is absolute and effectively becomes
    # "<picture_file_path>_cropped.jpg"; kept as-is to preserve behaviour.
    cropped_filename = os.path.join(os.path.dirname(picture_file_path), "{}_cropped.jpg".format(picture_file_path))
    thumbnail_filename = os.path.join(os.path.dirname(picture_file_path), "{}_thumbnail.jpg".format(picture_file_path))
    image.save(cropped_filename)

    thumb_width = int(os.environ.get('THUMBNAIL_WIDTH'))
    thumb_height = int(os.environ.get('THUMBNAIL_HEIGHT'))
    image = image.resize((thumb_width, thumb_height))
    image.save(thumbnail_filename)
    return (cropped_filename, thumbnail_filename)
def handler(event, context):
    """S3-triggered Lambda entry point.

    For each record: download the new object, produce a resized copy and a
    thumbnail, upload both to the Amplify storage bucket, then delete the
    original object from the triggering bucket.
    """
    amplify_storage_bucket_name = os.environ.get('STORAGE_PLATELETSTORAGE_BUCKETNAME')
    print(os.environ)
    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        key = unquote_plus(record['s3']['object']['key'])
        # Flatten the key so it is usable as part of a /tmp filename.
        tmpkey = key.replace('/', '')
        download_path = '/tmp/{}{}'.format(uuid.uuid4(), tmpkey)
        print('Downloading {} from bucket {} to {}'.format(key, bucket, download_path))
        s3_client.download_file(bucket, key, download_path)
        resized_path, thumb_path = resize_image(download_path)
        # Thumbnail is stored under "<key without extension>_thumbnail.jpg".
        base_key = key.split('.')[0]
        s3_client.upload_file(resized_path, amplify_storage_bucket_name, key)
        s3_client.upload_file(thumb_path, amplify_storage_bucket_name, "{}_thumbnail.jpg".format(base_key))
        s3_client.delete_object(Bucket=bucket, Key=key)
| [
"PIL.Image.open",
"boto3.client",
"os.environ.get",
"uuid.uuid4",
"os.path.dirname",
"urllib.parse.unquote_plus"
] | [((108, 126), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (120, 126), False, 'import boto3\n'), ((238, 267), 'PIL.Image.open', 'Image.open', (['picture_file_path'], {}), '(picture_file_path)\n', (248, 267), False, 'from PIL import Image\n'), ((352, 382), 'os.environ.get', 'os.environ.get', (['"""RESIZE_WIDTH"""'], {}), "('RESIZE_WIDTH')\n", (366, 382), False, 'import os\n'), ((399, 430), 'os.environ.get', 'os.environ.get', (['"""RESIZE_HEIGHT"""'], {}), "('RESIZE_HEIGHT')\n", (413, 430), False, 'import os\n'), ((856, 889), 'os.environ.get', 'os.environ.get', (['"""THUMBNAIL_WIDTH"""'], {}), "('THUMBNAIL_WIDTH')\n", (870, 889), False, 'import os\n'), ((915, 949), 'os.environ.get', 'os.environ.get', (['"""THUMBNAIL_HEIGHT"""'], {}), "('THUMBNAIL_HEIGHT')\n", (929, 949), False, 'import os\n'), ((1247, 1299), 'os.environ.get', 'os.environ.get', (['"""STORAGE_PLATELETSTORAGE_BUCKETNAME"""'], {}), "('STORAGE_PLATELETSTORAGE_BUCKETNAME')\n", (1261, 1299), False, 'import os\n'), ((599, 633), 'os.path.dirname', 'os.path.dirname', (['picture_file_path'], {}), '(picture_file_path)\n', (614, 633), False, 'import os\n'), ((717, 751), 'os.path.dirname', 'os.path.dirname', (['picture_file_path'], {}), '(picture_file_path)\n', (732, 751), False, 'import os\n'), ((1412, 1455), 'urllib.parse.unquote_plus', 'unquote_plus', (["record['s3']['object']['key']"], {}), "(record['s3']['object']['key'])\n", (1424, 1455), False, 'from urllib.parse import unquote_plus\n'), ((1533, 1545), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1543, 1545), False, 'import uuid\n')] |
import numpy as np
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
def data_to_2d_heatmap(X):
    """Project samples onto their first two principal components and plot
    the point density as a 2-D heatmap.

    Args:
        X: array-like of shape (n_samples, n_features).
    """
    projector = PCA(n_components=2)
    projector.fit(X)
    projected = np.array(projector.transform(X))
    xs = projected[:, 0]
    ys = projected[:, 1]

    # Bin the projected points into a 32x32 density grid.
    heatmap, xedges, yedges = np.histogram2d(xs, ys, bins=32)
    extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]]

    plt.clf()
    plt.imshow(heatmap, extent=extent, cmap="jet")
    plt.show()
# Demo: visualise 100k random 100-dimensional samples as a PCA heatmap.
num_samples, num_dimensions = 100000, 100
X = np.random.rand(num_samples, num_dimensions)
data_to_2d_heatmap(X)
| [
"matplotlib.pyplot.imshow",
"numpy.random.rand",
"sklearn.decomposition.PCA",
"matplotlib.pyplot.clf",
"numpy.array",
"numpy.histogram2d",
"matplotlib.pyplot.show"
] | [((728, 771), 'numpy.random.rand', 'np.random.rand', (['num_samples', 'num_dimensions'], {}), '(num_samples, num_dimensions)\n', (742, 771), True, 'import numpy as np\n'), ((131, 150), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(2)'}), '(n_components=2)\n', (134, 150), False, 'from sklearn.decomposition import PCA\n'), ((207, 225), 'numpy.array', 'np.array', (['X_simple'], {}), '(X_simple)\n', (215, 225), True, 'import numpy as np\n'), ((447, 490), 'numpy.histogram2d', 'np.histogram2d', (['x_simple', 'y_simple'], {'bins': '(32)'}), '(x_simple, y_simple, bins=32)\n', (461, 490), True, 'import numpy as np\n'), ((551, 560), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (558, 560), True, 'import matplotlib.pyplot as plt\n'), ((618, 664), 'matplotlib.pyplot.imshow', 'plt.imshow', (['heatmap'], {'extent': 'extent', 'cmap': '"""jet"""'}), "(heatmap, extent=extent, cmap='jet')\n", (628, 664), True, 'import matplotlib.pyplot as plt\n'), ((667, 677), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (675, 677), True, 'import matplotlib.pyplot as plt\n')] |
# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple invertible tokenizer.
Converts from a unicode string to a list of tokens
(represented as Unicode strings).
This tokenizer has the following desirable properties:
- It is invertible.
- Alphanumeric characters are broken away from non-alphanumeric characters.
- A single space between words does not produce an extra token.
- The full Unicode punctuation and separator set is recognized.
The tokenization algorithm is as follows:
1. Split the text into a list of tokens, splitting at every boundary of an
alphanumeric character and a non-alphanumeric character. This produces
a list which alternates between "alphanumeric tokens"
(strings of alphanumeric characters) and "non-alphanumeric tokens"
(strings of of non-alphanumeric characters).
2. Remove every token consisting of a single space, unless it is
the very first or very last token in the list. These tokens are now
implied by the fact that there are two adjacent alphanumeric tokens.
e.g. u"Dude - that's so cool."
-> [u"Dude", u" - ", u"that", u"'", u"s", u"so", u"cool", u"."]
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import defaultdict
import sys
import unicodedata
# Dependency imports
from six import PY2
from six import unichr # pylint: disable=redefined-builtin
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
# Conversion between Unicode and UTF-8, if required (on Python2).
if PY2:
    def _native_to_unicode(s):
        return s.decode("utf-8")
else:
    def _native_to_unicode(s):
        return s

# Every Unicode letter ("L*" categories) and number ("N*" categories).
_ALPHANUMERIC_CHAR_SET = {
    unichr(i)
    for i in xrange(sys.maxunicode)
    if unicodedata.category(unichr(i))[0] in ("L", "N")
}
def encode(text):
    """Encode a unicode string as a list of tokens.

    Splits at every boundary between alphanumeric and non-alphanumeric
    characters; interior single-space tokens are dropped because they are
    implied by two adjacent alphanumeric tokens.

    Args:
        text: a unicode string

    Returns:
        a list of tokens as Unicode strings
    """
    if not text:
        return []
    tokens = []
    start = 0
    # Per-character classification of the input.
    alnum_flags = [ch in _ALPHANUMERIC_CHAR_SET for ch in text]
    for pos in xrange(1, len(text)):
        # A flip between alphanumeric and non-alphanumeric ends a token.
        if alnum_flags[pos] == alnum_flags[pos - 1]:
            continue
        piece = text[start:pos]
        # Drop implied interior single spaces; a leading token is kept.
        if piece != u" " or start == 0:
            tokens.append(piece)
        start = pos
    tokens.append(text[start:])
    return tokens
def decode(tokens):
    """Decode a list of tokens to a unicode string.

    Args:
        tokens: a list of Unicode strings

    Returns:
        a unicode string
    """
    pieces = []
    is_alnum = [t[0] in _ALPHANUMERIC_CHAR_SET for t in tokens]
    for i, token in enumerate(tokens):
        # Re-insert the single space that encode() removed between two
        # adjacent alphanumeric tokens.
        if i > 0 and is_alnum[i - 1] and is_alnum[i]:
            pieces.append(u" ")
        pieces.append(token)
    return u"".join(pieces)
def corpus_token_counts(text_filepattern, corpus_max_lines,
                        split_on_newlines=True):
    """Read the corpus and compute a dictionary of token counts.

    Args:
        text_filepattern: a pattern matching one or more files
        corpus_max_lines: an integer - maximum total lines to read.
        split_on_newlines: a boolean. If true, then split files by lines and
            strip leading and trailing whitespace from each line.

    Returns:
        a dictionary from token to count.
    """

    def read_corpus():
        """Collect documents, stopping once corpus_max_lines is exceeded."""
        docs = []
        lines_read = 0
        for text_filename in tf.gfile.Glob(text_filepattern):
            with tf.gfile.Open(text_filename) as f:
                if not split_on_newlines:
                    # The whole file accumulates into one document.
                    docs.append("")
                for line in f:
                    if split_on_newlines:
                        # One document per (stripped) line.
                        docs.append(line.strip())
                    else:
                        docs[-1] += line
                    lines_read += 1
                    if corpus_max_lines > 0 and lines_read > corpus_max_lines:
                        return docs
        return docs

    counts = defaultdict(int)
    for doc in read_corpus():
        for token in encode(_native_to_unicode(doc)):
            counts[token] += 1
    return counts
| [
"tensorflow.gfile.Open",
"tensorflow.gfile.Glob",
"six.unichr",
"six.moves.xrange",
"collections.defaultdict"
] | [((4560, 4576), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (4571, 4576), False, 'from collections import defaultdict\n'), ((2296, 2305), 'six.unichr', 'unichr', (['i'], {}), '(i)\n', (2302, 2305), False, 'from six import unichr\n'), ((4040, 4071), 'tensorflow.gfile.Glob', 'tf.gfile.Glob', (['text_filepattern'], {}), '(text_filepattern)\n', (4053, 4071), True, 'import tensorflow as tf\n'), ((2315, 2337), 'six.moves.xrange', 'xrange', (['sys.maxunicode'], {}), '(sys.maxunicode)\n', (2321, 2337), False, 'from six.moves import xrange\n'), ((4119, 4147), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['text_filename'], {}), '(text_filename)\n', (4132, 4147), True, 'import tensorflow as tf\n'), ((2367, 2376), 'six.unichr', 'unichr', (['i'], {}), '(i)\n', (2373, 2376), False, 'from six import unichr\n'), ((2426, 2435), 'six.unichr', 'unichr', (['i'], {}), '(i)\n', (2432, 2435), False, 'from six import unichr\n')] |
from git import Repo
import subprocess
import os, shutil
class CustomError(Exception):
    """Raised when a required dependency directory cannot be found."""
# Locate the git repository two levels up and enumerate its local branches.
repo_path = "../../"
r = Repo(repo_path)
repo_heads = r.heads # or it's alias: r.branches
repo_heads_names = [h.name for h in repo_heads]
#kokkos_src = '/Users/bird/kokkos/'
#kokkos_install = '/Users/bird/kokkos/build/install'
#cabana_install = '/Users/bird/Cabana/build/build/install' # not a typo, it's in a dumb path
# Platforms (Kokkos backends) to build and benchmark against.
#platforms = ["Serial", "CPU", "GPU", "UVM"]
platforms = ["Serial", "CPU", "GPU"]
#platforms = ["CPU", "GPU"]
#platforms = ["GPU"]
#platforms = ["CPU"]
CXX = "g++"
# GPU architecture passed to the Kokkos build scripts.
#arch = 'Volta70'
arch = 'Kepler35'
# Generate the timing library before any builds run.
subprocess.check_call(['./timing_lib.sh'])
this_build_dir = 'build'
# Per-platform install directories, filled in by the dependency builds below.
kokkos_dirs = {}
cabana_dirs = {}
home_dir = os.environ['HOME']
# Build Dependencies
# TODO: make this configurable
kokkos_root = os.path.join(home_dir,'kokkos')
cabana_root = os.path.join(home_dir,'Cabana')
# Check we can find Kokkos and Cabana
if not os.path.isdir(kokkos_root):
    raise CustomError("Can't find kokkos")
if not os.path.isdir(cabana_root):
    raise CustomError("Can't find Cabana")
# Copy Kokkos and Cabana to be inside this dir
def copy_and_overwrite(from_path, to_path):
    """Mirror the tree at from_path into to_path, clobbering any prior copy."""
    destination_exists = os.path.exists(to_path)
    if destination_exists:
        # Remove the stale destination so copytree can recreate it.
        shutil.rmtree(to_path)
    shutil.copytree(from_path, to_path)
def copy_if_safe(from_path, to_path):
    """Copy the tree at from_path into to_path unless to_path already exists."""
    already_present = os.path.isdir(to_path)
    if not already_present:
        shutil.copytree(from_path, to_path)
# Stage local Kokkos/Cabana checkouts inside the build directory.
# only copy if they don't exist already
kokkos_new = os.path.join(this_build_dir,'kokkos')
copy_if_safe(kokkos_root, kokkos_new)
cabana_new = os.path.join(this_build_dir,'cabana')
copy_if_safe(cabana_root, cabana_new)
# Build Dependencies
# Build Kokkos then Cabana once per target platform; each platform installs
# into its own "build-<platform>" directory.
for plat in platforms:
    install_dir = "build-" + plat
    # Do Build
    print("build_kokkos.sh " + CXX + " " + kokkos_new + " " + install_dir + " " + plat + " " + arch)
    subprocess.check_call(['./build_kokkos.sh', CXX, kokkos_new, install_dir, plat, arch])
    # Cabana builds against the Kokkos install produced just above.
    print("./build_cabana.sh " + " " + CXX + " " + os.path.join(kokkos_new,install_dir,'install') + " " + cabana_new + " " + install_dir + " " + plat)
    subprocess.check_call(['./build_cabana.sh', CXX, os.path.join(kokkos_new,install_dir,'install'), cabana_new, install_dir, plat])
    # Save dirs, relative to root
    cabana_dirs[plat] = install_dir
    kokkos_dirs[plat] = install_dir
# Iterate over *local* git branches
# For every local branch x platform combination: clone the branch (if not
# already cloned) and run the benchmark build against that platform's
# Kokkos/Cabana installs.
for branch in repo_heads_names:
    print("Working on branch " + branch)
    for plat in platforms:
        print(plat)
        # TODO: throughout these scripts we assume ./instal is the install dir! abstract it.
        cabana_install = os.path.join( cabana_dirs[plat], 'install')
        kokkos_install = os.path.join( kokkos_dirs[plat], 'install')
        # For each repo, check it out into a new folder and build it
        #clone_path = './' + branch
        clone_path = os.path.join('./', this_build_dir, branch)
        print("!!!! WORKING ON " + clone_path)
        # look to see if the folder already exists:
        # NOTE(review): the clone is skipped when the directory already
        # exists, but the build below still runs either way.
        if not os.path.isdir(clone_path):
            # if it does... delete it (!)
            #print("Deleting " + clone_path)
            # We need to delete where it will build only one platforms worth,
            # or hoist the clone
            #shutil.rmtree(clone_path + build??)
            # OR if it does... skip
            #continue
            # clone it
            cloned = Repo.clone_from(
                repo_path,
                clone_path,
                branch=branch
            )
        pwd = os.getcwd()
        # Absolute install paths, required by the build script.
        kokkos_full_path = os.path.join(pwd, kokkos_new, kokkos_install)
        cabana_full_path = os.path.join(pwd, cabana_new, cabana_install)
        print("kk full path " + kokkos_full_path)
        print("./build_and_run.sh " + clone_path + " g++ " + kokkos_full_path + " " + cabana_full_path + " " + plat)
        subprocess.check_call(['./build_and_run.sh', clone_path, "g++", kokkos_full_path, cabana_full_path, plat])
| [
"os.path.exists",
"subprocess.check_call",
"git.Repo.clone_from",
"os.path.join",
"shutil.rmtree",
"shutil.copytree",
"os.getcwd",
"os.path.isdir",
"git.Repo"
] | [((185, 200), 'git.Repo', 'Repo', (['repo_path'], {}), '(repo_path)\n', (189, 200), False, 'from git import Repo\n'), ((684, 726), 'subprocess.check_call', 'subprocess.check_call', (["['./timing_lib.sh']"], {}), "(['./timing_lib.sh'])\n", (705, 726), False, 'import subprocess\n'), ((886, 918), 'os.path.join', 'os.path.join', (['home_dir', '"""kokkos"""'], {}), "(home_dir, 'kokkos')\n", (898, 918), False, 'import os, shutil\n'), ((932, 964), 'os.path.join', 'os.path.join', (['home_dir', '"""Cabana"""'], {}), "(home_dir, 'Cabana')\n", (944, 964), False, 'import os, shutil\n'), ((1526, 1564), 'os.path.join', 'os.path.join', (['this_build_dir', '"""kokkos"""'], {}), "(this_build_dir, 'kokkos')\n", (1538, 1564), False, 'import os, shutil\n'), ((1616, 1654), 'os.path.join', 'os.path.join', (['this_build_dir', '"""cabana"""'], {}), "(this_build_dir, 'cabana')\n", (1628, 1654), False, 'import os, shutil\n'), ((1010, 1036), 'os.path.isdir', 'os.path.isdir', (['kokkos_root'], {}), '(kokkos_root)\n', (1023, 1036), False, 'import os, shutil\n'), ((1088, 1114), 'os.path.isdir', 'os.path.isdir', (['cabana_root'], {}), '(cabana_root)\n', (1101, 1114), False, 'import os, shutil\n'), ((1258, 1281), 'os.path.exists', 'os.path.exists', (['to_path'], {}), '(to_path)\n', (1272, 1281), False, 'import os, shutil\n'), ((1318, 1353), 'shutil.copytree', 'shutil.copytree', (['from_path', 'to_path'], {}), '(from_path, to_path)\n', (1333, 1353), False, 'import os, shutil\n'), ((1892, 1982), 'subprocess.check_call', 'subprocess.check_call', (["['./build_kokkos.sh', CXX, kokkos_new, install_dir, plat, arch]"], {}), "(['./build_kokkos.sh', CXX, kokkos_new, install_dir,\n plat, arch])\n", (1913, 1982), False, 'import subprocess\n'), ((1291, 1313), 'shutil.rmtree', 'shutil.rmtree', (['to_path'], {}), '(to_path)\n', (1304, 1313), False, 'import os, shutil\n'), ((1404, 1426), 'os.path.isdir', 'os.path.isdir', (['to_path'], {}), '(to_path)\n', (1417, 1426), False, 'import os, shutil\n'), ((1436, 
1471), 'shutil.copytree', 'shutil.copytree', (['from_path', 'to_path'], {}), '(from_path, to_path)\n', (1451, 1471), False, 'import os, shutil\n'), ((2648, 2690), 'os.path.join', 'os.path.join', (['cabana_dirs[plat]', '"""install"""'], {}), "(cabana_dirs[plat], 'install')\n", (2660, 2690), False, 'import os, shutil\n'), ((2717, 2759), 'os.path.join', 'os.path.join', (['kokkos_dirs[plat]', '"""install"""'], {}), "(kokkos_dirs[plat], 'install')\n", (2729, 2759), False, 'import os, shutil\n'), ((2888, 2930), 'os.path.join', 'os.path.join', (['"""./"""', 'this_build_dir', 'branch'], {}), "('./', this_build_dir, branch)\n", (2900, 2930), False, 'import os, shutil\n'), ((3556, 3567), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3565, 3567), False, 'import os, shutil\n'), ((3596, 3641), 'os.path.join', 'os.path.join', (['pwd', 'kokkos_new', 'kokkos_install'], {}), '(pwd, kokkos_new, kokkos_install)\n', (3608, 3641), False, 'import os, shutil\n'), ((3669, 3714), 'os.path.join', 'os.path.join', (['pwd', 'cabana_new', 'cabana_install'], {}), '(pwd, cabana_new, cabana_install)\n', (3681, 3714), False, 'import os, shutil\n'), ((3892, 4002), 'subprocess.check_call', 'subprocess.check_call', (["['./build_and_run.sh', clone_path, 'g++', kokkos_full_path,\n cabana_full_path, plat]"], {}), "(['./build_and_run.sh', clone_path, 'g++',\n kokkos_full_path, cabana_full_path, plat])\n", (3913, 4002), False, 'import subprocess\n'), ((2184, 2232), 'os.path.join', 'os.path.join', (['kokkos_new', 'install_dir', '"""install"""'], {}), "(kokkos_new, install_dir, 'install')\n", (2196, 2232), False, 'import os, shutil\n'), ((3047, 3072), 'os.path.isdir', 'os.path.isdir', (['clone_path'], {}), '(clone_path)\n', (3060, 3072), False, 'import os, shutil\n'), ((3425, 3478), 'git.Repo.clone_from', 'Repo.clone_from', (['repo_path', 'clone_path'], {'branch': 'branch'}), '(repo_path, clone_path, branch=branch)\n', (3440, 3478), False, 'from git import Repo\n'), ((2031, 2079), 'os.path.join', 
'os.path.join', (['kokkos_new', 'install_dir', '"""install"""'], {}), "(kokkos_new, install_dir, 'install')\n", (2043, 2079), False, 'import os, shutil\n')] |
# © 2020 [<NAME>](mailto:<EMAIL>)
import html
import logging
from xeda.utils import try_convert
from xml.etree import ElementTree
from ..flow import Flow, DebugLevel
from functools import reduce
logger = logging.getLogger()
def supported_vivado_generic(k, v, sim):
    """Return True if generic/parameter value `v` can be passed to Vivado.

    Simulation flows accept any value.  For synthesis, only integers
    (including bools, since bool subclasses int), numeric strings, and the
    strings 'true'/'false' (case/whitespace insensitive) are supported.

    Args:
        k: the generic's name (unused; kept for interface compatibility).
        v: the generic's value.
        sim: True when targeting simulation rather than synthesis.

    Returns:
        bool indicating whether `v` is representable for the target flow.
    """
    if sim:
        return True
    # bool is a subclass of int, so this also covers True/False; the
    # original separate isinstance(v, bool) branch was unreachable.
    if isinstance(v, int):
        return True
    v = str(v)
    return v.isnumeric() or v.strip().lower() in {'true', 'false'}
def vivado_gen_convert(k, x, sim):
    """Map boolean-like generic values to 1-bit Verilog literals for synthesis.

    Simulation values pass through untouched.  Otherwise 'true'/'false'
    (case/whitespace insensitive) become Verilog bit literals and any other
    value is returned unchanged.
    """
    if sim:
        return x
    normalised = str(x).strip().lower()
    bit_literals = {'false': "1\\'b0", 'true': "1\\'b1"}
    return bit_literals.get(normalised, x)
def vivado_generics(kvdict, sim):
    """Build the space-separated Vivado command-line fragment for generics.

    Only generics supported by the target flow are emitted; simulation flows
    use the `-generic_top` switch instead of `-generic`.
    """
    switch = '-generic_top' if sim else '-generic'
    parts = []
    for key, value in kvdict.items():
        if supported_vivado_generic(key, value, sim):
            parts.append(f"{switch} {key}={vivado_gen_convert(key, value, sim)}")
    return ' '.join(parts)
class Vivado(Flow):
    """Flow wrapper around the Xilinx Vivado tool."""

    reports_subdir_name = 'reports'

    def run_vivado(self, script_path, stdout_logfile=None):
        """Launch Vivado on a Tcl script and return the finished process."""
        if stdout_logfile is None:
            stdout_logfile = f'{self.name}_stdout.log'
        debug = self.args.debug > DebugLevel.NONE
        # NOTE(review): `debug` is a bool here, so `debug >= DebugLevel.HIGHEST`
        # may not behave as intended -- confirm against DebugLevel's values.
        mode = 'tcl' if debug >= DebugLevel.HIGHEST else 'batch'
        vivado_args = ['-nojournal', '-mode', mode, '-source', str(script_path)]
        # if not debug:
        #     vivado_args.append('-notrace')
        return self.run_process('vivado', vivado_args, initial_step='Starting vivado',
                                stdout_logfile=stdout_logfile)

    @staticmethod
    def parse_xml_report(report_xml):
        """Parse a Vivado XML report.

        Returns a mapping of "section[:table]" titles to per-row dicts, where
        each row maps its first-column key to the remaining non-empty columns.
        """
        tree = ElementTree.parse(report_xml)
        data = {}
        for section in tree.findall("./section"):
            section_title = section.get("title")
            for table in section.findall("./table"):
                headers = [html.unescape(col.attrib['contents']).strip()
                           for col in table.findall("./tablerow/tableheader")]
                table_data = {}
                for row in table.findall("./tablerow"):
                    cells = [html.unescape(cell.attrib['contents']).strip()
                             for cell in row.findall("./tablecell")]
                    if not cells:
                        continue
                    # First column acts as the row's distinct key; remaining
                    # non-empty columns become that row's values.
                    row_values = {h: c for h, c in zip(headers[1:], cells[1:]) if c}
                    if row_values:
                        table_data[cells[0]] = try_convert(row_values, to_str=False)
                if table_data:
                    table_title = table.get("title")
                    title = section_title + ":" + table_title if table_title else section_title
                    data[title] = table_data
        return data

    @staticmethod
    def get_from_path(dct: dict, path):
        """Fetch a nested value; `path` is a list of keys or a dotted string."""
        segments = path.split('.') if isinstance(path, str) else path
        return reduce(dict.__getitem__, segments, dct)
| [
"logging.getLogger",
"xml.etree.ElementTree.parse",
"functools.reduce",
"html.unescape",
"xeda.utils.try_convert"
] | [((205, 224), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (222, 224), False, 'import logging\n'), ((1578, 1607), 'xml.etree.ElementTree.parse', 'ElementTree.parse', (['report_xml'], {}), '(report_xml)\n', (1595, 1607), False, 'from xml.etree import ElementTree\n'), ((2901, 2936), 'functools.reduce', 'reduce', (['dict.__getitem__', 'path', 'dct'], {}), '(dict.__getitem__, path, dct)\n', (2907, 2936), False, 'from functools import reduce\n'), ((1865, 1902), 'html.unescape', 'html.unescape', (["col.attrib['contents']"], {}), "(col.attrib['contents'])\n", (1878, 1902), False, 'import html\n'), ((2468, 2504), 'xeda.utils.try_convert', 'try_convert', (['cell_data'], {'to_str': '(False)'}), '(cell_data, to_str=False)\n', (2479, 2504), False, 'from xeda.utils import try_convert\n'), ((2053, 2091), 'html.unescape', 'html.unescape', (["cell.attrib['contents']"], {}), "(cell.attrib['contents'])\n", (2066, 2091), False, 'import html\n')] |
from django.urls import reverse
# Admin URL for the social-auth providers list, resolved once per module.
admin_link = reverse("misago:admin:settings:socialauth:index")


def test_providers_list_renders(admin_client):
    resp = admin_client.get(admin_link)
    assert resp.status_code == 200


def test_providers_list_renders_with_active_provider(admin_client, provider):
    resp = admin_client.get(admin_link)
    assert resp.status_code == 200


def test_providers_list_renders_with_disabled_provider(admin_client, disabled_provider):
    resp = admin_client.get(admin_link)
    assert resp.status_code == 200
| [
"django.urls.reverse"
] | [((47, 96), 'django.urls.reverse', 'reverse', (['"""misago:admin:settings:socialauth:index"""'], {}), "('misago:admin:settings:socialauth:index')\n", (54, 96), False, 'from django.urls import reverse\n')] |
import json
from django.core.management.base import BaseCommand, CommandError
from users.models import User
class Command(BaseCommand):
    """Management command that prints a user's bot settings as shell env vars."""

    help = "Exports a user information as a set of environment variables"

    def add_arguments(self, parser):
        # Primary key of the user to export.
        parser.add_argument("user_id", type=int)

    def handle(self, *args, **options):
        user_id = options["user_id"]
        # bot_user() may return a falsy value when the account is not a bot.
        user = User.objects.get(id=user_id).bot_user()
        if not user:
            raise CommandError('User "%s" does not exist' % user_id)
        env_block = f"""
# user ID {user.id} for user {user.name}
USER_BINANCE_API_KEY="{user.binance_api_key}"
USER_BINANCE_SECRET_KEY="{user.binance_secret_key}"
USER_EXTERNAL_PORTFOLIO='{json.dumps(user.external_portfolio)}'
USER_PREFERENCES='{json.dumps(user.preferences)}'
"""
        print(env_block)
| [
"json.dumps",
"users.models.User.objects.get",
"django.core.management.base.CommandError"
] | [((474, 524), 'django.core.management.base.CommandError', 'CommandError', (['(\'User "%s" does not exist\' % user_id)'], {}), '(\'User "%s" does not exist\' % user_id)\n', (486, 524), False, 'from django.core.management.base import BaseCommand, CommandError\n'), ((394, 422), 'users.models.User.objects.get', 'User.objects.get', ([], {'id': 'user_id'}), '(id=user_id)\n', (410, 422), False, 'from users.models import User\n'), ((736, 771), 'json.dumps', 'json.dumps', (['user.external_portfolio'], {}), '(user.external_portfolio)\n', (746, 771), False, 'import json\n'), ((793, 821), 'json.dumps', 'json.dumps', (['user.preferences'], {}), '(user.preferences)\n', (803, 821), False, 'import json\n')] |
# -*- coding: utf-8 -*-
import requests
import pandas as pd
from netCDF4 import Dataset
# from lib.parse_urls import parse_urls
from . parse_urls import parse_urls
class dataset:
    """Client-side view of a single Planet OS datahub dataset.

    Wraps the datahub REST API (via parse_urls) and the corresponding
    THREDDS/OpenDAP endpoints to expose variable metadata and data access.
    """
    def __init__(self,datasetkey,datahub):
        """Bind this object to dataset `datasetkey` on the given datahub."""
        self.datasetkey = datasetkey
        self.datahub=datahub
    def variables(self):
        """Return the raw variable metadata list from the datahub API."""
        variables=parse_urls(self.datahub.server,self.datahub.version,"datasets/"+self.datasetkey+"/variables",self.datahub.apikey)
        return variables.r.json()['variables']
    def variable_names(self):
        """Return the sorted, de-duplicated list of variable keys."""
        return sorted(list(set(list(map(lambda x: x['variableKey'], self.variables())))))
    def standard_names(self):
        """
        return list of standard names of variables
        """
        return self.return_names('standard_name')
    def return_names(self,nameversion):
        """
        return list of variables by name type
        """
        stdnames=[]
        # Collect the value of the requested attribute (e.g. standard_name)
        # across every variable's attribute list.
        for k in self.variables():
            for j in k:
                if j == 'attributes':
                    for i in k[j]:
                        if i['attributeKey']==nameversion:
                            stdnames.append(i['attributeValue'])
        return sorted(list(set(stdnames)))
    def get_standard_name_from_variable_name(self,varname):
        """Return the 'long_name' attribute of variable `varname`, or None."""
        for i in self.variables():
            if i['variableKey'] == varname:
                for j in i['attributes']:
                    if j['attributeKey']=='long_name':
                        return j['attributeValue']
    def long_names(self):
        """
        return list of long names of variables
        """
        return self.return_names('long_name')
    def get_tds_file(self,variable):
        """
        Until something better found ...
        return first file tds path that contains variable name, should work with either standard or long name!
        """
        tdaddr="http://{0}/{1}/data/dataset_physical_contents/{2}?apikey={3}".format(self.datahub.server,self.datahub.version,self.datasetkey,self.datahub.apikey)
        r=requests.get(tdaddr).json()
        for htt in r:
            # A variable dict "matches" when any of its values equals `variable`
            # (works for either the standard or the long name).
            found_vars=[j for j in htt['variables'] for i in j if j[i]==variable]
            if len(found_vars)>0:
                return htt['planetosOpenDAPVariables']
    def get_tds_field(self,variable):
        """Open the variable's OpenDAP file and return a 2-D slice of its data.

        For 3-D/4-D variables the leading (time/level) axes are sliced at
        index 0 -- presumably time and vertical level; TODO confirm.
        """
        stdname=self.get_standard_name_from_variable_name(variable)
        if not stdname:
            stdname=variable
        if len(stdname)==0:
            stdname=variable
        ## print("stdname in get_field",stdname)
        tdsfile=self.get_tds_file(variable)
        assert len(tdsfile)>10, "could not determine TDS path, cannot continue"
        ## print('TDS file',tdsfile)
        ds = Dataset(tdsfile)
        vari = ds.variables[variable]
        dimlen = len(vari.dimensions)
        if dimlen==4:
            return vari[0,0,:,:]
        elif dimlen==3:
            return vari[0,:,:]
        elif dimlen==2:
            return vari[:,:]
        else:
            return vari[:]
        ## raise ValueError("Cannot return 2D array for {0}".format(variable))
    def get_json_data_in_pandas(self,count=10,z='all',pandas=True,**kwargs):
        """Query the datahub point API and return the result.

        Returns a pandas DataFrame (default) or the raw JSON when
        `pandas=False`.  Extra keyword arguments are forwarded to the API;
        `count` and `z` act as defaults when not supplied in kwargs.
        """
        def convert_json_to_some_pandas(injson):
            # Flatten the per-entry 'axes' and 'data' dicts into columns and
            # keep only rows from the first reference time.
            param_list = ['axes','data']
            new_dict = {}
            [new_dict.update({i:[]}) for i in param_list]
            [(new_dict['axes'].append(i['axes']),new_dict['data'].append(i['data'])) for i in injson];
            pd_temp = pd.DataFrame(injson)
            dev_frame = pd_temp[['context','axes']].join(pd.concat([pd.DataFrame(new_dict[i]) for i in param_list],axis=1))
            dev_frame = dev_frame[dev_frame['reftime'] == dev_frame['reftime'][0]]
            return dev_frame
        if not 'count' in kwargs:
            kwargs['count'] = count
        if not 'z' in kwargs:
            kwargs["z"]=z
        retjson=parse_urls(self.datahub.server,self.datahub.version,"datasets/{0}/point".format(self.datasetkey),self.datahub.apikey,clean_reftime=False,**kwargs).r.json()
        if pandas: retjson=convert_json_to_some_pandas(retjson['entries'])
        return retjson
    def get_dataset_boundaries(self):
        """Return the dataset's spatial extent as a list of coordinates.

        For (Multi)Polygon extents the outer ring of the (first) polygon is
        returned; otherwise the raw SpatialExtent object is passed through.
        """
        boundaries=parse_urls(self.datahub.server,self.datahub.version,"datasets/"+self.datasetkey,self.datahub.apikey)
        rj = boundaries.r.json()['SpatialExtent']
        if rj['type'] == 'Polygon':
            rdict = rj['coordinates'][0]
        elif rj['type'] == 'MultiPolygon':
            rdict = rj['coordinates'][0][0]
        else:
            rdict = rj
        return rdict
| [
"pandas.DataFrame",
"netCDF4.Dataset",
"requests.get"
] | [((2692, 2708), 'netCDF4.Dataset', 'Dataset', (['tdsfile'], {}), '(tdsfile)\n', (2699, 2708), False, 'from netCDF4 import Dataset\n'), ((3449, 3469), 'pandas.DataFrame', 'pd.DataFrame', (['injson'], {}), '(injson)\n', (3461, 3469), True, 'import pandas as pd\n'), ((2031, 2051), 'requests.get', 'requests.get', (['tdaddr'], {}), '(tdaddr)\n', (2043, 2051), False, 'import requests\n'), ((3538, 3563), 'pandas.DataFrame', 'pd.DataFrame', (['new_dict[i]'], {}), '(new_dict[i])\n', (3550, 3563), True, 'import pandas as pd\n')] |
from src import tours
import numpy as np
# Absolute tolerance for floating-point length comparisons.
tolerance = 1e-4


def test_tour_traversal():
    """A tour around the unit square should have total length 4."""
    unit_square = np.array([[0, 0], [0, 1], [1, 1], [1, 0.]])
    visiting_order = [0, 1, 2, 3]
    length = tours.tour_traversal(visiting_order, unit_square)
    assert abs(length - 4.) < tolerance
| [
"numpy.array",
"src.tours.tour_traversal"
] | [((101, 145), 'numpy.array', 'np.array', (['[[0, 0], [0, 1], [1, 1], [1, 0.0]]'], {}), '([[0, 0], [0, 1], [1, 1], [1, 0.0]])\n', (109, 145), True, 'import numpy as np\n'), ((184, 218), 'src.tours.tour_traversal', 'tours.tour_traversal', (['tour', 'square'], {}), '(tour, square)\n', (204, 218), False, 'from src import tours\n')] |
from netCDF4 import Dataset, num2date
import os
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
import argparse
import ast
import gc
import logging
import math
import sys
import time
import numpy as np
import pandas as pd
import psutil
import tensorflow as tf
from tensorflow.keras.mixed_precision import experimental as mixed_precision
try:
    import tensorflow_addons as tfa
except Exception as e:
    # tensorflow_addons is optional; callers must handle tfa being None.
    tfa = None
import data_generators
import custom_losses as cl
import hparameters
import models
import utility
# Global Keras numeric configuration: half-precision floats with a larger
# epsilon to keep float16 arithmetic stable.
tf.keras.backend.set_floatx('float16')
tf.keras.backend.set_epsilon(1e-3)
try:
    gpu_devices = tf.config.list_physical_devices('GPU')
except Exception as e:
    # Older TF releases expose the listing under tf.config.experimental.
    gpu_devices = tf.config.experimental.list_physical_devices('GPU')
# Enable mixed-precision (float16 compute / float32 variables) globally.
policy = mixed_precision.Policy('mixed_float16')
mixed_precision.set_policy(policy)
def is_compatible_with(self, other):
    """Return True if the `other` DType will be converted to this DType.

    Monkey patch: works around incompatibility between tfa.optimizers and
    mixed-precision training by additionally treating type enum 19 as
    compatible with enum 1.

    The conversion rules are as follows:

    ```python
    DType(T)    .is_compatible_with(DType(T))        == True
    ```

    Args:
        other: A `DType` (or object that may be converted to a `DType`).

    Returns:
        True if a Tensor of the `other` `DType` will be implicitly converted
        to this `DType`.
    """
    other = tf.dtypes.as_dtype(other)
    # Extra fast-path introduced by the patch: enum 19 vs enum 1.
    if self._type_enum == 19 and other.as_datatype_enum == 1:
        return True
    compatible_enums = (other.as_datatype_enum, other.base_dtype.as_datatype_enum)
    return self._type_enum in compatible_enums


# Install the patched comparison on TensorFlow's DType class.
tf.DType.is_compatible_with = is_compatible_with
class WeatherModel():
    """Handles the Training of the Deep Learning Weather model.

    Wraps dataset construction (ERA5 features / E-OBS targets), a
    tf.distribute.MirroredStrategy model + RectifiedAdam optimizer with
    mixed-precision loss scaling, checkpointing, and the train/validation loop.

    Example of how to use:
    WeatherModel = WeatherModel( t_params, m_params)
    WeatherModel.initialize_scheme_era5Eobs() #Initializes datasets for ERA5 and Eobs
    WeatherModel.train_model() #Trains and saves model

    NOTE(review): several methods reference a bare ``m_params`` (not
    ``self.m_params``); this only resolves because the ``__main__`` block
    defines ``m_params`` as a module-level global — confirm before reusing
    this class outside that script context.
    """
    def __init__(self, t_params, m_params):
        """Store the training (t_params) and model (m_params) hyper-parameter dicts.

        Args:
            t_params: dict of training settings (batch sizes, batch counts,
                reporting frequency, normalization constants, ...).
            m_params: dict of model settings (model type, region grid params,
                optimizer params, ...).
        """
        self.t_params = t_params
        self.m_params = m_params
    def initialize_scheme_era5Eobs(self):
        """Initialization scheme for the ERA5 and E-OBS datasets.

        This method creates the datasets, restores/creates the training-record
        CSV, builds the distributed model/optimizer/metrics, restores the
        latest checkpoint if present, and prepares the distributed train+val
        dataset iterator. Must be called before `train_model`.
        """
        # region ---- Parameters related to training length and training reporting frequency
        era5_eobs = data_generators.Era5_Eobs( self.t_params, self.m_params)
        # hparameters files calculates train_batches assuing we are only evaluating one location,
        # therefore we must adjust got multiple locations (loc_count)
        self.t_params['train_batches'] = int(self.t_params['train_batches'] * era5_eobs.loc_count)
        self.t_params['val_batches'] = int(self.t_params['val_batches'] * era5_eobs.loc_count)
        # The fequency at which we report during training and validation i.e every 10% of minibatches report training loss and training mse
        # (floor of 3 batches so very short runs still report at least occasionally)
        self.train_batch_report_freq = max( int(self.t_params['train_batches']*self.t_params['reporting_freq']), 3)
        self.val_batch_report_freq = max( int(self.t_params['val_batches']*self.t_params['reporting_freq']), 3)
        #endregion
        # region ---- Restoring/Creating New Training Records and Restoring training progress
        #This training records keeps track of the losses on each epoch
        try:
            # NOTE(review): bare `m_params` here relies on the module-level global set in __main__.
            self.df_training_info = pd.read_csv( "checkpoints/{}/checkpoint_scores.csv".format(utility.model_name_mkr(m_params,t_params=self.t_params,htuning=m_params.get('htuning',False))), header=0, index_col=False)
            self.df_training_info = self.df_training_info[['Epoch','Train_loss','Train_mse','Val_loss','Val_mse','Checkpoint_Path','Last_Trained_Batch']]
            # NOTE(review): max over a single-element list [col[0]] always yields the first
            # row's Epoch — presumably max(df['Epoch']) over the whole column was intended; verify.
            self.start_epoch = int(max([self.df_training_info['Epoch'][0]], default=0))
            last_batch = int( self.df_training_info.loc[self.df_training_info['Epoch']==self.start_epoch,'Last_Trained_Batch'].iloc[0] )
            # -1 or a full batch count marks the epoch as complete -> resume at the next epoch
            if(last_batch in [-1, self.t_params['train_batches']] ):
                self.start_epoch = self.start_epoch + 1
                self.batches_to_skip = 0
            else:
                self.batches_to_skip = last_batch
            print("Recovered training records")
        except FileNotFoundError as e:
            #If no file found, then make new training records file
            self.df_training_info = pd.DataFrame(columns=['Epoch','Train_loss','Train_mse','Val_loss','Val_mse','Checkpoint_Path','Last_Trained_Batch'] )
            self.batches_to_skip = 0
            self.start_epoch = 0
            print("Did not recover training records. Starting from scratch")
        # endregion
        # region ---- Defining Model / Optimizer / Losses / Metrics / Records / Checkpoints / Tensorboard
        devices = tf.config.get_visible_devices() #tf.config.experimental.list_physical_devices('GPU')
        #gpus_names = [ device.name for device in devices if device.device_type == "GPU" ]
        #self.strategy = tf.distribute.MirroredStrategy( devices=gpus_names ) #OneDeviceStrategy(device="/GPU:0") #
        # NOTE(review): `devices` above is currently unused (leftover from the commented-out device selection).
        self.strategy = tf.distribute.MirroredStrategy( )
        # global batch size must divide evenly across replicas
        assert self.t_params['batch_size'] % self.strategy.num_replicas_in_sync == 0
        print("Number of Devices used in MirroredStrategy: {}".format(self.strategy.num_replicas_in_sync))
        with self.strategy.scope():
            #Model
            self.strategy_gpu_count = self.strategy.num_replicas_in_sync
            self.t_params['gpu_count'] = self.strategy.num_replicas_in_sync
            self.model = models.model_loader( self.t_params, self.m_params )
            #Optimizer: RectifiedAdam wrapped in a loss-scale optimizer for mixed-precision (float16) training
            optimizer = tfa.optimizers.RectifiedAdam( **self.m_params['rec_adam_params'], total_steps=self.t_params['train_batches']*20)
            self.optimizer = mixed_precision.LossScaleOptimizer( optimizer, loss_scale=tf.mixed_precision.experimental.DynamicLossScale() )
            # These objects will aggregate losses and metrics across batches and epochs
            self.loss_agg_batch = tf.keras.metrics.Mean(name='loss_agg_batch' )
            self.loss_agg_epoch = tf.keras.metrics.Mean(name="loss_agg_epoch")
            self.mse_agg_epoch = tf.keras.metrics.Mean(name='mse_agg_epoch')
            self.loss_agg_val = tf.keras.metrics.Mean(name='loss_agg_val')
            self.mse_agg_val = tf.keras.metrics.Mean(name='mse_agg_val')
        #checkpoints (For Epochs)
        #The CheckpointManagers can be called to serializae the weights within TRUNET
        # NOTE(review): bare `m_params` again relies on the module-level global.
        checkpoint_path_epoch = "./checkpoints/{}/epoch".format(utility.model_name_mkr(m_params,t_params=self.t_params, htuning=m_params.get('htuning',False) ))
        os.makedirs(checkpoint_path_epoch,exist_ok=True)
        with self.strategy.scope():
            ckpt_epoch = tf.train.Checkpoint(model=self.model, optimizer=self.optimizer)
            self.ckpt_mngr_epoch = tf.train.CheckpointManager(ckpt_epoch, checkpoint_path_epoch, max_to_keep=self.t_params['checkpoints_to_keep'], keep_checkpoint_every_n_hours=None)
        #restoring last checkpoint if it exists
        if self.ckpt_mngr_epoch.latest_checkpoint:
            # compat: Initializing model and optimizer before restoring from checkpoint.
            # assert_consumed() raises if some checkpointed values were not matched; in that
            # case fall back to a plain (partial) restore.
            try:
                ckpt_epoch.restore(self.ckpt_mngr_epoch.latest_checkpoint).assert_consumed()
            except AssertionError as e:
                ckpt_epoch.restore(self.ckpt_mngr_epoch.latest_checkpoint)
            print (' Restoring model from best checkpoint')
        else:
            print (' Initializing model from scratch')
        #Tensorboard (log dir is created even though the summary writer below is commented out)
        os.makedirs("log_tensboard/{}".format(utility.model_name_mkr(m_params, t_params=self.t_params, htuning=self.m_params.get('htuning',False) )), exist_ok=True )
        #self.writer = tf.summary.create_file_writer( "log_tensboard/{}".format(utility.model_name_mkr(m_params,t_params=self.t_params, htuning=self.m_params.get('htuning',False) ) ) )
        # endregion
        # region ---- Making Datasets
        #caching dataset to file post pre-processing steps have been completed
        cache_suffix = utility.cache_suffix_mkr( m_params, self.t_params )
        os.makedirs( './Data/data_cache/', exist_ok=True )
        # one combined stream: first train_batches batches are training data, the next val_batches are validation
        _ds_train_val, _ = era5_eobs.load_data_era5eobs( self.t_params['train_batches'] + self.t_params['val_batches'] , self.t_params['start_date'], self.t_params['parallel_calls'] )
        ds_train = _ds_train_val.take(self.t_params['train_batches'] )
        ds_val = _ds_train_val.skip(self.t_params['train_batches'] ).take(self.t_params['val_batches'])
        #TODO: undo cache
        ds_train = ds_train.cache('Data/data_cache/train'+cache_suffix )
        ds_val = ds_val.cache('Data/data_cache/val'+cache_suffix )
        # shuffle with a buffer of ~1/5 of the training set (in samples), then re-batch
        ds_train = ds_train.unbatch().shuffle( self.t_params['batch_size']*int(self.t_params['train_batches']/5), reshuffle_each_iteration=True).batch(self.t_params['batch_size']) #.repeat(self.t_params['epochs']-self.start_epoch)
        ds_train_val = ds_train.concatenate(ds_val)
        ds_train_val = ds_train_val.repeat(self.t_params.get('epochs',100)-self.start_epoch)
        self.ds_train_val = self.strategy.experimental_distribute_dataset(dataset=ds_train_val)
        self.iter_train_val = enumerate(self.ds_train_val)
        # batch indices at which the dataset switches location; the stateful model is reset there
        bc_ds_in_train = int( self.t_params['train_batches']/era5_eobs.loc_count ) #batch_count
        bc_ds_in_val = int( self.t_params['val_batches']/era5_eobs.loc_count )
        self.reset_idxs_training = np.cumsum( [bc_ds_in_train]*era5_eobs.loc_count )
        self.reset_idxs_validation = np.cumsum( [bc_ds_in_val]*era5_eobs.loc_count )
        # endregion
    def train_model(self):
        """Run the full training loop (with per-epoch validation and early stopping).

        During training we produce a prediction for a (n by n) square patch.
        But we caculate losses on a central (h, w) region within the (n by n) patch
        This central region is defined by "bounds" below.
        Progress is appended to ``self.df_training_info`` and persisted to CSV;
        checkpoints are managed via ``utility.update_checkpoints_epoch``.
        """
        bounds = cl.central_region_bounds(self.m_params['region_grid_params']) #list [ lower_h_bound[0], upper_h_bound[0], lower_w_bound[1], upper_w_bound[1] ]
        #Training for n epochs
        #self.t_params['train_batches'] = self.t_params['train_batches'] if self.m_params['time_sequential'] else int(self.t_params['train_batches']*self.t_params['lookback_target'] )
        #self.t_params['val_batches'] = self.t_params['val_batches'] if self.m_params['time_sequential'] else int(self.t_params['val_batches']*self.t_params['lookback_target'] )
        for epoch in range(self.start_epoch, int(self.t_params['epochs']) ):
            #region resetting metrics, losses, records, timers
            self.loss_agg_batch.reset_states()
            self.loss_agg_epoch.reset_states()
            self.mse_agg_epoch.reset_states()
            self.loss_agg_val.reset_states()
            self.mse_agg_val.reset_states()
            # NOTE(review): DataFrame.append is deprecated/removed in pandas >= 2.0 — pin pandas or switch to pd.concat.
            self.df_training_info = self.df_training_info.append( { 'Epoch':epoch, 'Last_Trained_Batch':0 }, ignore_index=True )
            start_epoch_train = time.time()
            start_batch_group_time = time.time()
            batch=0
            print("\n\nStarting EPOCH {}".format(epoch ))
            #endregion
            # --- Training Loops (resumes mid-epoch from batches_to_skip if a partial epoch was recovered)
            for batch in range(self.batches_to_skip+1,self.t_params['train_batches'] +1):
                # get next set of training datums
                idx, (feature, target, mask) = next(self.iter_train_val)
                gradients = self.distributed_train_step( feature, target, mask, bounds, 0.0 )
                #print(gradients)
                # reporting
                if( batch % self.train_batch_report_freq==0 or batch == self.t_params['train_batches']):
                    batch_group_time = time.time() - start_batch_group_time
                    est_completion_time_seconds = (batch_group_time/self.t_params['reporting_freq']) * (1 - batch/self.t_params['train_batches'])
                    est_completion_time_mins = est_completion_time_seconds/60
                    print("\t\tBatch:{}/{}\tTrain Loss: {:.8f} \t Batch Time:{:.4f}\tEpoch mins left:{:.1f}".format(batch, self.t_params['train_batches'], self.loss_agg_batch.result(), batch_group_time, est_completion_time_mins ) )
                    # resetting time and losses
                    start_batch_group_time = time.time()
                    # Updating record of the last batch to be operated on in training epoch
                    self.df_training_info.loc[ ( self.df_training_info['Epoch']==epoch) , ['Last_Trained_Batch'] ] = batch
                    # NOTE(review): htuning is read from the bare global `m_params` here, unlike self.m_params used for the name.
                    self.df_training_info.to_csv( path_or_buf="checkpoints/{}/checkpoint_scores.csv".format(utility.model_name_mkr(self.m_params,t_params=self.t_params, htuning=m_params.get('htuning',False) )), header=True, index=False )
                    # li_losses / li_names / step only feed the commented-out tensorboard_record calls below
                    li_losses = [self.loss_agg_batch.result()]
                    li_names = ['train_loss_batch']
                    step = batch + (epoch)*self.t_params['train_batches']
                    #utility.tensorboard_record( self.writer.as_default(), li_losses, li_names, step, gradients, self.model.trainable_variables )
                    #utility.tensorboard_record( self.writer.as_default(), li_losses, li_names, step, None, None )
                    self.loss_agg_batch.reset_states()
                # reset the stateful model when the dataset switches to a new location
                if batch in self.reset_idxs_training:
                    self.model.reset_states()
            # --- Tensorboard record
            li_losses = [self.loss_agg_epoch.result(), self.mse_agg_epoch.result()]
            li_names = ['train_loss_epoch','train_mse_epoch']
            #utility.tensorboard_record( self.writer.as_default(), li_losses, li_names, epoch)
            print("\tStarting Validation")
            start_batch_group_time = time.time()
            # --- Validation Loops
            for batch in range(1, self.t_params['val_batches']+1):
                # next datum
                idx, (feature, target, mask) = next(self.iter_train_val)
                bool_cmpltd = self.distributed_val_step(feature, target, mask, bounds)
                # Reporting for validation
                if batch % self.val_batch_report_freq == 0 or batch==self.t_params['val_batches'] :
                    batch_group_time = time.time() - start_batch_group_time
                    est_completion_time_seconds = (batch_group_time/self.t_params['reporting_freq']) * (1 - batch/self.t_params['val_batches'])
                    est_completion_time_mins = est_completion_time_seconds/60
                    print("\t\tCompleted Validation Batch:{}/{} \t Time:{:.4f} \tEst Time Left:{:.1f}".format( batch, self.t_params['val_batches'], batch_group_time, est_completion_time_mins))
                    start_batch_group_time = time.time()
                if batch in self.reset_idxs_validation:
                    self.model.reset_states()
            # region - End of Epoch Reporting and Early iteration Callback
            print("\tEpoch:{}\t Train Loss:{:.8f}\t Train MSE:{:.5f}\t Val Loss:{:.5f}\t Val MSE:{:.5f}\t Time:{:.5f}".format(epoch, self.loss_agg_epoch.result(), self.mse_agg_epoch.result(),
                                                                                                                            self.loss_agg_val.result(), self.mse_agg_val.result() ,time.time()-start_epoch_train ) )
            #utility.tensorboard_record( self.writer.as_default(), [self.loss_agg_val.result(), self.mse_agg_val.result()], ['Validation Loss', 'Validation MSE' ], epoch )
            # persists a checkpoint if this epoch improved on the chosen objective
            self.df_training_info = utility.update_checkpoints_epoch(self.df_training_info, epoch, self.loss_agg_epoch, self.loss_agg_val, self.ckpt_mngr_epoch, self.t_params,
                                                                        self.m_params, self.mse_agg_epoch ,self.mse_agg_val, self.t_params['objective'] )
            # Early Stop Callback: stop if no recorded epoch within the patience window
            if epoch > ( max( self.df_training_info.loc[:, 'Epoch'], default=0 ) + self.t_params['early_stopping_period']) :
                print("Model Stopping Early at EPOCH {}".format(epoch))
                print(self.df_training_info)
                break
            # endregion
        print("Model Training Finished")
    def train_step(self, feature, target, mask, bounds, _init):
        """One optimization step (runs per replica under strategy.run).

        Args:
            feature: model input batch.
            target: ground-truth rain batch.
            mask: boolean mask of valid target cells.
            bounds: central-region bounds from cl.central_region_bounds.
            _init: 1.0 triggers a dummy forward/backward pass that only builds
                model and optimizer variables (used for checkpoint restore
                compatibility); any other value runs a real training step.

        Returns:
            The (clipped) gradients, or [0] on the _init pass.
        """
        if _init==1.0:
            # build-only pass: zero-input forward + zero gradients so all variables exist
            if self.m_params['time_sequential'] == True:
                inp_shape = [self.t_params['batch_size'], self.t_params['lookback_feature']] + self.m_params['region_grid_params']['outer_box_dims'] + [len(self.t_params['vars_for_feature'])]
            else:
                inp_shape = [self.t_params['batch_size'] ] + self.m_params['region_grid_params']['outer_box_dims'] + [ int(self.t_params['lookback_feature']*len(self.t_params['vars_for_feature'])) ]
            _ = self.model( tf.zeros( inp_shape, dtype=tf.float16), self.t_params['trainable'] ) #( bs, tar_seq_len, h, w)
            gradients = [ tf.zeros_like(t_var, dtype=tf.float32 ) for t_var in self.model.trainable_variables ]
            self.optimizer.apply_gradients(zip(gradients, self.model.trainable_variables))
            return [0]
        with tf.GradientTape(persistent=False) as tape:
            # non conditional continuous training
            if self.m_params['model_type_settings']['discrete_continuous'] == False:
                #making predictions
                preds = self.model( feature, self.t_params['trainable'] ) #( bs, tar_seq_len, h, w)
                preds = tf.squeeze( preds,axis=[-1] )
                # evaluate loss only on the central region of the predicted patch
                preds = cl.extract_central_region(preds, bounds)
                mask = cl.extract_central_region(mask, bounds)
                target = cl.extract_central_region(target, bounds)
                #Applying mask
                preds_masked = tf.boolean_mask( preds, mask )
                target_masked = tf.boolean_mask( target, mask )
                # reversing standardization
                preds_masked = utility.standardize_ati( preds_masked, self.t_params['normalization_shift']['rain'], self.t_params['normalization_scales']['rain'], reverse=True)
                # getting losses for records and/or optimizer
                metric_mse = cl.mse(target_masked, preds_masked)
                loss_to_optimize = metric_mse
            # conditional continuous training
            elif self.m_params['model_type_settings']['discrete_continuous'] == True:
                # Producing predictions - conditional rain value and prob of rain
                preds = self.model( feature, self.t_params['trainable'] ) # ( bs, seq_len, h, w, 1)
                preds = tf.squeeze(preds, axis=[-1])
                preds, probs = tf.unstack(preds, axis=0)
                # extracting the central region of interest
                preds = cl.extract_central_region(preds, bounds)
                probs = cl.extract_central_region(probs, bounds)
                mask = cl.extract_central_region(mask, bounds)
                target = cl.extract_central_region(target, bounds)
                # applying mask to predicted values
                preds_masked = tf.boolean_mask(preds, mask )
                probs_masked = tf.boolean_mask(probs, mask )
                target_masked = tf.boolean_mask(target, mask )
                # Reverising standardization of predictions
                preds_masked = utility.standardize_ati( preds_masked, self.t_params['normalization_shift']['rain'],
                                                        self.t_params['normalization_scales']['rain'], reverse=True)
                # Getting true labels and predicted labels for whether or not it rained [ 1 if if did rain, 0 if it did not rain]
                labels_true = tf.where( target_masked > 0.0, 1.0, 0.0 )
                labels_pred = probs_masked
                all_count = tf.size( labels_true, out_type=tf.int64 )
                # region Calculating Losses and Metrics
                metric_mse = cl.mse( target_masked, cl.cond_rain(preds_masked, probs_masked, threshold=0.5) )
                # To calculate metric_mse for CC model we assume that pred_rain=0 if pred_prob<=0.5
                # CC Normal loss = masked MSE + binary cross-entropy on rain/no-rain
                loss_to_optimize = 0
                loss_to_optimize += cl.mse( target_masked, preds_masked, all_count )
                loss_to_optimize += tf.reduce_mean( tf.keras.backend.binary_crossentropy(labels_true, labels_pred, from_logits=False) )
            # endregion
            # average the loss across replicas without altering its gradient, then scale for mixed precision
            loss_to_optimize_agg = tf.grad_pass_through( lambda x: x/self.strategy_gpu_count )(loss_to_optimize)
            scaled_loss = self.optimizer.get_scaled_loss( loss_to_optimize_agg )
        scaled_gradients = tape.gradient( scaled_loss, self.model.trainable_variables )
        unscaled_gradients = self.optimizer.get_unscaled_gradients(scaled_gradients)
        gradients, _ = tf.clip_by_global_norm( unscaled_gradients, clip_norm=self.m_params['clip_norm'] ) #gradient clipping
        self.optimizer.apply_gradients( zip(gradients, self.model.trainable_variables))
        # Metrics (batchwise, epoch)
        self.loss_agg_batch( loss_to_optimize )
        self.loss_agg_epoch( loss_to_optimize )
        self.mse_agg_epoch( metric_mse )
        # NOTE(review): `val` is computed but never used/returned — dead code or a leftover metric hook.
        val = cl.rNmse(target_masked, preds_masked, 10.0)
        return gradients
    def val_step(self, feature, target, mask, bounds):
        """One validation step (per replica): updates loss_agg_val / mse_agg_val.

        Mirrors train_step's two branches (plain continuous vs
        discrete-continuous) but runs the model in inference mode and takes
        no gradients. Returns True when complete.
        """
        # Non CC distribution
        if self.m_params['model_type_settings']['discrete_continuous'] == False:
            # Get predictions
            preds = self.model(feature, False )
            preds = tf.squeeze(preds)
            # Extracting central region for evaluation
            preds = cl.extract_central_region(preds, bounds)
            mask = cl.extract_central_region(mask, bounds)
            target = cl.extract_central_region(target, bounds)
            # Applying masks to predictions
            preds_masked = tf.boolean_mask( preds, mask )
            target_masked = tf.boolean_mask( target, mask )
            preds_masked = utility.standardize_ati( preds_masked, self.t_params['normalization_shift']['rain'],
                                                    self.t_params['normalization_scales']['rain'], reverse=True)
            # Updating losses
            mse = cl.mse( target_masked , preds_masked )
            loss = mse
        # CC distribution
        elif self.m_params['model_type_settings']['discrete_continuous'] == True:
            # Get predictions
            preds = self.model(feature, training=False )
            preds = tf.squeeze(preds,axis=[-1])
            preds, probs = tf.unstack(preds, axis=0)
            # Extracting central region for evaluation
            preds = cl.extract_central_region(preds, bounds)
            probs = cl.extract_central_region(probs, bounds)
            mask = cl.extract_central_region(mask, bounds)
            target = cl.extract_central_region(target,bounds)
            # Applying masks to predictions
            preds_masked = tf.boolean_mask( preds, mask )
            probs_masked = tf.boolean_mask( probs, mask)
            target_masked = tf.boolean_mask( target, mask )
            preds_masked = utility.standardize_ati( preds_masked, self.t_params['normalization_shift']['rain'],
                                                    self.t_params['normalization_scales']['rain'], reverse=True)
            # Getting classification labels for whether or not it rained
            labels_true = tf.where( target_masked > 0.0, 1.0, 0.0 )
            labels_pred = probs_masked
            all_count = tf.size( labels_true, out_type=tf.int64 )
            # calculating seperate mse for reporting
            # This mse metric assumes that if probability of rain is predicted below 0.5, the rain value is 0
            mse = cl.mse( target_masked, cl.cond_rain( preds_masked, probs_masked, threshold=0.5) )
            # Calculating cross entropy loss
            loss = tf.reduce_mean( tf.keras.backend.binary_crossentropy( labels_true, labels_pred, from_logits=False) )
            # Calculating conditinal continuous loss
            loss += cl.mse( target_masked, preds_masked, all_count )
        self.loss_agg_val(loss)
        self.mse_agg_val(mse)
        return True
    @tf.function
    def distributed_train_step(self, feature, target, mask, bounds, _init):
        """Graph-compiled wrapper that runs train_step on every replica."""
        gradients = self.strategy.run( self.train_step, args=(feature, target, mask, bounds, _init) )
        return gradients
    @tf.function
    def distributed_val_step(self, feature, target, mask, bounds):
        """Graph-compiled wrapper that runs val_step on every replica."""
        bool_completed = self.strategy.run( self.val_step, args=(feature, target, mask, bounds))
        return bool_completed
if __name__ == "__main__":
    # Script entry point: resolve the script directory and parse CLI arguments.
    s_dir = utility.get_script_directory(sys.argv[0])
    args_dict = utility.parse_arguments(s_dir)
    # get training and model params
    # NOTE: `t_params`/`m_params` must stay module-level globals — several
    # WeatherModel methods reference bare `m_params` and resolve it here.
    t_params, m_params = utility.load_params(args_dict)
    # Initialize and train model
    weather_model = WeatherModel(t_params, m_params)
    weather_model.initialize_scheme_era5Eobs()
    weather_model.train_model()
| [
"tensorflow.unstack",
"tensorflow.train.Checkpoint",
"custom_losses.extract_central_region",
"tensorflow.boolean_mask",
"custom_losses.cond_rain",
"tensorflow.keras.backend.set_epsilon",
"utility.load_params",
"tensorflow_addons.optimizers.RectifiedAdam",
"tensorflow.GradientTape",
"models.model_l... | [((593, 631), 'tensorflow.keras.backend.set_floatx', 'tf.keras.backend.set_floatx', (['"""float16"""'], {}), "('float16')\n", (620, 631), True, 'import tensorflow as tf\n'), ((633, 668), 'tensorflow.keras.backend.set_epsilon', 'tf.keras.backend.set_epsilon', (['(0.001)'], {}), '(0.001)\n', (661, 668), True, 'import tensorflow as tf\n'), ((841, 880), 'tensorflow.keras.mixed_precision.experimental.Policy', 'mixed_precision.Policy', (['"""mixed_float16"""'], {}), "('mixed_float16')\n", (863, 880), True, 'from tensorflow.keras.mixed_precision import experimental as mixed_precision\n'), ((882, 916), 'tensorflow.keras.mixed_precision.experimental.set_policy', 'mixed_precision.set_policy', (['policy'], {}), '(policy)\n', (908, 916), True, 'from tensorflow.keras.mixed_precision import experimental as mixed_precision\n'), ((695, 733), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (726, 733), True, 'import tensorflow as tf\n'), ((1482, 1507), 'tensorflow.dtypes.as_dtype', 'tf.dtypes.as_dtype', (['other'], {}), '(other)\n', (1500, 1507), True, 'import tensorflow as tf\n'), ((26375, 26416), 'utility.get_script_directory', 'utility.get_script_directory', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (26403, 26416), False, 'import utility\n'), ((26434, 26464), 'utility.parse_arguments', 'utility.parse_arguments', (['s_dir'], {}), '(s_dir)\n', (26457, 26464), False, 'import utility\n'), ((26530, 26560), 'utility.load_params', 'utility.load_params', (['args_dict'], {}), '(args_dict)\n', (26549, 26560), False, 'import utility\n'), ((777, 828), 'tensorflow.config.experimental.list_physical_devices', 'tf.config.experimental.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (821, 828), True, 'import tensorflow as tf\n'), ((2597, 2652), 'data_generators.Era5_Eobs', 'data_generators.Era5_Eobs', (['self.t_params', 'self.m_params'], {}), '(self.t_params, self.m_params)\n', (2622, 2652), False, 
'import data_generators\n'), ((5065, 5096), 'tensorflow.config.get_visible_devices', 'tf.config.get_visible_devices', ([], {}), '()\n', (5094, 5096), True, 'import tensorflow as tf\n'), ((5386, 5418), 'tensorflow.distribute.MirroredStrategy', 'tf.distribute.MirroredStrategy', ([], {}), '()\n', (5416, 5418), True, 'import tensorflow as tf\n'), ((7061, 7110), 'os.makedirs', 'os.makedirs', (['checkpoint_path_epoch'], {'exist_ok': '(True)'}), '(checkpoint_path_epoch, exist_ok=True)\n', (7072, 7110), False, 'import os\n'), ((8634, 8683), 'utility.cache_suffix_mkr', 'utility.cache_suffix_mkr', (['m_params', 'self.t_params'], {}), '(m_params, self.t_params)\n', (8658, 8683), False, 'import utility\n'), ((8695, 8743), 'os.makedirs', 'os.makedirs', (['"""./Data/data_cache/"""'], {'exist_ok': '(True)'}), "('./Data/data_cache/', exist_ok=True)\n", (8706, 8743), False, 'import os\n'), ((10044, 10093), 'numpy.cumsum', 'np.cumsum', (['([bc_ds_in_train] * era5_eobs.loc_count)'], {}), '([bc_ds_in_train] * era5_eobs.loc_count)\n', (10053, 10093), True, 'import numpy as np\n'), ((10132, 10179), 'numpy.cumsum', 'np.cumsum', (['([bc_ds_in_val] * era5_eobs.loc_count)'], {}), '([bc_ds_in_val] * era5_eobs.loc_count)\n', (10141, 10179), True, 'import numpy as np\n'), ((10513, 10574), 'custom_losses.central_region_bounds', 'cl.central_region_bounds', (["self.m_params['region_grid_params']"], {}), "(self.m_params['region_grid_params'])\n", (10537, 10574), True, 'import custom_losses as cl\n'), ((22586, 22629), 'custom_losses.rNmse', 'cl.rNmse', (['target_masked', 'preds_masked', '(10.0)'], {}), '(target_masked, preds_masked, 10.0)\n', (22594, 22629), True, 'import custom_losses as cl\n'), ((5860, 5909), 'models.model_loader', 'models.model_loader', (['self.t_params', 'self.m_params'], {}), '(self.t_params, self.m_params)\n', (5879, 5909), False, 'import models\n'), ((5975, 6092), 'tensorflow_addons.optimizers.RectifiedAdam', 'tfa.optimizers.RectifiedAdam', ([], {'total_steps': 
"(self.t_params['train_batches'] * 20)"}), "(**self.m_params['rec_adam_params'],\n total_steps=self.t_params['train_batches'] * 20)\n", (6003, 6092), True, 'import tensorflow_addons as tfa\n'), ((6379, 6423), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', ([], {'name': '"""loss_agg_batch"""'}), "(name='loss_agg_batch')\n", (6400, 6423), True, 'import tensorflow as tf\n'), ((6460, 6504), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', ([], {'name': '"""loss_agg_epoch"""'}), "(name='loss_agg_epoch')\n", (6481, 6504), True, 'import tensorflow as tf\n'), ((6541, 6584), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', ([], {'name': '"""mse_agg_epoch"""'}), "(name='mse_agg_epoch')\n", (6562, 6584), True, 'import tensorflow as tf\n'), ((6632, 6674), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', ([], {'name': '"""loss_agg_val"""'}), "(name='loss_agg_val')\n", (6653, 6674), True, 'import tensorflow as tf\n'), ((6707, 6748), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', ([], {'name': '"""mse_agg_val"""'}), "(name='mse_agg_val')\n", (6728, 6748), True, 'import tensorflow as tf\n'), ((7183, 7246), 'tensorflow.train.Checkpoint', 'tf.train.Checkpoint', ([], {'model': 'self.model', 'optimizer': 'self.optimizer'}), '(model=self.model, optimizer=self.optimizer)\n', (7202, 7246), True, 'import tensorflow as tf\n'), ((7283, 7435), 'tensorflow.train.CheckpointManager', 'tf.train.CheckpointManager', (['ckpt_epoch', 'checkpoint_path_epoch'], {'max_to_keep': "self.t_params['checkpoints_to_keep']", 'keep_checkpoint_every_n_hours': 'None'}), "(ckpt_epoch, checkpoint_path_epoch, max_to_keep=\n self.t_params['checkpoints_to_keep'], keep_checkpoint_every_n_hours=None)\n", (7309, 7435), True, 'import tensorflow as tf\n'), ((11673, 11684), 'time.time', 'time.time', ([], {}), '()\n', (11682, 11684), False, 'import time\n'), ((11723, 11734), 'time.time', 'time.time', ([], {}), '()\n', (11732, 11734), False, 'import time\n'), ((14610, 14621), 
'time.time', 'time.time', ([], {}), '()\n', (14619, 14621), False, 'import time\n'), ((16514, 16748), 'utility.update_checkpoints_epoch', 'utility.update_checkpoints_epoch', (['self.df_training_info', 'epoch', 'self.loss_agg_epoch', 'self.loss_agg_val', 'self.ckpt_mngr_epoch', 'self.t_params', 'self.m_params', 'self.mse_agg_epoch', 'self.mse_agg_val', "self.t_params['objective']"], {}), "(self.df_training_info, epoch, self.\n loss_agg_epoch, self.loss_agg_val, self.ckpt_mngr_epoch, self.t_params,\n self.m_params, self.mse_agg_epoch, self.mse_agg_val, self.t_params[\n 'objective'])\n", (16546, 16748), False, 'import utility\n'), ((18113, 18146), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {'persistent': '(False)'}), '(persistent=False)\n', (18128, 18146), True, 'import tensorflow as tf\n'), ((22172, 22257), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (['unscaled_gradients'], {'clip_norm': "self.m_params['clip_norm']"}), "(unscaled_gradients, clip_norm=self.m_params['clip_norm']\n )\n", (22194, 22257), True, 'import tensorflow as tf\n'), ((22994, 23011), 'tensorflow.squeeze', 'tf.squeeze', (['preds'], {}), '(preds)\n', (23004, 23011), True, 'import tensorflow as tf\n'), ((23093, 23133), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['preds', 'bounds'], {}), '(preds, bounds)\n', (23118, 23133), True, 'import custom_losses as cl\n'), ((23157, 23196), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['mask', 'bounds'], {}), '(mask, bounds)\n', (23182, 23196), True, 'import custom_losses as cl\n'), ((23220, 23261), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['target', 'bounds'], {}), '(target, bounds)\n', (23245, 23261), True, 'import custom_losses as cl\n'), ((23349, 23377), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['preds', 'mask'], {}), '(preds, mask)\n', (23364, 23377), True, 'import tensorflow as tf\n'), ((23409, 23438), 'tensorflow.boolean_mask', 'tf.boolean_mask', 
(['target', 'mask'], {}), '(target, mask)\n', (23424, 23438), True, 'import tensorflow as tf\n'), ((23469, 23618), 'utility.standardize_ati', 'utility.standardize_ati', (['preds_masked', "self.t_params['normalization_shift']['rain']", "self.t_params['normalization_scales']['rain']"], {'reverse': '(True)'}), "(preds_masked, self.t_params['normalization_shift'][\n 'rain'], self.t_params['normalization_scales']['rain'], reverse=True)\n", (23492, 23618), False, 'import utility\n'), ((23719, 23754), 'custom_losses.mse', 'cl.mse', (['target_masked', 'preds_masked'], {}), '(target_masked, preds_masked)\n', (23725, 23754), True, 'import custom_losses as cl\n'), ((4646, 4772), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['Epoch', 'Train_loss', 'Train_mse', 'Val_loss', 'Val_mse',\n 'Checkpoint_Path', 'Last_Trained_Batch']"}), "(columns=['Epoch', 'Train_loss', 'Train_mse', 'Val_loss',\n 'Val_mse', 'Checkpoint_Path', 'Last_Trained_Batch'])\n", (4658, 4772), True, 'import pandas as pd\n'), ((17767, 17804), 'tensorflow.zeros', 'tf.zeros', (['inp_shape'], {'dtype': 'tf.float16'}), '(inp_shape, dtype=tf.float16)\n', (17775, 17804), True, 'import tensorflow as tf\n'), ((17894, 17932), 'tensorflow.zeros_like', 'tf.zeros_like', (['t_var'], {'dtype': 'tf.float32'}), '(t_var, dtype=tf.float32)\n', (17907, 17932), True, 'import tensorflow as tf\n'), ((18511, 18539), 'tensorflow.squeeze', 'tf.squeeze', (['preds'], {'axis': '[-1]'}), '(preds, axis=[-1])\n', (18521, 18539), True, 'import tensorflow as tf\n'), ((18588, 18628), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['preds', 'bounds'], {}), '(preds, bounds)\n', (18613, 18628), True, 'import custom_losses as cl\n'), ((18656, 18695), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['mask', 'bounds'], {}), '(mask, bounds)\n', (18681, 18695), True, 'import custom_losses as cl\n'), ((18723, 18764), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['target', 
'bounds'], {}), '(target, bounds)\n', (18748, 18764), True, 'import custom_losses as cl\n'), ((18831, 18859), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['preds', 'mask'], {}), '(preds, mask)\n', (18846, 18859), True, 'import tensorflow as tf\n'), ((18895, 18924), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['target', 'mask'], {}), '(target, mask)\n', (18910, 18924), True, 'import tensorflow as tf\n'), ((19007, 19156), 'utility.standardize_ati', 'utility.standardize_ati', (['preds_masked', "self.t_params['normalization_shift']['rain']", "self.t_params['normalization_scales']['rain']"], {'reverse': '(True)'}), "(preds_masked, self.t_params['normalization_shift'][\n 'rain'], self.t_params['normalization_scales']['rain'], reverse=True)\n", (19030, 19156), False, 'import utility\n'), ((19248, 19283), 'custom_losses.mse', 'cl.mse', (['target_masked', 'preds_masked'], {}), '(target_masked, preds_masked)\n', (19254, 19283), True, 'import custom_losses as cl\n'), ((21785, 21844), 'tensorflow.grad_pass_through', 'tf.grad_pass_through', (['(lambda x: x / self.strategy_gpu_count)'], {}), '(lambda x: x / self.strategy_gpu_count)\n', (21805, 21844), True, 'import tensorflow as tf\n'), ((24035, 24063), 'tensorflow.squeeze', 'tf.squeeze', (['preds'], {'axis': '[-1]'}), '(preds, axis=[-1])\n', (24045, 24063), True, 'import tensorflow as tf\n'), ((24091, 24116), 'tensorflow.unstack', 'tf.unstack', (['preds'], {'axis': '(0)'}), '(preds, axis=0)\n', (24101, 24116), True, 'import tensorflow as tf\n'), ((24208, 24248), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['preds', 'bounds'], {}), '(preds, bounds)\n', (24233, 24248), True, 'import custom_losses as cl\n'), ((24272, 24312), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['probs', 'bounds'], {}), '(probs, bounds)\n', (24297, 24312), True, 'import custom_losses as cl\n'), ((24336, 24375), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['mask', 'bounds'], 
{}), '(mask, bounds)\n', (24361, 24375), True, 'import custom_losses as cl\n'), ((24400, 24441), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['target', 'bounds'], {}), '(target, bounds)\n', (24425, 24441), True, 'import custom_losses as cl\n'), ((24519, 24547), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['preds', 'mask'], {}), '(preds, mask)\n', (24534, 24547), True, 'import tensorflow as tf\n'), ((24581, 24609), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['probs', 'mask'], {}), '(probs, mask)\n', (24596, 24609), True, 'import tensorflow as tf\n'), ((24642, 24671), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['target', 'mask'], {}), '(target, mask)\n', (24657, 24671), True, 'import tensorflow as tf\n'), ((24705, 24854), 'utility.standardize_ati', 'utility.standardize_ati', (['preds_masked', "self.t_params['normalization_shift']['rain']", "self.t_params['normalization_scales']['rain']"], {'reverse': '(True)'}), "(preds_masked, self.t_params['normalization_shift'][\n 'rain'], self.t_params['normalization_scales']['rain'], reverse=True)\n", (24728, 24854), False, 'import utility\n'), ((25022, 25061), 'tensorflow.where', 'tf.where', (['(target_masked > 0.0)', '(1.0)', '(0.0)'], {}), '(target_masked > 0.0, 1.0, 0.0)\n', (25030, 25061), True, 'import tensorflow as tf\n'), ((25132, 25171), 'tensorflow.size', 'tf.size', (['labels_true'], {'out_type': 'tf.int64'}), '(labels_true, out_type=tf.int64)\n', (25139, 25171), True, 'import tensorflow as tf\n'), ((25721, 25767), 'custom_losses.mse', 'cl.mse', (['target_masked', 'preds_masked', 'all_count'], {}), '(target_masked, preds_masked, all_count)\n', (25727, 25767), True, 'import custom_losses as cl\n'), ((6179, 6229), 'tensorflow.mixed_precision.experimental.DynamicLossScale', 'tf.mixed_precision.experimental.DynamicLossScale', ([], {}), '()\n', (6227, 6229), True, 'import tensorflow as tf\n'), ((13121, 13132), 'time.time', 'time.time', ([], {}), '()\n', (13130, 13132), False, 'import 
time\n'), ((15712, 15723), 'time.time', 'time.time', ([], {}), '()\n', (15721, 15723), False, 'import time\n'), ((19707, 19735), 'tensorflow.squeeze', 'tf.squeeze', (['preds'], {'axis': '[-1]'}), '(preds, axis=[-1])\n', (19717, 19735), True, 'import tensorflow as tf\n'), ((19768, 19793), 'tensorflow.unstack', 'tf.unstack', (['preds'], {'axis': '(0)'}), '(preds, axis=0)\n', (19778, 19793), True, 'import tensorflow as tf\n'), ((19899, 19939), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['preds', 'bounds'], {}), '(preds, bounds)\n', (19924, 19939), True, 'import custom_losses as cl\n'), ((19967, 20007), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['probs', 'bounds'], {}), '(probs, bounds)\n', (19992, 20007), True, 'import custom_losses as cl\n'), ((20035, 20074), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['mask', 'bounds'], {}), '(mask, bounds)\n', (20060, 20074), True, 'import custom_losses as cl\n'), ((20102, 20143), 'custom_losses.extract_central_region', 'cl.extract_central_region', (['target', 'bounds'], {}), '(target, bounds)\n', (20127, 20143), True, 'import custom_losses as cl\n'), ((20234, 20262), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['preds', 'mask'], {}), '(preds, mask)\n', (20249, 20262), True, 'import tensorflow as tf\n'), ((20299, 20327), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['probs', 'mask'], {}), '(probs, mask)\n', (20314, 20327), True, 'import tensorflow as tf\n'), ((20365, 20394), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['target', 'mask'], {}), '(target, mask)\n', (20380, 20394), True, 'import tensorflow as tf\n'), ((20511, 20660), 'utility.standardize_ati', 'utility.standardize_ati', (['preds_masked', "self.t_params['normalization_shift']['rain']", "self.t_params['normalization_scales']['rain']"], {'reverse': '(True)'}), "(preds_masked, self.t_params['normalization_shift'][\n 'rain'], self.t_params['normalization_scales']['rain'], 
reverse=True)\n", (20534, 20660), False, 'import utility\n'), ((20936, 20975), 'tensorflow.where', 'tf.where', (['(target_masked > 0.0)', '(1.0)', '(0.0)'], {}), '(target_masked > 0.0, 1.0, 0.0)\n', (20944, 20975), True, 'import tensorflow as tf\n'), ((21054, 21093), 'tensorflow.size', 'tf.size', (['labels_true'], {'out_type': 'tf.int64'}), '(labels_true, out_type=tf.int64)\n', (21061, 21093), True, 'import tensorflow as tf\n'), ((21519, 21565), 'custom_losses.mse', 'cl.mse', (['target_masked', 'preds_masked', 'all_count'], {}), '(target_masked, preds_masked, all_count)\n', (21525, 21565), True, 'import custom_losses as cl\n'), ((25387, 25442), 'custom_losses.cond_rain', 'cl.cond_rain', (['preds_masked', 'probs_masked'], {'threshold': '(0.5)'}), '(preds_masked, probs_masked, threshold=0.5)\n', (25399, 25442), True, 'import custom_losses as cl\n'), ((25556, 25642), 'tensorflow.keras.backend.binary_crossentropy', 'tf.keras.backend.binary_crossentropy', (['labels_true', 'labels_pred'], {'from_logits': '(False)'}), '(labels_true, labels_pred, from_logits=\n False)\n', (25592, 25642), True, 'import tensorflow as tf\n'), ((12506, 12517), 'time.time', 'time.time', ([], {}), '()\n', (12515, 12517), False, 'import time\n'), ((15155, 15166), 'time.time', 'time.time', ([], {}), '()\n', (15164, 15166), False, 'import time\n'), ((16226, 16237), 'time.time', 'time.time', ([], {}), '()\n', (16235, 16237), False, 'import time\n'), ((21241, 21296), 'custom_losses.cond_rain', 'cl.cond_rain', (['preds_masked', 'probs_masked'], {'threshold': '(0.5)'}), '(preds_masked, probs_masked, threshold=0.5)\n', (21253, 21296), True, 'import custom_losses as cl\n'), ((21625, 21711), 'tensorflow.keras.backend.binary_crossentropy', 'tf.keras.backend.binary_crossentropy', (['labels_true', 'labels_pred'], {'from_logits': '(False)'}), '(labels_true, labels_pred, from_logits=\n False)\n', (21661, 21711), True, 'import tensorflow as tf\n')] |
#! /usr/bin/env python
#
# BitBake Toaster functional tests implementation
#
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import time
import re
from tests.functional.functional_helpers import SeleniumFunctionalTestCase
from orm.models import Project
class FuntionalTestBasic(SeleniumFunctionalTestCase):
    """Basic functional smoke tests for the Toaster web UI.

    Drives a browser through project creation and the main project
    pages, checking that each page loads and exposes the expected
    elements.  Fix applied: the original bare ``except:`` clauses also
    swallowed ``KeyboardInterrupt``/``SystemExit``; they are narrowed
    to ``except Exception:`` (test-assertion failures inside the try
    blocks are still converted into the explicit ``self.fail`` calls).
    """

#   testcase (1514)
    def test_create_slenium_project(self):
        """Create 'selenium-project' and verify notification plus DB record."""
        project_name = 'selenium-project'
        self.get('')
        self.driver.find_element_by_link_text("To start building, create your first Toaster project").click()
        self.driver.find_element_by_id("new-project-name").send_keys(project_name)
        self.driver.find_element_by_id('projectversion').click()
        self.driver.find_element_by_id("create-project-button").click()
        element = self.wait_until_visible('#project-created-notification')
        self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown')
        self.assertTrue(project_name in element.text,
                        "New project name not in new project notification")
        self.assertTrue(Project.objects.filter(name=project_name).count(),
                        "New project not found in database")

#   testcase (1515)
    def test_verify_left_bar_menu(self):
        """Verify every tab of the left configuration menu loads its content."""
        self.get('')
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist')
        project_URL=self.get_URL()
        self.driver.find_element_by_xpath('//a[@href="'+project_URL+'"]').click()
        try:
            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
            self.assertTrue(re.search("Custom images",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'Custom images information is not loading properly')
        except Exception:
            self.fail(msg='No Custom images tab available')

        try:
            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
            self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
        except Exception:
            self.fail(msg='No Compatible image tab available')

        try:
            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
            self.assertTrue(re.search("Compatible software recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
        except Exception:
            self.fail(msg='No Compatible software recipe tab available')

        try:
            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
            self.assertTrue(re.search("Compatible machines",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
        except Exception:
            self.fail(msg='No Compatible machines tab available')

        try:
            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
            self.assertTrue(re.search("Compatible layers",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
        except Exception:
            self.fail(msg='No Compatible layers tab available')

        try:
            self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
            self.assertTrue(re.search("Bitbake variables",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
        except Exception:
            self.fail(msg='No Bitbake variables tab available')

#   testcase (1516)
    def test_review_configuration_information(self):
        """Review the project configuration page: machine section,
        most-built recipes, release title and default layers."""
        self.get('')
        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        project_URL=self.get_URL()
        try:
            self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
            self.assertTrue(re.search("qemux86",self.driver.find_element_by_xpath("//span[@id='project-machine-name']").text),'The machine type is not assigned')
            self.driver.find_element_by_xpath("//span[@id='change-machine-toggle']").click()
            self.wait_until_visible('#select-machine-form')
            self.wait_until_visible('#cancel-machine-change')
            self.driver.find_element_by_xpath("//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
        except Exception:
            self.fail(msg='The machine information is wrong in the configuration page')

        try:
            self.driver.find_element_by_id('no-most-built')
        except Exception:
            self.fail(msg='No Most built information in project detail page')

        try:
            self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_xpath("//span[@id='project-release-title']").text),'The project release is not defined')
        except Exception:
            self.fail(msg='No project release title information in project detail page')

        try:
            self.driver.find_element_by_xpath("//div[@id='layer-container']")
            self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
            layer_list = self.driver.find_element_by_id("layers-in-project-list")
            layers = layer_list.find_elements_by_tag_name("li")
            for layer in layers:
                if re.match ("openembedded-core",layer.text):
                    print ("openembedded-core layer is a default layer in the project configuration")
                elif re.match ("meta-poky",layer.text):
                    print ("meta-poky layer is a default layer in the project configuration")
                elif re.match ("meta-yocto-bsp",layer.text):
                    print ("meta-yocto-bsp is a default layer in the project configuratoin")
                else:
                    self.fail(msg='default layers are missing from the project configuration')
        except Exception:
            self.fail(msg='No Layer information in project detail page')

#   testcase (1517)
    def test_verify_machine_information(self):
        """Verify the machine section shows qemux86 and the
        change-machine dialog opens and cancels cleanly."""
        self.get('')
        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        try:
            self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
            self.assertTrue(re.search("qemux86",self.driver.find_element_by_id("project-machine-name").text),'The machine type is not assigned')
            self.driver.find_element_by_id("change-machine-toggle").click()
            self.wait_until_visible('#select-machine-form')
            self.wait_until_visible('#cancel-machine-change')
            self.driver.find_element_by_id("cancel-machine-change").click()
        except Exception:
            self.fail(msg='The machine information is wrong in the configuration page')

#   testcase (1518)
    def test_verify_most_built_recipes_information(self):
        """Verify the 'no builds yet' message and its link to the
        compatible image recipes page."""
        self.get('')
        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        project_URL=self.get_URL()
        try:
            self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element_by_id("no-most-built").text),'Default message of no builds is not present')
            self.driver.find_element_by_xpath("//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
            self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
        except Exception:
            self.fail(msg='No Most built information in project detail page')

#   testcase (1519)
    def test_verify_project_release_information(self):
        """Verify the project release title is displayed."""
        self.get('')
        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        try:
            self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_id("project-release-title").text),'The project release is not defined')
        except Exception:
            self.fail(msg='No project release title information in project detail page')

#   testcase (1520)
    def test_verify_layer_information(self):
        """Verify the layer section lists the three default layers and
        exposes the add/import layer controls."""
        self.get('')
        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        project_URL=self.get_URL()
        try:
            self.driver.find_element_by_xpath("//div[@id='layer-container']")
            self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
            layer_list = self.driver.find_element_by_id("layers-in-project-list")
            layers = layer_list.find_elements_by_tag_name("li")
            for layer in layers:
                if re.match ("openembedded-core",layer.text):
                    print ("openembedded-core layer is a default layer in the project configuration")
                elif re.match ("meta-poky",layer.text):
                    print ("meta-poky layer is a default layer in the project configuration")
                elif re.match ("meta-yocto-bsp",layer.text):
                    print ("meta-yocto-bsp is a default layer in the project configuratoin")
                else:
                    self.fail(msg='default layers are missing from the project configuration')
            self.driver.find_element_by_xpath("//input[@id='layer-add-input']")
            self.driver.find_element_by_xpath("//button[@id='add-layer-btn']")
            self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
            self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
        except Exception:
            self.fail(msg='No Layer information in project detail page')

#   testcase (1521)
    def test_verify_project_detail_links(self):
        """Verify the project top-bar tabs (Configuration, Builds,
        Import layer, New custom image) link to working pages."""
        self.get('')
        self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
        self.wait_until_visible('#projectstable')
        self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
        project_URL=self.get_URL()
        self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
        self.assertTrue(re.search("Configuration",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')

        try:
            self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
            self.assertTrue(re.search("Builds",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
            self.driver.find_element_by_xpath("//div[@id='empty-state-projectbuildstable']")
        except Exception:
            self.fail(msg='Builds tab information is not present')

        try:
            self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
            self.assertTrue(re.search("Import layer",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
            self.driver.find_element_by_xpath("//fieldset[@id='repo-select']")
            self.driver.find_element_by_xpath("//fieldset[@id='git-repo']")
        except Exception:
            self.fail(msg='Import layer tab not loading properly')

        try:
            self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
            self.assertTrue(re.search("New custom image",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
            self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element_by_xpath("//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
        except Exception:
            self.fail(msg='New custom image tab not loading properly')
| [
"re.match",
"orm.models.Project.objects.filter"
] | [((6244, 6285), 're.match', 're.match', (['"""openembedded-core"""', 'layer.text'], {}), "('openembedded-core', layer.text)\n", (6252, 6285), False, 'import re\n'), ((10418, 10459), 're.match', 're.match', (['"""openembedded-core"""', 'layer.text'], {}), "('openembedded-core', layer.text)\n", (10426, 10459), False, 'import re\n'), ((1150, 1191), 'orm.models.Project.objects.filter', 'Project.objects.filter', ([], {'name': 'project_name'}), '(name=project_name)\n', (1172, 1191), False, 'from orm.models import Project\n'), ((6409, 6442), 're.match', 're.match', (['"""meta-poky"""', 'layer.text'], {}), "('meta-poky', layer.text)\n", (6417, 6442), False, 'import re\n'), ((10583, 10616), 're.match', 're.match', (['"""meta-poky"""', 'layer.text'], {}), "('meta-poky', layer.text)\n", (10591, 10616), False, 'import re\n'), ((6556, 6594), 're.match', 're.match', (['"""meta-yocto-bsp"""', 'layer.text'], {}), "('meta-yocto-bsp', layer.text)\n", (6564, 6594), False, 'import re\n'), ((10730, 10768), 're.match', 're.match', (['"""meta-yocto-bsp"""', 'layer.text'], {}), "('meta-yocto-bsp', layer.text)\n", (10738, 10768), False, 'import re\n')] |
"""
This module contains functional for Child RP test items management.
Copyright (c) 2018 http://reportportal.io .
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from weakref import proxy
from reportportal_client.core.rp_requests import ItemStartRequest
from reportportal_client.items.rp_test_items.rp_base_test_item import \
RPBaseTestItem
class RPChildTestItem(RPBaseTestItem):
    """A Report Portal test item that is nested under a parent item."""

    def __init__(self, rp_url, session, api_version, project_name, parent_item,
                 item_name, item_type, launch_uuid, generated_id,
                 **kwargs):
        """Initialize the child item and register it with its parent.

        :param rp_url:       Report Portal URL
        :param session:      Session object
        :param api_version:  Report Portal API version
        :param project_name: Report Portal project name
        :param parent_item:  Parent test item (held via a weak proxy)
        :param item_name:    Test item name
        :param item_type:    Type of the test item. Allowable values:
                             "suite", "story", "test", "scenario", "step",
                             "before_class", "before_groups", "before_method",
                             "before_suite", "before_test", "after_class",
                             "after_groups", "after_method", "after_suite",
                             "after_test"
        :param launch_uuid:  Parent launch UUID
        :param generated_id: Id generated to speed up the client
        :param kwargs:       Dict of additional named parameters
        """
        super(RPChildTestItem, self).__init__(rp_url, session, api_version,
                                              project_name, item_name,
                                              item_type, launch_uuid,
                                              generated_id, **kwargs)
        # A weak proxy avoids a strong parent<->child reference cycle.
        self.parent_item = proxy(parent_item)
        self.parent_item.add_child_item(self)
        self.weight = self.parent_item.weight + 1

    def start(self, start_time):
        """Queue the HTTP request that starts this child test item.

        :param start_time: Test item start time
        """
        child_endpoint = (
            "{url}/{api_version}/{project_name}/item/{parent_uuid}"
            .format(url=self.rp_url,
                    api_version=self.api_version,
                    project_name=self.project_name,
                    parent_uuid=self.parent_item.uuid))
        self.add_request(child_endpoint, self.session.post, ItemStartRequest,
                         self.item_name, start_time, self.item_type,
                         self.launch_uuid, attributes=self.attributes,
                         code_ref=self.code_ref, description=self.description,
                         has_stats=self.has_stats, parameters=self.parameters,
                         retry=self.retry, uuid=self.uuid,
                         unique_id=self.unique_id)
"weakref.proxy"
] | [((2304, 2322), 'weakref.proxy', 'proxy', (['parent_item'], {}), '(parent_item)\n', (2309, 2322), False, 'from weakref import proxy\n')] |
#https://www.crummy.com/software/BeautifulSoup/bs4/doc/#strings-and-stripped-strings
# Demo fixture: the "three sisters" document from the BeautifulSoup docs.
html_doc = """<html><head><title>The Dormouse's story</title></head>
<body>
<p class="title"><b>The Dormouse's story</b></p>
<p class="story">Once upon a time there were three little sisters; and their names were
<a href="http://example.com/elsie" class="sister" id="link1">Elsie</a>,
<a href="http://example.com/lacie" class="sister" id="link2">Lacie</a> and
<a href="http://example.com/tillie" class="sister" id="link3">Tillie</a>
;and they lived at the bottom of a well.</p>
<p class="story">...</p>
"""
from bs4 import BeautifulSoup
# Parse with the stdlib html.parser and grab the last <a> tag (id="link3")
# as the anchor point for the navigation demos below.
soup = BeautifulSoup(html_doc, 'html.parser')
last_a_tag = soup.find("a", id="link3")
def going_back_forth_1():
    """Show sibling vs. element navigation around the last <a> tag."""
    tag = last_a_tag
    print('\n', 1, tag)
    print('\n', 2, tag.next_sibling)
    # The .next_element of the <a> tag -- the thing parsed immediately
    # after it -- is the word "Tillie", not the rest of the sentence.
    print('\n', 3, tag.next_element)
    print('\n', 4, tag.previous_element)
    print('\n', 5, tag.previous_element.next_element)
def going_back_forth_2():
    """Print the repr of every element parsed after the last <a> tag."""
    for el in last_a_tag.next_elements:
        print(repr(el))
#going_back_forth_1()
going_back_forth_2() | [
"bs4.BeautifulSoup"
] | [((632, 670), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html_doc', '"""html.parser"""'], {}), "(html_doc, 'html.parser')\n", (645, 670), False, 'from bs4 import BeautifulSoup\n')] |
import numpy as np
from numpy.random import seed
# Fix the global NumPy RNG so any stochastic behaviour is reproducible.
seed(1)
import pandas as pd
from math import sqrt
from sklearn.decomposition import PCA
######################################################################
# METRICS
######################################################################
def mse(y, y_hat):
    """Mean Squared Error.

    Measures prediction accuracy as the average of the squared
    deviations between predictions and true values over the series.

    y: numpy array
        actual test values
    y_hat: numpy array
        predicted values
    return: MSE
    """
    deviation = y - y_hat
    return np.mean(deviation ** 2)
def rmse(y, y_hat):
    """Root Mean Squared Error.

    The square root of the mean squared deviation between predictions
    and true values.  The result is in the same scale as the original
    series, so it is comparable across series only when they share a
    common scale.

    y: numpy array
        actual test values
    y_hat: numpy array
        predicted values
    return: RMSE
    """
    deviation = y - y_hat
    return sqrt(np.mean(deviation ** 2))
def mape(y, y_hat):
    """Mean Absolute Percentage Error.

    Relative prediction accuracy: the percentual deviation of the
    prediction from the true value, averaged over the series.

    y: numpy array
        actual test values
    y_hat: numpy array
        predicted values
    return: MAPE
    """
    relative_errors = np.abs(y - y_hat) / np.abs(y)
    return 100 * np.mean(relative_errors)
def smape(y, y_hat):
    """Symmetric Mean Absolute Percentage Error.

    Relative deviation of the prediction scaled by the sum of the
    absolute prediction and true value, averaged over the series.
    Bounded between 0% and 200%, which avoids the undefined cases of
    plain MAPE.

    y: numpy array
        actual test values
    y_hat: numpy array
        predicted values
    return: SMAPE
    """
    abs_errors = np.abs(y - y_hat)
    scales = np.abs(y) + np.abs(y_hat)
    return 200 * np.mean(abs_errors / scales)
def mase(y, y_hat, y_train, seasonality=1):
    """M4 Mean Absolute Scaled Error.

    Compares the mean absolute error of the prediction against the
    mean absolute error of the seasonal naive model on the training
    series.

    y: numpy array
        actual test values
    y_hat: numpy array
        predicted values
    y_train: numpy array
        actual train values used for the naive baseline
    seasonality: int
        main frequency of the time series
        (Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1)
    return: MASE
    """
    naive_errors = np.abs(y_train[seasonality:] - y_train[:-seasonality])
    scale = np.mean(naive_errors)
    ratio = np.mean(np.abs(y - y_hat)) / scale
    return 100 * ratio
def rmsse(y, y_hat, y_train, seasonality=1):
    """M5 Root Mean Squared Scaled Error.

    Compares the mean squared error of the prediction against the mean
    squared error of the seasonal naive model on the training series,
    then takes the square root.

    y: numpy array
        actual test values
    y_hat: numpy array of len h (forecasting horizon)
        predicted values
    y_train: numpy array
        actual train values used for the naive baseline
    seasonality: int
        main frequency of the time series
        (Hourly 24, Daily 7, Weekly 52, Monthly 12, Quarterly 4, Yearly 1)
    return: RMSSE
    """
    naive_sq_errors = np.square(y_train[seasonality:] - y_train[:-seasonality])
    scale = np.mean(naive_sq_errors)
    # the sibling mse() helper is inlined to keep this self-contained
    ratio = np.mean(np.square(y - y_hat)) / scale
    return 100 * sqrt(ratio)
def pinball_loss(y, y_hat, tau=0.5):
    """
    Calculates the Pinball Loss.
    The Pinball loss measures the deviation of a quantile forecast.
    By weighting the absolute deviation in a non symmetric way, the
    loss pays more attention to under or over estimation.
    A common value for tau is 0.5 for the deviation from the median.
    y: numpy array
        actual test values
    y_hat: numpy array of len h (forecasting horizon)
        predicted values
    tau: float
        Fixes the quantile against which the predictions are compared.
    return: pinball_loss
    """
    delta_y = y - y_hat
    pinball = np.maximum(tau * delta_y, (tau - 1) * delta_y)
    # BUG FIX: the original returned the function object `pinball_loss`
    # instead of the computed mean loss.
    return pinball.mean()
def evaluate_panel(y_test, y_hat, y_train,
                   metric, seasonality):
    """
    Calculates a specific metric for y and y_hat
    y_test: pandas df
        df with columns unique_id, ds, y
    y_hat: pandas df
        df with columns unique_id, ds, y_hat
    y_train: pandas df
        df with columns unique_id, ds, y (train)
        this is used in the scaled metrics
    seasonality: int
        main frequency of the time series
        Hourly 24, Daily 7, Weekly 52,
        Monthly 12, Quarterly 4, Yearly 1
    return: pandas Series of metric evaluations, one per unique_id
        in the panel data
    """
    # The metric's own function name decides whether the scaled-metric
    # signature (y_train + seasonality) is required below.
    metric_name = metric.__code__.co_name
    # NOTE(review): assumes the frames are indexed by a MultiIndex whose
    # first level is 'unique_id' -- confirm against the callers.
    uids = y_test.index.get_level_values('unique_id').unique()
    y_hat_uids = y_hat.index.get_level_values('unique_id').unique()
    assert len(y_test)==len(y_hat), "not same length"
    assert all(uids == y_hat_uids), "not same u_ids"
    idxs, evaluations = [], []
    for uid in uids:
        y_test_uid = y_test.loc[uid].values
        y_hat_uid = y_hat.loc[uid].values
        y_train_uid = y_train.loc[uid].y.values
        if metric_name in ['mase', 'rmsse']:
            # Scaled metrics additionally need the training series.
            evaluation_uid = metric(y=y_test_uid, y_hat=y_hat_uid,
                                    y_train=y_train_uid, seasonality=seasonality)
        else:
            evaluation_uid = metric(y=y_test_uid, y_hat=y_hat_uid)
        idxs.append(uid)
        evaluations.append(evaluation_uid)
    # Package per-series scores as a Series indexed by unique_id.
    idxs = pd.Index(idxs, name='unique_id')
    evaluations = pd.Series(evaluations, index=idxs)
    return evaluations
def compute_evaluations(y_test, y_hat, y_train, metrics, seasonality): #, progress_bar
    """
    Calculates all metrics for y and y_hat panel data,
    and creates a rank based on PCA dimensionality reduction.
    y_test: pandas df
        df with columns unique_id, ds, y
    y_hat: pandas df
        one column per model with that model's predictions
    y_train: pandas df
        df with columns unique_id, ds, y (train)
        this is used in the scaled metrics
    metrics: dict
        mapping of metric name -> metric function
        (the original docstring wrongly described this as a list of strings;
        the code iterates metrics.items() and calls the values)
    seasonality: int
        main frequency of the time series
        Hourly 24, Daily 7, Weekly 52,
        Monthly 12, Quarterly 4, Yearly 1
    return: pandas df with one row per model, one column per metric,
        plus a 'pca_rank' column (1 = best)
    """
    print("\n Evaluating models")
    evaluations = {}
    for metric_name, metric in metrics.items():
        print(metric_name)
        for col in y_hat.columns:
            mod_evaluation = evaluate_panel(y_test=y_test, y_hat=y_hat[col],
                                            y_train=y_train, metric=metric,
                                            seasonality=seasonality)
            mod_evaluation.name = y_hat[col].name
            # idiomatic accumulation (was: `if not (metric_name in
            # evaluations.keys())` followed by a manual branch)
            evaluations.setdefault(metric_name, []).append(mod_evaluation)
            #progress_bar['value']+=1
            #progress_bar.update()

    # Collapse per-series evaluations into one mean value per model.
    for metric_name in metrics:
        evaluations[metric_name] = pd.concat(evaluations[metric_name], axis=1)
        evaluations[metric_name] = evaluations[metric_name].mean(axis=0)
    evaluations = pd.DataFrame.from_dict(evaluations)

    # Rank models by their first principal component across all metrics.
    X = evaluations.values
    pca = PCA(n_components=1)
    # fit_transform() both fits and projects; the original additionally
    # called fit(X) first, fitting the model twice for no benefit.
    evaluations['pca_rank'] = pca.fit_transform(X)
    evaluations['pca_rank'] = evaluations['pca_rank'].rank(ascending=False)
    evaluations['pca_rank'] = evaluations['pca_rank'].astype(int)
    evaluations.sort_values(by='pca_rank', inplace=True)
    evaluations.reset_index(inplace=True)
    evaluations.rename(columns={'index': 'model'}, inplace=True)
    return evaluations
| [
"pandas.Series",
"numpy.abs",
"sklearn.decomposition.PCA",
"pandas.DataFrame.from_dict",
"numpy.square",
"pandas.Index",
"numpy.random.seed",
"numpy.maximum",
"pandas.concat"
] | [((49, 56), 'numpy.random.seed', 'seed', (['(1)'], {}), '(1)\n', (53, 56), False, 'from numpy.random import seed\n'), ((4958, 5004), 'numpy.maximum', 'np.maximum', (['(tau * delta_y)', '((tau - 1) * delta_y)'], {}), '(tau * delta_y, (tau - 1) * delta_y)\n', (4968, 5004), True, 'import numpy as np\n'), ((6497, 6529), 'pandas.Index', 'pd.Index', (['idxs'], {'name': '"""unique_id"""'}), "(idxs, name='unique_id')\n", (6505, 6529), True, 'import pandas as pd\n'), ((6548, 6582), 'pandas.Series', 'pd.Series', (['evaluations'], {'index': 'idxs'}), '(evaluations, index=idxs)\n', (6557, 6582), True, 'import pandas as pd\n'), ((8268, 8303), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['evaluations'], {}), '(evaluations)\n', (8290, 8303), True, 'import pandas as pd\n'), ((8361, 8380), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': '(1)'}), '(n_components=1)\n', (8364, 8380), False, 'from sklearn.decomposition import PCA\n'), ((717, 737), 'numpy.square', 'np.square', (['(y - y_hat)'], {}), '(y - y_hat)\n', (726, 737), True, 'import numpy as np\n'), ((4198, 4255), 'numpy.square', 'np.square', (['(y_train[seasonality:] - y_train[:-seasonality])'], {}), '(y_train[seasonality:] - y_train[:-seasonality])\n', (4207, 4255), True, 'import numpy as np\n'), ((8128, 8171), 'pandas.concat', 'pd.concat', (['evaluations[metric_name]'], {'axis': '(1)'}), '(evaluations[metric_name], axis=1)\n', (8137, 8171), True, 'import pandas as pd\n'), ((1360, 1380), 'numpy.square', 'np.square', (['(y - y_hat)'], {}), '(y - y_hat)\n', (1369, 1380), True, 'import numpy as np\n'), ((1852, 1869), 'numpy.abs', 'np.abs', (['(y - y_hat)'], {}), '(y - y_hat)\n', (1858, 1869), True, 'import numpy as np\n'), ((1872, 1881), 'numpy.abs', 'np.abs', (['y'], {}), '(y)\n', (1878, 1881), True, 'import numpy as np\n'), ((2606, 2623), 'numpy.abs', 'np.abs', (['(y - y_hat)'], {}), '(y - y_hat)\n', (2612, 2623), True, 'import numpy as np\n'), ((2627, 2636), 'numpy.abs', 'np.abs', (['y'], {}), 
'(y)\n', (2633, 2636), True, 'import numpy as np\n'), ((2639, 2652), 'numpy.abs', 'np.abs', (['y_hat'], {}), '(y_hat)\n', (2645, 2652), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import util
import newdb
from collections import Counter
from collections import deque
#Check if two rows have equal values in the keys attributes
def __compare(row1, row2, keys):
    """Return True when row1 and row2 agree on every attribute in keys."""
    return all(row1[key] == row2[key] for key in keys)
def __getNextRow(database,command,buffer=deque()):
    """Return the next row produced by `command`, fetching a fresh batch
    from the database only when the local buffer runs dry.

    NOTE: the mutable default `buffer=deque()` is deliberate and
    load-bearing — callers that omit `buffer` (see
    __getNextBatchFromHandler) share one module-lifetime deque, which is
    what carries already-fetched rows across successive calls.  Do not
    "fix" it to a None sentinel without rethinking that caching scheme.
    """
    if len(buffer)>0:
        # Serve from the cache first.
        return buffer.popleft()
    else:
        # Buffer exhausted: pull the next chunk of rows in one round trip.
        rows=database.execute(command).fetchall()
        if len(rows)==0:
            # No more rows at all for this command.
            return None
        else:
            buffer.extend(rows)
            return buffer.popleft()
#def __getNextBatchFromCursor(keys,cursor,buffer=[]):
def __getNextBatchFromHandler(keys,database,command,buffer=[]):
    """Return the next maximal run of consecutive rows that agree on all
    `keys` attributes, reading rows via __getNextRow (MySQL HANDLER).

    Returns None when the stream is exhausted, otherwise a non-empty list.

    NOTE: the mutable default `buffer=[]` is deliberate — the one row that
    belongs to the *next* group is parked in it between calls, and every
    caller in this module invokes this function without a buffer argument,
    so the shared default list is what preserves that row.
    """
    batch=[]
    # Start the new batch with any row left over from the previous call.
    batch.extend(buffer)
    del buffer[:]
    #row=cursor.fetchone()
    #row=database.execute(command).fetchone()
    row=__getNextRow(database, command)
    while row is not None:
        if len(batch)==0:
            # First row of the batch defines the group key values.
            batch.append(row)
            #row=cursor.fetchone()
            #row=database.execute(command).fetchone()
            row=__getNextRow(database, command)
        elif __compare(batch[-1], row, keys):
            # Row belongs to the current group.
            batch.append(row)
            #row=cursor.fetchone()
            #row=database.execute(command).fetchone()
            row=__getNextRow(database, command)
        else:
            # Row starts the next group: stash it for the next call.
            buffer.append(row)
            break
    if len(batch)==0:
        return None
    else:
        return batch
def __getNextBatchFromCursor(keys, cursor, buffer=None):
    """Return the next maximal run of consecutive rows from `cursor` that
    agree on all `keys` attributes.

    Callers that want the one look-ahead row preserved between calls must
    pass (and keep passing) the same `buffer` list — every caller in this
    module (see xrefsOK) already does so.

    Returns None when the cursor is exhausted, otherwise a non-empty list.
    """
    # Fixed mutable-default-argument antipattern: a fresh list is created
    # per call instead of one list shared by every default-arg invocation.
    if buffer is None:
        buffer = []
    batch = []
    # Start the new batch with any row left over from the previous call.
    batch.extend(buffer)
    del buffer[:]
    row = cursor.fetchone()
    while row is not None:
        if len(batch) == 0:
            # First row of the batch defines the group key values.
            batch.append(row)
            row = cursor.fetchone()
        elif __compare(batch[-1], row, keys):
            # Row belongs to the current group.
            batch.append(row)
            row = cursor.fetchone()
        else:
            # Row starts the next group: stash it for the next call.
            buffer.append(row)
            break
    if len(batch) == 0:
        return None
    return batch
#verify that a consecutive slice of rows are nice and consistent
def __verify(rows, start, end, data, warnings, stats):
    """Sanity-check the slice rows[start:end] of a timeline.

    Two passes: every row must satisfy born < died, and consecutive rows
    must neither overlap, leave a gap, nor repeat the same `data` values.
    Errors are always logged; warnings only when `warnings` is truthy.
    Counts are accumulated into the `stats` Counter.
    """
    # Pass 1: per-row lifetime sanity (born vs died).
    for idx in range(start, end):
        row = rows[idx]
        if row["born"] == row["died"]:
            if warnings:
                util.warning("born=died: {}".format(row))
            stats["row born=died"] += 1
            stats["warnings"] += 1
        elif row["died"] is not None and row["born"] > row["died"]:
            util.error("born>died: {}".format(row))
            stats["row born>died"] += 1
            stats["errors"] += 1
    # Pass 2: each row should be born exactly when its predecessor died.
    for idx in range(start + 1, end):
        prev = rows[idx - 1]
        curr = rows[idx]
        if prev["died"] is None or prev["died"] > curr["born"]:
            # Predecessor still alive (or died later): timelines overlap.
            util.error("overlapping rows: {} | {}".format(prev, curr))
            stats["overlapping row"] += 1
            stats["errors"] += 1
        elif prev["died"] < curr["born"]:
            # Hole between the two lifetimes.
            if warnings:
                util.warning("gap in the timeline: {} | {}".format(prev, curr))
            stats["gap"] += 1
            stats["warnings"] += 1
        elif util.dict_fields_eq(prev, curr, data):
            # Perfectly adjacent but carrying identical data values.
            if warnings:
                util.warning("consecutive rows with same data: {} | {}".format(prev, curr))
            stats["consecutive with same data"] += 1
            stats["warnings"] += 1
#verify that a consecutive slice of rows are nice and consistent
def __verifySymbols(rows,start,end,data,warnings,stats):
    """Sanity-check the slice rows[start:end] of a symbol timeline.

    Same checks as __verify, except that an overlap is downgraded to a
    "benign overlap" warning when the two rows are in different countries
    or refer to the same company (coid) — i.e. the same symbol legitimately
    pointing at related securities.  Counts accumulate into `stats`.
    """
    #check that born<died for each row. report rows with born=died and born>died
    for i in range(start,end):
        if rows[i]["born"]==rows[i]["died"]:
            if warnings: util.warning("born=died: {}".format(rows[i]))
            stats["row born=died"]=stats["row born=died"]+1
            stats["warnings"]=stats["warnings"]+1
        elif rows[i]["died"] is not None and rows[i]["born"]>rows[i]["died"]:
            util.error("born>died: {}".format(rows[i]))
            stats["row born>died"]=stats["row born>died"]+1
            stats["errors"]=stats["errors"]+1
    #check that each row was born at the point the other died
    #chck if consecutive rows have the same data
    for i in range(start+1,end):
        if rows[i-1]["died"] is None or rows[i-1]["died"]>rows[i]["born"]: #overlapping rows
            #do a more thorough check
            # Different country, or same company: overlap is acceptable.
            if rows[i-1]["country"]!=rows[i]["country"] or rows[i-1]["coid"]==rows[i]["coid"]:
                if warnings: util.warning("benign overlap: {} | {}".format(rows[i-1],rows[i]))
                stats["benign overlap"]=stats["benign overlap"]+1
                stats["warnings"]=stats["warnings"]+1
            else:
                util.error("overlapping rows: {} | {}".format(rows[i-1],rows[i]))
                stats["overlapping row"]=stats["overlapping row"]+1
                stats["errors"]=stats["errors"]+1
        elif rows[i-1]["died"]<rows[i]["born"]:
            if warnings: util.warning("gap in the timeline: {} | {}".format(rows[i-1],rows[i])) #timeline gap
            stats["gap"]=stats["gap"]+1
            stats["warnings"]=stats["warnings"]+1
        elif util.dict_fields_eq(rows[i-1], rows[i], data):
            if warnings: util.warning("consecutive rows with same data: {} | {}".format(rows[i-1],rows[i]))
            stats["consecutive with same data"]=stats["consecutive with same data"]+1
            stats["warnings"]=stats["warnings"]+1
def xrefsOK():
    """Run the two xref consistency passes and return True iff no errors.

    Pass 1 groups xref rows by (secid, xref_type, source) and verifies each
    secid's timeline of values.  Pass 2 joins xref with the stock table,
    groups by (value, xref_type, source) and verifies each symbol's
    timeline of secids, tolerating benign overlaps (see __verifySymbols).
    Warnings are counted but not printed (warnings flag is False).
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking xrefs based on SecIds")
    cursor=database.execute("SELECT * FROM {} ORDER BY source,secid,xref_type,born".format(database.XREF_TABLE))
    #database.execute("SELECT * FROM {} ORDER BY source,secid,xref_type,born".format("xref"))
    #cursor=database._curs
    # buffer holds the one look-ahead row between batch calls.
    buffer=[]
    keys=("secid","xref_type","source")
    stats=Counter()
    while True:
        batch=__getNextBatchFromCursor(keys, cursor, buffer)
        if batch is None:
            break
        __verify(batch, 0, len(batch), ("value",), False,stats)
    warnings=warnings+stats["warnings"]
    errors=errors+stats["errors"]
    util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
    del stats["warnings"]
    del stats["errors"]
    # Python 2 dict iteration; report the per-category counters.
    for k,v in stats.iteritems():
        util.info("{} = {}".format(k,v))
    ###################################
    util.info("\nChecking xrefs based on Values")
    cursor=database.execute("SELECT xf.secid,xf.xref_type,xf.value,xf.source,cs.coid,cs.issueid,cs.country,xf.born,xf.died FROM {} as xf, {} as cs WHERE xf.secid=cs.secid ORDER BY xf.source,xf.xref_type,xf.value,xf.born".format(database.XREF_TABLE,database.STOCK_TABLE))
    #database.execute("SELECT xf.secid,xf.xref_type,xf.value,xf.source,cs.coid,cs.issueid,cs.country,xf.born,xf.died FROM {} as xf, {} as cs WHERE xf.secid=cs.secid ORDER BY xf.source,xf.xref_type,xf.value,xf.born".format("xref","stock"))
    #cursor=database._curs
    buffer=[]
    keys=("value","xref_type","source")
    stats=Counter()
    while True:
        batch=__getNextBatchFromCursor(keys, cursor, buffer)
        if batch is None:
            break
        __verifySymbols(batch, 0, len(batch), ("secid",), False,stats)
    warnings=warnings+stats["warnings"]
    errors=errors+stats["errors"]
    util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
    del stats["warnings"]
    del stats["errors"]
    for k,v in stats.iteritems():
        util.info("{} = {}".format(k,v))
    # Warnings do not fail the check; only hard errors do.
    return (errors==0)
def attrOK(target,datatype,warn,stepSize=100000):
    """Verify a sec/co attribute table, returning (warnings, errors).

    target   -- "sec" or "co"; selects the table prefix and the id key.
    datatype -- table suffix (e.g. "n", "d", "s").
    warn     -- if True, print warnings in addition to counting them.
    stepSize -- rows fetched per HANDLER READ round trip.
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking "+target+database.ATTR_TABLE+datatype)
    database.execute("HANDLER {} OPEN AS foobar".format(target+database.ATTR_TABLE+datatype))
    command="HANDLER foobar READ `PRIMARY` NEXT LIMIT {}".format(stepSize)
    keys=(target+"id","type","date")
    stats=Counter()
    # FIX: close the HANDLER even if verification raises, so the server
    # side resource is never leaked.
    try:
        while True:
            batch=__getNextBatchFromHandler(keys, database, command)
            if batch is None:
                break
            __verify(batch, 0, len(batch), ("value","backfill"), warn,stats)
        warnings=warnings+stats["warnings"]
        errors=errors+stats["errors"]
        util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
        del stats["warnings"]
        del stats["errors"]
        for k,v in stats.iteritems():
            util.info("{} = {}".format(k,v))
    finally:
        database.execute("HANDLER foobar CLOSE")
    return (warnings,errors)
def splitOK(warn,stepSize=100000):
    """Verify the split table, returning (warnings, errors).

    warn     -- if True, print warnings in addition to counting them.
    stepSize -- rows fetched per HANDLER READ round trip.
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking "+database.SPLIT)
    database.execute("HANDLER {} OPEN AS foobar".format(database.SPLIT))
    command="HANDLER foobar READ `PRIMARY` NEXT LIMIT {}".format(stepSize)
    keys=("secid","date")
    stats=Counter()
    # FIX: close the HANDLER even if verification raises, so the server
    # side resource is never leaked.
    try:
        while True:
            batch=__getNextBatchFromHandler(keys, database, command)
            if batch is None:
                break
            __verify(batch, 0, len(batch), ("rate","backfill"), warn,stats)
        warnings=warnings+stats["warnings"]
        errors=errors+stats["errors"]
        util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
        del stats["warnings"]
        del stats["errors"]
        for k,v in stats.iteritems():
            util.info("{} = {}".format(k,v))
    finally:
        database.execute("HANDLER foobar CLOSE")
    return (warnings,errors)
def dividendOK(warn,stepSize=100000):
    """Verify the dividend table, returning (warnings, errors).

    warn     -- if True, print warnings in addition to counting them.
    stepSize -- rows fetched per HANDLER READ round trip.
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking "+database.DIVIDEND)
    database.execute("HANDLER {} OPEN AS foobar".format(database.DIVIDEND))
    command="HANDLER foobar READ `PRIMARY` NEXT LIMIT {}".format(stepSize)
    keys=("secid","date")
    stats=Counter()
    # FIX: close the HANDLER even if verification raises, so the server
    # side resource is never leaked.
    try:
        while True:
            batch=__getNextBatchFromHandler(keys, database, command)
            if batch is None:
                break
            __verify(batch, 0, len(batch), ("dividend","casheq","backfill","currency"), warn,stats)
        warnings=warnings+stats["warnings"]
        errors=errors+stats["errors"]
        util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
        del stats["warnings"]
        del stats["errors"]
        for k,v in stats.iteritems():
            util.info("{} = {}".format(k,v))
    finally:
        database.execute("HANDLER foobar CLOSE")
    return (warnings,errors)
def priceOK(warn,stepSize=100000):
    """Verify the full price table, returning (warnings, errors).

    warn     -- if True, print warnings in addition to counting them.
    stepSize -- rows fetched per HANDLER READ round trip.
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking "+database.PRICE_FULL_TABLE)
    database.execute("HANDLER {} OPEN AS foobar".format(database.PRICE_FULL_TABLE))
    command="HANDLER foobar READ `PRIMARY` NEXT LIMIT {}".format(stepSize)
    keys=("secid","date")
    stats=Counter()
    # FIX: close the HANDLER even if verification raises, so the server
    # side resource is never leaked.
    try:
        while True:
            batch=__getNextBatchFromHandler(keys, database, command)
            if batch is None:
                break
            __verify(batch, 0, len(batch), ("open","high","low","close","volume","adj","adrrc","cond","backfill","currency"), warn,stats)
        warnings=warnings+stats["warnings"]
        errors=errors+stats["errors"]
        util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
        del stats["warnings"]
        del stats["errors"]
        for k,v in stats.iteritems():
            util.info("{} = {}".format(k,v))
    finally:
        database.execute("HANDLER foobar CLOSE")
    return (warnings,errors)
def actualsOK(datatype,warn,stepSize=100000):
    """Verify a company-actuals table, returning (warnings, errors).

    datatype -- table suffix (e.g. "n", "d").
    warn     -- if True, print warnings in addition to counting them.
    stepSize -- rows fetched per HANDLER READ round trip.
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking "+database.CO_ACTUALS+datatype)
    database.execute("HANDLER {} OPEN AS foobar".format(database.CO_ACTUALS+datatype))
    command="HANDLER foobar READ `PRIMARY` NEXT LIMIT {}".format(stepSize)
    keys=("coid","type","date")
    stats=Counter()
    # FIX: close the HANDLER even if verification raises, so the server
    # side resource is never leaked.
    try:
        while True:
            batch=__getNextBatchFromHandler(keys, database, command)
            if batch is None:
                break
            __verify(batch, 0, len(batch), ("value","backfill","currency"), warn,stats)
        warnings=warnings+stats["warnings"]
        errors=errors+stats["errors"]
        util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
        del stats["warnings"]
        del stats["errors"]
        for k,v in stats.iteritems():
            util.info("{} = {}".format(k,v))
    finally:
        database.execute("HANDLER foobar CLOSE")
    return (warnings,errors)
def estimatesOK(datatype,warn,stepSize=100000):
    """Verify a company-estimates table, returning (warnings, errors).

    datatype -- table suffix (e.g. "n", "b").
    warn     -- if True, print warnings in addition to counting them.
    stepSize -- rows fetched per HANDLER READ round trip.
    """
    #get the database
    database = newdb.get_db()
    warnings=0
    errors=0
    ###############################
    util.info("\nChecking "+database.CO_ESTIMATES+datatype)
    database.execute("HANDLER {} OPEN AS foobar".format(database.CO_ESTIMATES+datatype))
    command="HANDLER foobar READ `PRIMARY` NEXT LIMIT {}".format(stepSize)
    keys=("coid","type","date","brokerid","orig")
    stats=Counter()
    # FIX: close the HANDLER even if verification raises, so the server
    # side resource is never leaked.
    try:
        while True:
            batch=__getNextBatchFromHandler(keys, database, command)
            if batch is None:
                break
            __verify(batch, 0, len(batch), ("value","backfill","currency"), warn,stats)
        warnings=warnings+stats["warnings"]
        errors=errors+stats["errors"]
        util.info("Errors={}, Warnings={}".format(stats["errors"],stats["warnings"]))
        del stats["warnings"]
        del stats["errors"]
        for k,v in stats.iteritems():
            util.info("{} = {}".format(k,v))
    finally:
        database.execute("HANDLER foobar CLOSE")
    return (warnings,errors)
#add command line options
if __name__ == "__main__":
    # Entry point: only initialises the database layer.  The commented-out
    # calls below are manual toggles — uncomment the check(s) to run.
    newdb.init_db()
    #dbLocal=db.get_db();
    #util.DEBUG=True
    #xrefsOK()
    #attrOK("sec","n",False)
    #attrOK("sec","d",False)
    #attrOK("sec","s",False)
    #attrOK("co","n",False)
    #attrOK("co","d",False)
    #attrOK("co","s",False)
    #dividendOK(False)
    #priceOK(False)
    #splitOK(False)
    #estimatesOK("n", False)
    #estimatesOK("b", False)
    #actualsOK("n", False)
    #actualsOK("d", False)
"newdb.init_db",
"collections.deque",
"newdb.get_db",
"collections.Counter",
"util.dict_fields_eq",
"util.info"
] | [((372, 379), 'collections.deque', 'deque', ([], {}), '()\n', (377, 379), False, 'from collections import deque\n'), ((6104, 6118), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (6116, 6118), False, 'import newdb\n'), ((6206, 6254), 'util.info', 'util.info', (['"""\nChecking xrefs based on SecIds"""'], {}), '("""\nChecking xrefs based on SecIds""")\n', (6215, 6254), False, 'import util\n'), ((6554, 6563), 'collections.Counter', 'Counter', ([], {}), '()\n', (6561, 6563), False, 'from collections import Counter\n'), ((7089, 7137), 'util.info', 'util.info', (['"""\nChecking xrefs based on Values"""'], {}), '("""\nChecking xrefs based on Values""")\n', (7098, 7137), False, 'import util\n'), ((7736, 7745), 'collections.Counter', 'Counter', ([], {}), '()\n', (7743, 7745), False, 'from collections import Counter\n'), ((8340, 8354), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (8352, 8354), False, 'import newdb\n'), ((8433, 8499), 'util.info', 'util.info', (["('\\nChecking ' + target + database.ATTR_TABLE + datatype)"], {}), "('\\nChecking ' + target + database.ATTR_TABLE + datatype)\n", (8442, 8499), False, 'import util\n'), ((8710, 8719), 'collections.Counter', 'Counter', ([], {}), '()\n', (8717, 8719), False, 'from collections import Counter\n'), ((9381, 9395), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (9393, 9395), False, 'import newdb\n'), ((9474, 9515), 'util.info', 'util.info', (["('\\nChecking ' + database.SPLIT)"], {}), "('\\nChecking ' + database.SPLIT)\n", (9483, 9515), False, 'import util\n'), ((9698, 9707), 'collections.Counter', 'Counter', ([], {}), '()\n', (9705, 9707), False, 'from collections import Counter\n'), ((10371, 10385), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (10383, 10385), False, 'import newdb\n'), ((10464, 10508), 'util.info', 'util.info', (["('\\nChecking ' + database.DIVIDEND)"], {}), "('\\nChecking ' + database.DIVIDEND)\n", (10473, 10508), False, 'import util\n'), ((10694, 10703), 'collections.Counter', 
'Counter', ([], {}), '()\n', (10701, 10703), False, 'from collections import Counter\n'), ((11388, 11402), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (11400, 11402), False, 'import newdb\n'), ((11481, 11533), 'util.info', 'util.info', (["('\\nChecking ' + database.PRICE_FULL_TABLE)"], {}), "('\\nChecking ' + database.PRICE_FULL_TABLE)\n", (11490, 11533), False, 'import util\n'), ((11727, 11736), 'collections.Counter', 'Counter', ([], {}), '()\n', (11734, 11736), False, 'from collections import Counter\n'), ((12470, 12484), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (12482, 12484), False, 'import newdb\n'), ((12563, 12620), 'util.info', 'util.info', (["('\\nChecking ' + database.CO_ACTUALS + datatype)"], {}), "('\\nChecking ' + database.CO_ACTUALS + datatype)\n", (12572, 12620), False, 'import util\n'), ((12821, 12830), 'collections.Counter', 'Counter', ([], {}), '()\n', (12828, 12830), False, 'from collections import Counter\n'), ((13520, 13534), 'newdb.get_db', 'newdb.get_db', ([], {}), '()\n', (13532, 13534), False, 'import newdb\n'), ((13613, 13672), 'util.info', 'util.info', (["('\\nChecking ' + database.CO_ESTIMATES + datatype)"], {}), "('\\nChecking ' + database.CO_ESTIMATES + datatype)\n", (13622, 13672), False, 'import util\n'), ((13893, 13902), 'collections.Counter', 'Counter', ([], {}), '()\n', (13900, 13902), False, 'from collections import Counter\n'), ((14560, 14575), 'newdb.init_db', 'newdb.init_db', ([], {}), '()\n', (14573, 14575), False, 'import newdb\n'), ((3692, 3739), 'util.dict_fields_eq', 'util.dict_fields_eq', (['rows[i - 1]', 'rows[i]', 'data'], {}), '(rows[i - 1], rows[i], data)\n', (3711, 3739), False, 'import util\n'), ((5760, 5807), 'util.dict_fields_eq', 'util.dict_fields_eq', (['rows[i - 1]', 'rows[i]', 'data'], {}), '(rows[i - 1], rows[i], data)\n', (5779, 5807), False, 'import util\n')] |
# Generated by Django 2.2.10 on 2020-03-19 08:40
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: alters Problem.rule's default and
    # drops the ProblemRuleInfo / RuleInfo models.

    dependencies = [
        ('onepanman_api', '0013_auto_20200319_1714'),
    ]

    operations = [
        migrations.AlterField(
            model_name='problem',
            name='rule',
            # NOTE(review): the default below is not valid JSON (every value
            # is missing) — presumably a fill-in template; confirm this is
            # intentional before relying on it at runtime.
            field=models.TextField(db_column='RULE', default='{"obj_num": ,"placement" : , "action" : , "ending": ,}', verbose_name='문제 규칙'),
        ),
        migrations.DeleteModel(
            name='ProblemRuleInfo',
        ),
        migrations.DeleteModel(
            name='RuleInfo',
        ),
    ]
| [
"django.db.migrations.DeleteModel",
"django.db.models.TextField"
] | [((485, 531), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""ProblemRuleInfo"""'}), "(name='ProblemRuleInfo')\n", (507, 531), False, 'from django.db import migrations, models\n'), ((564, 603), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""RuleInfo"""'}), "(name='RuleInfo')\n", (586, 603), False, 'from django.db import migrations, models\n'), ((342, 474), 'django.db.models.TextField', 'models.TextField', ([], {'db_column': '"""RULE"""', 'default': '"""{"obj_num": ,"placement" : , "action" : , "ending": ,}"""', 'verbose_name': '"""문제 규칙"""'}), '(db_column=\'RULE\', default=\n \'{"obj_num": ,"placement" : , "action" : , "ending": ,}\', verbose_name=\n \'문제 규칙\')\n', (358, 474), False, 'from django.db import migrations, models\n')] |
import os
import pandas as pd
from datetime import datetime
from preprocessing_service import PreProcessingService
class PreProcessing(PreProcessingService):
    """Combine and clean the scraped ENTSO-E CSV files into one dataset.

    The first CSV in ``download/`` becomes the base frame held in
    ``_preProcessing``; every further CSV is loaded into ``_preProcessing2``,
    normalised, and concatenated onto the base frame column-wise.

    Attributes
    ----------
    _preProcessing : PreProcessingService
        Holds the base CSV and accumulates the merged dataset.
    _preProcessing2 : PreProcessingService
        Scratch instance used to load each subsequent CSV.

    Methods
    -------
    find_time_column(df)
        Find the timestamp column for a given CSV file.
    start_preprocess()
        Run the whole pre-processing pipeline and write the final CSV.
    """

    def __init__(self):
        super().__init__()
        self._preProcessing = PreProcessingService()
        self._preProcessing2 = PreProcessingService()

    @staticmethod
    def find_time_column(df):
        """Return the name of the first column whose first cell parses as a
        ``dd.mm.YYYY`` date.

        Parameters
        ----------
        df : PreProcessingService
            Wrapper whose ``df`` attribute is the loaded DataFrame.

        Raises
        ------
        ValueError
            If no timestamp column is found.
        """
        for column in df.get_columns():
            try:
                # Only the leading "dd.mm.YYYY" part of the cell is parsed.
                datetime.strptime(df.df[column][0][:10], "%d.%m.%Y")
                return column
            except (ValueError, TypeError):
                # Narrowed from a bare `except:` — strptime raises ValueError
                # on malformed dates and TypeError on non-string cells.
                continue
        raise ValueError("Timestamp column not found!")

    @staticmethod
    def last_date_for_column(df, column_name):
        """Return the index label of the last non-NaN entry in *column_name*."""
        return df[column_name].dropna().index[-1:][0]

    def start_preprocess(self):
        """Run the pre-processing pipeline.

        Loads every CSV in ``download/``, aligns them on an hourly
        ``cet_timestamp`` index, fills gaps, and writes the result to
        ``download/final_dataset_kafka.csv``.
        """
        # Get all the CSV files in the directory
        file_directory = os.getcwd() + "/download/"
        file_names = os.listdir(file_directory)
        actual_columns = ['Solar - Actual Aggregated [MW]',
                          'Wind Offshore - Actual Aggregated [MW]',
                          'Wind Onshore - Actual Aggregated [MW]',
                          'Actual Total Load [MW] - BZN|DE-LU']
        forecast_columns = ['Generation - Solar [MW] Day Ahead/ BZN|DE-LU',
                            'Generation - Wind Offshore [MW] Day Ahead/ BZN|DE-LU',
                            'Generation - Wind Onshore [MW] Day Ahead/ BZN|DE-LU',
                            'Day-ahead Total Load Forecast [MW] - BZN|DE-LU']
        filter_columns = ['Day-ahead Price [EUR/MWh]',
                          'Generation - Solar [MW] Day Ahead/ BZN|DE-LU',
                          'Generation - Wind Offshore [MW] Day Ahead/ BZN|DE-LU',
                          'Generation - Wind Onshore [MW] Day Ahead/ BZN|DE-LU',
                          'Solar - Actual Aggregated [MW]',
                          'Wind Offshore - Actual Aggregated [MW]',
                          'Wind Onshore - Actual Aggregated [MW]',
                          'Day-ahead Total Load Forecast [MW] - BZN|DE-LU',
                          'Actual Total Load [MW] - BZN|DE-LU']
        for i, file_name in enumerate(file_names):
            # First CSV is used as the main file to combine the rest of the files
            if i == 0:
                self._preProcessing.open_csv(file_directory + file_name)
                # Find time column and set it as index
                time_column = self.find_time_column(self._preProcessing)
                self._preProcessing.reformat_time(colname=time_column)
                self._preProcessing.rename_colname(time_column, "cet_timestamp")
                self._preProcessing.set_index(colname="cet_timestamp")
                # Convert arguments (non numeric values) to invalid NaN
                for j in self._preProcessing.get_columns():
                    self._preProcessing.df[j] = pd.to_numeric(self._preProcessing.df[j], errors='coerce')
                # Drop first duplicate
                self._preProcessing.drop_duplicates(keep="first", inplace=True)
                # Resample the quarterly values in the 'Total Load' and
                # 'Actual Generation' files to hourly.
                # BUG FIX: `("Actual" or "Load" or "Forecasts") in name`
                # evaluated to `"Actual" in name` only — now all three tokens
                # are tested.
                if any(token in file_name for token in ("Actual", "Load", "Forecasts")):
                    self._preProcessing = self._preProcessing.resample_rows()
            else:
                self._preProcessing2.open_csv(file_directory + file_name)
                # Find time column and set it as index
                time_column = self.find_time_column(self._preProcessing2)
                self._preProcessing2.reformat_time(colname=time_column)
                self._preProcessing2.rename_colname(time_column, "cet_timestamp")
                self._preProcessing2.set_index(colname="cet_timestamp")
                # Drop all the columns that are not numeric.  Materialize the
                # column collection first so we never mutate it mid-iteration.
                for column_index in list(self._preProcessing2.get_columns()):
                    try:
                        float(self._preProcessing2.df[column_index][0])
                    except (ValueError, TypeError):
                        # Narrowed from a bare `except:` — float() raises
                        # ValueError/TypeError on non-numeric cells.
                        self._preProcessing2.df.drop(columns=column_index, inplace=True)
                        continue
                # Convert the non-numeric items to NaNs.
                # If 'coerce', then invalid parsing will be set as NaN.
                for column_name in self._preProcessing2.get_columns():
                    self._preProcessing2.df[column_name] = pd.to_numeric(self._preProcessing2.df[column_name],
                                                                          errors='coerce')
                # To resample quarter hour to hour if present
                self._preProcessing2 = self._preProcessing2.resample_rows()
                # concatenate all the columns in one single dataset
                print(self._preProcessing2.df)
                self._preProcessing.df = pd.concat([self._preProcessing.df, self._preProcessing2.df], axis=1,
                                                     sort=False)
        # Filter and drop all rows with four NaN values (to remove NaN values
        # beyond latest time of available day-ahead price)
        self._preProcessing.df = self._preProcessing.df[filter_columns].dropna(thresh=4, axis=0)
        print(self._preProcessing.df.columns)
        # Get three months of past observations (2160 hours = 90 days)
        if len(self._preProcessing.df) >= 2160:
            self._preProcessing.df = self._preProcessing.df[-2160:]
        # The missing values in the Actuals can be replaced by the generation
        # forecast: from the last date with an actual value onwards, copy the
        # corresponding forecast column.
        for column_name, forecast_column_name in zip(actual_columns, forecast_columns):
            last_date = self.last_date_for_column(self._preProcessing.df, column_name)
            # .loc avoids pandas chained assignment, which can silently write
            # to a temporary copy instead of the frame itself.
            self._preProcessing.df.loc[last_date:, column_name] = \
                self._preProcessing.df.loc[last_date:, forecast_column_name].values
        # Generate and add 'WeekDays' feature
        week_day_col = self._preProcessing.df.index.weekday
        self._preProcessing.df["WeekDays"] = week_day_col
        # Imputation through an interpolation method called 'cubic' to fulfill
        # forecasting method criteria
        self._preProcessing = self._preProcessing.interpolate_ts()
        # Remove forecast columns, since they are not needed anymore
        self._preProcessing.remove_columns(cols=['Generation - Solar [MW] Day Ahead/ BZN|DE-LU',
                                                 'Generation - Wind Offshore [MW] Day Ahead/ BZN|DE-LU',
                                                 'Generation - Wind Onshore [MW] Day Ahead/ BZN|DE-LU',
                                                 'Day-ahead Total Load Forecast [MW] - BZN|DE-LU'])
        self._preProcessing.df.to_csv(os.getcwd() + "/download/final_dataset_kafka.csv")
| [
"os.listdir",
"datetime.datetime.strptime",
"preprocessing_service.PreProcessingService",
"os.getcwd",
"pandas.to_numeric",
"pandas.concat"
] | [((734, 756), 'preprocessing_service.PreProcessingService', 'PreProcessingService', ([], {}), '()\n', (754, 756), False, 'from preprocessing_service import PreProcessingService\n'), ((788, 810), 'preprocessing_service.PreProcessingService', 'PreProcessingService', ([], {}), '()\n', (808, 810), False, 'from preprocessing_service import PreProcessingService\n'), ((1819, 1845), 'os.listdir', 'os.listdir', (['file_directory'], {}), '(file_directory)\n', (1829, 1845), False, 'import os\n'), ((1771, 1782), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1780, 1782), False, 'import os\n'), ((1227, 1279), 'datetime.datetime.strptime', 'datetime.strptime', (['df.df[column][0][:10]', '"""%d.%m.%Y"""'], {}), "(df.df[column][0][:10], '%d.%m.%Y')\n", (1244, 1279), False, 'from datetime import datetime\n'), ((5807, 5892), 'pandas.concat', 'pd.concat', (['[self._preProcessing.df, self._preProcessing2.df]'], {'axis': '(1)', 'sort': '(False)'}), '([self._preProcessing.df, self._preProcessing2.df], axis=1, sort=False\n )\n', (5816, 5892), True, 'import pandas as pd\n'), ((7866, 7877), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7875, 7877), False, 'import os\n'), ((3845, 3902), 'pandas.to_numeric', 'pd.to_numeric', (['self._preProcessing.df[j]'], {'errors': '"""coerce"""'}), "(self._preProcessing.df[j], errors='coerce')\n", (3858, 3902), True, 'import pandas as pd\n'), ((5369, 5437), 'pandas.to_numeric', 'pd.to_numeric', (['self._preProcessing2.df[column_name]'], {'errors': '"""coerce"""'}), "(self._preProcessing2.df[column_name], errors='coerce')\n", (5382, 5437), True, 'import pandas as pd\n')] |
import logging
from typing import List
from typing import Dict
import line_data
import ean_data
from core.model.ptn import Stop
from core.util.constants import SECONDS_PER_MINUTE
from parameters import VSParameters
logger = logging.getLogger(__name__)
class VehicleSchedule:
    """Container for a vehicle schedule: the vehicles, the connections
    between line trips, and a per-line driving counter."""

    def __init__(self, line_pool: line_data.LinePool) -> None:
        """Initialise an empty schedule covering every line in *line_pool*."""
        self.max_vehicle_id = 0
        self.vehicles = []
        self.connections = []
        # One driving counter per directed line, all starting at zero.
        self.drivings_per_line = {
            line.get_directed_line_id(): 0 for line in line_pool.get_lines()
        }

    def add_connection(self, connection: "Connection") -> None:
        """Register a connection between two (period, line) trips."""
        self.connections.append(connection)

    def add_vehicle(self, vehicle: "Vehicle") -> None:
        """Register a vehicle and bump the running id counter."""
        self.vehicles.append(vehicle)
        self.max_vehicle_id += 1

    def get_connections(self) -> List["Connection"]:
        """Return all registered connections."""
        return self.connections

    def get_vehicles(self) -> List["Vehicle"]:
        """Return all registered vehicles."""
        return self.vehicles

    def add_driving(self, line: line_data.Line) -> None:
        """Increment the driving counter of *line*."""
        self.drivings_per_line[line.get_directed_line_id()] += 1

    def get_drivings(self, line: line_data.Line) -> int:
        """Return how many drivings are recorded for *line*."""
        return self.drivings_per_line[line.get_directed_line_id()]

    def get_max_vehicle_id(self) -> int:
        """Return the highest vehicle id handed out so far."""
        return self.max_vehicle_id

    def add_connections_from_ip_model(self,
                                      vehicle_connect: Dict[int, Dict[line_data.Line,
                                                                      Dict[int, Dict[line_data.Line, int]]]],
                                      p_max: int, line_pool: line_data.LinePool,
                                      vs_allow_empty_trips) -> None:
        """Translate the IP model's connection variables into Connection
        objects and mark every line as driven in all p_max periods."""
        periods = range(1, p_max + 1)
        lines = line_pool.get_lines()
        for p_1 in periods:
            for l_1 in lines:
                for p_2 in periods:
                    for l_2 in lines:
                        # Without empty trips, the second trip must start
                        # where the first one ended.
                        compatible = (vs_allow_empty_trips
                                      or l_1.get_last_stop() == l_2.get_first_stop())
                        if compatible and vehicle_connect[p_1][l_1][p_2][l_2] == 1:
                            self.add_connection(Connection(l_1, p_1, l_2, p_2))
        self.drivings_per_line.update(
            {line.get_directed_line_id(): p_max for line in lines}
        )
class Connection:
    """A link between two trips: line ``line_1`` driven in ``period_1``
    followed by line ``line_2`` driven in ``period_2``."""

    def __init__(self, line_1: line_data.Line, period_1: int, line_2: line_data.Line, period_2: int):
        """Store both (line, period) endpoints of the connection."""
        self.line_1 = line_1
        self.period_1 = period_1
        self.line_2 = line_2
        self.period_2 = period_2

    def get_line_1(self) -> line_data.Line:
        """Return the first (outgoing) line."""
        return self.line_1

    def get_line_2(self) -> line_data.Line:
        """Return the second (incoming) line."""
        return self.line_2

    def get_period_1(self) -> int:
        """Return the period of the first trip."""
        return self.period_1

    def get_period_2(self) -> int:
        """Return the period of the second trip."""
        return self.period_2

    def to_string(self) -> str:
        """Return a compact "(period,line)(period,line)" representation."""
        return "({0},{1})({2},{3})".format(
            self.period_1, self.line_1.to_string(),
            self.period_2, self.line_2.to_string())
class Vehicle:
    """A single vehicle: where it starts and the chain of connections it
    serves, tracked by its most recent (period, line)."""

    def __init__(self, vehicle_schedule: VehicleSchedule, start_period: int, start_line: line_data.Line) -> None:
        """Create a vehicle with the next free id from *vehicle_schedule*,
        starting on *start_line* in *start_period*."""
        self.vehicle_id = vehicle_schedule.get_max_vehicle_id() + 1
        self.connections = []
        self.start_period = start_period
        self.start_line = start_line
        # The "last" position initially coincides with the start.
        self.last_period = start_period
        self.last_line = start_line

    def add_connection(self, trip: "Connection") -> None:
        """Append a connection to this vehicle's chain."""
        self.connections.append(trip)

    def get_vehicle_id(self) -> int:
        """Return this vehicle's id."""
        return self.vehicle_id

    def get_connections(self) -> List[Connection]:
        """Return the connections served by this vehicle, in order."""
        return self.connections

    def get_start_line(self) -> line_data.Line:
        """Return the line the vehicle starts on."""
        return self.start_line

    def get_start_period(self) -> int:
        """Return the period the vehicle starts in."""
        return self.start_period

    def get_last_period(self) -> int:
        """Return the period of the vehicle's last assigned trip."""
        return self.last_period

    def get_last_line(self) -> line_data.Line:
        """Return the line of the vehicle's last assigned trip."""
        return self.last_line

    def find_all_connections(self, connections: List[Connection]) -> None:
        """Greedily chain connections onto this vehicle: repeatedly pick the
        first connection departing from the current (period, line) until no
        such connection remains."""
        extended = True
        while extended:
            extended = False
            for candidate in connections:
                if (candidate.get_period_1() == self.last_period
                        and candidate.get_line_1() == self.last_line):
                    self.add_connection(candidate)
                    self.last_period = candidate.get_period_2()
                    self.last_line = candidate.get_line_2()
                    extended = True
                    break
class Trip:
    """A single row of the vehicle-schedule output: either a passenger TRIP
    along a line, or an EMPTY repositioning trip between two lines."""

    def __init__(self) -> None:
        """Create an unset trip; -1 / "" / None mark not-yet-assigned fields."""
        self.circ_id = -1
        self.vehicle_id = -1
        self.trip_id = -1
        self.trip_type = ""
        self.start_event = None
        self.periodic_start_event = None
        self.start_station: Stop = None
        self.start_time = -1
        self.end_event = None
        self.periodic_end_event = None
        self.end_station: Stop = None
        self.end_time = -1
        self.line_id = ""

    # trip from line, period
    def trip(self, circ_id: int, trip_id: int, vehicle_id: int, line: line_data.Line, period: int, period_length: int,
             periodic_ean: "ean_data.Ean", aperiodic_ean: "ean_data.AperiodicEan",
             duration: Dict[line_data.Line, int]) -> None:
        """Fill this object as a passenger TRIP: drive `line` in `period`,
        from the line's first stop/event to its last stop/event."""
        self.circ_id = circ_id
        self.vehicle_id = vehicle_id
        self.trip_id = trip_id
        self.trip_type = "TRIP"
        self.start_event = aperiodic_ean.get_aperiodic_starting_event(periodic_ean, line, period, period_length)
        self.periodic_start_event = periodic_ean.get_first_event_in_line(line)
        self.start_station = line.get_first_stop()
        self.start_time = self.start_event.get_aperiodic_time_with_offset()
        self.end_event = aperiodic_ean.get_aperiodic_ending_event(periodic_ean, line, period, duration, period_length)
        self.periodic_end_event = periodic_ean.get_last_event_in_line(line)
        self.end_station = line.get_last_stop()
        self.end_time = self.end_event.get_aperiodic_time_with_offset()
        self.line_id = line.get_undirected_line_id()

    # empty trip from connection
    def empty_trip(self, circ_id: int, trip_id: int, vehicle_id: int, connection: Connection, period_length: int,
                   periodic_ean: "ean_data.Ean", aperiodic_ean: "ean_data.AperiodicEan",
                   duration: Dict[line_data.Line, int]) -> None:
        """Fill this object as an EMPTY trip along `connection`: from the end
        of its first line's trip to the start of its second line's trip.
        Empty trips carry line_id -1."""
        self.circ_id = circ_id
        self.vehicle_id = vehicle_id
        self.trip_id = trip_id
        self.trip_type = "EMPTY"
        self.start_event = aperiodic_ean.get_aperiodic_ending_event(periodic_ean, connection.get_line_1(),
                                                                    connection.get_period_1(), duration, period_length)
        self.periodic_start_event = periodic_ean.get_last_event_in_line(connection.get_line_1())
        self.start_station = connection.get_line_1().get_last_stop()
        self.start_time = self.start_event.get_aperiodic_time_with_offset()
        self.end_event = aperiodic_ean.get_aperiodic_starting_event(periodic_ean, connection.get_line_2(),
                                                                    connection.get_period_2(), period_length)
        self.periodic_end_event = periodic_ean.get_last_event_in_line(connection.get_line_2())
        self.end_station = connection.get_line_2().get_first_stop()
        self.end_time = self.end_event.get_aperiodic_time_with_offset()
        self.line_id = -1

    def set_end(self, circ_id: int, line: line_data.Line, period: int, period_length: int, periodic_ean: "ean_data.Ean",
                aperiodic_ean: "ean_data.AperiodicEan") -> None:
        """Set only the end half of an EMPTY trip: it ends where the trip of
        `line` in `period` starts."""
        self.circ_id = circ_id
        self.trip_type = "EMPTY"
        self.end_event = aperiodic_ean.get_aperiodic_starting_event(periodic_ean, line, period, period_length)
        self.periodic_end_event = periodic_ean.get_first_event_in_line(line)
        self.end_station = line.get_first_stop()
        self.end_time = self.end_event.get_aperiodic_time_with_offset()
        self.line_id = -1

    def set_start(self, trip_id: int, vehicle_id: int, line: line_data.Line, period: int, period_length: int,
                  periodic_ean: "ean_data.Ean", aperiodic_ean: "ean_data.AperiodicEan",
                  duration: Dict[line_data.Line, int]) -> None:
        """Set only the start half of a trip: it starts where the trip of
        `line` in `period` ends."""
        self.vehicle_id = vehicle_id
        self.trip_id = trip_id
        self.start_event = aperiodic_ean.get_aperiodic_ending_event(periodic_ean, line, period, duration, period_length)
        self.periodic_start_event = periodic_ean.get_last_event_in_line(line)
        self.start_station = line.get_last_stop()
        self.start_time = self.start_event.get_aperiodic_time_with_offset()

    # to csv
    def to_csv(self) -> str:
        """Serialize the full trip record; times are converted to seconds."""
        return '%d; %d; %d; %s; %d; %d; %d; %d; %d; %d; %d; %d; %s' % (
            self.circ_id, self.vehicle_id, self.trip_id, self.trip_type, self.start_event.get_event_id(),
            self.periodic_start_event.get_event_id(), self.start_station.getId(), self.start_time * SECONDS_PER_MINUTE,
            self.end_event.get_event_id(), self.periodic_end_event.get_event_id(), self.end_station.getId(),
            self.end_time * SECONDS_PER_MINUTE, self.line_id)

    def to_csv_trip(self) -> str:
        """Serialize the trip record without circulation/vehicle/trip ids."""
        return '%d; %d; %d; %d; %d; %d; %d; %d; %s' % (
            self.start_event.get_event_id(),
            self.periodic_start_event.get_event_id(), self.start_station.getId(), self.start_time * SECONDS_PER_MINUTE,
            self.end_event.get_event_id(), self.periodic_end_event.get_event_id(), self.end_station.getId(),
            self.end_time * SECONDS_PER_MINUTE, self.line_id)

    def to_csv_end_events(self) -> str:
        """Serialize only the aperiodic end-event id."""
        return '%d' % self.end_event.get_event_id()
def construct_vehicle_schedule_from_ip(line_pool: line_data.LinePool,
vehicle_connect: Dict[int, Dict[line_data.Line,
Dict[int, Dict[line_data.Line, int]]]],
parameters: VSParameters,
vehicle_from_depot: Dict[int, Dict[line_data.Line, int]],
allow_empty_trips: bool) -> VehicleSchedule:
logger.debug("Construct vehicle schedule")
vehicle_schedule = VehicleSchedule(line_pool)
vehicle_schedule.add_connections_from_ip_model(vehicle_connect, parameters.p_max, line_pool,
allow_empty_trips)
logger.debug("Add vehicles:")
for p_1 in range(1, parameters.p_max + 1):
for l_1 in line_pool.get_lines():
if vehicle_from_depot[p_1][l_1] == 1:
vehicle_schedule.add_vehicle(Vehicle(vehicle_schedule, p_1, l_1))
for vehicle in vehicle_schedule.get_vehicles():
vehicle.find_all_connections(vehicle_schedule.get_connections())
return vehicle_schedule | [
"logging.getLogger"
] | [((226, 253), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (243, 253), False, 'import logging\n')] |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for downloading and reading MNIST data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cv2
import numpy as np
from src.exp.bboxes import read_images
def read_batches_of_clips(paths, batch_size, num_frames_per_clip=16, stride=8, resize_size=112):
clips = []
names = []
for clip, name in read_clips(paths, num_frames_per_clip, stride, resize_size):
if len(clip) >= batch_size:
yield np.array(clips), names
clips.append(clip)
names.append(name)
def read_clips(paths, num_frames_per_clip, stride, resize_size):
assert stride < num_frames_per_clip, "stride < num_frames_per_clip"
for path in paths:
vc = cv2.VideoCapture(path)
clip = []
first_frame_n = 0
for frame in read_images(vc):
if len(clip) >= num_frames_per_clip:
yield np.array(clip).astype(np.float32), (path, first_frame_n)
clip = clip[:num_frames_per_clip - stride]
first_frame_n += stride
img = np.array(cv2.resize(np.array(frame), (resize_size, resize_size))).astype(np.float32)
clip.append(img)
# if piece of clip remains then pad it
if len(clip) > 0:
for frame in range(len(clip), num_frames_per_clip):
clip[frame] = clip[-1]
yield np.array(clip).astype(np.float32), (path, first_frame_n)
| [
"numpy.array",
"src.exp.bboxes.read_images",
"cv2.VideoCapture"
] | [((1432, 1454), 'cv2.VideoCapture', 'cv2.VideoCapture', (['path'], {}), '(path)\n', (1448, 1454), False, 'import cv2\n'), ((1520, 1535), 'src.exp.bboxes.read_images', 'read_images', (['vc'], {}), '(vc)\n', (1531, 1535), False, 'from src.exp.bboxes import read_images\n'), ((1180, 1195), 'numpy.array', 'np.array', (['clips'], {}), '(clips)\n', (1188, 1195), True, 'import numpy as np\n'), ((1802, 1817), 'numpy.array', 'np.array', (['frame'], {}), '(frame)\n', (1810, 1817), True, 'import numpy as np\n'), ((2090, 2104), 'numpy.array', 'np.array', (['clip'], {}), '(clip)\n', (2098, 2104), True, 'import numpy as np\n'), ((1608, 1622), 'numpy.array', 'np.array', (['clip'], {}), '(clip)\n', (1616, 1622), True, 'import numpy as np\n')] |
import torch
import torchvision
from torchvision import transforms
def load_mnist_dataset(train_batch_size, test_batch_size=1):
train_set = torchvision.datasets.MNIST(".", train=True, transform=transforms.Compose([transforms.ToTensor()]), download=True)
test_set = torchvision.datasets.MNIST(".", train=False, transform=transforms.Compose([transforms.ToTensor()]), download=True)
train_loader = torch.utils.data.DataLoader(train_set, batch_size=train_batch_size, shuffle=True)
test_loader = torch.utils.data.DataLoader(test_set, batch_size=test_batch_size, shuffle=False)
return train_loader, test_loader
| [
"torchvision.transforms.ToTensor",
"torch.utils.data.DataLoader"
] | [((409, 495), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_set'], {'batch_size': 'train_batch_size', 'shuffle': '(True)'}), '(train_set, batch_size=train_batch_size, shuffle\n =True)\n', (436, 495), False, 'import torch\n'), ((509, 594), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['test_set'], {'batch_size': 'test_batch_size', 'shuffle': '(False)'}), '(test_set, batch_size=test_batch_size, shuffle=False\n )\n', (536, 594), False, 'import torch\n'), ((220, 241), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (239, 241), False, 'from torchvision import transforms\n'), ((350, 371), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (369, 371), False, 'from torchvision import transforms\n')] |
"""Setup script for openodia
Referred: https://github.com/realpython/reader/blob/master/setup.py
https://realpython.com/pypi-publish-python-package/
"""
import os.path
from setuptools import find_packages, setup
# The directory containing this file
HERE = os.path.abspath(os.path.dirname(__file__))
# The text of the README file
with open(os.path.join(HERE, "README.md")) as fid:
README = fid.read()
# The text of the requirements file
with open(os.path.join(HERE, "requirements.txt")) as rt:
REQUIREMENTS = rt.readlines()
# This call to setup() does all the work
setup(
name="openodia",
version="0.1.11",
description="Open source Odia language tools",
long_description=README,
long_description_content_type="text/markdown",
url="https://github.com/soumendrak/openodia",
author="<NAME>",
author_email="<EMAIL>",
license="MIT",
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3 :: Only',
],
packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points={"console_scripts": ["openodia=openodia.__main__:main"]},
)
| [
"setuptools.find_packages"
] | [((1213, 1280), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', '*.tests', '*.tests.*', 'tests.*']"}), "(exclude=['tests', '*.tests', '*.tests.*', 'tests.*'])\n", (1226, 1280), False, 'from setuptools import find_packages, setup\n')] |
from __future__ import absolute_import, print_function, unicode_literals
import os
import shutil
import stat
import sys
import tempfile
from io import StringIO, open
from subprocess import list2cmdline
from textwrap import dedent
import ksconf.ext.six as six
from ksconf.__main__ import cli
from ksconf.conf.parser import (GLOBAL_STANZA, PARSECONF_MID, parse_conf,
parse_conf_stream, write_conf)
from ksconf.util.file import file_hash
from ksconf.vc.git import git_cmd
# Some unittest fixup for various python versions
import tests.compat as _ # noqa
del _
# What to export
__all__ = [
"static_data",
"ksconf_cli",
"TestWorkDir",
"FakeStdin",
"GLOBAL_STANZA",
"parse_conf",
"parse_string",
"write_conf",
"_debug_file",
]
def _debug_file(flag, fn): # pragma: no cover
""" Dump file contents with a message string to the output. For quick'n'dirty unittest
debugging only """
with open(fn) as fp:
content = fp.read()
length = len(content)
hash = file_hash(fn)
print("\n{flag} {fn} len={length} hash={hash} \n{content}".format(**vars()))
del flag, hash, length
def static_data(path):
""" Get paths to files under the 'tests/data/*' location """
# Assume "/" for path separation for simplicity; but ultimately OS independent
parts = path.split("/")
return os.path.abspath(os.path.join(os.path.dirname(__file__), "data", *parts))
def parse_string(text, profile=None, **kwargs):
text = dedent(text)
f = StringIO(text)
if profile:
return parse_conf(f, profile)
else:
return parse_conf_stream(f, **kwargs)
'''
# Let's try to avoid launching external processes (makes coverage more difficult, and so on)
def ksconf_exec(args):
args = list(args)
args.insert(0, "ksconf.py")
from subprocess import call
args = list(args)
if True: # Coverage enabled
args = ["coverage", "run", "-a" ] + args
rc = call(args)
return KsconfOutput(rc, ...)
'''
class _KsconfCli():
"""
CLI Wrapper context management class for unit testing;
USAGE: Use the ksconf_cli() singleton in a context (with)
Unfortunately, we have to redirect stdout/stderr while this runs, not
very clean, but we try to make it as safe as possible.
tmpfile: os.tmpfile, or StringIO?
"""
class KsconfOutput(object):
""" Container for the results from a KsconfCli call."""
__slots__ = ("returncode", "stdout", "stderr")
def __init__(self, *args):
self.returncode, self.stdout, self.stderr = args
def get_conf(self, profile=None, **kwargs):
""" Parse stdout as a .conf file"""
f = StringIO(self.stdout)
if profile:
return parse_conf(f, profile)
else:
return parse_conf_stream(f, **kwargs)
@staticmethod
def _as_string(stream):
stream.seek(0)
return stream.read()
def __call__(self, *args):
# In later versions of Python (3.4), something like this could be considered:
# from contextlib import redirect_stdout
self._last_args = args
_stdout, _stderr = (sys.stdout, sys.stderr)
try:
# Capture all output written to stdout/stderr
temp_stdout = sys.stdout = StringIO()
temp_stderr = sys.stderr = StringIO()
try:
rc = cli(args, _unittest=True)
except SystemExit as e: # pragma: no cover
if hasattr(e, "code"): # PY3
rc = e.code
else:
rc = e.message
finally:
# This next step MUST be done!
(sys.stdout, sys.stderr) = _stdout, _stderr
stdout = self._as_string(temp_stdout)
stderr = self._as_string(temp_stderr)
output = self.KsconfOutput(rc, stdout, stderr)
self._last_output = output
return output
def __enter__(self):
self._last_args = None
self._last_output = None
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Don't worry with coverage here. It gets plenty of testing DURING unittest development ;-)
if exc_type is not None: # pragma: no cover
sys.stderr.write("Exception while running: ksconf {0}\n".
format(list2cmdline(self._last_args)))
ko = self._last_output
if ko:
if ko.stdout:
sys.stderr.write("STDOUT:\n{0}\n".format(ko.stdout))
if ko.stderr:
sys.stderr.write("STDERR:\n{0}\n".format(ko.stderr))
# Re-raise exception
return False
ksconf_cli = _KsconfCli()
class FakeStdin(object):
def __init__(self, content):
if isinstance(content, six.string_types):
content = StringIO(content)
self.stream = content
def __enter__(self):
self._real_stdin = sys.stdin
sys.stdin = self.stream
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Don't worry with coverage here. It gets plenty of testing DURING unittest development ;-)
sys.stdin = self._real_stdin
if exc_type is not None: # pragma: no cover
# Re-raise exception
return False
class TestWorkDir(object):
""" Create a temporary working directory to create app-like structures and other supporting
file system artifacts necessary for many CLI tests. Cleanup is done automatically.
Can also be used as context manager (``with``) to temporarily change the directory and restore
the working directory upon completion.
"""
encoding = "utf-8"
def __init__(self, git_repo=False):
if git_repo:
self._path = tempfile.mkdtemp("-ksconftest-git")
self.git("init")
else:
self._path = tempfile.mkdtemp("-ksconftest")
self.git_repo = git_repo
self._working_dir = None
def __del__(self):
self.clean()
def __enter__(self):
self._working_dir = os.getcwd()
os.chdir(self._path)
def __exit__(self, exc_type, exc_val, exc_tb):
os.chdir(self._working_dir)
self._working_dir = None
def clean(self, force=False):
""" Explicitly clean/wipe the working directory. """
if not hasattr(self, "_path"):
return
if "KSCONF_KEEP_TEST_FILES" in os.environ and not force: # pragma: no cover
return
# Remove read-only file handler (e.g. clean .git/objects/xx/* files on Windows)
def del_rw(action, name, exc): # pragma: no cover (infrequently used)
# https://stackoverflow.com/a/21263493/315892
# Not checking for file vs dir, ...
os.chmod(name, stat.S_IWRITE)
os.remove(name)
del action, exc
shutil.rmtree(self._path, onerror=del_rw)
# Prevent the class from being used further
del self._path
def git(self, *args):
o = git_cmd(args, cwd=self._path)
if o.returncode != 0: # pragma: no cover
# Because, if we're using ksconf_cli, then we may be redirecting these...
stderr = sys.__stderr__
stderr.write("Git command 'git {0}' failed with exit code {1}\n{2}\n"
.format(" ".join(args), o.returncode, o.stderr))
raise RuntimeError("Failed git command (return code {0})".format(o.returncode))
def get_path(self, rel_path):
# Always using unix/URL style paths internally. But we want this to be OS agnostic
rel_parts = rel_path.split("/")
return os.path.join(self._path, *rel_parts)
def makedir(self, rel_path, path=None):
if path is None:
path = self.get_path(rel_path)
if not os.path.isdir(path):
os.makedirs(path)
return path
def write_file(self, rel_path, content):
path = self.get_path(rel_path)
self.makedir(None, path=os.path.dirname(path))
kw = {}
if isinstance(content, bytes):
kw["mode"] = "wb"
else:
kw["mode"] = "w"
kw["encoding"] = self.encoding
content = dedent(content)
with open(path, **kw) as stream:
stream.write(content)
return path
def read_file(self, rel_path, as_bytes=False):
path = self.get_path(rel_path)
kw = {}
if as_bytes:
kw["mode"] = "rb"
else:
kw["mode"] = "r"
kw["encoding"] = self.encoding
with open(path, **kw) as stream:
content = stream.read()
return content
def remove_file(self, rel_path):
path = self.get_path(rel_path)
os.unlink(path)
def write_conf(self, rel_path, conf):
path = self.get_path(rel_path)
self.makedir(None, path=os.path.dirname(path))
write_conf(path, conf)
return path
def read_conf(self, rel_path, profile=PARSECONF_MID):
path = self.get_path(rel_path)
return parse_conf(path, profile=profile)
def copy_static(self, static, rel_path):
src = static_data(static)
with open(src, "rb") as stream:
content = stream.read()
return self.write_file(rel_path, content)
| [
"io.open",
"os.remove",
"textwrap.dedent",
"os.chmod",
"ksconf.__main__.cli",
"os.path.isdir",
"os.unlink",
"io.StringIO",
"ksconf.conf.parser.parse_conf_stream",
"ksconf.util.file.file_hash",
"os.path.dirname",
"tempfile.mkdtemp",
"ksconf.conf.parser.parse_conf",
"ksconf.vc.git.git_cmd",
... | [((1051, 1064), 'ksconf.util.file.file_hash', 'file_hash', (['fn'], {}), '(fn)\n', (1060, 1064), False, 'from ksconf.util.file import file_hash\n'), ((1520, 1532), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (1526, 1532), False, 'from textwrap import dedent\n'), ((1541, 1555), 'io.StringIO', 'StringIO', (['text'], {}), '(text)\n', (1549, 1555), False, 'from io import StringIO, open\n'), ((970, 978), 'io.open', 'open', (['fn'], {}), '(fn)\n', (974, 978), False, 'from io import StringIO, open\n'), ((1587, 1609), 'ksconf.conf.parser.parse_conf', 'parse_conf', (['f', 'profile'], {}), '(f, profile)\n', (1597, 1609), False, 'from ksconf.conf.parser import GLOBAL_STANZA, PARSECONF_MID, parse_conf, parse_conf_stream, write_conf\n'), ((1635, 1665), 'ksconf.conf.parser.parse_conf_stream', 'parse_conf_stream', (['f'], {}), '(f, **kwargs)\n', (1652, 1665), False, 'from ksconf.conf.parser import GLOBAL_STANZA, PARSECONF_MID, parse_conf, parse_conf_stream, write_conf\n'), ((6171, 6182), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6180, 6182), False, 'import os\n'), ((6191, 6211), 'os.chdir', 'os.chdir', (['self._path'], {}), '(self._path)\n', (6199, 6211), False, 'import os\n'), ((6272, 6299), 'os.chdir', 'os.chdir', (['self._working_dir'], {}), '(self._working_dir)\n', (6280, 6299), False, 'import os\n'), ((6973, 7014), 'shutil.rmtree', 'shutil.rmtree', (['self._path'], {'onerror': 'del_rw'}), '(self._path, onerror=del_rw)\n', (6986, 7014), False, 'import shutil\n'), ((7129, 7158), 'ksconf.vc.git.git_cmd', 'git_cmd', (['args'], {'cwd': 'self._path'}), '(args, cwd=self._path)\n', (7136, 7158), False, 'from ksconf.vc.git import git_cmd\n'), ((7761, 7797), 'os.path.join', 'os.path.join', (['self._path', '*rel_parts'], {}), '(self._path, *rel_parts)\n', (7773, 7797), False, 'import os\n'), ((8870, 8885), 'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (8879, 8885), False, 'import os\n'), ((9031, 9053), 'ksconf.conf.parser.write_conf', 'write_conf', 
(['path', 'conf'], {}), '(path, conf)\n', (9041, 9053), False, 'from ksconf.conf.parser import GLOBAL_STANZA, PARSECONF_MID, parse_conf, parse_conf_stream, write_conf\n'), ((9187, 9220), 'ksconf.conf.parser.parse_conf', 'parse_conf', (['path'], {'profile': 'profile'}), '(path, profile=profile)\n', (9197, 9220), False, 'from ksconf.conf.parser import GLOBAL_STANZA, PARSECONF_MID, parse_conf, parse_conf_stream, write_conf\n'), ((1415, 1440), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1430, 1440), False, 'import os\n'), ((2740, 2761), 'io.StringIO', 'StringIO', (['self.stdout'], {}), '(self.stdout)\n', (2748, 2761), False, 'from io import StringIO, open\n'), ((3363, 3373), 'io.StringIO', 'StringIO', ([], {}), '()\n', (3371, 3373), False, 'from io import StringIO, open\n'), ((3413, 3423), 'io.StringIO', 'StringIO', ([], {}), '()\n', (3421, 3423), False, 'from io import StringIO, open\n'), ((4931, 4948), 'io.StringIO', 'StringIO', (['content'], {}), '(content)\n', (4939, 4948), False, 'from io import StringIO, open\n'), ((5870, 5905), 'tempfile.mkdtemp', 'tempfile.mkdtemp', (['"""-ksconftest-git"""'], {}), "('-ksconftest-git')\n", (5886, 5905), False, 'import tempfile\n'), ((5974, 6005), 'tempfile.mkdtemp', 'tempfile.mkdtemp', (['"""-ksconftest"""'], {}), "('-ksconftest')\n", (5990, 6005), False, 'import tempfile\n'), ((6878, 6907), 'os.chmod', 'os.chmod', (['name', 'stat.S_IWRITE'], {}), '(name, stat.S_IWRITE)\n', (6886, 6907), False, 'import os\n'), ((6920, 6935), 'os.remove', 'os.remove', (['name'], {}), '(name)\n', (6929, 6935), False, 'import os\n'), ((7926, 7945), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (7939, 7945), False, 'import os\n'), ((7959, 7976), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (7970, 7976), False, 'import os\n'), ((8330, 8345), 'textwrap.dedent', 'dedent', (['content'], {}), '(content)\n', (8336, 8345), False, 'from textwrap import dedent\n'), ((8359, 8375), 'io.open', 'open', 
(['path'], {}), '(path, **kw)\n', (8363, 8375), False, 'from io import StringIO, open\n'), ((8698, 8714), 'io.open', 'open', (['path'], {}), '(path, **kw)\n', (8702, 8714), False, 'from io import StringIO, open\n'), ((9314, 9329), 'io.open', 'open', (['src', '"""rb"""'], {}), "(src, 'rb')\n", (9318, 9329), False, 'from io import StringIO, open\n'), ((2809, 2831), 'ksconf.conf.parser.parse_conf', 'parse_conf', (['f', 'profile'], {}), '(f, profile)\n', (2819, 2831), False, 'from ksconf.conf.parser import GLOBAL_STANZA, PARSECONF_MID, parse_conf, parse_conf_stream, write_conf\n'), ((2873, 2903), 'ksconf.conf.parser.parse_conf_stream', 'parse_conf_stream', (['f'], {}), '(f, **kwargs)\n', (2890, 2903), False, 'from ksconf.conf.parser import GLOBAL_STANZA, PARSECONF_MID, parse_conf, parse_conf_stream, write_conf\n'), ((3462, 3487), 'ksconf.__main__.cli', 'cli', (['args'], {'_unittest': '(True)'}), '(args, _unittest=True)\n', (3465, 3487), False, 'from ksconf.__main__ import cli\n'), ((8114, 8135), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (8129, 8135), False, 'import os\n'), ((9000, 9021), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (9015, 9021), False, 'import os\n'), ((4421, 4450), 'subprocess.list2cmdline', 'list2cmdline', (['self._last_args'], {}), '(self._last_args)\n', (4433, 4450), False, 'from subprocess import list2cmdline\n')] |
from setuptools import setup
setup(name='reservoirlib',
version='0.1',
description='Python 3 library that provides utilities for creating and'
' training reservoir computers.',
author='<NAME>',
packages=['reservoirlib'],
url='https://github.com/Nathaniel-Rodriguez/reservoirlib.git',
install_requires=[
'numpy',
'scipy'
],
include_package_data=True)
| [
"setuptools.setup"
] | [((30, 364), 'setuptools.setup', 'setup', ([], {'name': '"""reservoirlib"""', 'version': '"""0.1"""', 'description': '"""Python 3 library that provides utilities for creating and training reservoir computers."""', 'author': '"""<NAME>"""', 'packages': "['reservoirlib']", 'url': '"""https://github.com/Nathaniel-Rodriguez/reservoirlib.git"""', 'install_requires': "['numpy', 'scipy']", 'include_package_data': '(True)'}), "(name='reservoirlib', version='0.1', description=\n 'Python 3 library that provides utilities for creating and training reservoir computers.'\n , author='<NAME>', packages=['reservoirlib'], url=\n 'https://github.com/Nathaniel-Rodriguez/reservoirlib.git',\n install_requires=['numpy', 'scipy'], include_package_data=True)\n", (35, 364), False, 'from setuptools import setup\n')] |
# todo: How to Select how many hidden layer and neurons in a neural network
# Importing the libraries
import pandas as pd
from keras.models import Sequential
from keras.layers import Dense, Activation
# Importing the dataset
dataset = pd.read_csv('Churn_Modelling.csv')
X = dataset.iloc[:, 3:13].values
y = dataset.iloc[:, 13].values
dataset.head()
# Encoding categorical data
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
labelencoder_X_1 = LabelEncoder()
X[:, 1] = labelencoder_X_1.fit_transform(X[:, 1])
labelencoder_X_2 = LabelEncoder()
X[:, 2] = labelencoder_X_2.fit_transform(X[:, 2])
onehotencoder = OneHotEncoder(categorical_features=[1])
X = onehotencoder.fit_transform(X).toarray()
X = X[:, 1:]
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV
def create_model(layers, activation):
model = Sequential()
for i, nodes in enumerate(layers):
if i == 0:
model.add(Dense(nodes, input_dim=X_train.shape[1]))
model.add(Activation(activation))
else:
model.add(Dense(nodes))
model.add(Activation(activation))
model.add(Dense(1)) # Note: no activation beyond this point
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
return model
model = KerasClassifier(build_fn=create_model, verbose=0)
print(model)
layers = [[20], [40, 20], [45, 30, 15]]
activations = ['sigmoid', 'relu']
param_grid = dict(layers=layers, activation=activations, batch_size=[128, 256], epochs=[30])
grid = GridSearchCV(estimator=model, param_grid=param_grid)
grid_result = grid.fit(X_train, y_train)
print([grid_result.best_score_, grid_result.best_params_])
pred_y = grid.predict(X_test)
y_pred = (pred_y > 0.5)
print(y_pred)
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
print(cm)
from sklearn.metrics import accuracy_score
score = accuracy_score(y_test, y_pred)
print(score)
| [
"sklearn.model_selection.GridSearchCV",
"sklearn.preprocessing.LabelEncoder",
"pandas.read_csv",
"keras.wrappers.scikit_learn.KerasClassifier",
"sklearn.model_selection.train_test_split",
"sklearn.preprocessing.OneHotEncoder",
"keras.models.Sequential",
"sklearn.preprocessing.StandardScaler",
"keras... | [((237, 271), 'pandas.read_csv', 'pd.read_csv', (['"""Churn_Modelling.csv"""'], {}), "('Churn_Modelling.csv')\n", (248, 271), True, 'import pandas as pd\n'), ((461, 475), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (473, 475), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((545, 559), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (557, 559), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((626, 665), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {'categorical_features': '[1]'}), '(categorical_features=[1])\n', (639, 665), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((872, 925), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(0)'}), '(X, y, test_size=0.2, random_state=0)\n', (888, 925), False, 'from sklearn.model_selection import train_test_split\n'), ((999, 1015), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1013, 1015), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1696, 1745), 'keras.wrappers.scikit_learn.KerasClassifier', 'KerasClassifier', ([], {'build_fn': 'create_model', 'verbose': '(0)'}), '(build_fn=create_model, verbose=0)\n', (1711, 1745), False, 'from keras.wrappers.scikit_learn import KerasClassifier\n'), ((1935, 1987), 'sklearn.model_selection.GridSearchCV', 'GridSearchCV', ([], {'estimator': 'model', 'param_grid': 'param_grid'}), '(estimator=model, param_grid=param_grid)\n', (1947, 1987), False, 'from sklearn.model_selection import GridSearchCV\n'), ((2210, 2242), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (2226, 2242), False, 'from sklearn.metrics import confusion_matrix\n'), ((2305, 2335), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (2319, 2335), 
False, 'from sklearn.metrics import accuracy_score\n'), ((1240, 1252), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1250, 1252), False, 'from keras.models import Sequential\n'), ((1531, 1539), 'keras.layers.Dense', 'Dense', (['(1)'], {}), '(1)\n', (1536, 1539), False, 'from keras.layers import Dense, Activation\n'), ((1333, 1373), 'keras.layers.Dense', 'Dense', (['nodes'], {'input_dim': 'X_train.shape[1]'}), '(nodes, input_dim=X_train.shape[1])\n', (1338, 1373), False, 'from keras.layers import Dense, Activation\n'), ((1397, 1419), 'keras.layers.Activation', 'Activation', (['activation'], {}), '(activation)\n', (1407, 1419), False, 'from keras.layers import Dense, Activation\n'), ((1457, 1469), 'keras.layers.Dense', 'Dense', (['nodes'], {}), '(nodes)\n', (1462, 1469), False, 'from keras.layers import Dense, Activation\n'), ((1493, 1515), 'keras.layers.Activation', 'Activation', (['activation'], {}), '(activation)\n', (1503, 1515), False, 'from keras.layers import Dense, Activation\n')] |
"""
This module is the computational part of the geometrical module of ToFu
"""
# Built-in
import sys
import warnings
# Common
import numpy as np
import scipy.interpolate as scpinterp
import scipy.integrate as scpintg
if sys.version[0]=='3':
from inspect import signature as insp
elif sys.version[0]=='2':
from inspect import getargspec as insp
# ToFu-specific
try:
import tofu.geom._def as _def
import tofu.geom._GG as _GG
except Exception:
from . import _def as _def
from . import _GG as _GG
"""
###############################################################################
###############################################################################
Ves functions
###############################################################################
"""
############################################
##### Ves sub-functions
############################################
def _Struct_set_Poly(Poly, pos=None, extent=None, arrayorder='C',
Type='Tor', Clock=False):
""" Compute geometrical attributes of a Struct object """
# Make Poly closed, counter-clockwise, with '(cc,N)' layout and arrayorder
Poly = _GG.Poly_Order(Poly, order='C', Clock=False,
close=True, layout='(cc,N)', Test=True)
assert Poly.shape[0]==2, "Arg Poly must be a 2D polygon !"
fPfmt = np.ascontiguousarray if arrayorder=='C' else np.asfortranarray
# Get all remarkable points and moments
NP = Poly.shape[1]-1
P1Max = Poly[:,np.argmax(Poly[0,:])]
P1Min = Poly[:,np.argmin(Poly[0,:])]
P2Max = Poly[:,np.argmax(Poly[1,:])]
P2Min = Poly[:,np.argmin(Poly[1,:])]
BaryP = np.sum(Poly[:,:-1],axis=1,keepdims=False)/(Poly.shape[1]-1)
BaryL = np.array([(P1Max[0]+P1Min[0])/2., (P2Max[1]+P2Min[1])/2.])
BaryS, Surf = _GG.poly_area_and_barycenter(Poly, NP)
# Get lim-related indicators
noccur = int(pos.size)
Multi = noccur>1
# Get Tor-related quantities
if Type.lower()=='lin':
Vol, BaryV = None, None
else:
Vol, BaryV = _GG.Poly_VolAngTor(Poly)
msg = "Pb. with volume computation for Ves object of type 'Tor' !"
assert Vol>0., msg
# Compute the non-normalized vector of each side of the Poly
Vect = np.diff(Poly,n=1,axis=1)
Vect = fPfmt(Vect)
# Compute the normalised vectors directed inwards
Vin = np.array([Vect[1,:],-Vect[0,:]])
Vin = -Vin # Poly is Counter Clock-wise as defined above
Vin = Vin/np.hypot(Vin[0,:],Vin[1,:])[np.newaxis,:]
Vin = fPfmt(Vin)
poly = _GG.Poly_Order(Poly, order=arrayorder, Clock=Clock,
close=False, layout='(cc,N)', Test=True)
# Get bounding circle
circC = BaryS
r = np.sqrt(np.sum((poly-circC[:,np.newaxis])**2,axis=0))
circr = np.max(r)
dout = {'Poly':poly, 'pos':pos, 'extent':extent,
'noccur':noccur, 'Multi':Multi, 'nP':NP,
'P1Max':P1Max, 'P1Min':P1Min, 'P2Max':P2Max, 'P2Min':P2Min,
'BaryP':BaryP, 'BaryL':BaryL, 'BaryS':BaryS, 'BaryV':BaryV,
'Surf':Surf, 'VolAng':Vol, 'Vect':Vect, 'VIn':Vin,
'circ-C':circC, 'circ-r':circr, 'Clock':Clock}
return dout
def _Ves_get_InsideConvexPoly(Poly, P2Min, P2Max, BaryS, RelOff=_def.TorRelOff, ZLim='Def', Spline=True, Splprms=_def.TorSplprms, NP=_def.TorInsideNP, Plot=False, Test=True):
    """ Return a polygon lying slightly inside the input (convex) polygon.

    Optionally clips the polygon to a Z window (ZLim), shrinks it towards
    the barycenter BaryS by a relative offset RelOff, and smooths it with a
    periodic parametric spline sampled on NP points.
    """
    if Test:
        assert type(RelOff) is float, "Arg RelOff must be a float"
        assert ZLim is None or ZLim=='Def' or type(ZLim) in [tuple,list], "Arg ZLim must be a tuple (ZlimMin, ZLimMax)"
        assert type(Spline) is bool, "Arg Spline must be a bool !"
    # Optional clipping of points outside the Z window
    if ZLim is not None:
        if ZLim == 'Def':
            # default window: 10% above the bottom, 5% below the top
            ZLim = (P2Min[1]+0.1*(P2Max[1]-P2Min[1]),
                    P2Max[1]-0.05*(P2Max[1]-P2Min[1]))
        outside = (Poly[1,:] < ZLim[0]) | (Poly[1,:] > ZLim[1])
        if Poly.shape[1] - outside.sum() < 10:
            # too few points would remain -> keep the polygon as-is
            warnings.warn("Poly seems to be Convex and simple enough !"
                          "\n  Poly.shape[1] - indZLim.sum() < 10")
            return Poly
        Poly = np.delete(Poly, outside.nonzero()[0], axis=1)
    # Drop the duplicated closing point if present
    if np.all(Poly[:,0] == Poly[:,-1]):
        Poly = Poly[:, :-1]
    npts = Poly.shape[1]
    if Spline:
        # Shrink towards the barycenter by the relative offset
        center = np.tile(BaryS, (npts, 1)).T
        shrunk = (1.-RelOff)*(Poly - center)
        # Sort the points by polar angle (unique also sorts)
        ang = np.arctan2(shrunk[1,:], shrunk[0,:])
        ang, iu = np.unique(ang, return_index=True)
        shrunk = shrunk[:, iu]
        # Spline parameters: weights, smoothness, order, knot estimate
        ww = Splprms[0]*np.ones((npts+1,))
        ss = Splprms[1]*(npts+1)
        kk = Splprms[2]
        nest = int((npts+1)/2.)
        # Periodic parametric fit (close the contour, wrap the angle by 2pi)
        tckp, uu = scpinterp.splprep(
            [np.append(shrunk[0,:], shrunk[0,0]),
             np.append(shrunk[1,:], shrunk[1,0])],
            u=np.append(ang, ang[0]+2.*np.pi),
            w=ww, s=ss, k=kk, nest=nest, full_output=0)
        xnew, ynew = scpinterp.splev(np.linspace(-np.pi, np.pi, NP), tckp)
        # Translate back to the original frame and close the polygon
        Poly = np.array([xnew+BaryS[0], ynew+BaryS[1]])
        Poly = np.concatenate((Poly, Poly[:, 0:1]), axis=1)
    if Plot:
        f = plt.figure(facecolor='w', figsize=(8, 10))
        ax = f.add_axes([0.1, 0.1, 0.8, 0.8])
        ax.plot(Poly[0,:], Poly[1,:], '-k', Poly[0,:], Poly[1,:], '-r')
        ax.set_aspect(aspect="equal", adjustable='datalim')
        ax.set_xlabel(r"R (m)")
        ax.set_ylabel(r"Z (m)")
        f.canvas.draw()
    return Poly
def _Ves_get_sampleEdge(VPoly, dL, DS=None, dLMode='abs', DIn=0., VIn=None,
                        margin=1.e-9):
    """ Sample the contour of polygon VPoly with resolution dL.

    Validates the inputs, then delegates the discretization to
    _GG.discretize_vpoly. Returns (Pts, dLr, ind).
    """
    num_types = [int, float, np.int32, np.int64, np.float32, np.float64]
    assert type(dL) in num_types and type(DIn) in num_types
    assert DS is None or (hasattr(DS, '__iter__') and len(DS) == 2)
    if DS is None:
        DS = [None, None]
    else:
        # each sub-domain bound is None, or a pair of scalars/None
        assert all([dd is None
                    or (hasattr(dd, '__iter__') and len(dd) == 2
                        and all([ss is None or type(ss) in num_types
                                 for ss in dd]))
                    for dd in DS])
    assert (type(dLMode) is str
            and dLMode.lower() in ['abs','rel']), \
        "Arg dLMode must be in ['abs','rel'] !"
    # Actual discretization delegated to the cython routine
    res = _GG.discretize_vpoly(VPoly, float(dL), mode=dLMode.lower(),
                               D1=DS[0], D2=DS[1], margin=margin,
                               DIn=float(DIn), VIn=VIn)
    Pts, dLr, ind = res[0], res[1], res[2]
    return Pts, dLr, ind
def _Ves_get_sampleCross(VPoly, Min1, Max1, Min2, Max2, dS,
                         DS=None, dSMode='abs', ind=None,
                         margin=1.e-9, mode='flat'):
    """ Sample the 2D cross-section bounded by [Min1,Max1]x[Min2,Max2].

    mode='flat'   -> (Pts, dS, ind, (d1r, d2r)) restricted to VPoly
    mode='imshow' -> (pts, x1, x2, extent) regular grid for direct plotting
    If ind is provided, the points are reconstructed from those indices
    (only mode='flat' is supported in that case).
    """
    assert mode in ['flat','imshow']
    num_types = [int, float, np.int32, np.int64, np.float32, np.float64]
    # dS: a single scalar step (used for both directions) or a pair
    dS_is_pair = (hasattr(dS, '__iter__') and len(dS) == 2
                  and all([type(ds) in num_types for ds in dS]))
    assert dS_is_pair or type(dS) in num_types, \
        "Arg dS must be a float or a list 2 floats!"
    if type(dS) in num_types:
        dS = [float(dS), float(dS)]
    else:
        dS = [float(dS[0]), float(dS[1])]
    assert DS is None or (hasattr(DS, '__iter__') and len(DS) == 2)
    if DS is None:
        DS = [None, None]
    else:
        assert all([dd is None
                    or (hasattr(dd, '__iter__') and len(dd) == 2
                        and all([ss is None or type(ss) in num_types
                                 for ss in dd]))
                    for dd in DS])
    assert type(dSMode) is str and dSMode.lower() in ['abs','rel'],\
        "Arg dSMode must be in ['abs','rel'] !"
    assert ind is None or (type(ind) is np.ndarray and ind.ndim == 1
                           and ind.dtype in ['int32','int64']
                           and np.all(ind >= 0)), \
        "Arg ind must be None or 1D np.ndarray of positive int !"
    MinMax1 = np.array([Min1, Max1])
    MinMax2 = np.array([Min2, Max2])
    if ind is not None:
        # Reconstruct the sample from pre-computed indices ('flat' only)
        assert mode == 'flat'
        ok = type(ind) is np.ndarray and ind.ndim == 1
        ok = ok and ind.dtype in ['int32','int64'] and np.all(ind >= 0)
        assert ok, "Arg ind must be a np.ndarray of int !"
        Pts, dS, d1r, d2r = _GG._Ves_meshCross_FromInd(MinMax1, MinMax2,
                                                       dS[0], dS[1], ind,
                                                       dSMode=dSMode,
                                                       margin=margin)
        return Pts, dS, ind, (d1r, d2r)
    if mode == 'flat':
        Pts, dS, ind, d1r, d2r = _GG.discretize_segment2d(
            MinMax1, MinMax2, dS[0], dS[1], D1=DS[0], D2=DS[1],
            mode=dSMode, VPoly=VPoly, margin=margin)
        return Pts, dS, ind, (d1r, d2r)
    # mode == 'imshow': independent 1D meshes, combined into a regular grid
    x1, d1r, ind1, N1 = _GG._Ves_mesh_dlfromL_cython(MinMax1, dS[0], DS[0],
                                                     Lim=True, dLMode=dSMode,
                                                     margin=margin)
    x2, d2r, ind2, N2 = _GG._Ves_mesh_dlfromL_cython(MinMax2, dS[1], DS[1],
                                                     Lim=True, dLMode=dSMode,
                                                     margin=margin)
    xx1, xx2 = np.meshgrid(x1, x2)
    pts = np.squeeze([xx1, xx2])
    # extent: pixel-edge bounding box (for e.g. plt.imshow)
    extent = (x1[0]-d1r/2., x1[-1]+d1r/2., x2[0]-d2r/2., x2[-1]+d2r/2.)
    return pts, x1, x2, extent
def _Ves_get_sampleV(VPoly, Min1, Max1, Min2, Max2, dV,
                     DV=None, dVMode='abs', ind=None,
                     VType='Tor', VLim=None,
                     Out='(X,Y,Z)', margin=1.e-9):
    """ Sample the volume bounded by VPoly with resolutions dV.

    Dispatches to the toroidal or linear cython mesher, either over a
    sub-domain DV (ind is None) or from pre-computed indices ind.
    Returns (Pts, dV, ind, dVr).
    """
    num_types = [int, float, np.int32, np.int64, np.float32, np.float64]
    dV_ok = type(dV) in num_types or (
        hasattr(dV, '__iter__') and len(dV) == 3
        and all([type(ds) in num_types for ds in dV]))
    assert dV_ok, "Arg dV must be a float or a list 3 floats !"
    if type(dV) in num_types:
        dV = [float(dV), float(dV), float(dV)]
    else:
        dV = [float(dV[0]), float(dV[1]), float(dV[2])]
    assert DV is None or (hasattr(DV, '__iter__') and len(DV) == 3)
    if DV is None:
        DV = [None, None, None]
    else:
        assert all([dd is None
                    or (hasattr(dd, '__iter__') and len(dd) == 2
                        and all([ss is None or type(ss) in num_types
                                 for ss in dd]))
                    for dd in DV]), \
            "Arg DV must be a list of 3 lists of 2 floats !"
    assert type(dVMode) is str and dVMode.lower() in ['abs','rel'], \
        "Arg dVMode must be in ['abs','rel'] !"
    assert ind is None or (type(ind) is np.ndarray and ind.ndim == 1
                           and ind.dtype in ['int32','int64']
                           and np.all(ind >= 0)), \
        "Arg ind must be None or 1D np.ndarray of positive int !"
    MinMax1 = np.array([Min1, Max1])
    MinMax2 = np.array([Min2, Max2])
    # Axial extent only meaningful in linear geometry
    VLim = None if VType.lower() == 'tor' else np.array(VLim).ravel()
    dVr = [None, None, None]
    is_tor = VType.lower() == 'tor'
    if ind is None:
        # Mesh the requested sub-domain
        if is_tor:
            Pts, dV, ind, dVr[0], dVr[1], dVr[2] = \
                _GG._Ves_Vmesh_Tor_SubFromD_cython(
                    dV[0], dV[1], dV[2], MinMax1, MinMax2,
                    DR=DV[0], DZ=DV[1], DPhi=DV[2],
                    VPoly=VPoly, Out=Out, margin=margin)
        else:
            Pts, dV, ind, dVr[0], dVr[1], dVr[2] = \
                _GG._Ves_Vmesh_Lin_SubFromD_cython(
                    dV[0], dV[1], dV[2], VLim, MinMax1, MinMax2,
                    DX=DV[0], DY=DV[1], DZ=DV[2],
                    VPoly=VPoly, margin=margin)
    else:
        # Reconstruct the mesh from pre-computed indices
        if is_tor:
            Pts, dV, dVr[0], dVr[1], dVr[2] = \
                _GG._Ves_Vmesh_Tor_SubFromInd_cython(
                    dV[0], dV[1], dV[2], MinMax1, MinMax2, ind,
                    Out=Out, margin=margin)
        else:
            Pts, dV, dVr[0], dVr[1], dVr[2] = \
                _GG._Ves_Vmesh_Lin_SubFromInd_cython(
                    dV[0], dV[1], dV[2], VLim, MinMax1, MinMax2, ind,
                    margin=margin)
    return Pts, dV, ind, dVr
def _Ves_get_sampleS(VPoly, Min1, Max1, Min2, Max2, dS,
                     DS=None, dSMode='abs', ind=None, DIn=0., VIn=None,
                     VType='Tor', VLim=None, nVLim=None, Out='(X,Y,Z)',
                     margin=1.e-9, Multi=False, Ind=None):
    """ Sample the surface defined by polygon VPoly (toroidal or linear).

    Dispatches to the _GG cython surface meshers, either over a sub-domain
    DS (when ind is None) or from pre-computed indices ind. With several
    limited occurrences (nVLim > 1) the outputs are lists (one entry per
    structure selected by Ind); a final squeeze applies when len(VLim) == 1.
    Returns (Pts, dS, ind, dSr).
    """
    # Accepted scalar types for resolution arguments
    types =[int,float,np.int32,np.int64,np.float32,np.float64]
    assert type(dS) in types or (hasattr(dS,'__iter__') and len(dS)==2 and all([type(ds) in types for ds in dS])), "Arg dS must be a float or a list of 2 floats !"
    # NOTE(review): the assert above accepts len(dS) == 2, but the iterable
    # branch below reads dS[2] -> IndexError for a 2-element dS; confirm the
    # intended length (scalar dS always works)
    dS = [float(dS),float(dS),float(dS)] if type(dS) in types else [float(dS[0]),float(dS[1]),float(dS[2])]
    assert DS is None or (hasattr(DS,'__iter__') and len(DS)==3)
    msg = "type(nVLim)={0} and nVLim={1}".format(str(type(nVLim)),nVLim)
    assert type(nVLim) is int and nVLim>=0, msg
    if DS is None:
        DS = [None,None,None]
    else:
        assert all([ds is None or (hasattr(ds,'__iter__') and len(ds)==2 and all([ss is None or type(ss) in types for ss in ds])) for ds in DS]), "Arg DS must be a list of 3 lists of 2 floats !"
    assert type(dSMode) is str and dSMode.lower() in ['abs','rel'], "Arg dSMode must be in ['abs','rel'] !"
    assert type(Multi) is bool, "Arg Multi must be a bool !"
    # VLim is only meaningful for limited (non-axisymmetric) occurrences
    VLim = None if (VLim is None or nVLim==0) else np.array(VLim)
    MinMax1 = np.array([Min1,Max1])
    MinMax2 = np.array([Min2,Max2])
    # Check if Multi
    if nVLim>1:
        # several limited occurrences: Ind selects which ones to sample
        assert VLim is not None, "For multiple Struct, Lim cannot be None !"
        assert all([hasattr(ll,'__iter__') and len(ll)==2 for ll in VLim])
        if Ind is None:
            Ind = np.arange(0,nVLim)
        else:
            Ind = [Ind] if not hasattr(Ind,'__iter__') else Ind
            Ind = np.asarray(Ind).astype(int)
        if ind is not None:
            # one index array per selected occurrence
            assert hasattr(ind,'__iter__') and len(ind)==len(Ind), "For multiple Struct, ind must be a list of len() = len(Ind) !"
            assert all([type(ind[ii]) is np.ndarray and ind[ii].ndim==1 and ind[ii].dtype in ['int32','int64'] and np.all(ind[ii]>=0) for ii in range(0,len(ind))]), "For multiple Struct, ind must be a list of index arrays !"
    else:
        # single occurrence: normalize VLim/Ind to 1-element lists
        VLim = [None] if VLim is None else [VLim.ravel()]
        assert ind is None or (type(ind) is np.ndarray and ind.ndim==1 and ind.dtype in ['int32','int64'] and np.all(ind>=0)), "Arg ind must be None or 1D np.ndarray of positive int !"
        Ind = [0]
    if ind is None:
        # Mesh from the requested sub-domain DS, one result slot per Ind
        Pts, dS, ind, dSr = [0 for ii in Ind], [dS for ii in Ind], [0 for ii in Ind], [[0,0] for ii in Ind]
        if VType.lower()=='tor':
            for ii in range(0,len(Ind)):
                if VLim[Ind[ii]] is None:
                    # fully axisymmetric occurrence
                    Pts[ii], dS[ii], ind[ii], NL, dSr[ii][0], Rref, dSr[ii][1], nRPhi0, VPbis = _GG._Ves_Smesh_Tor_SubFromD_cython(dS[ii][0], dS[ii][1], VPoly, DR=DS[0], DZ=DS[1], DPhi=DS[2], DIn=DIn, VIn=VIn, PhiMinMax=None, Out=Out, margin=margin)
                else:
                    # toroidally limited occurrence (adds end-face resolutions)
                    Pts[ii], dS[ii], ind[ii], NL, dSr[ii][0], Rref, dR0r, dZ0r, dSr[ii][1], VPbis = _GG._Ves_Smesh_TorStruct_SubFromD_cython(VLim[Ind[ii]], dS[ii][0], dS[ii][1], VPoly, DR=DS[0], DZ=DS[1], DPhi=DS[2], DIn=DIn, VIn=VIn, Out=Out, margin=margin)
                    dSr[ii] += [dR0r, dZ0r]
        else:
            for ii in range(0,len(Ind)):
                Pts[ii], dS[ii], ind[ii], NL, dSr[ii][0], Rref, dSr[ii][1], dY0r, dZ0r, VPbis = _GG._Ves_Smesh_Lin_SubFromD_cython(VLim[Ind[ii]], dS[ii][0], dS[ii][1], VPoly, DX=DS[0], DY=DS[1], DZ=DS[2], DIn=DIn, VIn=VIn, margin=margin)
                dSr[ii] += [dY0r, dZ0r]
    else:
        # Reconstruct the mesh from pre-computed indices
        ind = ind if Multi else [ind]
        Pts, dS, dSr = [np.ones((3,0)) for ii in Ind], [dS for ii in Ind], [[0,0] for ii in Ind]
        if VType.lower()=='tor':
            for ii in range(0,len(Ind)):
                if ind[Ind[ii]].size>0:
                    if VLim[Ind[ii]] is None:
                        Pts[ii], dS[ii], NL, dSr[ii][0], Rref, dSr[ii][1], nRPhi0, VPbis = _GG._Ves_Smesh_Tor_SubFromInd_cython(dS[ii][0], dS[ii][1], VPoly, ind[Ind[ii]], DIn=DIn, VIn=VIn, PhiMinMax=None, Out=Out, margin=margin)
                    else:
                        Pts[ii], dS[ii], NL, dSr[ii][0], Rref, dR0r, dZ0r, dSr[ii][1], VPbis = _GG._Ves_Smesh_TorStruct_SubFromInd_cython(VLim[Ind[ii]], dS[ii][0], dS[ii][1], VPoly, ind[Ind[ii]], DIn=DIn, VIn=VIn, Out=Out, margin=margin)
                        dSr[ii] += [dR0r, dZ0r]
        else:
            for ii in range(0,len(Ind)):
                if ind[Ind[ii]].size>0:
                    Pts[ii], dS[ii], NL, dSr[ii][0], Rref, dSr[ii][1], dY0r, dZ0r, VPbis = _GG._Ves_Smesh_Lin_SubFromInd_cython(VLim[Ind[ii]], dS[ii][0], dS[ii][1], VPoly, ind[Ind[ii]], DIn=DIn, VIn=VIn, margin=margin)
                    dSr[ii] += [dY0r, dZ0r]
    # Single occurrence: un-wrap the 1-element lists
    if len(VLim)==1:
        Pts, dS, ind, dSr = Pts[0], dS[0], ind[0], dSr[0]
    return Pts, dS, ind, dSr
# ------------------------------------------------------------
# phi / theta projections for magfieldlines
def _Struct_get_phithetaproj(ax=None, poly_closed=None, lim=None, noccur=0):
# phi = toroidal angle
if noccur == 0:
Dphi = np.array([[-np.pi,np.pi]])
nphi = np.r_[1]
else:
assert lim.ndim == 2, str(lim)
nphi = np.ones((noccur,),dtype=int)
ind = (lim[:,0] > lim[:,1]).nonzero()[0]
Dphi = np.concatenate((lim, np.full((noccur,2),np.nan)), axis=1)
if ind.size > 0:
for ii in ind:
Dphi[ii,:] = [lim[ii,0], np.pi, -np.pi, lim[ii,1]]
nphi[ii] = 2
# theta = poloidal angle
Dtheta = np.arctan2(poly_closed[1,:]-ax[1], poly_closed[0,:]-ax[0])
Dtheta = np.r_[np.min(Dtheta), np.max(Dtheta)]
if Dtheta[0] > Dtheta[1]:
ntheta = 2
Dtheta = [Dtheta[0],np.pi, -np.pi, Dtheta[1]]
else:
ntheta = 1
return nphi, Dphi, ntheta, Dtheta
def _get_phithetaproj_dist(poly_closed, ax, Dtheta, nDtheta,
                           Dphi, nDphi, theta, phi, ntheta, nphi, noccur):
    """ Ray-cast distances from ax to the contour, plus a phi-inside mask.

    For each poloidal angle in theta, cast a ray from ax along
    (cos(theta), sin(theta)) and return the smallest positive distance to
    the closed polygon (np.nan where theta lies outside the Dtheta extent
    or no intersection exists). Also returns a boolean mask of the phi
    values lying inside the Dphi interval(s).
    """
    # theta inside the poloidal extent; nDtheta == 2 means the extent
    # wraps around +/- pi (disjoint union of two intervals)
    if nDtheta == 1:
        ind = (theta >= Dtheta[0]) & (theta <= Dtheta[1])
    else:
        ind = (theta >= Dtheta[0]) | (theta <= Dtheta[1])
    disttheta = np.full((theta.size,), np.nan)
    # phi within Dphi
    if noccur > 0:
        indphi = np.zeros((nphi,),dtype=bool)
        for ii in range(0,noccur):
            for jj in range(0,nDphi[ii]):
                indphi |= (phi >= Dphi[ii,jj]) & (phi<= Dphi[ii,jj+1])
        if not np.any(indphi):
            return disttheta, indphi
    else:
        # axisymmetric: every phi is inside
        indphi = np.ones((nphi,),dtype=bool)
    # No theta within Dtheta
    if not np.any(ind):
        return disttheta, indphi
    # Check for non-parallel AB / u pairs
    # u: unit ray direction per theta; AB: polygon edge vectors
    u = np.array([np.cos(theta), np.sin(theta)])
    AB = np.diff(poly_closed, axis=1)
    # 2D cross product (determinant); |det| ~ 0 => edge parallel to ray
    detABu = AB[0,:,None]*u[1,None,:] - AB[1,:,None]*u[0,None,:]
    inddet = ind[None,:] & (np.abs(detABu) > 1.e-9)
    if not np.any(inddet):
        return disttheta, indphi
    nseg = poly_closed.shape[1]-1
    k = np.full((nseg, ntheta), np.nan)
    # OA: vectors from the axis to each edge start point
    OA = poly_closed[:,:-1] - ax[:,None]
    detOAu = (OA[0,:,None]*u[1,None,:] - OA[1,:,None]*u[0,None,:])[inddet]
    # ss: parametric position of the intersection along each edge
    ss = - detOAu / detABu[inddet]
    # keep only intersections lying on the edge segment itself
    inds = (ss >= 0.) & (ss < 1.)
    inddet[inddet] = inds
    if not np.any(inds):
        return disttheta, indphi
    scaOAu = (OA[0,:,None]*u[0,None,:] + OA[1,:,None]*u[1,None,:])[inddet]
    scaABu = (AB[0,:,None]*u[0,None,:] + AB[1,:,None]*u[1,None,:])[inddet]
    # k: signed distance along the ray to the intersection point
    k[inddet] = scaOAu + ss[inds]*scaABu
    # keep only intersections in front of the axis (k > 0)
    indk = k[inddet] > 0.
    inddet[inddet] = indk
    if not np.any(indk):
        return disttheta, indphi
    k[~inddet] = np.nan
    indok = np.any(inddet, axis=0)
    # closest valid intersection per theta
    disttheta[indok] = np.nanmin(k[:,indok], axis=0)
    return disttheta, indphi
"""
###############################################################################
###############################################################################
LOS functions
###############################################################################
"""
def LOS_PRMin(Ds, us, kOut=None, Eps=1.e-12, squeeze=True, Test=True):
    """ Compute the point on the LOS where the major radius is minimum

    Parameters
    ----------
    Ds, us : np.ndarray
        LOS origins and direction vectors; shape (3,), (3, nlos) or
        (3, nlos, nref), with Ds.shape == us.shape
    kOut : None or array-like
        optional upper bound(s) on the parameter k (one per LOS)
    Eps : float
        threshold below which the LOS is considered vertical
    squeeze : bool
        if True, drop size-1 nlos/nref dimensions from the result

    Returns
    -------
    kRMin : float or np.ndarray
        parameter k (>= 0, clipped to kOut) minimizing R along each LOS
    """
    if Test:
        assert Ds.ndim in [1, 2, 3] and 3 in Ds.shape and Ds.shape == us.shape
    if kOut is not None:
        kOut = np.atleast_1d(kOut)
        assert kOut.size == Ds.size / 3
    # Normalize inputs to shape (3, nlos, nref)
    if Ds.ndim == 1:
        Ds, us = Ds[:, None, None], us[:, None, None]
    elif Ds.ndim == 2:
        Ds, us = Ds[:, :, None], us[:, :, None]
    if kOut is not None:
        if kOut.ndim == 1:
            kOut = kOut[:, None]
    _, nlos, nref = Ds.shape
    kRMin = np.full((nlos, nref), np.nan)
    uparN = np.sqrt(us[0, :, :]**2 + us[1, :, :]**2)
    # Vertical LOS: R constant along the line => minimum at k = 0
    ind = uparN > Eps
    kRMin[~ind] = 0.
    # Otherwise minimize R(k)^2 = |D_xy + k*u_xy|^2 analytically
    kRMin[ind] = -(us[0, ind]*Ds[0, ind] + us[1, ind]*Ds[1, ind]) / uparN[ind]**2
    # Clip to the validity interval [0, kOut]
    kRMin[kRMin <= 0.] = 0.
    if kOut is not None:
        kRMin[kRMin > kOut] = kOut[kRMin > kOut]
    # squeeze back to the input dimensionality
    if squeeze:
        # BUGFIX: condition was `nlos == 11` (typo), so a single-LOS
        # single-ref input returned a (1,) array instead of a scalar
        if nref == 1 and nlos == 1:
            kRMin = kRMin[0, 0]
        elif nref == 1:
            kRMin = kRMin[:, 0]
        elif nlos == 1:
            kRMin = kRMin[0, :]
    return kRMin
def LOS_CrossProj(VType, Ds, us, kOuts, proj='All', multi=False,
                  num_threads=16, return_pts=False, Test=True):
    """ Compute the parameters to plot the poloidal projection of the LOS

    Parameters
    ----------
    VType : str
        'Tor' or 'Lin' geometry
    Ds, us : np.ndarray
        (3, nlos, nseg) LOS origins and direction vectors
    kOuts : np.ndarray
        (nlos, nseg) parametric lengths bounding each LOS segment
        (presumably the exit lengths — scales us for the end points)
    proj : str or tuple
        'cross', 'hor', 'all', '3d', or a tuple of coordinate names
        among ('R', 'Z', 'x', 'y', 'z')
    multi : bool
        if True return one array per LOS (split), else nan-separated arrays
    return_pts : bool
        if True return the point array(s) directly

    Returns
    -------
    pts (if return_pts) or (R, Z, x, y, z), with None where not requested
    """
    assert type(VType) is str and VType.lower() in ['tor','lin']
    # BUGFIX: 'hor' was ('x,y'), i.e. the plain string 'x,y' instead of a
    # tuple; the membership tests below happened to work by substring
    # matching, but iterating lcoords would have yielded 'x', ',', 'y'
    dproj = {'cross':('R','Z'), 'hor':('x','y'), 'all':('R','Z','x','y'),
             '3d':('x','y','z')}
    assert type(proj) in [str, tuple]
    if type(proj) is tuple:
        assert all([type(pp) is str for pp in proj])
        lcoords = proj
    else:
        proj = proj.lower()
        assert proj in dproj.keys()
        lcoords = dproj[proj]
    if return_pts:
        assert proj in ['cross','hor', '3d']
    lc = [Ds.ndim == 3, Ds.shape == us.shape]
    if not all(lc):
        msg = "Ds and us must have the same shape and dim in [2,3]:\n"
        msg += "    - provided Ds.shape: %s\n"%str(Ds.shape)
        msg += "    - provided us.shape: %s"%str(us.shape)
        raise Exception(msg)
    lc = [kOuts.size == Ds.size/3, kOuts.shape == Ds.shape[1:]]
    if not all(lc):
        msg = "kOuts must have the same shape and ndim = Ds.ndim-1:\n"
        msg += "    - Ds.shape    : %s\n"%str(Ds.shape)
        msg += "    - kOutss.shape: %s"%str(kOuts.shape)
        raise Exception(msg)
    # Prepare inputs
    _, nlos, nseg = Ds.shape
    # Detailed sampling for 'tor' and ('cross' or 'all')
    R, Z = None, None
    if 'R' in lcoords or 'Z' in lcoords:
        # resolution per LOS depends on its inclination vs the poloidal plane
        angcross = np.arccos(np.sqrt(us[0,...]**2 + us[1,...]**2)
                             /np.sqrt(np.sum(us**2, axis=0)))
        resnk = np.ceil(25.*(1 - (angcross/(np.pi/4)-1)**2) + 5)
        resnk = 1./resnk.ravel()
        # Use optimized get sample
        DL = np.vstack((np.zeros((nlos*nseg,),dtype=float), kOuts.ravel()))
        k, reseff, lind = _GG.LOS_get_sample(nlos*nseg, resnk, DL,
                                             dmethod='rel', method='simps',
                                             num_threads=num_threads, Test=Test)
        assert lind.size == nseg*nlos - 1
        ind = lind[nseg-1::nseg]
        # number of sampled points per LOS segment
        nbrep = np.r_[lind[0], np.diff(lind), k.size - lind[-1]]
        pts = (np.repeat(Ds.reshape((3,nlos*nseg)), nbrep, axis=1)
               + k[None,:] * np.repeat(us.reshape((3,nlos*nseg)), nbrep,
                                       axis=1))
        if return_pts:
            # project to the (R, Z) poloidal plane
            pts = np.array([np.hypot(pts[0,:],pts[1,:]), pts[2,:]])
            if multi:
                pts = np.split(pts, ind, axis=1)
            else:
                pts = np.insert(pts, ind, np.nan, axis=1)
        else:
            if multi:
                if 'R' in lcoords:
                    R = np.split(np.hypot(pts[0,:],pts[1,:]), ind)
                if 'Z' in lcoords:
                    Z = np.split(pts[2,:], ind)
            else:
                if 'R' in lcoords:
                    R = np.insert(np.hypot(pts[0,:],pts[1,:]), ind, np.nan)
                if 'Z' in lcoords:
                    Z = np.insert(pts[2,:], ind, np.nan)
    # Normal sampling => pts
    # unnecessary only if 'tor' and 'cross'
    x, y, z = None, None, None
    if 'x' in lcoords or 'y' in lcoords or 'z' in lcoords:
        # cartesian coordinates are straight lines: endpoints suffice
        pts = np.concatenate((Ds, Ds[:,:,-1:] + kOuts[None,:,-1:]*us[:,:,-1:]),
                             axis=-1)
        if multi:
            ind = np.arange(1,nlos)*(nseg+1)
            pts = pts.reshape((3,nlos*(nseg+1)))
        else:
            # nan column separates the LOS when plotted as a single array
            nancoords = np.full((3,nlos,1), np.nan)
            pts = np.concatenate((pts,nancoords), axis=-1)
            pts = pts.reshape((3,nlos*(nseg+2)))
        if return_pts:
            assert proj in ['hor','3d']
            if multi:
                if proj == 'hor':
                    pts = np.split(pts[:2,:], ind, axis=1)
                else:
                    pts = np.split(pts, ind, axis=1)
            elif proj == 'hor':
                pts = pts[:2,:]
        else:
            if multi:
                if 'x' in lcoords:
                    x = np.split(pts[0,:], ind)
                if 'y' in lcoords:
                    y = np.split(pts[1,:], ind)
                if 'z' in lcoords:
                    z = np.split(pts[2,:], ind)
            else:
                if 'x' in lcoords:
                    x = pts[0,:]
                if 'y' in lcoords:
                    y = pts[1,:]
                if 'z' in lcoords:
                    z = pts[2,:]
    if return_pts:
        return pts
    else:
        return R, Z, x, y, z
##############################################
# Meshing & signal
##############################################
def LOS_get_sample(D, u, dL, DL=None, dLMode='abs', method='sum', Test=True):
    """ Sample the line D + k*u over the interval DL at resolution dL.

    method determines which points are returned:
      'linspace': the N+1 edges, first and last point included
      'sum'     : the N segment midpoints
      'simps'   : the N+1 edges with N forced even (scipy simpson rule)
      'romb'    : the N+1 edges with N forced to 2**k (scipy romberg rule)
    dLMode='abs' treats dL as an absolute step, 'rel' as a fraction of DL.
    Returns (Pts, k, dLr): the (3, n) sampled points, the parameter values
    and the effective step.
    """
    if Test:
        assert all(type(dd) is np.ndarray and dd.shape == (3,) for dd in (D, u))
        assert not hasattr(dL, '__iter__')
        assert DL is None or all([hasattr(DL, '__iter__'), len(DL) == 2,
                                  not any(hasattr(dd, '__iter__') for dd in DL)])
        assert dLMode in ['abs', 'rel']
        assert type(method) is str and method in ['linspace', 'sum', 'simps', 'romb']
    # Minimum number of intervals satisfying the requested resolution
    # (NOTE: DL is required in practice — a None DL fails below)
    if dLMode == 'abs':
        nint = int(np.ceil((DL[1] - DL[0]) / dL))
    else:
        nint = int(np.ceil(1. / dL))
    # Adjust to the integration scheme's constraints
    if method == 'simps' and nint % 2 != 0:
        nint += 1   # simpson needs an even number of intervals
    elif method == 'romb':
        nint = 2**int(np.ceil(np.log(nint) / np.log(2.)))  # 2**k intervals
    # Derive the parameter values and the effective step
    if method == 'sum':
        dLr = (DL[1] - DL[0]) / nint
        k = DL[0] + (0.5 + np.arange(0, nint)) * dLr
    else:
        k, dLr = np.linspace(DL[0], DL[1], nint + 1,
                             endpoint=True, retstep=True, dtype=float)
    Pts = D[:, np.newaxis] + k[np.newaxis, :] * u[:, np.newaxis]
    return Pts, k, dLr
def LOS_calc_signal(ff, D, u, dL, DL=None, dLMode='abs', method='romb', Test=True):
    """ Integrate the local emissivity ff along the LOS (D, u) over DL.

    ff is called on the (3, N) sampled points (and, if it takes two
    mandatory arguments, also on the reversed LOS direction per point);
    the resulting values are integrated with the chosen scheme
    ('sum', 'simps' or 'romb'). Returns the integral Int.
    """
    assert hasattr(ff,'__call__'), "Arg ff must be a callable (function) taking at least 1 positional Pts (a (3,N) np.ndarray of cartesian (X,Y,Z) coordinates) !"
    # 'linspace' returns plain edges, unsuitable for the integrators below
    assert not method=='linspace'
    Pts, k, dLr = LOS_get_sample(D, u, dL, DL=DL, dLMode=dLMode, method=method, Test=Test)
    # Count the mandatory positional arguments of ff
    # NOTE(review): `insp` is bound elsewhere in this file; the use of
    # `.parameters` suggests inspect.signature semantics on py3 — confirm,
    # since inspect.getargspec results expose no `.parameters` attribute
    out = insp(ff)
    if sys.version[0]=='3':
        N = np.sum([(pp.kind==pp.POSITIONAL_OR_KEYWORD and pp.default is pp.empty) for pp in out.parameters.values()])
    else:
        N = len(out.args)
    if N==1:
        # isotropic emissivity: value per sampled point
        Vals = ff(Pts)
    elif N==2:
        # anisotropic emissivity: also pass the reversed LOS direction
        Vals = ff(Pts, np.tile(-u,(Pts.shape[1],1)).T)
    else:
        raise ValueError("The function (ff) assessing the emissivity locally "
                         + "must take a single positional argument: Pts a (3,N)"
                         + " np.ndarray of (X,Y,Z) cartesian coordinates !")
    # NaN samples contribute nothing to the integral
    Vals[np.isnan(Vals)] = 0.
    if method=='sum':
        Int = np.sum(Vals)*dLr
    elif method=='simps':
        Int = scpintg.simps(Vals, x=None, dx=dLr)
    elif method=='romb':
        Int = scpintg.romb(Vals, dx=dLr, show=False)
    return Int
"""
###############################################################################
###############################################################################
Solid Angle particle
###############################################################################
"""
def calc_solidangle_particle(traj, pts, r=1., config=None,
                             approx=True, aniso=False, block=True):
    """ Compute the solid angle subtended by a particle along a trajectory

    The particle has radius r, and trajectory (array of points) traj
    It is observed from pts (array of points)
    traj and pts are (3,N) and (3,M) arrays of cartesian coordinates

    approx = True => use approximation (pi*r^2/l^2)
    aniso  = True => return also unit vector of emission
    block  = True consider LOS collisions (with Ves, Struct...)

    if block:
        config = config used for LOS collisions

    Return:
    -------
    sang: np.ndarray
        (N,M) Array of floats, solid angles
    """
    ################
    # Prepare inputs
    traj = np.ascontiguousarray(traj, dtype=float)
    pts = np.ascontiguousarray(pts, dtype=float)
    r = np.r_[r].astype(float).ravel()
    # Check booleans
    assert type(approx) is bool
    assert type(aniso) is bool
    assert type(block) is bool
    # Check config
    assert config is None or config.__class__.__name__ == 'Config'
    assert block == (config is not None)
    # Check pts, traj and r are array of good shape
    assert traj.ndim in [1,2]
    assert pts.ndim in [1,2]
    assert 3 in traj.shape and 3 in pts.shape
    if traj.ndim == 1:
        traj = traj.reshape((3,1))
    if traj.shape[0] != 3:
        traj = traj.T
    if pts.ndim == 1:
        pts = pts.reshape((3,1))
    if pts.shape[0] != 3:
        pts = pts.T
    # get npart: r and traj are broadcast to a common particle count
    ntraj = traj.shape[1]
    nr = r.size
    npts = pts.shape[1]
    npart = max(nr,ntraj)
    assert nr in [1,npart]
    assert ntraj in [1,npart]
    if nr < npart:
        r = np.full((npart,), r[0])
    if ntraj < npart:
        traj = np.repeat(traj, npart, axis=1)
    ################
    # Main computation
    # traj2pts vector, with length (3d array (3,N,M))
    vect = pts[:,None,:] - traj[:,:,None]
    l = np.sqrt(np.sum(vect**2, axis=0))
    # If aniso or block, normalize
    if aniso or block:
        vect = vect/l[None,:,:]
    # Solid angle
    # r is per-particle (size npart) while l is (npart, npts), so r must
    # broadcast along axis 0, i.e. r[:, None].
    # BUGFIX: original used r[None,:] (wrong axis: fails for npart != npts,
    # mispairs otherwise) and r**2[None,:] in the exact branch, which
    # indexes the int literal 2 and raises TypeError unconditionally.
    if approx:
        sang = np.pi*r[:,None]**2/l**2
    else:
        sang = 2.*np.pi*(1 - np.sqrt(1. - r[:,None]**2/l**2))
    # block
    if block:
        kwdargs = config._get_kwdargs_LOS_isVis()
        # TODO : modify this function along issue #102
        indnan = _GG.LOS_areVis_PtsFromPts_VesStruct(traj, pts, k=l, vis=False,
                                                     **kwdargs)
        sang[indnan] = 0.
        # NOTE(review): vect is (3, N, M); if indnan is an (N, M) mask this
        # should probably be vect[:, indnan] = np.nan — confirm indnan shape
        vect[indnan,:] = np.nan
    ################
    # Return
    if aniso:
        return sang, vect
    else:
        return sang
def calc_solidangle_particle_integ(traj, r=1., config=None,
                                   approx=True, block=True, res=0.01):
    """ Placeholder — not implemented yet.

    Planned algorithm:
      step 0: if block, generate kwdargs from config
      step 1: sample the cross-section
      step 2: loop on R of the cross-section points (parallelize?)
              => fix the number of phi for the rest of the loop
              then loop on Z
      step 3: loop on phi
              check visibility (if block=True) for each phi (LOS collision);
              if visible, compute the solid angle and integrate (sum * res)
              over phi
    Intended to return sang as a (N, nR, nZ) array.
    """
    return None
| [
"numpy.sqrt",
"tofu.geom._GG.Poly_VolAngTor",
"numpy.log",
"tofu.geom._GG.discretize_segment2d",
"numpy.ascontiguousarray",
"numpy.array",
"numpy.arctan2",
"tofu.geom._GG._Ves_Smesh_Lin_SubFromD_cython",
"numpy.nanmin",
"numpy.sin",
"numpy.arange",
"numpy.repeat",
"tofu.geom._GG._Ves_Vmesh_T... | [((1201, 1289), 'tofu.geom._GG.Poly_Order', '_GG.Poly_Order', (['Poly'], {'order': '"""C"""', 'Clock': '(False)', 'close': '(True)', 'layout': '"""(cc,N)"""', 'Test': '(True)'}), "(Poly, order='C', Clock=False, close=True, layout='(cc,N)',\n Test=True)\n", (1215, 1289), True, 'import tofu.geom._GG as _GG\n'), ((1768, 1836), 'numpy.array', 'np.array', (['[(P1Max[0] + P1Min[0]) / 2.0, (P2Max[1] + P2Min[1]) / 2.0]'], {}), '([(P1Max[0] + P1Min[0]) / 2.0, (P2Max[1] + P2Min[1]) / 2.0])\n', (1776, 1836), True, 'import numpy as np\n'), ((1845, 1883), 'tofu.geom._GG.poly_area_and_barycenter', '_GG.poly_area_and_barycenter', (['Poly', 'NP'], {}), '(Poly, NP)\n', (1873, 1883), True, 'import tofu.geom._GG as _GG\n'), ((2296, 2322), 'numpy.diff', 'np.diff', (['Poly'], {'n': '(1)', 'axis': '(1)'}), '(Poly, n=1, axis=1)\n', (2303, 2322), True, 'import numpy as np\n'), ((2409, 2444), 'numpy.array', 'np.array', (['[Vect[1, :], -Vect[0, :]]'], {}), '([Vect[1, :], -Vect[0, :]])\n', (2417, 2444), True, 'import numpy as np\n'), ((2592, 2689), 'tofu.geom._GG.Poly_Order', '_GG.Poly_Order', (['Poly'], {'order': 'arrayorder', 'Clock': 'Clock', 'close': '(False)', 'layout': '"""(cc,N)"""', 'Test': '(True)'}), "(Poly, order=arrayorder, Clock=Clock, close=False, layout=\n '(cc,N)', Test=True)\n", (2606, 2689), True, 'import tofu.geom._GG as _GG\n'), ((2830, 2839), 'numpy.max', 'np.max', (['r'], {}), '(r)\n', (2836, 2839), True, 'import numpy as np\n'), ((4161, 4194), 'numpy.all', 'np.all', (['(Poly[:, 0] == Poly[:, -1])'], {}), '(Poly[:, 0] == Poly[:, -1])\n', (4167, 4194), True, 'import numpy as np\n'), ((8235, 8257), 'numpy.array', 'np.array', (['[Min1, Max1]'], {}), '([Min1, Max1])\n', (8243, 8257), True, 'import numpy as np\n'), ((8271, 8293), 'numpy.array', 'np.array', (['[Min2, Max2]'], {}), '([Min2, Max2])\n', (8279, 8293), True, 'import numpy as np\n'), ((11590, 11612), 'numpy.array', 'np.array', (['[Min1, Max1]'], {}), '([Min1, Max1])\n', (11598, 
11612), True, 'import numpy as np\n'), ((11626, 11648), 'numpy.array', 'np.array', (['[Min2, Max2]'], {}), '([Min2, Max2])\n', (11634, 11648), True, 'import numpy as np\n'), ((13864, 13886), 'numpy.array', 'np.array', (['[Min1, Max1]'], {}), '([Min1, Max1])\n', (13872, 13886), True, 'import numpy as np\n'), ((13900, 13922), 'numpy.array', 'np.array', (['[Min2, Max2]'], {}), '([Min2, Max2])\n', (13908, 13922), True, 'import numpy as np\n'), ((18129, 18193), 'numpy.arctan2', 'np.arctan2', (['(poly_closed[1, :] - ax[1])', '(poly_closed[0, :] - ax[0])'], {}), '(poly_closed[1, :] - ax[1], poly_closed[0, :] - ax[0])\n', (18139, 18193), True, 'import numpy as np\n'), ((18712, 18742), 'numpy.full', 'np.full', (['(theta.size,)', 'np.nan'], {}), '((theta.size,), np.nan)\n', (18719, 18742), True, 'import numpy as np\n'), ((19290, 19318), 'numpy.diff', 'np.diff', (['poly_closed'], {'axis': '(1)'}), '(poly_closed, axis=1)\n', (19297, 19318), True, 'import numpy as np\n'), ((19539, 19570), 'numpy.full', 'np.full', (['(nseg, ntheta)', 'np.nan'], {}), '((nseg, ntheta), np.nan)\n', (19546, 19570), True, 'import numpy as np\n'), ((20184, 20206), 'numpy.any', 'np.any', (['inddet'], {'axis': '(0)'}), '(inddet, axis=0)\n', (20190, 20206), True, 'import numpy as np\n'), ((20230, 20260), 'numpy.nanmin', 'np.nanmin', (['k[:, indok]'], {'axis': '(0)'}), '(k[:, indok], axis=0)\n', (20239, 20260), True, 'import numpy as np\n'), ((21205, 21234), 'numpy.full', 'np.full', (['(nlos, nref)', 'np.nan'], {}), '((nlos, nref), np.nan)\n', (21212, 21234), True, 'import numpy as np\n'), ((21246, 21290), 'numpy.sqrt', 'np.sqrt', (['(us[0, :, :] ** 2 + us[1, :, :] ** 2)'], {}), '(us[0, :, :] ** 2 + us[1, :, :] ** 2)\n', (21253, 21290), True, 'import numpy as np\n'), ((28275, 28283), 'inspect.getargspec', 'insp', (['ff'], {}), '(ff)\n', (28279, 28283), True, 'from inspect import getargspec as insp\n'), ((30135, 30174), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['traj'], {'dtype': 'float'}), 
'(traj, dtype=float)\n', (30155, 30174), True, 'import numpy as np\n'), ((30185, 30223), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['pts'], {'dtype': 'float'}), '(pts, dtype=float)\n', (30205, 30223), True, 'import numpy as np\n'), ((1696, 1740), 'numpy.sum', 'np.sum', (['Poly[:, :-1]'], {'axis': '(1)', 'keepdims': '(False)'}), '(Poly[:, :-1], axis=1, keepdims=False)\n', (1702, 1740), True, 'import numpy as np\n'), ((2092, 2116), 'tofu.geom._GG.Poly_VolAngTor', '_GG.Poly_VolAngTor', (['Poly'], {}), '(Poly)\n', (2110, 2116), True, 'import tofu.geom._GG as _GG\n'), ((2772, 2822), 'numpy.sum', 'np.sum', (['((poly - circC[:, np.newaxis]) ** 2)'], {'axis': '(0)'}), '((poly - circC[:, np.newaxis]) ** 2, axis=0)\n', (2778, 2822), True, 'import numpy as np\n'), ((4391, 4427), 'numpy.arctan2', 'np.arctan2', (['Ptemp[1, :]', 'Ptemp[0, :]'], {}), '(Ptemp[1, :], Ptemp[0, :])\n', (4401, 4427), True, 'import numpy as np\n'), ((4444, 4477), 'numpy.unique', 'np.unique', (['Ang'], {'return_index': '(True)'}), '(Ang, return_index=True)\n', (4453, 4477), True, 'import numpy as np\n'), ((5218, 5262), 'numpy.array', 'np.array', (['[xnew + BaryS[0], ynew + BaryS[1]]'], {}), '([xnew + BaryS[0], ynew + BaryS[1]])\n', (5226, 5262), True, 'import numpy as np\n'), ((5273, 5317), 'numpy.concatenate', 'np.concatenate', (['(Poly, Poly[:, 0:1])'], {'axis': '(1)'}), '((Poly, Poly[:, 0:1]), axis=1)\n', (5287, 5317), True, 'import numpy as np\n'), ((10113, 10211), 'tofu.geom._GG._Ves_meshCross_FromInd', '_GG._Ves_meshCross_FromInd', (['MinMax1', 'MinMax2', 'dS[0]', 'dS[1]', 'ind'], {'dSMode': 'dSMode', 'margin': 'margin'}), '(MinMax1, MinMax2, dS[0], dS[1], ind, dSMode=\n dSMode, margin=margin)\n', (10139, 10211), True, 'import tofu.geom._GG as _GG\n'), ((13835, 13849), 'numpy.array', 'np.array', (['VLim'], {}), '(VLim)\n', (13843, 13849), True, 'import numpy as np\n'), ((17672, 17699), 'numpy.array', 'np.array', (['[[-np.pi, np.pi]]'], {}), '([[-np.pi, np.pi]])\n', (17680, 17699), True, 
'import numpy as np\n'), ((17787, 17816), 'numpy.ones', 'np.ones', (['(noccur,)'], {'dtype': 'int'}), '((noccur,), dtype=int)\n', (17794, 17816), True, 'import numpy as np\n'), ((18802, 18831), 'numpy.zeros', 'np.zeros', (['(nphi,)'], {'dtype': 'bool'}), '((nphi,), dtype=bool)\n', (18810, 18831), True, 'import numpy as np\n'), ((19074, 19102), 'numpy.ones', 'np.ones', (['(nphi,)'], {'dtype': 'bool'}), '((nphi,), dtype=bool)\n', (19081, 19102), True, 'import numpy as np\n'), ((19143, 19154), 'numpy.any', 'np.any', (['ind'], {}), '(ind)\n', (19149, 19154), True, 'import numpy as np\n'), ((19447, 19461), 'numpy.any', 'np.any', (['inddet'], {}), '(inddet)\n', (19453, 19461), True, 'import numpy as np\n'), ((19795, 19807), 'numpy.any', 'np.any', (['inds'], {}), '(inds)\n', (19801, 19807), True, 'import numpy as np\n'), ((20100, 20112), 'numpy.any', 'np.any', (['indk'], {}), '(indk)\n', (20106, 20112), True, 'import numpy as np\n'), ((20861, 20880), 'numpy.atleast_1d', 'np.atleast_1d', (['kOut'], {}), '(kOut)\n', (20874, 20880), True, 'import numpy as np\n'), ((23375, 23434), 'numpy.ceil', 'np.ceil', (['(25.0 * (1 - (angcross / (np.pi / 4) - 1) ** 2) + 5)'], {}), '(25.0 * (1 - (angcross / (np.pi / 4) - 1) ** 2) + 5)\n', (23382, 23434), True, 'import numpy as np\n'), ((23596, 23709), 'tofu.geom._GG.LOS_get_sample', '_GG.LOS_get_sample', (['(nlos * nseg)', 'resnk', 'DL'], {'dmethod': '"""rel"""', 'method': '"""simps"""', 'num_threads': 'num_threads', 'Test': 'Test'}), "(nlos * nseg, resnk, DL, dmethod='rel', method='simps',\n num_threads=num_threads, Test=Test)\n", (23614, 23709), True, 'import tofu.geom._GG as _GG\n'), ((24982, 25068), 'numpy.concatenate', 'np.concatenate', (['(Ds, Ds[:, :, -1:] + kOuts[None, :, -1:] * us[:, :, -1:])'], {'axis': '(-1)'}), '((Ds, Ds[:, :, -1:] + kOuts[None, :, -1:] * us[:, :, -1:]),\n axis=-1)\n', (24996, 25068), True, 'import numpy as np\n'), ((27734, 27808), 'numpy.linspace', 'np.linspace', (['DL[0]', 'DL[1]', '(N + 1)'], {'endpoint': 
'(True)', 'retstep': '(True)', 'dtype': 'float'}), '(DL[0], DL[1], N + 1, endpoint=True, retstep=True, dtype=float)\n', (27745, 27808), True, 'import numpy as np\n'), ((28831, 28845), 'numpy.isnan', 'np.isnan', (['Vals'], {}), '(Vals)\n', (28839, 28845), True, 'import numpy as np\n'), ((31071, 31094), 'numpy.full', 'np.full', (['(npart,)', 'r[0]'], {}), '((npart,), r[0])\n', (31078, 31094), True, 'import numpy as np\n'), ((31132, 31162), 'numpy.repeat', 'np.repeat', (['traj', 'npart'], {'axis': '(1)'}), '(traj, npart, axis=1)\n', (31141, 31162), True, 'import numpy as np\n'), ((31321, 31346), 'numpy.sum', 'np.sum', (['(vect ** 2)'], {'axis': '(0)'}), '(vect ** 2, axis=0)\n', (31327, 31346), True, 'import numpy as np\n'), ((31729, 31802), 'tofu.geom._GG.LOS_areVis_PtsFromPts_VesStruct', '_GG.LOS_areVis_PtsFromPts_VesStruct', (['traj', 'pts'], {'k': 'l', 'vis': '(False)'}), '(traj, pts, k=l, vis=False, **kwdargs)\n', (31764, 31802), True, 'import tofu.geom._GG as _GG\n'), ((1539, 1560), 'numpy.argmax', 'np.argmax', (['Poly[0, :]'], {}), '(Poly[0, :])\n', (1548, 1560), True, 'import numpy as np\n'), ((1580, 1601), 'numpy.argmin', 'np.argmin', (['Poly[0, :]'], {}), '(Poly[0, :])\n', (1589, 1601), True, 'import numpy as np\n'), ((1621, 1642), 'numpy.argmax', 'np.argmax', (['Poly[1, :]'], {}), '(Poly[1, :])\n', (1630, 1642), True, 'import numpy as np\n'), ((1662, 1683), 'numpy.argmin', 'np.argmin', (['Poly[1, :]'], {}), '(Poly[1, :])\n', (1671, 1683), True, 'import numpy as np\n'), ((2517, 2547), 'numpy.hypot', 'np.hypot', (['Vin[0, :]', 'Vin[1, :]'], {}), '(Vin[0, :], Vin[1, :])\n', (2525, 2547), True, 'import numpy as np\n'), ((4050, 4068), 'warnings.warn', 'warnings.warn', (['msg'], {}), '(msg)\n', (4063, 4068), False, 'import warnings\n'), ((4276, 4299), 'numpy.tile', 'np.tile', (['BaryS', '(Np, 1)'], {}), '(BaryS, (Np, 1))\n', (4283, 4299), True, 'import numpy as np\n'), ((4559, 4577), 'numpy.ones', 'np.ones', (['(Np + 1,)'], {}), '((Np + 1,))\n', (4566, 4577), 
True, 'import numpy as np\n'), ((5168, 5198), 'numpy.linspace', 'np.linspace', (['(-np.pi)', 'np.pi', 'NP'], {}), '(-np.pi, np.pi, NP)\n', (5179, 5198), True, 'import numpy as np\n'), ((8116, 8132), 'numpy.all', 'np.all', (['(ind >= 0)'], {}), '(ind >= 0)\n', (8122, 8132), True, 'import numpy as np\n'), ((8377, 8498), 'tofu.geom._GG.discretize_segment2d', '_GG.discretize_segment2d', (['MinMax1', 'MinMax2', 'dS[0]', 'dS[1]'], {'D1': 'DS[0]', 'D2': 'DS[1]', 'mode': 'dSMode', 'VPoly': 'VPoly', 'margin': 'margin'}), '(MinMax1, MinMax2, dS[0], dS[1], D1=DS[0], D2=DS[1],\n mode=dSMode, VPoly=VPoly, margin=margin)\n', (8401, 8498), True, 'import tofu.geom._GG as _GG\n'), ((8957, 9052), 'tofu.geom._GG._Ves_mesh_dlfromL_cython', '_GG._Ves_mesh_dlfromL_cython', (['MinMax1', 'dS[0]', 'DS[0]'], {'Lim': '(True)', 'dLMode': 'dSMode', 'margin': 'margin'}), '(MinMax1, dS[0], DS[0], Lim=True, dLMode=dSMode,\n margin=margin)\n', (8985, 9052), True, 'import tofu.geom._GG as _GG\n'), ((9325, 9420), 'tofu.geom._GG._Ves_mesh_dlfromL_cython', '_GG._Ves_mesh_dlfromL_cython', (['MinMax2', 'dS[1]', 'DS[1]'], {'Lim': '(True)', 'dLMode': 'dSMode', 'margin': 'margin'}), '(MinMax2, dS[1], DS[1], Lim=True, dLMode=dSMode,\n margin=margin)\n', (9353, 9420), True, 'import tofu.geom._GG as _GG\n'), ((9684, 9703), 'numpy.meshgrid', 'np.meshgrid', (['x1', 'x2'], {}), '(x1, x2)\n', (9695, 9703), True, 'import numpy as np\n'), ((9721, 9743), 'numpy.squeeze', 'np.squeeze', (['[xx1, xx2]'], {}), '([xx1, xx2])\n', (9731, 9743), True, 'import numpy as np\n'), ((10011, 10027), 'numpy.all', 'np.all', (['(ind >= 0)'], {}), '(ind >= 0)\n', (10017, 10027), True, 'import numpy as np\n'), ((11500, 11516), 'numpy.all', 'np.all', (['(ind >= 0)'], {}), '(ind >= 0)\n', (11506, 11516), True, 'import numpy as np\n'), ((11847, 11993), 'tofu.geom._GG._Ves_Vmesh_Tor_SubFromD_cython', '_GG._Ves_Vmesh_Tor_SubFromD_cython', (['dV[0]', 'dV[1]', 'dV[2]', 'MinMax1', 'MinMax2'], {'DR': 'DV[0]', 'DZ': 'DV[1]', 'DPhi': 'DV[2]', 
'VPoly': 'VPoly', 'Out': 'Out', 'margin': 'margin'}), '(dV[0], dV[1], dV[2], MinMax1, MinMax2,\n DR=DV[0], DZ=DV[1], DPhi=DV[2], VPoly=VPoly, Out=Out, margin=margin)\n', (11881, 11993), True, 'import tofu.geom._GG as _GG\n'), ((12055, 12196), 'tofu.geom._GG._Ves_Vmesh_Lin_SubFromD_cython', '_GG._Ves_Vmesh_Lin_SubFromD_cython', (['dV[0]', 'dV[1]', 'dV[2]', 'VLim', 'MinMax1', 'MinMax2'], {'DX': 'DV[0]', 'DY': 'DV[1]', 'DZ': 'DV[2]', 'VPoly': 'VPoly', 'margin': 'margin'}), '(dV[0], dV[1], dV[2], VLim, MinMax1,\n MinMax2, DX=DV[0], DY=DV[1], DZ=DV[2], VPoly=VPoly, margin=margin)\n', (12089, 12196), True, 'import tofu.geom._GG as _GG\n'), ((12282, 12390), 'tofu.geom._GG._Ves_Vmesh_Tor_SubFromInd_cython', '_GG._Ves_Vmesh_Tor_SubFromInd_cython', (['dV[0]', 'dV[1]', 'dV[2]', 'MinMax1', 'MinMax2', 'ind'], {'Out': 'Out', 'margin': 'margin'}), '(dV[0], dV[1], dV[2], MinMax1, MinMax2,\n ind, Out=Out, margin=margin)\n', (12318, 12390), True, 'import tofu.geom._GG as _GG\n'), ((12447, 12552), 'tofu.geom._GG._Ves_Vmesh_Lin_SubFromInd_cython', '_GG._Ves_Vmesh_Lin_SubFromInd_cython', (['dV[0]', 'dV[1]', 'dV[2]', 'VLim', 'MinMax1', 'MinMax2', 'ind'], {'margin': 'margin'}), '(dV[0], dV[1], dV[2], VLim, MinMax1,\n MinMax2, ind, margin=margin)\n', (12483, 12552), True, 'import tofu.geom._GG as _GG\n'), ((14154, 14173), 'numpy.arange', 'np.arange', (['(0)', 'nVLim'], {}), '(0, nVLim)\n', (14163, 14173), True, 'import numpy as np\n'), ((18207, 18221), 'numpy.min', 'np.min', (['Dtheta'], {}), '(Dtheta)\n', (18213, 18221), True, 'import numpy as np\n'), ((18223, 18237), 'numpy.max', 'np.max', (['Dtheta'], {}), '(Dtheta)\n', (18229, 18237), True, 'import numpy as np\n'), ((18994, 19008), 'numpy.any', 'np.any', (['indphi'], {}), '(indphi)\n', (19000, 19008), True, 'import numpy as np\n'), ((19250, 19263), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (19256, 19263), True, 'import numpy as np\n'), ((19265, 19278), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (19271, 19278), 
True, 'import numpy as np\n'), ((19412, 19426), 'numpy.abs', 'np.abs', (['detABu'], {}), '(detABu)\n', (19418, 19426), True, 'import numpy as np\n'), ((25236, 25265), 'numpy.full', 'np.full', (['(3, nlos, 1)', 'np.nan'], {}), '((3, nlos, 1), np.nan)\n', (25243, 25265), True, 'import numpy as np\n'), ((25282, 25323), 'numpy.concatenate', 'np.concatenate', (['(pts, nancoords)'], {'axis': '(-1)'}), '((pts, nancoords), axis=-1)\n', (25296, 25323), True, 'import numpy as np\n'), ((27338, 27367), 'numpy.ceil', 'np.ceil', (['((DL[1] - DL[0]) / dL)'], {}), '((DL[1] - DL[0]) / dL)\n', (27345, 27367), True, 'import numpy as np\n'), ((27391, 27408), 'numpy.ceil', 'np.ceil', (['(1.0 / dL)'], {}), '(1.0 / dL)\n', (27398, 27408), True, 'import numpy as np\n'), ((28888, 28900), 'numpy.sum', 'np.sum', (['Vals'], {}), '(Vals)\n', (28894, 28900), True, 'import numpy as np\n'), ((28945, 28980), 'scipy.integrate.simps', 'scpintg.simps', (['Vals'], {'x': 'None', 'dx': 'dLr'}), '(Vals, x=None, dx=dLr)\n', (28958, 28980), True, 'import scipy.integrate as scpintg\n'), ((4986, 5021), 'numpy.append', 'np.append', (['Ptemp[0, :]', 'Ptemp[0, 0]'], {}), '(Ptemp[0, :], Ptemp[0, 0])\n', (4995, 5021), True, 'import numpy as np\n'), ((5019, 5054), 'numpy.append', 'np.append', (['Ptemp[1, :]', 'Ptemp[1, 0]'], {}), '(Ptemp[1, :], Ptemp[1, 0])\n', (5028, 5054), True, 'import numpy as np\n'), ((5056, 5092), 'numpy.append', 'np.append', (['Ang', '(Ang[0] + 2.0 * np.pi)'], {}), '(Ang, Ang[0] + 2.0 * np.pi)\n', (5065, 5092), True, 'import numpy as np\n'), ((11693, 11707), 'numpy.array', 'np.array', (['VLim'], {}), '(VLim)\n', (11701, 11707), True, 'import numpy as np\n'), ((14860, 14876), 'numpy.all', 'np.all', (['(ind >= 0)'], {}), '(ind >= 0)\n', (14866, 14876), True, 'import numpy as np\n'), ((15924, 16069), 'tofu.geom._GG._Ves_Smesh_Lin_SubFromD_cython', '_GG._Ves_Smesh_Lin_SubFromD_cython', (['VLim[Ind[ii]]', 'dS[ii][0]', 'dS[ii][1]', 'VPoly'], {'DX': 'DS[0]', 'DY': 'DS[1]', 'DZ': 'DS[2]', 'DIn': 
'DIn', 'VIn': 'VIn', 'margin': 'margin'}), '(VLim[Ind[ii]], dS[ii][0], dS[ii][1],\n VPoly, DX=DS[0], DY=DS[1], DZ=DS[2], DIn=DIn, VIn=VIn, margin=margin)\n', (15958, 16069), True, 'import tofu.geom._GG as _GG\n'), ((16178, 16193), 'numpy.ones', 'np.ones', (['(3, 0)'], {}), '((3, 0))\n', (16185, 16193), True, 'import numpy as np\n'), ((17901, 17929), 'numpy.full', 'np.full', (['(noccur, 2)', 'np.nan'], {}), '((noccur, 2), np.nan)\n', (17908, 17929), True, 'import numpy as np\n'), ((23260, 23302), 'numpy.sqrt', 'np.sqrt', (['(us[0, ...] ** 2 + us[1, ...] ** 2)'], {}), '(us[0, ...] ** 2 + us[1, ...] ** 2)\n', (23267, 23302), True, 'import numpy as np\n'), ((23517, 23554), 'numpy.zeros', 'np.zeros', (['(nlos * nseg,)'], {'dtype': 'float'}), '((nlos * nseg,), dtype=float)\n', (23525, 23554), True, 'import numpy as np\n'), ((23901, 23914), 'numpy.diff', 'np.diff', (['lind'], {}), '(lind)\n', (23908, 23914), True, 'import numpy as np\n'), ((24259, 24285), 'numpy.split', 'np.split', (['pts', 'ind'], {'axis': '(1)'}), '(pts, ind, axis=1)\n', (24267, 24285), True, 'import numpy as np\n'), ((24326, 24361), 'numpy.insert', 'np.insert', (['pts', 'ind', 'np.nan'], {'axis': '(1)'}), '(pts, ind, np.nan, axis=1)\n', (24335, 24361), True, 'import numpy as np\n'), ((25122, 25140), 'numpy.arange', 'np.arange', (['(1)', 'nlos'], {}), '(1, nlos)\n', (25131, 25140), True, 'import numpy as np\n'), ((29020, 29058), 'scipy.integrate.romb', 'scpintg.romb', (['Vals'], {'dx': 'dLr', 'show': '(False)'}), '(Vals, dx=dLr, show=False)\n', (29032, 29058), True, 'import scipy.integrate as scpintg\n'), ((31549, 31590), 'numpy.sqrt', 'np.sqrt', (['(1.0 - r ** (2)[None, :] / l ** 2)'], {}), '(1.0 - r ** (2)[None, :] / l ** 2)\n', (31556, 31590), True, 'import numpy as np\n'), ((14269, 14284), 'numpy.asarray', 'np.asarray', (['Ind'], {}), '(Ind)\n', (14279, 14284), True, 'import numpy as np\n'), ((15294, 15456), 'tofu.geom._GG._Ves_Smesh_Tor_SubFromD_cython', '_GG._Ves_Smesh_Tor_SubFromD_cython', 
(['dS[ii][0]', 'dS[ii][1]', 'VPoly'], {'DR': 'DS[0]', 'DZ': 'DS[1]', 'DPhi': 'DS[2]', 'DIn': 'DIn', 'VIn': 'VIn', 'PhiMinMax': 'None', 'Out': 'Out', 'margin': 'margin'}), '(dS[ii][0], dS[ii][1], VPoly, DR=DS[0],\n DZ=DS[1], DPhi=DS[2], DIn=DIn, VIn=VIn, PhiMinMax=None, Out=Out, margin\n =margin)\n', (15328, 15456), True, 'import tofu.geom._GG as _GG\n'), ((15570, 15737), 'tofu.geom._GG._Ves_Smesh_TorStruct_SubFromD_cython', '_GG._Ves_Smesh_TorStruct_SubFromD_cython', (['VLim[Ind[ii]]', 'dS[ii][0]', 'dS[ii][1]', 'VPoly'], {'DR': 'DS[0]', 'DZ': 'DS[1]', 'DPhi': 'DS[2]', 'DIn': 'DIn', 'VIn': 'VIn', 'Out': 'Out', 'margin': 'margin'}), '(VLim[Ind[ii]], dS[ii][0], dS[ii][1\n ], VPoly, DR=DS[0], DZ=DS[1], DPhi=DS[2], DIn=DIn, VIn=VIn, Out=Out,\n margin=margin)\n', (15610, 15737), True, 'import tofu.geom._GG as _GG\n'), ((17138, 17269), 'tofu.geom._GG._Ves_Smesh_Lin_SubFromInd_cython', '_GG._Ves_Smesh_Lin_SubFromInd_cython', (['VLim[Ind[ii]]', 'dS[ii][0]', 'dS[ii][1]', 'VPoly', 'ind[Ind[ii]]'], {'DIn': 'DIn', 'VIn': 'VIn', 'margin': 'margin'}), '(VLim[Ind[ii]], dS[ii][0], dS[ii][1],\n VPoly, ind[Ind[ii]], DIn=DIn, VIn=VIn, margin=margin)\n', (17174, 17269), True, 'import tofu.geom._GG as _GG\n'), ((23335, 23358), 'numpy.sum', 'np.sum', (['(us ** 2)'], {'axis': '(0)'}), '(us ** 2, axis=0)\n', (23341, 23358), True, 'import numpy as np\n'), ((24175, 24205), 'numpy.hypot', 'np.hypot', (['pts[0, :]', 'pts[1, :]'], {}), '(pts[0, :], pts[1, :])\n', (24183, 24205), True, 'import numpy as np\n'), ((24559, 24583), 'numpy.split', 'np.split', (['pts[2, :]', 'ind'], {}), '(pts[2, :], ind)\n', (24567, 24583), True, 'import numpy as np\n'), ((24771, 24804), 'numpy.insert', 'np.insert', (['pts[2, :]', 'ind', 'np.nan'], {}), '(pts[2, :], ind, np.nan)\n', (24780, 24804), True, 'import numpy as np\n'), ((25518, 25551), 'numpy.split', 'np.split', (['pts[:2, :]', 'ind'], {'axis': '(1)'}), '(pts[:2, :], ind, axis=1)\n', (25526, 25551), True, 'import numpy as np\n'), ((25599, 25625), 
'numpy.split', 'np.split', (['pts', 'ind'], {'axis': '(1)'}), '(pts, ind, axis=1)\n', (25607, 25625), True, 'import numpy as np\n'), ((25786, 25810), 'numpy.split', 'np.split', (['pts[0, :]', 'ind'], {}), '(pts[0, :], ind)\n', (25794, 25810), True, 'import numpy as np\n'), ((25869, 25893), 'numpy.split', 'np.split', (['pts[1, :]', 'ind'], {}), '(pts[1, :], ind)\n', (25877, 25893), True, 'import numpy as np\n'), ((25952, 25976), 'numpy.split', 'np.split', (['pts[2, :]', 'ind'], {}), '(pts[2, :], ind)\n', (25960, 25976), True, 'import numpy as np\n'), ((27687, 27702), 'numpy.arange', 'np.arange', (['(0)', 'N'], {}), '(0, N)\n', (27696, 27702), True, 'import numpy as np\n'), ((28542, 28572), 'numpy.tile', 'np.tile', (['(-u)', '(Pts.shape[1], 1)'], {}), '(-u, (Pts.shape[1], 1))\n', (28549, 28572), True, 'import numpy as np\n'), ((14571, 14591), 'numpy.all', 'np.all', (['(ind[ii] >= 0)'], {}), '(ind[ii] >= 0)\n', (14577, 14591), True, 'import numpy as np\n'), ((16502, 16644), 'tofu.geom._GG._Ves_Smesh_Tor_SubFromInd_cython', '_GG._Ves_Smesh_Tor_SubFromInd_cython', (['dS[ii][0]', 'dS[ii][1]', 'VPoly', 'ind[Ind[ii]]'], {'DIn': 'DIn', 'VIn': 'VIn', 'PhiMinMax': 'None', 'Out': 'Out', 'margin': 'margin'}), '(dS[ii][0], dS[ii][1], VPoly, ind[Ind[\n ii]], DIn=DIn, VIn=VIn, PhiMinMax=None, Out=Out, margin=margin)\n', (16538, 16644), True, 'import tofu.geom._GG as _GG\n'), ((16761, 16908), 'tofu.geom._GG._Ves_Smesh_TorStruct_SubFromInd_cython', '_GG._Ves_Smesh_TorStruct_SubFromInd_cython', (['VLim[Ind[ii]]', 'dS[ii][0]', 'dS[ii][1]', 'VPoly', 'ind[Ind[ii]]'], {'DIn': 'DIn', 'VIn': 'VIn', 'Out': 'Out', 'margin': 'margin'}), '(VLim[Ind[ii]], dS[ii][0], dS[ii]\n [1], VPoly, ind[Ind[ii]], DIn=DIn, VIn=VIn, Out=Out, margin=margin)\n', (16803, 16908), True, 'import tofu.geom._GG as _GG\n'), ((24466, 24496), 'numpy.hypot', 'np.hypot', (['pts[0, :]', 'pts[1, :]'], {}), '(pts[0, :], pts[1, :])\n', (24474, 24496), True, 'import numpy as np\n'), ((24670, 24700), 'numpy.hypot', 'np.hypot', 
(['pts[0, :]', 'pts[1, :]'], {}), '(pts[0, :], pts[1, :])\n', (24678, 24700), True, 'import numpy as np\n'), ((27563, 27572), 'numpy.log', 'np.log', (['N'], {}), '(N)\n', (27569, 27572), True, 'import numpy as np\n'), ((27573, 27584), 'numpy.log', 'np.log', (['(2.0)'], {}), '(2.0)\n', (27579, 27584), True, 'import numpy as np\n')] |
'''
Created on May 10, 2019
@author: kreuzer
'''
import json
import uuid
import base64
import time
import requests
import os
from contextlib import closing
from jupyterhub.orm import APIToken, User
from jupyterhub.apihandlers.base import APIHandler
class J4J_APITokenHandler(APIHandler):
    """JupyterHub API handler that lets trusted internal services read (GET)
    or overwrite (POST) the OAuth tokens stored in a user's auth state.

    Both verbs require a shared secret in the ``Intern-Authorization`` header
    (read from the file at ``$HUB_TOKEN_PATH``); the acting user is resolved
    from the JupyterHub API token sent in the ``Authorization`` header.
    """

    async def get(self, username, server_name=''): # @UnusedVariable
        """Return the stored access/refresh token (and expiry) as JSON.

        Optional request headers:
          * ``renew: true``    -- refresh the access token against Unity if it
            expires in less than 480 seconds.
          * ``accounts: true`` -- additionally include ``oauth_user`` from the
            auth state in the response.

        Responds 201 with the token JSON, 401 on a bad internal secret, or
        404 if no user matches the supplied API token.
        """
        # Correlation id for log tracing; generate one if the caller sent none.
        uuidcode = self.request.headers.get('uuidcode', None)
        if not uuidcode:
            uuidcode = uuid.uuid4().hex
        # Validate the shared internal secret before doing anything else.
        with open(os.environ.get('HUB_TOKEN_PATH', ''), 'r') as f:
            intern_token = f.read().rstrip()
        if self.request.headers.get('Intern-Authorization', '') != intern_token:
            self.log.warning("uuidcode={} - Could not validate Intern-Authorization".format(uuidcode))
            self.set_status(401)
            return
        self.log.debug("uuidcode={} - GetToken for servername={}".format(uuidcode, server_name))
        user = None
        try:
            # Resolve the user from the JupyterHub API token
            # ("Authorization: <scheme> <token>").
            if 'Authorization' in self.request.headers.keys():
                s = self.request.headers.get('Authorization').split()
                found = APIToken.find(self.db, token=s[1])
                if found is not None:
                    user = self._user_from_orm(found.user)
        # NOTE(review): bare except also swallows system-exiting exceptions;
        # consider narrowing to `except Exception`.
        except:
            self.log.debug("uuidcode={} - Could not find user for this token: {}".format(uuidcode, self.request.headers.get('Authorization', '<no Authorization header>')))
        #if not user:
        #    user = self.find_user(username)
        if user:
            self.set_header('Content-Type', 'text/plain')
            self.set_status(201)
            self.log.debug("UID={} - uuidcode={} - load accesstoken from database.".format(user.name, uuidcode))
            # Re-read the user's row so the auth state reflects the latest
            # database contents (another process may have refreshed it).
            db_user = user.db.query(User).filter(User.name == user.name).first()
            if db_user:
                user.db.refresh(db_user)
                user.encrypted_auth_state = db_user.encrypted_auth_state
            state = await user.get_auth_state()
            token = { 'accesstoken': state.get('accesstoken'), 'refreshtoken': state.get('refreshtoken'), 'expire': state.get('expire') }
            if self.request.headers.get('renew', 'False').lower() == 'true':
                # Only refresh when the token expires within the next 8 minutes.
                if int(token.get('expire')) - time.time() < 480:
                    try:
                        self.log.debug("uuidcode={} - UID={} - Try to update accesstoken".format(uuidcode, user.name))
                        # Unity client credentials / endpoints, keyed by token URL.
                        with open(user.authenticator.unity_file, 'r') as f:
                            unity = json.load(f)
                        # Build the refresh request for whichever login flow
                        # created the token (LDAP- or username-based handler).
                        if state.get('login_handler') == 'jscldap':
                            b64key = base64.b64encode(bytes('{}:{}'.format(unity[user.authenticator.jscldap_token_url]['client_id'], unity[user.authenticator.jscldap_token_url]['client_secret']), 'utf-8')).decode('utf-8')
                            data = {'refresh_token': token.get('refreshtoken'),
                                    'grant_type': 'refresh_token',
                                    'scope': ' '.join(unity[user.authenticator.jscldap_token_url]['scope'])}
                            url = user.authenticator.jscldap_token_url
                            info_url = unity[user.authenticator.jscldap_token_url]['links']['tokeninfo']
                        elif state.get('login_handler') == 'jscusername':
                            b64key = base64.b64encode(bytes('{}:{}'.format(unity[user.authenticator.jscusername_token_url]['client_id'], unity[user.authenticator.jscusername_token_url]['client_secret']), 'utf-8')).decode('utf-8')
                            data = {'refresh_token': token.get('refreshtoken'),
                                    'grant_type': 'refresh_token',
                                    'scope': ' '.join(unity[user.authenticator.jscusername_token_url]['scope'])}
                            url = user.authenticator.jscusername_token_url
                            info_url = unity[user.authenticator.jscusername_token_url]['links']['tokeninfo']
                        # NOTE(review): if login_handler is neither value above,
                        # b64key/data/url/info_url are unbound and the NameError
                        # is swallowed by the outer except -- confirm intended.
                        accesstoken = token.get('accesstoken')
                        expire = token.get('expire')
                        headers = {'Authorization': 'Basic {}'.format(b64key),
                                   'Accept': 'application/json'}
                        # NOTE(review): verify=False disables TLS certificate
                        # validation for both Unity calls -- confirm intended.
                        with closing(requests.post(url, headers=headers, data=data, verify=False)) as r:
                            if r.status_code == 200:
                                accesstoken = r.json().get('access_token')
                            else:
                                self.log.warning("uuidcode={} - UID={} - Could not update accesstoken: {} {}".format(uuidcode, user.name, r.status_code, r.text))
                        # Ask Unity for the new token's expiry timestamp.
                        with closing(requests.get(info_url, headers={ 'Authorization': 'Bearer {}'.format(accesstoken) }, verify=False)) as r:
                            if r.status_code == 200:
                                expire = r.json().get('exp')
                            else:
                                self.log.warning("uuidcode={} - UID={} - Could not receive token information: {} {}".format(uuidcode, user.name, r.status_code, r.text))
                        # Persist the refreshed token and mirror it in the reply.
                        state['accesstoken'] = accesstoken
                        state['expire'] = expire
                        await user.save_auth_state(state)
                        token['accesstoken'] = accesstoken
                        token['expire'] = expire
                    except:
                        # Best effort: on any refresh failure, fall back to
                        # returning the (possibly stale) stored token.
                        self.log.exception("uuidcode={} - UID={} - Could not update accesstoken".format(uuidcode, user.name))
            if self.request.headers.get('accounts', 'False').lower() == 'true':
                token['oauth_user'] = state.get('oauth_user')
            self.write(json.dumps(token))
            self.flush()
        else:
            self.set_status(404)
            self.write("User with token {} not found".format(self.request.headers.get('Authorization', None)))
            self.flush()
        return

    async def post(self, username, server_name=''):
        """Store a new ``accesstoken`` / ``expire`` pair in the user's auth state.

        Expects a JSON body ``{"accesstoken": "...", "expire": "..."}``.
        Responds 201 on success, 400 on a missing body or missing keys,
        401 on a bad internal secret, 404 if the user cannot be resolved.
        """
        # Correlation id for log tracing; generate one if the caller sent none.
        uuidcode = self.request.headers.get('uuidcode', None)
        if not uuidcode:
            uuidcode = uuid.uuid4().hex
        self.log.debug("uuidcode={} - PostToken for servername={}".format(uuidcode, server_name))
        # Validate the shared internal secret before doing anything else.
        with open(os.environ.get('HUB_TOKEN_PATH', ''), 'r') as f:
            intern_token = f.read().rstrip()
        if self.request.headers.get('Intern-Authorization', '') != intern_token:
            self.log.warning("uuidcode={} - Could not validate Intern-Authorization".format(uuidcode))
            self.set_status(401)
            return
        data = self.request.body.decode("utf8")
        self.set_header('Content-Type', 'text/plain')
        if not data:
            self.set_status(400)
            self.write("Please send the token in the body as json: { \"accesstoken\": \"...\", \"expire\": \"...\" }")
            self.flush()
            return
        user = None
        # Resolve the user from the JupyterHub API token; unlike GET, this
        # endpoint falls back to a lookup by the username path argument.
        if 'Authorization' in self.request.headers.keys():
            s = self.request.headers.get('Authorization').split()
            found = APIToken.find(self.db, token=s[1])
            if found is not None:
                user = self._user_from_orm(found.user)
        if not user:
            user = self.find_user(username)
        if user:
            data_json = json.loads(data)
            try:
                self.log.debug("uuidcode={} - UID={} - update accesstoken in database.".format(uuidcode, user.name))
                # Re-read the user's row so we update the freshest auth state.
                db_user = user.db.query(User).filter(User.name == user.name).first()
                if db_user:
                    user.db.refresh(db_user)
                    user.encrypted_auth_state = db_user.encrypted_auth_state
                state = await user.get_auth_state()
                state['accesstoken'] = data_json['accesstoken']
                state['expire'] = data_json['expire']
                await user.save_auth_state(state)
            except KeyError as e:
                # Body was JSON but lacked a required key.
                self.set_status(400)
                self.write("Key {} missing".format(str(e)))
                self.flush()
                return
            self.set_header('Content-Type', 'text/plain')
            self.set_status(201)
        else:
            self.set_status(404)
            self.write("User with token {} not found".format(self.request.headers.get('Authorization', None)))
            self.flush()
        return
| [
"jupyterhub.orm.APIToken.find",
"json.loads",
"requests.post",
"json.dumps",
"os.environ.get",
"uuid.uuid4",
"json.load",
"time.time"
] | [((7112, 7146), 'jupyterhub.orm.APIToken.find', 'APIToken.find', (['self.db'], {'token': 's[1]'}), '(self.db, token=s[1])\n', (7125, 7146), False, 'from jupyterhub.orm import APIToken, User\n'), ((7342, 7358), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (7352, 7358), False, 'import json\n'), ((472, 484), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (482, 484), False, 'import uuid\n'), ((507, 543), 'os.environ.get', 'os.environ.get', (['"""HUB_TOKEN_PATH"""', '""""""'], {}), "('HUB_TOKEN_PATH', '')\n", (521, 543), False, 'import os\n'), ((1124, 1158), 'jupyterhub.orm.APIToken.find', 'APIToken.find', (['self.db'], {'token': 's[1]'}), '(self.db, token=s[1])\n', (1137, 1158), False, 'from jupyterhub.orm import APIToken, User\n'), ((5760, 5777), 'json.dumps', 'json.dumps', (['token'], {}), '(token)\n', (5770, 5777), False, 'import json\n'), ((6165, 6177), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6175, 6177), False, 'import uuid\n'), ((6298, 6334), 'os.environ.get', 'os.environ.get', (['"""HUB_TOKEN_PATH"""', '""""""'], {}), "('HUB_TOKEN_PATH', '')\n", (6312, 6334), False, 'import os\n'), ((2260, 2271), 'time.time', 'time.time', ([], {}), '()\n', (2269, 2271), False, 'import time\n'), ((2535, 2547), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2544, 2547), False, 'import json\n'), ((4315, 4375), 'requests.post', 'requests.post', (['url'], {'headers': 'headers', 'data': 'data', 'verify': '(False)'}), '(url, headers=headers, data=data, verify=False)\n', (4328, 4375), False, 'import requests\n')] |
from django.db import models
# from django.contrib.auth.models import User
from apps.users.models import CustomUser
# Import the Enum base class for the bill-type enumeration below
from enum import Enum
from enumfields import EnumIntegerField
class BillType(Enum):
    """Kind of a bill entry: money going out (expense) or coming in (income)."""

    OUTGO = 0   # bill type: expense
    INCOME = 1  # bill type: income
class Categorys(models.Model):
    """
    Category table for bill entries (expense/income classification).
    Holds both built-in default categories and user-defined ones.
    """
    # Whether this is a built-in default category (True) or user-defined (False)
    is_default = models.BooleanField('是否默认分类', default=False)   # True: built-in default category, False: user-defined category
    # Owning user for user-defined categories; NULL for built-in defaults
    user = models.ForeignKey(CustomUser, verbose_name='自定义分类所属用户', blank=True, null=True, on_delete=models.CASCADE)
    # Which side of the ledger the category belongs to (defaults to expense)
    bill_type = EnumIntegerField(BillType, verbose_name='账目类型', default=BillType.OUTGO)
    # NOTE(review): unique=True makes the name globally unique, so two users
    # cannot define a custom category with the same name -- confirm intended.
    name = models.CharField('分类名称', max_length=20, unique=True)
    # Optional parent category, enabling a hierarchical classification tree
    parent = models.ForeignKey('self', verbose_name='父级分类', blank=True, null=True, on_delete=models.CASCADE)
    modify_time = models.DateTimeField('修改时间', auto_now=True)
    create_time = models.DateTimeField('创建时间', auto_now_add=True)

    class Meta:
        db_table = "categorys"
        # Most recently modified categories first
        ordering = ['-modify_time']
| [
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"enumfields.EnumIntegerField",
"django.db.models.CharField"
] | [((343, 387), 'django.db.models.BooleanField', 'models.BooleanField', (['"""是否默认分类"""'], {'default': '(False)'}), "('是否默认分类', default=False)\n", (362, 387), False, 'from django.db import models\n'), ((427, 536), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CustomUser'], {'verbose_name': '"""自定义分类所属用户"""', 'blank': '(True)', 'null': '(True)', 'on_delete': 'models.CASCADE'}), "(CustomUser, verbose_name='自定义分类所属用户', blank=True, null=\n True, on_delete=models.CASCADE)\n", (444, 536), False, 'from django.db import models\n'), ((548, 619), 'enumfields.EnumIntegerField', 'EnumIntegerField', (['BillType'], {'verbose_name': '"""账目类型"""', 'default': 'BillType.OUTGO'}), "(BillType, verbose_name='账目类型', default=BillType.OUTGO)\n", (564, 619), False, 'from enumfields import EnumIntegerField\n'), ((631, 683), 'django.db.models.CharField', 'models.CharField', (['"""分类名称"""'], {'max_length': '(20)', 'unique': '(True)'}), "('分类名称', max_length=20, unique=True)\n", (647, 683), False, 'from django.db import models\n'), ((698, 797), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'verbose_name': '"""父级分类"""', 'blank': '(True)', 'null': '(True)', 'on_delete': 'models.CASCADE'}), "('self', verbose_name='父级分类', blank=True, null=True,\n on_delete=models.CASCADE)\n", (715, 797), False, 'from django.db import models\n'), ((812, 855), 'django.db.models.DateTimeField', 'models.DateTimeField', (['"""修改时间"""'], {'auto_now': '(True)'}), "('修改时间', auto_now=True)\n", (832, 855), False, 'from django.db import models\n'), ((874, 921), 'django.db.models.DateTimeField', 'models.DateTimeField', (['"""创建时间"""'], {'auto_now_add': '(True)'}), "('创建时间', auto_now_add=True)\n", (894, 921), False, 'from django.db import models\n')] |
"""Random subset dataset.
"""
import random
import numpy as np
import torch
from torch.utils.data import Dataset
from PIL import Image
class RandomSubset(Dataset):
    """Dataset wrapper that iterates over a different random subset each epoch.

    The intention behind this class is to speed up genetic optimization by
    using only a small subset of the original dataset every epoch. The subset
    is resampled from the original dataset once a full pass over the current
    subset is completed, so all samples of the original dataset are used at
    some point during training.
    """

    def __init__(self, dataset: Dataset, subset_ratio: float) -> None:
        """
        Args:
            dataset: PyTorch dataset exposing ``data``, ``targets`` and
                ``transform`` attributes (e.g. torchvision-style datasets).
            subset_ratio: Fraction of the original dataset used per epoch.
        """
        super().__init__()
        self.dataset = dataset
        # Normalize both containers to numpy arrays up front.
        # BUG FIX: the original ndarray branch for `targets` assigned
        # `self.data` instead of `self.targets`, leaving `targets` undefined
        # for ndarray-backed datasets.
        self.data = self._as_numpy(dataset.data, "Data")
        self.targets = self._as_numpy(dataset.targets, "Targets")
        self.subset_length = int(len(self.data) * subset_ratio)
        self.counter = 0
        self._random_subset()

    @staticmethod
    def _as_numpy(values, what: str) -> np.ndarray:
        """Normalize ``values`` (ndarray, list, or tensor) to a numpy array.

        Raises:
            TypeError: If ``values`` is of an unsupported container type.
        """
        if isinstance(values, np.ndarray):
            return values
        if isinstance(values, list):
            return np.array(values)
        if isinstance(values, torch.Tensor):
            return values.numpy()
        raise TypeError(f"{what} must be of type 'ndarray', 'list' or 'tensor', "
                        f"but got {type(values)}.")

    def _random_subset(self) -> None:
        """Draw a fresh random index mapping into the original dataset."""
        self.rand_map = random.sample(list(range(len(self.data))), self.subset_length)

    def __len__(self) -> int:
        return self.subset_length

    def __getitem__(self, index: int) -> tuple:
        # After a full pass over the current subset, resample a new one so
        # every epoch sees different samples of the original dataset.
        self.counter += 1
        if self.counter > self.subset_length:
            self._random_subset()
            self.counter = 0
        rand_index = self.rand_map[index]
        img, target = self.data[rand_index], int(self.targets[rand_index])
        # Cast to PIL Image, required for torchvision-style transformations.
        if len(img.shape) == 2:
            img = Image.fromarray(img, mode="L")
        elif (len(img.shape) == 3) and (img.shape[-1] == 3):
            img = Image.fromarray(img, mode="RGB")
        if self.dataset.transform is not None:
            img = self.dataset.transform(img)
        return img, target
| [
"numpy.array",
"PIL.Image.fromarray"
] | [((2511, 2541), 'PIL.Image.fromarray', 'Image.fromarray', (['img'], {'mode': '"""L"""'}), "(img, mode='L')\n", (2526, 2541), False, 'from PIL import Image\n'), ((1013, 1035), 'numpy.array', 'np.array', (['dataset.data'], {}), '(dataset.data)\n', (1021, 1035), True, 'import numpy as np\n'), ((1451, 1476), 'numpy.array', 'np.array', (['dataset.targets'], {}), '(dataset.targets)\n', (1459, 1476), True, 'import numpy as np\n'), ((2621, 2653), 'PIL.Image.fromarray', 'Image.fromarray', (['img'], {'mode': '"""RGB"""'}), "(img, mode='RGB')\n", (2636, 2653), False, 'from PIL import Image\n')] |
from tempfile import mkstemp
import os
import tinys3
def create_temp_file(data):
    """Write ``data`` to a freshly created temporary file and return ``data``.

    The temporary file is left on disk at the path produced by ``mkstemp``;
    the return value mirrors the original interface (it returns the input
    data, not the path).

    Args:
        data: Text content to write into the temporary file.

    Returns:
        The ``data`` argument, unchanged.
    """
    fd, temp_path = mkstemp()
    # Wrap the already-open descriptor for writing. The original re-opened
    # the path in read mode ('r'), which made file.write() raise
    # io.UnsupportedOperation and leaked the mkstemp descriptor's handle.
    with os.fdopen(fd, 'w') as file:
        file.write(data)
    return data
def push_to_s3(filepath):
    """Upload *filepath* to the 'darkmattersheep.uk/strictly/' S3 bucket.

    Credentials are read from the ``AWS_ACCESS_KEY_ID`` and
    ``AWS_SECRET_KEY`` environment variables; the connection uses TLS.

    Args:
        filepath: Path of the local file to upload (also used as the key).
    """
    s3 = tinys3.Connection(os.environ['AWS_ACCESS_KEY_ID'], os.environ['AWS_SECRET_KEY'], tls=True)
    # Context manager guarantees the file handle is closed even if the
    # upload raises (the original leaked the open handle).
    with open(filepath, 'rb') as f:
        s3.upload(filepath, f, 'darkmattersheep.uk/strictly/')
    return
| [
"os.close",
"tempfile.mkstemp",
"tinys3.Connection"
] | [((102, 111), 'tempfile.mkstemp', 'mkstemp', ([], {}), '()\n', (109, 111), False, 'from tempfile import mkstemp\n'), ((186, 198), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (194, 198), False, 'import os\n'), ((252, 347), 'tinys3.Connection', 'tinys3.Connection', (["os.environ['AWS_ACCESS_KEY_ID']", "os.environ['AWS_SECRET_KEY']"], {'tls': '(True)'}), "(os.environ['AWS_ACCESS_KEY_ID'], os.environ[\n 'AWS_SECRET_KEY'], tls=True)\n", (269, 347), False, 'import tinys3\n')] |
"""Copy conditionals.enc to cond.enc, tagging each instruction line with a
randomly chosen condition suffix."""
import re
from random import choice

# An assembly-style instruction: optional indent, a lowercase mnemonic,
# then zero or more comma-separated operands.
instr = re.compile(r'[ \t]*[a-z]+ ([a-z0-9\[\]]+,? *)*')

conditions = ['zero', 'carry', 'negative', 'equal', 'greater', 'less']

with open("conditionals.enc", "r") as source:
    lines = list(source)

with open("cond.enc", 'w') as target:
    for line in lines:
        if instr.search(line) is not None:
            # Drop the trailing newline, append a random condition, restore it.
            line = line[:-1] + f' -> {choice(conditions)}\n'
        target.write(line)
| [
"random.choice",
"re.compile"
] | [((20, 70), 're.compile', 're.compile', (['"""[ \\\\t]*[a-z]+ ([a-z0-9\\\\[\\\\]]+,? *)*"""'], {}), "('[ \\\\t]*[a-z]+ ([a-z0-9\\\\[\\\\]]+,? *)*')\n", (30, 70), False, 'import re\n'), ((392, 410), 'random.choice', 'choice', (['conditions'], {}), '(conditions)\n', (398, 410), False, 'from random import choice\n')] |
import torch
import torch.nn as nn
from einops.layers.torch import Rearrange
class cnnTransformer(nn.Module):
    """Transformer encoder applied to CNN feature maps.

    The (B, C, H, W) feature map is flattened to an (H*W, B, C) token
    sequence (one token per spatial position), a learned positional
    embedding is added, the sequence is run through a TransformerEncoder
    stack, and the result is reshaped back to (B, C, H, W).
    """

    def __init__(self,
                 name: str,
                 n_token: int,
                 n_embed: int,
                 n_head: int,
                 n_hid: int,
                 n_layer: int,
                 dropout: float = 0.5,
                 feat_height: int = 16):
        """
        Args:
            name: Model identifier (kept for interface compatibility;
                not used internally).
            n_token: Number of tokens, i.e. H*W of the input feature map.
            n_embed: Embedding (channel) dimension of each token.
            n_head: Number of attention heads per encoder layer.
            n_hid: Hidden size of the feed-forward sublayers.
            n_layer: Number of stacked encoder layers.
            dropout: Dropout probability inside the encoder layers.
            feat_height: Height H of the feature map, used to restore the
                spatial layout on output. Defaults to 16, the previously
                hard-coded value, so existing callers are unaffected.
        """
        super(cnnTransformer, self).__init__()
        # (B, C, H, W) -> (H*W, B, C): the sequence-first layout expected by
        # nn.TransformerEncoder (batch_first defaults to False).
        self.enc_sequences = Rearrange('b c h w -> (h w) b c')
        # Learned positional embedding, broadcast over the batch dimension.
        self.pos_embedding = nn.Parameter(torch.randn(n_token, 1, n_embed))
        encoder_layer = nn.TransformerEncoderLayer(d_model=n_embed,
                                                   nhead=n_head,
                                                   dim_feedforward=n_hid,
                                                   dropout=dropout)
        self.transformer = nn.TransformerEncoder(encoder_layer, num_layers=n_layer)
        # (H*W, B, C) -> (B, C, H, W); the width is inferred by einops from
        # n_token / feat_height.
        self.dec_sequences = Rearrange('(h w) b c -> b c h w', h=feat_height)

    def forward(self, input):
        """Encode the feature map with the transformer; shape is preserved."""
        x = self.enc_sequences(input)
        x = x + self.pos_embedding
        x = self.transformer(x)
        x = self.dec_sequences(x)
        return x
| [
"torch.nn.TransformerEncoder",
"torch.nn.TransformerEncoderLayer",
"einops.layers.torch.Rearrange",
"torch.randn"
] | [((439, 472), 'einops.layers.torch.Rearrange', 'Rearrange', (['"""b c h w -> (h w) b c"""'], {}), "('b c h w -> (h w) b c')\n", (448, 472), False, 'from einops.layers.torch import Rearrange\n'), ((575, 677), 'torch.nn.TransformerEncoderLayer', 'nn.TransformerEncoderLayer', ([], {'d_model': 'n_embed', 'nhead': 'n_head', 'dim_feedforward': 'n_hid', 'dropout': 'dropout'}), '(d_model=n_embed, nhead=n_head, dim_feedforward=\n n_hid, dropout=dropout)\n', (601, 677), True, 'import torch.nn as nn\n'), ((860, 916), 'torch.nn.TransformerEncoder', 'nn.TransformerEncoder', (['encoder_layer'], {'num_layers': 'n_layer'}), '(encoder_layer, num_layers=n_layer)\n', (881, 916), True, 'import torch.nn as nn\n'), ((947, 986), 'einops.layers.torch.Rearrange', 'Rearrange', (['"""(h w) b c -> b c h w"""'], {'h': '(16)'}), "('(h w) b c -> b c h w', h=16)\n", (956, 986), False, 'from einops.layers.torch import Rearrange\n'), ((516, 548), 'torch.randn', 'torch.randn', (['n_token', '(1)', 'n_embed'], {}), '(n_token, 1, n_embed)\n', (527, 548), False, 'import torch\n')] |
import numpy
import matplotlib.pyplot as plt
# Draw 109324 samples from a normal distribution (mean 1.9, std dev 1.0).
x = numpy.random.normal(1.9, 1.0, 109324)
# Plot the sample distribution as a 100-bin histogram and display it.
plt.hist(x, 100)
plt.show()
| [
"numpy.random.normal",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.show"
] | [((50, 87), 'numpy.random.normal', 'numpy.random.normal', (['(1.9)', '(1.0)', '(109324)'], {}), '(1.9, 1.0, 109324)\n', (69, 87), False, 'import numpy\n'), ((89, 105), 'matplotlib.pyplot.hist', 'plt.hist', (['x', '(100)'], {}), '(x, 100)\n', (97, 105), True, 'import matplotlib.pyplot as plt\n'), ((106, 116), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (114, 116), True, 'import matplotlib.pyplot as plt\n')] |