| code (string, 22–1.05M chars) | apis (list, 1–3.31k items) | extract_api (string, 75–3.25M chars) |
|---|---|---|
#
# Copyright (c) 2019-2021 MINRES Technologies GmbH
#
# SPDX-License-Identifier: Apache-2.0
#
from cppyy_backend._cppyy_generator import CppyyGenerator
from clang.cindex import Config
from pprint import pprint
import glob
import os.path
proj_dir='../../PySysC-SC/components'
flags=['-I/home/eyck/.conan/data/SystemC/2.3.2/minres/stable/package/672f3350f4be793d25afa19a6a77085e20d01ea5/include',
'-I'+proj_dir,
'-fvisibility=hidden',
'-D__PIC__',
'-Wno-macro-redefined',
'-std=c++11']
Config.set_library_file('/usr/lib/x86_64-linux-gnu/libclang-6.0.so')
lib = Config().lib
import ctypes
from clang.cindex import Type
items = [
("clang_Type_getNumTemplateArguments", [Type], ctypes.c_size_t),
]
for item in items:
func = getattr(lib, item[0])
if len(item) >= 2:
func.argtypes = item[1]
if len(item) >= 3:
func.restype = item[2]
if len(item) == 4:
func.errcheck = item[3]
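# Illustrative sketch (not part of the original script): once argtypes and
# restype are registered, the extra libclang entry point is callable like any
# other binding, e.g.
#
#   n = lib.clang_Type_getNumTemplateArguments(some_cursor.type)
#
# where `some_cursor` is a hypothetical cursor obtained from a parsed
# translation unit; the call is left commented out because it needs a live
# libclang Type instance.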
g = CppyyGenerator(flags, dump_modules=True, dump_items=True, dump_includes=False, dump_privates=True, verbose=True)
mapping = g.create_mapping([os.path.join(proj_dir, 'initiator.h')])
pprint(mapping)
|
[
"cppyy_backend._cppyy_generator.CppyyGenerator",
"clang.cindex.Config",
"pprint.pprint",
"clang.cindex.Config.set_library_file"
] |
[((527, 595), 'clang.cindex.Config.set_library_file', 'Config.set_library_file', (['"""/usr/lib/x86_64-linux-gnu/libclang-6.0.so"""'], {}), "('/usr/lib/x86_64-linux-gnu/libclang-6.0.so')\n", (550, 595), False, 'from clang.cindex import Config\n'), ((962, 1079), 'cppyy_backend._cppyy_generator.CppyyGenerator', 'CppyyGenerator', (['flags'], {'dump_modules': '(True)', 'dump_items': '(True)', 'dump_includes': '(False)', 'dump_privates': '(True)', 'verbose': '(True)'}), '(flags, dump_modules=True, dump_items=True, dump_includes=\n False, dump_privates=True, verbose=True)\n', (976, 1079), False, 'from cppyy_backend._cppyy_generator import CppyyGenerator\n'), ((1144, 1159), 'pprint.pprint', 'pprint', (['mapping'], {}), '(mapping)\n', (1150, 1159), False, 'from pprint import pprint\n'), ((602, 610), 'clang.cindex.Config', 'Config', ([], {}), '()\n', (608, 610), False, 'from clang.cindex import Config\n')]
|
# Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from marshmallow import fields
from .base import BaseEventSchema, BaseEvent
class NodeStateChangedSchema(BaseEventSchema):
"""
    Schema for the NodeStateChanged events.
"""
node = fields.Dict()
previous_state = fields.String()
class NodeStateChanged(BaseEvent):
"""
Event that fires when a node state changes.
"""
name = 'node-state-changed'
schema_class = NodeStateChangedSchema
def __init__(self, **kwargs):
"""
Initializer.
        :param dict node: the current state of the node affected
        :param str previous_state: the previous state of the node
:param kwargs:
"""
super().__init__(**kwargs)
self.node: dict = kwargs.get('node', {})
self.previous_state: str = kwargs.get('previous_state', None)
class NodeTagsChangedSchema(BaseEventSchema):
"""
Schema for the NodeTagsChanged events.
"""
node_id = fields.String()
node_name = fields.String()
tags = fields.Dict()
previous_tags = fields.Dict()
class NodeTagsChanged(BaseEvent):
"""
    Event that fires when a node's tags are changed.
"""
name = 'node-tags-changed'
schema_class = NodeTagsChangedSchema
def __init__(self, **kwargs):
"""
Initializer.
        :param str node_id: the id of the node affected
        :param str node_name: the name of the node affected
        :param dict tags: the current tags of the node
        :param dict previous_tags: the previous version of the tags for the
            node
:param kwargs:
"""
super().__init__(**kwargs)
self.node_id: str = kwargs.get('node_id', None)
self.node_name: str = kwargs.get('node_name', None)
self.tags: Dict[str, str] = kwargs.get('tags', {})
self.previous_tags: Dict[str, str] = kwargs.get('previous_tags', {})
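# Hedged usage sketch (not part of the original module): a marshmallow schema
# like the ones above serializes/validates plain data, e.g.
#
#   schema = NodeTagsChangedSchema()
#   event = NodeTagsChanged(node_id='n-1', node_name='node01',
#                           tags={'env': 'prod'}, previous_tags={})
#   payload = schema.dump(event)   # emits the declared fields
#
# load() would run the same field definitions in reverse, validating an
# incoming payload. The field values here are made up for illustration.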
|
[
"marshmallow.fields.Dict",
"marshmallow.fields.String"
] |
[((808, 821), 'marshmallow.fields.Dict', 'fields.Dict', ([], {}), '()\n', (819, 821), False, 'from marshmallow import fields\n'), ((843, 858), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (856, 858), False, 'from marshmallow import fields\n'), ((1532, 1547), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (1545, 1547), False, 'from marshmallow import fields\n'), ((1564, 1579), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (1577, 1579), False, 'from marshmallow import fields\n'), ((1591, 1604), 'marshmallow.fields.Dict', 'fields.Dict', ([], {}), '()\n', (1602, 1604), False, 'from marshmallow import fields\n'), ((1625, 1638), 'marshmallow.fields.Dict', 'fields.Dict', ([], {}), '()\n', (1636, 1638), False, 'from marshmallow import fields\n')]
|
import logging
import inspect
import functools
from urllib.request import urlopen, Request
from functools import partial
import toml as hjson
import mistune
from django.conf import settings
from django.template import loader
from biostar.utils.decorators import task
#
# Do not use logging in tasks! Deadlocking may occur!
#
# https://github.com/unbit/uwsgi/issues/1369
def message(msg, level=0):
print(f"{msg}")
@task
def detect_location(ip, user_id):
"""
    Fills in the user's location based on the IP address.
"""
from biostar.accounts.models import Profile
msg = f"location check for \tid={user_id}\tip={ip}"
# The lookup needs to be turned on.
if not settings.LOCATION_LOOKUP:
message(f"skip {msg}")
return
message(f"execute {msg}")
# Get the profile for the user
profile = Profile.objects.filter(user__id=user_id).first()
# Skip value if it has the word unknown in it
def get(data, attr):
value = data.get(attr, '')
return "" if "unknown" in value.lower() else value.title()
# Check and log location.
if not profile.location:
try:
url = f"http://api.hostip.info/get_json.php?ip={ip}"
message(url)
message(f"{ip}, {profile.user}, {url}")
req = Request(url=url, headers={'User-Agent': 'Mozilla/5.0'})
resp = urlopen(req, timeout=3).read()
data = hjson.loads(resp)
city = get(data, "city")
country = get(data, "country_name")
location = city or country
msg = f"location result for \tid={user_id}\tip={ip}\tloc={location}"
if location:
Profile.objects.filter(user=profile.user).update(location=location)
message(f"updated profile {msg}")
else:
message(f"empty location {msg}")
except Exception as exc:
message(exc)
@task
def verification_email(user_id):
from biostar.accounts import auth, models
user = models.User.objects.filter(id=user_id).first()
auth.send_verification_email(user=user)
return
@task
def create_messages(template, user_ids, sender=None, extra_context={}):
"""
    Create a batch of messages from the sender to the users in user_ids.
"""
from biostar.accounts.models import User, Message, MessageBody
rec_list = User.objects.filter(id__in=user_ids)
# Get the sender
name, email = settings.ADMINS[0]
sender = sender or User.objects.filter(email=email).first() or User.objects.filter(is_superuser=True).first()
# Load the template and context
tmpl = loader.get_template(template_name=template)
context = dict(sender=sender)
context.update(extra_context)
body = tmpl.render(context)
html = mistune.markdown(body, escape=False)
    for rec in rec_list:
        # Bind the created MessageBody to a new name: reusing `body` would
        # pass a MessageBody instance instead of the rendered text on every
        # iteration after the first.
        msg_body = MessageBody.objects.create(body=body, html=html)
        Message.objects.create(sender=sender, recipient=rec, body=msg_body)
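# Hedged note (not part of the original module): the @task decorator from
# biostar.utils.decorators is what moves these functions off the request
# thread, which is also why they report via print() instead of logging, per
# the uwsgi deadlock warning at the top of the file.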
|
[
"biostar.accounts.auth.send_verification_email",
"biostar.accounts.models.MessageBody.objects.create",
"biostar.accounts.models.User.objects.filter",
"mistune.markdown",
"biostar.accounts.models.Message.objects.create",
"urllib.request.Request",
"biostar.accounts.models.Profile.objects.filter",
"urllib.request.urlopen",
"toml.loads",
"django.template.loader.get_template"
] |
[((2074, 2113), 'biostar.accounts.auth.send_verification_email', 'auth.send_verification_email', ([], {'user': 'user'}), '(user=user)\n', (2102, 2113), False, 'from biostar.accounts import auth, models\n'), ((2367, 2403), 'biostar.accounts.models.User.objects.filter', 'User.objects.filter', ([], {'id__in': 'user_ids'}), '(id__in=user_ids)\n', (2386, 2403), False, 'from biostar.accounts.models import User, Message, MessageBody\n'), ((2623, 2666), 'django.template.loader.get_template', 'loader.get_template', ([], {'template_name': 'template'}), '(template_name=template)\n', (2642, 2666), False, 'from django.template import loader\n'), ((2778, 2814), 'mistune.markdown', 'mistune.markdown', (['body'], {'escape': '(False)'}), '(body, escape=False)\n', (2794, 2814), False, 'import mistune\n'), ((2856, 2904), 'biostar.accounts.models.MessageBody.objects.create', 'MessageBody.objects.create', ([], {'body': 'body', 'html': 'html'}), '(body=body, html=html)\n', (2882, 2904), False, 'from biostar.accounts.models import User, Message, MessageBody\n'), ((2913, 2976), 'biostar.accounts.models.Message.objects.create', 'Message.objects.create', ([], {'sender': 'sender', 'recipient': 'rec', 'body': 'body'}), '(sender=sender, recipient=rec, body=body)\n', (2935, 2976), False, 'from biostar.accounts.models import User, Message, MessageBody\n'), ((828, 868), 'biostar.accounts.models.Profile.objects.filter', 'Profile.objects.filter', ([], {'user__id': 'user_id'}), '(user__id=user_id)\n', (850, 868), False, 'from biostar.accounts.models import Profile\n'), ((1288, 1343), 'urllib.request.Request', 'Request', ([], {'url': 'url', 'headers': "{'User-Agent': 'Mozilla/5.0'}"}), "(url=url, headers={'User-Agent': 'Mozilla/5.0'})\n", (1295, 1343), False, 'from urllib.request import urlopen, Request\n'), ((1413, 1430), 'toml.loads', 'hjson.loads', (['resp'], {}), '(resp)\n', (1424, 1430), True, 'import toml as hjson\n'), ((2022, 2060), 'biostar.accounts.models.User.objects.filter', 'models.User.objects.filter', ([], {'id': 'user_id'}), '(id=user_id)\n', (2048, 2060), False, 'from biostar.accounts import auth, models\n'), ((2485, 2517), 'biostar.accounts.models.User.objects.filter', 'User.objects.filter', ([], {'email': 'email'}), '(email=email)\n', (2504, 2517), False, 'from biostar.accounts.models import User, Message, MessageBody\n'), ((2529, 2567), 'biostar.accounts.models.User.objects.filter', 'User.objects.filter', ([], {'is_superuser': '(True)'}), '(is_superuser=True)\n', (2548, 2567), False, 'from biostar.accounts.models import User, Message, MessageBody\n'), ((1363, 1386), 'urllib.request.urlopen', 'urlopen', (['req'], {'timeout': '(3)'}), '(req, timeout=3)\n', (1370, 1386), False, 'from urllib.request import urlopen, Request\n'), ((1679, 1720), 'biostar.accounts.models.Profile.objects.filter', 'Profile.objects.filter', ([], {'user': 'profile.user'}), '(user=profile.user)\n', (1701, 1720), False, 'from biostar.accounts.models import Profile\n')]
|
import unittest
from datetime import datetime
from unittest.mock import patch
from src.updater.helpers import filter_outdated_players, get_player_usernames
class TestFilterOutdatedPlayers(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls._mock_now_patcher = patch('src.updater.helpers._now')
cls._mock_now = cls._mock_now_patcher.start()
cls._mock_now.return_value = datetime(2021, 5, 27, 14, 41, 32, 709490)
@classmethod
def tearDownClass(cls):
cls._mock_now_patcher.stop()
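    # Comment added for clarity: the class-level patch above freezes
    # src.updater.helpers._now at a fixed timestamp for every test in this
    # class, so which players count as "outdated" relative to their updatedAt
    # values is deterministic; tearDownClass undoes the patch exactly once.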
def test_when_no_players(self):
players = []
outdated_players = filter_outdated_players(players)
self.assertListEqual(outdated_players, [])
def test_when_no_outdated_players(self):
players = [
{
"username": "kedd",
"updatedAt": "2021-05-27T14:41:00.000Z"
},
{
"username": "alvx",
"updatedAt": "2021-05-27T14:40:33.000Z"
}
]
outdated_players = filter_outdated_players(players)
self.assertListEqual(outdated_players, [])
def test_when_one_outdated_player(self):
players = [
{
"username": "kedd",
"updatedAt": "2021-05-27T14:40:00.000Z"
},
{
"username": "alvx",
"updatedAt": "2021-05-27T14:40:33.000Z"
}
]
outdated_players = filter_outdated_players(players)
self.assertListEqual(outdated_players, [players[0]])
def test_when_multiple_outdated_players(self):
players = [
{
"username": "kedd",
"updatedAt": "2021-05-27T14:40:00.000Z"
},
{
"username": "alvx",
"updatedAt": "2021-05-27T14:40:32.000Z"
},
{
"username": "detredwings",
"updatedAt": "2021-05-27T14:41:00.000Z"
}
]
outdated_players = filter_outdated_players(players)
self.assertListEqual(outdated_players, players[:-1])
class TestGetPlayerUsernames(unittest.TestCase):
def test_when_no_players(self):
players = []
usernames = get_player_usernames(players)
self.assertListEqual(usernames, [])
def test_when_one_player(self):
players = [
{
"username": "kedd"
}
]
usernames = get_player_usernames(players)
self.assertListEqual(usernames, ["kedd"])
def test_when_multiple_players(self):
players = [
{
"username": "kedd"
},
{
"username": "alvx"
}
]
usernames = get_player_usernames(players)
self.assertListEqual(usernames, ["kedd", "alvx"])
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"datetime.datetime",
"unittest.mock.patch",
"src.updater.helpers.filter_outdated_players",
"src.updater.helpers.get_player_usernames"
] |
[((2929, 2944), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2942, 2944), False, 'import unittest\n'), ((285, 318), 'unittest.mock.patch', 'patch', (['"""src.updater.helpers._now"""'], {}), "('src.updater.helpers._now')\n", (290, 318), False, 'from unittest.mock import patch\n'), ((410, 451), 'datetime.datetime', 'datetime', (['(2021)', '(5)', '(27)', '(14)', '(41)', '(32)', '(709490)'], {}), '(2021, 5, 27, 14, 41, 32, 709490)\n', (418, 451), False, 'from datetime import datetime\n'), ((621, 653), 'src.updater.helpers.filter_outdated_players', 'filter_outdated_players', (['players'], {}), '(players)\n', (644, 653), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n'), ((1051, 1083), 'src.updater.helpers.filter_outdated_players', 'filter_outdated_players', (['players'], {}), '(players)\n', (1074, 1083), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n'), ((1481, 1513), 'src.updater.helpers.filter_outdated_players', 'filter_outdated_players', (['players'], {}), '(players)\n', (1504, 1513), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n'), ((2055, 2087), 'src.updater.helpers.filter_outdated_players', 'filter_outdated_players', (['players'], {}), '(players)\n', (2078, 2087), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n'), ((2279, 2308), 'src.updater.helpers.get_player_usernames', 'get_player_usernames', (['players'], {}), '(players)\n', (2299, 2308), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n'), ((2505, 2534), 'src.updater.helpers.get_player_usernames', 'get_player_usernames', (['players'], {}), '(players)\n', (2525, 2534), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n'), ((2807, 2836), 'src.updater.helpers.get_player_usernames', 'get_player_usernames', (['players'], {}), '(players)\n', (2827, 2836), False, 'from src.updater.helpers import filter_outdated_players, get_player_usernames\n')]
|
"""
An implementation of a stereo algorithm based on the Lucas-Kanade method
"""
import sys, os, time
import skimage
import numpy as np
def preprocess_image(path):
return list(skimage.transform.pyramid_gaussian(
skimage.color.rgb2gray(
skimage.io.imread(path)
),
max_layer = 8,
multichannel = False,
))
im_prmd_0 = preprocess_image('data/Classroom1-perfect/im0.png')
im_prmd_1 = preprocess_image('data/Classroom1-perfect/im1.png')
dx = list(map(skimage.filters.scharr_v, im_prmd_0))
dt = list(a - b for a,b in zip(im_prmd_0, im_prmd_1))
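# Comment added for clarity: for a purely horizontal shift, the linearised
# brightness-constancy relation I_x * d + I_t ~= 0 gives the per-pixel
# disparity estimate d ~= -I_t / I_x; the element-wise dt / dx displayed below
# is that estimate, up to the sign convention of the image difference above.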
skimage.io.imshow_collection([
dx[-1],
dt[-1],
    dt[-1] / dx[-1],  # disparity based on the last level of the image pyramids
])
skimage.io.show()
|
[
"skimage.io.imshow_collection",
"skimage.io.show",
"skimage.io.imread"
] |
[((603, 666), 'skimage.io.imshow_collection', 'skimage.io.imshow_collection', (['[dx[-1], dt[-1], dt[-1] / dx[-1]]'], {}), '([dx[-1], dt[-1], dt[-1] / dx[-1]])\n', (631, 666), False, 'import skimage\n'), ((734, 751), 'skimage.io.show', 'skimage.io.show', ([], {}), '()\n', (749, 751), False, 'import skimage\n'), ((258, 281), 'skimage.io.imread', 'skimage.io.imread', (['path'], {}), '(path)\n', (275, 281), False, 'import skimage\n')]
|
import pika
from lakeweed import clickhouse as d2c
from . import dbms_clickhouse as dbms
from .dbms_clickhouse import TableNotFoundException
class Grebe:
def __init__(self, client, schema_store, source_settings_store, resend_exchange_name, retry_max: int, tz_str: str, logger):
self.client = client
self.schema_store = schema_store
self.source_settings_store = source_settings_store
self.resend_exchange_name = resend_exchange_name
self.retry_max = retry_max
self.tz_str = tz_str
self.logger = logger
self.reload_schema()
self.logger.info(f"Schemas: {[s for s in self.schema_cache.values()]}")
self.reload_source_settings()
def callback(self, channel, method, properties, body):
self.logger.debug("receive '{}({})'".format(method.routing_key, method.delivery_tag))
try:
Grebe.insert_data(
method, body,
self.client,
self.schema_store,
self.schema_cache,
self.specified_types_cache,
self.tz_str,
self.logger
)
except TableNotFoundException as e:
            self.logger.error(f"Table '{e.table_name}' may have been renamed; updating the schema table cache.")
self.reload_schema()
self.logger.error("Consume failed '{}'. retrying...".format(method.routing_key))
Grebe.send_retry(
channel, method, properties, body,
self.retry_max,
self.resend_exchange_name,
self.logger
)
except Exception as e:
self.logger.error(e, exc_info=e)
self.logger.error("Consume failed '{}'. retrying...".format(method.routing_key))
Grebe.send_retry(
channel, method, properties, body,
self.retry_max,
self.resend_exchange_name,
self.logger
)
finally:
channel.basic_ack(delivery_tag=method.delivery_tag)
self.logger.debug("return basic_ack '{}({})'".format(method.routing_key, method.delivery_tag))
def reload_schema(self):
self.schema_cache = self.schema_store.load_all_schemas()
self.logger.info(f"Load {len(self.schema_cache)} schemas from {self.schema_store}")
return {'schema_count': len(self.schema_cache), 'store': str(type(self.schema_store))}
def reload_source_settings(self):
self.source_settings_cache = self.source_settings_store.load_all_source_settings()
self.logger.info(f"Loaded source_settings: {self.source_settings_cache}")
self.specified_types_cache = {source_id: body['types'] for source_id, body in self.source_settings_cache.items() if 'types' in body}
self.logger.info(f"Specified Types cache: {self.specified_types_cache}")
return {'store': str(type(self.source_settings_store))}
@classmethod
def insert_data(cls, method, body, client, schema_store, schema_cache, specified_types, tz_str, logger):
        # Replace the topic delimiters ("/" for MQTT, "." for AMQP) so the
        # routing key becomes a usable source identifier.
topic = method.routing_key
source_id = topic.replace("/", "_").replace(".", "_")
payload = str(body.decode('utf-8'))
if source_id in specified_types.keys():
spec_types = specified_types[source_id]
else:
spec_types = {}
(columns, types, values_list) = d2c.data_string2type_value(payload, specified_types=spec_types, tz_str=tz_str)
serialized = dbms.serialize_schema(columns, types, source_id)
data_table_name = dbms.get_table_name_with_insert_if_new_schema(client, schema_store, source_id, columns, types, serialized, schema_cache)
dbms.insert_data(client, data_table_name, columns, values_list)
logger.debug(serialized)
@classmethod
def send_retry(cls, channel, method, properties, body, retry_max, resend_exchange_name, logger):
RetryCountKey = "x-grebe-retry-count"
current_retry_count = 0
if properties.headers and properties.headers.get(RetryCountKey):
current_retry_count = int(properties.headers.get(RetryCountKey))
if current_retry_count >= retry_max:
logger.error("Retry count exceeded!!({}). Discard Message = [exchange={}, routing_key={}, body={}]".format(retry_max, resend_exchange_name, method.routing_key, body))
return
props = pika.BasicProperties(
headers={RetryCountKey: current_retry_count + 1}
)
logger.debug("Re-sending [exchange={}, routing_key={}, props={}, body={}]".format(resend_exchange_name, method.routing_key, props, body))
channel.basic_publish(exchange=resend_exchange_name, routing_key=method.routing_key, properties=props, body=body)
logger.warning("Re-send complete. ({})".format(current_retry_count + 1))
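# Comment added for clarity: the x-grebe-retry-count header implements a
# simple retry cap. Each re-publish increments the counter, and the message is
# discarded once retry_max is exceeded, so a poison message cannot cycle
# through the broker forever.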
|
[
"pika.BasicProperties",
"lakeweed.clickhouse.data_string2type_value"
] |
[((3463, 3541), 'lakeweed.clickhouse.data_string2type_value', 'd2c.data_string2type_value', (['payload'], {'specified_types': 'spec_types', 'tz_str': 'tz_str'}), '(payload, specified_types=spec_types, tz_str=tz_str)\n', (3489, 3541), True, 'from lakeweed import clickhouse as d2c\n'), ((4476, 4546), 'pika.BasicProperties', 'pika.BasicProperties', ([], {'headers': '{RetryCountKey: current_retry_count + 1}'}), '(headers={RetryCountKey: current_retry_count + 1})\n', (4496, 4546), False, 'import pika\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) University of St Andrews 2020-2021
# (c) University of Strathclyde 2020-2021
# (c) James Hutton Institute 2020-2021
#
# Author:
# <NAME>
#
# Contact
# <EMAIL>
#
# <NAME>,
# Biomolecular Sciences Building,
# University of St Andrews,
# North Haugh Campus,
# St Andrews,
# KY16 9ST
# Scotland,
# UK
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Tests sql.sql_interface.get_data.get_taxonomies.py
These tests are intended to be run from the root of the repository using:
pytest -v
"""
from argparse import Namespace, ArgumentParser
from datetime import datetime
from pathlib import Path
import pytest
from cazy_webscraper.sql import sql_orm
from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies
from cazy_webscraper.sql.sql_orm import (
Ec,
Genbank,
Session,
Taxonomy,
Uniprot,
)
def test_get_tax_user_acc(monkeypatch):
argsdict = {"args": Namespace(
genbank_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
uniprot_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
)}
    def mock_get_tax(*args, **kwargs):
        return [1, 2, 3]
    def mock_get_table_dicts(*args, **kwargs):
        return {}, {}
monkeypatch.setattr(get_taxonomies, "get_taxs_for_user_gbks", mock_get_tax)
monkeypatch.setattr(get_taxonomies, "get_taxs_for_uniprots", mock_get_tax)
monkeypatch.setattr(get_taxonomies, "get_uni_gbk_tax_dict", mock_get_table_dicts)
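    # With the collaborators stubbed out above, the call below exercises only
    # the branching logic inside get_taxonomies() for user-supplied accessions.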
get_taxonomies.get_taxonomies(set(), set(), {}, set(), set(), 'connection', argsdict['args'])
def test_get_tax_db_tax(monkeypatch):
argsdict = {"args": Namespace(
genbank_accessions=None,
uniprot_accessions=None,
)}
    def mock_get_tax(*args, **kwargs):
        return [1, 2, 3]
monkeypatch.setattr(get_taxonomies, "get_taxs_for_user_gbks", mock_get_tax)
monkeypatch.setattr(get_taxonomies, "get_taxs_for_uniprots", mock_get_tax)
monkeypatch.setattr(get_taxonomies, "get_filtered_taxs", mock_get_tax)
get_taxonomies.get_taxonomies(set(), set(), {}, set(), set(), 'connection', argsdict['args'])
def test_get_uni_gbk_dict(db_path):
argsdict = {"args": Namespace(
sql_echo=True,
uniprot_accessions=None,
)}
db_connection = sql_orm.get_db_connection(db_path, False, False)
assert ({}, {}) == get_taxonomies.get_uni_gbk_tax_dict(db_connection)
def test_get_user_gbks_fail():
argsdict = {"args": Namespace(
sql_echo=True,
uniprot_accessions=None,
genbank_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt",
)}
gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}
with pytest.raises(SystemExit) as pytest_wrapped_e:
get_taxonomies.get_taxs_for_user_gbks(gbk_table_dict, argsdict['args'])
assert pytest_wrapped_e.type == SystemExit
def test_get_user_gbks():
argsdict = {"args": Namespace(
sql_echo=True,
uniprot_accessions=None,
genbank_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
)}
gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}
assert [2] == get_taxonomies.get_taxs_for_user_gbks(gbk_table_dict, argsdict['args'])
def test_get_user_uniprot_fail():
argsdict = {"args": Namespace(
sql_echo=True,
uniprot_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt",
)}
gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}
with pytest.raises(SystemExit) as pytest_wrapped_e:
get_taxonomies.get_taxs_for_uniprots(gbk_table_dict, argsdict['args'])
assert pytest_wrapped_e.type == SystemExit
def test_get_user_uniprot():
argsdict = {"args": Namespace(
sql_echo=True,
uniprot_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
)}
gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}
assert [2] == get_taxonomies.get_taxs_for_uniprots(gbk_table_dict, argsdict['args'])
|
[
"argparse.Namespace",
"cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_taxs_for_uniprots",
"cazy_webscraper.sql.sql_orm.get_db_connection",
"pytest.raises",
"cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_uni_gbk_tax_dict",
"cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_taxs_for_user_gbks"
] |
[((3365, 3413), 'cazy_webscraper.sql.sql_orm.get_db_connection', 'sql_orm.get_db_connection', (['db_path', '(False)', '(False)'], {}), '(db_path, False, False)\n', (3390, 3413), False, 'from cazy_webscraper.sql import sql_orm\n'), ((1988, 2171), 'argparse.Namespace', 'Namespace', ([], {'genbank_accessions': '"""tests/test_inputs/test_inputs_sql_interface/test_accs.txt"""', 'uniprot_accessions': '"""tests/test_inputs/test_inputs_sql_interface/test_accs.txt"""'}), "(genbank_accessions=\n 'tests/test_inputs/test_inputs_sql_interface/test_accs.txt',\n uniprot_accessions=\n 'tests/test_inputs/test_inputs_sql_interface/test_accs.txt')\n", (1997, 2171), False, 'from argparse import Namespace, ArgumentParser\n'), ((2726, 2785), 'argparse.Namespace', 'Namespace', ([], {'genbank_accessions': 'None', 'uniprot_accessions': 'None'}), '(genbank_accessions=None, uniprot_accessions=None)\n', (2735, 2785), False, 'from argparse import Namespace, ArgumentParser\n'), ((3271, 3320), 'argparse.Namespace', 'Namespace', ([], {'sql_echo': '(True)', 'uniprot_accessions': 'None'}), '(sql_echo=True, uniprot_accessions=None)\n', (3280, 3320), False, 'from argparse import Namespace, ArgumentParser\n'), ((3438, 3488), 'cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_uni_gbk_tax_dict', 'get_taxonomies.get_uni_gbk_tax_dict', (['db_connection'], {}), '(db_connection)\n', (3473, 3488), False, 'from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies\n'), ((3546, 3685), 'argparse.Namespace', 'Namespace', ([], {'sql_echo': '(True)', 'uniprot_accessions': 'None', 'genbank_accessions': '"""tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt"""'}), "(sql_echo=True, uniprot_accessions=None, genbank_accessions=\n 'tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt')\n", (3555, 3685), False, 'from argparse import Namespace, ArgumentParser\n'), ((3775, 3800), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (3788, 3800), False, 'import pytest\n'), ((3830, 3901), 'cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_taxs_for_user_gbks', 'get_taxonomies.get_taxs_for_user_gbks', (['gbk_table_dict', "argsdict['args']"], {}), "(gbk_table_dict, argsdict['args'])\n", (3867, 3901), False, 'from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies\n'), ((4001, 4135), 'argparse.Namespace', 'Namespace', ([], {'sql_echo': '(True)', 'uniprot_accessions': 'None', 'genbank_accessions': '"""tests/test_inputs/test_inputs_sql_interface/test_accs.txt"""'}), "(sql_echo=True, uniprot_accessions=None, genbank_accessions=\n 'tests/test_inputs/test_inputs_sql_interface/test_accs.txt')\n", (4010, 4135), False, 'from argparse import Namespace, ArgumentParser\n'), ((4234, 4305), 'cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_taxs_for_user_gbks', 'get_taxonomies.get_taxs_for_user_gbks', (['gbk_table_dict', "argsdict['args']"], {}), "(gbk_table_dict, argsdict['args'])\n", (4271, 4305), False, 'from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies\n'), ((4366, 4480), 'argparse.Namespace', 'Namespace', ([], {'sql_echo': '(True)', 'uniprot_accessions': '"""tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt"""'}), "(sql_echo=True, uniprot_accessions=\n 'tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt')\n", (4375, 4480), False, 'from argparse import Namespace, ArgumentParser\n'), ((4562, 4587), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (4575, 4587), False, 'import pytest\n'), ((4617, 4687), 'cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_taxs_for_uniprots', 'get_taxonomies.get_taxs_for_uniprots', (['gbk_table_dict', "argsdict['args']"], {}), "(gbk_table_dict, argsdict['args'])\n", (4653, 4687), False, 'from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies\n'), ((4790, 4899), 'argparse.Namespace', 'Namespace', ([], {'sql_echo': '(True)', 'uniprot_accessions': '"""tests/test_inputs/test_inputs_sql_interface/test_accs.txt"""'}), "(sql_echo=True, uniprot_accessions=\n 'tests/test_inputs/test_inputs_sql_interface/test_accs.txt')\n", (4799, 4899), False, 'from argparse import Namespace, ArgumentParser\n'), ((4990, 5060), 'cazy_webscraper.sql.sql_interface.get_data.get_taxonomies.get_taxs_for_uniprots', 'get_taxonomies.get_taxs_for_uniprots', (['gbk_table_dict', "argsdict['args']"], {}), "(gbk_table_dict, argsdict['args'])\n", (5026, 5060), False, 'from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies\n')]
|
from findthatpostcode.controllers.controller import Controller
from tests.fixtures import MockElasticsearch
def test_controller_class():
id_ = "testentity"
data = {"code": "testentity", "name": "Test Entity"}
# test a found entity
a = Controller(id_, data)
assert a.id == id_
assert a.attributes["name"] == data["name"]
assert a.found is True
assert len(a.get_errors()) == 0
# test internal functions
assert a.parse_id(id_) == id_
assert a.process_attributes(data) == data
    # test a non-existent object
a = Controller(id_, {})
assert a.id == id_
assert a.attributes.get("name") is None
assert a.found is False
assert len(a.get_errors()) == 1
assert a.get_errors()[0]["status"] == "404"
def test_controller_fetch():
es = MockElasticsearch()
a = Controller.get_from_es(
"EX36 4AT", es, es_config={"es_index": "geo_postcode", "es_type": "_doc"}
)
assert isinstance(a.id, str)
assert len(a.attributes) > 4
assert a.found is True
assert len(a.get_errors()) == 0
|
[
"findthatpostcode.controllers.controller.Controller",
"tests.fixtures.MockElasticsearch",
"findthatpostcode.controllers.controller.Controller.get_from_es"
] |
[((254, 275), 'findthatpostcode.controllers.controller.Controller', 'Controller', (['id_', 'data'], {}), '(id_, data)\n', (264, 275), False, 'from findthatpostcode.controllers.controller import Controller\n'), ((563, 582), 'findthatpostcode.controllers.controller.Controller', 'Controller', (['id_', '{}'], {}), '(id_, {})\n', (573, 582), False, 'from findthatpostcode.controllers.controller import Controller\n'), ((803, 822), 'tests.fixtures.MockElasticsearch', 'MockElasticsearch', ([], {}), '()\n', (820, 822), False, 'from tests.fixtures import MockElasticsearch\n'), ((832, 933), 'findthatpostcode.controllers.controller.Controller.get_from_es', 'Controller.get_from_es', (['"""EX36 4AT"""', 'es'], {'es_config': "{'es_index': 'geo_postcode', 'es_type': '_doc'}"}), "('EX36 4AT', es, es_config={'es_index':\n 'geo_postcode', 'es_type': '_doc'})\n", (854, 933), False, 'from findthatpostcode.controllers.controller import Controller\n')]
|
import argparse
from typing import List
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.menu import session
from gamestonk_terminal.helper_funcs import (
EXPORT_ONLY_RAW_DATA_ALLOWED,
MENU_GO_BACK,
MENU_QUIT,
MENU_RESET,
try_except,
system_clear,
get_flair,
check_positive,
parse_known_args_and_warn,
)
from gamestonk_terminal.cryptocurrency.nft import nftcalendar_view
class NFTController:
"""NFT Controller class"""
CHOICES = [
"cls",
"?",
"help",
"q",
"quit",
"reset",
]
CHOICES_COMMANDS = [
"today",
"upcoming",
"ongoing",
"newest",
]
CHOICES += CHOICES_COMMANDS
def __init__(self):
"""Constructor"""
self.nft_parser = argparse.ArgumentParser(add_help=False, prog="nft")
self.nft_parser.add_argument(
"cmd",
choices=self.CHOICES,
)
self.completer = NestedCompleter.from_nested_dict(
{c: None for c in self.CHOICES}
)
def print_help(self):
"""Print help"""
help_text = """
What do you want to do?
    cls         clear screen
    ?/help      show this menu again
    q           quit this menu and go back to the main menu
    quit        quit to abandon the program
    reset       reset terminal and reload configs

nftcalendar.io:
    today       today's NFT drops
    upcoming    upcoming NFT drops
    ongoing     ongoing NFT drops
    newest      recently added NFTs
"""
print(help_text)
def switch(self, an_input: str):
"""Process and dispatch input
Returns
-------
MENU_GO_BACK, MENU_QUIT, MENU_RESET
MENU_GO_BACK - Show main context menu again
MENU_QUIT - Quit terminal
MENU_RESET - Reset terminal and go back to same previous menu
"""
# Empty command
if not an_input:
print("")
return None
(known_args, other_args) = self.nft_parser.parse_known_args(an_input.split())
# Help menu again
if known_args.cmd == "?":
self.print_help()
return None
# Clear screen
if known_args.cmd == "cls":
system_clear()
return None
return getattr(
            self, "call_" + known_args.cmd, lambda _: "command not recognized!"
)(other_args)
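    # Dispatch pattern: each command name resolves to a call_<cmd> method via
    # getattr above; the lambda fallback keeps an unknown command from raising,
    # and argparse's `choices` already rejects commands outside self.CHOICES.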
def call_help(self, _):
"""Process Help Command"""
self.print_help()
def call_q(self, _):
"""Process Q command - quit the menu"""
return MENU_GO_BACK
def call_quit(self, _):
"""Process Quit command - exit the program"""
return MENU_QUIT
def call_reset(self, _):
"""Process Reset command - reset the program"""
return MENU_RESET
@try_except
def call_today(self, other_args: List[str]):
"""Process today command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="today",
description="Today's NFT drops [Source: nftcalendar.io]",
)
parser.add_argument(
"-n",
"--num",
type=check_positive,
help="Number of NFT collections to display",
dest="num",
default=5,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if not ns_parser:
return
nftcalendar_view.display_nft_today_drops(
num=ns_parser.num,
export=ns_parser.export,
)
@try_except
def call_upcoming(self, other_args: List[str]):
"""Process upcoming command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="upcoming",
description="Upcoming's NFT drops [Source: nftcalendar.io]",
)
parser.add_argument(
"-n",
"--num",
type=check_positive,
help="Number of NFT collections to display",
dest="num",
default=5,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if not ns_parser:
return
nftcalendar_view.display_nft_upcoming_drops(
num=ns_parser.num,
export=ns_parser.export,
)
@try_except
def call_ongoing(self, other_args: List[str]):
"""Process ongoing command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ongoing",
description="Ongoing's NFT drops [Source: nftcalendar.io]",
)
parser.add_argument(
"-n",
"--num",
type=check_positive,
help="Number of NFT collections to display",
dest="num",
default=5,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if not ns_parser:
return
nftcalendar_view.display_nft_ongoing_drops(
num=ns_parser.num,
export=ns_parser.export,
)
@try_except
def call_newest(self, other_args: List[str]):
"""Process newest command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="newest",
description="Newest's NFT drops [Source: nftcalendar.io]",
)
parser.add_argument(
"-n",
"--num",
type=check_positive,
help="Number of NFT collections to display",
dest="num",
default=5,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if not ns_parser:
return
nftcalendar_view.display_nft_newest_drops(
num=ns_parser.num,
export=ns_parser.export,
)
def menu():
"""NFT Menu"""
nft_controller = NFTController()
nft_controller.call_help(None)
while True:
if session and gtff.USE_PROMPT_TOOLKIT:
completer = NestedCompleter.from_nested_dict(
{c: None for c in nft_controller.CHOICES}
)
an_input = session.prompt(
f"{get_flair()} (nft)> ",
completer=completer,
)
else:
an_input = input(f"{get_flair()} (nft)> ")
try:
process_input = nft_controller.switch(an_input)
if process_input is not None:
return process_input
except SystemExit:
            print("The command selected doesn't exist\n")
continue
|
[
"argparse.ArgumentParser",
"gamestonk_terminal.helper_funcs.system_clear",
"gamestonk_terminal.helper_funcs.get_flair",
"gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_upcoming_drops",
"gamestonk_terminal.helper_funcs.parse_known_args_and_warn",
"gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_ongoing_drops",
"prompt_toolkit.completion.NestedCompleter.from_nested_dict",
"gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_today_drops",
"gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_newest_drops"
] |
[((874, 925), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)', 'prog': '"""nft"""'}), "(add_help=False, prog='nft')\n", (897, 925), False, 'import argparse\n'), ((1052, 1117), 'prompt_toolkit.completion.NestedCompleter.from_nested_dict', 'NestedCompleter.from_nested_dict', (['{c: None for c in self.CHOICES}'], {}), '({c: None for c in self.CHOICES})\n', (1084, 1117), False, 'from prompt_toolkit.completion import NestedCompleter\n'), ((3048, 3225), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter', 'prog': '"""today"""', 'description': '"""Today\'s NFT drops [Source: nftcalendar.io]"""'}), '(add_help=False, formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, prog=\'today\', description=\n "Today\'s NFT drops [Source: nftcalendar.io]")\n', (3071, 3225), False, 'import argparse\n'), ((3510, 3585), 'gamestonk_terminal.helper_funcs.parse_known_args_and_warn', 'parse_known_args_and_warn', (['parser', 'other_args', 'EXPORT_ONLY_RAW_DATA_ALLOWED'], {}), '(parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED)\n', (3535, 3585), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n'), ((3662, 3751), 'gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_today_drops', 'nftcalendar_view.display_nft_today_drops', ([], {'num': 'ns_parser.num', 'export': 'ns_parser.export'}), '(num=ns_parser.num, export=\n ns_parser.export)\n', (3702, 3751), False, 'from gamestonk_terminal.cryptocurrency.nft import nftcalendar_view\n'), ((3907, 4090), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter', 'prog': '"""upcoming"""', 'description': '"""Upcoming\'s NFT drops [Source: nftcalendar.io]"""'}), '(add_help=False, formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, prog=\'upcoming\', description=\n "Upcoming\'s NFT drops [Source: nftcalendar.io]")\n', (3930, 4090), False, 'import argparse\n'), ((4375, 4450), 'gamestonk_terminal.helper_funcs.parse_known_args_and_warn', 'parse_known_args_and_warn', (['parser', 'other_args', 'EXPORT_ONLY_RAW_DATA_ALLOWED'], {}), '(parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED)\n', (4400, 4450), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n'), ((4527, 4619), 'gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_upcoming_drops', 'nftcalendar_view.display_nft_upcoming_drops', ([], {'num': 'ns_parser.num', 'export': 'ns_parser.export'}), '(num=ns_parser.num, export=\n ns_parser.export)\n', (4570, 4619), False, 'from gamestonk_terminal.cryptocurrency.nft import nftcalendar_view\n'), ((4773, 4954), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter', 'prog': '"""ongoing"""', 'description': '"""Ongoing\'s NFT drops [Source: nftcalendar.io]"""'}), '(add_help=False, formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, prog=\'ongoing\', description=\n "Ongoing\'s NFT drops [Source: nftcalendar.io]")\n', (4796, 4954), False, 'import argparse\n'), ((5239, 5314), 'gamestonk_terminal.helper_funcs.parse_known_args_and_warn', (['parser', 'other_args', 'EXPORT_ONLY_RAW_DATA_ALLOWED'], {}), '(parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED)\n', (5264, 5314), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n'), ((5391, 5482), 'gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_ongoing_drops', 'nftcalendar_view.display_nft_ongoing_drops', ([], {'num': 'ns_parser.num', 'export': 'ns_parser.export'}), '(num=ns_parser.num, export=\n ns_parser.export)\n', (5433, 5482), False, 'from gamestonk_terminal.cryptocurrency.nft import nftcalendar_view\n'), ((5634, 5813), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter', 'prog': '"""newest"""', 'description': '"""Newest\'s NFT drops [Source: nftcalendar.io]"""'}), '(add_help=False, formatter_class=argparse.\n ArgumentDefaultsHelpFormatter, prog=\'newest\', description=\n "Newest\'s NFT drops [Source: nftcalendar.io]")\n', (5657, 5813), False, 'import argparse\n'), ((6098, 6173), 'gamestonk_terminal.helper_funcs.parse_known_args_and_warn', 'parse_known_args_and_warn', (['parser', 'other_args', 'EXPORT_ONLY_RAW_DATA_ALLOWED'], {}), '(parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED)\n', (6123, 6173), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n'), ((6250, 6340), 'gamestonk_terminal.cryptocurrency.nft.nftcalendar_view.display_nft_newest_drops', 'nftcalendar_view.display_nft_newest_drops', ([], {'num': 'ns_parser.num', 'export': 'ns_parser.export'}), '(num=ns_parser.num, export=\n ns_parser.export)\n', (6291, 6340), False, 'from gamestonk_terminal.cryptocurrency.nft import nftcalendar_view\n'), ((2353, 2367), 'gamestonk_terminal.helper_funcs.system_clear', 'system_clear', ([], {}), '()\n', (2365, 2367), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n'), ((6564, 6639), 'prompt_toolkit.completion.NestedCompleter.from_nested_dict', 'NestedCompleter.from_nested_dict', (['{c: None for c in nft_controller.CHOICES}'], {}), '({c: None for c in nft_controller.CHOICES})\n', (6596, 6639), False, 'from prompt_toolkit.completion import NestedCompleter\n'), ((6729, 6740), 'gamestonk_terminal.helper_funcs.get_flair', 'get_flair', ([], {}), '()\n', (6738, 6740), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n'), ((6849, 6860), 'gamestonk_terminal.helper_funcs.get_flair', 'get_flair', ([], {}), '()\n', (6858, 6860), False, 'from gamestonk_terminal.helper_funcs import EXPORT_ONLY_RAW_DATA_ALLOWED, MENU_GO_BACK, MENU_QUIT, MENU_RESET, try_except, system_clear, get_flair, check_positive, parse_known_args_and_warn\n')]
|
"""The H API service."""
from h_api.bulk_api import BulkAPI, CommandBuilder
from lms.models import HUser
from lms.services import ExternalRequestError
class HAPIError(ExternalRequestError):
"""
A problem with an h API request.
Raised whenever an h API request times out or when an unsuccessful, invalid
or unexpected response is received from the h API.
"""
class HAPI:
"""
A client for the "h" API.
:raise HAPIError: if a call to the "h" API raises an unhandled exception
"""
def __init__(self, _context, request):
self._request = request
settings = request.registry.settings
self._authority = settings["h_authority"]
self._http_auth = (settings["h_client_id"], settings["h_client_secret"])
self._base_url = settings["h_api_url_private"]
self._http_service = request.find_service(name="http")
def execute_bulk(self, commands):
"""
Send a series of h_api commands to the H bulk API.
:param commands: Instances of h_api Commands
"""
commands = list(commands)
commands = [
CommandBuilder.configure(
effective_user=HUser(username="lms").userid(self._authority),
total_instructions=len(commands) + 1,
)
] + commands
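        # The configure command is prepended so it leads the batch: it sets
        # the effective user and declares the total instruction count (the
        # "+ 1" counts the configure command itself).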
self._api_request(
"POST",
path="bulk",
body=BulkAPI.to_string(commands),
headers={"Content-Type": "application/vnd.hypothesis.v1+x-ndjson"},
)
def get_user(self, username):
"""
Return the h user for the given username.
:rtype: HUser
"""
userid = HUser(username).userid(self._authority)
user_info = self._api_request("GET", path=f"users/{userid}").json()
return HUser(username=username, display_name=user_info["display_name"])
def _api_request(self, method, path, body=None, headers=None):
"""
Send any kind of HTTP request to the h API and return the response.
:param method: the HTTP request method to use, (e.g. "GET")
:param path: the h API path to post to, relative to
`settings["h_api_url_private"]` (e.g. "users")
:param body: the body to send as a string (without modification)
:param headers: extra headers to pass with the request
        :raise HAPIError: if the request fails for any reason
:return: the response from the h API
:rtype: requests.Response
"""
headers = headers or {}
headers["Hypothesis-Application"] = "lms"
request_args = {}
if body is not None:
request_args["data"] = body
try:
response = self._http_service.request(
method=method,
url=self._base_url + path.lstrip("/"),
auth=self._http_auth,
headers=headers,
**request_args,
)
except ExternalRequestError as err:
raise HAPIError("Connecting to Hypothesis failed", err.response) from err
return response
|
[
"h_api.bulk_api.BulkAPI.to_string",
"lms.models.HUser"
] |
[((1820, 1884), 'lms.models.HUser', 'HUser', ([], {'username': 'username', 'display_name': "user_info['display_name']"}), "(username=username, display_name=user_info['display_name'])\n", (1825, 1884), False, 'from lms.models import HUser\n'), ((1420, 1447), 'h_api.bulk_api.BulkAPI.to_string', 'BulkAPI.to_string', (['commands'], {}), '(commands)\n', (1437, 1447), False, 'from h_api.bulk_api import BulkAPI, CommandBuilder\n'), ((1688, 1703), 'lms.models.HUser', 'HUser', (['username'], {}), '(username)\n', (1693, 1703), False, 'from lms.models import HUser\n'), ((1194, 1215), 'lms.models.HUser', 'HUser', ([], {'username': '"""lms"""'}), "(username='lms')\n", (1199, 1215), False, 'from lms.models import HUser\n')]
|
import os
from pprint import pprint as pp
import colorgram
from flask import Flask
from flask import render_template
app = Flask(__name__)
def get_filenames(f_dir, prefix='', suffix=''):
"""Get list of filenames within a directory. Optionally scope by prefix/suffix."""
f_names = []
for r,d,files in os.walk(f_dir):
for f in files:
if f.startswith(prefix) and f.endswith(suffix):
f_names.append('{}/{}'.format(r, f))
return f_names
def get_colors(num_cols=6):
imgs = []
for suffix in ['jpg', 'jpeg', 'png', 'gif']:
for fname in get_filenames('static', suffix=suffix):
colors = colorgram.extract(fname, num_cols)
summary = []
for color in colors:
summary.append({
'rgb':tuple(color.rgb),
'hsl':tuple(color.hsl),
'percent':round(color.proportion * 100, 1)
})
imgs.append({
'fname':fname,
'colors':summary
})
return imgs
@app.route("/")
def home():
imgs = get_colors()
return render_template('color.html', imgs=imgs)
@app.route("/api")
def api_home():
imgs = get_colors()
return {'images':imgs}
@app.route("/<int:num_cols>")
def number_colors(num_cols):
imgs = get_colors(num_cols)
return render_template('color.html', imgs=imgs)
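# Hedged usage note (not part of the original file): served as a Flask app
# (e.g. `flask run` with FLASK_APP pointing at this module), "/" renders color
# swatches for every image under static/, "/api" returns the same data as
# JSON, and "/<n>" changes how many colors colorgram extracts per image.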
|
[
"colorgram.extract",
"flask.Flask",
"os.walk",
"flask.render_template"
] |
[((124, 139), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (129, 139), False, 'from flask import Flask\n'), ((314, 328), 'os.walk', 'os.walk', (['f_dir'], {}), '(f_dir)\n', (321, 328), False, 'import os\n'), ((1141, 1181), 'flask.render_template', 'render_template', (['"""color.html"""'], {'imgs': 'imgs'}), "('color.html', imgs=imgs)\n", (1156, 1181), False, 'from flask import render_template\n'), ((1372, 1412), 'flask.render_template', 'render_template', (['"""color.html"""'], {'imgs': 'imgs'}), "('color.html', imgs=imgs)\n", (1387, 1412), False, 'from flask import render_template\n'), ((660, 694), 'colorgram.extract', 'colorgram.extract', (['fname', 'num_cols'], {}), '(fname, num_cols)\n', (677, 694), False, 'import colorgram\n')]
|
"""The project."""
from dataclasses import dataclass
from jupiter.domain.entity_name import EntityName
from jupiter.domain.projects.project_key import ProjectKey
from jupiter.framework.aggregate_root import AggregateRoot
from jupiter.framework.base.entity_id import BAD_REF_ID
from jupiter.framework.base.timestamp import Timestamp
@dataclass()
class Project(AggregateRoot):
"""The project."""
@dataclass(frozen=True)
class Created(AggregateRoot.Created):
"""Created event."""
@dataclass(frozen=True)
class Updated(AggregateRoot.Updated):
"""Updated event."""
_key: ProjectKey
_name: EntityName
@staticmethod
def new_project(key: ProjectKey, name: EntityName, created_time: Timestamp) -> 'Project':
"""Create a project."""
project = Project(
_ref_id=BAD_REF_ID,
_archived=False,
_created_time=created_time,
_archived_time=None,
_last_modified_time=created_time,
_events=[],
_key=key,
_name=name)
project.record_event(Project.Created.make_event_from_frame_args(created_time))
return project
    def change_name(self, name: EntityName, modification_time: Timestamp) -> 'Project':
        """Change the name of the project."""
self._name = name
self.record_event(
Project.Updated.make_event_from_frame_args(modification_time))
return self
@property
def key(self) -> ProjectKey:
"""The key of the project."""
return self._key
@property
def name(self) -> EntityName:
"""The name of the project."""
return self._name
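# Comment added for clarity: mutations funnel through record_event(), so the
# aggregate accumulates Created/Updated events alongside its current state; a
# lightweight audit-trail pattern on top of AggregateRoot.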
|
[
"dataclasses.dataclass"
] |
[((336, 347), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (345, 347), False, 'from dataclasses import dataclass\n'), ((407, 429), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (416, 429), False, 'from dataclasses import dataclass\n'), ((507, 529), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (516, 529), False, 'from dataclasses import dataclass\n')]
|
import os
import logging
import testfixtures
from pyvivado import interface, signal, builder
from rfgnocchi import config, ettus
logger = logging.getLogger(__name__)
def get_mult_interface(params):
module_name = 'Mult'
width_A = params['width_A']
width_B = params['width_B']
width_P = params['width_P']
drop_top_P = params['drop_top_P']
latency = params['latency']
cascade_out = params['cascade_out']
builder = ettus.get_builder('mult')
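    # Note: this local name shadows the `builder` module imported from
    # pyvivado above; only the Ettus builder object is used from here on.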
packages = []
module_parameters = {
'WIDTH_A': width_A,
'WIDTH_B': width_B,
'WIDTH_P': width_P,
'DROP_TOP_P': drop_top_P,
'LATENCY': latency,
'CASCADE_OUT': cascade_out,
}
wires_in = (
('reset', signal.std_logic_type),
('a_tdata', signal.StdLogicVector(width=width_A)),
('a_tlast', signal.std_logic_type),
('a_tvalid', signal.std_logic_type),
('b_tdata', signal.StdLogicVector(width=width_B)),
('b_tlast', signal.std_logic_type),
('b_tvalid', signal.std_logic_type),
('p_tready', signal.std_logic_type),
)
wires_out = (
('a_tready', signal.std_logic_type),
('b_tready', signal.std_logic_type),
('p_tdata', signal.StdLogicVector(width=width_P)),
('p_tlast', signal.std_logic_type),
('p_tvalid', signal.std_logic_type),
)
iface = interface.Interface(
wires_in, wires_out, module_name=module_name,
parameters=params, module_parameters=module_parameters,
packages=packages, builder=builder, clock_names=['clk'])
return iface
assert('Mult' not in interface.module_register)
interface.module_register['Mult'] = get_mult_interface
|
[
"pyvivado.signal.StdLogicVector",
"rfgnocchi.ettus.get_builder",
"pyvivado.interface.Interface",
"logging.getLogger"
] |
[((149, 176), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (166, 176), False, 'import logging\n'), ((467, 492), 'rfgnocchi.ettus.get_builder', 'ettus.get_builder', (['"""mult"""'], {}), "('mult')\n", (484, 492), False, 'from rfgnocchi import config, ettus\n'), ((1432, 1619), 'pyvivado.interface.Interface', 'interface.Interface', (['wires_in', 'wires_out'], {'module_name': 'module_name', 'parameters': 'params', 'module_parameters': 'module_parameters', 'packages': 'packages', 'builder': 'builder', 'clock_names': "['clk']"}), "(wires_in, wires_out, module_name=module_name,\n parameters=params, module_parameters=module_parameters, packages=\n packages, builder=builder, clock_names=['clk'])\n", (1451, 1619), False, 'from pyvivado import interface, signal, builder\n'), ((816, 852), 'pyvivado.signal.StdLogicVector', 'signal.StdLogicVector', ([], {'width': 'width_A'}), '(width=width_A)\n', (837, 852), False, 'from pyvivado import interface, signal, builder\n'), ((967, 1003), 'pyvivado.signal.StdLogicVector', 'signal.StdLogicVector', ([], {'width': 'width_B'}), '(width=width_B)\n', (988, 1003), False, 'from pyvivado import interface, signal, builder\n'), ((1282, 1318), 'pyvivado.signal.StdLogicVector', 'signal.StdLogicVector', ([], {'width': 'width_P'}), '(width=width_P)\n', (1303, 1318), False, 'from pyvivado import interface, signal, builder\n')]
|
from nox_poetry import session, SDIST
args = lambda s: s.split()
@session(python=['3.7', '3.8', '3.9'])
def test_main(session):
session.poetry.installroot(distribution_format=SDIST)
session.install('pytest')
session.run(*args('pytest -v tests/main'))
@session(python=['3.7', '3.8', '3.9'])
def test_geo(session):
session.poetry.installroot(distribution_format=SDIST)
session.install('csv-reconcile-geo')
session.install('pytest')
session.run(*args('pytest -v tests/plugins/geo'))
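# Hedged usage note (not part of the original file): each @session function
# becomes a nox session parametrized over the listed interpreters (invoked,
# e.g., as `nox -s test_main-3.9`); installroot(distribution_format=SDIST)
# builds the project as an sdist and installs it into the session's
# virtualenv before the tests run.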
|
[
"nox_poetry.session.install",
"nox_poetry.session.poetry.installroot",
"nox_poetry.session"
] |
[((69, 106), 'nox_poetry.session', 'session', ([], {'python': "['3.7', '3.8', '3.9']"}), "(python=['3.7', '3.8', '3.9'])\n", (76, 106), False, 'from nox_poetry import session, SDIST\n'), ((269, 306), 'nox_poetry.session', 'session', ([], {'python': "['3.7', '3.8', '3.9']"}), "(python=['3.7', '3.8', '3.9'])\n", (276, 306), False, 'from nox_poetry import session, SDIST\n'), ((135, 188), 'nox_poetry.session.poetry.installroot', 'session.poetry.installroot', ([], {'distribution_format': 'SDIST'}), '(distribution_format=SDIST)\n', (161, 188), False, 'from nox_poetry import session, SDIST\n'), ((193, 218), 'nox_poetry.session.install', 'session.install', (['"""pytest"""'], {}), "('pytest')\n", (208, 218), False, 'from nox_poetry import session, SDIST\n'), ((334, 387), 'nox_poetry.session.poetry.installroot', 'session.poetry.installroot', ([], {'distribution_format': 'SDIST'}), '(distribution_format=SDIST)\n', (360, 387), False, 'from nox_poetry import session, SDIST\n'), ((392, 428), 'nox_poetry.session.install', 'session.install', (['"""csv-reconcile-geo"""'], {}), "('csv-reconcile-geo')\n", (407, 428), False, 'from nox_poetry import session, SDIST\n'), ((433, 458), 'nox_poetry.session.install', 'session.install', (['"""pytest"""'], {}), "('pytest')\n", (448, 458), False, 'from nox_poetry import session, SDIST\n')]
|
from typing import List, Tuple
from soco.data_center.online import stop
from soco.data_center.online.uni_dimensional import (
lazy_capacity_provisioning,
memoryless,
probabilistic,
randomized,
randomly_biased_greedy,
)
from soco.data_center.online.multi_dimensional import (
horizon_control,
lazy_budgeting,
online_balanced_descent,
online_gradient_descent,
)
from soco.data_center.model import DataCenterModel
from tqdm import tqdm
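# address of the backend server used by the start/next/stop calls below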
ADDR = "127.0.0.1:3449"
def evaluate(
alg,
online_inp: List[List[List[List[int]]]],
fractional,
integral,
m,
initial_runtime,
) -> Tuple[float, float]:
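    """Step the online algorithm through the input, collecting costs and runtimes."""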
initial_cost = fractional[1][0]
initial_int_cost = integral[1][0]
cost = initial_cost
int_cost = initial_int_cost
energy_cost = integral[1][1].energy_cost if integral[1][1] is not None else 0
revenue_loss = integral[1][1].revenue_loss if integral[1][1] is not None else 0
assert int_cost >= energy_cost + revenue_loss
runtimes = []
initial_xs = integral[0]
xs = initial_xs.copy()
ms = []
for i in tqdm(range(len(online_inp))):
fractional, integral, m, runtime = alg.next(ADDR, online_inp[i])
cost = fractional[1][0]
int_cost = integral[1][0]
if integral[1][1] is not None:
energy_cost = integral[1][1].energy_cost
revenue_loss = integral[1][1].revenue_loss
assert int_cost >= energy_cost + revenue_loss
xs.append(integral[0])
ms.append(m)
runtimes.append(runtime)
stop(ADDR)
switching_cost = int_cost - energy_cost - revenue_loss
assert len(xs) - len(initial_xs) == len(online_inp)
print(f"Resulting schedule: {xs}")
return (
initial_cost,
cost,
initial_int_cost,
int_cost,
energy_cost,
revenue_loss,
switching_cost,
initial_runtime,
runtimes,
ms,
)
def evaluate_fractional_lazy_capacity_provisioning(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
w: int = 0,
) -> Tuple[float, float]:
(
fractional,
integral,
m,
initial_runtime,
) = lazy_capacity_provisioning.fractional.start(ADDR, model, offline_inp, w)
return evaluate(
lazy_capacity_provisioning.fractional,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_integral_lazy_capacity_provisioning(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
w: int = 0,
) -> Tuple[float, float]:
(
fractional,
integral,
m,
initial_runtime,
) = lazy_capacity_provisioning.integral.start(ADDR, model, offline_inp, w)
return evaluate(
lazy_capacity_provisioning.integral,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_memoryless(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
fractional, integral, m, initial_runtime = memoryless.start(
ADDR, model, offline_inp, 0
)
return evaluate(memoryless, online_inp, fractional, integral, m, initial_runtime)
def evaluate_probabilistic(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
options = probabilistic.Options(probabilistic.Breakpoints([]))
fractional, integral, m, initial_runtime = probabilistic.start(
ADDR, model, offline_inp, 0, options
)
return evaluate(probabilistic, online_inp, fractional, integral, m, initial_runtime)
def evaluate_randomized_probabilistic(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
fractional, integral, m, initial_runtime = randomized.probabilistic.start(
ADDR, model, offline_inp, 0
)
return evaluate(
randomized.probabilistic, online_inp, fractional, integral, m, initial_runtime
)
def evaluate_randomized_randomly_biased_greedy(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
fractional, integral, m, initial_runtime = randomized.randomly_biased_greedy.start(
ADDR, model, offline_inp, 0
)
return evaluate(
randomized.randomly_biased_greedy,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_randomly_biased_greedy(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
theta: float = 1,
) -> Tuple[float, float]:
options = randomly_biased_greedy.Options(theta)
fractional, integral, m, initial_runtime = randomly_biased_greedy.start(
ADDR, model, offline_inp, 0, options
)
return evaluate(
randomly_biased_greedy, online_inp, fractional, integral, m, initial_runtime
)
def evaluate_lazy_budgeting_slo(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
randomized: bool = False,
) -> Tuple[float, float]:
options = lazy_budgeting.smoothed_load_optimization.Options(randomized)
(
fractional,
integral,
m,
initial_runtime,
) = lazy_budgeting.smoothed_load_optimization.start(
ADDR, model, offline_inp, 0, options
)
return evaluate(
lazy_budgeting.smoothed_load_optimization,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_lazy_budgeting_sblo(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
epsilon: float = 0.25,
) -> Tuple[float, float]:
options = lazy_budgeting.smoothed_balanced_load_optimization.Options(epsilon)
(
fractional,
integral,
m,
initial_runtime,
) = lazy_budgeting.smoothed_balanced_load_optimization.start(
ADDR, model, offline_inp, 0, options
)
return evaluate(
lazy_budgeting.smoothed_balanced_load_optimization,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_pobd(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
beta: float = 0.5,
) -> Tuple[float, float]:
options = online_balanced_descent.primal.Options.euclidean_squared(beta)
(
fractional,
integral,
m,
initial_runtime,
) = online_balanced_descent.primal.start(ADDR, model, offline_inp, 0, options)
return evaluate(
online_balanced_descent.primal,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_dobd(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
eta: float = 1,
) -> Tuple[float, float]:
options = online_balanced_descent.dual.Options.euclidean_squared(eta)
(
fractional,
integral,
m,
initial_runtime,
) = online_balanced_descent.dual.start(ADDR, model, offline_inp, 0, options)
return evaluate(
online_balanced_descent.dual,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_ogd(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
options = online_gradient_descent.Options.sqrt()
(
fractional,
integral,
m,
initial_runtime,
) = online_gradient_descent.start(ADDR, model, offline_inp, 0, options)
return evaluate(
online_gradient_descent,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_receding_horizon_control(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
w: int = 0,
) -> Tuple[float, float]:
(
fractional,
integral,
m,
initial_runtime,
) = horizon_control.receding_horizon_control.start(ADDR, model, offline_inp, w)
return evaluate(
horizon_control.receding_horizon_control,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
def evaluate_averaging_fixed_horizon_control(
model: DataCenterModel,
offline_inp: List[List[int]],
online_inp: List[List[List[List[int]]]],
w: int = 0,
) -> Tuple[float, float]:
(
fractional,
integral,
m,
initial_runtime,
) = horizon_control.averaging_fixed_horizon_control.start(
ADDR, model, offline_inp, w
)
return evaluate(
horizon_control.averaging_fixed_horizon_control,
online_inp,
fractional,
integral,
m,
initial_runtime,
)
|
[
"soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_balanced_load_optimization.Options",
"soco.data_center.online.uni_dimensional.lazy_capacity_provisioning.integral.start",
"soco.data_center.online.multi_dimensional.online_balanced_descent.primal.Options.euclidean_squared",
"soco.data_center.online.uni_dimensional.memoryless.start",
"soco.data_center.online.uni_dimensional.randomly_biased_greedy.start",
"soco.data_center.online.uni_dimensional.randomly_biased_greedy.Options",
"soco.data_center.online.uni_dimensional.randomized.probabilistic.start",
"soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_balanced_load_optimization.start",
"soco.data_center.online.multi_dimensional.horizon_control.receding_horizon_control.start",
"soco.data_center.online.multi_dimensional.online_gradient_descent.Options.sqrt",
"soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_load_optimization.start",
"soco.data_center.online.multi_dimensional.online_balanced_descent.dual.start",
"soco.data_center.online.multi_dimensional.online_balanced_descent.primal.start",
"soco.data_center.online.uni_dimensional.lazy_capacity_provisioning.fractional.start",
"soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_load_optimization.Options",
"soco.data_center.online.multi_dimensional.online_balanced_descent.dual.Options.euclidean_squared",
"soco.data_center.online.multi_dimensional.horizon_control.averaging_fixed_horizon_control.start",
"soco.data_center.online.stop",
"soco.data_center.online.multi_dimensional.online_gradient_descent.start",
"soco.data_center.online.uni_dimensional.probabilistic.Breakpoints",
"soco.data_center.online.uni_dimensional.randomized.randomly_biased_greedy.start",
"soco.data_center.online.uni_dimensional.probabilistic.start"
] |
[((1553, 1563), 'soco.data_center.online.stop', 'stop', (['ADDR'], {}), '(ADDR)\n', (1557, 1563), False, 'from soco.data_center.online import stop\n'), ((2233, 2305), 'soco.data_center.online.uni_dimensional.lazy_capacity_provisioning.fractional.start', 'lazy_capacity_provisioning.fractional.start', (['ADDR', 'model', 'offline_inp', 'w'], {}), '(ADDR, model, offline_inp, w)\n', (2276, 2305), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((2763, 2833), 'soco.data_center.online.uni_dimensional.lazy_capacity_provisioning.integral.start', 'lazy_capacity_provisioning.integral.start', (['ADDR', 'model', 'offline_inp', 'w'], {}), '(ADDR, model, offline_inp, w)\n', (2804, 2833), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((3207, 3252), 'soco.data_center.online.uni_dimensional.memoryless.start', 'memoryless.start', (['ADDR', 'model', 'offline_inp', '(0)'], {}), '(ADDR, model, offline_inp, 0)\n', (3223, 3252), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((3630, 3687), 'soco.data_center.online.uni_dimensional.probabilistic.start', 'probabilistic.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model, offline_inp, 0, options)\n', (3649, 3687), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((4012, 4071), 'soco.data_center.online.uni_dimensional.randomized.probabilistic.start', 'randomized.probabilistic.start', (['ADDR', 'model', 'offline_inp', '(0)'], {}), '(ADDR, model, offline_inp, 0)\n', (4042, 4071), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((4430, 4498), 'soco.data_center.online.uni_dimensional.randomized.randomly_biased_greedy.start', 'randomized.randomly_biased_greedy.start', (['ADDR', 'model', 'offline_inp', '(0)'], {}), '(ADDR, model, offline_inp, 0)\n', (4469, 4498), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((4885, 4922), 'soco.data_center.online.uni_dimensional.randomly_biased_greedy.Options', 'randomly_biased_greedy.Options', (['theta'], {}), '(theta)\n', (4915, 4922), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((4970, 5036), 'soco.data_center.online.uni_dimensional.randomly_biased_greedy.start', 'randomly_biased_greedy.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model, offline_inp, 0, options)\n', (4998, 5036), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n'), ((5375, 5436), 'soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_load_optimization.Options', 'lazy_budgeting.smoothed_load_optimization.Options', (['randomized'], {}), '(randomized)\n', (5424, 5436), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((5525, 5614), 
'soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_load_optimization.start', 'lazy_budgeting.smoothed_load_optimization.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model, offline_inp, 0,\n options)\n', (5572, 5614), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((6007, 6074), 'soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_balanced_load_optimization.Options', 'lazy_budgeting.smoothed_balanced_load_optimization.Options', (['epsilon'], {}), '(epsilon)\n', (6065, 6074), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((6163, 6261), 'soco.data_center.online.multi_dimensional.lazy_budgeting.smoothed_balanced_load_optimization.start', 'lazy_budgeting.smoothed_balanced_load_optimization.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model,\n offline_inp, 0, options)\n', (6219, 6261), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((6644, 6706), 'soco.data_center.online.multi_dimensional.online_balanced_descent.primal.Options.euclidean_squared', 'online_balanced_descent.primal.Options.euclidean_squared', (['beta'], {}), '(beta)\n', (6700, 6706), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((6795, 6869), 'soco.data_center.online.multi_dimensional.online_balanced_descent.primal.start', 'online_balanced_descent.primal.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model, offline_inp, 0, options)\n', (6831, 6869), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((7219, 7278), 'soco.data_center.online.multi_dimensional.online_balanced_descent.dual.Options.euclidean_squared', 'online_balanced_descent.dual.Options.euclidean_squared', (['eta'], {}), '(eta)\n', (7273, 7278), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((7367, 7439), 'soco.data_center.online.multi_dimensional.online_balanced_descent.dual.start', 'online_balanced_descent.dual.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model, offline_inp, 0, options)\n', (7401, 7439), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((7766, 7804), 'soco.data_center.online.multi_dimensional.online_gradient_descent.Options.sqrt', 'online_gradient_descent.Options.sqrt', ([], {}), '()\n', (7802, 7804), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((7893, 7960), 'soco.data_center.online.multi_dimensional.online_gradient_descent.start', 'online_gradient_descent.start', (['ADDR', 'model', 'offline_inp', '(0)', 'options'], {}), '(ADDR, model, offline_inp, 0, options)\n', (7922, 7960), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((8393, 8468), 
'soco.data_center.online.multi_dimensional.horizon_control.receding_horizon_control.start', 'horizon_control.receding_horizon_control.start', (['ADDR', 'model', 'offline_inp', 'w'], {}), '(ADDR, model, offline_inp, w)\n', (8439, 8468), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((8925, 9011), 'soco.data_center.online.multi_dimensional.horizon_control.averaging_fixed_horizon_control.start', 'horizon_control.averaging_fixed_horizon_control.start', (['ADDR', 'model', 'offline_inp', 'w'], {}), '(ADDR, model,\n offline_inp, w)\n', (8978, 9011), False, 'from soco.data_center.online.multi_dimensional import horizon_control, lazy_budgeting, online_balanced_descent, online_gradient_descent\n'), ((3552, 3581), 'soco.data_center.online.uni_dimensional.probabilistic.Breakpoints', 'probabilistic.Breakpoints', (['[]'], {}), '([])\n', (3577, 3581), False, 'from soco.data_center.online.uni_dimensional import lazy_capacity_provisioning, memoryless, probabilistic, randomized, randomly_biased_greedy\n')]
|
#!/usr/bin/env python3
from cv_bridge import CvBridge, CvBridgeError
import cv2
import rospy
from sensor_msgs.msg import Image
from gazebo_msgs.srv import GetModelState
from tf.transformations import euler_from_quaternion
import numpy as np
import json
import os
data_dir = os.path.abspath( os.path.join(os.path.dirname(__file__), os.pardir)) + "/data/"
def main():
rospy.init_node("satview_streamer")
bridge = CvBridge()
sv_filename = data_dir + "hk_data_sv_mean.json"
sv_data = {}
with open(sv_filename, "rb") as fstream:
sv_data = json.load(fstream)
print(sv_data.keys())
cno_max = np.max([sv_data[key]["cno_max"] for key in sv_data.keys()])
gms = rospy.ServiceProxy("/gazebo/get_model_state", GetModelState)
img_pub = rospy.Publisher("skycam/satview", Image, queue_size=1)
start_time = rospy.get_rostime()
cb_args = [bridge, sv_data, gms, img_pub, start_time, cno_max]
img_sub = rospy.Subscriber("skycam/image_raw", Image, img_sub_cb, cb_args)
def img_sub_cb(data, cb_args):
bridge = cb_args[0]
sv_data = cb_args[1]
gms = cb_args[2]
img_pub = cb_args[3]
start_time = cb_args[4]
cno_max = cb_args[5]
model_pose = gms("laser_0", "world")
model_euler = euler_from_quaternion(
[model_pose.pose.orientation.x,
model_pose.pose.orientation.y,
model_pose.pose.orientation.z,
model_pose.pose.orientation.w,]
)
# ENU (gzb) to NED
heading = np.pi/2 - model_euler[2]
cv_img = bridge.imgmsg_to_cv2(data, "bgr8")
cv_img = np.array(np.flip(cv_img, axis=0))
img_height = cv_img.shape[0]
img_width = cv_img.shape[1]
img_center = np.array([img_height/2.0, img_width/2.0]) # [250 250]
r_max = np.min(img_center)
green = (0, 255, 0)
red = (0, 0, 255)
blue = (255, 0, 0)
now = rospy.get_rostime()
elapsed = (now-start_time).to_sec()
for sv_id in sv_data.keys():
elev = sv_data[sv_id]["mean"][0]
azim = sv_data[sv_id]["mean"][1]
index = int(elapsed*10 % len(sv_data[sv_id]["cno"]))
cno = sv_data[sv_id]["cno"][index]
# print(sv_id+" cno: ", cno)
# print(sv_id+" color: ", int((cno)/cno_max*255), int((cno_max - cno)/cno_max*255))
r = (90.0 - elev)/90.0 * r_max
theta = np.deg2rad(azim) - np.pi/2 - heading
x = int(r*np.cos(theta) + img_center[0])
y = int(r*np.sin(theta) + img_center[1])
cv2.circle(cv_img, (x, y), 10, (0, int((cno)/cno_max*255), int((cno_max-cno)/cno_max*255)/2), -1)
cv2.circle(cv_img, (x, y), 11, (0, 0, 255), 2)
cv2.putText(cv_img, sv_id, (x-10, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.3, green, 1)
nesw = ["N", "E", "S", "W"]
for i in range(4):
theta = i*np.pi/2 - np.pi/2 - heading
r = 235
x = int(r*np.cos(theta) + img_center[0])
y = int(r*np.sin(theta) + img_center[1])
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(cv_img, nesw[i], (x,y), font, 0.5, green, 2)
ros_img = bridge.cv2_to_imgmsg(cv_img, "bgr8")
img_pub.publish(ros_img)
# cv2.imshow("skycam", cv_img)
# k = cv2.waitKey(3) & 0xff
if __name__ == "__main__":
main()
try:
rospy.spin()
except KeyboardInterrupt:
print("Shutting down...")
cv2.destroyAllWindows()
|
[
"rospy.Subscriber",
"rospy.ServiceProxy",
"numpy.sin",
"os.path.dirname",
"rospy.init_node",
"cv2.destroyAllWindows",
"cv2.circle",
"numpy.min",
"numpy.cos",
"cv_bridge.CvBridge",
"json.load",
"numpy.flip",
"cv2.putText",
"numpy.deg2rad",
"rospy.get_rostime",
"rospy.Publisher",
"numpy.array",
"tf.transformations.euler_from_quaternion",
"rospy.spin"
] |
[((374, 409), 'rospy.init_node', 'rospy.init_node', (['"""satview_streamer"""'], {}), "('satview_streamer')\n", (389, 409), False, 'import rospy\n'), ((428, 438), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (436, 438), False, 'from cv_bridge import CvBridge, CvBridgeError\n'), ((700, 760), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""/gazebo/get_model_state"""', 'GetModelState'], {}), "('/gazebo/get_model_state', GetModelState)\n", (718, 760), False, 'import rospy\n'), ((775, 829), 'rospy.Publisher', 'rospy.Publisher', (['"""skycam/satview"""', 'Image'], {'queue_size': '(1)'}), "('skycam/satview', Image, queue_size=1)\n", (790, 829), False, 'import rospy\n'), ((847, 866), 'rospy.get_rostime', 'rospy.get_rostime', ([], {}), '()\n', (864, 866), False, 'import rospy\n'), ((948, 1012), 'rospy.Subscriber', 'rospy.Subscriber', (['"""skycam/image_raw"""', 'Image', 'img_sub_cb', 'cb_args'], {}), "('skycam/image_raw', Image, img_sub_cb, cb_args)\n", (964, 1012), False, 'import rospy\n'), ((1258, 1415), 'tf.transformations.euler_from_quaternion', 'euler_from_quaternion', (['[model_pose.pose.orientation.x, model_pose.pose.orientation.y, model_pose.\n pose.orientation.z, model_pose.pose.orientation.w]'], {}), '([model_pose.pose.orientation.x, model_pose.pose.\n orientation.y, model_pose.pose.orientation.z, model_pose.pose.\n orientation.w])\n', (1279, 1415), False, 'from tf.transformations import euler_from_quaternion\n'), ((1694, 1739), 'numpy.array', 'np.array', (['[img_height / 2.0, img_width / 2.0]'], {}), '([img_height / 2.0, img_width / 2.0])\n', (1702, 1739), True, 'import numpy as np\n'), ((1765, 1783), 'numpy.min', 'np.min', (['img_center'], {}), '(img_center)\n', (1771, 1783), True, 'import numpy as np\n'), ((1868, 1887), 'rospy.get_rostime', 'rospy.get_rostime', ([], {}), '()\n', (1885, 1887), False, 'import rospy\n'), ((3342, 3365), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3363, 3365), False, 'import cv2\n'), ((571, 589), 'json.load', 'json.load', (['fstream'], {}), '(fstream)\n', (580, 589), False, 'import json\n'), ((1582, 1605), 'numpy.flip', 'np.flip', (['cv_img'], {'axis': '(0)'}), '(cv_img, axis=0)\n', (1589, 1605), True, 'import numpy as np\n'), ((2589, 2635), 'cv2.circle', 'cv2.circle', (['cv_img', '(x, y)', '(11)', '(0, 0, 255)', '(2)'], {}), '(cv_img, (x, y), 11, (0, 0, 255), 2)\n', (2599, 2635), False, 'import cv2\n'), ((2644, 2733), 'cv2.putText', 'cv2.putText', (['cv_img', 'sv_id', '(x - 10, y - 15)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.3)', 'green', '(1)'], {}), '(cv_img, sv_id, (x - 10, y - 15), cv2.FONT_HERSHEY_SIMPLEX, 0.3,\n green, 1)\n', (2655, 2733), False, 'import cv2\n'), ((2994, 3051), 'cv2.putText', 'cv2.putText', (['cv_img', 'nesw[i]', '(x, y)', 'font', '(0.5)', 'green', '(2)'], {}), '(cv_img, nesw[i], (x, y), font, 0.5, green, 2)\n', (3005, 3051), False, 'import cv2\n'), ((3260, 3272), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3270, 3272), False, 'import rospy\n'), ((306, 331), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (321, 331), False, 'import os\n'), ((2338, 2354), 'numpy.deg2rad', 'np.deg2rad', (['azim'], {}), '(azim)\n', (2348, 2354), True, 'import numpy as np\n'), ((2394, 2407), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (2400, 2407), True, 'import numpy as np\n'), ((2443, 2456), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (2449, 2456), True, 'import numpy as np\n'), ((2866, 2879), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (2872, 2879), True, 'import numpy 
as np\n'), ((2915, 2928), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (2921, 2928), True, 'import numpy as np\n')]
|
from __future__ import unicode_literals
import unittest
from datetime import date
from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import *
from infi.clickhouse_orm.engines import *
class ArrayFieldsTest(unittest.TestCase):
def setUp(self):
self.database = Database('test-db')
self.database.create_table(ModelWithArrays)
def tearDown(self):
self.database.drop_database()
def test_insert_and_select(self):
instance = ModelWithArrays(
date_field='2016-08-30',
arr_str=['goodbye,', 'cruel', 'world', 'special chars: ,"\\\'` \n\t\\[]'],
arr_date=['2010-01-01'],
)
self.database.insert([instance])
query = 'SELECT * from $db.modelwitharrays ORDER BY date_field'
for model_cls in (ModelWithArrays, None):
results = list(self.database.select(query, model_cls))
self.assertEqual(len(results), 1)
self.assertEqual(results[0].arr_str, instance.arr_str)
self.assertEqual(results[0].arr_int, instance.arr_int)
self.assertEqual(results[0].arr_date, instance.arr_date)
def test_conversion(self):
instance = ModelWithArrays(
arr_int=('1', '2', '3'),
arr_date=['2010-01-01']
)
self.assertEqual(instance.arr_str, [])
self.assertEqual(instance.arr_int, [1, 2, 3])
self.assertEqual(instance.arr_date, [date(2010, 1, 1)])
def test_assignment_error(self):
instance = ModelWithArrays()
for value in (7, 'x', [date.today()], ['aaa'], [None]):
with self.assertRaises(ValueError):
instance.arr_int = value
def test_parse_array(self):
from infi.clickhouse_orm.utils import parse_array, unescape
self.assertEqual(parse_array("[]"), [])
self.assertEqual(parse_array("[1, 2, 395, -44]"), ["1", "2", "395", "-44"])
self.assertEqual(parse_array("['big','mouse','','!']"), ["big", "mouse", "", "!"])
self.assertEqual(parse_array(unescape("['\\r\\n\\0\\t\\b']")), ["\r\n\0\t\b"])
for s in ("",
"[",
"]",
"[1, 2",
"3, 4]",
"['aaa', 'aaa]"):
with self.assertRaises(ValueError):
parse_array(s)
def test_invalid_inner_field(self):
for x in (DateField, None, "", ArrayField(Int32Field())):
with self.assertRaises(AssertionError):
ArrayField(x)
class ModelWithArrays(Model):
date_field = DateField()
arr_str = ArrayField(StringField())
arr_int = ArrayField(Int32Field())
arr_date = ArrayField(DateField())
engine = MergeTree('date_field', ('date_field',))
|
[
"datetime.date",
"infi.clickhouse_orm.utils.unescape",
"datetime.date.today",
"infi.clickhouse_orm.utils.parse_array",
"infi.clickhouse_orm.database.Database"
] |
[((351, 370), 'infi.clickhouse_orm.database.Database', 'Database', (['"""test-db"""'], {}), "('test-db')\n", (359, 370), False, 'from infi.clickhouse_orm.database import Database\n'), ((1881, 1898), 'infi.clickhouse_orm.utils.parse_array', 'parse_array', (['"""[]"""'], {}), "('[]')\n", (1892, 1898), False, 'from infi.clickhouse_orm.utils import parse_array, unescape\n'), ((1929, 1960), 'infi.clickhouse_orm.utils.parse_array', 'parse_array', (['"""[1, 2, 395, -44]"""'], {}), "('[1, 2, 395, -44]')\n", (1940, 1960), False, 'from infi.clickhouse_orm.utils import parse_array, unescape\n'), ((2013, 2050), 'infi.clickhouse_orm.utils.parse_array', 'parse_array', (['"""[\'big\',\'mouse\',\'\',\'!\']"""'], {}), '("[\'big\',\'mouse\',\'\',\'!\']")\n', (2024, 2050), False, 'from infi.clickhouse_orm.utils import parse_array, unescape\n'), ((1508, 1524), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (1512, 1524), False, 'from datetime import date\n'), ((1633, 1645), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1643, 1645), False, 'from datetime import date\n'), ((2116, 2147), 'infi.clickhouse_orm.utils.unescape', 'unescape', (['"""[\'\\\\r\\\\n\\\\0\\\\t\\\\b\']"""'], {}), '("[\'\\\\r\\\\n\\\\0\\\\t\\\\b\']")\n', (2124, 2147), False, 'from infi.clickhouse_orm.utils import parse_array, unescape\n'), ((2388, 2402), 'infi.clickhouse_orm.utils.parse_array', 'parse_array', (['s'], {}), '(s)\n', (2399, 2402), False, 'from infi.clickhouse_orm.utils import parse_array, unescape\n')]
|
import Utils
from config.Resources import UnitTypes
from controllers.Controller import Controller
class ZombieController(Controller):
def __init__(self):
self.good_points = []
def do(self, unit, all_units):
target = list(all_units)[0]
#target = self.find_nearest_obj(unit, all_units, {UnitTypes.core})
better_target = target.pos
priority = unit.hp - 6
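        # prefer a remembered point that is closer to the target, or nearly as close but reached with more hp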
for point in self.good_points:
if Utils.classical_dist(better_target, target.pos) > Utils.classical_dist(point[0], target.pos) \
or (Utils.classical_dist(better_target, target.pos) > Utils.classical_dist(point[0], target.pos) - 40 and point[1] > priority):
better_target = point[0]
priority = point[1]
if target is None:
exit(0)
unit.reload()
if unit.move(better_target) is False:
obj = self.find_nearest_obj(unit, all_units)
if obj.unit_type is UnitTypes.wall:
target = obj
if target:
unit.attack(target)
self.good_points.append((unit.pos, unit.hp))
if len(self.good_points) > 1000:
self.good_points.clear()
|
[
"Utils.classical_dist"
] |
[((457, 504), 'Utils.classical_dist', 'Utils.classical_dist', (['better_target', 'target.pos'], {}), '(better_target, target.pos)\n', (477, 504), False, 'import Utils\n'), ((507, 549), 'Utils.classical_dist', 'Utils.classical_dist', (['point[0]', 'target.pos'], {}), '(point[0], target.pos)\n', (527, 549), False, 'import Utils\n'), ((576, 623), 'Utils.classical_dist', 'Utils.classical_dist', (['better_target', 'target.pos'], {}), '(better_target, target.pos)\n', (596, 623), False, 'import Utils\n'), ((626, 668), 'Utils.classical_dist', 'Utils.classical_dist', (['point[0]', 'target.pos'], {}), '(point[0], target.pos)\n', (646, 668), False, 'import Utils\n')]
|
import csv
import numpy as np
import os
import sklearn
from sklearn import neighbors, datasets, preprocessing
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, auc
from sklearn.neighbors import KNeighborsClassifier
import math
from sklearn.multioutput import MultiOutputClassifier
'''
3 6
1 4
2 5
'''
def logistic(X_train, y_train, X_test):
output = np.zeros((2705, 147))#2705 or 1967
# X_train, X_test, y_train, y_test = train_test_split(X, Y)
# print(y_train)
# clf.predict_proba(X)
clf = LogisticRegression(random_state=0, solver='saga', n_jobs=-1, max_iter=250)
    for k in range(147):
        clf.fit(X_train, y_train[:, k])
        print(k+1, "/147")
y_predict = clf.predict_proba(X_test)
output[:, k] = y_predict[:, 1]
np.savetxt("logistic.csv", output, delimiter=",")
def RandForest(X_train, y_train, X_test):
# X_train, X_test, y_train, y_test = train_test_split(X, Y)
output = np.zeros((2705,147))
for k in range(147):
clf = RandomForestClassifier(n_estimators=100, max_depth=60,
random_state=0)
clf.fit(X_train, y_train[:,k])
        print(k+1, "/147")
y_predict = clf.predict_proba(X_test)
# print(y_predict)
output[:, k] = y_predict[:, 1]
# print(y_test)
# print("\n\n",output)
# fpr, tpr, thresholds = sklearn.metrics.roc_curve(y_test, output)
# roc_auc = auc(fpr, tpr)
# print(roc_auc)
np.savetxt("RandomForest.csv", output, delimiter=",")
print("done")
def KNN(X_train, x_test, y_train, y_test):
knn = KNeighborsClassifier(algorithm='auto', metric='minkowski', metric_params=None, n_jobs=-1,
n_neighbors=147, p=2, weights='distance')
    print("fitting KNN...")
knn.fit(X_train, y_train)
classifier = MultiOutputClassifier(knn, n_jobs=-1)
classifier.fit(X_train, y_train)
y_predict = (classifier.predict_proba(x_test))
output = np.zeros((1967,147)) #2597
for x in range(1967):
for y in range(147):
output[x][y] = y_predict[y][x][1]
# print(output)
# np.savetxt("sub.csv", output, delimiter=",")
    print(classifier.score(x_test, y_test))
def main():
Trainfiles = 7868
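    # stack each track's summary feature vector into one training matrix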
TrainList = np.zeros((7868, 76))
for x in range(Trainfiles):
filename = "/Users/harrymargalotti/MLfinal/MachineLearningFinal/Kaggle_Final/train_feature_files/" + str(
x) + ".npz"
data = np.load(filename)
TrainList[x] = data['summary']
X = TrainList
X = np.nan_to_num(X)
tesetfile = 2705
testList = np.zeros((2705, 76))
for x in range(tesetfile):
filename = "/Users/harrymargalotti/MLfinal/MachineLearningFinal/Kaggle_Final/test_feature_files/" + str(
x) + ".npz"
data = np.load(filename)
testList[x] = data['summary']
xtest = testList
xtest= np.nan_to_num(xtest)
file = '/Users/harrymargalotti/MLfinal/MachineLearningFinal/Kaggle_Final/cal10k_train_data.csv'
y = np.array(list(csv.reader(open(file, "r"), delimiter=","))).astype("float")
# X_train, X_test, y_train, y_test = train_test_split(X, y)
# print("Xtrain: ", X_train.shape)
# print("Xtest: ",X_test.shape)
# print("ytrain: ", y_train.shape)
# print("ytest: ", y_test.shape)
# KNN(X_train, X_test, y_train, y_test)
print("data load done")
# RandForest(X, y, xtest)
pca = PCA()
pca.fit(X)
logistic(X, y, xtest)
main()
'''
Xtrain: (5901, 76)
Xtest: (1967, 76)
ytrain: (5901, 148)
ytest: (1967, 148)
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.33, random_state=42)
create a data matrix X_train from the 74-dimensional summary audio feature vectors for each of the 7868 training tracks: done
Load the cal10k_train_data matrix Y_train for the 147 genres and 7868 training tracks
Using scikit-learn, train 147 logistic regression classifiers with one for each genre
Iterate through the list of test npz files and create a data matrix X_test from the 74-dimensional summary audio feature vectors for each of the 2705 test tracks
Predict the probability of each test track and each genre. This should be a 2705-by-147 dimensional matrix called Y_predict
Format Y_predict so that it match the file format that is given in the cal10k_test_random_submission.csv
Upload your submission csv to Kaggle and check out the leaderboard
------------------------------------------------------------------------------------------------------------------------------
The training set is a subset of the data set used to train a model.
x_train is the training data set.
y_train is the set of labels to all the data in x_train.
The test set is a subset of the data set that you use to test your model after the model has gone through initial vetting by the validation set.
x_test is the test data set.
y_test is the set of labels to all the data in x_test.
'''
|
[
"sklearn.ensemble.RandomForestClassifier",
"numpy.load",
"numpy.nan_to_num",
"numpy.savetxt",
"numpy.zeros",
"sklearn.linear_model.LogisticRegression",
"sklearn.neighbors.KNeighborsClassifier",
"sklearn.decomposition.PCA",
"sklearn.multioutput.MultiOutputClassifier"
] |
[((573, 594), 'numpy.zeros', 'np.zeros', (['(2705, 147)'], {}), '((2705, 147))\n', (581, 594), True, 'import numpy as np\n'), ((730, 804), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(0)', 'solver': '"""saga"""', 'n_jobs': '(-1)', 'max_iter': '(250)'}), "(random_state=0, solver='saga', n_jobs=-1, max_iter=250)\n", (748, 804), False, 'from sklearn.linear_model import LogisticRegression\n'), ((986, 1035), 'numpy.savetxt', 'np.savetxt', (['"""logistic.csv"""', 'output'], {'delimiter': '""","""'}), "('logistic.csv', output, delimiter=',')\n", (996, 1035), True, 'import numpy as np\n'), ((1157, 1178), 'numpy.zeros', 'np.zeros', (['(2705, 147)'], {}), '((2705, 147))\n', (1165, 1178), True, 'import numpy as np\n'), ((1679, 1732), 'numpy.savetxt', 'np.savetxt', (['"""RandomForest.csv"""', 'output'], {'delimiter': '""","""'}), "('RandomForest.csv', output, delimiter=',')\n", (1689, 1732), True, 'import numpy as np\n'), ((1805, 1941), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {'algorithm': '"""auto"""', 'metric': '"""minkowski"""', 'metric_params': 'None', 'n_jobs': '(-1)', 'n_neighbors': '(147)', 'p': '(2)', 'weights': '"""distance"""'}), "(algorithm='auto', metric='minkowski', metric_params=\n None, n_jobs=-1, n_neighbors=147, p=2, weights='distance')\n", (1825, 1941), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((2028, 2065), 'sklearn.multioutput.MultiOutputClassifier', 'MultiOutputClassifier', (['knn'], {'n_jobs': '(-1)'}), '(knn, n_jobs=-1)\n', (2049, 2065), False, 'from sklearn.multioutput import MultiOutputClassifier\n'), ((2167, 2188), 'numpy.zeros', 'np.zeros', (['(1967, 147)'], {}), '((1967, 147))\n', (2175, 2188), True, 'import numpy as np\n'), ((2461, 2481), 'numpy.zeros', 'np.zeros', (['(7868, 76)'], {}), '((7868, 76))\n', (2469, 2481), True, 'import numpy as np\n'), ((2750, 2766), 'numpy.nan_to_num', 'np.nan_to_num', (['X'], {}), '(X)\n', (2763, 2766), True, 'import numpy as np\n'), ((2804, 2824), 'numpy.zeros', 'np.zeros', (['(2705, 76)'], {}), '((2705, 76))\n', (2812, 2824), True, 'import numpy as np\n'), ((3096, 3116), 'numpy.nan_to_num', 'np.nan_to_num', (['xtest'], {}), '(xtest)\n', (3109, 3116), True, 'import numpy as np\n'), ((3629, 3634), 'sklearn.decomposition.PCA', 'PCA', ([], {}), '()\n', (3632, 3634), False, 'from sklearn.decomposition import PCA\n'), ((1218, 1288), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {'n_estimators': '(100)', 'max_depth': '(60)', 'random_state': '(0)'}), '(n_estimators=100, max_depth=60, random_state=0)\n', (1240, 1288), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((2667, 2684), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (2674, 2684), True, 'import numpy as np\n'), ((3008, 3025), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (3015, 3025), True, 'import numpy as np\n')]
|
#!/usr/bin/python3.7
import time
from gpiozero import CPUTemperature
import psutil
import sys
from datetime import datetime
def check_temp(threshold=70):
'''
uses gpiozero to measure the cpu temp of the pi
returns a data structure with the result, a status flag
and a string representation of what is going on.
'''
cpu = CPUTemperature()
temp_C = cpu.temperature
alert = int(temp_C) >= threshold
alert_str = f'The temp is: {temp_C}'
return {'result': temp_C,
'alert_status': alert,
'message': alert_str}
def get_memory(threshold=95):
'''
using psutil determine how much free memory (RAM) there is.
uses the .free method which returns memory not used at all (zeroed) that is
readily available - note that this does not reflect the actual memory
available
param threshold is expressed as a percent
'''
vm = psutil.virtual_memory()
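    # 1073741824 = 2**30 bytes, i.e. one GiB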
free_m = vm.free/1073741824
return {'message': f'% of memory used: {vm.percent}\nGB free is: {free_m:.2f}',
'alert_status': vm.percent>threshold,
'result': f'{vm.percent}%'}
def get_cpu(duration=4, all_cpu=True, threshold=90):
'''
using psutil determine how much cpu is being used over duration.
uses .cpu_percent method with takes two parameters
interval (duration) and percpu (all_cpu)
returns a float representing the current system-wide CPU utilization
as a % when percpu is True returns a list of floats representing
the utilization as a % for each CPU.
'''
usage = psutil.cpu_percent(duration, all_cpu)
    avg = sum(usage)/len(usage) if all_cpu else usage  # cpu_percent returns a float when all_cpu is False
return {'result': usage,
'alert_status': avg > threshold,
'message': f'cpu usage over {duration} seconds is {usage}'}
def parse_arg_number(arg):
'''
helper to parse the command line argument
which should provide the number of times to cycle through
the tests
returns (True, <number of cycles as int>)
or if the input is invalid (None, None)
'''
try:
valid = type(int(arg)) == int
return (valid, int(arg))
except Exception as Error:
print('please provide a number as an argument for the number of tests that you wish to run')
return (None, None)
def parse_all_arg(arg):
'''
helper function to make sure additional functions are run when desired
'''
try:
if arg[2].lower() == 'all':
return True
except Exception as Error:
return None
health_functions = [check_temp]
all_funcs = [check_temp, get_memory, get_cpu]
if __name__ == '__main__':
repeat = 1
count = 0
if len(sys.argv) >= 2:
valid, arg = parse_arg_number(sys.argv[1])
if valid:
repeat = arg
invoke_all = parse_all_arg(sys.argv)
if invoke_all:
health_functions = all_funcs
while count < repeat:
print('\n')
for f in health_functions:
print(datetime.now())
check = f()
m_str = f"{check['message']}"
if check['alert_status']:
print('ALERT!')
print(m_str)
else:
print(m_str)
        count += 1
time.sleep(30)
|
[
"psutil.virtual_memory",
"gpiozero.CPUTemperature",
"time.sleep",
"datetime.datetime.now",
"psutil.cpu_percent"
] |
[((347, 363), 'gpiozero.CPUTemperature', 'CPUTemperature', ([], {}), '()\n', (361, 363), False, 'from gpiozero import CPUTemperature\n'), ((905, 928), 'psutil.virtual_memory', 'psutil.virtual_memory', ([], {}), '()\n', (926, 928), False, 'import psutil\n'), ((1570, 1607), 'psutil.cpu_percent', 'psutil.cpu_percent', (['duration', 'all_cpu'], {}), '(duration, all_cpu)\n', (1588, 1607), False, 'import psutil\n'), ((3240, 3254), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (3250, 3254), False, 'import time\n'), ((2986, 3000), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2998, 3000), False, 'from datetime import datetime\n')]
|
import elevator
import unittest
class TestElevator(unittest.TestCase):
def test_parenthesesCount_inputFile(self):
self.assertEqual(74, elevator.parenthesesCount("input1.1.txt"))
def test_parenthesesMismatch_inputFile(self):
self.assertEqual(1795, elevator.parenthesesMismatch("input1.1.txt"))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"elevator.parenthesesCount",
"elevator.parenthesesMismatch"
] |
[((352, 367), 'unittest.main', 'unittest.main', ([], {}), '()\n', (365, 367), False, 'import unittest\n'), ((149, 190), 'elevator.parenthesesCount', 'elevator.parenthesesCount', (['"""input1.1.txt"""'], {}), "('input1.1.txt')\n", (174, 190), False, 'import elevator\n'), ((274, 318), 'elevator.parenthesesMismatch', 'elevator.parenthesesMismatch', (['"""input1.1.txt"""'], {}), "('input1.1.txt')\n", (302, 318), False, 'import elevator\n')]
|
# coding: utf-8
# In[ ]:
import numpy
import sys
import glob
import matplotlib.pyplot
def analyze(filename):
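    """Plot column-wise mean, min, and max of a CSV file and save the figure."""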
data = numpy.loadtxt(fname=filename, delimiter=',')
fig = matplotlib.pyplot.figure(figsize=(10.0,3.0))
axes1 = fig.add_subplot(1,3,1)
axes2 = fig.add_subplot(1,3,2)
axes3 = fig.add_subplot(1,3,3)
axes1.set_ylabel("average")
axes1.plot(numpy.mean(data,axis=0))
axes2.set_ylabel("min")
axes2.plot(numpy.min(data,axis=0))
axes3.set_ylabel("max")
axes3.plot(numpy.max(data,axis=0))
fig.tight_layout()
matplotlib.pyplot.savefig(filename+'_fig.eps')
def detect_problems(f_name):
data = numpy.loadtxt(fname=f_name, delimiter=',')
if numpy.max(data, axis=0)[0] == 0 and numpy.max(data, axis=0)[20] == 20:
        print("Suspicious looking maxima")
    elif numpy.sum(numpy.min(data,axis=0)) == 0:
        print("Suspicious looking minima")
else:
print("OK")
name = sys.argv[1]
filenames = sorted(glob.glob(name+'*.csv'))
for f in filenames:
print(f)
analyze(f)
detect_problems(f)
|
[
"numpy.min",
"numpy.mean",
"numpy.max",
"numpy.loadtxt",
"glob.glob"
] |
[((130, 174), 'numpy.loadtxt', 'numpy.loadtxt', ([], {'fname': 'filename', 'delimiter': '""","""'}), "(fname=filename, delimiter=',')\n", (143, 174), False, 'import numpy\n'), ((667, 709), 'numpy.loadtxt', 'numpy.loadtxt', ([], {'fname': 'f_name', 'delimiter': '""","""'}), "(fname=f_name, delimiter=',')\n", (680, 709), False, 'import numpy\n'), ((999, 1024), 'glob.glob', 'glob.glob', (["(name + '*.csv')"], {}), "(name + '*.csv')\n", (1008, 1024), False, 'import glob\n'), ((385, 409), 'numpy.mean', 'numpy.mean', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (395, 409), False, 'import numpy\n'), ((454, 477), 'numpy.min', 'numpy.min', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (463, 477), False, 'import numpy\n'), ((522, 545), 'numpy.max', 'numpy.max', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (531, 545), False, 'import numpy\n'), ((717, 740), 'numpy.max', 'numpy.max', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (726, 740), False, 'import numpy\n'), ((753, 776), 'numpy.max', 'numpy.max', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (762, 776), False, 'import numpy\n'), ((849, 872), 'numpy.min', 'numpy.min', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (858, 872), False, 'import numpy\n')]
|
import graphene
from django.conf import settings
from django.utils.timezone import get_default_timezone
from opening_hours.utils.opening_hours_client import OpeningHoursClient
DEFAULT_TIMEZONE = get_default_timezone()
class TimeSpanType(graphene.ObjectType):
start_time = graphene.Time()
end_time = graphene.Time()
weekdays = graphene.List(graphene.Int)
resource_state = graphene.String()
end_time_on_next_day = graphene.Boolean()
name_fi = graphene.String()
name_en = graphene.String()
name_sv = graphene.String()
description_fi = graphene.String()
description_en = graphene.String()
description_sv = graphene.String()
def resolve_start_time(self, info):
if not self.start_time:
return None
tzinfo = self.start_time.tzinfo or DEFAULT_TIMEZONE
start = tzinfo.localize(self.start_time)
return start
def resolve_end_time(self, info):
if not self.end_time:
return None
        tzinfo = self.end_time.tzinfo or DEFAULT_TIMEZONE
end = tzinfo.localize(self.end_time)
return end
class PeriodType(graphene.ObjectType):
period_id = graphene.Int()
start_date = graphene.Date()
end_date = graphene.Date()
resource_state = graphene.String()
time_spans = graphene.List(TimeSpanType)
name_fi = graphene.String()
name_en = graphene.String()
name_sv = graphene.String()
description_fi = graphene.String()
description_en = graphene.String()
description_sv = graphene.String()
class OpeningTimesType(graphene.ObjectType):
date = graphene.Date()
start_time = graphene.Time()
end_time = graphene.Time()
state = graphene.String()
periods = graphene.List(graphene.Int)
def resolve_date(self, info):
return self.date
def resolve_start_time(self, info):
if not self.start_time:
return None
tzinfo = self.start_time.tzinfo or DEFAULT_TIMEZONE
start = tzinfo.localize(self.start_time)
return start
def resolve_end_time(self, info):
if not self.end_time:
return None
        tzinfo = self.end_time.tzinfo or DEFAULT_TIMEZONE
end = tzinfo.localize(self.end_time)
return end
def resolve_periods(self, info, **kwargs):
return self.periods
class OpeningHoursType(graphene.ObjectType):
opening_times = graphene.List(OpeningTimesType)
opening_time_periods = graphene.List(PeriodType)
class OpeningHoursMixin:
hauki_origin_id = settings.HAUKI_ORIGIN_ID
opening_hours = graphene.Field(
OpeningHoursType,
opening_times=graphene.Boolean(),
periods=graphene.Boolean(),
start_date=graphene.Date(),
end_date=graphene.Date(),
)
def resolve_opening_hours(self, info, **kwargs):
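        # fetch opening times and/or periods for the requested date range from the Hauki client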
start = kwargs.get("start_date")
end = kwargs.get("end_date")
init_periods = kwargs.get("periods", False)
init_times = kwargs.get("opening_times", False)
if not (start and end):
init_times = False
opening_hours_client = OpeningHoursClient(
self.hauki_resource_origin_id,
start,
end,
single=True,
init_periods=init_periods,
init_opening_hours=init_times,
hauki_origin_id=self.hauki_resource_data_source_id,
)
return_object = OpeningHoursType()
if init_times:
hours = opening_hours_client.get_opening_hours_for_date_range(
str(self.hauki_resource_origin_id), start, end
)
opening_hours = []
for date, times in hours.items():
for time in times:
oh = OpeningTimesType(
date=date,
start_time=time.start_time.time(),
end_time=time.end_time.time(),
state=time.resource_state,
periods=time.periods,
)
opening_hours.append(oh)
return_object.opening_times = opening_hours
if init_periods:
periods = []
for period in opening_hours_client.get_resource_periods(
str(self.hauki_resource_origin_id)
):
time_spans = []
for time_span in period.time_spans:
time_spans.append(
TimeSpanType(
start_time=time_span.start_time,
end_time=time_span.end_time,
resource_state=time_span.resource_state,
weekdays=time_span.weekdays,
name_fi=time_span.name.get("fi"),
name_sv=time_span.name.get("sv"),
name_en=time_span.name.get("en"),
description_fi=time_span.description.get("fi"),
description_sv=time_span.description.get("sv"),
description_en=time_span.description.get("en"),
)
)
periods.append(
PeriodType(
period_id=period.id,
start_date=period.start_date,
end_date=period.end_date,
time_spans=time_spans,
name_fi=period.name.get("fi"),
name_sv=period.name.get("sv"),
name_en=period.name.get("en"),
description_fi=period.description.get("fi"),
description_sv=period.description.get("sv"),
description_en=period.description.get("en"),
)
)
return_object.opening_time_periods = periods
return return_object
|
[
"graphene.List",
"graphene.String",
"graphene.Boolean",
"opening_hours.utils.opening_hours_client.OpeningHoursClient",
"django.utils.timezone.get_default_timezone",
"graphene.Date",
"graphene.Int",
"graphene.Time"
] |
[((197, 219), 'django.utils.timezone.get_default_timezone', 'get_default_timezone', ([], {}), '()\n', (217, 219), False, 'from django.utils.timezone import get_default_timezone\n'), ((280, 295), 'graphene.Time', 'graphene.Time', ([], {}), '()\n', (293, 295), False, 'import graphene\n'), ((311, 326), 'graphene.Time', 'graphene.Time', ([], {}), '()\n', (324, 326), False, 'import graphene\n'), ((342, 369), 'graphene.List', 'graphene.List', (['graphene.Int'], {}), '(graphene.Int)\n', (355, 369), False, 'import graphene\n'), ((391, 408), 'graphene.String', 'graphene.String', ([], {}), '()\n', (406, 408), False, 'import graphene\n'), ((436, 454), 'graphene.Boolean', 'graphene.Boolean', ([], {}), '()\n', (452, 454), False, 'import graphene\n'), ((469, 486), 'graphene.String', 'graphene.String', ([], {}), '()\n', (484, 486), False, 'import graphene\n'), ((501, 518), 'graphene.String', 'graphene.String', ([], {}), '()\n', (516, 518), False, 'import graphene\n'), ((533, 550), 'graphene.String', 'graphene.String', ([], {}), '()\n', (548, 550), False, 'import graphene\n'), ((572, 589), 'graphene.String', 'graphene.String', ([], {}), '()\n', (587, 589), False, 'import graphene\n'), ((611, 628), 'graphene.String', 'graphene.String', ([], {}), '()\n', (626, 628), False, 'import graphene\n'), ((650, 667), 'graphene.String', 'graphene.String', ([], {}), '()\n', (665, 667), False, 'import graphene\n'), ((1171, 1185), 'graphene.Int', 'graphene.Int', ([], {}), '()\n', (1183, 1185), False, 'import graphene\n'), ((1203, 1218), 'graphene.Date', 'graphene.Date', ([], {}), '()\n', (1216, 1218), False, 'import graphene\n'), ((1234, 1249), 'graphene.Date', 'graphene.Date', ([], {}), '()\n', (1247, 1249), False, 'import graphene\n'), ((1271, 1288), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1286, 1288), False, 'import graphene\n'), ((1306, 1333), 'graphene.List', 'graphene.List', (['TimeSpanType'], {}), '(TimeSpanType)\n', (1319, 1333), False, 'import graphene\n'), ((1348, 1365), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1363, 1365), False, 'import graphene\n'), ((1380, 1397), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1395, 1397), False, 'import graphene\n'), ((1412, 1429), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1427, 1429), False, 'import graphene\n'), ((1451, 1468), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1466, 1468), False, 'import graphene\n'), ((1490, 1507), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1505, 1507), False, 'import graphene\n'), ((1529, 1546), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1544, 1546), False, 'import graphene\n'), ((1605, 1620), 'graphene.Date', 'graphene.Date', ([], {}), '()\n', (1618, 1620), False, 'import graphene\n'), ((1638, 1653), 'graphene.Time', 'graphene.Time', ([], {}), '()\n', (1651, 1653), False, 'import graphene\n'), ((1669, 1684), 'graphene.Time', 'graphene.Time', ([], {}), '()\n', (1682, 1684), False, 'import graphene\n'), ((1697, 1714), 'graphene.String', 'graphene.String', ([], {}), '()\n', (1712, 1714), False, 'import graphene\n'), ((1729, 1756), 'graphene.List', 'graphene.List', (['graphene.Int'], {}), '(graphene.Int)\n', (1742, 1756), False, 'import graphene\n'), ((2406, 2437), 'graphene.List', 'graphene.List', (['OpeningTimesType'], {}), '(OpeningTimesType)\n', (2419, 2437), False, 'import graphene\n'), ((2465, 2490), 'graphene.List', 'graphene.List', (['PeriodType'], {}), '(PeriodType)\n', (2478, 2490), False, 'import graphene\n'), ((3117, 3309), 
'opening_hours.utils.opening_hours_client.OpeningHoursClient', 'OpeningHoursClient', (['self.hauki_resource_origin_id', 'start', 'end'], {'single': '(True)', 'init_periods': 'init_periods', 'init_opening_hours': 'init_times', 'hauki_origin_id': 'self.hauki_resource_data_source_id'}), '(self.hauki_resource_origin_id, start, end, single=True,\n init_periods=init_periods, init_opening_hours=init_times,\n hauki_origin_id=self.hauki_resource_data_source_id)\n', (3135, 3309), False, 'from opening_hours.utils.opening_hours_client import OpeningHoursClient\n'), ((2650, 2668), 'graphene.Boolean', 'graphene.Boolean', ([], {}), '()\n', (2666, 2668), False, 'import graphene\n'), ((2686, 2704), 'graphene.Boolean', 'graphene.Boolean', ([], {}), '()\n', (2702, 2704), False, 'import graphene\n'), ((2725, 2740), 'graphene.Date', 'graphene.Date', ([], {}), '()\n', (2738, 2740), False, 'import graphene\n'), ((2759, 2774), 'graphene.Date', 'graphene.Date', ([], {}), '()\n', (2772, 2774), False, 'import graphene\n')]
|
#!/usr/bin/python
from commands import getstatusoutput
import sys
W = '\033[0m' # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
P = '\033[35m' # purple
format_file_type = [
".c",
".h",
".cpp"
]
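# list the files touched by the most recent commit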
status, output = getstatusoutput("git show --pretty=\"\" --name-only")
lines = output.split("\n")
files = [line for line in lines if len(line) != 0]
for file_name in files:
needs = False
for ext in format_file_type:
if file_name.endswith(ext):
            needs = True
if not needs:
continue
cmd = "git-clang-format --commit HEAD~1 --style=file " + file_name
sys.stdout.write(cmd)
status, output = getstatusoutput(cmd)
    sys.stdout.write(" -- ")
if status != 0:
print(R + "FAIL" + W)
else:
print(G + "DONE" + W)
|
[
"sys.stdout.write",
"commands.getstatusoutput"
] |
[((304, 355), 'commands.getstatusoutput', 'getstatusoutput', (['"""git show --pretty="" --name-only"""'], {}), '(\'git show --pretty="" --name-only\')\n', (319, 355), False, 'from commands import getstatusoutput\n'), ((731, 752), 'sys.stdout.write', 'sys.stdout.write', (['cmd'], {}), '(cmd)\n', (747, 752), False, 'import sys\n'), ((778, 798), 'commands.getstatusoutput', 'getstatusoutput', (['cmd'], {}), '(cmd)\n', (793, 798), False, 'from commands import getstatusoutput\n'), ((807, 831), 'sys.stdout.write', 'sys.stdout.write', (['""" -- """'], {}), "(' -- ')\n", (823, 831), False, 'import sys\n')]
|
from django.db import models
from billing.models import BillingProfile
ADDRESS_TYPES = (
('billing', 'Billing'),
('shipping', 'Shipping'),
)
class Addresses(models.Model):
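    """A billing or shipping address tied to a billing profile."""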
billing_profile = models.ForeignKey(BillingProfile, on_delete=models.DO_NOTHING)
address_type = models.CharField(max_length=120, choices=ADDRESS_TYPES)
address_line_1 = models.CharField(max_length=120)
address_line_2 = models.CharField(max_length=120, null=True, blank=True)
city = models.CharField(max_length=120)
country = models.CharField(max_length=120, default='Nigeria')
state = models.CharField(max_length=120)
postal_code = models.CharField(max_length=120)
def __str__(self):
return str(self.billing_profile)
def get_address(self):
return f"{self.address_line_1}\n{self.address_line_2 or ''}\n{self.city}\n{self.state}, {self.postal_code}\n{self.country}"
|
[
"django.db.models.ForeignKey",
"django.db.models.CharField"
] |
[((205, 267), 'django.db.models.ForeignKey', 'models.ForeignKey', (['BillingProfile'], {'on_delete': 'models.DO_NOTHING'}), '(BillingProfile, on_delete=models.DO_NOTHING)\n', (222, 267), False, 'from django.db import models\n'), ((290, 345), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)', 'choices': 'ADDRESS_TYPES'}), '(max_length=120, choices=ADDRESS_TYPES)\n', (306, 345), False, 'from django.db import models\n'), ((368, 400), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (384, 400), False, 'from django.db import models\n'), ((423, 478), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)', 'null': '(True)', 'blank': '(True)'}), '(max_length=120, null=True, blank=True)\n', (439, 478), False, 'from django.db import models\n'), ((501, 533), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (517, 533), False, 'from django.db import models\n'), ((556, 607), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)', 'default': '"""Nigeria"""'}), "(max_length=120, default='Nigeria')\n", (572, 607), False, 'from django.db import models\n'), ((630, 662), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (646, 662), False, 'from django.db import models\n'), ((685, 717), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (701, 717), False, 'from django.db import models\n')]
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import logging
from collections import defaultdict
import pytest
from datadog_checks.dev.testing import requires_windows
try:
from datadog_test_libs.win.pdh_mocks import ( # noqa: F401
initialize_pdh_tests,
pdh_mocks_fixture,
pdh_mocks_fixture_bad_perf_strings,
)
from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter
except ImportError:
import platform
if platform.system() != 'Windows':
pass
logger = logging.getLogger(__file__)
'''
WinPDHCounter tests.
Test specific behavior of the WinPDHCounter class, which provides
the interface to the OS API.
'''
@requires_windows
def test_winpdhcounter_bad_strings_english(pdh_mocks_fixture_bad_perf_strings): # noqa F811
initialize_pdh_tests()
counter = WinPDHCounter('System', 'Processor Queue Length', logger)
vals = counter.get_all_values()
assert len(vals) == 1 # single instance key, should only have one value
assert SINGLE_INSTANCE_KEY in vals
@requires_windows
def test_winpdhcounter_throws_on_bad_input(pdh_mocks_fixture): # noqa F811
initialize_pdh_tests()
with pytest.raises(AttributeError):
WinPDHCounter('Ssystem', 'Processor Queue Length', logger)
with pytest.raises(AttributeError):
WinPDHCounter('System', 'PProcessor Queue Length', logger)
@requires_windows
def test_winpdhcounter_throws_on_bad_input_with_bad_strings(pdh_mocks_fixture_bad_perf_strings): # noqa F811
initialize_pdh_tests()
with pytest.raises(AttributeError):
WinPDHCounter('Ssystem', 'Processor Queue Length', logger)
with pytest.raises(AttributeError):
WinPDHCounter('System', 'PProcessor Queue Length', logger)
@requires_windows
def test_winpdhcounter_bad_strings_not_english(pdh_mocks_fixture_bad_perf_strings): # noqa F811
WinPDHCounter._use_en_counter_names = False
WinPDHCounter.pdh_counter_dict = defaultdict(list)
initialize_pdh_tests(lang="se-sv")
'''
expectation is that the initialization will fail. We attempt to fall
back to english counters if the strings database isn't present; however,
on non-english windows the english counters won't work
'''
with pytest.raises(AttributeError):
WinPDHCounter('System', 'Processor Queue Length', logger)
@requires_windows
def test_winpdhcounter_non_english(pdh_mocks_fixture): # noqa F811
WinPDHCounter._use_en_counter_names = False
WinPDHCounter.pdh_counter_dict = defaultdict(list)
initialize_pdh_tests(lang="se-sv")
counter = WinPDHCounter('System', 'Processor Queue Length', logger)
vals = counter.get_all_values()
assert len(vals) == 1 # single instance key, should only have one value
assert SINGLE_INSTANCE_KEY in vals
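# Hedged usage sketch (added; mirrors the calls the tests above exercise):
#     counter = WinPDHCounter('System', 'Processor Queue Length', logger)
#     vals = counter.get_all_values()   # {SINGLE_INSTANCE_KEY: <current queue length>}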
|
[
"datadog_checks.checks.win.winpdh.WinPDHCounter",
"datadog_test_libs.win.pdh_mocks.initialize_pdh_tests",
"collections.defaultdict",
"pytest.raises",
"platform.system",
"logging.getLogger"
] |
[((603, 630), 'logging.getLogger', 'logging.getLogger', (['__file__'], {}), '(__file__)\n', (620, 630), False, 'import logging\n'), ((875, 897), 'datadog_test_libs.win.pdh_mocks.initialize_pdh_tests', 'initialize_pdh_tests', ([], {}), '()\n', (895, 897), False, 'from datadog_test_libs.win.pdh_mocks import initialize_pdh_tests, pdh_mocks_fixture, pdh_mocks_fixture_bad_perf_strings\n'), ((912, 969), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""System"""', '"""Processor Queue Length"""', 'logger'], {}), "('System', 'Processor Queue Length', logger)\n", (925, 969), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((1223, 1245), 'datadog_test_libs.win.pdh_mocks.initialize_pdh_tests', 'initialize_pdh_tests', ([], {}), '()\n', (1243, 1245), False, 'from datadog_test_libs.win.pdh_mocks import initialize_pdh_tests, pdh_mocks_fixture, pdh_mocks_fixture_bad_perf_strings\n'), ((1595, 1617), 'datadog_test_libs.win.pdh_mocks.initialize_pdh_tests', 'initialize_pdh_tests', ([], {}), '()\n', (1615, 1617), False, 'from datadog_test_libs.win.pdh_mocks import initialize_pdh_tests, pdh_mocks_fixture, pdh_mocks_fixture_bad_perf_strings\n'), ((2035, 2052), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2046, 2052), False, 'from collections import defaultdict\n'), ((2058, 2092), 'datadog_test_libs.win.pdh_mocks.initialize_pdh_tests', 'initialize_pdh_tests', ([], {'lang': '"""se-sv"""'}), "(lang='se-sv')\n", (2078, 2092), False, 'from datadog_test_libs.win.pdh_mocks import initialize_pdh_tests, pdh_mocks_fixture, pdh_mocks_fixture_bad_perf_strings\n'), ((2598, 2615), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2609, 2615), False, 'from collections import defaultdict\n'), ((2620, 2654), 'datadog_test_libs.win.pdh_mocks.initialize_pdh_tests', 'initialize_pdh_tests', ([], {'lang': '"""se-sv"""'}), "(lang='se-sv')\n", (2640, 2654), False, 'from datadog_test_libs.win.pdh_mocks import initialize_pdh_tests, pdh_mocks_fixture, pdh_mocks_fixture_bad_perf_strings\n'), ((2669, 2726), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""System"""', '"""Processor Queue Length"""', 'logger'], {}), "('System', 'Processor Queue Length', logger)\n", (2682, 2726), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((1255, 1284), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1268, 1284), False, 'import pytest\n'), ((1294, 1352), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""Ssystem"""', '"""Processor Queue Length"""', 'logger'], {}), "('Ssystem', 'Processor Queue Length', logger)\n", (1307, 1352), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((1363, 1392), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1376, 1392), False, 'import pytest\n'), ((1402, 1460), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""System"""', '"""PProcessor Queue Length"""', 'logger'], {}), "('System', 'PProcessor Queue Length', logger)\n", (1415, 1460), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((1627, 1656), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1640, 1656), False, 'import pytest\n'), ((1666, 1724), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""Ssystem"""', '"""Processor Queue Length"""', 'logger'], {}), "('Ssystem', 'Processor Queue Length', logger)\n", (1679, 1724), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((1735, 1764), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1748, 1764), False, 'import pytest\n'), ((1774, 1832), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""System"""', '"""PProcessor Queue Length"""', 'logger'], {}), "('System', 'PProcessor Queue Length', logger)\n", (1787, 1832), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((2328, 2357), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (2341, 2357), False, 'import pytest\n'), ((2367, 2424), 'datadog_checks.checks.win.winpdh.WinPDHCounter', 'WinPDHCounter', (['"""System"""', '"""Processor Queue Length"""', 'logger'], {}), "('System', 'Processor Queue Length', logger)\n", (2380, 2424), False, 'from datadog_checks.checks.win.winpdh import SINGLE_INSTANCE_KEY, WinPDHCounter\n'), ((548, 565), 'platform.system', 'platform.system', ([], {}), '()\n', (563, 565), False, 'import platform\n')]
|
import calendar
import datetime
from nevow import tags as t, url, itaglibrary, rend, static
_calendar_css = """
.calendar tbody td.today { background-color: #aaaaaa; }
"""
calendarCSS = t.style(type_="text/css")[_calendar_css]
calendarCSSFile = static.File(_calendar_css, "text/css")
class CalendarComponent(object):
current_date = None
def days(self, year, month):
def _(ctx, data):
return [[day and datetime.date(year, month, day) or None
for day in row]
for row in calendar.monthcalendar(year, month)]
return _
def render_calendarDay(self, ctx, data):
options = itaglibrary.ICalendarOptions(ctx, {})
today_class = options.get('today_class', 'today')
if data is None:
return ctx.tag['']
if self.current_date.day == data.day and \
self.current_date.month == data.month and \
self.current_date.year == data.year:
return ctx.tag(class_=today_class)[data.day]
return ctx.tag[data.day]
def calendar(self, ctx, data):
now = datetime.datetime.now()
self.current_date = now
month_delta = datetime.timedelta(31)
options = itaglibrary.ICalendarOptions(ctx, {})
strftime = options.get('strftime', '%b %d, %Y @ %I:%M %p')
width = options.get('width', 2)
prev = options.get('prev', None)
next = options.get('next', None)
base = options.get('base_url', None)
calendar_class = options.get('calendar_class', 'calendar')
if data is None:
d = now
current = d.year, d.month
elif isinstance(data, tuple):
year, month = data
d = datetime.date(year, month, 4)
current = data
elif isinstance(data, (datetime.date, datetime.datetime)):
d = data
current = d.year, d.month
if prev is None or next is None:
p = d - month_delta
n = d + month_delta
prev = p.year, p.month
next = n.year, n.month
if base is None:
u = url.URL.fromContext(ctx)
segments = u.pathList()
if segments[-1] == '':
u = u.up()
segments = segments[:-1]
if segments[-1].isdigit() and segments[-2].isdigit():
u = u.up().up()
prev_url = u
next_url = u
else:
prev_url = base
next_url = base
add_query_params = False
def buildUrl(u, el):
if add_query_params:
param_name, param_value = el
u = u.add(param_name, str(param_value))
else:
u = u.child(str(el))
return u
for el in prev:
if el == '?':
add_query_params = True
continue
prev_url = buildUrl(prev_url, el)
add_query_params = False
for el in next:
if el == '?':
add_query_params = True
continue
next_url = buildUrl(next_url, el)
else:
if isinstance(prev, (url.URL, url.URLOverlay)) and \
isinstance(next, (url.URL, url.URLOverlay)):
next_url = next
prev_url = prev
return t.table(class_=calendar_class)[
t.thead[
t.tr[
t.th(colspan="7")[
t.a(href=prev_url)[t.xml("←")],
t.xml(" "),
t.xml('-'.join([str(el) for el in current])),
t.xml(" "),
t.a(href=next_url)[t.xml("→")]
]
],
[
t.tr[[t.td[dayname] for dayname in calendar.weekheader(width).split()]]
]
],
t.tbody[
t.invisible(data=self.days(*current), render=rend.sequence)[
t.tr(pattern='item', render=rend.sequence)[
t.td(pattern='item', render=self.render_calendarDay)
]
]
],
t.tfoot[
t.tr[
t.td(colspan="7")[
now.strftime(strftime)
]
]
]
]
c = CalendarComponent()
cal = c.calendar
__all__ = ["cal", "CalendarComponent", "calendarCSS", "calendarCSSFile"]
|
[
"nevow.tags.td",
"nevow.tags.xml",
"nevow.tags.tr",
"nevow.tags.a",
"nevow.tags.table",
"calendar.monthcalendar",
"nevow.tags.style",
"datetime.date",
"calendar.weekheader",
"nevow.static.File",
"datetime.timedelta",
"nevow.tags.th",
"nevow.itaglibrary.ICalendarOptions",
"datetime.datetime.now",
"nevow.url.URL.fromContext"
] |
[((248, 286), 'nevow.static.File', 'static.File', (['_calendar_css', '"""text/css"""'], {}), "(_calendar_css, 'text/css')\n", (259, 286), False, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((189, 214), 'nevow.tags.style', 't.style', ([], {'type_': '"""text/css"""'}), "(type_='text/css')\n", (196, 214), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((664, 701), 'nevow.itaglibrary.ICalendarOptions', 'itaglibrary.ICalendarOptions', (['ctx', '{}'], {}), '(ctx, {})\n', (692, 701), False, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((1111, 1134), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1132, 1134), False, 'import datetime\n'), ((1189, 1211), 'datetime.timedelta', 'datetime.timedelta', (['(31)'], {}), '(31)\n', (1207, 1211), False, 'import datetime\n'), ((1230, 1267), 'nevow.itaglibrary.ICalendarOptions', 'itaglibrary.ICalendarOptions', (['ctx', '{}'], {}), '(ctx, {})\n', (1258, 1267), False, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3495, 3525), 'nevow.tags.table', 't.table', ([], {'class_': 'calendar_class'}), '(class_=calendar_class)\n', (3502, 3525), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((1737, 1766), 'datetime.date', 'datetime.date', (['year', 'month', '(4)'], {}), '(year, month, 4)\n', (1750, 1766), False, 'import datetime\n'), ((2145, 2169), 'nevow.url.URL.fromContext', 'url.URL.fromContext', (['ctx'], {}), '(ctx)\n', (2164, 2169), False, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((546, 581), 'calendar.monthcalendar', 'calendar.monthcalendar', (['year', 'month'], {}), '(year, month)\n', (568, 581), False, 'import calendar\n'), ((433, 464), 'datetime.date', 'datetime.date', (['year', 'month', 'day'], {}), '(year, month, day)\n', (446, 464), False, 'import datetime\n'), ((4124, 4166), 'nevow.tags.tr', 't.tr', ([], {'pattern': '"""item"""', 'render': 'rend.sequence'}), "(pattern='item', render=rend.sequence)\n", (4128, 4166), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((4189, 4241), 'nevow.tags.td', 't.td', ([], {'pattern': '"""item"""', 'render': 'self.render_calendarDay'}), "(pattern='item', render=self.render_calendarDay)\n", (4193, 4241), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((4352, 4369), 'nevow.tags.td', 't.td', ([], {'colspan': '"""7"""'}), "(colspan='7')\n", (4356, 4369), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3584, 3601), 'nevow.tags.th', 't.th', ([], {'colspan': '"""7"""'}), "(colspan='7')\n", (3588, 3601), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3678, 3688), 'nevow.tags.xml', 't.xml', (['""" """'], {}), "(' ')\n", (3683, 3688), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3774, 3784), 'nevow.tags.xml', 't.xml', (['""" """'], {}), "(' ')\n", (3779, 3784), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3622, 3640), 'nevow.tags.a', 't.a', ([], {'href': 'prev_url'}), '(href=prev_url)\n', (3625, 3640), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3641, 3656), 'nevow.tags.xml', 't.xml', (['"""←"""'], {}), "('←')\n", (3646, 3656), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3805, 3823), 'nevow.tags.a', 't.a', ([], {'href': 'next_url'}), '(href=next_url)\n', (3808, 3823), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3824, 3839), 'nevow.tags.xml', 't.xml', (['"""→"""'], {}), "('→')\n", (3829, 3839), True, 'from nevow import tags as t, url, itaglibrary, rend, static\n'), ((3943, 3969), 'calendar.weekheader', 'calendar.weekheader', (['width'], {}), '(width)\n', (3962, 3969), False, 'import calendar\n')]
|
import numpy as np
import pytest
import numpy.testing as npt
from lenstronomy.Util import util
import lenstronomy.Util.param_util as param_util
def test_cart2polar():
#singel 2d coordinate transformation
center_x, center_y = 0, 0
x = 1
y = 1
r, phi = param_util.cart2polar(x, y, center_x, center_y)
assert r == np.sqrt(2) #radial part
assert phi == np.arctan(1)
#array of 2d coordinates
x = np.array([1, 2])
y = np.array([1, 1])
r, phi = param_util.cart2polar(x, y, center_x, center_y)
assert r[0] == np.sqrt(2) #radial part
assert phi[0] == np.arctan(1)
def test_polar2cart():
#singel 2d coordinate transformation
center = np.array([0,0])
r = 1
phi = np.pi
x, y = param_util.polar2cart(r, phi, center)
assert x == -1
assert abs(y) < 10e-14
def test_phi_q2_ellipticity():
phi, q = 0, 1
e1,e2 = param_util.phi_q2_ellipticity(phi, q)
assert e1 == 0
assert e2 == 0
phi, q = 1, 1
e1,e2 = param_util.phi_q2_ellipticity(phi, q)
assert e1 == 0
assert e2 == 0
phi, q = 2.,0.95
e1, e2 = param_util.phi_q2_ellipticity(phi, q)
assert e1 == -0.016760092842656733
assert e2 == -0.019405192187382792
phi, q = 0, 0.9
e1, e2 = param_util.phi_q2_ellipticity(phi, q)
assert e1 == 0.05263157894736841
assert e2 == 0
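# Added sanity note (not part of the original test file): the expected values above
# match the standard ellipticity definition
#     e1 = (1 - q) / (1 + q) * cos(2 * phi),   e2 = (1 - q) / (1 + q) * sin(2 * phi)
# e.g. phi=0, q=0.9 gives e1 = 0.1 / 1.9 = 0.05263157894736841 and e2 = 0.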
def test_ellipticity2phi_q():
e1, e2 = 0.3,0
phi,q = param_util.ellipticity2phi_q(e1, e2)
assert phi == 0
assert q == 0.53846153846153844
# Works on np arrays as well
e1 = np.array([0.3, 0.9])
e2 = np.array([0.0, 0.9 ])
phi, q = param_util.ellipticity2phi_q(e1, e2)
assert np.allclose(phi, [0.0, 0.39269908], atol=1.e-08)
assert np.allclose(q, [0.53846153, 5.00025001e-05], atol=1.e-08)
def test_ellipticity2phi_q_symmetry():
phi,q = 1.5, 0.8
e1,e2 = param_util.phi_q2_ellipticity(phi, q)
phi_new,q_new = param_util.ellipticity2phi_q(e1, e2)
assert phi == phi_new
assert q == q_new
phi,q = -1.5, 0.8
e1,e2 = param_util.phi_q2_ellipticity(phi, q)
phi_new,q_new = param_util.ellipticity2phi_q(e1, e2)
assert phi == phi_new
assert q == q_new
e1, e2 = 0.1, -0.1
phi, q = param_util.ellipticity2phi_q(e1, e2)
e1_new, e2_new = param_util.phi_q2_ellipticity(phi, q)
npt.assert_almost_equal(e1, e1_new, decimal=10)
npt.assert_almost_equal(e2, e2_new, decimal=10)
e1, e2 = 2.99, -0.0
phi, q = param_util.ellipticity2phi_q(e1, e2)
print(phi, q)
e1_new, e2_new = param_util.phi_q2_ellipticity(phi, q)
phi_new, q_new = param_util.ellipticity2phi_q(e1_new, e2_new)
npt.assert_almost_equal(phi, phi_new, decimal=10)
npt.assert_almost_equal(q, q_new, decimal=10)
#npt.assert_almost_equal(e1, e1_new, decimal=10)
#npt.assert_almost_equal(e2, e2_new, decimal=10)
def test_transform_e1e2():
e1 = 0.01
e2 = 0.
x = 0.
y = 1.
x_, y_ = param_util.transform_e1e2(x, y, e1, e2, center_x=0, center_y=0)
x_new = (1-e1) * x - e2 * y
y_new = -e2 * x + (1 + e1) * y
det = np.sqrt((1 - e1) * (1 + e1) + e2 ** 2)
npt.assert_almost_equal(x_, x_new / det, decimal=5)
npt.assert_almost_equal(y_, y_new / det, decimal=5)
def test_phi_gamma_ellipticity():
phi = -1.
gamma = 0.1
e1, e2 = param_util.shear_polar2cartesian(phi, gamma)
print(e1, e2, 'e1, e2')
phi_out, gamma_out = param_util.shear_cartesian2polar(e1, e2)
assert phi == phi_out
assert gamma == gamma_out
def test_phi_gamma_ellipticity_2():
e1, e2 = -0.04, -0.01
phi, gamma = param_util.shear_cartesian2polar(e1, e2)
e1_out, e2_out = param_util.shear_polar2cartesian(phi, gamma)
npt.assert_almost_equal(e1, e1_out, decimal=10)
npt.assert_almost_equal(e2, e2_out, decimal=10)
def test_displace_eccentricity():
#x, y = np.array([1, 0]), np.array([0, 1])
x, y = util.make_grid(numPix=10, deltapix=1)
e1 = 0.1#.1
e2 = -0#.1
center_x, center_y = 0, 0
x_, y_ = param_util.transform_e1e2(x, y, e1, e2, center_x=center_x, center_y=center_y)
phi_G, q = param_util.ellipticity2phi_q(e1, e2)
x_shift = x - center_x
y_shift = y - center_y
cos_phi = np.cos(phi_G)
sin_phi = np.sin(phi_G)
print(cos_phi, sin_phi)
xt1 = cos_phi * x_shift + sin_phi * y_shift
xt2 = -sin_phi * x_shift + cos_phi * y_shift
xt1 *= np.sqrt(q)
xt2 /= np.sqrt(q)
npt.assert_almost_equal(x_, xt1, decimal=8)
npt.assert_almost_equal(y_, xt2, decimal=8)
x, y = util.make_grid(numPix=10, deltapix=1)
x, y = np.array([1, 0]), np.array([0, 1])
e1 = 0.1#.1#.1
e2 = 0
center_x, center_y = 0, 0
x_, y_ = param_util.transform_e1e2(x, y, e1, e2, center_x=center_x, center_y=center_y)
phi_G, q = param_util.ellipticity2phi_q(e1, e2)
x_shift = x - center_x
y_shift = y - center_y
cos_phi = np.cos(phi_G)
sin_phi = np.sin(phi_G)
print(cos_phi, sin_phi)
xt1 = cos_phi * x_shift + sin_phi * y_shift
xt2 = -sin_phi * x_shift + cos_phi * y_shift
xt1 *= np.sqrt(q)
xt2 /= np.sqrt(q)
npt.assert_almost_equal(x_, xt1, decimal=8)
npt.assert_almost_equal(y_, xt2, decimal=8)
if __name__ == '__main__':
pytest.main()
|
[
"lenstronomy.Util.param_util.cart2polar",
"lenstronomy.Util.param_util.shear_polar2cartesian",
"numpy.testing.assert_almost_equal",
"numpy.allclose",
"lenstronomy.Util.util.make_grid",
"lenstronomy.Util.param_util.shear_cartesian2polar",
"pytest.main",
"numpy.sin",
"numpy.array",
"numpy.cos",
"lenstronomy.Util.param_util.polar2cart",
"numpy.arctan",
"lenstronomy.Util.param_util.transform_e1e2",
"lenstronomy.Util.param_util.ellipticity2phi_q",
"lenstronomy.Util.param_util.phi_q2_ellipticity",
"numpy.sqrt"
] |
[((273, 320), 'lenstronomy.Util.param_util.cart2polar', 'param_util.cart2polar', (['x', 'y', 'center_x', 'center_y'], {}), '(x, y, center_x, center_y)\n', (294, 320), True, 'import lenstronomy.Util.param_util as param_util\n'), ((429, 445), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (437, 445), True, 'import numpy as np\n'), ((454, 470), 'numpy.array', 'np.array', (['[1, 1]'], {}), '([1, 1])\n', (462, 470), True, 'import numpy as np\n'), ((485, 532), 'lenstronomy.Util.param_util.cart2polar', 'param_util.cart2polar', (['x', 'y', 'center_x', 'center_y'], {}), '(x, y, center_x, center_y)\n', (506, 532), True, 'import lenstronomy.Util.param_util as param_util\n'), ((689, 705), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (697, 705), True, 'import numpy as np\n'), ((742, 779), 'lenstronomy.Util.param_util.polar2cart', 'param_util.polar2cart', (['r', 'phi', 'center'], {}), '(r, phi, center)\n', (763, 779), True, 'import lenstronomy.Util.param_util as param_util\n'), ((889, 926), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (918, 926), True, 'import lenstronomy.Util.param_util as param_util\n'), ((996, 1033), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (1025, 1033), True, 'import lenstronomy.Util.param_util as param_util\n'), ((1107, 1144), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (1136, 1144), True, 'import lenstronomy.Util.param_util as param_util\n'), ((1257, 1294), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (1286, 1294), True, 'import lenstronomy.Util.param_util as param_util\n'), ((1414, 1450), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (1442, 1450), True, 'import lenstronomy.Util.param_util as param_util\n'), ((1550, 1570), 'numpy.array', 'np.array', (['[0.3, 0.9]'], {}), '([0.3, 0.9])\n', (1558, 1570), True, 'import numpy as np\n'), ((1580, 1600), 'numpy.array', 'np.array', (['[0.0, 0.9]'], {}), '([0.0, 0.9])\n', (1588, 1600), True, 'import numpy as np\n'), ((1615, 1651), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (1643, 1651), True, 'import lenstronomy.Util.param_util as param_util\n'), ((1663, 1710), 'numpy.allclose', 'np.allclose', (['phi', '[0.0, 0.39269908]'], {'atol': '(1e-08)'}), '(phi, [0.0, 0.39269908], atol=1e-08)\n', (1674, 1710), True, 'import numpy as np\n'), ((1723, 1779), 'numpy.allclose', 'np.allclose', (['q', '[0.53846153, 5.00025001e-05]'], {'atol': '(1e-08)'}), '(q, [0.53846153, 5.00025001e-05], atol=1e-08)\n', (1734, 1779), True, 'import numpy as np\n'), ((1854, 1891), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (1883, 1891), True, 'import lenstronomy.Util.param_util as param_util\n'), ((1912, 1948), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (1940, 1948), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2032, 2069), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (2061, 2069), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2090, 2126), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (2118, 2126), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2212, 2248), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (2240, 2248), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2270, 2307), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (2299, 2307), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2312, 2359), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['e1', 'e1_new'], {'decimal': '(10)'}), '(e1, e1_new, decimal=10)\n', (2335, 2359), True, 'import numpy.testing as npt\n'), ((2364, 2411), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['e2', 'e2_new'], {'decimal': '(10)'}), '(e2, e2_new, decimal=10)\n', (2387, 2411), True, 'import numpy.testing as npt\n'), ((2450, 2486), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (2478, 2486), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2526, 2563), 'lenstronomy.Util.param_util.phi_q2_ellipticity', 'param_util.phi_q2_ellipticity', (['phi', 'q'], {}), '(phi, q)\n', (2555, 2563), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2585, 2629), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1_new', 'e2_new'], {}), '(e1_new, e2_new)\n', (2613, 2629), True, 'import lenstronomy.Util.param_util as param_util\n'), ((2634, 2683), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['phi', 'phi_new'], {'decimal': '(10)'}), '(phi, phi_new, decimal=10)\n', (2657, 2683), True, 'import numpy.testing as npt\n'), ((2688, 2733), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['q', 'q_new'], {'decimal': '(10)'}), '(q, q_new, decimal=10)\n', (2711, 2733), True, 'import numpy.testing as npt\n'), ((2930, 2993), 'lenstronomy.Util.param_util.transform_e1e2', 'param_util.transform_e1e2', (['x', 'y', 'e1', 'e2'], {'center_x': '(0)', 'center_y': '(0)'}), '(x, y, e1, e2, center_x=0, center_y=0)\n', (2955, 2993), True, 'import lenstronomy.Util.param_util as param_util\n'), ((3071, 3109), 'numpy.sqrt', 'np.sqrt', (['((1 - e1) * (1 + e1) + e2 ** 2)'], {}), '((1 - e1) * (1 + e1) + e2 ** 2)\n', (3078, 3109), True, 'import numpy as np\n'), ((3114, 3165), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['x_', '(x_new / det)'], {'decimal': '(5)'}), '(x_, x_new / det, decimal=5)\n', (3137, 3165), True, 'import numpy.testing as npt\n'), ((3170, 3221), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['y_', '(y_new / det)'], {'decimal': '(5)'}), '(y_, y_new / det, decimal=5)\n', (3193, 3221), True, 'import numpy.testing as npt\n'), ((3301, 3345), 'lenstronomy.Util.param_util.shear_polar2cartesian', 'param_util.shear_polar2cartesian', (['phi', 'gamma'], {}), '(phi, gamma)\n', (3333, 3345), True, 'import lenstronomy.Util.param_util as param_util\n'), ((3399, 3439), 'lenstronomy.Util.param_util.shear_cartesian2polar', 'param_util.shear_cartesian2polar', (['e1', 'e2'], {}), '(e1, e2)\n', (3431, 3439), True, 'import lenstronomy.Util.param_util as param_util\n'), ((3577, 3617), 'lenstronomy.Util.param_util.shear_cartesian2polar', 'param_util.shear_cartesian2polar', (['e1', 'e2'], {}), '(e1, e2)\n', (3609, 3617), True, 'import lenstronomy.Util.param_util as param_util\n'), ((3640, 3684), 'lenstronomy.Util.param_util.shear_polar2cartesian', 'param_util.shear_polar2cartesian', (['phi', 'gamma'], {}), '(phi, gamma)\n', (3672, 3684), True, 'import lenstronomy.Util.param_util as param_util\n'), ((3689, 3736), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['e1', 'e1_out'], {'decimal': '(10)'}), '(e1, e1_out, decimal=10)\n', (3712, 3736), True, 'import numpy.testing as npt\n'), ((3741, 3788), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['e2', 'e2_out'], {'decimal': '(10)'}), '(e2, e2_out, decimal=10)\n', (3764, 3788), True, 'import numpy.testing as npt\n'), ((3883, 3920), 'lenstronomy.Util.util.make_grid', 'util.make_grid', ([], {'numPix': '(10)', 'deltapix': '(1)'}), '(numPix=10, deltapix=1)\n', (3897, 3920), False, 'from lenstronomy.Util import util\n'), ((3995, 4072), 'lenstronomy.Util.param_util.transform_e1e2', 'param_util.transform_e1e2', (['x', 'y', 'e1', 'e2'], {'center_x': 'center_x', 'center_y': 'center_y'}), '(x, y, e1, e2, center_x=center_x, center_y=center_y)\n', (4020, 4072), True, 'import lenstronomy.Util.param_util as param_util\n'), ((4089, 4125), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (4117, 4125), True, 'import lenstronomy.Util.param_util as param_util\n'), ((4195, 4208), 'numpy.cos', 'np.cos', (['phi_G'], {}), '(phi_G)\n', (4201, 4208), True, 'import numpy as np\n'), ((4223, 4236), 'numpy.sin', 'np.sin', (['phi_G'], {}), '(phi_G)\n', (4229, 4236), True, 'import numpy as np\n'), ((4374, 4384), 'numpy.sqrt', 'np.sqrt', (['q'], {}), '(q)\n', (4381, 4384), True, 'import numpy as np\n'), ((4396, 4406), 'numpy.sqrt', 'np.sqrt', (['q'], {}), '(q)\n', (4403, 4406), True, 'import numpy as np\n'), ((4411, 4454), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['x_', 'xt1'], {'decimal': '(8)'}), '(x_, xt1, decimal=8)\n', (4434, 4454), True, 'import numpy.testing as npt\n'), ((4459, 4502), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['y_', 'xt2'], {'decimal': '(8)'}), '(y_, xt2, decimal=8)\n', (4482, 4502), True, 'import numpy.testing as npt\n'), ((4516, 4553), 'lenstronomy.Util.util.make_grid', 'util.make_grid', ([], {'numPix': '(10)', 'deltapix': '(1)'}), '(numPix=10, deltapix=1)\n', (4530, 4553), False, 'from lenstronomy.Util import util\n'), ((4673, 4750), 'lenstronomy.Util.param_util.transform_e1e2', 'param_util.transform_e1e2', (['x', 'y', 'e1', 'e2'], {'center_x': 'center_x', 'center_y': 'center_y'}), '(x, y, e1, e2, center_x=center_x, center_y=center_y)\n', (4698, 4750), True, 'import lenstronomy.Util.param_util as param_util\n'), ((4767, 4803), 'lenstronomy.Util.param_util.ellipticity2phi_q', 'param_util.ellipticity2phi_q', (['e1', 'e2'], {}), '(e1, e2)\n', (4795, 4803), True, 'import lenstronomy.Util.param_util as param_util\n'), ((4873, 4886), 'numpy.cos', 'np.cos', (['phi_G'], {}), '(phi_G)\n', (4879, 4886), True, 'import numpy as np\n'), ((4901, 4914), 'numpy.sin', 'np.sin', (['phi_G'], {}), '(phi_G)\n', (4907, 4914), True, 'import numpy as np\n'), ((5052, 5062), 'numpy.sqrt', 'np.sqrt', (['q'], {}), '(q)\n', (5059, 5062), True, 'import numpy as np\n'), ((5074, 5084), 'numpy.sqrt', 'np.sqrt', (['q'], {}), '(q)\n', (5081, 5084), True, 'import numpy as np\n'), ((5089, 5132), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['x_', 'xt1'], {'decimal': '(8)'}), '(x_, xt1, decimal=8)\n', (5112, 5132), True, 'import numpy.testing as npt\n'), ((5137, 5180), 'numpy.testing.assert_almost_equal', 'npt.assert_almost_equal', (['y_', 'xt2'], {'decimal': '(8)'}), '(y_, xt2, decimal=8)\n', (5160, 5180), True, 'import numpy.testing as npt\n'), ((5214, 5227), 'pytest.main', 'pytest.main', ([], {}), '()\n', (5225, 5227), False, 'import pytest\n'), ((337, 347), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (344, 347), True, 'import numpy as np\n'), ((379, 391), 'numpy.arctan', 'np.arctan', (['(1)'], {}), '(1)\n', (388, 391), True, 'import numpy as np\n'), ((552, 562), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (559, 562), True, 'import numpy as np\n'), ((597, 609), 'numpy.arctan', 'np.arctan', (['(1)'], {}), '(1)\n', (606, 609), True, 'import numpy as np\n'), ((4565, 4581), 'numpy.array', 'np.array', (['[1, 0]'], {}), '([1, 0])\n', (4573, 4581), True, 'import numpy as np\n'), ((4583, 4599), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (4591, 4599), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
import math
import operator
from pywizlight.bulb import PilotBuilder, PilotParser, wizlight
try: from .vec import *;
except: from vec import *;
# can't actually change a value like this from another file once it's been loaded. this
# is actually a very bad design on python's part IMNSHO, and this is an ugly workaround
verbose = [False]
def setVerbose (val):
verbose[0] = val
def debug (msg, end = "\n"):
if (verbose[0]): print (msg, end = end);
# red, green, blue basis vectors from the hue/saturation color wheel - just vectors at 3
# angles (0, 120, 240)
angle = (math.pi * 2) / 3
basis = (
vecFromAngle (0),
vecFromAngle (angle),
vecFromAngle (angle * 2)
)
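# Hedged sketch of the helpers this module assumes from vec.py (signatures inferred
# from the call sites below, not checked against that file): vecFromAngle(a) returns
# (cos(a), sin(a)); vecDot/vecAdd/vecMul/vecLen/vecInt are plain 2-vector dot, sum,
# scale, length, and int-cast operations; vecFormat pretty-prints a vector; epsilon
# is a small positive float.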
def printBasis (basis, prefix = ""):
debug ("{}Basis Vectors: ".format (prefix), end = "")
for vector in basis:
debug ("{} ".format (vecFormat (vector)), end="")
debug ("")
# this function computes the linear combination of two basis vectors that define a trapezoid
# hueVec - a normalized vector in the hue color wheel (0..1, 0..1, 0..1)
# saturation - a single value representing the length of the hue vector (0..1)
# brightness - a separate value that may be passed in, and should be used in the Pilot
def trapezoid (hueVec, saturation, brightness):
# if saturation is essentially 0, just go to the full on
if (saturation <= epsilon):
rgb = (0, 0, 0)
else:
# we want to compute the actual RGB color of the saturated point as a linear
# combination of no more than two of the basis vectors. first we have to figure
# out which of the basis vectors we will use
maxAngle = math.cos ((math.pi * 2 / 3) - epsilon)
mask = tuple([(1 if (vecDot (hueVec, vector) > maxAngle) else 0) for vector in basis])
count = sum(mask)
debug (" Max Angle: {:0.3f}, Mask: ({}, {}, {}), Count: {}".format (maxAngle, mask[0], mask[1], mask[2], count))
if (count == 1):
# easy case, it's just one color component
rgb = mask
else:
# recast as a ray-line intersection using the two found basis vectors, note
# the basis vectors are normalized by definition
subBasis = [basis[i] for i, maskVal in enumerate(mask) if (maskVal == 1)]
printBasis (subBasis, " ")
# define the line from the origin along the second vector, computing its
# equation in the form Ax + C = 0, but C is always 0 for this line
AB = (subBasis[1][1], subBasis[1][0] * -1)
# intersect the ray from the saturation point along the first basis vector
# with the line we just computed, these are definitely not co-linear, so there
# should always be an intersection point, and the result should always be in
# the range [-1 .. 1], this is the first basis coefficient
coeff = [0, 0]
coeff[0] = vecDot (hueVec, AB) / vecDot (subBasis[0], AB)
# compute the intersection point, and the second basis coefficient, note that
# we compute the coefficients to always be positive, but the intersection calculation
# needs to be in the opposite direction from the basis vector (hence the negative on
# coeff[0]).
intersection = vecAdd (vecMul (subBasis[0], -coeff[0]), hueVec)
coeff[1] = vecDot (intersection, subBasis[1])
debug (" Intersection Point: {}, Coefficients: {}".format (vecFormat (intersection), vecFormat (coeff)))
# there's a bit of a gamut problem here, as the area outside the hexagon defined by
# the three unit basis vectors is not actually reachable. this manifests as
# coefficients greater than 1, which will always happen unless the target color is
# either one of the basis vectors or a bisector of two basis vectors. we scale both
# coefficients by 1/maxCoefficient to make valid colors
maxCoeff = max (coeff[0], coeff[1])
coeff = [c / maxCoeff for c in coeff]
debug (" Scaled Coefficients: {}".format (vecFormat (coeff)))
# now rebuild the rgb vector by putting the coefficients into the correct place
j = 0
rgbList = []
for i in range (3):
if (mask[i] == 1):
rgbList.append (min (coeff[j], 1))
j += 1
else:
rgbList.append (0)
rgb = tuple (rgbList)
# we want a discontinuous behavior. if saturation >= 0.5, we want the color to remain saturated
# and we scale the cw value down to 0 as saturation goes from 0.5 to 1. if saturation < 0.5, we
# want to saturate cw, and scale the rgb down to (0, 0, 0) as saturation goes from 0.5 - 0
if (saturation >= 0.5):
# rgb remains saturated
# scale the cw value down to 0 as saturation goes from 0.5 to 1
cw = 1 - ((saturation - 0.5) * 2)
else:
cw = 1
rgb = vecMul (rgb, saturation * 2)
# scale back to the pilot color space
rgb = vecInt (vecMul (rgb, 255))
cw = int (max (0, cw * cwMax))
if (cw == 0): cw = None;
# scale cw back to 1-255 and return the Pilot Builder that includes the white light
debug (" RGB OUT: {}, CW: {}".format (rgb, cw))
# the wiz light appears to have 5 different LEDs, r, g, b, warm_white, and cold_white
# there appears to be a max power supplied across the 5 LEDs, which explains why all-
# on full isn't the brightest configuration
# warm_white appears to be 2800k, and cold_white appears to be 6200k, somewhat neutral
# brightness is achieved by turning both of them on
return PilotBuilder(rgb = rgb, warm_white = cw, cold_white = cw, brightness = brightness)
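# Worked trace (added note): for pure red, hueVec=(1, 0) and saturation=1, so only
# basis[0] passes the angle mask and rgb=(1, 0, 0); saturation >= 0.5 scales cw down
# to 0 (sent as None), yielding a Pilot with rgb=(255, 0, 0) and no white channel.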
# the max value we will use for c and w
cwMax = 128
def rgb2rgbcw (rgb, brightness):
debug ("RGB IN: {}, BRIGHTNESS: {}".format (rgb, brightness))
# scale the vector into canonical space ([0-1])
rgb = vecMul (rgb, 1 / 255)
# compute the hue vector as a linear combination of the basis vectors, and extract the
# saturation, there's probably a better pythonese way of doing this
hueVec = vecAdd (vecAdd (vecMul (basis[0], rgb[0]), vecMul (basis[1], rgb[1])), vecMul (basis[2], rgb[2]))
saturation = vecLen (hueVec)
if (saturation > epsilon):
hueVec = vecMul (hueVec, 1 / saturation)
return trapezoid(hueVec, saturation, brightness)
# given a tuple that is r,g,b and cw in 0-255 range, convert that to a hue, saturation tuple in the
# range (0..360, 0..100)
def rgbcw2hs (rgb, cw):
    # scale the rgb and cw values into canonical space (the wiz app might set cw
    # higher than the value we use, so we have to allow for that)
rgb = vecMul (rgb, 1 / 255)
cw = min (cw, cwMax) / cwMax
# compute the hue vector as a linear combination of the basis vectors, there's probably a
# better pythonese way of doing this
hueVec = vecAdd (vecAdd (vecMul (basis[0], rgb[0]), vecMul (basis[1], rgb[1])), vecMul (basis[2], rgb[2]))
debug ("RGB IN: {}, CW: {:.5f}, HUE VECTOR: {:.3f}".format (vecFormat(rgb), cw, vecFormat (hueVec)))
# the discontinuous nature of the wiz bulb setting means we have two different states:
# 1) the cw value is 1, and the hue vector is scaled (from 50% saturation to white)
# 2) the hue vector is saturated, and cw is scaled down (from 50% saturation to full color)
if (cw == 1):
# hue scales down to (0, 0) at saturation 0, up to unit length at 50% saturation, so we get
# that length, normalize the vector, and scale the saturation to reflect the half range
hueVecLength = vecLen(hueVec)
if (hueVecLength > epsilon):
vecMul (hueVec, 1 / hueVecLength)
saturation = hueVecLength * 0.5
else:
# the hue vector is already fully saturated, and cw scales from 0 - 0.5 to add in white light
saturation = 1 - (cw / 2)
# we have a saturated version of the hue vector now, which we convert to a hue vector and
    # then extract the angle of the vector in radians. We add 2 pi to the angle if it is less than
# 0 to put the hue angle in the range from 0 to 2 Pi
hue = math.atan2 (hueVec[1], hueVec[0])
while (hue < 0): hue += (math.pi * 2)
# scale the hue/saturation values back to their native ranges and return the tuple
hue *= (180 / math.pi)
saturation *= 100
debug (" HUE OUT: {:.5f}, SATURATION: {:.3f}".format (hue, saturation))
return hue, saturation
# given a canonical value, a width, and a number of divisions, snap the value to the nearest subdivision
def snapToDiscreteValue (canonicalValue, divisions, scale):
spacing = 1 / (divisions - 1)
snapIndex = int ((canonicalValue + (spacing / 2)) / spacing)
snappedX = snapIndex * spacing
return snappedX * scale
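# Worked example (added note): with divisions=3 the reachable grid is {0, 0.5, 1}
# scaled by `scale`, so snapToDiscreteValue(0.49, 3, 360) returns 180.0.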
# given a hue, saturation tuple in the range (0..360, 0..100), convert that to a rgbcw for the wiz light
def hs2rgbcw (hs, brightness):
# convert hue to a canonical value
hueCanonical = hs[0] / 360
while (hueCanonical >= 1): hueCanonical -= 1;
# compute hue in a discretized space and convert to radians, then a vector
hueRadians = snapToDiscreteValue (hueCanonical, 3 * 8, math.pi * 2)
hueVec = vecFromAngle(hueRadians)
# convert saturation to a canonical value in a discretized space
# we take the square root to give the user more visual control
saturationCanonical = hs[1] / 100
saturation = snapToDiscreteValue (saturationCanonical, 8, 1)
debug ("HS IN: {}, HUE: {:.5f}, SATURATION: {:.3f}, BRIGHTNESS: {}".format (vecFormat(hs), hueRadians, saturation, brightness))
return trapezoid (hueVec, saturation, brightness)
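# Hedged round-trip sketch (added; the values are illustrative only):
#     pilot = rgb2rgbcw((255, 0, 0), brightness=200)   # PilotBuilder, rgb=(255, 0, 0), cw=None
#     hue, sat = rgbcw2hs((255, 0, 0), cw=0)           # -> (0.0, 100.0)
#     pilot2 = hs2rgbcw((hue, sat), brightness=200)    # back to a saturated red Pilot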
|
[
"math.cos",
"pywizlight.bulb.PilotBuilder",
"math.atan2"
] |
[((5770, 5844), 'pywizlight.bulb.PilotBuilder', 'PilotBuilder', ([], {'rgb': 'rgb', 'warm_white': 'cw', 'cold_white': 'cw', 'brightness': 'brightness'}), '(rgb=rgb, warm_white=cw, cold_white=cw, brightness=brightness)\n', (5782, 5844), False, 'from pywizlight.bulb import PilotBuilder, PilotParser, wizlight\n'), ((8308, 8340), 'math.atan2', 'math.atan2', (['hueVec[1]', 'hueVec[0]'], {}), '(hueVec[1], hueVec[0])\n', (8318, 8340), False, 'import math\n'), ((1658, 1693), 'math.cos', 'math.cos', (['(math.pi * 2 / 3 - epsilon)'], {}), '(math.pi * 2 / 3 - epsilon)\n', (1666, 1693), False, 'import math\n')]
|
# -*- coding: utf-8 -*-
import json
from datetime import datetime, timedelta
from test.factories import ProjectFactory, OrganizationFactory, IssueFactory
from test.harness import IntegrationTest
from app import db, Issue
class TestProjects(IntegrationTest):
def test_all_projects_order(self):
'''
        Test that projects are returned in order of last_updated
'''
ProjectFactory(name=u'Project 1', last_updated='Mon, 01 Jan 2010 00:00:00 GMT')
ProjectFactory(name=u'Project 2', last_updated='Tue, 01 Jan 2011 00:00:00 GMT')
ProjectFactory(name=u'Non Github Project', last_updated='Wed, 01 Jan 2013 00:00:00', github_details=None)
ProjectFactory(name=u'Project 3', last_updated='Thu, 01 Jan 2014 00:00:00 GMT')
db.session.commit()
response = self.app.get('/api/projects')
response = json.loads(response.data)
self.assertEqual(response['objects'][0]['name'], u'Project 3')
self.assertEqual(response['objects'][1]['name'], u'Non Github Project')
self.assertEqual(response['objects'][2]['name'], u'Project 2')
self.assertEqual(response['objects'][3]['name'], u'Project 1')
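        # Added note: the default /api/projects ordering is last_updated descending,
        # which is why Project 3 (2014) leads and Project 1 (2010) trails.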
def test_projects(self):
ProjectFactory()
db.session.commit()
response = self.app.get('/api/projects')
response = json.loads(response.data)
assert isinstance(response, dict)
assert isinstance(response['pages'], dict)
assert isinstance(response['total'], int)
assert isinstance(response['objects'], list)
assert isinstance(response['objects'][0]['categories'], unicode)
assert isinstance(response['objects'][0]['tags'], list)
assert isinstance(response['objects'][0]['code_url'], unicode)
assert isinstance(response['objects'][0]['description'], unicode)
assert isinstance(response['objects'][0]['github_details'], dict)
assert isinstance(response['objects'][0]['id'], int)
assert isinstance(response['objects'][0]['api_url'], unicode)
assert isinstance(response['objects'][0]['link_url'], unicode)
assert isinstance(response['objects'][0]['name'], unicode)
assert isinstance(response['objects'][0]['organization'], dict)
assert isinstance(response['objects'][0]['organization_name'], unicode)
assert isinstance(response['objects'][0]['type'], unicode)
assert isinstance(response['objects'][0]['status'], unicode)
assert isinstance(response['objects'][0]['languages'], list)
def test_project_search_nonexisting_text(self):
''' Searching for non-existing text in the project and org/project
endpoints returns no results
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'Coder')
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 0)
self.assertEqual(len(project_response['objects']), 0)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 0)
self.assertEqual(len(org_project_response['objects']), 0)
def test_project_search_existing_text(self):
''' Searching for existing text in the project and org/project endpoints
returns expected results
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby')
ProjectFactory(organization_name=organization.name, description=u'python')
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 1)
self.assertEqual(len(project_response['objects']), 1)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 1)
self.assertEqual(len(org_project_response['objects']), 1)
def test_project_search_escaped_text(self):
''' Searching for escaped text in the project and org/project endpoints
returns expected results
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'What\'s My \'District')
ProjectFactory(organization_name=organization.name, description=u'Cöde%%for%%Ameriça')
db.session.commit()
project_response = self.app.get('/api/projects?q=What\'s My \'District')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 1)
self.assertEqual(len(project_response['objects']), 1)
org_project_response = self.app.get("/api/organizations/Code-for-San-Francisco/projects?q='District")
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 1)
self.assertEqual(len(org_project_response['objects']), 1)
project_response = self.app.get('/api/projects?q=%Ameriça')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 1)
self.assertEqual(len(project_response['objects']), 1)
org_project_response = self.app.get("/api/organizations/Code-for-San-Francisco/projects?q=Cöde%")
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 1)
self.assertEqual(len(org_project_response['objects']), 1)
def test_project_search_existing_phrase(self):
''' Searching for an existing phrase in the project and org/project endpoints
returns expected results
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby on rails')
ProjectFactory(organization_name=organization.name, description=u'i love lamp')
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby on rails')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 1)
self.assertEqual(len(project_response['objects']), 1)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby on rails')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 1)
self.assertEqual(len(org_project_response['objects']), 1)
def test_project_search_existing_part_of_phrase(self):
''' Searching for a partial phrase in the project and org/project endpoints
returns expected results
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby on rails')
ProjectFactory(organization_name=organization.name, description=u'i love lamp')
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 1)
self.assertEqual(len(project_response['objects']), 1)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 1)
self.assertEqual(len(org_project_response['objects']), 1)
def test_project_search_nonexisting_phrase(self):
''' Searching for a term that is not part of an existing phrase in the project and
org/project endpoints returns no results
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby on rails')
db.session.commit()
project_response = self.app.get('/api/projects?q=joomla')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 0)
self.assertEqual(len(project_response['objects']), 0)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=joomla')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 0)
self.assertEqual(len(org_project_response['objects']), 0)
def test_project_search_order_by_relevance(self):
''' Search results from the project and org/project endpoints are returned
in order of relevance
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(len(project_response["objects"]), 2)
self.assertEqual(project_response['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(len(org_project_response["objects"]), 2)
self.assertEqual(org_project_response['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
def test_project_search_order_by_relevance_requested(self):
''' Search results from the project and org/project endpoints are returned
in order of relevance when explicitly requested
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby&sort_by=relevance')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(len(project_response["objects"]), 2)
self.assertEqual(project_response['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=relevance')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(len(org_project_response["objects"]), 2)
self.assertEqual(org_project_response['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
def test_project_search_order_by_last_updated(self):
''' Search results from the project and org/project endpoints are returned
in order of last_updated, if requested
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby&sort_by=last_updated')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(len(project_response["objects"]), 2)
self.assertEqual(project_response['objects'][0]['description'], 'ruby')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=last_updated')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(len(org_project_response["objects"]), 2)
self.assertEqual(org_project_response['objects'][0]['description'], 'ruby')
def test_project_search_order_by_last_updated_sort_desc(self):
''' Search results from the project and org/project endpoints are returned
in descending order of last_updated, if requested
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby&sort_by=last_updated&sort_dir=desc')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(len(project_response["objects"]), 2)
self.assertEqual(project_response['objects'][0]['description'], 'ruby')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=last_updated&sort_dir=desc')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(len(org_project_response["objects"]), 2)
self.assertEqual(org_project_response['objects'][0]['description'], 'ruby')
def test_project_search_order_by_last_updated_sort_asc(self):
''' Search results from the project and org/project endpoints are returned
in ascending order of last_updated, if requested
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=ruby&sort_by=last_updated&sort_dir=asc')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(len(project_response["objects"]), 2)
self.assertEqual(project_response['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=last_updated&sort_dir=asc')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(len(org_project_response["objects"]), 2)
self.assertEqual(org_project_response['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
def test_project_search_ranked_order(self):
''' Search results from the project and org/project endpoints are returned
with correct ranking values
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
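# Seed projects whose matching fields carry different search weights:
# a status match should rank above a tags match, which ranks above a description match.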
ProjectFactory(organization_name=organization.name, status=u'TEST', last_updated=datetime.now() - timedelta(10000))
ProjectFactory(organization_name=organization.name, description=u'testing a new thing', last_updated=datetime.now() - timedelta(1))
ProjectFactory(organization_name=organization.name, tags=[u'test,tags,what,ever'], last_updated=datetime.now() - timedelta(100))
ProjectFactory(organization_name=organization.name, last_updated=datetime.now())
db.session.commit()
project_response = self.app.get('/api/projects?q=TEST')
project_response = json.loads(project_response.data)
self.assertEqual(project_response['total'], 3)
self.assertEqual(project_response['objects'][0]['status'], u'TEST')
self.assertEqual(project_response['objects'][1]['tags'], [u'test,tags,what,ever'])
self.assertEqual(project_response['objects'][2]['description'], u'testing a new thing')
def test_project_return_only_ids(self):
''' Search results from the project and org/project endpoints are returned
as IDs only, if requested
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
project_one = ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
project_two = ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_one_id = project_one.id
project_two_id = project_two.id
project_response = self.app.get('/api/projects?q=ruby&only_ids=true')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(len(project_response["objects"]), 2)
assert isinstance(project_response['objects'][0], int)
assert isinstance(project_response['objects'][1], int)
self.assertEqual(project_response['objects'][0], project_one_id)
self.assertEqual(project_response['objects'][1], project_two_id)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=ruby&only_ids=true')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(len(org_project_response["objects"]), 2)
assert isinstance(org_project_response['objects'][0], int)
assert isinstance(org_project_response['objects'][1], int)
self.assertEqual(org_project_response['objects'][0], project_one_id)
self.assertEqual(org_project_response['objects'][1], project_two_id)
def test_project_search_empty_string(self):
''' Searching an empty string on the project and org/project endpoints returns all projects
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=')
project_response = json.loads(project_response.data)
assert isinstance(project_response['total'], int)
assert isinstance(project_response['objects'], list)
self.assertEqual(project_response['total'], 2)
self.assertEqual(len(project_response['objects']), 2)
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=')
org_project_response = json.loads(org_project_response.data)
assert isinstance(org_project_response['total'], int)
assert isinstance(org_project_response['objects'], list)
self.assertEqual(org_project_response['total'], 2)
self.assertEqual(len(org_project_response['objects']), 2)
def test_project_search_tsv_body_not_in_response(self):
''' The tsv_body field is not in the response from the project and org/project endpoints
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
db.session.commit()
project_response = self.app.get('/api/projects?q=')
project_response = json.loads(project_response.data)
self.assertEqual(len(project_response['objects']), 2)
self.assertFalse('tsv_body' in project_response['objects'][0])
self.assertFalse('tsv_body' in project_response['objects'][1])
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=')
org_project_response = json.loads(org_project_response.data)
self.assertEqual(len(org_project_response['objects']), 2)
self.assertFalse('tsv_body' in org_project_response['objects'][0])
self.assertFalse('tsv_body' in org_project_response['objects'][1])
def test_project_orgs_dont_include_tsv(self):
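''' The tsv_body field is not included in a project's embedded organization
'''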
OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=u"Code for San Francisco")
db.session.commit()
response = self.app.get('/api/projects')
response = json.loads(response.data)
self.assertFalse('tsv_body' in response['objects'][0]['organization'])
def test_project_search_includes_status(self):
''' The status field is included in search results from the project and org/project endpoints
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, status=u'Beta')
ProjectFactory(organization_name=organization.name, status=u'Alpha')
db.session.commit()
project_response = self.app.get('/api/projects?q=alpha')
project_response = json.loads(project_response.data)
self.assertEqual(len(project_response['objects']), 1)
self.assertEqual(project_response['objects'][0]['status'], 'Alpha')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=alpha')
org_project_response = json.loads(org_project_response.data)
self.assertEqual(len(org_project_response['objects']), 1)
self.assertEqual(org_project_response['objects'][0]['status'], 'Alpha')
def test_project_search_includes_name(self):
''' The name field is included in search results from the project and org/project endpoints
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, name=u'My Cool Project')
ProjectFactory(organization_name=organization.name, name=u'My Dumb Project')
db.session.commit()
project_response = self.app.get('/api/projects?q=cool')
project_response = json.loads(project_response.data)
self.assertEqual(len(project_response['objects']), 1)
self.assertEqual(project_response['objects'][0]['name'], 'My Cool Project')
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=cool')
org_project_response = json.loads(org_project_response.data)
self.assertEqual(len(org_project_response['objects']), 1)
self.assertEqual(org_project_response['objects'][0]['name'], 'My Cool Project')
def test_project_search_includes_tags(self):
'''
The tags field is included in search results from the project and org/project endpoints
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, tags=['mapping', 'philly'])
ProjectFactory(organization_name=organization.name, tags=['food stamps', 'health'])
db.session.commit()
project_response = self.app.get('/api/projects?q=stamps')
project_response = json.loads(project_response.data)
self.assertEqual(len(project_response['objects']), 1)
self.assertEqual(project_response['objects'][0]['tags'], ['food stamps', 'health'])
org_project_response = self.app.get('/api/organizations/Code-for-San-Francisco/projects?q=stamps')
org_project_response = json.loads(org_project_response.data)
self.assertEqual(len(org_project_response['objects']), 1)
self.assertEqual(org_project_response['objects'][0]['tags'], ['food stamps', 'health'])
def test_project_search_includes_organization_name(self):
'''
The organization name is included in the project search
'''
organization = OrganizationFactory(name=u"Code for San Francisco")
ProjectFactory(organization_name=organization.name, name=u"Project One")
ProjectFactory(organization_name=organization.name, name=u"Project Two", description=u"America")
organization = OrganizationFactory(name=u"Code for America")
ProjectFactory(organization_name=organization.name, name=u"Project Three")
ProjectFactory(organization_name=organization.name, name=u"Project Four", tags=u"San Francisco")
db.session.commit()
# Test that organization-name matches are returned before project-name matches
project_response = self.app.get('/api/projects?q=Code+for+San+Francisco')
project_response = json.loads(project_response.data)
self.assertEqual(len(project_response['objects']), 3)
self.assertEqual(project_response['objects'][0]['name'], u'Project One')
self.assertEqual(project_response['objects'][1]['name'], u'Project Two')
self.assertEqual(project_response['objects'][2]['name'], u'Project Four')
self.assertTrue('San Francisco' in project_response['objects'][2]['tags'])
# Test that organization-name matches are returned before project-description matches
project_response = self.app.get('/api/projects?q=Code for America')
project_response = json.loads(project_response.data)
self.assertEqual(len(project_response['objects']), 3)
self.assertEqual(project_response['objects'][0]['name'], u'Project Three')
self.assertEqual(project_response['objects'][1]['name'], u'Project Four')
self.assertEqual(project_response['objects'][2]['name'], u'Project Two')
self.assertEqual(project_response['objects'][2]['description'], u'America')
def test_project_organization_type_filter(self):
'''
Test searching for projects from certain types of organizations.
'''
brigade = OrganizationFactory(name=u'Brigade Org', type=u'Brigade, midwest')
code_for_all = OrganizationFactory(name=u'Code for All Org', type=u'Code for All')
gov_org = OrganizationFactory(name=u'Gov Org', type=u'Government')
brigade_project = ProjectFactory(name=u'Today Brigade project', organization_name=brigade.name)
code_for_all_project = ProjectFactory(name=u'Yesterday Code for All project', organization_name=code_for_all.name, last_updated=datetime.now() - timedelta(days=1))
gov_project = ProjectFactory(name=u'Two days ago Gov project', organization_name=gov_org.name, last_updated=datetime.now() - timedelta(days=2))
brigade_project2 = ProjectFactory(name=u'Three days ago Brigade project', organization_name=brigade.name, last_updated=datetime.now() - timedelta(days=3))
code_for_all_project2 = ProjectFactory(name=u'Four days ago Code for All project', organization_name=code_for_all.name, last_updated=datetime.now() - timedelta(days=4))
gov_project2 = ProjectFactory(name=u'Five days ago Gov project', organization_name=gov_org.name, last_updated=datetime.now() - timedelta(days=5))
db.session.add(brigade_project)
db.session.add(code_for_all_project)
db.session.add(gov_project)
db.session.add(brigade_project2)
db.session.add(code_for_all_project2)
db.session.add(gov_project2)
db.session.commit()
# Test they return in order of last_updated
response = self.app.get('/api/projects')
self.assertEqual(response.status_code, 200)
response = json.loads(response.data)
self.assertEqual(response['total'], 6)
self.assertEqual(response['objects'][0]['name'], 'Today Brigade project')
self.assertEqual(response['objects'][1]['name'], 'Yesterday Code for All project')
self.assertEqual(response['objects'][2]['name'], 'Two days ago Gov project')
self.assertEqual(response['objects'][3]['name'], 'Three days ago Brigade project')
self.assertEqual(response['objects'][4]['name'], 'Four days ago Code for All project')
self.assertEqual(response['objects'][5]['name'], 'Five days ago Gov project')
# Test they return in order of last_updated, regardless of the order of the organization_type filter values
response = self.app.get('/api/projects?organization_type=Government,Code+for+All,Brigade')
self.assertEqual(response.status_code, 200)
response = json.loads(response.data)
self.assertEqual(response['total'], 6)
self.assertEqual(response['objects'][0]['name'], 'Today Brigade project')
self.assertEqual(response['objects'][1]['name'], 'Yesterday Code for All project')
self.assertEqual(response['objects'][2]['name'], 'Two days ago Gov project')
self.assertEqual(response['objects'][3]['name'], 'Three days ago Brigade project')
self.assertEqual(response['objects'][4]['name'], 'Four days ago Code for All project')
self.assertEqual(response['objects'][5]['name'], 'Five days ago Gov project')
response = self.app.get('/api/projects?organization_type=Brigade,Code+for+All')
self.assertEqual(response.status_code, 200)
response = json.loads(response.data)
self.assertEqual(response['total'], 4)
self.assertEqual(response['objects'][0]['name'], 'Today Brigade project')
self.assertEqual(response['objects'][1]['name'], 'Yesterday Code for All project')
self.assertEqual(response['objects'][2]['name'], 'Three days ago Brigade project')
self.assertEqual(response['objects'][3]['name'], 'Four days ago Code for All project')
# Different order, same results
response = self.app.get('/api/projects?organization_type=Code+for+All,Brigade')
self.assertEqual(response.status_code, 200)
response = json.loads(response.data)
self.assertEqual(response['total'], 4)
self.assertEqual(response['objects'][0]['name'], 'Today Brigade project')
self.assertEqual(response['objects'][1]['name'], 'Yesterday Code for All project')
self.assertEqual(response['objects'][2]['name'], 'Three days ago Brigade project')
self.assertEqual(response['objects'][3]['name'], 'Four days ago Code for All project')
response = self.app.get('/api/projects?organization_type=Code+for+All,Government')
self.assertEqual(response.status_code, 200)
response = json.loads(response.data)
self.assertEqual(response['total'], 4)
self.assertEqual(response['objects'][0]['name'], 'Yesterday Code for All project')
self.assertEqual(response['objects'][1]['name'], 'Two days ago Gov project')
self.assertEqual(response['objects'][2]['name'], 'Four days ago Code for All project')
self.assertEqual(response['objects'][3]['name'], 'Five days ago Gov project')
# Different order, same results
response = self.app.get('/api/projects?organization_type=Government,Code+for+All')
self.assertEqual(response.status_code, 200)
response = json.loads(response.data)
self.assertEqual(response['total'], 4)
self.assertEqual(response['objects'][0]['name'], 'Yesterday Code for All project')
self.assertEqual(response['objects'][1]['name'], 'Two days ago Gov project')
self.assertEqual(response['objects'][2]['name'], 'Four days ago Code for All project')
self.assertEqual(response['objects'][3]['name'], 'Five days ago Gov project')
def test_project_cascading_deletes(self):
''' Test that issues are deleted when their parent
project or organization is deleted
'''
# set up test objects and delete a project
organization = OrganizationFactory(name=u'TEST ORG')
db.session.flush()
project = ProjectFactory(organization_name=organization.name, name=u'TEST PROJECT')
db.session.flush()
issue = IssueFactory(title=u'TEST ISSUE', project_id=project.id)
another_issue = IssueFactory(title=u'ANOTHER TEST ISSUE', project_id=project.id)
a_third_issue = IssueFactory(title=u'A THIRD TEST ISSUE', project_id=project.id)
db.session.commit()
# make sure the issues are in the db
issues = db.session.query(Issue).all()
self.assertTrue(len(issues) == 3)
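# deleting the project should cascade-delete its issues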
db.session.execute('DELETE FROM project')
db.session.commit()
issues = db.session.query(Issue).all()
self.assertFalse(len(issues))
# delete an organization
project = ProjectFactory(organization_name=organization.name, name=u'TEST PROJECT')
db.session.flush()
issue = IssueFactory(title=u'TEST ISSUE', project_id=project.id)
another_issue = IssueFactory(title=u'ANOTHER TEST ISSUE', project_id=project.id)
a_third_issue = IssueFactory(title=u'A THIRD TEST ISSUE', project_id=project.id)
db.session.add(issue)
db.session.add(another_issue)
db.session.add(a_third_issue)
db.session.commit()
# make sure the issues are in the db
issues = db.session.query(Issue).all()
self.assertTrue(len(issues) == 3)
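# deleting the organization should cascade through its projects to their issues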
db.session.execute('DELETE FROM organization')
db.session.commit()
issues = db.session.query(Issue).all()
self.assertFalse(len(issues))
def test_include_issues(self):
""" Test the include_issues flag """
project = ProjectFactory()
db.session.commit()
IssueFactory(project_id=project.id)
db.session.commit()
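# with include_issues=True the response embeds full issue objects instead of a URL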
got = self.app.get("/api/projects?include_issues=True")
project = json.loads(got.data)['objects'][0]
self.assertTrue(isinstance(project['issues'], list))
got = self.app.get("/api/projects?include_issues=False")
project = json.loads(got.data)['objects'][0]
self.assertFalse(isinstance(project['issues'], list))
self.assertEqual("http://localhost/api/projects/1/issues", project["issues"])
got = self.app.get("/api/projects")
project = json.loads(got.data)['objects'][0]
self.assertFalse(isinstance(project['issues'], list))
self.assertEqual("http://localhost/api/projects/1/issues", project["issues"])
|
[
"test.factories.ProjectFactory",
"json.loads",
"app.db.session.execute",
"test.factories.OrganizationFactory",
"datetime.timedelta",
"app.db.session.commit",
"app.db.session.flush",
"test.factories.IssueFactory",
"app.db.session.query",
"datetime.datetime.now",
"app.db.session.add"
] |
[((397, 476), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'name': 'u"""Project 1"""', 'last_updated': '"""Mon, 01 Jan 2010 00:00:00 GMT"""'}), "(name=u'Project 1', last_updated='Mon, 01 Jan 2010 00:00:00 GMT')\n", (411, 476), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((485, 564), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'name': 'u"""Project 2"""', 'last_updated': '"""Tue, 01 Jan 2011 00:00:00 GMT"""'}), "(name=u'Project 2', last_updated='Tue, 01 Jan 2011 00:00:00 GMT')\n", (499, 564), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((573, 683), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'name': 'u"""Non Github Project"""', 'last_updated': '"""Wed, 01 Jan 2013 00:00:00"""', 'github_details': 'None'}), "(name=u'Non Github Project', last_updated=\n 'Wed, 01 Jan 2013 00:00:00', github_details=None)\n", (587, 683), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((687, 766), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'name': 'u"""Project 3"""', 'last_updated': '"""Thu, 01 Jan 2014 00:00:00 GMT"""'}), "(name=u'Project 3', last_updated='Thu, 01 Jan 2014 00:00:00 GMT')\n", (701, 766), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((775, 794), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (792, 794), False, 'from app import db, Issue\n'), ((864, 889), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (874, 889), False, 'import json\n'), ((1222, 1238), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (1236, 1238), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((1247, 1266), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1264, 1266), False, 'from app import db, Issue\n'), ((1336, 1361), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (1346, 1361), False, 'import json\n'), ((2744, 2795), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (2763, 2795), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((2804, 2877), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""Coder"""'}), "(organization_name=organization.name, description=u'Coder')\n", (2818, 2877), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((2886, 2905), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2903, 2905), False, 'from app import db, Issue\n'), ((2997, 3030), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (3007, 3030), False, 'import json\n'), ((3404, 3441), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (3414, 3441), False, 'import json\n'), ((3897, 3948), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (3916, 3948), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((3957, 4029), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""ruby"""'}), "(organization_name=organization.name, 
description=u'ruby')\n", (3971, 4029), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((4038, 4112), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""python"""'}), "(organization_name=organization.name, description=u'python')\n", (4052, 4112), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((4121, 4140), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4138, 4140), False, 'from app import db, Issue\n'), ((4232, 4265), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (4242, 4265), False, 'import json\n'), ((4639, 4676), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (4649, 4676), False, 'import json\n'), ((5130, 5181), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (5149, 5181), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((5190, 5282), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""What\'s My \'District"""'}), '(organization_name=organization.name, description=\n u"What\'s My \'District")\n', (5204, 5282), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((5288, 5379), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""Cöde%%for%%Ameriça"""'}), "(organization_name=organization.name, description=\n u'Cöde%%for%%Ameriça')\n", (5302, 5379), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((5383, 5402), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5400, 5402), False, 'from app import db, Issue\n'), ((5511, 5544), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (5521, 5544), False, 'import json\n'), ((5923, 5960), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (5933, 5960), False, 'import json\n'), ((6309, 6342), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (6319, 6342), False, 'import json\n'), ((6717, 6754), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (6727, 6754), False, 'import json\n'), ((7217, 7268), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (7236, 7268), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((7277, 7363), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""ruby on rails"""'}), "(organization_name=organization.name, description=\n u'ruby on rails')\n", (7291, 7363), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((7367, 7446), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""i love lamp"""'}), "(organization_name=organization.name, description=u'i love lamp')\n", (7381, 7446), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((7455, 7474), 'app.db.session.commit', 'db.session.commit', 
([], {}), '()\n', (7472, 7474), False, 'from app import db, Issue\n'), ((7575, 7608), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (7585, 7608), False, 'import json\n'), ((7991, 8028), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (8001, 8028), False, 'import json\n'), ((8497, 8548), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (8516, 8548), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((8557, 8643), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""ruby on rails"""'}), "(organization_name=organization.name, description=\n u'ruby on rails')\n", (8571, 8643), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((8647, 8726), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""i love lamp"""'}), "(organization_name=organization.name, description=u'i love lamp')\n", (8661, 8726), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((8735, 8754), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (8752, 8754), False, 'from app import db, Issue\n'), ((8846, 8879), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (8856, 8879), False, 'import json\n'), ((9253, 9290), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (9263, 9290), False, 'import json\n'), ((9777, 9828), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (9796, 9828), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((9837, 9923), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'description': 'u"""ruby on rails"""'}), "(organization_name=organization.name, description=\n u'ruby on rails')\n", (9851, 9923), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((9927, 9946), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (9944, 9946), False, 'from app import db, Issue\n'), ((10040, 10073), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (10050, 10073), False, 'import json\n'), ((10449, 10486), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (10459, 10486), False, 'import json\n'), ((10946, 10997), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (10965, 10997), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((11277, 11296), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (11294, 11296), False, 'from app import db, Issue\n'), ((11388, 11421), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (11398, 11421), False, 'import json\n'), ((11840, 11877), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (11850, 11877), False, 'import json\n'), ((12418, 12469), 'test.factories.OrganizationFactory', 'OrganizationFactory', 
([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (12437, 12469), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((12749, 12768), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (12766, 12768), False, 'from app import db, Issue\n'), ((12878, 12911), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (12888, 12911), False, 'import json\n'), ((13348, 13385), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (13358, 13385), False, 'import json\n'), ((13910, 13961), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (13929, 13961), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((14241, 14260), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (14258, 14260), False, 'from app import db, Issue\n'), ((14373, 14406), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (14383, 14406), False, 'import json\n'), ((14826, 14863), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (14836, 14863), False, 'import json\n'), ((15389, 15440), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (15408, 15440), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((15720, 15739), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (15737, 15739), False, 'from app import db, Issue\n'), ((15866, 15899), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (15876, 15899), False, 'import json\n'), ((16333, 16370), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (16343, 16370), False, 'import json\n'), ((16894, 16945), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (16913, 16945), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((17225, 17244), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (17242, 17244), False, 'from app import db, Issue\n'), ((17370, 17403), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (17380, 17403), False, 'import json\n'), ((17856, 17893), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (17866, 17893), False, 'import json\n'), ((18398, 18449), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (18417, 18449), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((18948, 18967), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (18965, 18967), False, 'from app import db, Issue\n'), ((19059, 19092), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (19069, 19092), False, 'import json\n'), ((19611, 19662), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (19630, 19662), False, 'from test.factories import ProjectFactory, 
OrganizationFactory, IssueFactory\n'), ((19970, 19989), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (19987, 19989), False, 'from app import db, Issue\n'), ((20176, 20209), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (20186, 20209), False, 'import json\n'), ((20814, 20851), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (20824, 20851), False, 'import json\n'), ((21517, 21568), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (21536, 21568), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((21848, 21867), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (21865, 21867), False, 'from app import db, Issue\n'), ((21955, 21988), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (21965, 21988), False, 'import json\n'), ((22358, 22395), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (22368, 22395), False, 'import json\n'), ((22841, 22892), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (22860, 22892), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((23172, 23191), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (23189, 23191), False, 'from app import db, Issue\n'), ((23279, 23312), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (23289, 23312), False, 'import json\n'), ((23650, 23687), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (23660, 23687), False, 'import json\n'), ((23963, 24014), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (23982, 24014), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((24023, 24082), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'u"""Code for San Francisco"""'}), "(organization_name=u'Code for San Francisco')\n", (24037, 24082), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((24091, 24110), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (24108, 24110), False, 'from app import db, Issue\n'), ((24179, 24204), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (24189, 24204), False, 'import json\n'), ((24473, 24524), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (24492, 24524), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((24533, 24600), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'status': 'u"""Beta"""'}), "(organization_name=organization.name, status=u'Beta')\n", (24547, 24600), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((24609, 24677), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'status': 'u"""Alpha"""'}), "(organization_name=organization.name, status=u'Alpha')\n", (24623, 24677), False, 'from test.factories 
import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((24686, 24705), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (24703, 24705), False, 'from app import db, Issue\n'), ((24798, 24831), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (24808, 24831), False, 'import json\n'), ((25108, 25145), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (25118, 25145), False, 'import json\n'), ((25477, 25528), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (25496, 25528), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((25537, 25613), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""My Cool Project"""'}), "(organization_name=organization.name, name=u'My Cool Project')\n", (25551, 25613), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((25622, 25698), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""My Dumb Project"""'}), "(organization_name=organization.name, name=u'My Dumb Project')\n", (25636, 25698), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((25707, 25726), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (25724, 25726), False, 'from app import db, Issue\n'), ((25818, 25851), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (25828, 25851), False, 'import json\n'), ((26135, 26172), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (26145, 26172), False, 'import json\n'), ((26520, 26571), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (26539, 26571), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((26580, 26659), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'tags': "['mapping', 'philly']"}), "(organization_name=organization.name, tags=['mapping', 'philly'])\n", (26594, 26659), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((26668, 26755), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'tags': "['food stamps', 'health']"}), "(organization_name=organization.name, tags=['food stamps',\n 'health'])\n", (26682, 26755), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((26760, 26779), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (26777, 26779), False, 'from app import db, Issue\n'), ((26873, 26906), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (26883, 26906), False, 'import json\n'), ((27200, 27237), 'json.loads', 'json.loads', (['org_project_response.data'], {}), '(org_project_response.data)\n', (27210, 27237), False, 'import json\n'), ((27574, 27625), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for San Francisco"""'}), "(name=u'Code for San Francisco')\n", (27593, 27625), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((27634, 27706), 
'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""Project One"""'}), "(organization_name=organization.name, name=u'Project One')\n", (27648, 27706), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((27715, 27815), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""Project Two"""', 'description': 'u"""America"""'}), "(organization_name=organization.name, name=u'Project Two',\n description=u'America')\n", (27729, 27815), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((27836, 27881), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for America"""'}), "(name=u'Code for America')\n", (27855, 27881), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((27890, 27964), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""Project Three"""'}), "(organization_name=organization.name, name=u'Project Three')\n", (27904, 27964), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((27973, 28073), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""Project Four"""', 'tags': 'u"""San Francisco"""'}), "(organization_name=organization.name, name=u'Project Four',\n tags=u'San Francisco')\n", (27987, 28073), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((28078, 28097), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (28095, 28097), False, 'from app import db, Issue\n'), ((28272, 28305), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (28282, 28305), False, 'import json\n'), ((28870, 28903), 'json.loads', 'json.loads', (['project_response.data'], {}), '(project_response.data)\n', (28880, 28903), False, 'import json\n'), ((29464, 29530), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Brigade Org"""', 'type': 'u"""Brigade, midwest"""'}), "(name=u'Brigade Org', type=u'Brigade, midwest')\n", (29483, 29530), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((29554, 29621), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Code for All Org"""', 'type': 'u"""Code for All"""'}), "(name=u'Code for All Org', type=u'Code for All')\n", (29573, 29621), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((29640, 29696), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""Gov Org"""', 'type': 'u"""Government"""'}), "(name=u'Gov Org', type=u'Government')\n", (29659, 29696), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((29724, 29801), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'name': 'u"""Today Brigade project"""', 'organization_name': 'brigade.name'}), "(name=u'Today Brigade project', organization_name=brigade.name)\n", (29738, 29801), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((30629, 30660), 'app.db.session.add', 'db.session.add', (['brigade_project'], {}), '(brigade_project)\n', (30643, 30660), False, 'from app import db, Issue\n'), ((30669, 30705), 'app.db.session.add', 'db.session.add', (['code_for_all_project'], 
{}), '(code_for_all_project)\n', (30683, 30705), False, 'from app import db, Issue\n'), ((30714, 30741), 'app.db.session.add', 'db.session.add', (['gov_project'], {}), '(gov_project)\n', (30728, 30741), False, 'from app import db, Issue\n'), ((30750, 30782), 'app.db.session.add', 'db.session.add', (['brigade_project2'], {}), '(brigade_project2)\n', (30764, 30782), False, 'from app import db, Issue\n'), ((30791, 30828), 'app.db.session.add', 'db.session.add', (['code_for_all_project2'], {}), '(code_for_all_project2)\n', (30805, 30828), False, 'from app import db, Issue\n'), ((30837, 30865), 'app.db.session.add', 'db.session.add', (['gov_project2'], {}), '(gov_project2)\n', (30851, 30865), False, 'from app import db, Issue\n'), ((30874, 30893), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (30891, 30893), False, 'from app import db, Issue\n'), ((31067, 31092), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (31077, 31092), False, 'import json\n'), ((31921, 31946), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (31931, 31946), False, 'import json\n'), ((32684, 32709), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (32694, 32709), False, 'import json\n'), ((33318, 33343), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (33328, 33343), False, 'import json\n'), ((33913, 33938), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (33923, 33938), False, 'import json\n'), ((34548, 34573), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (34558, 34573), False, 'import json\n'), ((35211, 35248), 'test.factories.OrganizationFactory', 'OrganizationFactory', ([], {'name': 'u"""TEST ORG"""'}), "(name=u'TEST ORG')\n", (35230, 35248), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((35257, 35275), 'app.db.session.flush', 'db.session.flush', ([], {}), '()\n', (35273, 35275), False, 'from app import db, Issue\n'), ((35295, 35368), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""TEST PROJECT"""'}), "(organization_name=organization.name, name=u'TEST PROJECT')\n", (35309, 35368), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((35377, 35395), 'app.db.session.flush', 'db.session.flush', ([], {}), '()\n', (35393, 35395), False, 'from app import db, Issue\n'), ((35413, 35469), 'test.factories.IssueFactory', 'IssueFactory', ([], {'title': 'u"""TEST ISSUE"""', 'project_id': 'project.id'}), "(title=u'TEST ISSUE', project_id=project.id)\n", (35425, 35469), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((35494, 35558), 'test.factories.IssueFactory', 'IssueFactory', ([], {'title': 'u"""ANOTHER TEST ISSUE"""', 'project_id': 'project.id'}), "(title=u'ANOTHER TEST ISSUE', project_id=project.id)\n", (35506, 35558), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((35583, 35647), 'test.factories.IssueFactory', 'IssueFactory', ([], {'title': 'u"""A THIRD TEST ISSUE"""', 'project_id': 'project.id'}), "(title=u'A THIRD TEST ISSUE', project_id=project.id)\n", (35595, 35647), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((35656, 35675), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (35673, 35675), False, 'from app import db, Issue\n'), ((35820, 35861), 
'app.db.session.execute', 'db.session.execute', (['"""DELETE FROM project"""'], {}), "('DELETE FROM project')\n", (35838, 35861), False, 'from app import db, Issue\n'), ((35870, 35889), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (35887, 35889), False, 'from app import db, Issue\n'), ((36027, 36100), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {'organization_name': 'organization.name', 'name': 'u"""TEST PROJECT"""'}), "(organization_name=organization.name, name=u'TEST PROJECT')\n", (36041, 36100), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((36109, 36127), 'app.db.session.flush', 'db.session.flush', ([], {}), '()\n', (36125, 36127), False, 'from app import db, Issue\n'), ((36145, 36201), 'test.factories.IssueFactory', 'IssueFactory', ([], {'title': 'u"""TEST ISSUE"""', 'project_id': 'project.id'}), "(title=u'TEST ISSUE', project_id=project.id)\n", (36157, 36201), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((36226, 36290), 'test.factories.IssueFactory', 'IssueFactory', ([], {'title': 'u"""ANOTHER TEST ISSUE"""', 'project_id': 'project.id'}), "(title=u'ANOTHER TEST ISSUE', project_id=project.id)\n", (36238, 36290), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((36315, 36379), 'test.factories.IssueFactory', 'IssueFactory', ([], {'title': 'u"""A THIRD TEST ISSUE"""', 'project_id': 'project.id'}), "(title=u'A THIRD TEST ISSUE', project_id=project.id)\n", (36327, 36379), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((36388, 36409), 'app.db.session.add', 'db.session.add', (['issue'], {}), '(issue)\n', (36402, 36409), False, 'from app import db, Issue\n'), ((36418, 36447), 'app.db.session.add', 'db.session.add', (['another_issue'], {}), '(another_issue)\n', (36432, 36447), False, 'from app import db, Issue\n'), ((36456, 36485), 'app.db.session.add', 'db.session.add', (['a_third_issue'], {}), '(a_third_issue)\n', (36470, 36485), False, 'from app import db, Issue\n'), ((36494, 36513), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (36511, 36513), False, 'from app import db, Issue\n'), ((36658, 36704), 'app.db.session.execute', 'db.session.execute', (['"""DELETE FROM organization"""'], {}), "('DELETE FROM organization')\n", (36676, 36704), False, 'from app import db, Issue\n'), ((36713, 36732), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (36730, 36732), False, 'from app import db, Issue\n'), ((36917, 36933), 'test.factories.ProjectFactory', 'ProjectFactory', ([], {}), '()\n', (36931, 36933), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((36942, 36961), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (36959, 36961), False, 'from app import db, Issue\n'), ((36970, 37005), 'test.factories.IssueFactory', 'IssueFactory', ([], {'project_id': 'project.id'}), '(project_id=project.id)\n', (36982, 37005), False, 'from test.factories import ProjectFactory, OrganizationFactory, IssueFactory\n'), ((37014, 37033), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (37031, 37033), False, 'from app import db, Issue\n'), ((18924, 18938), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18936, 18938), False, 'from datetime import datetime, timedelta\n'), ((35739, 35762), 'app.db.session.query', 'db.session.query', (['Issue'], {}), '(Issue)\n', (35755, 35762), False, 'from app import db, 
Issue\n'), ((35907, 35930), 'app.db.session.query', 'db.session.query', (['Issue'], {}), '(Issue)\n', (35923, 35930), False, 'from app import db, Issue\n'), ((36577, 36600), 'app.db.session.query', 'db.session.query', (['Issue'], {}), '(Issue)\n', (36593, 36600), False, 'from app import db, Issue\n'), ((36750, 36773), 'app.db.session.query', 'db.session.query', (['Issue'], {}), '(Issue)\n', (36766, 36773), False, 'from app import db, Issue\n'), ((37117, 37137), 'json.loads', 'json.loads', (['got.data'], {}), '(got.data)\n', (37127, 37137), False, 'import json\n'), ((37296, 37316), 'json.loads', 'json.loads', (['got.data'], {}), '(got.data)\n', (37306, 37316), False, 'import json\n'), ((37541, 37561), 'json.loads', 'json.loads', (['got.data'], {}), '(got.data)\n', (37551, 37561), False, 'import json\n'), ((11112, 11126), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11124, 11126), False, 'from datetime import datetime, timedelta\n'), ((11129, 11142), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (11138, 11142), False, 'from datetime import datetime, timedelta\n'), ((11238, 11252), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (11250, 11252), False, 'from datetime import datetime, timedelta\n'), ((11255, 11267), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (11264, 11267), False, 'from datetime import datetime, timedelta\n'), ((12584, 12598), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (12596, 12598), False, 'from datetime import datetime, timedelta\n'), ((12601, 12614), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (12610, 12614), False, 'from datetime import datetime, timedelta\n'), ((12710, 12724), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (12722, 12724), False, 'from datetime import datetime, timedelta\n'), ((12727, 12739), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (12736, 12739), False, 'from datetime import datetime, timedelta\n'), ((14076, 14090), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14088, 14090), False, 'from datetime import datetime, timedelta\n'), ((14093, 14106), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (14102, 14106), False, 'from datetime import datetime, timedelta\n'), ((14202, 14216), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14214, 14216), False, 'from datetime import datetime, timedelta\n'), ((14219, 14231), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (14228, 14231), False, 'from datetime import datetime, timedelta\n'), ((15555, 15569), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (15567, 15569), False, 'from datetime import datetime, timedelta\n'), ((15572, 15585), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (15581, 15585), False, 'from datetime import datetime, timedelta\n'), ((15681, 15695), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (15693, 15695), False, 'from datetime import datetime, timedelta\n'), ((15698, 15710), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (15707, 15710), False, 'from datetime import datetime, timedelta\n'), ((17060, 17074), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (17072, 17074), False, 'from datetime import datetime, timedelta\n'), ((17077, 17090), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (17086, 17090), False, 'from datetime import datetime, timedelta\n'), ((17186, 17200), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (17198, 17200), False, 
'from datetime import datetime, timedelta\n'), ((17203, 17215), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (17212, 17215), False, 'from datetime import datetime, timedelta\n'), ((18539, 18553), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18551, 18553), False, 'from datetime import datetime, timedelta\n'), ((18556, 18572), 'datetime.timedelta', 'timedelta', (['(10000)'], {}), '(10000)\n', (18565, 18572), False, 'from datetime import datetime, timedelta\n'), ((18683, 18697), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18695, 18697), False, 'from datetime import datetime, timedelta\n'), ((18700, 18712), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (18709, 18712), False, 'from datetime import datetime, timedelta\n'), ((18818, 18832), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18830, 18832), False, 'from datetime import datetime, timedelta\n'), ((18835, 18849), 'datetime.timedelta', 'timedelta', (['(100)'], {}), '(100)\n', (18844, 18849), False, 'from datetime import datetime, timedelta\n'), ((19791, 19805), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (19803, 19805), False, 'from datetime import datetime, timedelta\n'), ((19808, 19821), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (19817, 19821), False, 'from datetime import datetime, timedelta\n'), ((19931, 19945), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (19943, 19945), False, 'from datetime import datetime, timedelta\n'), ((19948, 19960), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (19957, 19960), False, 'from datetime import datetime, timedelta\n'), ((21683, 21697), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (21695, 21697), False, 'from datetime import datetime, timedelta\n'), ((21700, 21713), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (21709, 21713), False, 'from datetime import datetime, timedelta\n'), ((21809, 21823), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (21821, 21823), False, 'from datetime import datetime, timedelta\n'), ((21826, 21838), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (21835, 21838), False, 'from datetime import datetime, timedelta\n'), ((23007, 23021), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (23019, 23021), False, 'from datetime import datetime, timedelta\n'), ((23024, 23037), 'datetime.timedelta', 'timedelta', (['(10)'], {}), '(10)\n', (23033, 23037), False, 'from datetime import datetime, timedelta\n'), ((23133, 23147), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (23145, 23147), False, 'from datetime import datetime, timedelta\n'), ((23150, 23162), 'datetime.timedelta', 'timedelta', (['(1)'], {}), '(1)\n', (23159, 23162), False, 'from datetime import datetime, timedelta\n'), ((29938, 29952), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (29950, 29952), False, 'from datetime import datetime, timedelta\n'), ((29955, 29972), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (29964, 29972), False, 'from datetime import datetime, timedelta\n'), ((30090, 30104), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (30102, 30104), False, 'from datetime import datetime, timedelta\n'), ((30107, 30124), 'datetime.timedelta', 'timedelta', ([], {'days': '(2)'}), '(days=2)\n', (30116, 30124), False, 'from datetime import datetime, timedelta\n'), ((30253, 30267), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (30265, 30267), False, 'from 
datetime import datetime, timedelta\n'), ((30270, 30287), 'datetime.timedelta', 'timedelta', ([], {'days': '(3)'}), '(days=3)\n', (30279, 30287), False, 'from datetime import datetime, timedelta\n'), ((30430, 30444), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (30442, 30444), False, 'from datetime import datetime, timedelta\n'), ((30447, 30464), 'datetime.timedelta', 'timedelta', ([], {'days': '(4)'}), '(days=4)\n', (30456, 30464), False, 'from datetime import datetime, timedelta\n'), ((30584, 30598), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (30596, 30598), False, 'from datetime import datetime, timedelta\n'), ((30601, 30618), 'datetime.timedelta', 'timedelta', ([], {'days': '(5)'}), '(days=5)\n', (30610, 30618), False, 'from datetime import datetime, timedelta\n')]
|
import pytest
def test_get_games_dataset_subset():
from relevanceai.utils.datasets import get_games_dataset
assert len(get_games_dataset(number_of_documents=100)) == 100
|
[
"relevanceai.utils.datasets.get_games_dataset"
] |
[((130, 172), 'relevanceai.utils.datasets.get_games_dataset', 'get_games_dataset', ([], {'number_of_documents': '(100)'}), '(number_of_documents=100)\n', (147, 172), False, 'from relevanceai.utils.datasets import get_games_dataset\n')]
|
import requests
from datetime import datetime, timedelta
import jwt
import base64
secret = ''
# The secret provided by ROK is b64 encoded, we need to decode it for jwt
notb64_secret = base64.b64decode(secret)
# Here we provide the registered claims and the claims we agreed on with ROK
data = {
'aud': 'Rok-solution',
'iss': '',
'exp': str(int((datetime.now() + timedelta(minutes=10)).timestamp())),
'email': '',
'iat': str(int(datetime.now().timestamp())),
'nbf': str(int(datetime.now().timestamp())),
}
token = jwt.encode(data, notb64_secret, algorithm='HS256')
response = requests.post('https://demo.rok-solution.com/graphql',
data='{\'query\':\'\'}',
headers={'DatabaseName': '', 'JwtString': token,
'Accept': 'application/json', 'Content-Type': 'application/json'}).text
print(response)
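# Sanity check (a sketch, not part of the original flow): decoding the token
# locally with the same secret should round-trip the claims. PyJWT requires
# 'audience' to be passed when the token carries an 'aud' claim.
decoded = jwt.decode(token, notb64_secret, algorithms=['HS256'], audience='Rok-solution')
print(decoded)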
|
[
"jwt.encode",
"base64.b64decode",
"datetime.timedelta",
"requests.post",
"datetime.datetime.now"
] |
[((185, 209), 'base64.b64decode', 'base64.b64decode', (['secret'], {}), '(secret)\n', (201, 209), False, 'import base64\n'), ((540, 590), 'jwt.encode', 'jwt.encode', (['data', 'notb64_secret'], {'algorithm': '"""HS256"""'}), "(data, notb64_secret, algorithm='HS256')\n", (550, 590), False, 'import jwt\n'), ((602, 801), 'requests.post', 'requests.post', (['"""https://demo.rok-solution.com/graphql"""'], {'data': '"""{\'query\':\'\'}"""', 'headers': "{'DatabaseName': '', 'JwtString': token, 'Accept': 'application/json',\n 'Content-Type': 'application/json'}"}), '(\'https://demo.rok-solution.com/graphql\', data="{\'query\':\'\'}",\n headers={\'DatabaseName\': \'\', \'JwtString\': token, \'Accept\':\n \'application/json\', \'Content-Type\': \'application/json\'})\n', (615, 801), False, 'import requests\n'), ((450, 464), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (462, 464), False, 'from datetime import datetime, timedelta\n'), ((499, 513), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (511, 513), False, 'from datetime import datetime, timedelta\n'), ((359, 373), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (371, 373), False, 'from datetime import datetime, timedelta\n'), ((376, 397), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (385, 397), False, 'from datetime import datetime, timedelta\n')]
|
import sys
import os.path as op
import pyxnat
from . import skip_if_no_network
_modulepath = op.dirname(op.abspath(pyxnat.__file__))
dd = op.join(op.split(_modulepath)[0], 'bin')
sys.path.append(dd)
@skip_if_no_network
def test_001_sessionmirror():
from sessionmirror import create_parser, main
parser = create_parser()
cfg = op.join(op.dirname(op.abspath(__file__)), 'central.cfg')
central = pyxnat.Interface(config=cfg)
e = 'CENTRAL_E74609'
args = ['--h1', cfg, '--h2', cfg, '-e', e, '-p', 'nosetests3']
args = parser.parse_args(args)
main(args)
e1 = central.array.experiments(experiment_id=e,
columns=['subject_label']).data[0]
@skip_if_no_network
def test_002_deletesubject():
print('DELETING')
cfg = op.join(op.dirname(op.abspath(__file__)), 'central.cfg')
central = pyxnat.Interface(config=cfg)
e = 'CENTRAL_E74609'
e0 = central.array.experiments(experiment_id=e,
columns=['subject_label', 'label']).data[0]
subject_label = e0['subject_label']
experiment_label = e0['label']
e1 = central.array.experiments(project_id='nosetests3',
subject_label=subject_label,
experiment_label=experiment_label,
columns=['subject_id']).data[0]
e2 = central.select.project('nosetests3').subject(e1['subject_ID']).experiment(e1['ID'])
assert(e2.exists())
e2.delete()
assert(not e2.exists())
|
[
"sys.path.append",
"sessionmirror.create_parser",
"os.path.abspath",
"pyxnat.Interface",
"sessionmirror.main",
"os.path.split"
] |
[((181, 200), 'sys.path.append', 'sys.path.append', (['dd'], {}), '(dd)\n', (196, 200), False, 'import sys\n'), ((105, 132), 'os.path.abspath', 'op.abspath', (['pyxnat.__file__'], {}), '(pyxnat.__file__)\n', (115, 132), True, 'import os.path as op\n'), ((315, 330), 'sessionmirror.create_parser', 'create_parser', ([], {}), '()\n', (328, 330), False, 'from sessionmirror import create_parser, main\n'), ((412, 440), 'pyxnat.Interface', 'pyxnat.Interface', ([], {'config': 'cfg'}), '(config=cfg)\n', (428, 440), False, 'import pyxnat\n'), ((572, 582), 'sessionmirror.main', 'main', (['args'], {}), '(args)\n', (576, 582), False, 'from sessionmirror import create_parser, main\n'), ((858, 886), 'pyxnat.Interface', 'pyxnat.Interface', ([], {'config': 'cfg'}), '(config=cfg)\n', (874, 886), False, 'import pyxnat\n'), ((148, 169), 'os.path.split', 'op.split', (['_modulepath'], {}), '(_modulepath)\n', (156, 169), True, 'import os.path as op\n'), ((360, 380), 'os.path.abspath', 'op.abspath', (['__file__'], {}), '(__file__)\n', (370, 380), True, 'import os.path as op\n'), ((806, 826), 'os.path.abspath', 'op.abspath', (['__file__'], {}), '(__file__)\n', (816, 826), True, 'import os.path as op\n')]
|
# Copyright (c) 2008 Simplistix Ltd
# See license.txt for license details.
from logging import Handler, ERROR, getLogger
class ErrorHandler(Handler):
fired = False
    def __init__(self, level=ERROR, logger='', install=True):
        Handler.__init__(self)
        self.level = level
        self.logger = logger
if install:
self.install()
def install(self):
self.setLevel(self.level)
getLogger(self.logger).addHandler(self)
def emit(self, record):
        self.fired = True
    def reset(self):
        self.fired = False
def remove(self):
getLogger().removeHandler(self)
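if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): install the
    # handler on the root logger, fire an error, and check that it was seen.
    handler = ErrorHandler()
    getLogger('demo').error('something went wrong')
    assert handler.fired
    handler.reset()
    handler.remove()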
|
[
"logging.Handler.__init__",
"logging.getLogger"
] |
[((241, 263), 'logging.Handler.__init__', 'Handler.__init__', (['self'], {}), '(self)\n', (257, 263), False, 'from logging import Handler, ERROR, getLogger\n'), ((429, 451), 'logging.getLogger', 'getLogger', (['self.logger'], {}), '(self.logger)\n', (438, 451), False, 'from logging import Handler, ERROR, getLogger\n'), ((608, 619), 'logging.getLogger', 'getLogger', ([], {}), '()\n', (617, 619), False, 'from logging import Handler, ERROR, getLogger\n')]
|
import os
from django.core.exceptions import ValidationError
from django.shortcuts import get_object_or_404
from django.http import JsonResponse, HttpResponse
from django.views import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.db import transaction
from django.conf import settings
from .common import get_file_hash, remove_saved_files_and_empty_dirs, unmarkdown, trans_markdown_to_html_and_bleach, bleach_clean
from .forms import PostFormExceptFiles
from .validators import FileValidator
from accounts.models import Account
from accounts.serializers import AccountSerializerInPost
from .models import Post, PostFile
from .serializers import PostDetailSerializer, PostFileDetailSerializerForNonAnonymousUser
from board.models import Board
from board.serializers import BoardCategorySerializer
class PostCreateView(View):
@method_decorator(login_required, name="dispatch")
def post(self, request):
success_message = "글 생성에 성공했습니다."
error_messages = {
'wrong_request' : "잘못된 요청입니다. 다시 시도해주세요.",
'not_permitted' : "글 생성 권한이 없습니다. 승인된 회원 계정으로 로그인 후 다시 시도해주세요.",
'file_max_size_over' : f"첨부한 파일(들)의 총 용량이 글 하나당 저장 가능한 최대 용량({settings.MAX_FILE_UPLOAD_SIZE_TO_UNIT_NOTATION})을 넘어갑니다.",
'invalid_board_id' : "입력한 보드가 존재하지 않습니다. 확인 후 다시 시도해주세요.",
'not_exist_account' : "작성자의 회원 정보가 존재하지 않습니다. 관리자에게 연락해주세요.",
'fail_create_post' : "글 생성에 실패했습니다. 확인 후 다시 시도해주세요."
}
data = request.POST
if data is None:
return JsonResponse({"message": error_messages['wrong_request']}, status=400)
if not request.user.is_active:
return JsonResponse({"message":error_messages['not_permitted']}, status=403)
form = PostFormExceptFiles(data)
if not form.is_valid():
return JsonResponse({"message":form.errors}, status=400)
board = Board.objects.filter(id=form.cleaned_data['board_id'])
if not board.exists():
return JsonResponse({"message":error_messages['invalid_board_id']}, status=404)
account = Account.objects.filter(user_id=request.user.id)
if not account.exists():
return JsonResponse({"message":error_messages['not_exist_account']}, status=404)
total_file_size = 0
for file in request.FILES.getlist('files'):
total_file_size += file.size
fv = FileValidator(allowed_extensions=settings.ALLOWED_FILE_EXTENTIONS)
try:
if total_file_size > settings.MAX_FILE_UPLOAD_SIZE:
raise ValidationError(message=error_messages['file_max_size_over'])
fv(file)
except ValidationError as e:
return JsonResponse({"message": e.message}, status=400)
md_content = form.cleaned_data['md_content']
html_text = trans_markdown_to_html_and_bleach(md_content)
plain_text = unmarkdown(md_content)
background_image = settings.DEFAULT_IMAGE_RELATIVE_PATH if form.cleaned_data['background_image_url'] is None else form.cleaned_data['background_image_url']
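        # Track saved file paths so they can be removed if post creation fails.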
savedFilePaths = []
try:
with transaction.atomic():
post = Post.objects.create(
title = bleach_clean(form.cleaned_data['title']),
content = html_text,
md_content = md_content,
plain_content = plain_text,
preview_content = plain_text[:128],
background_image_url = background_image,
board_id = board[0],
author_id = account[0],
hits = 0
)
savedFilePaths.append(post.background_image_real_relative_path)
for f in request.FILES.getlist('files'):
ff = f.open()
file_hash = get_file_hash(ff)
file = PostFile.objects.create(
post_id = post,
title = f.name,
file = f,
hash = file_hash
)
ff.close()
savedFilePaths.append(file.file.url[1:])
for tag in form.cleaned_data['tags'].split(','):
post.tags.add(tag.strip())
except:
remove_saved_files_and_empty_dirs(savedFilePaths)
return JsonResponse({"message":error_messages['fail_create_post']}, status=406)
return JsonResponse({"message":success_message}, status=200)
def get(self, request):
post = Post.objects.values()
# post_tag = ', '.join(o.name for o in Post.tags.all())
# print(post_tag)
postfile = PostFile.objects.values()
return JsonResponse({"list": list(post), "files": list(postfile)}, status=200)
class PostDetailView(View):
@method_decorator(login_required, name="dispatch")
def post(self, request, post_id):
success_message = "%(before_post_title)s 글을 수정하는데 성공했습니다"
error_messages = {
'wrong_request' : "잘못된 요청입니다. 다시 시도해주세요.",
'not_exist_post' : "수정하려는 글이 존재하지 않습니다. 확인 후 다시 시도해주세요.",
'not_permitted' : "해당 글 수정 권한이 없습니다. 작성자나 관리자 계정으로 로그인 후 다시 요청해주세요.",
'file_max_size_over' : f"첨부한 파일(들)의 총 용량이 글 하나당 저장 가능한 최대 용량({settings.MAX_FILE_UPLOAD_SIZE_TO_UNIT_NOTATION})을 넘어갑니다.",
'invalid_board_id' : "입력한 보드가 존재하지 않습니다. 확인 후 다시 시도해주세요.",
'fail_update_post' : "'%(before_post_title)s' 글을 수정하는데 실패했습니다. 확인 후 다시 시도해주세요."
}
data = request.POST
if data is None:
return JsonResponse({"message":error_messages['wrong_request']}, status=400)
post = Post.objects.filter(id=post_id)
if not post.exists():
return JsonResponse({"message":error_messages['not_exist_post']}, status=404)
post = post[0]
if post.author_id.id != request.user.id and not request.user.is_superuser:
return JsonResponse({"message":error_messages['not_permitted']}, status=401)
form = PostFormExceptFiles(data)
if not form.is_valid():
return JsonResponse({"message":form.errors}, status=400)
board = Board.objects.filter(id=form.cleaned_data['board_id'])
if not board.exists():
return JsonResponse({"message":error_messages['invalid_board_id']}, status=404)
total_file_size = 0
for file in request.FILES.getlist('files'):
total_file_size += file.size
fv = FileValidator(allowed_extensions=settings.ALLOWED_FILE_EXTENTIONS)
try:
if total_file_size > settings.MAX_FILE_UPLOAD_SIZE:
raise ValidationError(message=error_messages['file_max_size_over'])
fv(file)
except ValidationError as e:
return JsonResponse({"message": e.message}, status=400)
md_content = form.cleaned_data['md_content']
html_text = trans_markdown_to_html_and_bleach(md_content)
plain_text = unmarkdown(md_content)
before_post_title = post.title
post.title = bleach_clean(form.cleaned_data['title'])
post.content = html_text
post.md_content = md_content
post.plain_content = plain_text
post.preview_content = plain_text[:128]
post.board_id = board[0]
removeFilePaths = []
removeFilePaths.append(post.background_image_real_relative_path)
post.background_image_url = settings.DEFAULT_IMAGE_RELATIVE_PATH if form.cleaned_data['background_image_url'] is None else form.cleaned_data['background_image_url']
savedFilePaths = []
try:
with transaction.atomic():
post.save()
files = PostFile.objects.filter(post_id=post_id)
fileList = list(files)
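                # Deduplicate by content hash: re-uploaded identical files are
                # kept; files no longer present are removed after the loop.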
for f in request.FILES.getlist('files'):
isItSameFile = False
try:
ff = f.open()
file_hash = get_file_hash(ff)
for file in files:
if file.hash == file_hash:
isItSameFile = True
fileList.remove(file)
ff.close()
break
if isItSameFile:
continue
fileInstance = PostFile.objects.create(
post_id = post,
title = f.name,
file = f,
hash = file_hash
)
savedFilePaths.append(fileInstance.file.url[1:])
finally:
if not ff.closed:
ff.close()
for file in fileList:
removeFilePaths.append(file.file.url[1:])
file.delete()
remove_saved_files_and_empty_dirs(removeFilePaths)
post.tags.clear()
for tag in data['tags'].split(','):
post.tags.add(tag.strip())
except:
remove_saved_files_and_empty_dirs(savedFilePaths)
return JsonResponse({"message" : error_messages['fail_update_post'] % {"before_post_title":before_post_title}}, status=406)
return JsonResponse({"message":success_message % {"before_post_title":before_post_title}}, status=200)
def get(self, request, post_id):
        error_messages = {
            "not_exist_post" : "The post does not exist.",
            "data_load_fail" : "Failed to load the post. Please try again."
        }
post = Post.objects.filter(id=post_id)
if not post.exists():
return JsonResponse({"message":error_messages['not_exist_post']}, status=404)
post = post[0]
response_data = PostDetailSerializer(post).data
try:
with transaction.atomic():
if request.user.is_active:
fileInstances = PostFile.objects.filter(post_id=post_id)
files = [PostFileDetailSerializerForNonAnonymousUser(file).data for file in fileInstances]
else:
fileInstances = PostFile.objects.filter(post_id=post_id).values('title')
files = list(fileInstances)
tags = [x.name for x in post.tags.all()]
post.hits += 1
post.save(update_fields=['hits'])
except:
return JsonResponse({"message":error_messages['data_load_fail']}, status=406)
response_data['tags'] = tags
response_data['files'] = files
response_data['author'] = AccountSerializerInPost(post.author_id).data
response_data['board'] = BoardCategorySerializer(post.board_id).data
return JsonResponse(response_data, status=200)
@method_decorator(login_required, name="dispatch")
def delete(self, request, post_id):
success_message = "'%(post_title)s' 글이 정상적으로 삭제되었습니다"
error_messages = {
"not_exist_post" : "해당 글이 존재하지 않습니다. 확인 후 다시 시도해주세요.",
"not_permitted" : "해당 글 삭제 권한이 없습니다. 작성자나 관리자 계정으로 로그인 후 다시 시도해주세요.",
"delete_fail" : "'%(post_title)s' 글을 삭제하는데 실패했습니다. 다시 시도해주세요."
}
post = Post.objects.filter(id=post_id)
if not post.exists():
return JsonResponse({"message" : error_messages['not_exist_post']}, status=404)
post = post[0]
if post.author_id.id != request.user.id and not request.user.is_superuser:
return JsonResponse({"message" : error_messages['not_permitted']}, status=403)
try:
with transaction.atomic():
savedFilePaths = []
savedFilePaths.append(post.background_image_real_relative_path)
postFiles = PostFile.objects.filter(post_id=post_id)
for file in postFiles:
savedFilePaths.append(file.file.url[1:])
remove_saved_files_and_empty_dirs(savedFilePaths)
post.tags.clear()
post.delete()
except:
return JsonResponse({"message":error_messages['delete_fail'] % {"post_title":post.title}}, status=406)
return JsonResponse({"message":success_message % {"post_title":post.title}}, status=200)
class PostFileDownloadView(View):
@method_decorator(login_required, name="dispatch")
def get(self, request, post_id, file_name):
        error_messages = {
            "not_permitted" : "You do not have permission to download files. Please log in with a verified member account and try again.",
            "not_exist_file_in_post" : "No files are attached to this post. Please check and try again.",
            "not_exist_file_in_path" : "The file does not exist at the given path."
        }
if not request.user.is_active:
return JsonResponse({"message" : error_messages['not_permitted']}, status=403)
postFiles = PostFile.objects.filter(post_id=post_id)
if not postFiles.exists():
return JsonResponse({"message" : error_messages['not_exist_file_in_post']}, status=404)
for pf in postFiles:
if pf.real_file_name == file_name:
filePath = pf.file.path
if os.path.exists(filePath):
try:
with open(filePath, 'rb') as f:
response = HttpResponse(f.read(), content_type="application/force-download")
response['Content-Disposition'] = f'inline; filename={pf.title}'
return response
except:
return JsonResponse({"message" : error_messages['not_exist_file_in_path']}, status=404)
|
[
"board.models.Board.objects.filter",
"django.utils.decorators.method_decorator",
"django.core.exceptions.ValidationError",
"os.path.exists",
"django.http.JsonResponse",
"accounts.serializers.AccountSerializerInPost",
"board.serializers.BoardCategorySerializer",
"accounts.models.Account.objects.filter",
"django.db.transaction.atomic"
] |
[((918, 967), 'django.utils.decorators.method_decorator', 'method_decorator', (['login_required'], {'name': '"""dispatch"""'}), "(login_required, name='dispatch')\n", (934, 967), False, 'from django.utils.decorators import method_decorator\n'), ((5029, 5078), 'django.utils.decorators.method_decorator', 'method_decorator', (['login_required'], {'name': '"""dispatch"""'}), "(login_required, name='dispatch')\n", (5045, 5078), False, 'from django.utils.decorators import method_decorator\n'), ((11187, 11236), 'django.utils.decorators.method_decorator', 'method_decorator', (['login_required'], {'name': '"""dispatch"""'}), "(login_required, name='dispatch')\n", (11203, 11236), False, 'from django.utils.decorators import method_decorator\n'), ((12747, 12796), 'django.utils.decorators.method_decorator', 'method_decorator', (['login_required'], {'name': '"""dispatch"""'}), "(login_required, name='dispatch')\n", (12763, 12796), False, 'from django.utils.decorators import method_decorator\n'), ((1992, 2046), 'board.models.Board.objects.filter', 'Board.objects.filter', ([], {'id': "form.cleaned_data['board_id']"}), "(id=form.cleaned_data['board_id'])\n", (2012, 2046), False, 'from board.models import Board\n'), ((2188, 2235), 'accounts.models.Account.objects.filter', 'Account.objects.filter', ([], {'user_id': 'request.user.id'}), '(user_id=request.user.id)\n', (2210, 2235), False, 'from accounts.models import Account\n'), ((4652, 4706), 'django.http.JsonResponse', 'JsonResponse', (["{'message': success_message}"], {'status': '(200)'}), "({'message': success_message}, status=200)\n", (4664, 4706), False, 'from django.http import JsonResponse, HttpResponse\n'), ((6398, 6452), 'board.models.Board.objects.filter', 'Board.objects.filter', ([], {'id': "form.cleaned_data['board_id']"}), "(id=form.cleaned_data['board_id'])\n", (6418, 6452), False, 'from board.models import Board\n'), ((9669, 9770), 'django.http.JsonResponse', 'JsonResponse', (["{'message': success_message % {'before_post_title': before_post_title}}"], {'status': '(200)'}), "({'message': success_message % {'before_post_title':\n before_post_title}}, status=200)\n", (9681, 9770), False, 'from django.http import JsonResponse, HttpResponse\n'), ((11141, 11180), 'django.http.JsonResponse', 'JsonResponse', (['response_data'], {'status': '(200)'}), '(response_data, status=200)\n', (11153, 11180), False, 'from django.http import JsonResponse, HttpResponse\n'), ((12624, 12711), 'django.http.JsonResponse', 'JsonResponse', (["{'message': success_message % {'post_title': post.title}}"], {'status': '(200)'}), "({'message': success_message % {'post_title': post.title}},\n status=200)\n", (12636, 12711), False, 'from django.http import JsonResponse, HttpResponse\n'), ((1633, 1703), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['wrong_request']}"], {'status': '(400)'}), "({'message': error_messages['wrong_request']}, status=400)\n", (1645, 1703), False, 'from django.http import JsonResponse, HttpResponse\n'), ((1763, 1833), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_permitted']}"], {'status': '(403)'}), "({'message': error_messages['not_permitted']}, status=403)\n", (1775, 1833), False, 'from django.http import JsonResponse, HttpResponse\n'), ((1926, 1976), 'django.http.JsonResponse', 'JsonResponse', (["{'message': form.errors}"], {'status': '(400)'}), "({'message': form.errors}, status=400)\n", (1938, 1976), False, 'from django.http import JsonResponse, HttpResponse\n'), ((2097, 2170), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['invalid_board_id']}"], {'status': '(404)'}), "({'message': error_messages['invalid_board_id']}, status=404)\n", (2109, 2170), False, 'from django.http import JsonResponse, HttpResponse\n'), ((2288, 2362), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_exist_account']}"], {'status': '(404)'}), "({'message': error_messages['not_exist_account']}, status=404)\n", (2300, 2362), False, 'from django.http import JsonResponse, HttpResponse\n'), ((5797, 5867), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['wrong_request']}"], {'status': '(400)'}), "({'message': error_messages['wrong_request']}, status=400)\n", (5809, 5867), False, 'from django.http import JsonResponse, HttpResponse\n'), ((5972, 6043), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_exist_post']}"], {'status': '(404)'}), "({'message': error_messages['not_exist_post']}, status=404)\n", (5984, 6043), False, 'from django.http import JsonResponse, HttpResponse\n'), ((6169, 6239), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_permitted']}"], {'status': '(401)'}), "({'message': error_messages['not_permitted']}, status=401)\n", (6181, 6239), False, 'from django.http import JsonResponse, HttpResponse\n'), ((6332, 6382), 'django.http.JsonResponse', 'JsonResponse', (["{'message': form.errors}"], {'status': '(400)'}), "({'message': form.errors}, status=400)\n", (6344, 6382), False, 'from django.http import JsonResponse, HttpResponse\n'), ((6503, 6576), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['invalid_board_id']}"], {'status': '(404)'}), "({'message': error_messages['invalid_board_id']}, status=404)\n", (6515, 6576), False, 'from django.http import JsonResponse, HttpResponse\n'), ((10049, 10120), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_exist_post']}"], {'status': '(404)'}), "({'message': error_messages['not_exist_post']}, status=404)\n", (10061, 10120), False, 'from django.http import JsonResponse, HttpResponse\n'), ((11003, 11042), 'accounts.serializers.AccountSerializerInPost', 'AccountSerializerInPost', (['post.author_id'], {}), '(post.author_id)\n', (11026, 11042), False, 'from accounts.serializers import AccountSerializerInPost\n'), ((11081, 11119), 'board.serializers.BoardCategorySerializer', 'BoardCategorySerializer', (['post.board_id'], {}), '(post.board_id)\n', (11104, 11119), False, 'from board.serializers import BoardCategorySerializer\n'), ((11705, 11776), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_exist_post']}"], {'status': '(404)'}), "({'message': error_messages['not_exist_post']}, status=404)\n", (11717, 11776), False, 'from django.http import JsonResponse, HttpResponse\n'), ((11904, 11974), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_permitted']}"], {'status': '(403)'}), "({'message': error_messages['not_permitted']}, status=403)\n", (11916, 11974), False, 'from django.http import JsonResponse, HttpResponse\n'), ((13162, 13232), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_permitted']}"], {'status': '(403)'}), "({'message': error_messages['not_permitted']}, status=403)\n", (13174, 13232), False, 'from django.http import JsonResponse, HttpResponse\n'), ((13350, 13429), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_exist_file_in_post']}"], {'status': '(404)'}), "({'message': error_messages['not_exist_file_in_post']}, status=404)\n", (13362, 13429), False, 'from django.http import JsonResponse, HttpResponse\n'), ((3275, 3295), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (3293, 3295), False, 'from django.db import transaction\n'), ((4564, 4637), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['fail_create_post']}"], {'status': '(406)'}), "({'message': error_messages['fail_create_post']}, status=406)\n", (4576, 4637), False, 'from django.http import JsonResponse, HttpResponse\n'), ((7895, 7915), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (7913, 7915), False, 'from django.db import transaction\n'), ((9537, 9658), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['fail_update_post'] % {'before_post_title':\n before_post_title}}"], {'status': '(406)'}), "({'message': error_messages['fail_update_post'] % {\n 'before_post_title': before_post_title}}, status=406)\n", (9549, 9658), False, 'from django.http import JsonResponse, HttpResponse\n'), ((10230, 10250), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (10248, 10250), False, 'from django.db import transaction\n'), ((10821, 10892), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['data_load_fail']}"], {'status': '(406)'}), "({'message': error_messages['data_load_fail']}, status=406)\n", (10833, 10892), False, 'from django.http import JsonResponse, HttpResponse\n'), ((12007, 12027), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (12025, 12027), False, 'from django.db import transaction\n'), ((12513, 12614), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['delete_fail'] % {'post_title':\n post.title}}"], {'status': '(406)'}), "({'message': error_messages['delete_fail'] % {'post_title':\n post.title}}, status=406)\n", (12525, 12614), False, 'from django.http import JsonResponse, HttpResponse\n'), ((13566, 13590), 'os.path.exists', 'os.path.exists', (['filePath'], {}), '(filePath)\n', (13580, 13590), False, 'import os\n'), ((2679, 2740), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'message': "error_messages['file_max_size_over']"}), "(message=error_messages['file_max_size_over'])\n", (2694, 2740), False, 'from django.core.exceptions import ValidationError\n'), ((2830, 2878), 'django.http.JsonResponse', 'JsonResponse', (["{'message': e.message}"], {'status': '(400)'}), "({'message': e.message}, status=400)\n", (2842, 2878), False, 'from django.http import JsonResponse, HttpResponse\n'), ((6893, 6954), 'django.core.exceptions.ValidationError', 'ValidationError', ([], {'message': "error_messages['file_max_size_over']"}), "(message=error_messages['file_max_size_over'])\n", (6908, 6954), False, 'from django.core.exceptions import ValidationError\n'), ((7044, 7092), 'django.http.JsonResponse', 'JsonResponse', (["{'message': e.message}"], {'status': '(400)'}), "({'message': e.message}, status=400)\n", (7056, 7092), False, 'from django.http import JsonResponse, HttpResponse\n'), ((13974, 14053), 'django.http.JsonResponse', 'JsonResponse', (["{'message': error_messages['not_exist_file_in_path']}"], {'status': '(404)'}), "({'message': error_messages['not_exist_file_in_path']}, status=404)\n", (13986, 14053), False, 'from django.http import JsonResponse, HttpResponse\n')]
|
#!/usr/bin/python
# Copyright (C) 2010-2013 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ProtoRPC message class definitions for TicTicToe API."""
from protorpc import messages
class BoardMessage(messages.Message):
"""ProtoRPC message definition to represent a board."""
state = messages.StringField(1, required=True)
class ScoresListRequest(messages.Message):
"""ProtoRPC message definition to represent a scores query."""
limit = messages.IntegerField(1, default=10)
class Order(messages.Enum):
WHEN = 1
TEXT = 2
order = messages.EnumField(Order, 2, default=Order.WHEN)
class ScoreRequestMessage(messages.Message):
"""ProtoRPC message definition to represent a score to be inserted."""
outcome = messages.StringField(1, required=True)
class ScoreResponseMessage(messages.Message):
"""ProtoRPC message definition to represent a score that is stored."""
id = messages.IntegerField(1)
outcome = messages.StringField(2)
played = messages.StringField(3)
class ScoresListResponse(messages.Message):
"""ProtoRPC message definition to represent a list of stored scores."""
items = messages.MessageField(ScoreResponseMessage, 1, repeated=True)
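if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): required fields
    # must be set before a message validates.
    board = BoardMessage(state='---------')
    board.check_initialized()  # raises ValidationError if 'state' were unset
    scores = ScoresListRequest(limit=5, order=ScoresListRequest.Order.TEXT)
    print(scores.limit, scores.order)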
|
[
"protorpc.messages.MessageField",
"protorpc.messages.EnumField",
"protorpc.messages.IntegerField",
"protorpc.messages.StringField"
] |
[((809, 847), 'protorpc.messages.StringField', 'messages.StringField', (['(1)'], {'required': '(True)'}), '(1, required=True)\n', (829, 847), False, 'from protorpc import messages\n'), ((972, 1008), 'protorpc.messages.IntegerField', 'messages.IntegerField', (['(1)'], {'default': '(10)'}), '(1, default=10)\n', (993, 1008), False, 'from protorpc import messages\n'), ((1087, 1135), 'protorpc.messages.EnumField', 'messages.EnumField', (['Order', '(2)'], {'default': 'Order.WHEN'}), '(Order, 2, default=Order.WHEN)\n', (1105, 1135), False, 'from protorpc import messages\n'), ((1272, 1310), 'protorpc.messages.StringField', 'messages.StringField', (['(1)'], {'required': '(True)'}), '(1, required=True)\n', (1292, 1310), False, 'from protorpc import messages\n'), ((1443, 1467), 'protorpc.messages.IntegerField', 'messages.IntegerField', (['(1)'], {}), '(1)\n', (1464, 1467), False, 'from protorpc import messages\n'), ((1482, 1505), 'protorpc.messages.StringField', 'messages.StringField', (['(2)'], {}), '(2)\n', (1502, 1505), False, 'from protorpc import messages\n'), ((1519, 1542), 'protorpc.messages.StringField', 'messages.StringField', (['(3)'], {}), '(3)\n', (1539, 1542), False, 'from protorpc import messages\n'), ((1677, 1738), 'protorpc.messages.MessageField', 'messages.MessageField', (['ScoreResponseMessage', '(1)'], {'repeated': '(True)'}), '(ScoreResponseMessage, 1, repeated=True)\n', (1698, 1738), False, 'from protorpc import messages\n')]
|
# -*- coding: utf-8 -*-
import logging
import platform
from enzi.backend.backend import *
from enzi.backend.ies import IES
from enzi.backend.questa import Questa
from enzi.backend.vivado import Vivado
__all__ = ['KnownBackends', 'Questa', 'IES', 'Vivado',
'Backend', 'BackendCallback', 'value_str_filter']
logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)
cur_system = platform.system()
class KnownBackends(object):
"""
Factory class for backends.
Currently, the available backends are: ies.
    TODO: more backends may be added, if we get access to use them.
"""
def __init__(self):
known_backends = Backend.__subclasses__()
def f(x): return (x.__name__.lower(), x)
def g(x): return cur_system in x[1].supported_system
self.allow_backends = dict(map(f, known_backends))
self.known_backends = dict(filter(g, self.allow_backends.items()))
# hard code 'vsim' to 'questa'
self.known_backends['vsim'] = self.known_backends['questa']
self.allow_backends['vsim'] = self.allow_backends['questa']
def register_backend(self, backend):
"""
register new backend
:param backend: a subclass of Backend
"""
name = backend.__class__.__name__.lower()
if not issubclass(backend.__class__, Backend):
fmt = 'register_backend: backend(class:{}) must be a subclass of Backend'
msg = fmt.format(backend.__class__)
logger.error(msg)
raise ValueError(msg)
self.known_backends[name] = backend
self.allow_backends[name] = backend
def get(self, backend_name, config, work_root):
if not backend_name:
raise RuntimeError('No backend name specified.')
backend_name = backend_name.lower()
if backend_name in self.known_backends:
return self.known_backends[backend_name](config, work_root)
else:
# the given backend name is not in support list.
raise NameError('backend name {} not found'.format(backend_name))
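if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): list the backend
    # names usable on the current platform; get() raises NameError for others.
    kb = KnownBackends()
    print(sorted(kb.known_backends))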
|
[
"platform.system",
"logging.getLogger"
] |
[((330, 357), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (347, 357), False, 'import logging\n'), ((405, 422), 'platform.system', 'platform.system', ([], {}), '()\n', (420, 422), False, 'import platform\n')]
|
"""Test suite for Taxa Tree display module."""
from app.display_modules.display_module_base_test import BaseDisplayModuleTest
from app.display_modules.taxa_tree import TaxaTreeDisplayModule
from app.display_modules.taxa_tree.models import TaxaTreeResult
from app.display_modules.taxa_tree.constants import MODULE_NAME
from app.tool_results.kraken import KrakenResultModule
from app.tool_results.kraken.tests.factory import create_kraken
from app.tool_results.krakenhll import KrakenHLLResultModule
from app.tool_results.krakenhll.tests.factory import create_krakenhll
from app.tool_results.metaphlan2 import Metaphlan2ResultModule
from app.tool_results.metaphlan2.tests.factory import create_metaphlan2
from .factory import generate_random_tree, TaxaTreeFactory
class TestTaxaTreeModule(BaseDisplayModuleTest):
"""Test suite for TaxaTree display module."""
def test_get_taxa_tree(self):
"""Ensure getting a single TaxaTree behaves correctly."""
ttree = TaxaTreeFactory()
self.generic_getter_test(ttree, MODULE_NAME,
verify_fields=('metaphlan2', 'kraken', 'krakenhll'))
def test_add_taxa_tree(self):
"""Ensure TaxaTree model is created correctly."""
kwargs = {
'metaphlan2': generate_random_tree(),
'kraken': generate_random_tree(),
'krakenhll': generate_random_tree(),
}
taxa_tree_result = TaxaTreeResult(**kwargs)
self.generic_adder_test(taxa_tree_result, MODULE_NAME)
def test_run_taxa_tree_sample(self): # pylint: disable=invalid-name
"""Ensure TaxaTree run_sample produces correct results."""
kwargs = {
KrakenResultModule.name(): create_kraken(),
KrakenHLLResultModule.name(): create_krakenhll(),
Metaphlan2ResultModule.name(): create_metaphlan2(),
}
self.generic_run_sample_test(kwargs, TaxaTreeDisplayModule)
|
[
"app.display_modules.taxa_tree.models.TaxaTreeResult",
"app.tool_results.krakenhll.KrakenHLLResultModule.name",
"app.tool_results.kraken.tests.factory.create_kraken",
"app.tool_results.metaphlan2.tests.factory.create_metaphlan2",
"app.tool_results.metaphlan2.Metaphlan2ResultModule.name",
"app.tool_results.krakenhll.tests.factory.create_krakenhll",
"app.tool_results.kraken.KrakenResultModule.name"
] |
[((1433, 1457), 'app.display_modules.taxa_tree.models.TaxaTreeResult', 'TaxaTreeResult', ([], {}), '(**kwargs)\n', (1447, 1457), False, 'from app.display_modules.taxa_tree.models import TaxaTreeResult\n'), ((1693, 1718), 'app.tool_results.kraken.KrakenResultModule.name', 'KrakenResultModule.name', ([], {}), '()\n', (1716, 1718), False, 'from app.tool_results.kraken import KrakenResultModule\n'), ((1749, 1777), 'app.tool_results.krakenhll.KrakenHLLResultModule.name', 'KrakenHLLResultModule.name', ([], {}), '()\n', (1775, 1777), False, 'from app.tool_results.krakenhll import KrakenHLLResultModule\n'), ((1811, 1840), 'app.tool_results.metaphlan2.Metaphlan2ResultModule.name', 'Metaphlan2ResultModule.name', ([], {}), '()\n', (1838, 1840), False, 'from app.tool_results.metaphlan2 import Metaphlan2ResultModule\n'), ((1720, 1735), 'app.tool_results.kraken.tests.factory.create_kraken', 'create_kraken', ([], {}), '()\n', (1733, 1735), False, 'from app.tool_results.kraken.tests.factory import create_kraken\n'), ((1779, 1797), 'app.tool_results.krakenhll.tests.factory.create_krakenhll', 'create_krakenhll', ([], {}), '()\n', (1795, 1797), False, 'from app.tool_results.krakenhll.tests.factory import create_krakenhll\n'), ((1842, 1861), 'app.tool_results.metaphlan2.tests.factory.create_metaphlan2', 'create_metaphlan2', ([], {}), '()\n', (1859, 1861), False, 'from app.tool_results.metaphlan2.tests.factory import create_metaphlan2\n')]
|
"""
.. _tut-fnirs-glm-components:
GLM and Design Matrix Parameters
================================
This tutorial describes the various design choices available when analysing
fNIRS data with a GLM approach.
.. sidebar:: Nilearn
If you use MNE-NIRS to conduct a GLM analysis please cite Nilearn.
This package relies on Nilearn for the underlying computation.
Without Nilearn this would not be possible.
For how to accurately cite Nilearn see:
http://nilearn.github.io/authors.html#citing
There are subtle differences between the GLM analysis procedures
available in the different fNIRS software packages (Homer, NIRS-SPM, etc).
This document aims to clarify the features available for GLM analysis
in the MNE-NIRS software, and demonstrate how you can modify the default
analysis parameters to best suit your experiment.
It also endeavours to motivate some of the design choices
made when designing this software.
Please raise a GitHub issue if there is an analysis design you would
like to use but cannot determine how to achieve with MNE-NIRS.
The MNE-NIRS GLM analysis framework is entirely based on the Nilearn package.
Their excellent software forms the basis of the analysis described in this tutorial.
As such, you may also wish to read
`their documentation <http://nilearn.github.io>`__
to familiarise yourself with different concepts used in MNE-NIRS.
Specifically this tutorial is heavily based on the following Nilearn examples,
but placed within an fNIRS context.
* `Nilearn: Understanding parameters of the first-level model <http://nilearn.github.io/auto_examples/04_glm_first_level/plot_first_level_details.html>`__.
* `Nilearn: Example of hemodynamic response functions <https://nilearn.github.io/auto_examples/04_glm_first_level/plot_hrf.html>`__.
Accordingly, in this tutorial we will access Nilearn functions directly to illustrate
various choices available in your analysis.
However, this is purely for illustration. In practice (see all other tutorials),
MNE-NIRS wraps all required Nilearn functions, so you don't need to access them directly.
.. contents:: Page contents
:local:
:depth: 2
"""
# sphinx_gallery_thumbnail_number = 1
# Authors: <NAME> <<EMAIL>>
#
# License: BSD (3-clause)
# Import common libraries
import os
import numpy as np
import mne
# Import MNE-NIRS processing
from mne_nirs.experimental_design import make_first_level_design_matrix, \
longest_inter_annotation_interval, drift_high_pass
# Import Nilearn
from nilearn.glm import first_level
from nilearn.plotting import plot_design_matrix
# Import Plotting Library
import matplotlib.pyplot as plt
import matplotlib as mpl
# %%
# Haemodynamic Response Function
# ---------------------------------------------------------------------
#
# Various Haemodynamic Response Functions (HRFs) are provided for use
# when analysing your data. A summary of these functions in the context
# of fMRI is provided in the Nilearn tutorial
# `Nilearn: Example of hemodynamic response functions. <https://nilearn.github.io/auto_examples/04_glm_first_level/plot_hrf.html>`__.
# This example borrows heavily from that tutorial but expands the description
# within an fNIRS context.
#
# To illustrate underlying concepts we will use Nilearn functions directly,
# but for analysing actual data you should use the MNE-NIRS
# :func:`mne_nirs.experimental_design.make_first_level_design_matrix`
# wrapper.
# %%
# HRF Model Selection
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# .. sidebar:: FIR Models
#
# MNE-NIRS also supports FIR GLM models.
# See :ref:`MNE-NIRS FIR GLM tutorial <tut-fnirs-fir>`.
#
# Two standard HRF models are provided. The SPM and Glover models.
# These differ in their response dynamics.
# Both are plotted on top of each other below for comparison.
# Note that they differ in their peak timing and undershoot.
time_length = 30
glover_timecourse = first_level.glover_hrf(1, oversampling=50, time_length=time_length)
spm_timecourse = first_level.spm_hrf(1, oversampling=50, time_length=time_length)
sample_times = np.linspace(0, time_length, num=len(glover_timecourse))
plt.plot(sample_times, glover_timecourse, label="Glover")
plt.plot(sample_times, spm_timecourse, label="SPM")
plt.xlabel("Time (s)")
plt.ylabel("Amplitude (AU)")
plt.legend()
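# %%
#
# A quick numerical check of the peak-timing difference noted above
# (a small sketch using the timecourses we just computed):
print("Glover peak: %.1f s" % sample_times[np.argmax(glover_timecourse)])
print("SPM peak: %.1f s" % sample_times[np.argmax(spm_timecourse)])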
# %%
# Regressor Computation
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# These functions are not used directly in the GLM analysis.
# Instead they are used as the basis to compute a regressor which is
# utilised in the GLM fit.
# This is done by convolving the HRF model with a boxcar function that
# distills information
# about the experimental design. Specifically the stimulus onset times
# are used to indicate when a response begins, and a duration is used
# to specify the time over which the model should be convolved.
#
# Modifying the duration changes the regressor timecourse. Below we demonstrate
# how this varies for several duration values with the Glover HRF.
# Convenience functions so we don't need to repeat code below
def generate_stim(onset, amplitude, duration, hrf_model, maxtime=30):
# Generate signal with specified duration and onset
frame_times = np.linspace(0, maxtime, 601)
exp_condition = np.array((onset, duration, amplitude)).reshape(3, 1)
stim = np.zeros_like(frame_times)
stim[(frame_times > onset) * (frame_times <= onset + duration)] = amplitude
signal, name = first_level.compute_regressor(
exp_condition, hrf_model, frame_times, con_id="main", oversampling=16
)
return frame_times, stim, signal
def plot_regressor(onset, amplitude, duration, hrf_model):
frame_times, stim, signal = generate_stim(
onset, amplitude, duration, hrf_model)
plt.fill(frame_times, stim, "k", alpha=0.5, label="stimulus")
plt.plot(frame_times, signal.T[0], label="Regressor")
plt.xlabel("Time (s)")
plt.ylabel("Amplitude (AU)")
plt.legend(loc=1)
plt.title(hrf_model)
return None
# Generate an event of 1 second duration that occurs at time zero.
onset, amplitude, duration = 0.0, 1.0, 1.0
hrf_model = "glover"
plot_regressor(onset, amplitude, duration, hrf_model)
# %%
#
# If the duration is increased we see the resulting regressor
# is modified, and the transformation is not a simple scaling.
#
# For a 3 second duration:
duration = 3
plot_regressor(onset, amplitude, duration, hrf_model)
# %%
#
# Or for a 5 second duration:
duration = 5
plot_regressor(onset, amplitude, duration, hrf_model)
# %%
#
# Or for a 15 second duration:
duration = 15
plot_regressor(onset, amplitude, duration, hrf_model)
# %%
#
# We can plot multiple durations together to see how the
# resulting regressor varies as a function of this parameter.
cmap = mpl.cm.viridis
norm = mpl.colors.Normalize(vmin=0, vmax=40)
for n in [1, 3, 5, 10, 15, 20, 25, 30, 35]:
frame_times, stim, signal = generate_stim(
onset, amplitude, n, hrf_model, maxtime=50)
plt.plot(frame_times, signal.T[0], label="Regressor", c=cmap(norm(n)))
plt.xlabel("Time (s)")
plt.ylabel("Amplitude (AU)")
plt.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap))
# %%
# Inclusion in Design matrix
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# .. sidebar:: Derivative and dispersion terms
#
# You can also include derivative and dispersion terms to model
# differences between your data and the model. This is done by simply
# specifying your selected model plus the additional terms.
# For example, ``spm + derivative`` or
# ``glover + derivative + dispersion``.
#
# As mentioned above, we don't directly compute these regressors for
# each condition. Instead the function ``make_first_level_design_matrix``
# conveniently does this for us.
#
# As an example we will import a measurement and generate a
# design matrix for it. We will specify that we wish to use a Glover
# HRF convolved with a 3 second duration.
# See the :ref:`MNE-NIRS fNIRS GLM tutorial <tut-fnirs-hrf>` for more details.
#
# First we import the example data, crop to just the first few minutes,
# and give names to the annotations.
fnirs_data_folder = mne.datasets.fnirs_motor.data_path()
fnirs_raw_dir = os.path.join(fnirs_data_folder, 'Participant-1')
raw_intensity = mne.io.read_raw_nirx(fnirs_raw_dir).load_data().crop(tmax=300)
# raw_intensity.resample(0.7)
raw_intensity.annotations.rename({'1.0': 'Control',
'2.0': 'Tapping/Left',
'3.0': 'Tapping/Right'})
raw_intensity.annotations.delete(raw_intensity.annotations.description == '15.0')
raw_intensity.annotations.set_durations(5)
# %%
#
# Next we generate the design matrix and plot it.
# This representation of the regressors is transposed:
# time goes down the vertical
# axis and is specified in scan number (an fMRI hangover) or sample.
# There is no colorbar for this plot, as specified in Nilearn.
#
# We can see that when each event occurs the model value increases before returning to baseline.
# This is the same information as was shown in the time courses above, just displayed differently,
# with color representing amplitude.
design_matrix = make_first_level_design_matrix(raw_intensity,
# Ignore drift model for now, see section below
drift_model='polynomial',
drift_order=0,
# Here we specify the HRF and duration
hrf_model='glover',
stim_dur=3.0)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)
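# %%
#
# As a minimal sketch of the derivative terms mentioned in the sidebar above
# (the extra regressor columns are generated by Nilearn when the model string
# is extended), we can request a derivative regressor per condition:
design_matrix_deriv = make_first_level_design_matrix(raw_intensity,
                                                drift_model='polynomial',
                                                drift_order=0,
                                                hrf_model='glover + derivative',
                                                stim_dur=3.0)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix_deriv, ax=ax1)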
# %%
#
# As before we can explore the effect of modifying the duration,
# the resulting regressor for each annotation is elongated.
design_matrix = make_first_level_design_matrix(raw_intensity,
# Ignore drift model for now, see section below
drift_model='polynomial',
drift_order=0,
# Here we specify the HRF and duration
hrf_model='glover',
stim_dur=13.0)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)
# %%
#
# Depending on your experimental design the resulting responses
# may overlap (for example an event related design).
# This is not an issue, the design matrix can handle overlapping responses.
design_matrix = make_first_level_design_matrix(raw_intensity,
# Ignore drift model for now, see section below
drift_model='polynomial',
drift_order=0,
# Here we specify the HRF and duration
hrf_model='glover',
stim_dur=30.0)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)
# %%
# Drift Regressors
# ---------------------------------------------------------------------
#
# Aspects of the measured signal may change over time in a manner
# unrelated to the neural response we wish to measure.
# For example, the measurement room may warm up and result in a steady
# increase in the signal over the measurement duration.
# These signal changes that are unrelated to our feature of interest are
# termed drifts, and can be included in the design matrix and the GLM
# fitting as drift regressors.
#
# In the examples above a single drift regressor was used to model a constant
# offset in the data. This is also termed a zero order polynomial regressor.
# Two types of regressors are provided for in MNE-NIRS thanks to Nilearn.
# Polynomial and cosine drift regressors.
#
# .. note::
#
# Remember that the GLM can fit a negative coefficient,
# so a decreasing drift can be modeled by the increasing drift
# regressor with a negative coefficient.
# %%
# Polynomial Drift Regressors
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# In the example above a polynomial drift regressor is included.
# In this case we can specify the order of the polynomials to be included.
# A zero order polynomial will fit a constant, a first order will fit an
# increasing function, and so on.
# As an example we demonstrate how to include up to a fifth order polynomial.
# You can observe that with increasing polynomial order,
# higher frequency components will be regressed from the signal.
design_matrix = make_first_level_design_matrix(raw_intensity,
drift_model='polynomial',
drift_order=5)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)
# %%
# Cosine Drift Regressors
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# An alternative way to specify drift regressors is via the cosine drift model.
# This may be more intuitive, as you specify regressors up to a certain cut off
# frequency, effectively regressing out frequency components below that limit,
# which may be interpreted as a high pass filter.
# In the example below we demonstrate how to regress out components up to 0.01 Hz.
# We observe that the function has included 6 drift regressors in the design matrix.
design_matrix = make_first_level_design_matrix(raw_intensity,
drift_model='cosine',
high_pass=0.01)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)
# %%
#
# As described above, including additional regressor components will remove
# higher frequency components. So we can increase the high pass cut off and
# this should add more regressors.
design_matrix = make_first_level_design_matrix(raw_intensity,
drift_model='cosine',
high_pass=0.03)
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)
# %%
# Selecting Drift Regressors
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# The aim of the drift regressors is to remove signal components unrelated
# to the expected neural response. As the expected response can be computed
# based on annotation timing and expected brain responses
# (see :ref:`frequency commentary <tut-fnirs-freq>`)
# the high pass cut off can be set from first principles.
#
# The Nilearn documentation states that
# "The cutoff period (1/high_pass) should be set as the longest period between two trials of the same condition multiplied by 2.
# For instance, if the longest period is 32s, the high_pass frequency shall be 1/64 Hz ~ 0.016 Hz."
# `(reference) <http://nilearn.github.io/auto_examples/04_glm_first_level/plot_first_level_details.html#changing-the-drift-model>`__.
#
# To assist in selecting a high pass value a few convenience functions are included in MNE-NIRS.
# We can query what the longest ISI is per annotation, but first we must be sure
# to remove annotations we aren't interested in (in this experiment the trigger
# 15 is not of interest).
raw_original = mne.io.read_raw_nirx(fnirs_raw_dir)
raw_original.annotations.delete(raw_original.annotations.description == '15.0')
isis, names = longest_inter_annotation_interval(raw_original)
print(isis)
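# %%
#
# The rule of thumb quoted above can also be computed directly
# (a small sketch assuming ``isis`` holds the per-annotation ISIs in seconds):
print(1.0 / (2 * max(isis)))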
# %%
#
# We see that the longest period between two trials is 435 seconds, which multiplied
# by two is 870 seconds. So a high pass value of 1/870, or approximately 0.001 Hz, is appropriate.
# We can also use the function
# :func:`mne_nirs.experimental_design.drift_high_pass`
# to suggest the high pass value. Note, however, that you should not blindly follow
# this function's suggestion, as each experiment is different. Instead use this as
# a sanity check on your own calculations.
print(drift_high_pass(raw_original))
# %%
#
# For example, if all conditions evoke the same response, it may make more
# sense to treat them as a single condition when computing the ISI.
# This would be achieved by renaming the triggers.
raw_original.annotations.rename({'2.0': 'Tapping', '3.0': 'Tapping'})
raw_original.annotations.delete(raw_original.annotations.description == '1.0')
isis, names = longest_inter_annotation_interval(raw_original)
print(isis)
print(drift_high_pass(raw_original))
|
[
"matplotlib.pyplot.title",
"nilearn.glm.first_level.spm_hrf",
"os.path.join",
"numpy.zeros_like",
"matplotlib.colors.Normalize",
"nilearn.plotting.plot_design_matrix",
"matplotlib.cm.ScalarMappable",
"nilearn.glm.first_level.compute_regressor",
"nilearn.glm.first_level.glover_hrf",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.legend",
"mne.io.read_raw_nirx",
"matplotlib.pyplot.ylabel",
"mne_nirs.experimental_design.make_first_level_design_matrix",
"mne_nirs.experimental_design.drift_high_pass",
"mne_nirs.experimental_design.longest_inter_annotation_interval",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.fill",
"mne.datasets.fnirs_motor.data_path",
"numpy.array",
"matplotlib.pyplot.xlabel"
] |
[((3942, 4009), 'nilearn.glm.first_level.glover_hrf', 'first_level.glover_hrf', (['(1)'], {'oversampling': '(50)', 'time_length': 'time_length'}), '(1, oversampling=50, time_length=time_length)\n', (3964, 4009), False, 'from nilearn.glm import first_level\n'), ((4027, 4091), 'nilearn.glm.first_level.spm_hrf', 'first_level.spm_hrf', (['(1)'], {'oversampling': '(50)', 'time_length': 'time_length'}), '(1, oversampling=50, time_length=time_length)\n', (4046, 4091), False, 'from nilearn.glm import first_level\n'), ((4165, 4222), 'matplotlib.pyplot.plot', 'plt.plot', (['sample_times', 'glover_timecourse'], {'label': '"""Glover"""'}), "(sample_times, glover_timecourse, label='Glover')\n", (4173, 4222), True, 'import matplotlib.pyplot as plt\n'), ((4223, 4274), 'matplotlib.pyplot.plot', 'plt.plot', (['sample_times', 'spm_timecourse'], {'label': '"""SPM"""'}), "(sample_times, spm_timecourse, label='SPM')\n", (4231, 4274), True, 'import matplotlib.pyplot as plt\n'), ((4275, 4297), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (4285, 4297), True, 'import matplotlib.pyplot as plt\n'), ((4298, 4326), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Amplitude (AU)"""'], {}), "('Amplitude (AU)')\n", (4308, 4326), True, 'import matplotlib.pyplot as plt\n'), ((4327, 4339), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4337, 4339), True, 'import matplotlib.pyplot as plt\n'), ((6843, 6880), 'matplotlib.colors.Normalize', 'mpl.colors.Normalize', ([], {'vmin': '(0)', 'vmax': '(40)'}), '(vmin=0, vmax=40)\n', (6863, 6880), True, 'import matplotlib as mpl\n'), ((7101, 7123), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (7111, 7123), True, 'import matplotlib.pyplot as plt\n'), ((7124, 7152), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Amplitude (AU)"""'], {}), "('Amplitude (AU)')\n", (7134, 7152), True, 'import matplotlib.pyplot as plt\n'), ((8220, 8256), 'mne.datasets.fnirs_motor.data_path', 'mne.datasets.fnirs_motor.data_path', ([], {}), '()\n', (8254, 8256), False, 'import mne\n'), ((8273, 8321), 'os.path.join', 'os.path.join', (['fnirs_data_folder', '"""Participant-1"""'], {}), "(fnirs_data_folder, 'Participant-1')\n", (8285, 8321), False, 'import os\n'), ((9249, 9373), 'mne_nirs.experimental_design.make_first_level_design_matrix', 'make_first_level_design_matrix', (['raw_intensity'], {'drift_model': '"""polynomial"""', 'drift_order': '(0)', 'hrf_model': '"""glover"""', 'stim_dur': '(3.0)'}), "(raw_intensity, drift_model='polynomial',\n drift_order=0, hrf_model='glover', stim_dur=3.0)\n", (9279, 9373), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((9751, 9798), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)', 'nrows': '(1)', 'ncols': '(1)'}), '(figsize=(10, 6), nrows=1, ncols=1)\n', (9763, 9798), True, 'import matplotlib.pyplot as plt\n'), ((9805, 9846), 'nilearn.plotting.plot_design_matrix', 'plot_design_matrix', (['design_matrix'], {'ax': 'ax1'}), '(design_matrix, ax=ax1)\n', (9823, 9846), False, 'from nilearn.plotting import plot_design_matrix\n'), ((9998, 10123), 'mne_nirs.experimental_design.make_first_level_design_matrix', 'make_first_level_design_matrix', (['raw_intensity'], {'drift_model': '"""polynomial"""', 'drift_order': '(0)', 'hrf_model': '"""glover"""', 'stim_dur': '(13.0)'}), "(raw_intensity, drift_model='polynomial',\n drift_order=0, hrf_model='glover', stim_dur=13.0)\n", (10028, 
10123), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((10501, 10548), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)', 'nrows': '(1)', 'ncols': '(1)'}), '(figsize=(10, 6), nrows=1, ncols=1)\n', (10513, 10548), True, 'import matplotlib.pyplot as plt\n'), ((10555, 10596), 'nilearn.plotting.plot_design_matrix', 'plot_design_matrix', (['design_matrix'], {'ax': 'ax1'}), '(design_matrix, ax=ax1)\n', (10573, 10596), False, 'from nilearn.plotting import plot_design_matrix\n'), ((10816, 10941), 'mne_nirs.experimental_design.make_first_level_design_matrix', 'make_first_level_design_matrix', (['raw_intensity'], {'drift_model': '"""polynomial"""', 'drift_order': '(0)', 'hrf_model': '"""glover"""', 'stim_dur': '(30.0)'}), "(raw_intensity, drift_model='polynomial',\n drift_order=0, hrf_model='glover', stim_dur=30.0)\n", (10846, 10941), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((11319, 11366), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)', 'nrows': '(1)', 'ncols': '(1)'}), '(figsize=(10, 6), nrows=1, ncols=1)\n', (11331, 11366), True, 'import matplotlib.pyplot as plt\n'), ((11373, 11414), 'nilearn.plotting.plot_design_matrix', 'plot_design_matrix', (['design_matrix'], {'ax': 'ax1'}), '(design_matrix, ax=ax1)\n', (11391, 11414), False, 'from nilearn.plotting import plot_design_matrix\n'), ((12970, 13060), 'mne_nirs.experimental_design.make_first_level_design_matrix', 'make_first_level_design_matrix', (['raw_intensity'], {'drift_model': '"""polynomial"""', 'drift_order': '(5)'}), "(raw_intensity, drift_model='polynomial',\n drift_order=5)\n", (13000, 13060), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((13163, 13210), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)', 'nrows': '(1)', 'ncols': '(1)'}), '(figsize=(10, 6), nrows=1, ncols=1)\n', (13175, 13210), True, 'import matplotlib.pyplot as plt\n'), ((13217, 13258), 'nilearn.plotting.plot_design_matrix', 'plot_design_matrix', (['design_matrix'], {'ax': 'ax1'}), '(design_matrix, ax=ax1)\n', (13235, 13258), False, 'from nilearn.plotting import plot_design_matrix\n'), ((13837, 13924), 'mne_nirs.experimental_design.make_first_level_design_matrix', 'make_first_level_design_matrix', (['raw_intensity'], {'drift_model': '"""cosine"""', 'high_pass': '(0.01)'}), "(raw_intensity, drift_model='cosine',\n high_pass=0.01)\n", (13867, 13924), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((14027, 14074), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)', 'nrows': '(1)', 'ncols': '(1)'}), '(figsize=(10, 6), nrows=1, ncols=1)\n', (14039, 14074), True, 'import matplotlib.pyplot as plt\n'), ((14081, 14122), 'nilearn.plotting.plot_design_matrix', 'plot_design_matrix', (['design_matrix'], {'ax': 'ax1'}), '(design_matrix, ax=ax1)\n', (14099, 14122), False, 'from nilearn.plotting import plot_design_matrix\n'), ((14336, 14423), 'mne_nirs.experimental_design.make_first_level_design_matrix', 'make_first_level_design_matrix', (['raw_intensity'], {'drift_model': '"""cosine"""', 'high_pass': '(0.03)'}), "(raw_intensity, drift_model='cosine',\n high_pass=0.03)\n", (14366, 14423), False, 'from mne_nirs.experimental_design 
import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((14526, 14573), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)', 'nrows': '(1)', 'ncols': '(1)'}), '(figsize=(10, 6), nrows=1, ncols=1)\n', (14538, 14573), True, 'import matplotlib.pyplot as plt\n'), ((14580, 14621), 'nilearn.plotting.plot_design_matrix', 'plot_design_matrix', (['design_matrix'], {'ax': 'ax1'}), '(design_matrix, ax=ax1)\n', (14598, 14621), False, 'from nilearn.plotting import plot_design_matrix\n'), ((15763, 15798), 'mne.io.read_raw_nirx', 'mne.io.read_raw_nirx', (['fnirs_raw_dir'], {}), '(fnirs_raw_dir)\n', (15783, 15798), False, 'import mne\n'), ((15894, 15941), 'mne_nirs.experimental_design.longest_inter_annotation_interval', 'longest_inter_annotation_interval', (['raw_original'], {}), '(raw_original)\n', (15927, 15941), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((16853, 16900), 'mne_nirs.experimental_design.longest_inter_annotation_interval', 'longest_inter_annotation_interval', (['raw_original'], {}), '(raw_original)\n', (16886, 16900), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((5256, 5284), 'numpy.linspace', 'np.linspace', (['(0)', 'maxtime', '(601)'], {}), '(0, maxtime, 601)\n', (5267, 5284), True, 'import numpy as np\n'), ((5369, 5395), 'numpy.zeros_like', 'np.zeros_like', (['frame_times'], {}), '(frame_times)\n', (5382, 5395), True, 'import numpy as np\n'), ((5496, 5601), 'nilearn.glm.first_level.compute_regressor', 'first_level.compute_regressor', (['exp_condition', 'hrf_model', 'frame_times'], {'con_id': '"""main"""', 'oversampling': '(16)'}), "(exp_condition, hrf_model, frame_times, con_id\n ='main', oversampling=16)\n", (5525, 5601), False, 'from nilearn.glm import first_level\n'), ((5808, 5869), 'matplotlib.pyplot.fill', 'plt.fill', (['frame_times', 'stim', '"""k"""'], {'alpha': '(0.5)', 'label': '"""stimulus"""'}), "(frame_times, stim, 'k', alpha=0.5, label='stimulus')\n", (5816, 5869), True, 'import matplotlib.pyplot as plt\n'), ((5874, 5927), 'matplotlib.pyplot.plot', 'plt.plot', (['frame_times', 'signal.T[0]'], {'label': '"""Regressor"""'}), "(frame_times, signal.T[0], label='Regressor')\n", (5882, 5927), True, 'import matplotlib.pyplot as plt\n'), ((5932, 5954), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (5942, 5954), True, 'import matplotlib.pyplot as plt\n'), ((5959, 5987), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Amplitude (AU)"""'], {}), "('Amplitude (AU)')\n", (5969, 5987), True, 'import matplotlib.pyplot as plt\n'), ((5992, 6009), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(1)'}), '(loc=1)\n', (6002, 6009), True, 'import matplotlib.pyplot as plt\n'), ((6014, 6034), 'matplotlib.pyplot.title', 'plt.title', (['hrf_model'], {}), '(hrf_model)\n', (6023, 6034), True, 'import matplotlib.pyplot as plt\n'), ((7166, 7209), 'matplotlib.cm.ScalarMappable', 'mpl.cm.ScalarMappable', ([], {'norm': 'norm', 'cmap': 'cmap'}), '(norm=norm, cmap=cmap)\n', (7187, 7209), True, 'import matplotlib as mpl\n'), ((16447, 16476), 'mne_nirs.experimental_design.drift_high_pass', 'drift_high_pass', (['raw_original'], {}), '(raw_original)\n', (16462, 16476), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((16919, 16948), 
'mne_nirs.experimental_design.drift_high_pass', 'drift_high_pass', (['raw_original'], {}), '(raw_original)\n', (16934, 16948), False, 'from mne_nirs.experimental_design import make_first_level_design_matrix, longest_inter_annotation_interval, drift_high_pass\n'), ((5305, 5343), 'numpy.array', 'np.array', (['(onset, duration, amplitude)'], {}), '((onset, duration, amplitude))\n', (5313, 5343), True, 'import numpy as np\n'), ((8338, 8373), 'mne.io.read_raw_nirx', 'mne.io.read_raw_nirx', (['fnirs_raw_dir'], {}), '(fnirs_raw_dir)\n', (8358, 8373), False, 'import mne\n')]
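The extract above logs several make_first_level_design_matrix calls that differ only in stim_dur (3.0, 13.0, 30.0 s). A hedged reconstruction of one such call, with the raw-data loading assumed from the read_raw_nirx entries in the same extract:

# Reconstructed from the logged arguments; fnirs_raw_dir is assumed to be set
# as in the source record, and only the keyword values are taken verbatim.
import mne
from mne_nirs.experimental_design import make_first_level_design_matrix

raw_intensity = mne.io.read_raw_nirx(fnirs_raw_dir)
design_matrix = make_first_level_design_matrix(
    raw_intensity, drift_model='polynomial', drift_order=0,
    hrf_model='glover', stim_dur=13.0)  # stim_dur varies across the logged calls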
|
import glob
import multiprocessing
import os
import json
from django.core.exceptions import ImproperlyConfigured
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Load Secrets
def load_secrets(file=os.path.join(BASE_DIR, '.secrets.json')):
try:
with open(file) as f:
secrets = json.loads(f.read())
return secrets
except FileNotFoundError:
raise ImproperlyConfigured(
'Secrets file not found. Please create the secrets file or correct'
' the configuration.'
)
secrets = load_secrets()
# Get a secret
def get_secret(key, secrets=secrets or load_secrets()):
try:
val = secrets[key]
if val == 'True':
val = True
elif val == 'False':
val = False
return val
except KeyError:
error_msg = (
"ImproperlyConfigured: Set {0} environment variable"
).format(key)
raise ImproperlyConfigured(error_msg)
def watch_extra_files():
files = set()
patterns = [
{'path': '**/*.html', 'recursive': True, },
{'path': '**/*.py', 'recursive': True, },
]
for pattern in patterns:
files = files.union(glob.glob(pattern['path'], recursive=pattern[
'recursive']))
return files
proc_name = 'districtwebsites'
pidfile = '/var/run/gunicorn/www_slcschools_org.pid'
worker_tmp_dir = '/srv/gunicorn/www_slcschools_org'
bind = 'unix:/var/run/gunicorn/www_slcschools_org.sock'
workers = multiprocessing.cpu_count() * 3 + 1
worker_class = 'sync'
timeout = 3600
raw_env = [
'DJANGO_SETTINGS_MODULE={0}'.format(get_secret('DJANGO_SETTINGS_MODULE')),
]
reload = get_secret('GUNICORN_RELOAD')
if reload:
reload_extra_files = watch_extra_files()
|
[
"os.path.abspath",
"django.core.exceptions.ImproperlyConfigured",
"glob.glob",
"os.path.join",
"multiprocessing.cpu_count"
] |
[((214, 239), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (229, 239), False, 'import os\n'), ((280, 319), 'os.path.join', 'os.path.join', (['BASE_DIR', '""".secrets.json"""'], {}), "(BASE_DIR, '.secrets.json')\n", (292, 319), False, 'import os\n'), ((1574, 1601), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (1599, 1601), False, 'import multiprocessing\n'), ((475, 593), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['"""Secrets file not found. Please create the secrets file or correct the configuration."""'], {}), "(\n 'Secrets file not found. Please create the secrets file or correct the configuration.'\n )\n", (495, 593), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((1022, 1053), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['error_msg'], {}), '(error_msg)\n', (1042, 1053), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((1281, 1339), 'glob.glob', 'glob.glob', (["pattern['path']"], {'recursive': "pattern['recursive']"}), "(pattern['path'], recursive=pattern['recursive'])\n", (1290, 1339), False, 'import glob\n')]
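Note how get_secret above coerces the literal strings 'True' and 'False' into booleans before callers such as the GUNICORN_RELOAD lookup see them. A minimal usage sketch; the DEBUG key is illustrative, not taken from the record:

# Hypothetical settings-module usage, assuming .secrets.json contains
# {"DEBUG": "True", "GUNICORN_RELOAD": "False"}.
DEBUG = get_secret('DEBUG')             # -> True (bool), not the string 'True'
RELOAD = get_secret('GUNICORN_RELOAD')  # -> False (bool)
get_secret('MISSING_KEY')               # raises ImproperlyConfigured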
|
import datetime
from billy.utils.fulltext import pdfdata_to_text, text_after_line_numbers
from billy.scrape.utils import url_xpath
from .bills import MOBillScraper
from .legislators import MOLegislatorScraper
from .committees import MOCommitteeScraper
from .votes import MOVoteScraper
metadata = dict(
name = 'Missouri',
abbreviation = 'mo',
legislature_name = 'Missouri General Assembly',
legislature_url = 'http://www.moga.mo.gov/',
capitol_timezone = 'America/Chicago',
chambers = {
'upper': {
'name': 'Senate',
'title': 'Senator'
},
'lower': {
'name': 'House',
'title': 'Representative'
},
},
terms = [
{
'name': '2011-2012',
'sessions': ['2012'],
'start_year': 2011,
'end_year': 2012,
},
{
'name': '2013-2014',
'sessions': ['2013', '2014'],
'start_year': 2013,
'end_year': 2014,
},
{
'name': '2015-2016',
'sessions': ['2015', '2016'],
'start_year': 2015,
'end_year': 2016,
},
],
# General Assembly sessions convene the Wed. following the first Mon.
# of January and adjourn May 30.
# http://www.house.mo.gov/content.aspx?info=/info/howbill.htm
session_details = {
'2012': {
'type': 'primary',
'start_date': datetime.date(2012,1,4),
'end_date': datetime.date(2012,5,30),
'display_name': '2012 Regular Session',
'_scraped_name': '2012 - 96th General Assembly - 2nd Regular Session',
},
'2013': {
'type': 'primary',
'start_date': datetime.date(2013,1,9),
'end_date': datetime.date(2013,5,30),
'display_name': '2013 Regular Session',
'_scraped_name': '2013 - 97th General Assembly - 1st Regular Session',
},
'2014': {
'type': 'primary',
'start_date': datetime.date(2014,1,8),
'end_date': datetime.date(2014,5,30),
'display_name': '2014 Regular Session',
            '_scraped_name': '2014 - 97th General Assembly - 2nd Regular Session',
},
'2015': {
'type': 'primary',
'start_date': datetime.date(2015,1,7),
'end_date': datetime.date(2015,5,30),
'display_name': '2015 Regular Session',
'_scraped_name': '2015 - 98th General Assembly - 1st Regular Session',
},
'2016': {
'type': 'primary',
'start_date': datetime.date(2016,1,6),
'end_date': datetime.date(2016,5,30),
'display_name': '2016 Regular Session',
},
},
feature_flags = ['subjects', 'influenceexplorer'],
_ignored_scraped_sessions = [
'2014 - 97th General Assembly - 2nd Regular Session',
'2013 - 97th General Assembly - 1st Regular Session',
'2012 - 96th General Assembly - 2nd Regular Session',
'2011 - 96th General Assembly - 1st Regular Session',
'2010 - 95th General Assembly - 2nd Regular Session',
'2009 - 95th General Assembly - 1st Regular Session',
'2008 - 94th General Assembly - 2nd Regular Session',
'2007 - 94th General Assembly - 1st Regular Session',
'2006 - 93rd General Assembly - 2nd Regular Session',
'2005 - 93rd General Assembly - 1st Regular Session',
'2004 - 92nd General Assembly - 2nd Regular Session',
'2003 - 92nd General Assembly - 1st Regular Session',
'2002 - 91st General Assembly - 2nd Regular Session',
'2001 - 91st General Assembly - 1st Regular Session',
'2000 - 90th General Assembly - 2nd Regular Session',
'1999 - 90th General Assembly - 1st Regular Session',
'1998 - 89th General Assembly - 2nd Regular Session',
'1997 - 89th General Assembly - 1st Regular Session',
'1996 - 88th General Assembly - 2nd Regular Session',
'1995 - 88th General Assembly - 1st Regular Session'
]
)
def session_list():
sessions = url_xpath('http://www.senate.mo.gov/pastsessions.htm',
'//div[@id="list"]/li/a/text()')
return sessions
def extract_text(doc, data):
text = pdfdata_to_text(data)
return text_after_line_numbers(text).encode('ascii', 'ignore')
|
[
"billy.utils.fulltext.text_after_line_numbers",
"billy.utils.fulltext.pdfdata_to_text",
"datetime.date",
"billy.scrape.utils.url_xpath"
] |
[((4170, 4261), 'billy.scrape.utils.url_xpath', 'url_xpath', (['"""http://www.senate.mo.gov/pastsessions.htm"""', '"""//div[@id="list"]/li/a/text()"""'], {}), '(\'http://www.senate.mo.gov/pastsessions.htm\',\n \'//div[@id="list"]/li/a/text()\')\n', (4179, 4261), False, 'from billy.scrape.utils import url_xpath\n'), ((4328, 4349), 'billy.utils.fulltext.pdfdata_to_text', 'pdfdata_to_text', (['data'], {}), '(data)\n', (4343, 4349), False, 'from billy.utils.fulltext import pdfdata_to_text, text_after_line_numbers\n'), ((4361, 4390), 'billy.utils.fulltext.text_after_line_numbers', 'text_after_line_numbers', (['text'], {}), '(text)\n', (4384, 4390), False, 'from billy.utils.fulltext import pdfdata_to_text, text_after_line_numbers\n'), ((1469, 1494), 'datetime.date', 'datetime.date', (['(2012)', '(1)', '(4)'], {}), '(2012, 1, 4)\n', (1482, 1494), False, 'import datetime\n'), ((1518, 1544), 'datetime.date', 'datetime.date', (['(2012)', '(5)', '(30)'], {}), '(2012, 5, 30)\n', (1531, 1544), False, 'import datetime\n'), ((1765, 1790), 'datetime.date', 'datetime.date', (['(2013)', '(1)', '(9)'], {}), '(2013, 1, 9)\n', (1778, 1790), False, 'import datetime\n'), ((1814, 1840), 'datetime.date', 'datetime.date', (['(2013)', '(5)', '(30)'], {}), '(2013, 5, 30)\n', (1827, 1840), False, 'import datetime\n'), ((2061, 2086), 'datetime.date', 'datetime.date', (['(2014)', '(1)', '(8)'], {}), '(2014, 1, 8)\n', (2074, 2086), False, 'import datetime\n'), ((2110, 2136), 'datetime.date', 'datetime.date', (['(2014)', '(5)', '(30)'], {}), '(2014, 5, 30)\n', (2123, 2136), False, 'import datetime\n'), ((2356, 2381), 'datetime.date', 'datetime.date', (['(2015)', '(1)', '(7)'], {}), '(2015, 1, 7)\n', (2369, 2381), False, 'import datetime\n'), ((2405, 2431), 'datetime.date', 'datetime.date', (['(2015)', '(5)', '(30)'], {}), '(2015, 5, 30)\n', (2418, 2431), False, 'import datetime\n'), ((2652, 2677), 'datetime.date', 'datetime.date', (['(2016)', '(1)', '(6)'], {}), '(2016, 1, 6)\n', (2665, 2677), False, 'import datetime\n'), ((2701, 2727), 'datetime.date', 'datetime.date', (['(2016)', '(5)', '(30)'], {}), '(2016, 5, 30)\n', (2714, 2727), False, 'import datetime\n')]
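The metadata dict above is plain nested data; a short sketch of how a consumer reads it (keys and values taken verbatim from the record):

# Illustrative lookups into the Missouri metadata defined above.
session = metadata['session_details']['2015']
print(session['display_name'])                 # '2015 Regular Session'
print(session['start_date'].isoformat())       # '2015-01-07'
print(metadata['chambers']['upper']['title'])  # 'Senator'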
|
# -*- coding: utf-8 -*-
r"""
=================================
Wasserstein unmixing with PyTorch
=================================
In this example we estimate mixing parameters from distributions that minimize
the Wasserstein distance. In other words we suppose that a target
distribution :math:`\mu^t` can be expressed as a weighted sum of source
distributions :math:`\mu^s_k` with the following model:
.. math::
\mu^t = \sum_{k=1}^K w_k\mu^s_k
where :math:`\mathbf{w}` is a vector of size :math:`K` and belongs in the
distribution simplex :math:`\Delta_K`.
In order to estimate this weight vector we propose to optimize the Wasserstein
distance between the model and the observed :math:`\mu^t` with respect to
the vector. This leads to the following optimization problem:
.. math::
\min_{\mathbf{w}\in\Delta_K} \quad W \left(\mu^t,\sum_{k=1}^K w_k\mu^s_k\right)
This minimization is done in this example with a simple projected gradient
descent in PyTorch. We use the automatic backend of POT that allows us to
compute the Wasserstein distance with :any:`ot.emd2` with
differentiable losses.
"""
# Author: <NAME> <<EMAIL>>
#
# License: MIT License
# sphinx_gallery_thumbnail_number = 2
import numpy as np
import matplotlib.pylab as pl
import ot
import torch
##############################################################################
# Generate data
# -------------
#%% Data
nt = 100
nt1 = 10
ns1 = 50
ns = 2 * ns1
rng = np.random.RandomState(2)
xt = rng.randn(nt, 2) * 0.2
xt[:nt1, 0] += 1
xt[nt1:, 1] += 1
xs1 = rng.randn(ns1, 2) * 0.2
xs1[:, 0] += 1
xs2 = rng.randn(ns1, 2) * 0.2
xs2[:, 1] += 1
xs = np.concatenate((xs1, xs2))
# Sample reweighting matrix H
H = np.zeros((ns, 2))
H[:ns1, 0] = 1 / ns1
H[ns1:, 1] = 1 / ns1
# each column sums to 1 and has weights only for samples from the
# corresponding source distribution
M = ot.dist(xs, xt)
##############################################################################
# Plot data
# ---------
#%% plot the distributions
pl.figure(1)
pl.scatter(xt[:, 0], xt[:, 1], label='Target $\mu^t$', alpha=0.5)
pl.scatter(xs1[:, 0], xs1[:, 1], label='Source $\mu^s_1$', alpha=0.5)
pl.scatter(xs2[:, 0], xs2[:, 1], label='Source $\mu^s_2$', alpha=0.5)
pl.title('Sources and Target distributions')
pl.legend()
##############################################################################
# Optimization of the model wrt the Wasserstein distance
# ------------------------------------------------------
#%% Weights optimization with gradient descent
# convert numpy arrays to torch tensors
H2 = torch.tensor(H)
M2 = torch.tensor(M)
# weights for the source distributions
w = torch.tensor(ot.unif(2), requires_grad=True)
# uniform weights for target
b = torch.tensor(ot.unif(nt))
lr = 2e-3 # learning rate
niter = 500 # number of iterations
losses = [] # loss along the iterations
# loss for the minimal Wasserstein estimator
def get_loss(w):
a = torch.mv(H2, w) # distribution reweighting
return ot.emd2(a, b, M2) # squared Wasserstein 2
for i in range(niter):
loss = get_loss(w)
losses.append(float(loss))
loss.backward()
with torch.no_grad():
w -= lr * w.grad # gradient step
w[:] = ot.utils.proj_simplex(w) # projection on the simplex
w.grad.zero_()
##############################################################################
# Estimated weights and convergence of the objective
# ---------------------------------------------------
we = w.detach().numpy()
print('Estimated mixture:', we)
pl.figure(2)
pl.semilogy(losses)
pl.grid()
pl.title('Wasserstein distance')
pl.xlabel("Iterations")
##############################################################################
# Ploting the reweighted source distribution
# ------------------------------------------
pl.figure(3)
# compute source weights
ws = H.dot(we)
pl.scatter(xt[:, 0], xt[:, 1], label='Target $\mu^t$', alpha=0.5)
pl.scatter(xs[:, 0], xs[:, 1], color='C3', s=ws * 20 * ns, label='Weighted sources $\sum_{k} w_k\mu^s_k$', alpha=0.5)
pl.title('Target and reweighted source distributions')
pl.legend()
|
[
"ot.emd2",
"torch.tensor",
"matplotlib.pylab.scatter",
"matplotlib.pylab.legend",
"ot.unif",
"ot.dist",
"numpy.zeros",
"numpy.random.RandomState",
"torch.mv",
"matplotlib.pylab.xlabel",
"ot.utils.proj_simplex",
"matplotlib.pylab.semilogy",
"matplotlib.pylab.title",
"torch.no_grad",
"matplotlib.pylab.grid",
"numpy.concatenate",
"matplotlib.pylab.figure"
] |
[((1453, 1477), 'numpy.random.RandomState', 'np.random.RandomState', (['(2)'], {}), '(2)\n', (1474, 1477), True, 'import numpy as np\n'), ((1639, 1665), 'numpy.concatenate', 'np.concatenate', (['(xs1, xs2)'], {}), '((xs1, xs2))\n', (1653, 1665), True, 'import numpy as np\n'), ((1701, 1718), 'numpy.zeros', 'np.zeros', (['(ns, 2)'], {}), '((ns, 2))\n', (1709, 1718), True, 'import numpy as np\n'), ((1869, 1884), 'ot.dist', 'ot.dist', (['xs', 'xt'], {}), '(xs, xt)\n', (1876, 1884), False, 'import ot\n'), ((2018, 2030), 'matplotlib.pylab.figure', 'pl.figure', (['(1)'], {}), '(1)\n', (2027, 2030), True, 'import matplotlib.pylab as pl\n'), ((2031, 2097), 'matplotlib.pylab.scatter', 'pl.scatter', (['xt[:, 0]', 'xt[:, 1]'], {'label': '"""Target $\\\\mu^t$"""', 'alpha': '(0.5)'}), "(xt[:, 0], xt[:, 1], label='Target $\\\\mu^t$', alpha=0.5)\n", (2041, 2097), True, 'import matplotlib.pylab as pl\n'), ((2097, 2167), 'matplotlib.pylab.scatter', 'pl.scatter', (['xs1[:, 0]', 'xs1[:, 1]'], {'label': '"""Source $\\\\mu^s_1$"""', 'alpha': '(0.5)'}), "(xs1[:, 0], xs1[:, 1], label='Source $\\\\mu^s_1$', alpha=0.5)\n", (2107, 2167), True, 'import matplotlib.pylab as pl\n'), ((2167, 2237), 'matplotlib.pylab.scatter', 'pl.scatter', (['xs2[:, 0]', 'xs2[:, 1]'], {'label': '"""Source $\\\\mu^s_2$"""', 'alpha': '(0.5)'}), "(xs2[:, 0], xs2[:, 1], label='Source $\\\\mu^s_2$', alpha=0.5)\n", (2177, 2237), True, 'import matplotlib.pylab as pl\n'), ((2237, 2281), 'matplotlib.pylab.title', 'pl.title', (['"""Sources and Target distributions"""'], {}), "('Sources and Target distributions')\n", (2245, 2281), True, 'import matplotlib.pylab as pl\n'), ((2282, 2293), 'matplotlib.pylab.legend', 'pl.legend', ([], {}), '()\n', (2291, 2293), True, 'import matplotlib.pylab as pl\n'), ((2584, 2599), 'torch.tensor', 'torch.tensor', (['H'], {}), '(H)\n', (2596, 2599), False, 'import torch\n'), ((2605, 2620), 'torch.tensor', 'torch.tensor', (['M'], {}), '(M)\n', (2617, 2620), False, 'import torch\n'), ((3551, 3563), 'matplotlib.pylab.figure', 'pl.figure', (['(2)'], {}), '(2)\n', (3560, 3563), True, 'import matplotlib.pylab as pl\n'), ((3564, 3583), 'matplotlib.pylab.semilogy', 'pl.semilogy', (['losses'], {}), '(losses)\n', (3575, 3583), True, 'import matplotlib.pylab as pl\n'), ((3584, 3593), 'matplotlib.pylab.grid', 'pl.grid', ([], {}), '()\n', (3591, 3593), True, 'import matplotlib.pylab as pl\n'), ((3594, 3626), 'matplotlib.pylab.title', 'pl.title', (['"""Wasserstein distance"""'], {}), "('Wasserstein distance')\n", (3602, 3626), True, 'import matplotlib.pylab as pl\n'), ((3627, 3650), 'matplotlib.pylab.xlabel', 'pl.xlabel', (['"""Iterations"""'], {}), "('Iterations')\n", (3636, 3650), True, 'import matplotlib.pylab as pl\n'), ((3822, 3834), 'matplotlib.pylab.figure', 'pl.figure', (['(3)'], {}), '(3)\n', (3831, 3834), True, 'import matplotlib.pylab as pl\n'), ((3877, 3943), 'matplotlib.pylab.scatter', 'pl.scatter', (['xt[:, 0]', 'xt[:, 1]'], {'label': '"""Target $\\\\mu^t$"""', 'alpha': '(0.5)'}), "(xt[:, 0], xt[:, 1], label='Target $\\\\mu^t$', alpha=0.5)\n", (3887, 3943), True, 'import matplotlib.pylab as pl\n'), ((3943, 4067), 'matplotlib.pylab.scatter', 'pl.scatter', (['xs[:, 0]', 'xs[:, 1]'], {'color': '"""C3"""', 's': '(ws * 20 * ns)', 'label': '"""Weighted sources $\\\\sum_{k} w_k\\\\mu^s_k$"""', 'alpha': '(0.5)'}), "(xs[:, 0], xs[:, 1], color='C3', s=ws * 20 * ns, label=\n 'Weighted sources $\\\\sum_{k} w_k\\\\mu^s_k$', alpha=0.5)\n", (3953, 4067), True, 'import matplotlib.pylab as pl\n'), ((4061, 4115), 
'matplotlib.pylab.title', 'pl.title', (['"""Target and reweighted source distributions"""'], {}), "('Target and reweighted source distributions')\n", (4069, 4115), True, 'import matplotlib.pylab as pl\n'), ((4116, 4127), 'matplotlib.pylab.legend', 'pl.legend', ([], {}), '()\n', (4125, 4127), True, 'import matplotlib.pylab as pl\n'), ((2678, 2688), 'ot.unif', 'ot.unif', (['(2)'], {}), '(2)\n', (2685, 2688), False, 'import ot\n'), ((2757, 2768), 'ot.unif', 'ot.unif', (['nt'], {}), '(nt)\n', (2764, 2768), False, 'import ot\n'), ((2948, 2963), 'torch.mv', 'torch.mv', (['H2', 'w'], {}), '(H2, w)\n', (2956, 2963), False, 'import torch\n'), ((3003, 3020), 'ot.emd2', 'ot.emd2', (['a', 'b', 'M2'], {}), '(a, b, M2)\n', (3010, 3020), False, 'import ot\n'), ((3157, 3172), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3170, 3172), False, 'import torch\n'), ((3231, 3255), 'ot.utils.proj_simplex', 'ot.utils.proj_simplex', (['w'], {}), '(w)\n', (3252, 3255), False, 'import ot\n')]
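The descent loop in the record stays on the probability simplex via ot.utils.proj_simplex. A small sanity sketch with assumed input values:

# proj_simplex maps any vector onto {w : w_i >= 0, sum_i w_i = 1}; the input
# here is illustrative, not taken from the record.
import torch
import ot

v = torch.tensor([0.8, -0.1, 0.5])
w_proj = ot.utils.proj_simplex(v)
print(w_proj.sum())         # tensor(1.) up to floating point
print((w_proj >= 0).all())  # tensor(True)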
|
from elements import Elements
from enemy import Goal, GoalType
from ability import EffectType, Effect
from ai.single_target_attack import SingleTargetAttack
from ai.single_target_heal import SingleTargetHeal
from ai.status_effect import StatusEffect
from summon import Summon
from ai.explode import Explode
summons = {
'coal_golem':
Summon('Coal Golem',
{'h': 0, 's': 0, 'm': 8},
[
SingleTargetAttack('Punch', 0, 0.05, [Effect(EffectType.damage_health, Elements.earth, _dice_value=4)]),
SingleTargetAttack('Slam', 3, 0.05, [Effect(EffectType.damage_health, Elements.earth, _dice_value=8)])
],
[Goal(GoalType.damage_opponent, 400)],
strength_growth=7,
earth_res=0.1),
'blood_golem':
Summon('Blood Golem',
{'h': 5, 's': 0, 'm': 0},
[
SingleTargetAttack('Exsanguinate', 0, 0.05, [Effect(EffectType.bleed, Elements.earth, _effect_turns=1, _dot_value=2)]),
Explode('Blood Geyser', [Effect(EffectType.restore_health, Elements.water, _dice_value=4)])
],
[
Goal(GoalType.enrage, 0),
Goal(GoalType.damage_opponent, 400)
],
water_res=0.1,
electricity_res_growth=0.01,
dot_res=0.05,
dot_res_growth=0.03,
health_regen=1,
health_regen_growth=1,
shock_limit=3,
confusion_limit=5),
'wisp':
Summon('Will-o-the-Wisp',
{'h': 0, 's': 0, 'm': 5},
[
StatusEffect('Misdirection', 3, [
Effect(EffectType.debuff, Elements.earth, _stat='bonus_strength', _dice_value=4, _effect_turns=2, _status_effect_name='Weakened'),
Effect(EffectType.debuff, Elements.fire, _stat='bonus_fire', _dice_value=4, _effect_turns=2, _status_effect_name='Stupefied'),
Effect(EffectType.debuff, Elements.electricity, _stat='bonus_dexterity', _dice_value=4, _effect_turns=2, _status_effect_name='Exhausted'),
Effect(EffectType.debuff, Elements.water, _stat='bonus_willpower', _dice_value=4, _effect_turns=2, _status_effect_name='Discouraged'),
]),
SingleTargetAttack('Foolish Fire', 0, 0.08, [Effect(EffectType.damage_health, Elements.fire, _dice_value=2)]),
],
[
Goal(GoalType.debuff_opponent, 450),
Goal(GoalType.damage_opponent, 400)
],
health=7,
health_growth=4),
}
|
[
"ability.Effect",
"enemy.Goal"
] |
[((705, 740), 'enemy.Goal', 'Goal', (['GoalType.damage_opponent', '(400)'], {}), '(GoalType.damage_opponent, 400)\n', (709, 740), False, 'from enemy import Goal, GoalType\n'), ((1220, 1244), 'enemy.Goal', 'Goal', (['GoalType.enrage', '(0)'], {}), '(GoalType.enrage, 0)\n', (1224, 1244), False, 'from enemy import Goal, GoalType\n'), ((1265, 1300), 'enemy.Goal', 'Goal', (['GoalType.damage_opponent', '(400)'], {}), '(GoalType.damage_opponent, 400)\n', (1269, 1300), False, 'from enemy import Goal, GoalType\n'), ((2577, 2612), 'enemy.Goal', 'Goal', (['GoalType.debuff_opponent', '(450)'], {}), '(GoalType.debuff_opponent, 450)\n', (2581, 2612), False, 'from enemy import Goal, GoalType\n'), ((2633, 2668), 'enemy.Goal', 'Goal', (['GoalType.damage_opponent', '(400)'], {}), '(GoalType.damage_opponent, 400)\n', (2637, 2668), False, 'from enemy import Goal, GoalType\n'), ((482, 545), 'ability.Effect', 'Effect', (['EffectType.damage_health', 'Elements.earth'], {'_dice_value': '(4)'}), '(EffectType.damage_health, Elements.earth, _dice_value=4)\n', (488, 545), False, 'from ability import EffectType, Effect\n'), ((605, 668), 'ability.Effect', 'Effect', (['EffectType.damage_health', 'Elements.earth'], {'_dice_value': '(8)'}), '(EffectType.damage_health, Elements.earth, _dice_value=8)\n', (611, 668), False, 'from ability import EffectType, Effect\n'), ((980, 1051), 'ability.Effect', 'Effect', (['EffectType.bleed', 'Elements.earth'], {'_effect_turns': '(1)', '_dot_value': '(2)'}), '(EffectType.bleed, Elements.earth, _effect_turns=1, _dot_value=2)\n', (986, 1051), False, 'from ability import EffectType, Effect\n'), ((1099, 1163), 'ability.Effect', 'Effect', (['EffectType.restore_health', 'Elements.water'], {'_dice_value': '(4)'}), '(EffectType.restore_health, Elements.water, _dice_value=4)\n', (1105, 1163), False, 'from ability import EffectType, Effect\n'), ((1772, 1905), 'ability.Effect', 'Effect', (['EffectType.debuff', 'Elements.earth'], {'_stat': '"""bonus_strength"""', '_dice_value': '(4)', '_effect_turns': '(2)', '_status_effect_name': '"""Weakened"""'}), "(EffectType.debuff, Elements.earth, _stat='bonus_strength',\n _dice_value=4, _effect_turns=2, _status_effect_name='Weakened')\n", (1778, 1905), False, 'from ability import EffectType, Effect\n'), ((1925, 2054), 'ability.Effect', 'Effect', (['EffectType.debuff', 'Elements.fire'], {'_stat': '"""bonus_fire"""', '_dice_value': '(4)', '_effect_turns': '(2)', '_status_effect_name': '"""Stupefied"""'}), "(EffectType.debuff, Elements.fire, _stat='bonus_fire', _dice_value=4,\n _effect_turns=2, _status_effect_name='Stupefied')\n", (1931, 2054), False, 'from ability import EffectType, Effect\n'), ((2074, 2215), 'ability.Effect', 'Effect', (['EffectType.debuff', 'Elements.electricity'], {'_stat': '"""bonus_dexterity"""', '_dice_value': '(4)', '_effect_turns': '(2)', '_status_effect_name': '"""Exhausted"""'}), "(EffectType.debuff, Elements.electricity, _stat='bonus_dexterity',\n _dice_value=4, _effect_turns=2, _status_effect_name='Exhausted')\n", (2080, 2215), False, 'from ability import EffectType, Effect\n'), ((2235, 2372), 'ability.Effect', 'Effect', (['EffectType.debuff', 'Elements.water'], {'_stat': '"""bonus_willpower"""', '_dice_value': '(4)', '_effect_turns': '(2)', '_status_effect_name': '"""Discouraged"""'}), "(EffectType.debuff, Elements.water, _stat='bonus_willpower',\n _dice_value=4, _effect_turns=2, _status_effect_name='Discouraged')\n", (2241, 2372), False, 'from ability import EffectType, Effect\n'), ((2457, 2519), 'ability.Effect', 'Effect', 
(['EffectType.damage_health', 'Elements.fire'], {'_dice_value': '(2)'}), '(EffectType.damage_health, Elements.fire, _dice_value=2)\n', (2463, 2519), False, 'from ability import EffectType, Effect\n')]
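Each ability payload in the record is a list of keyword-configured Effect objects; a hedged sketch building one in isolation (the call mirrors the Coal Golem's 'Punch' above, field semantics inferred from their names):

# One standalone earth-damage effect, exactly as passed to SingleTargetAttack.
from ability import EffectType, Effect
from elements import Elements

punch_damage = Effect(EffectType.damage_health, Elements.earth, _dice_value=4)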
|
from __future__ import annotations
from typing import Match
from bot.config import Config
from bot.data import command
from bot.data import esc
from bot.data import format_msg
@command('!bonk')
async def cmd_bonk(config: Config, match: Match[str]) -> str:
_, _, rest = match['msg'].partition(' ')
rest = rest.strip() or 'marsha_socks'
return format_msg(
match,
f'awcBonk awcBonk awcBonk {esc(rest)} awcBonk awcBonk awcBonk',
)
|
[
"bot.data.command",
"bot.data.esc"
] |
[((181, 197), 'bot.data.command', 'command', (['"""!bonk"""'], {}), "('!bonk')\n", (188, 197), False, 'from bot.data import command\n'), ((420, 429), 'bot.data.esc', 'esc', (['rest'], {}), '(rest)\n', (423, 429), False, 'from bot.data import esc\n')]
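cmd_bonk's argument handling is plain string partitioning; an illustration that needs no bot runtime (the fallback name comes straight from the record):

# '!bonk chat' keeps the argument; a bare '!bonk' falls back to the default.
for msg in ('!bonk chat', '!bonk'):
    _, _, rest = msg.partition(' ')
    print(rest.strip() or 'marsha_socks')  # 'chat', then 'marsha_socks'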
|
from flask import Flask
from flask_restful import Resource, Api, reqparse
app = Flask(__name__)
api = Api(app)
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, help='Name of a guest')
parser.add_argument('id', type=int, help='Numerical ID of a guest in the list')
# A list containing names of guests
guest_list = ['Akansh', '<NAME>', '<NAME>']
# GET request to read all items in our list
class GuestList(Resource):
def get(self):
return {'names': [name for name in guest_list]}, 200
# GET request to check for a specific guest
class GuestById(Resource):
def get(self):
args = parser.parse_args()
id = args['id']
return {'name': guest_list[id]}
# POST request to create a new guest into our list
class GuestAdd(Resource):
def post(self):
args = parser.parse_args()
name = args['name']
guest_list.append(name)
return {'message': 'Guest added'}
# POST request to delete a name from our list
class GuestDelete(Resource):
def post(self):
args = parser.parse_args()
id = args['id']
guest_list.pop(id)
return {'message': 'Guest removed'}
# PUT request to update an existing guest in our list
class GuestUpdate(Resource):
def put(self):
args = parser.parse_args()
id = args['id']
name = args['name']
guest_list[id] = name
return {'message': 'Guest updated'}
# Creating endpoints for our API
api.add_resource(GuestList, '/guests')
api.add_resource(GuestById, '/guest')
api.add_resource(GuestAdd, '/guest/add')
api.add_resource(GuestDelete, '/guest/delete')
api.add_resource(GuestUpdate, '/guest/update')
if __name__ == '__main__':
app.run()
|
[
"flask_restful.Api",
"flask.Flask",
"flask_restful.reqparse.RequestParser"
] |
[((82, 97), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (87, 97), False, 'from flask import Flask\n'), ((104, 112), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (107, 112), False, 'from flask_restful import Resource, Api, reqparse\n'), ((124, 148), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (146, 148), False, 'from flask_restful import Resource, Api, reqparse\n')]
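A hedged sketch of exercising the endpoints with Flask's built-in test client: no running server is needed, the paths come from the api.add_resource calls above, and reqparse reads query-string values by default.

# Hypothetical round trip against the guest API defined above.
client = app.test_client()
print(client.get('/guests').get_json())                    # {'names': [...]}
client.post('/guest/add', query_string={'name': 'Dana'})   # 'Dana' is illustrative
print(client.get('/guest', query_string={'id': 3}).get_json())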
|
from flask_appbuilder import ModelRestApi
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.sqla.models import PermissionView
class PermissionViewMenuApi(ModelRestApi):
resource_name = "security/permissions-resources"
openapi_spec_tag = "Security Permissions on Resources (View Menus)"
class_permission_name = "PermissionViewMenu"
datamodel = SQLAInterface(PermissionView)
allow_browser_login = True
list_columns = ["id", "permission.name", "view_menu.name"]
show_columns = list_columns
add_columns = ["permission_id", "view_menu_id"]
edit_columns = add_columns
|
[
"flask_appbuilder.models.sqla.interface.SQLAInterface"
] |
[((407, 436), 'flask_appbuilder.models.sqla.interface.SQLAInterface', 'SQLAInterface', (['PermissionView'], {}), '(PermissionView)\n', (420, 436), False, 'from flask_appbuilder.models.sqla.interface import SQLAInterface\n')]
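The class above only defines the API; it still has to be registered on an initialized Flask-AppBuilder instance (creation of appbuilder is assumed here), after which CRUD endpoints are served under the configured resource_name:

# Hedged registration sketch; `appbuilder` is an assumed AppBuilder instance.
appbuilder.add_api(PermissionViewMenuApi)
# -> REST endpoints under /api/v1/security/permissions-resources by default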
|
"""
GTSAM Copyright 2010-2018, Georgia Tech Research Corporation,
Atlanta, Georgia 30332-0415
All Rights Reserved
Authors: <NAME>, et al. (see THANKS for the full author list)
See LICENSE for the license information
Kinematics of three-link manipulator with GTSAM poses and product of exponential maps.
Author: <NAME>
"""
# pylint: disable=invalid-name, E1101
from __future__ import print_function
import math
import unittest
from functools import reduce
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D # pylint: disable=W0611
import gtsam
import gtsam.utils.plot as gtsam_plot
from gtsam import Pose2
from gtsam.utils.test_case import GtsamTestCase
def vector3(x, y, z):
"""Create 3D double numpy array."""
return np.array([x, y, z], dtype=np.float)
def compose(*poses):
"""Compose all Pose2 transforms given as arguments from left to right."""
return reduce((lambda x, y: x.compose(y)), poses)
def vee(M):
"""Pose2 vee operator."""
return vector3(M[0, 2], M[1, 2], M[1, 0])
def delta(g0, g1):
"""Difference between x,y,,theta components of SE(2) poses."""
return vector3(g1.x() - g0.x(), g1.y() - g0.y(), g1.theta() - g0.theta())
def trajectory(g0, g1, N=20):
""" Create an interpolated trajectory in SE(2), treating x,y, and theta separately.
g0 and g1 are the initial and final pose, respectively.
N is the number of *intervals*
Returns N+1 poses
"""
e = delta(g0, g1)
return [Pose2(g0.x()+e[0]*t, g0.y()+e[1]*t, g0.theta()+e[2]*t) for t in np.linspace(0, 1, N)]
class ThreeLinkArm(object):
"""Three-link arm class."""
def __init__(self):
self.L1 = 3.5
self.L2 = 3.5
self.L3 = 2.5
self.xi1 = vector3(0, 0, 1)
self.xi2 = vector3(self.L1, 0, 1)
self.xi3 = vector3(self.L1+self.L2, 0, 1)
self.sXt0 = Pose2(0, self.L1+self.L2 + self.L3, math.radians(90))
def fk(self, q):
""" Forward kinematics.
Takes numpy array of joint angles, in radians.
"""
sXl1 = Pose2(0, 0, math.radians(90))
l1Zl1 = Pose2(0, 0, q[0])
l1Xl2 = Pose2(self.L1, 0, 0)
l2Zl2 = Pose2(0, 0, q[1])
l2Xl3 = Pose2(self.L2, 0, 0)
l3Zl3 = Pose2(0, 0, q[2])
l3Xt = Pose2(self.L3, 0, 0)
return compose(sXl1, l1Zl1, l1Xl2, l2Zl2, l2Xl3, l3Zl3, l3Xt)
def jacobian(self, q):
""" Calculate manipulator Jacobian.
Takes numpy array of joint angles, in radians.
"""
a = q[0]+q[1]
b = a+q[2]
return np.array([[-self.L1*math.cos(q[0]) - self.L2*math.cos(a)-self.L3*math.cos(b),
-self.L1*math.cos(a)-self.L3*math.cos(b),
- self.L3*math.cos(b)],
[-self.L1*math.sin(q[0]) - self.L2*math.sin(a)-self.L3*math.sin(b),
-self.L1*math.sin(a)-self.L3*math.sin(b),
- self.L3*math.sin(b)],
[1, 1, 1]], np.float)
def poe(self, q):
""" Forward kinematics.
Takes numpy array of joint angles, in radians.
"""
l1Zl1 = Pose2.Expmap(self.xi1 * q[0])
l2Zl2 = Pose2.Expmap(self.xi2 * q[1])
l3Zl3 = Pose2.Expmap(self.xi3 * q[2])
return compose(l1Zl1, l2Zl2, l3Zl3, self.sXt0)
def con(self, q):
""" Forward kinematics, conjugation form.
Takes numpy array of joint angles, in radians.
"""
def expmap(x, y, theta):
"""Implement exponential map via conjugation with axis (x,y)."""
return compose(Pose2(x, y, 0), Pose2(0, 0, theta), Pose2(-x, -y, 0))
l1Zl1 = expmap(0.0, 0.0, q[0])
l2Zl2 = expmap(0.0, self.L1, q[1])
l3Zl3 = expmap(0.0, self.L1+self.L2, q[2])
return compose(l1Zl1, l2Zl2, l3Zl3, self.sXt0)
def ik(self, sTt_desired, e=1e-9):
""" Inverse kinematics.
Takes desired Pose2 of tool T with respect to base S.
        Optional: e, the error-norm threshold for convergence.
"""
q = np.radians(vector3(30, -30, 45)) # well within workspace
error = vector3(100, 100, 100)
while np.linalg.norm(error) > e:
error = delta(sTt_desired, self.fk(q))
J = self.jacobian(q)
q -= np.dot(np.linalg.pinv(J), error)
# return result in interval [-pi,pi)
return np.remainder(q+math.pi, 2*math.pi)-math.pi
def manipulator_jacobian(self, q):
""" Calculate manipulator Jacobian.
Takes numpy array of joint angles, in radians.
Returns the manipulator Jacobian of differential twists. When multiplied with
a vector of joint velocities, will yield a single differential twist which is
the spatial velocity d(sTt)/dt * inv(sTt) of the end-effector pose.
Just like always, differential twists can be hatted and multiplied with spatial
coordinates of a point to give the spatial velocity of the point.
"""
l1Zl1 = Pose2.Expmap(self.xi1 * q[0])
l2Zl2 = Pose2.Expmap(self.xi2 * q[1])
# l3Zl3 = Pose2.Expmap(self.xi3 * q[2])
p1 = self.xi1
# p1 = Pose2().Adjoint(self.xi1)
sTl1 = l1Zl1
p2 = sTl1.Adjoint(self.xi2)
sTl2 = compose(l1Zl1, l2Zl2)
p3 = sTl2.Adjoint(self.xi3)
differential_twists = [p1, p2, p3]
return np.stack(differential_twists, axis=1)
def plot(self, fignum, q):
""" Plot arm.
Takes figure number, and numpy array of joint angles, in radians.
"""
fig = plt.figure(fignum)
axes = fig.gca()
sXl1 = Pose2(0, 0, math.radians(90))
p1 = sXl1.translation()
gtsam_plot.plot_pose2_on_axes(axes, sXl1)
def plot_line(p, g, color):
q = g.translation()
line = np.append(p[np.newaxis], q[np.newaxis], axis=0)
axes.plot(line[:, 0], line[:, 1], color)
return q
l1Zl1 = Pose2(0, 0, q[0])
l1Xl2 = Pose2(self.L1, 0, 0)
sTl2 = compose(sXl1, l1Zl1, l1Xl2)
p2 = plot_line(p1, sTl2, 'r-')
gtsam_plot.plot_pose2_on_axes(axes, sTl2)
l2Zl2 = Pose2(0, 0, q[1])
l2Xl3 = Pose2(self.L2, 0, 0)
sTl3 = compose(sTl2, l2Zl2, l2Xl3)
p3 = plot_line(p2, sTl3, 'g-')
gtsam_plot.plot_pose2_on_axes(axes, sTl3)
l3Zl3 = Pose2(0, 0, q[2])
l3Xt = Pose2(self.L3, 0, 0)
sTt = compose(sTl3, l3Zl3, l3Xt)
plot_line(p3, sTt, 'b-')
gtsam_plot.plot_pose2_on_axes(axes, sTt)
# Create common example configurations.
Q0 = vector3(0, 0, 0)
Q1 = np.radians(vector3(-30, -45, -90))
Q2 = np.radians(vector3(-90, 90, 0))
class TestPose2SLAMExample(GtsamTestCase):
"""Unit tests for functions used below."""
def setUp(self):
self.arm = ThreeLinkArm()
def assertPose2Equals(self, actual, expected, tol=1e-2):
"""Helper function that prints out actual and expected if not equal."""
equal = actual.equals(expected, tol)
if not equal:
raise self.failureException(
"Poses are not equal:\n{}!={}".format(actual, expected))
def test_fk_arm(self):
"""Make sure forward kinematics is correct for some known test configurations."""
# at rest
expected = Pose2(0, 2*3.5 + 2.5, math.radians(90))
sTt = self.arm.fk(Q0)
self.assertIsInstance(sTt, Pose2)
self.assertPose2Equals(sTt, expected)
# -30, -45, -90
expected = Pose2(5.78, 1.52, math.radians(-75))
sTt = self.arm.fk(Q1)
self.assertPose2Equals(sTt, expected)
def test_jacobian(self):
"""Test Jacobian calculation."""
# at rest
expected = np.array([[-9.5, -6, -2.5], [0, 0, 0], [1, 1, 1]], np.float)
J = self.arm.jacobian(Q0)
np.testing.assert_array_almost_equal(J, expected)
# at -90, 90, 0
expected = np.array([[-6, -6, -2.5], [3.5, 0, 0], [1, 1, 1]], np.float)
J = self.arm.jacobian(Q2)
np.testing.assert_array_almost_equal(J, expected)
def test_con_arm(self):
"""Make sure POE is correct for some known test configurations."""
# at rest
expected = Pose2(0, 2*3.5 + 2.5, math.radians(90))
sTt = self.arm.con(Q0)
self.assertIsInstance(sTt, Pose2)
self.assertPose2Equals(sTt, expected)
# -30, -45, -90
expected = Pose2(5.78, 1.52, math.radians(-75))
sTt = self.arm.con(Q1)
self.assertPose2Equals(sTt, expected)
def test_poe_arm(self):
"""Make sure POE is correct for some known test configurations."""
# at rest
expected = Pose2(0, 2*3.5 + 2.5, math.radians(90))
sTt = self.arm.poe(Q0)
self.assertIsInstance(sTt, Pose2)
self.assertPose2Equals(sTt, expected)
# -30, -45, -90
expected = Pose2(5.78, 1.52, math.radians(-75))
sTt = self.arm.poe(Q1)
self.assertPose2Equals(sTt, expected)
def test_ik(self):
"""Check iterative inverse kinematics function."""
# at rest
actual = self.arm.ik(Pose2(0, 2*3.5 + 2.5, math.radians(90)))
np.testing.assert_array_almost_equal(actual, Q0, decimal=2)
# -30, -45, -90
sTt_desired = Pose2(5.78, 1.52, math.radians(-75))
actual = self.arm.ik(sTt_desired)
self.assertPose2Equals(self.arm.fk(actual), sTt_desired)
np.testing.assert_array_almost_equal(actual, Q1, decimal=2)
def test_manipulator_jacobian(self):
"""Test Jacobian calculation."""
# at rest
expected = np.array([[0, 3.5, 7], [0, 0, 0], [1, 1, 1]], np.float)
J = self.arm.manipulator_jacobian(Q0)
np.testing.assert_array_almost_equal(J, expected)
# at -90, 90, 0
expected = np.array(
[[0, 0, 3.5], [0, -3.5, -3.5], [1, 1, 1]], np.float)
J = self.arm.manipulator_jacobian(Q2)
np.testing.assert_array_almost_equal(J, expected)
def run_example():
""" Use trajectory interpolation and then trajectory tracking a la Murray
to move a 3-link arm on a straight line.
"""
# Create arm
arm = ThreeLinkArm()
# Get initial pose using forward kinematics
q = np.radians(vector3(30, -30, 45))
sTt_initial = arm.fk(q)
# Create interpolated trajectory in task space to desired goal pose
sTt_goal = Pose2(2.4, 4.3, math.radians(0))
poses = trajectory(sTt_initial, sTt_goal, 50)
# Setup figure and plot initial pose
fignum = 0
fig = plt.figure(fignum)
axes = fig.gca()
axes.set_xlim(-5, 5)
axes.set_ylim(0, 10)
gtsam_plot.plot_pose2(fignum, arm.fk(q))
# For all poses in interpolated trajectory, calculate dq to move to next pose.
# We do this by calculating the local Jacobian J and doing dq = inv(J)*delta(sTt, pose).
for pose in poses:
sTt = arm.fk(q)
error = delta(sTt, pose)
J = arm.jacobian(q)
q += np.dot(np.linalg.inv(J), error)
arm.plot(fignum, q)
plt.pause(0.01)
plt.pause(10)
if __name__ == "__main__":
run_example()
unittest.main()
|
[
"unittest.main",
"numpy.stack",
"gtsam.Pose2",
"math.radians",
"numpy.remainder",
"math.sin",
"gtsam.Pose2.Expmap",
"numpy.append",
"matplotlib.pyplot.figure",
"numpy.array",
"numpy.linalg.norm",
"numpy.linspace",
"numpy.linalg.inv",
"math.cos",
"numpy.linalg.pinv",
"numpy.testing.assert_array_almost_equal",
"matplotlib.pyplot.pause",
"gtsam.utils.plot.plot_pose2_on_axes"
] |
[((775, 810), 'numpy.array', 'np.array', (['[x, y, z]'], {'dtype': 'np.float'}), '([x, y, z], dtype=np.float)\n', (783, 810), True, 'import numpy as np\n'), ((10685, 10703), 'matplotlib.pyplot.figure', 'plt.figure', (['fignum'], {}), '(fignum)\n', (10695, 10703), True, 'import matplotlib.pyplot as plt\n'), ((11207, 11220), 'matplotlib.pyplot.pause', 'plt.pause', (['(10)'], {}), '(10)\n', (11216, 11220), True, 'import matplotlib.pyplot as plt\n'), ((11272, 11287), 'unittest.main', 'unittest.main', ([], {}), '()\n', (11285, 11287), False, 'import unittest\n'), ((2140, 2157), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'q[0]'], {}), '(0, 0, q[0])\n', (2145, 2157), False, 'from gtsam import Pose2\n'), ((2174, 2194), 'gtsam.Pose2', 'Pose2', (['self.L1', '(0)', '(0)'], {}), '(self.L1, 0, 0)\n', (2179, 2194), False, 'from gtsam import Pose2\n'), ((2211, 2228), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'q[1]'], {}), '(0, 0, q[1])\n', (2216, 2228), False, 'from gtsam import Pose2\n'), ((2245, 2265), 'gtsam.Pose2', 'Pose2', (['self.L2', '(0)', '(0)'], {}), '(self.L2, 0, 0)\n', (2250, 2265), False, 'from gtsam import Pose2\n'), ((2282, 2299), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'q[2]'], {}), '(0, 0, q[2])\n', (2287, 2299), False, 'from gtsam import Pose2\n'), ((2315, 2335), 'gtsam.Pose2', 'Pose2', (['self.L3', '(0)', '(0)'], {}), '(self.L3, 0, 0)\n', (2320, 2335), False, 'from gtsam import Pose2\n'), ((3201, 3230), 'gtsam.Pose2.Expmap', 'Pose2.Expmap', (['(self.xi1 * q[0])'], {}), '(self.xi1 * q[0])\n', (3213, 3230), False, 'from gtsam import Pose2\n'), ((3247, 3276), 'gtsam.Pose2.Expmap', 'Pose2.Expmap', (['(self.xi2 * q[1])'], {}), '(self.xi2 * q[1])\n', (3259, 3276), False, 'from gtsam import Pose2\n'), ((3293, 3322), 'gtsam.Pose2.Expmap', 'Pose2.Expmap', (['(self.xi3 * q[2])'], {}), '(self.xi3 * q[2])\n', (3305, 3322), False, 'from gtsam import Pose2\n'), ((5115, 5144), 'gtsam.Pose2.Expmap', 'Pose2.Expmap', (['(self.xi1 * q[0])'], {}), '(self.xi1 * q[0])\n', (5127, 5144), False, 'from gtsam import Pose2\n'), ((5161, 5190), 'gtsam.Pose2.Expmap', 'Pose2.Expmap', (['(self.xi2 * q[1])'], {}), '(self.xi2 * q[1])\n', (5173, 5190), False, 'from gtsam import Pose2\n'), ((5494, 5531), 'numpy.stack', 'np.stack', (['differential_twists'], {'axis': '(1)'}), '(differential_twists, axis=1)\n', (5502, 5531), True, 'import numpy as np\n'), ((5690, 5708), 'matplotlib.pyplot.figure', 'plt.figure', (['fignum'], {}), '(fignum)\n', (5700, 5708), True, 'import matplotlib.pyplot as plt\n'), ((5820, 5861), 'gtsam.utils.plot.plot_pose2_on_axes', 'gtsam_plot.plot_pose2_on_axes', (['axes', 'sXl1'], {}), '(axes, sXl1)\n', (5849, 5861), True, 'import gtsam.utils.plot as gtsam_plot\n'), ((6089, 6106), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'q[0]'], {}), '(0, 0, q[0])\n', (6094, 6106), False, 'from gtsam import Pose2\n'), ((6123, 6143), 'gtsam.Pose2', 'Pose2', (['self.L1', '(0)', '(0)'], {}), '(self.L1, 0, 0)\n', (6128, 6143), False, 'from gtsam import Pose2\n'), ((6234, 6275), 'gtsam.utils.plot.plot_pose2_on_axes', 'gtsam_plot.plot_pose2_on_axes', (['axes', 'sTl2'], {}), '(axes, sTl2)\n', (6263, 6275), True, 'import gtsam.utils.plot as gtsam_plot\n'), ((6293, 6310), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'q[1]'], {}), '(0, 0, q[1])\n', (6298, 6310), False, 'from gtsam import Pose2\n'), ((6327, 6347), 'gtsam.Pose2', 'Pose2', (['self.L2', '(0)', '(0)'], {}), '(self.L2, 0, 0)\n', (6332, 6347), False, 'from gtsam import Pose2\n'), ((6438, 6479), 'gtsam.utils.plot.plot_pose2_on_axes', 'gtsam_plot.plot_pose2_on_axes', (['axes', 
'sTl3'], {}), '(axes, sTl3)\n', (6467, 6479), True, 'import gtsam.utils.plot as gtsam_plot\n'), ((6497, 6514), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'q[2]'], {}), '(0, 0, q[2])\n', (6502, 6514), False, 'from gtsam import Pose2\n'), ((6530, 6550), 'gtsam.Pose2', 'Pose2', (['self.L3', '(0)', '(0)'], {}), '(self.L3, 0, 0)\n', (6535, 6550), False, 'from gtsam import Pose2\n'), ((6633, 6673), 'gtsam.utils.plot.plot_pose2_on_axes', 'gtsam_plot.plot_pose2_on_axes', (['axes', 'sTt'], {}), '(axes, sTt)\n', (6662, 6673), True, 'import gtsam.utils.plot as gtsam_plot\n'), ((7864, 7924), 'numpy.array', 'np.array', (['[[-9.5, -6, -2.5], [0, 0, 0], [1, 1, 1]]', 'np.float'], {}), '([[-9.5, -6, -2.5], [0, 0, 0], [1, 1, 1]], np.float)\n', (7872, 7924), True, 'import numpy as np\n'), ((7967, 8016), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['J', 'expected'], {}), '(J, expected)\n', (8003, 8016), True, 'import numpy as np\n'), ((8061, 8121), 'numpy.array', 'np.array', (['[[-6, -6, -2.5], [3.5, 0, 0], [1, 1, 1]]', 'np.float'], {}), '([[-6, -6, -2.5], [3.5, 0, 0], [1, 1, 1]], np.float)\n', (8069, 8121), True, 'import numpy as np\n'), ((8164, 8213), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['J', 'expected'], {}), '(J, expected)\n', (8200, 8213), True, 'import numpy as np\n'), ((9309, 9368), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['actual', 'Q0'], {'decimal': '(2)'}), '(actual, Q0, decimal=2)\n', (9345, 9368), True, 'import numpy as np\n'), ((9568, 9627), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['actual', 'Q1'], {'decimal': '(2)'}), '(actual, Q1, decimal=2)\n', (9604, 9627), True, 'import numpy as np\n'), ((9748, 9803), 'numpy.array', 'np.array', (['[[0, 3.5, 7], [0, 0, 0], [1, 1, 1]]', 'np.float'], {}), '([[0, 3.5, 7], [0, 0, 0], [1, 1, 1]], np.float)\n', (9756, 9803), True, 'import numpy as np\n'), ((9858, 9907), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['J', 'expected'], {}), '(J, expected)\n', (9894, 9907), True, 'import numpy as np\n'), ((9952, 10013), 'numpy.array', 'np.array', (['[[0, 0, 3.5], [0, -3.5, -3.5], [1, 1, 1]]', 'np.float'], {}), '([[0, 0, 3.5], [0, -3.5, -3.5], [1, 1, 1]], np.float)\n', (9960, 10013), True, 'import numpy as np\n'), ((10081, 10130), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['J', 'expected'], {}), '(J, expected)\n', (10117, 10130), True, 'import numpy as np\n'), ((10551, 10566), 'math.radians', 'math.radians', (['(0)'], {}), '(0)\n', (10563, 10566), False, 'import math\n'), ((11186, 11201), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.01)'], {}), '(0.01)\n', (11195, 11201), True, 'import matplotlib.pyplot as plt\n'), ((1577, 1597), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'N'], {}), '(0, 1, N)\n', (1588, 1597), True, 'import numpy as np\n'), ((1936, 1952), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (1948, 1952), False, 'import math\n'), ((2106, 2122), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (2118, 2122), False, 'import math\n'), ((4249, 4270), 'numpy.linalg.norm', 'np.linalg.norm', (['error'], {}), '(error)\n', (4263, 4270), True, 'import numpy as np\n'), ((4471, 4509), 'numpy.remainder', 'np.remainder', (['(q + math.pi)', '(2 * math.pi)'], {}), '(q + math.pi, 2 * math.pi)\n', (4483, 4509), True, 'import numpy as np\n'), ((5762, 5778), 'math.radians', 'math.radians', (['(90)'], 
{}), '(90)\n', (5774, 5778), False, 'import math\n'), ((5950, 5997), 'numpy.append', 'np.append', (['p[np.newaxis]', 'q[np.newaxis]'], {'axis': '(0)'}), '(p[np.newaxis], q[np.newaxis], axis=0)\n', (5959, 5997), True, 'import numpy as np\n'), ((7463, 7479), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (7475, 7479), False, 'import math\n'), ((7661, 7678), 'math.radians', 'math.radians', (['(-75)'], {}), '(-75)\n', (7673, 7678), False, 'import math\n'), ((8377, 8393), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (8389, 8393), False, 'import math\n'), ((8576, 8593), 'math.radians', 'math.radians', (['(-75)'], {}), '(-75)\n', (8588, 8593), False, 'import math\n'), ((8835, 8851), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (8847, 8851), False, 'import math\n'), ((9034, 9051), 'math.radians', 'math.radians', (['(-75)'], {}), '(-75)\n', (9046, 9051), False, 'import math\n'), ((9434, 9451), 'math.radians', 'math.radians', (['(-75)'], {}), '(-75)\n', (9446, 9451), False, 'import math\n'), ((11125, 11141), 'numpy.linalg.inv', 'np.linalg.inv', (['J'], {}), '(J)\n', (11138, 11141), True, 'import numpy as np\n'), ((3659, 3673), 'gtsam.Pose2', 'Pose2', (['x', 'y', '(0)'], {}), '(x, y, 0)\n', (3664, 3673), False, 'from gtsam import Pose2\n'), ((3675, 3693), 'gtsam.Pose2', 'Pose2', (['(0)', '(0)', 'theta'], {}), '(0, 0, theta)\n', (3680, 3693), False, 'from gtsam import Pose2\n'), ((3695, 3711), 'gtsam.Pose2', 'Pose2', (['(-x)', '(-y)', '(0)'], {}), '(-x, -y, 0)\n', (3700, 3711), False, 'from gtsam import Pose2\n'), ((4384, 4401), 'numpy.linalg.pinv', 'np.linalg.pinv', (['J'], {}), '(J)\n', (4398, 4401), True, 'import numpy as np\n'), ((9282, 9298), 'math.radians', 'math.radians', (['(90)'], {}), '(90)\n', (9294, 9298), False, 'import math\n'), ((2787, 2798), 'math.cos', 'math.cos', (['b'], {}), '(b)\n', (2795, 2798), False, 'import math\n'), ((2998, 3009), 'math.sin', 'math.sin', (['b'], {}), '(b)\n', (3006, 3009), False, 'import math\n'), ((2670, 2681), 'math.cos', 'math.cos', (['b'], {}), '(b)\n', (2678, 2681), False, 'import math\n'), ((2718, 2729), 'math.cos', 'math.cos', (['a'], {}), '(a)\n', (2726, 2729), False, 'import math\n'), ((2738, 2749), 'math.cos', 'math.cos', (['b'], {}), '(b)\n', (2746, 2749), False, 'import math\n'), ((2881, 2892), 'math.sin', 'math.sin', (['b'], {}), '(b)\n', (2889, 2892), False, 'import math\n'), ((2929, 2940), 'math.sin', 'math.sin', (['a'], {}), '(a)\n', (2937, 2940), False, 'import math\n'), ((2949, 2960), 'math.sin', 'math.sin', (['b'], {}), '(b)\n', (2957, 2960), False, 'import math\n'), ((2625, 2639), 'math.cos', 'math.cos', (['q[0]'], {}), '(q[0])\n', (2633, 2639), False, 'import math\n'), ((2650, 2661), 'math.cos', 'math.cos', (['a'], {}), '(a)\n', (2658, 2661), False, 'import math\n'), ((2836, 2850), 'math.sin', 'math.sin', (['q[0]'], {}), '(q[0])\n', (2844, 2850), False, 'import math\n'), ((2861, 2872), 'math.sin', 'math.sin', (['a'], {}), '(a)\n', (2869, 2872), False, 'import math\n')]
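The free helpers in the record (delta, trajectory) operate on plain Pose2 values; a short hedged usage sketch with illustrative numbers:

# delta returns the (x, y, theta) difference; trajectory returns N+1 poses.
g0 = Pose2(0, 0, 0)
g1 = Pose2(1, 2, math.radians(90))
print(delta(g0, g1))                  # approx. array([1., 2., 1.5708])
print(len(trajectory(g0, g1, N=20)))  # 21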
|
# Generated by Django 3.0.2 on 2020-01-13 02:58
from uuid import uuid4
from django.conf import settings
from django.db import migrations, models
from private_storage.fields import PrivateFileField
from private_storage.storage.files import PrivateFileSystemStorage
import django.utils.timezone
import model_utils.fields
import partial_date.fields
from attendees.persons.models.enum import GenderEnum
from attendees.persons.models import Utility
class Migration(migrations.Migration):
dependencies = [
('persons', '0005_folk'),
('whereabouts', '0004_division'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('pghistory', '0003_auto_20201023_1636'),
]
operations = [
migrations.CreateModel(
name='Attendee',
fields=[
('id', models.UUIDField(default=uuid4, editable=False, primary_key=True, serialize=False)),
('user', models.OneToOneField(blank=True, default=None, null=True, on_delete=models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('division', models.ForeignKey(default=0, blank=False, null=False, on_delete=models.SET(0), to='whereabouts.Division')),
('first_name', models.CharField(blank=True, db_index=True, max_length=25, null=True)),
('last_name', models.CharField(blank=True, db_index=True, max_length=25, null=True)),
('first_name2', models.CharField(blank=True, db_index=True, max_length=12, null=True)),
('last_name2', models.CharField(blank=True, db_index=True, max_length=8, null=True)),
('gender', models.CharField(choices=GenderEnum.choices(), default=GenderEnum.UNSPECIFIED, max_length=11)),
('photo', PrivateFileField(blank=True, null=True, storage=PrivateFileSystemStorage(), upload_to='attendee_portrait', verbose_name='Photo')),
('actual_birthday', models.DateField(blank=True, null=True)),
('estimated_birthday', partial_date.fields.PartialDateField(blank=True, null=True, help_text='1998, 1998-12 or 1992-12-31, please enter 1800 if year not known')),
('deathday', models.DateField(blank=True, null=True)),
# ('progressions', models.JSONField(blank=True, default=dict, help_text='Example: {"Christian": true, "baptized": {"time": "12/31/2020", "place":"SF"}}. Please keep {} here even no data', null=True)),
('infos', models.JSONField(blank=True, default=Utility.attendee_infos, help_text=('Example: {"fixed": {"food_pref": "peanut allergy", "nick_name": "John"}}.Please keep {} here even no data'), null=True)),
],
options={
'db_table': 'persons_attendees',
'ordering': ['last_name', 'first_name'],
},
bases=(Utility, models.Model),
),
migrations.RunSQL(Utility.default_sql('persons_attendees')),
# migrations.RunSQL(
# sql="""
# ALTER TABLE persons_attendees DROP COLUMN full_name;
# ALTER TABLE persons_attendees ADD COLUMN full_name VARCHAR(70)
# GENERATED ALWAYS AS (TRIM(
# COALESCE(first_name, '') || ' ' ||
# COALESCE(last_name, '') || ' ' ||
# COALESCE(last_name2, '') ||
# COALESCE(first_name2, '')
# )) STORED;
# CREATE INDEX attendee_full_name_raw
# ON persons_attendees (full_name);
# """,
# # reverse_sql="",
# ), # switching to use opencc for language conversion in Attendee.save()
migrations.AddIndex(
model_name='attendee',
index=django.contrib.postgres.indexes.GinIndex(fields=['infos'], name='attendee_infos_gin'),
),
# migrations.AddIndex(
# model_name='attendee',
# index=django.contrib.postgres.indexes.GinIndex(fields=['progressions'], name='attendee_progressions_gin'),
# ),
migrations.CreateModel(
name='AttendeesHistory',
fields=[
('pgh_id', models.BigAutoField(primary_key=True, serialize=False)),
('pgh_created_at', models.DateTimeField(auto_now_add=True)),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('pgh_obj', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='history', to='persons.attendee')),
('id', models.UUIDField(db_index=True, default=uuid4, editable=False, serialize=False)),
('division', models.ForeignKey(db_constraint=False, default=0, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='whereabouts.division')),
('gender', models.CharField(choices=GenderEnum.choices(), default=GenderEnum['UNSPECIFIED'], max_length=11)),
('pgh_label', models.TextField(help_text='The event label.')),
('user', models.ForeignKey(blank=True, db_constraint=False, default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to=settings.AUTH_USER_MODEL)),
('pgh_context', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')),
('infos', models.JSONField(blank=True, default=Utility.attendee_infos, help_text='Example: {"fixed": {"food_pref": "peanut allergy", "nick_name": "John"}}.Please keep {} here even no data', null=True)),
('first_name', models.CharField(blank=True, max_length=25, null=True)),
('last_name', models.CharField(blank=True, max_length=25, null=True)),
('first_name2', models.CharField(blank=True, max_length=12, null=True)),
('last_name2', models.CharField(blank=True, max_length=8, null=True)),
('photo', PrivateFileField(blank=True, null=True, storage=PrivateFileSystemStorage(), upload_to='attendee_portrait', verbose_name='Photo')),
('actual_birthday', models.DateField(blank=True, null=True)),
('estimated_birthday', partial_date.fields.PartialDateField(blank=True, help_text='1998, 1998-12 or 1992-12-31, please enter 1800 if year not known', null=True)),
('deathday', models.DateField(blank=True, null=True)),
],
options={
'db_table': 'persons_attendeeshistory',
},
),
migrations.RunSQL(Utility.pgh_default_sql('persons_attendeeshistory')),
]
|
[
"django.db.models.OneToOneField",
"django.db.models.TextField",
"django.db.migrations.swappable_dependency",
"private_storage.storage.files.PrivateFileSystemStorage",
"django.db.models.CharField",
"django.db.models.BigAutoField",
"django.db.models.DateTimeField",
"django.db.models.ForeignKey",
"django.db.models.JSONField",
"django.db.models.SET",
"django.db.models.BooleanField",
"attendees.persons.models.Utility.pgh_default_sql",
"attendees.persons.models.Utility.default_sql",
"django.db.models.DateField",
"django.db.models.UUIDField",
"attendees.persons.models.enum.GenderEnum.choices"
] |
[((592, 649), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (623, 649), False, 'from django.db import migrations, models\n'), ((3256, 3296), 'attendees.persons.models.Utility.default_sql', 'Utility.default_sql', (['"persons_attendees"'], {}), "('persons_attendees')\n", (3275, 3296), False, 'from attendees.persons.models import Utility\n'), ((7323, 7374), 'attendees.persons.models.Utility.pgh_default_sql', 'Utility.pgh_default_sql', (['"persons_attendeeshistory"'], {}), "('persons_attendeeshistory')\n", (7346, 7374), False, 'from attendees.persons.models import Utility\n'), ((832, 919), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid4, editable=False, primary_key=True, serialize\n =False)\n', (848, 919), False, 'from django.db import migrations, models\n'), ((942, 1069), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)', 'on_delete': 'models.deletion.SET_NULL', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, default=None, null=True, on_delete=models.\n deletion.SET_NULL, to=settings.AUTH_USER_MODEL)\n', (962, 1069), False, 'from django.db import migrations, models\n'), ((1387, 1421), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1406, 1421), False, 'from django.db import migrations, models\n'), ((1592, 1661), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_index': '(True)', 'max_length': '(25)', 'null': '(True)'}), '(blank=True, db_index=True, max_length=25, null=True)\n', (1608, 1661), False, 'from django.db import migrations, models\n'), ((1694, 1763), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_index': '(True)', 'max_length': '(25)', 'null': '(True)'}), '(blank=True, db_index=True, max_length=25, null=True)\n', (1710, 1763), False, 'from django.db import migrations, models\n'), ((1798, 1867), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_index': '(True)', 'max_length': '(12)', 'null': '(True)'}), '(blank=True, db_index=True, max_length=12, null=True)\n', (1814, 1867), False, 'from django.db import migrations, models\n'), ((1901, 1969), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'db_index': '(True)', 'max_length': '(8)', 'null': '(True)'}), '(blank=True, db_index=True, max_length=8, null=True)\n', (1917, 1969), False, 'from django.db import migrations, models\n'), ((2288, 2327), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2304, 2327), False, 'from django.db import migrations, models\n'), ((2538, 2577), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2554, 2577), False, 'from django.db import migrations, models\n'), ((2823, 3023), 'django.db.models.JSONField', 'models.JSONField', ([], {'blank': '(True)', 'default': 'Utility.attendee_infos', 'help_text': '"Example: {"fixed": {"food_pref": "peanut allergy", "nick_name": "John"}}.Please keep {} here even no data"', 'null': '(True)'}), '(blank=True, default=Utility.attendee_infos, help_text=\n \'Example: {"fixed": {"food_pref": "peanut allergy", "nick_name": "John"}}.Please keep {} here even no data\'\n , null=True)\n', (2839, 3023), False, 'from django.db import migrations, models\n'), ((4564, 4618), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (4583, 4618), False, 'from django.db import migrations, models\n'), ((4656, 4695), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4676, 4695), False, 'from django.db import migrations, models\n'), ((5018, 5052), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (5037, 5052), False, 'from django.db import migrations, models\n'), ((5083, 5221), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_constraint': '(False)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"history"', 'to': '"persons.attendee"'}), "(db_constraint=False, on_delete=django.db.models.deletion.\n DO_NOTHING, related_name='history', to='persons.attendee')\n", (5100, 5221), False, 'from django.db import migrations, models\n'), ((5242, 5321), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'db_index': '(True)', 'default': 'uuid4', 'editable': '(False)', 'serialize': '(False)'}), '(db_index=True, default=uuid4, editable=False, serialize=False)\n', (5258, 5321), False, 'from django.db import migrations, models\n'), ((5353, 5528), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_constraint': '(False)', 'default': '(0)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"+"', 'related_query_name': '"+"', 'to': '"whereabouts.division"'}), "(db_constraint=False, default=0, on_delete=django.db.\n models.deletion.DO_NOTHING, related_name='+', related_query_name='+',\n to='whereabouts.division')\n", (5370, 5528), False, 'from django.db import migrations, models\n'), ((5678, 5724), 'django.db.models.TextField', 'models.TextField', ([], {'help_text': '"The event label."'}), "(help_text='The event label.')\n", (5694, 5724), False, 'from django.db import migrations, models\n'), ((5752, 5954), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'db_constraint': '(False)', 'default': 'None', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"+"', 'related_query_name': '"+"', 'to': 'settings.AUTH_USER_MODEL'}), "(blank=True, db_constraint=False, default=None, null=True,\n on_delete=django.db.models.deletion.DO_NOTHING, related_name='+',\n related_query_name='+', to=settings.AUTH_USER_MODEL)\n", (5769, 5954), False, 'from django.db import migrations, models\n'), ((5981, 6125), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_constraint': '(False)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.DO_NOTHING', 'related_name': '"+"', 'to': '"pghistory.context"'}), "(db_constraint=False, null=True, on_delete=django.db.\n models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')\n", (5998, 6125), False, 'from django.db import migrations, models\n'), ((6149, 6349), 'django.db.models.JSONField', 'models.JSONField', ([], {'blank': '(True)', 'default': 'Utility.attendee_infos', 'help_text': '"Example: {"fixed": {"food_pref": "peanut allergy", "nick_name": "John"}}.Please keep {} here even no data"', 'null': '(True)'}), '(blank=True, default=Utility.attendee_infos, help_text=\n \'Example: {"fixed": {"food_pref": "peanut allergy", "nick_name": "John"}}.Please keep {} here even no data\'\n , null=True)\n', (6165, 6349), False, 'from django.db import migrations, models\n'), ((6373, 6427), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(25)', 'null': '(True)'}), '(blank=True, max_length=25, null=True)\n', (6389, 6427), False, 'from django.db import migrations, models\n'), ((6460, 6514), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(25)', 'null': '(True)'}), '(blank=True, max_length=25, null=True)\n', (6476, 6514), False, 'from django.db import migrations, models\n'), ((6549, 6603), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(12)', 'null': '(True)'}), '(blank=True, max_length=12, null=True)\n', (6565, 6603), False, 'from django.db import migrations, models\n'), ((6637, 6690), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(8)', 'null': '(True)'}), '(blank=True, max_length=8, null=True)\n', (6653, 6690), False, 'from django.db import migrations, models\n'), ((6886, 6925), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6902, 6925), False, 'from django.db import migrations, models\n'), ((7136, 7175), 'django.db.models.DateField', 'models.DateField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7152, 7175), False, 'from django.db import migrations, models\n'), ((1517, 1530), 'django.db.models.SET', 'models.SET', (['(0)'], {}), '(0)\n', (1527, 1530), False, 'from django.db import migrations, models\n'), ((2024, 2044), 'attendees.persons.models.enum.GenderEnum.choices', 'GenderEnum.choices', ([], {}), '()\n', (2042, 2044), False, 'from attendees.persons.models.enum import GenderEnum\n'), ((2169, 2195), 'private_storage.storage.files.PrivateFileSystemStorage', 'PrivateFileSystemStorage', ([], {}), '()\n', (2193, 2195), False, 'from private_storage.storage.files import PrivateFileSystemStorage\n'), ((5574, 5594), 'attendees.persons.models.enum.GenderEnum.choices', 'GenderEnum.choices', ([], {}), '()\n', (5592, 5594), False, 'from attendees.persons.models.enum import GenderEnum\n'), ((6767, 6793), 'private_storage.storage.files.PrivateFileSystemStorage', 'PrivateFileSystemStorage', ([], {}), '()\n', (6791, 6793), False, 'from private_storage.storage.files import PrivateFileSystemStorage\n')]
|
# -*- coding:utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from appUI.MainWindow import main
if __name__ == "__main__":
#
main()
|
[
"os.path.abspath",
"appUI.MainWindow.main"
] |
[((180, 186), 'appUI.MainWindow.main', 'main', ([], {}), '()\n', (184, 186), False, 'from appUI.MainWindow import main\n'), ((78, 103), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (93, 103), False, 'import os\n')]
|
#!/usr/bin/env python
from datetime import date, timedelta
import logging
import sys
import click
from sickle import Sickle
from sickle.iterator import OAIItemIterator
from sickle.oaiexceptions import NoRecordsMatch
from smart_open import open
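# Default harvest window: yesterday through tomorrow, so an unattended daily
# run picks up everything date-stamped "today".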
yesterday = (date.today() - timedelta(days=1)).strftime('%Y-%m-%d')
tomorrow = (date.today() + timedelta(days=1)).strftime('%Y-%m-%d')
@click.command()
@click.option('--host',
default="https://dspace.mit.edu/oai/request",
help='hostname of OAI-PMH server to harvest from')
@click.option('--from_date',
default=yesterday,
help='from date format: YYYY-MM-DD')
@click.option('--until',
default=tomorrow,
help='until date format: YYYY-MM-DD')
@click.option('--format',
default='oai_dc',
help='Add metadata type (e.g. mods, mets, oai_dc, qdc, ore)')
@click.option('--set', default=None, help='set to be harvested.')
@click.option('--out', default='out.xml', help='Filepath to write output')
@click.option('--verbose', help='Enable debug output', is_flag=True)
def harvest(host, from_date, until, format, out, set, verbose):
counter = 0
if verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
logging.info("OAI-PMH harvesting from %s", host)
logging.info("From date = %s", from_date)
logging.info("Until date = %s", until)
logging.info("Metadata format = %s", format)
logging.info("Outfile = %s", out)
mysickle = Sickle(host, iterator=OAIItemIterator)
params = {'metadataPrefix': format,
'from': from_date,
'until': until
}
if set is not None:
params['set'] = set
try:
responses = mysickle.ListIdentifiers(
**params)
except NoRecordsMatch:
logging.info("No records harvested: the combination of the values of "
"the arguments results in an empty list.")
sys.exit()
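    # Two-pass harvest: list every identifier first, then fetch each full
    # record individually with GetRecord.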
identifier_list = []
for records in responses:
identifier_list.append(records.identifier)
logging.info(f"Identifier count to harvest: {len(identifier_list)}")
with open(out, 'wb') as f:
f.write('<records>'.encode())
for identifier in identifier_list:
r = mysickle.GetRecord(identifier=identifier,
metadataPrefix=format)
f.write(r.raw.encode('utf8'))
logging.debug(counter)
logging.debug(r.raw)
counter += 1
f.write('</records>'.encode())
logging.info("Total records harvested: %i", counter)
if __name__ == "__main__":
harvest()
|
[
"logging.debug",
"logging.basicConfig",
"click.option",
"datetime.date.today",
"smart_open.open",
"click.command",
"logging.info",
"datetime.timedelta",
"sickle.Sickle",
"sys.exit"
] |
[((385, 400), 'click.command', 'click.command', ([], {}), '()\n', (398, 400), False, 'import click\n'), ((402, 526), 'click.option', 'click.option', (['"--host"'], {'default': '"https://dspace.mit.edu/oai/request"', 'help': '"hostname of OAI-PMH server to harvest from"'}), "('--host', default='https://dspace.mit.edu/oai/request', help=\n 'hostname of OAI-PMH server to harvest from')\n", (414, 526), False, 'import click\n'), ((551, 639), 'click.option', 'click.option', (['"--from_date"'], {'default': 'yesterday', 'help': '"from date format: YYYY-MM-DD"'}), "('--from_date', default=yesterday, help=\n 'from date format: YYYY-MM-DD')\n", (563, 639), False, 'import click\n'), ((664, 743), 'click.option', 'click.option', (['"--until"'], {'default': 'tomorrow', 'help': '"until date format: YYYY-MM-DD"'}), "('--until', default=tomorrow, help='until date format: YYYY-MM-DD')\n", (676, 743), False, 'import click\n'), ((773, 882), 'click.option', 'click.option', (['"--format"'], {'default': '"oai_dc"', 'help': '"Add metadata type (e.g. mods, mets, oai_dc, qdc, ore)"'}), "('--format', default='oai_dc', help=\n 'Add metadata type (e.g. mods, mets, oai_dc, qdc, ore)')\n", (785, 882), False, 'import click\n'), ((907, 971), 'click.option', 'click.option', (['"--set"'], {'default': 'None', 'help': '"set to be harvested."'}), "('--set', default=None, help='set to be harvested.')\n", (919, 971), False, 'import click\n'), ((973, 1046), 'click.option', 'click.option', (['"--out"'], {'default': '"out.xml"', 'help': '"Filepath to write output"'}), "('--out', default='out.xml', help='Filepath to write output')\n", (985, 1046), False, 'import click\n'), ((1048, 1115), 'click.option', 'click.option', (['"--verbose"'], {'help': '"Enable debug output"', 'is_flag': '(True)'}), "('--verbose', help='Enable debug output', is_flag=True)\n", (1060, 1115), False, 'import click\n'), ((1325, 1373), 'logging.info', 'logging.info', (['"OAI-PMH harvesting from %s"', 'host'], {}), "('OAI-PMH harvesting from %s', host)\n", (1337, 1373), False, 'import logging\n'), ((1378, 1419), 'logging.info', 'logging.info', (['"From date = %s"', 'from_date'], {}), "('From date = %s', from_date)\n", (1390, 1419), False, 'import logging\n'), ((1424, 1462), 'logging.info', 'logging.info', (['"Until date = %s"', 'until'], {}), "('Until date = %s', until)\n", (1436, 1462), False, 'import logging\n'), ((1467, 1511), 'logging.info', 'logging.info', (['"Metadata format = %s"', 'format'], {}), "('Metadata format = %s', format)\n", (1479, 1511), False, 'import logging\n'), ((1516, 1549), 'logging.info', 'logging.info', (['"Outfile = %s"', 'out'], {}), "('Outfile = %s', out)\n", (1528, 1549), False, 'import logging\n'), ((1566, 1604), 'sickle.Sickle', 'Sickle', (['host'], {'iterator': 'OAIItemIterator'}), '(host, iterator=OAIItemIterator)\n', (1572, 1604), False, 'from sickle import Sickle\n'), ((2633, 2685), 'logging.info', 'logging.info', (['"Total records harvested: %i"', 'counter'], {}), "('Total records harvested: %i', counter)\n", (2645, 2685), False, 'import logging\n'), ((1221, 1261), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (1240, 1261), False, 'import logging\n'), ((1280, 1319), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (1299, 1319), False, 'import logging\n'), ((2233, 2248), 'smart_open.open', 'open', (['out', '"wb"'], {}), "(out, 'wb')\n", (2237, 2248), False, 'from smart_open import open\n'), ((260, 272), 'datetime.date.today', 'date.today', ([], {}), '()\n', (270, 272), False, 'from datetime import date, timedelta\n'), ((275, 292), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (284, 292), False, 'from datetime import date, timedelta\n'), ((327, 339), 'datetime.date.today', 'date.today', ([], {}), '()\n', (337, 339), False, 'from datetime import date, timedelta\n'), ((342, 359), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (351, 359), False, 'from datetime import date, timedelta\n'), ((1887, 2007), 'logging.info', 'logging.info', (['"No records harvested: the combination of the values of the arguments results in an empty list."'], {}), "(\n 'No records harvested: the combination of the values of the arguments results in an empty list.'\n )\n", (1899, 2007), False, 'import logging\n'), ((2030, 2040), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2038, 2040), False, 'import sys\n'), ((2507, 2529), 'logging.debug', 'logging.debug', (['counter'], {}), '(counter)\n', (2520, 2529), False, 'import logging\n'), ((2542, 2562), 'logging.debug', 'logging.debug', (['r.raw'], {}), '(r.raw)\n', (2555, 2562), False, 'import logging\n')]
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
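# Dispatcher/worker pattern: each worker sends 'READY!' over the request/reply
# channel; the dispatcher's REP handler pops one task per request and replies
# with it, or with None once the task list is empty, which makes the worker
# shut down.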
def rep_handler(agent, message):
if not agent.tasks:
return None
return agent.tasks.pop()
def request_work(agent):
x = agent.send_recv('dispatcher', 'READY!')
if not x:
agent.shutdown()
return
time.sleep(x)
agent.send('results', '%s finished with %s' % (agent.name, x))
if __name__ == '__main__':
ns = run_nameserver()
results = run_agent('Results')
results_addr = results.bind('PULL', handler=lambda x, y: x.log_info(y))
dispatcher = run_agent('Dispatcher')
dispatcher.set_attr(tasks=[1, 1, 1, 1, 5, 1, 1, 1, 1, 5])
dispatcher_addr = dispatcher.bind('REP', alias='rep', handler=rep_handler)
for i in range(5):
worker = run_agent('Worker%s' % i)
worker.connect(results_addr, alias='results')
worker.connect(dispatcher_addr, alias='dispatcher')
worker.each(0., request_work)
while len(ns.agents()) > 2:
time.sleep(0.1)
ns.shutdown()
|
[
"osbrain.run_nameserver",
"osbrain.run_agent",
"time.sleep"
] |
[((318, 331), 'time.sleep', 'time.sleep', (['x'], {}), '(x)\n', (328, 331), False, 'import time\n'), ((438, 454), 'osbrain.run_nameserver', 'run_nameserver', ([], {}), '()\n', (452, 454), False, 'from osbrain import run_nameserver\n'), ((470, 490), 'osbrain.run_agent', 'run_agent', (['"""Results"""'], {}), "('Results')\n", (479, 490), False, 'from osbrain import run_agent\n'), ((585, 608), 'osbrain.run_agent', 'run_agent', (['"""Dispatcher"""'], {}), "('Dispatcher')\n", (594, 608), False, 'from osbrain import run_agent\n'), ((791, 816), 'osbrain.run_agent', 'run_agent', (["('Worker%s' % i)"], {}), "('Worker%s' % i)\n", (800, 816), False, 'from osbrain import run_agent\n'), ((1010, 1025), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1020, 1025), False, 'import time\n')]
|
import operator
import os
import re
import sys
import time
from collections import deque
from functools import reduce
import numpy as np
def read_input() -> list[list[int]]:
# Read lines input:
# 2199943210
# 3987894921
# 9856789892
# 8767896789
# 9899965678
# return list with lists of integers [[r1.1,r1.2,r1.3...],[r2.1,r2.2,r2.3,...],...]
    # plus a border of sentinel max values (9) all around to simplify the low-point search ;)
data = [[int(elem) for elem in '9' + line.strip() + '9'] for line in sys.stdin]
nr_additions = len(data[0])
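    # add sentinel rows of 9s above and below to complete the border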
data.insert(0, [9 for _ in range(nr_additions)])
data.append([9 for _ in range(nr_additions)])
return data
def is_low_point(height_map: np.array, x_curr: int, y_curr: int) -> bool:
val = height_map[x_curr, y_curr]
hor_elems = [height_map[x_curr, y_curr - 1], height_map[x_curr, y_curr + 1]]
ver_elems = [height_map[x_curr - 1, y_curr], height_map[x_curr + 1, y_curr]]
all_elems = hor_elems + ver_elems
min_elem = min(all_elems)
if val < min_elem:
return True
return False
def find_low_points(height_map: np.array) -> list[(int, int), int]:
candidates = list()
# print("height map shape: {}".format(height_map.shape))
x_dim, y_dim = height_map.shape
for x_curr in range(1, x_dim - 1):
for y_curr in range(1, y_dim - 1):
if is_low_point(height_map, x_curr, y_curr):
# print("low point found: [{}][{}] -> {}".format(x_curr, y_curr, height_map[x_curr, y_curr]))
candidates.append([(x_curr, y_curr), height_map[x_curr, y_curr]])
return candidates
def find_solution_a(low_points: list[(int, int), int]) -> int:
answer = sum([elem[1] for elem in low_points]) + len(low_points)
return answer
def generate_basin(height_map: np.array, x_y: (int, int)) -> list[(int, int)]:
basin = set()
candidates = deque()
candidates.append(x_y)
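    # visit the four axis-aligned neighbours (no diagonals)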
neigh_offsets = [(0, -1), (1, 0), (0, 1), (-1, 0)]
while len(candidates) > 0:
        # use DFS (depth-first search) by treating the deque as a stack (LIFO)
        x_curr, y_curr = candidates.pop()
        # use BFS (breadth-first search) by treating the deque as a queue (FIFO)
        # x_curr, y_curr = candidates.popleft()
        # NOTE: both traversals yield the same basins, but in this
        #       implementation BFS measured about 3x slower than DFS
basin.add((x_curr, y_curr))
for x_off, y_off in neigh_offsets:
candi_x = x_curr + x_off
candi_y = y_curr + y_off
cur_val = height_map[x_curr][y_curr]
candi_val = height_map[candi_x][candi_y]
if candi_val != 9 and candi_val > cur_val and\
(candi_x, candi_y) not in basin:
candidates.append((candi_x, candi_y))
return list(basin)
def find_solution_b(height_map: np.array, low_points: list[(int, int), int]) -> int:
# print("low points: {}".format(low_points))
basin_sizes = list()
for x_y, _ in low_points:
basin = generate_basin(height_map, x_y)
if len(basin) > 0:
basin_sizes.append(len(basin))
answer = reduce(operator.mul, sorted(basin_sizes, reverse=True)[:3], 1)
return answer
def do_main():
prev_time = time.process_time()
print("start reading input...")
data = read_input()
cur_time = time.process_time()
diff = cur_time - prev_time
prev_time = cur_time
print("[{}] took: {} sec.".format(cur_time, diff))
# print("input data: {}".format(data))
print("generate low points...")
height_map = np.array(data)
low_points = find_low_points(height_map)
cur_time = time.process_time()
diff = cur_time - prev_time
prev_time = cur_time
print("[{}] took: {} sec.".format(cur_time, diff))
print("find_solution_a...")
result_a = find_solution_a(low_points)
print("result_a:", result_a)
cur_time = time.process_time()
diff = cur_time - prev_time
prev_time = cur_time
print("[{}] took: {} sec.".format(cur_time, diff))
print("find_solution_b...")
result_b = find_solution_b(height_map, low_points)
print("result_b:", result_b)
cur_time = time.process_time()
diff = cur_time - prev_time
# prev_time = cur_time
print("[{}] took: {} sec.".format(cur_time, diff))
if __name__ == "__main__":
# execute only if run as a script
filename = os.path.basename(__file__)
day_nr = re.search(r"\d+", filename).group()
print("day_nr:", day_nr)
do_main()
|
[
"os.path.basename",
"time.process_time",
"numpy.array",
"re.search",
"collections.deque"
] |
[((1912, 1919), 'collections.deque', 'deque', ([], {}), '()\n', (1917, 1919), False, 'from collections import deque\n'), ((3345, 3364), 'time.process_time', 'time.process_time', ([], {}), '()\n', (3362, 3364), False, 'import time\n'), ((3441, 3460), 'time.process_time', 'time.process_time', ([], {}), '()\n', (3458, 3460), False, 'import time\n'), ((3671, 3685), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (3679, 3685), True, 'import numpy as np\n'), ((3746, 3765), 'time.process_time', 'time.process_time', ([], {}), '()\n', (3763, 3765), False, 'import time\n'), ((4002, 4021), 'time.process_time', 'time.process_time', ([], {}), '()\n', (4019, 4021), False, 'import time\n'), ((4270, 4289), 'time.process_time', 'time.process_time', ([], {}), '()\n', (4287, 4289), False, 'import time\n'), ((4488, 4514), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (4504, 4514), False, 'import os\n'), ((4528, 4555), 're.search', 're.search', (['"""\\\\d+"""', 'filename'], {}), "('\\\\d+', filename)\n", (4537, 4555), False, 'import re\n')]
|
"""
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* - Neither the name of prim nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior
* written permission.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
"""
import os
import subprocess
import re
import threading
from .task import Task
class ClusterTask(Task):
"""
This class is a Task that runs as a cluster process.
"""
@staticmethod
def supported_modes():
"""
Returns a list of supported modes.
"""
return ['sge', 'lsf', 'slurm']
def __init__(self, manager, name, command, mode):
"""
This instantiates a ClusterTask object with a subprocess command
Args:
manager (TaskManager) : passed to Task.__init__()
name (str) : passed to Task.__init__()
command (str) : the command to be run
mode (str) : name of cluster scheduler
"""
super().__init__(manager, name)
self._command = command
assert mode in self.supported_modes(), 'invalid scheduler name: ' + mode
self._mode = mode
self._stdout_file = None
self._stderr_file = None
self._log_file = None
self._queues = set()
self._cluster_resources = dict()
self._cluster_options = list()
self.stdout = None
self.stderr = None
self.returncode = None
self._proc = None
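        # serializes execute() and kill() so the subprocess handle cannot race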
self._lock = threading.Lock()
@property
def command(self):
"""
Returns:
(str) : the process's command
"""
return self._command
@command.setter
def command(self, value):
"""
Sets the process's command
Args:
value (str) : the new command
"""
self._command = value
@property
def stdout_file(self):
"""
Returns:
(str) : filename for stdout text
"""
return self._stdout_file
@stdout_file.setter
def stdout_file(self, filename):
"""
Sets the filename of the stdout text
Args:
filename (str) : a filename for stdout text
"""
self._stdout_file = filename
@property
def stderr_file(self):
"""
Returns:
(str) : filename for stderr text
"""
return self._stderr_file
@stderr_file.setter
def stderr_file(self, filename):
"""
Sets the filename of the stderr text.
Args:
filename (str) : a filename for stderr text
"""
self._stderr_file = filename
@property
def log_file(self):
"""
Returns:
(str) : filename for job log
"""
return self._log_file
@log_file.setter
def log_file(self, filename):
"""
Sets the filename of the job log.
Args:
filename (str) : a filename for job log
"""
self._log_file = filename
@property
def queues(self):
"""
Returns:
(set<str>) : the queues allowed to run in
"""
return self._queues
@queues.setter
def queues(self, value):
"""
Sets the allowed queues to run in
Args:
value (strs): the queues
"""
self._queues = set(value)
@property
def cluster_resources(self):
"""
Returns:
(dict<str,str>) : the resources in the cluster
"""
return self._cluster_resources
@cluster_resources.setter
def cluster_resources(self, value):
"""
Sets the cluster resources for this task
Args:
value (strs): the resources
"""
self._cluster_resources = dict(value)
@property
def cluster_options(self):
"""
Returns:
            (list<str>) : the options in the cluster
"""
return self._cluster_options
@cluster_options.setter
def cluster_options(self, value):
"""
Sets the cluster options for this task
Args:
value (strs): the options
"""
self._cluster_options = list(value)
def describe(self):
"""
See Task.describe()
"""
return self._build_command()
def _build_command(self):
"""
This builds the command string for this cluster task.
Returns:
(str) : the full command line
"""
# SGE cluster task
if self._mode == 'sge':
cmd = ['qsub',
'-V', # copy full environment
'-b', 'yes', # execute binary file
'-sync', 'yes', # wait for job to complete before exiting
'-cwd', # use current working directory
'-N', self.name] # name of the task
if self._stdout_file:
cmd.extend(['-o', self._stdout_file])
else:
cmd.extend(['-o', os.devnull])
if self._stderr_file:
cmd.extend(['-e', self._stderr_file])
else:
cmd.extend(['-e', os.devnull])
if len(self._queues) > 0:
cmd.extend(['-q', ','.join(self._queues)])
if len(self._cluster_resources) > 0:
cmd.extend(['-l', ','.join(
['{0}={1}'.format(k, v)
for k, v in self._cluster_resources.items()])])
cmd.append(self._command)
return ' '.join(cmd)
# LSF cluster task
if self._mode == 'lsf':
cmd = ['bsub', '-J', self.name] # name of the task
if self._stdout_file:
cmd.extend(['-o', self._stdout_file])
else:
cmd.extend(['-o', os.devnull])
if self._stderr_file:
cmd.extend(['-e', self._stderr_file])
else:
cmd.extend(['-e', os.devnull])
if len(self._queues) > 0:
cmd.extend(['-q', ','.join(self._queues)])
if len(self._cluster_resources) > 0:
cmd.extend(
['{0} {1}'.format(k, v) for k, v in self._cluster_resources.items()])
cmd.append("--")
cmd.append(re.sub('"', '\\"', self._command))
return ' '.join(cmd)
# Slurm cluster task
if self._mode == 'slurm':
cmd = ['srun', '-vv', '-J', self.name]
if self._stdout_file:
cmd.extend(['-o', self._stdout_file])
else:
cmd.extend(['-o', os.devnull])
if self._stderr_file:
cmd.extend(['-e', self._stderr_file])
else:
cmd.extend(['-e', os.devnull])
if self._cluster_options:
cmd.extend(self._cluster_options)
cmd.append(self._command)
return ' '.join(cmd)
assert False, 'programmer error :('
return None
def execute(self):
"""
See Task.execute()
"""
with self._lock:
# If we're killed at this point, don't bother starting a new process.
if self.killed:
return None
# format stderr output
if self._log_file:
stderr_fd = open(self._log_file, 'w')
else:
stderr_fd = subprocess.PIPE
# start the command
cmd = self._build_command()
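            # start_new_session=True puts the submit command in its own
            # session so signals aimed at our process group don't reach it;
            # kill() terminates it explicitly via Popen.terminate().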
self._proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=stderr_fd, shell=True,
start_new_session=True)
# wait for the process to finish, collect output
self.stdout, self.stderr = self._proc.communicate()
if self.stdout is not None:
self.stdout = self.stdout.decode('utf-8')
if self.stderr is not None:
self.stderr = self.stderr.decode('utf-8')
# close the output file
if self._log_file:
#pylint: disable=maybe-no-member
stderr_fd.close()
# check the return code
self.returncode = self._proc.returncode
if self.returncode == 0:
return None
return self.returncode
def kill(self):
"""
See Task.kill()
This implementation calls Popen.terminate()
"""
with self._lock:
# Don't kill if already completed or already killed
if self.returncode is None and not self.killed:
self.killed = True
# there is a chance the proc hasn't been created yet
if self._proc is not None:
try:
self._proc.terminate()
except ProcessLookupError:
pass
|
[
"threading.Lock",
"subprocess.Popen",
"re.sub"
] |
[((2730, 2746), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2744, 2746), False, 'import threading\n'), ((7917, 8020), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'stderr_fd', 'shell': '(True)', 'start_new_session': '(True)'}), '(cmd, stdout=subprocess.PIPE, stderr=stderr_fd, shell=True,\n start_new_session=True)\n', (7933, 8020), False, 'import subprocess\n'), ((6884, 6917), 're.sub', 're.sub', (['"""\\""""', '"""\\\\\\""""', 'self._command'], {}), '(\'"\', \'\\\\"\', self._command)\n', (6890, 6917), False, 'import re\n')]
|
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
def plot_image_frame(image_frame):
"""
    utility to plot a sequence of image frames
:param image_frame: list of images
"""
for ii, image in enumerate(image_frame):
plt.figure()
if isinstance(image, list):
image = image[0]
plt.imshow(image)
plt.title('frame: ' + str(ii))
plt.show()
def plot_trajectories(pose_frame):
"""
    utility to plot joint trajectories against time-step t
    :param pose_frame: numpy-array of shape (time_step, joint_num, coordinate_dim)
"""
pose_frame = np.array(pose_frame)
timestep, joint_num, dim = pose_frame.shape
joints = ['neck', 'shoulder', 'elbow', 'hand']
plt.figure(figsize=(12, 7))
t = np.arange(timestep)
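    # one subplot per coordinate (331: x, 332: y, 333: z when present), each
    # joint drawn as a separate labelled line over the shared time axis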
for ii, mark in enumerate(joints):
plt.subplot(331)
plt.plot(t, pose_frame[:, ii, 0], label=mark)
plt.xlabel('t')
plt.ylabel('x')
plt.subplot(332)
plt.plot(t, pose_frame[:, ii, 1], label=mark)
plt.xlabel('t')
plt.ylabel('y')
if dim > 2:
plt.subplot(333)
plt.plot(t, pose_frame[:, ii, 2], label=mark)
plt.xlabel('t')
plt.ylabel('z')
plt.subplots_adjust(wspace=0.5, hspace=0)
plt.legend(loc=(1, 0.4))
plt.show()
def plot_trajectory_3d(trajectory):
"""
plot 3d trajectory
:param trajectory: numpy-array, shape of (time_step,3)
"""
xs = trajectory[:, 0]
ys = trajectory[:, 1]
zs = trajectory[:, 2]
fig = plt.figure()
ax = Axes3D(fig)
ax.plot3D(xs, ys, zs=zs, marker='o', color='b')
plt.show()
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"mpl_toolkits.mplot3d.Axes3D",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"numpy.array",
"numpy.arange",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((558, 578), 'numpy.array', 'np.array', (['pose_frame'], {}), '(pose_frame)\n', (566, 578), True, 'import numpy as np\n'), ((673, 700), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 7)'}), '(figsize=(12, 7))\n', (683, 700), True, 'import matplotlib.pyplot as plt\n'), ((706, 725), 'numpy.arange', 'np.arange', (['timestep'], {}), '(timestep)\n', (715, 725), True, 'import numpy as np\n'), ((1090, 1131), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0.5)', 'hspace': '(0)'}), '(wspace=0.5, hspace=0)\n', (1109, 1131), True, 'import matplotlib.pyplot as plt\n'), ((1133, 1157), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(1, 0.4)'}), '(loc=(1, 0.4))\n', (1143, 1157), True, 'import matplotlib.pyplot as plt\n'), ((1159, 1169), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1167, 1169), True, 'import matplotlib.pyplot as plt\n'), ((1370, 1382), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1380, 1382), True, 'import matplotlib.pyplot as plt\n'), ((1389, 1400), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (1395, 1400), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((1451, 1461), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1459, 1461), True, 'import matplotlib.pyplot as plt\n'), ((247, 259), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (257, 259), True, 'import matplotlib.pyplot as plt\n'), ((312, 329), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {}), '(image)\n', (322, 329), True, 'import matplotlib.pyplot as plt\n'), ((365, 375), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (373, 375), True, 'import matplotlib.pyplot as plt\n'), ((764, 780), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(331)'], {}), '(331)\n', (775, 780), True, 'import matplotlib.pyplot as plt\n'), ((783, 828), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'pose_frame[:, ii, 0]'], {'label': 'mark'}), '(t, pose_frame[:, ii, 0], label=mark)\n', (791, 828), True, 'import matplotlib.pyplot as plt\n'), ((831, 846), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"t"'], {}), "('t')\n", (841, 846), True, 'import matplotlib.pyplot as plt\n'), ((849, 864), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"x"'], {}), "('x')\n", (859, 864), True, 'import matplotlib.pyplot as plt\n'), ((867, 883), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(332)'], {}), '(332)\n', (878, 883), True, 'import matplotlib.pyplot as plt\n'), ((886, 931), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'pose_frame[:, ii, 1]'], {'label': 'mark'}), '(t, pose_frame[:, ii, 1], label=mark)\n', (894, 931), True, 'import matplotlib.pyplot as plt\n'), ((934, 949), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"t"'], {}), "('t')\n", (944, 949), True, 'import matplotlib.pyplot as plt\n'), ((952, 967), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"y"'], {}), "('y')\n", (962, 967), True, 'import matplotlib.pyplot as plt\n'), ((985, 1001), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(333)'], {}), '(333)\n', (996, 1001), True, 'import matplotlib.pyplot as plt\n'), ((1005, 1050), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'pose_frame[:, ii, 2]'], {'label': 'mark'}), '(t, pose_frame[:, ii, 2], label=mark)\n', (1013, 1050), True, 'import matplotlib.pyplot as plt\n'), ((1054, 1069), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"t"'], {}), "('t')\n", (1064, 1069), True, 'import matplotlib.pyplot as plt\n'), ((1073, 1088), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"z"'], {}), "('z')\n", (1083, 1088), True, 'import matplotlib.pyplot as plt\n')]
|
# Code generated by `typeddictgen`. DO NOT EDIT.
"""V1ServiceSpecDict generated type."""
from typing import TypedDict, Dict, List
from kubernetes_typed.client import V1ServicePortDict, V1SessionAffinityConfigDict
V1ServiceSpecDict = TypedDict(
"V1ServiceSpecDict",
{
"allocateLoadBalancerNodePorts": bool,
"clusterIP": str,
"clusterIPs": List[str],
"externalIPs": List[str],
"externalName": str,
"externalTrafficPolicy": str,
"healthCheckNodePort": int,
"internalTrafficPolicy": str,
"ipFamilies": List[str],
"ipFamilyPolicy": str,
"loadBalancerClass": str,
"loadBalancerIP": str,
"loadBalancerSourceRanges": List[str],
"ports": List[V1ServicePortDict],
"publishNotReadyAddresses": bool,
"selector": Dict[str, str],
"sessionAffinity": str,
"sessionAffinityConfig": V1SessionAffinityConfigDict,
"type": str,
},
total=False,
)
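# Minimal usage sketch (illustrative only, not part of the generated module):
# a TypedDict is a plain dict at runtime, and with total=False any subset of
# the declared keys type-checks, e.g.
#   spec: V1ServiceSpecDict = {"type": "ClusterIP", "selector": {"app": "web"}}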
|
[
"typing.TypedDict"
] |
[((235, 852), 'typing.TypedDict', 'TypedDict', (['"""V1ServiceSpecDict"""', "{'allocateLoadBalancerNodePorts': bool, 'clusterIP': str, 'clusterIPs':\n List[str], 'externalIPs': List[str], 'externalName': str,\n 'externalTrafficPolicy': str, 'healthCheckNodePort': int,\n 'internalTrafficPolicy': str, 'ipFamilies': List[str], 'ipFamilyPolicy':\n str, 'loadBalancerClass': str, 'loadBalancerIP': str,\n 'loadBalancerSourceRanges': List[str], 'ports': List[V1ServicePortDict],\n 'publishNotReadyAddresses': bool, 'selector': Dict[str, str],\n 'sessionAffinity': str, 'sessionAffinityConfig':\n V1SessionAffinityConfigDict, 'type': str}"], {'total': '(False)'}), "('V1ServiceSpecDict', {'allocateLoadBalancerNodePorts': bool,\n 'clusterIP': str, 'clusterIPs': List[str], 'externalIPs': List[str],\n 'externalName': str, 'externalTrafficPolicy': str,\n 'healthCheckNodePort': int, 'internalTrafficPolicy': str, 'ipFamilies':\n List[str], 'ipFamilyPolicy': str, 'loadBalancerClass': str,\n 'loadBalancerIP': str, 'loadBalancerSourceRanges': List[str], 'ports':\n List[V1ServicePortDict], 'publishNotReadyAddresses': bool, 'selector':\n Dict[str, str], 'sessionAffinity': str, 'sessionAffinityConfig':\n V1SessionAffinityConfigDict, 'type': str}, total=False)\n", (244, 852), False, 'from typing import TypedDict, Dict, List\n')]
|
from .common import Benchmark, get_squares_, get_indexes_rand, TYPES1
import numpy_demo as np
class Eindot(Benchmark):
def setup(self):
self.a = np.arange(60000.0).reshape(150, 400)
self.ac = self.a.copy()
self.at = self.a.T
self.atc = self.a.T.copy()
self.b = np.arange(240000.0).reshape(400, 600)
self.c = np.arange(600)
self.d = np.arange(400)
self.a3 = np.arange(480000.).reshape(60, 80, 100)
self.b3 = np.arange(192000.).reshape(80, 60, 40)
def time_dot_a_b(self):
np.dot(self.a, self.b)
def time_dot_d_dot_b_c(self):
np.dot(self.d, np.dot(self.b, self.c))
def time_dot_trans_a_at(self):
np.dot(self.a, self.at)
def time_dot_trans_a_atc(self):
np.dot(self.a, self.atc)
def time_dot_trans_at_a(self):
np.dot(self.at, self.a)
def time_dot_trans_atc_a(self):
np.dot(self.atc, self.a)
def time_einsum_i_ij_j(self):
np.einsum('i,ij,j', self.d, self.b, self.c)
def time_einsum_ij_jk_a_b(self):
np.einsum('ij,jk', self.a, self.b)
def time_einsum_ijk_jil_kl(self):
np.einsum('ijk,jil->kl', self.a3, self.b3)
def time_inner_trans_a_a(self):
np.inner(self.a, self.a)
def time_inner_trans_a_ac(self):
np.inner(self.a, self.ac)
def time_matmul_a_b(self):
np.matmul(self.a, self.b)
def time_matmul_d_matmul_b_c(self):
np.matmul(self.d, np.matmul(self.b, self.c))
def time_matmul_trans_a_at(self):
np.matmul(self.a, self.at)
def time_matmul_trans_a_atc(self):
np.matmul(self.a, self.atc)
def time_matmul_trans_at_a(self):
np.matmul(self.at, self.a)
def time_matmul_trans_atc_a(self):
np.matmul(self.atc, self.a)
def time_tensordot_a_b_axes_1_0_0_1(self):
np.tensordot(self.a3, self.b3, axes=([1, 0], [0, 1]))
class Linalg(Benchmark):
params = [['svd', 'pinv', 'det', 'norm'],
TYPES1]
param_names = ['op', 'type']
def setup(self, op, typename):
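        # ignore floating-point errors (overflow, invalid, ...) so they
        # don't interrupt setup or the timed runs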
np.seterr(all='ignore')
self.func = getattr(np.linalg, op)
if op == 'cholesky':
# we need a positive definite
self.a = np.dot(get_squares_()[typename],
get_squares_()[typename].T)
else:
self.a = get_squares_()[typename]
# check that dtype is supported at all
try:
self.func(self.a[:2, :2])
except TypeError:
raise NotImplementedError()
def time_op(self, op, typename):
self.func(self.a)
class Lstsq(Benchmark):
def setup(self):
self.a = get_squares_()['float64']
self.b = get_indexes_rand()[:100].astype(np.float64)
def time_numpy_demo_linalg_lstsq_a__b_float64(self):
np.linalg.lstsq(self.a, self.b, rcond=-1)
|
[
"numpy_demo.dot",
"numpy_demo.arange",
"numpy_demo.inner",
"numpy_demo.tensordot",
"numpy_demo.seterr",
"numpy_demo.einsum",
"numpy_demo.matmul",
"numpy_demo.linalg.lstsq"
] |
[((363, 377), 'numpy_demo.arange', 'np.arange', (['(600)'], {}), '(600)\n', (372, 377), True, 'import numpy_demo as np\n'), ((395, 409), 'numpy_demo.arange', 'np.arange', (['(400)'], {}), '(400)\n', (404, 409), True, 'import numpy_demo as np\n'), ((563, 585), 'numpy_demo.dot', 'np.dot', (['self.a', 'self.b'], {}), '(self.a, self.b)\n', (569, 585), True, 'import numpy_demo as np\n'), ((712, 735), 'numpy_demo.dot', 'np.dot', (['self.a', 'self.at'], {}), '(self.a, self.at)\n', (718, 735), True, 'import numpy_demo as np\n'), ((781, 805), 'numpy_demo.dot', 'np.dot', (['self.a', 'self.atc'], {}), '(self.a, self.atc)\n', (787, 805), True, 'import numpy_demo as np\n'), ((850, 873), 'numpy_demo.dot', 'np.dot', (['self.at', 'self.a'], {}), '(self.at, self.a)\n', (856, 873), True, 'import numpy_demo as np\n'), ((919, 943), 'numpy_demo.dot', 'np.dot', (['self.atc', 'self.a'], {}), '(self.atc, self.a)\n', (925, 943), True, 'import numpy_demo as np\n'), ((987, 1030), 'numpy_demo.einsum', 'np.einsum', (['"i,ij,j"', 'self.d', 'self.b', 'self.c'], {}), "('i,ij,j', self.d, self.b, self.c)\n", (996, 1030), True, 'import numpy_demo as np\n'), ((1077, 1111), 'numpy_demo.einsum', 'np.einsum', (['"ij,jk"', 'self.a', 'self.b'], {}), "('ij,jk', self.a, self.b)\n", (1086, 1111), True, 'import numpy_demo as np\n'), ((1159, 1201), 'numpy_demo.einsum', 'np.einsum', (['"ijk,jil->kl"', 'self.a3', 'self.b3'], {}), "('ijk,jil->kl', self.a3, self.b3)\n", (1168, 1201), True, 'import numpy_demo as np\n'), ((1247, 1271), 'numpy_demo.inner', 'np.inner', (['self.a', 'self.a'], {}), '(self.a, self.a)\n', (1255, 1271), True, 'import numpy_demo as np\n'), ((1318, 1343), 'numpy_demo.inner', 'np.inner', (['self.a', 'self.ac'], {}), '(self.a, self.ac)\n', (1326, 1343), True, 'import numpy_demo as np\n'), ((1384, 1409), 'numpy_demo.matmul', 'np.matmul', (['self.a', 'self.b'], {}), '(self.a, self.b)\n', (1393, 1409), True, 'import numpy_demo as np\n'), ((1551, 1577), 'numpy_demo.matmul', 'np.matmul', (['self.a', 'self.at'], {}), '(self.a, self.at)\n', (1560, 1577), True, 'import numpy_demo as np\n'), ((1626, 1653), 'numpy_demo.matmul', 'np.matmul', (['self.a', 'self.atc'], {}), '(self.a, self.atc)\n', (1635, 1653), True, 'import numpy_demo as np\n'), ((1701, 1727), 'numpy_demo.matmul', 'np.matmul', (['self.at', 'self.a'], {}), '(self.at, self.a)\n', (1710, 1727), True, 'import numpy_demo as np\n'), ((1776, 1803), 'numpy_demo.matmul', 'np.matmul', (['self.atc', 'self.a'], {}), '(self.atc, self.a)\n', (1785, 1803), True, 'import numpy_demo as np\n'), ((1860, 1913), 'numpy_demo.tensordot', 'np.tensordot', (['self.a3', 'self.b3'], {'axes': '([1, 0], [0, 1])'}), '(self.a3, self.b3, axes=([1, 0], [0, 1]))\n', (1872, 1913), True, 'import numpy_demo as np\n'), ((2086, 2109), 'numpy_demo.seterr', 'np.seterr', ([], {'all': '"ignore"'}), "(all='ignore')\n", (2095, 2109), True, 'import numpy_demo as np\n'), ((2842, 2883), 'numpy_demo.linalg.lstsq', 'np.linalg.lstsq', (['self.a', 'self.b'], {'rcond': '(-1)'}), '(self.a, self.b, rcond=-1)\n', (2857, 2883), True, 'import numpy_demo as np\n'), ((644, 666), 'numpy_demo.dot', 'np.dot', (['self.b', 'self.c'], {}), '(self.b, self.c)\n', (650, 666), True, 'import numpy_demo as np\n'), ((1477, 1502), 'numpy_demo.matmul', 'np.matmul', (['self.b', 'self.c'], {}), '(self.b, self.c)\n', (1486, 1502), True, 'import numpy_demo as np\n'), ((160, 178), 'numpy_demo.arange', 'np.arange', (['(60000.0)'], {}), '(60000.0)\n', (169, 178), True, 'import numpy_demo as np\n'), ((308, 327), 'numpy_demo.arange', 'np.arange', (['(240000.0)'], {}), '(240000.0)\n', (317, 327), True, 'import numpy_demo as np\n'), ((429, 448), 'numpy_demo.arange', 'np.arange', (['(480000.0)'], {}), '(480000.0)\n', (438, 448), True, 'import numpy_demo as np\n'), ((487, 506), 'numpy_demo.arange', 'np.arange', (['(192000.0)'], {}), '(192000.0)\n', (496, 506), True, 'import numpy_demo as np\n')]
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import tempfile
import shutil
import numpy as np
import pytest
from datetime import datetime
import os
from urllib.parse import urlparse
import re
from unittest.mock import Mock, patch
from astropy import coordinates
from astropy import units as u
from astroquery.utils.commons import ASTROPY_LT_4_1
from .. import Alma
from .. import _url_list, _test_url_list
# ALMA tests involving staging take too long, leading to travis timeouts
# TODO: make this a configuration item
SKIP_SLOW = True
all_colnames = {'Project code', 'Source name', 'RA', 'Dec', 'Band',
'Frequency resolution', 'Integration', 'Release date',
'Frequency support', 'Velocity resolution', 'Pol products',
'Observation date', 'PI name', 'PWV', 'Member ous id',
'Asdm uid', 'Project title', 'Project type', 'Scan intent',
'Spatial resolution', 'Largest angular scale',
'QA2 Status', 'Group ous id', 'Pub'}
def get_client():
alma = Alma()
# need this to point alma to a different test site
# alma package __init__.py mentions test sites but I don't know how the
# mechanism is supposed to be used
from .. import core
core.ALMA_TAP_PATH = 'obscore'
alma.archive_url = 'https://alma.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/'
return alma
@pytest.mark.remote_data
class TestAlma:
def setup_class(cls):
pass
# new test server
# this server seems not to serve a help page?
# Alma.archive_url = "https://2016-03.asa-test.alma.cl/aq/"
# starting somewhere between Nov 2015 and Jan 2016, the beta server
# stopped serving the actual data, making all staging attempts break
@pytest.fixture()
def temp_dir(self, request):
my_temp_dir = tempfile.mkdtemp()
def fin():
shutil.rmtree(my_temp_dir)
request.addfinalizer(fin)
return my_temp_dir
def test_public(self):
alma = get_client()
results = alma.query(payload=None, public=True, maxrec=100)
assert len(results) == 100
for row in results:
assert row['data_rights'] == 'Public'
results = alma.query(payload=None, public=False, maxrec=100)
assert len(results) == 100
for row in results:
assert row['data_rights'] == 'Proprietary'
def test_SgrAstar(self, temp_dir):
alma = get_client()
alma.cache_location = temp_dir
result_s = alma.query_object('Sgr A*', legacy_columns=True)
assert '2013.1.00857.S' in result_s['Project code']
# "The Brick", g0.253, is in this one
# assert b'2011.0.00217.S' in result_c['Project code'] # missing cycle 1 data
def test_docs_example(self, temp_dir):
alma = get_client()
alma.cache_location = temp_dir
rslt = alma.query(payload=dict(obs_creator_name='*Ginsburg*'))
assert 'ADS/JAO.ALMA#2013.1.00269.S' in rslt['obs_publisher_did']
def test_freq(self):
alma = get_client()
payload = {'frequency': '85..86'}
result = alma.query(payload)
assert len(result) > 0
for row in result:
# returned em_min and em_max are in m
assert row['frequency'] >= 85
assert row['frequency'] <= 100
assert '3' in row['band_list']
@pytest.mark.skipif("SKIP_SLOW",
reason="Extremely slow due to limitations of "
"the implementation")
def test_bands(self):
alma = get_client()
payload = {'band_list': ['5', '7']}
result = alma.query(payload)
assert len(result) > 0
for row in result:
assert ('5' in row['band_list']) or ('7' in row['band_list'])
def test_equivalent_columns(self):
# this test is to ensure that queries using original column names
# return the same results as the ones that use ObsCore names
alma = get_client()
# original
result_orig = alma.query(payload={'project_code': '2011.0.00131.S'},
legacy_columns=True)
result_obscore = alma.query(payload={'proposal_id': '2011.0.00131.S'},
legacy_columns=True)
assert len(result_orig) == len(result_obscore)
for row in result_orig:
assert row['Project code'] == '2011.0.00131.S'
for row in result_obscore:
assert row['Project code'] == '2011.0.00131.S'
def test_alma_source_name(self):
alma = get_client()
payload = {'source_name_alma': 'GRB021004'}
result = alma.query(payload)
assert len(result) > 0
for row in result:
assert 'GRB021004' == row['target_name']
@pytest.mark.skipif("SKIP_SLOW", reason="Known issue")
def test_ra_dec(self):
alma = get_client()
payload = {'ra_dec': '181.0192d -0.01928d'}
result = alma.query(payload)
assert len(result) > 0
@pytest.mark.skipif("SKIP_SLOW")
def test_m83(self, temp_dir, recwarn):
alma = get_client()
alma.cache_location = temp_dir
m83_data = alma.query_object('M83', science=True, legacy_columns=True)
uids = np.unique(m83_data['Member ous id'])
link_list = alma.stage_data(uids)
# On Feb 8, 2016 there were 83 hits. This number should never go down.
# Except it has. On May 18, 2016, there were 47.
assert len(link_list) >= 47
# test re-staging
# (has been replaced with warning)
# with pytest.raises(requests.HTTPError) as ex:
# link_list = alma.stage_data(uids)
# assert ex.value.args[0] == ('Received an error 405: this may indicate you have '
# 'already staged the data. Try downloading the '
# 'file URLs directly with download_files.')
# log.warning doesn't actually make a warning
# link_list = alma.stage_data(uids)
# w = recwarn.pop()
# assert (str(w.message) == ('Error 405 received. If you have previously staged the '
# 'same UIDs, the result returned is probably correct,'
# ' otherwise you may need to create a fresh astroquery.Alma instance.'))
@pytest.mark.skipif("SKIP_SLOW", reason="Known issue")
def test_stage_data(self, temp_dir, recwarn):
alma = get_client()
alma.cache_location = temp_dir
result_s = alma.query_object('Sgr A*', legacy_columns=True)
if ASTROPY_LT_4_1:
assert b'2013.1.00857.S' in result_s['Project code']
assert b'uid://A002/X40d164/X1b3' in result_s['Asdm uid']
assert b'uid://A002/X391d0b/X23d' in result_s['Member ous id']
match_val = b'uid://A002/X40d164/X1b3'
else:
assert '2013.1.00857.S' in result_s['Project code']
assert 'uid://A002/X40d164/X1b3' in result_s['Asdm uid']
assert 'uid://A002/X391d0b/X23d' in result_s['Member ous id']
match_val = 'uid://A002/X40d164/X1b3'
match = result_s['Asdm uid'] == match_val
uid = result_s['Member ous id'][match]
# this is temporary to switch back to ALMA servers
# del alma.dataarchive_url
# alma.archive_url = 'http://almascience.org'
result = alma.stage_data(uid)
found = False
for url in result['URL']:
if 'uid___A002_X40d164_X1b3' in url:
found = True
break
assert found, 'URL to uid___A002_X40d164_X1b3 expected'
def test_stage_data_listall(self, temp_dir, recwarn):
"""
test for expanded capability created in #1683
"""
alma = get_client()
alma.cache_location = temp_dir
uid = 'uid://A001/X12a3/Xe9'
result1 = alma.stage_data(uid, expand_tarfiles=False)
result2 = alma.stage_data(uid, expand_tarfiles=True)
expected_names = [
'2017.1.01185.S_uid___A002_Xd28a9e_X71b8.asdm.sdm.tar',
'2017.1.01185.S_uid___A002_Xd28a9e_X7b4d.asdm.sdm.tar',
'2017.1.01185.S_uid___A002_Xd29c1f_X1f74.asdm.sdm.tar',
'2017.1.01185.S_uid___A002_Xd29c1f_X5cf.asdm.sdm.tar']
expected_names_with_aux = expected_names + \
['2017.1.01185.S_uid___A001_X12a3_Xe9_auxiliary.tar']
for name in expected_names_with_aux:
assert name in result1['name']
for res in result1:
p = re.compile(r'.*(uid__.*)\.asdm.*')
if res['name'] in expected_names:
assert 'application/x-tar' == res['type']
assert res['id'] == p.search(res['name']).group(1)
else:
assert res['type'] in ['application/x-tar', 'application/x-votable+xml;content=datalink', 'text/plain']
assert res['id'] == 'None'
assert 'UNKNOWN' == res['permission']
assert res['mous_uid'] == uid
assert len(result2) > len(result1)
def test_stage_data_json(self, temp_dir, recwarn):
"""
test for json returns
"""
alma = get_client()
alma.cache_location = temp_dir
uid = 'uid://A001/X12a3/Xe9'
# this is temporary to switch back to ALMA servers
# alma.archive_url = 'http://almascience.org'
result = alma.stage_data(uid, return_json=False)
assert len(result) > 0
with pytest.raises(AttributeError):
# this no longer works
alma.stage_data(uid, return_json=True)
def test_data_proprietary(self):
# public
alma = get_client()
assert not alma.is_proprietary('uid://A001/X12a3/Xe9')
IVOA_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
now = datetime.utcnow().strftime(IVOA_DATE_FORMAT)[:-3]
query = "select top 1 obs_id from ivoa.obscore where " \
"obs_release_date > '{}'".format(now)
result = alma.query_tap(query)
assert len(result.table) == 1
# proprietary
assert alma.is_proprietary(result.table[0][0])
# non existent
with pytest.raises(AttributeError):
alma.is_proprietary('uid://NON/EXI/STING')
def test_data_info(self, temp_dir):
alma = get_client()
alma.cache_location = temp_dir
uid = 'uid://A001/X12a3/Xe9'
data_info = alma.get_data_info(uid, expand_tarfiles=True)
for file in data_info:
# TODO found files that do not match info.
# assert u.isclose(file['content_length']*u.B,
# alma._HEADER_data_size([file['access_url']])[1]),\
# 'File {} size: datalink and head do not match'.\
# format(file['access_url'])
pass
# compare with tarball version
data_info_tar = alma.get_data_info(uid, expand_tarfiles=False)
assert len(data_info) > len(data_info_tar)
# size is the same - not working because service inconsistencies
# assert sum(data_info['content_length']) == \
# sum(data_info_tar['content_length'])
# check smallest file downloads correctly
file = 'member.uid___A001_X12a3_Xe9.README.txt'
for url in data_info['access_url']:
if file in url:
file_url = url
break
assert file_url
alma.download_files([file_url], temp_dir)
assert os.stat(os.path.join(temp_dir, file)).st_size
# mock downloading an entire program
download_files_mock = Mock()
alma.download_files = download_files_mock
alma.retrieve_data_from_uid([uid])
comparison = download_files_mock.mock_calls[0][1] == data_info_tar[
'access_url']
assert comparison.all()
def test_download_data(self, temp_dir):
# test only fits files from a program
def myrequests(op, file_url, **kwargs):
# this is to avoid downloading the actual files
if op == 'HEAD':
return Mock(headers={'Content-Type': 'fits'})
else:
return file_url.split('/')[-1]
alma = get_client()
alma.cache_location = temp_dir
uid = 'uid://A001/X12a3/Xe9'
data_info = alma.get_data_info(uid, expand_tarfiles=True)
fitsre = re.compile(r'.*\.fits$')
alma._request = Mock(side_effect=myrequests)
urls = [x['access_url'] for x in data_info
if fitsre.match(x['access_url'])]
results = alma.download_files(urls, temp_dir)
alma._request.assert_called()
assert len(results) == len(urls)
# each url triggers 2 calls: HEAD and GET
assert len(urls)*2 == len(alma._request.mock_calls)
def test_download_and_extract(self, temp_dir):
def myrequests(op, file_url, **kwargs):
# this is to avoid downloading the actual files
if op == 'HEAD':
return Mock(headers={'Content-Type': 'fits'})
else:
return file_url.split('/')[-1]
alma = get_client()
alma.cache_location = temp_dir
alma._request = Mock(side_effect=myrequests)
alma._cycle0_tarfile_content_table = {'ID': ''}
uid = 'uid://A001/X12a3/Xe9'
data_info = alma.get_data_info(uid, expand_tarfiles=False)
aux_tar_file = [x for x in data_info['access_url'] if 'auxiliary' in x]
assert 1 == len(aux_tar_file)
# there are no FITS files in the auxiliary file
assert not alma.download_and_extract_files(aux_tar_file)
# download python scripts now
downloaded = alma.download_and_extract_files(aux_tar_file,
regex=r'.*\.py')
assert len(downloaded) > 1
assert len(downloaded)*2 == len(alma._request.mock_calls)
# ASDM files cannot be expanded.
asdm_url = [x for x in data_info['access_url'] if 'asdm' in x][0]
tarfile_handle_mock = Mock()
mock_content_file1 = Mock(path='/tmp/')
# mocking attribute name is trickier and it requires the name to
# be set separately.
mock_content_file1.name = 'foo.py'
mock_content_file2 = Mock(path='/tmp/')
mock_content_file2.name = 'blah.txt'
tarfile_handle_mock.getmembers.return_value = \
[mock_content_file1, mock_content_file2]
tarfile_pkg_mock = Mock()
tarfile_pkg_mock.open.return_value = tarfile_handle_mock
with patch('astroquery.alma.core.tarfile', tarfile_pkg_mock):
with patch('astroquery.alma.core.os.remove') as delete_mock:
downloaded_asdm = alma.download_and_extract_files(
[asdm_url], include_asdm=True, regex=r'.*\.py')
delete_mock.assert_called_once_with(asdm_url.split('/')[-1])
assert downloaded_asdm == [os.path.join(temp_dir, 'foo.py')]
@pytest.mark.skipif("SKIP_SLOW", reason="Known issue")
def test_doc_example(self, temp_dir):
alma = get_client()
alma.cache_location = temp_dir
alma2 = get_client()
alma2.cache_location = temp_dir
m83_data = alma.query_object('M83', legacy_columns=True)
# the order can apparently sometimes change
# These column names change too often to keep testing.
# assert set(m83_data.colnames) == set(all_colnames)
galactic_center = coordinates.SkyCoord(0 * u.deg, 0 * u.deg,
frame='galactic')
gc_data = alma.query_region(galactic_center, 1 * u.deg)
# assert len(gc_data) >= 425 # Feb 8, 2016
assert len(gc_data) >= 50 # Nov 16, 2016
uids = np.unique(m83_data['Member ous id'])
if ASTROPY_LT_4_1:
assert b'uid://A001/X11f/X30' in uids
X30 = (m83_data['Member ous id'] == b'uid://A001/X11f/X30')
X31 = (m83_data['Member ous id'] == b'uid://A002/X3216af/X31')
else:
assert 'uid://A001/X11f/X30' in uids
X30 = (m83_data['Member ous id'] == 'uid://A001/X11f/X30')
X31 = (m83_data['Member ous id'] == 'uid://A002/X3216af/X31')
assert X30.sum() == 4 # Jul 13, 2020
assert X31.sum() == 4 # Jul 13, 2020
mous1 = alma.stage_data('uid://A001/X11f/X30')
totalsize_mous1 = mous1['size'].sum() * u.Unit(mous1['size'].unit)
assert (totalsize_mous1.to(u.B) > 1.9*u.GB)
mous = alma2.stage_data('uid://A002/X3216af/X31')
totalsize_mous = mous['size'].sum() * u.Unit(mous['size'].unit)
# More recent ALMA request responses do not include any information
# about file size, so we have to allow for the possibility that all
# file sizes are replaced with -1
assert (totalsize_mous.to(u.GB).value > 52)
def test_query(self, temp_dir):
alma = get_client()
alma.cache_location = temp_dir
result = alma.query(payload={'start_date': '<11-11-2011'},
public=False, legacy_columns=True, science=True)
# Nov 16, 2016: 159
# Apr 25, 2017: 150
# Jul 2, 2017: 160
# May 9, 2018: 162
# March 18, 2019: 171 (seriously, how do they keep changing history?)
# with SIA2 numbers are different (cardinality?) assert len(result) == 171
test_date = datetime.strptime('11-11-2011', '%d-%m-%Y')
for row in result['Observation date']:
assert test_date > datetime.strptime(row, '%d-%m-%Y'), \
'Unexpected value: {}'.format(row)
# Not in the help - no need to support it.
# result = alma.query(payload={'member_ous_id': 'uid://A001/X11a2/X11'},
# science=True)
# assert len(result) == 1
@pytest.mark.skipif("SKIP_SLOW", reason="ra dec search known issue")
def test_misc(self):
# miscellaneous set of common tests
alma = get_client()
#
# alma.query_region(coordinate=orionkl_coords, radius=4 * u.arcmin,
# public=False, science=False)
result = alma.query_object('M83', public=True, science=True)
assert len(result) > 0
result = alma.query(payload={'pi_name': '*Bally*'}, public=False,
maxrec=10)
assert result
result.write('/tmp/alma-onerow.txt', format='ascii')
for row in result:
assert 'Bally' in row['obs_creator_name']
result = alma.query(payload=dict(project_code='2016.1.00165.S'),
public=False, cache=False)
assert result
for row in result:
assert '2016.1.00165.S' == row['proposal_id']
result = alma.query(payload=dict(project_code='2017.1.01355.L',
source_name_alma='G008.67'),)
assert result
for row in result:
assert '2017.1.01355.L' == row['proposal_id']
assert 'Public' == row['data_rights']
assert 'G008.67' in row['target_name']
result = alma.query_region(
coordinates.SkyCoord('5:35:14.461 -5:21:54.41', frame='fk5',
unit=(u.hour, u.deg)), radius=0.034 * u.deg)
assert result
result = alma.query_region(
coordinates.SkyCoord('5:35:14.461 -5:21:54.41', frame='fk5',
unit=(u.hour, u.deg)), radius=0.034 * u.deg,
payload={'energy.frequency-asu': '215 .. 220'})
result = alma.query(payload=dict(project_code='2012.*',
public_data=True))
assert result
for row in result:
assert '2012.' in row['proposal_id']
assert 'Public' == row['data_rights']
result = alma.query(payload={'frequency': '96 .. 96.5'})
assert result
for row in result:
# TODO not sure how to test this
pass
result = alma.query_object('M83', band_list=[3, 6, 8])
assert result
for row in result:
assert row['band_list'] in ['3', '6', '8']
result = alma.query(payload={'pi_name': '*Ginsburg*',
'band_list': '6'})
assert result
for row in result:
assert '6' == row['band_list']
assert 'ginsburg' in row['obs_creator_name'].lower()
@pytest.mark.skipif("SKIP_SLOW")
def test_user(self):
# miscellaneous set of tests from current users
alma = get_client()
rslt = alma.query({'band_list': [6], 'project_code': '2012.1.*'},
legacy_columns=True)
for row in rslt:
print(row['Project code'])
print(row['Member ous id'])
# As of April 2017, these data are *MISSING FROM THE ARCHIVE*.
# This has been reported, as it is definitely a bug.
@pytest.mark.xfail
@pytest.mark.bigdata
@pytest.mark.skipif("SKIP_SLOW")
def test_cycle1(self, temp_dir):
# About 500 MB
alma = get_client()
alma.cache_location = temp_dir
target = 'NGC4945'
project_code = '2012.1.00912.S'
payload = {'project_code': project_code,
'source_name_alma': target, }
result = alma.query(payload=payload)
assert len(result) == 1
# Need new Alma() instances each time
a1 = alma()
uid_url_table_mous = a1.stage_data(result['Member ous id'])
a2 = alma()
uid_url_table_asdm = a2.stage_data(result['Asdm uid'])
# I believe the fixes as part of #495 have resulted in removal of a
# redundancy in the table creation, so a 1-row table is OK here.
# A 2-row table may not be OK any more, but that's what it used to
# be...
assert len(uid_url_table_asdm) == 1
assert len(uid_url_table_mous) >= 2 # now is len=3 (Nov 17, 2016)
# URL should look like:
# https://almascience.eso.org/dataPortal/requests/anonymous/944120962/ALMA/2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar/2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar
# https://almascience.eso.org/rh/requests/anonymous/944222597/2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar/2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar
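        # keep only the smallest entries (size < 1 in whatever unit the 'size' column uses) to limit the download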
small = uid_url_table_mous['size'] < 1
urls_to_download = uid_url_table_mous[small]['URL']
uri = urlparse(urls_to_download[0])
assert uri.path == ('/dataPortal/requests/anonymous/{0}/ALMA/2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar/2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar' # noqa
.format(a1._staging_log['staging_page_id']))
# THIS IS FAIL
# '2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar'
left = uid_url_table_mous['URL'][0].split("/")[-1]
assert left == '2012.1.00912.S_uid___A002_X5a9a13_X528_001_of_001.tar'
right = uid_url_table_mous['uid'][0]
assert right == 'uid://A002/X5a9a13/X528'
assert left[15:-15] == right.replace(":", "_").replace("/", "_")
data = alma.download_and_extract_files(urls_to_download)
assert len(data) == 6
@pytest.mark.skipif("SKIP_SLOW")
@pytest.mark.skip("Not working anymore")
def test_cycle0(self, temp_dir):
# About 20 MB
alma = get_client()
alma.cache_location = temp_dir
target = 'NGC4945'
project_code = '2011.0.00121.S'
payload = {'project_code': project_code,
'source_name_alma': target, }
result = alma.query(payload=payload, legacy_columns=True)
assert len(result) == 1
alma1 = alma()
alma2 = alma()
uid_url_table_mous = alma1.stage_data(result['Member ous id'])
uid_url_table_asdm = alma2.stage_data(result['Asdm uid'])
assert len(uid_url_table_asdm) == 1
assert len(uid_url_table_mous) == 32
assert uid_url_table_mous[0]['URL'].split("/")[-1] == '2011.0.00121.S_2012-08-16_001_of_002.tar'
assert uid_url_table_mous[0]['uid'] == 'uid://A002/X327408/X246'
small = uid_url_table_mous['size'] < 1
urls_to_download = uid_url_table_mous[small]['URL']
# Check that all URLs show up in the Cycle 0 table
for url in urls_to_download:
tarfile_name = os.path.split(url)[-1]
assert tarfile_name in alma._cycle0_tarfile_content['ID']
data = alma.download_and_extract_files(urls_to_download)
# There are 10 small files, but only 8 unique
assert len(data) == 8
def test_keywords(self, temp_dir):
alma = get_client()
alma.help_tap()
result = alma.query_tap(
"select * from ivoa.obscore where s_resolution <0.1 and "
"science_keyword in ('High-mass star formation', 'Disks around "
"high-mass stars')")
assert len(result) >= 72
# TODO why is it failing
# assert 'Orion_Source_I' in result['target_name']
@pytest.mark.remote_data
def test_project_metadata():
alma = get_client()
metadata = alma.get_project_metadata('2013.1.00269.S')
assert metadata == ['Sgr B2, a high-mass molecular cloud in our Galaxy\'s '
'Central Molecular Zone, is the most extreme site of '
'ongoing star formation in the Local Group in terms '
'of its gas content, temperature, and velocity '
'dispersion. If any cloud in our galaxy is analogous '
'to the typical cloud at the universal peak of star '
'formation at z~2, this is it. We propose a 6\'x6\' '
'mosaic in the 3mm window targeting gas thermometer '
'lines, specifically CH3CN and its isotopologues. We '
'will measure the velocity dispersion and temperature '
'of the molecular gas on all scales (0.02 - 12 pc, '
'0.5" - 5\') within the cloud, which will yield '
'resolved measurements of the Mach number and the '
'sonic scale of the gas. We will assess the relative '
'importance of stellar feedback and turbulence on the '
'star-forming gas, determining how extensive the '
'feedback effects are within an ultradense '
'environment. The observations will provide '
'constraints on the inputs to star formation theories '
'and will determine their applicability in extremely '
'dense, turbulent, and hot regions. Sgr B2 will be '
'used as a testing ground for star formation theories '
'in an environment analogous to high-z starburst '
'clouds in which they must be applied.']
@pytest.mark.remote_data
@pytest.mark.parametrize('dataarchive_url', _test_url_list)
@pytest.mark.skip('Not working for now - Investigating')
def test_staging_postfeb2020(dataarchive_url):
alma = get_client()
tbl = alma.stage_data('uid://A001/X121/X4ba')
assert 'mous_uid' in tbl.colnames
assert '2013.1.00269.S_uid___A002_X9de499_X3d6c.asdm.sdm.tar' in tbl['name']
@pytest.mark.remote_data
@pytest.mark.parametrize('dataarchive_url', _url_list)
@pytest.mark.skip('Not working for now - Investigating')
def test_staging_uptofeb2020(dataarchive_url):
alma = get_client()
tbl = alma.stage_data('uid://A001/X121/X4ba')
assert 'mous_uid' in tbl.colnames
names = [x.split("/")[-1] for x in tbl['URL']]
assert '2013.1.00269.S_uid___A002_X9de499_X3d6c.asdm.sdm.tar' in names
@pytest.mark.remote_data
@pytest.mark.parametrize('dataarchive_url', _test_url_list)
def test_staging_stacking(dataarchive_url):
alma = get_client()
alma.stage_data(['uid://A001/X13d5/X1d', 'uid://A002/X3216af/X31',
'uid://A001/X12a3/X240'])
|
[
"astropy.units.Unit",
"os.path.join",
"shutil.rmtree",
"os.path.split",
"unittest.mock.Mock",
"pytest.fixture",
"urllib.parse.urlparse",
"unittest.mock.patch",
"datetime.datetime.utcnow",
"datetime.datetime.strptime",
"tempfile.mkdtemp",
"pytest.mark.skipif",
"pytest.raises",
"pytest.mark.parametrize",
"pytest.mark.skip",
"astropy.coordinates.SkyCoord",
"numpy.unique",
"re.compile"
] |
[((27292, 27350), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dataarchive_url"""', '_test_url_list'], {}), "('dataarchive_url', _test_url_list)\n", (27315, 27350), False, 'import pytest\n'), ((27352, 27407), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not working for now - Investigating"""'], {}), "('Not working for now - Investigating')\n", (27368, 27407), False, 'import pytest\n'), ((27679, 27732), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dataarchive_url"""', '_url_list'], {}), "('dataarchive_url', _url_list)\n", (27702, 27732), False, 'import pytest\n'), ((27734, 27789), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not working for now - Investigating"""'], {}), "('Not working for now - Investigating')\n", (27750, 27789), False, 'import pytest\n'), ((28107, 28165), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dataarchive_url"""', '_test_url_list'], {}), "('dataarchive_url', _test_url_list)\n", (28130, 28165), False, 'import pytest\n'), ((1780, 1796), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1794, 1796), False, 'import pytest\n'), ((3418, 3520), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {'reason': '"""Extremely slow due to limitations of the implementation"""'}), "('SKIP_SLOW', reason=\n 'Extremely slow due to limitations of the implementation')\n", (3436, 3520), False, 'import pytest\n'), ((4850, 4903), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {'reason': '"""Known issue"""'}), "('SKIP_SLOW', reason='Known issue')\n", (4868, 4903), False, 'import pytest\n'), ((5085, 5116), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {}), "('SKIP_SLOW')\n", (5103, 5116), False, 'import pytest\n'), ((6432, 6485), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {'reason': '"""Known issue"""'}), "('SKIP_SLOW', reason='Known issue')\n", (6450, 6485), False, 'import pytest\n'), ((15115, 15168), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {'reason': '"""Known issue"""'}), "('SKIP_SLOW', reason='Known issue')\n", (15133, 15168), False, 'import pytest\n'), ((17992, 18059), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {'reason': '"""ra dec search known issue"""'}), "('SKIP_SLOW', reason='ra dec search known issue')\n", (18010, 18059), False, 'import pytest\n'), ((20621, 20652), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {}), "('SKIP_SLOW')\n", (20639, 20652), False, 'import pytest\n'), ((21165, 21196), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {}), "('SKIP_SLOW')\n", (21183, 21196), False, 'import pytest\n'), ((23456, 23487), 'pytest.mark.skipif', 'pytest.mark.skipif', (['"""SKIP_SLOW"""'], {}), "('SKIP_SLOW')\n", (23474, 23487), False, 'import pytest\n'), ((23493, 23532), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Not working anymore"""'], {}), "('Not working anymore')\n", (23509, 23532), False, 'import pytest\n'), ((1852, 1870), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1868, 1870), False, 'import tempfile\n'), ((5322, 5358), 'numpy.unique', 'np.unique', (["m83_data['Member ous id']"], {}), "(m83_data['Member ous id'])\n", (5331, 5358), True, 'import numpy as np\n'), ((11733, 11739), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (11737, 11739), False, 'from unittest.mock import Mock, patch\n'), ((12511, 12535), 're.compile', 're.compile', (['""".*\\\\.fits$"""'], {}), "('.*\\\\.fits$')\n", (12521, 12535), False, 'import re\n'), ((12560, 12588), 
'unittest.mock.Mock', 'Mock', ([], {'side_effect': 'myrequests'}), '(side_effect=myrequests)\n', (12564, 12588), False, 'from unittest.mock import Mock, patch\n'), ((13340, 13368), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': 'myrequests'}), '(side_effect=myrequests)\n', (13344, 13368), False, 'from unittest.mock import Mock, patch\n'), ((14192, 14198), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (14196, 14198), False, 'from unittest.mock import Mock, patch\n'), ((14228, 14246), 'unittest.mock.Mock', 'Mock', ([], {'path': '"""/tmp/"""'}), "(path='/tmp/')\n", (14232, 14246), False, 'from unittest.mock import Mock, patch\n'), ((14421, 14439), 'unittest.mock.Mock', 'Mock', ([], {'path': '"""/tmp/"""'}), "(path='/tmp/')\n", (14425, 14439), False, 'from unittest.mock import Mock, patch\n'), ((14621, 14627), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (14625, 14627), False, 'from unittest.mock import Mock, patch\n'), ((15614, 15674), 'astropy.coordinates.SkyCoord', 'coordinates.SkyCoord', (['(0 * u.deg)', '(0 * u.deg)'], {'frame': '"""galactic"""'}), "(0 * u.deg, 0 * u.deg, frame='galactic')\n", (15634, 15674), False, 'from astropy import coordinates\n'), ((15903, 15939), 'numpy.unique', 'np.unique', (["m83_data['Member ous id']"], {}), "(m83_data['Member ous id'])\n", (15912, 15939), True, 'import numpy as np\n'), ((17564, 17607), 'datetime.datetime.strptime', 'datetime.strptime', (['"""11-11-2011"""', '"""%d-%m-%Y"""'], {}), "('11-11-2011', '%d-%m-%Y')\n", (17581, 17607), False, 'from datetime import datetime\n'), ((22669, 22698), 'urllib.parse.urlparse', 'urlparse', (['urls_to_download[0]'], {}), '(urls_to_download[0])\n', (22677, 22698), False, 'from urllib.parse import urlparse\n'), ((1903, 1929), 'shutil.rmtree', 'shutil.rmtree', (['my_temp_dir'], {}), '(my_temp_dir)\n', (1916, 1929), False, 'import shutil\n'), ((8652, 8686), 're.compile', 're.compile', (['""".*(uid__.*)\\\\.asdm.*"""'], {}), "('.*(uid__.*)\\\\.asdm.*')\n", (8662, 8686), False, 'import re\n'), ((9603, 9632), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (9616, 9632), False, 'import pytest\n'), ((10289, 10318), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (10302, 10318), False, 'import pytest\n'), ((14706, 14761), 'unittest.mock.patch', 'patch', (['"""astroquery.alma.core.tarfile"""', 'tarfile_pkg_mock'], {}), "('astroquery.alma.core.tarfile', tarfile_pkg_mock)\n", (14711, 14761), False, 'from unittest.mock import Mock, patch\n'), ((16568, 16594), 'astropy.units.Unit', 'u.Unit', (["mous1['size'].unit"], {}), "(mous1['size'].unit)\n", (16574, 16594), True, 'from astropy import units as u\n'), ((16752, 16777), 'astropy.units.Unit', 'u.Unit', (["mous['size'].unit"], {}), "(mous['size'].unit)\n", (16758, 16777), True, 'from astropy import units as u\n'), ((19312, 19398), 'astropy.coordinates.SkyCoord', 'coordinates.SkyCoord', (['"""5:35:14.461 -5:21:54.41"""'], {'frame': '"""fk5"""', 'unit': '(u.hour, u.deg)'}), "('5:35:14.461 -5:21:54.41', frame='fk5', unit=(u.hour,\n u.deg))\n", (19332, 19398), False, 'from astropy import coordinates\n'), ((19522, 19608), 'astropy.coordinates.SkyCoord', 'coordinates.SkyCoord', (['"""5:35:14.461 -5:21:54.41"""'], {'frame': '"""fk5"""', 'unit': '(u.hour, u.deg)'}), "('5:35:14.461 -5:21:54.41', frame='fk5', unit=(u.hour,\n u.deg))\n", (19542, 19608), False, 'from astropy import coordinates\n'), ((11619, 11647), 'os.path.join', 'os.path.join', (['temp_dir', 'file'], {}), '(temp_dir, file)\n', (11631, 11647), 
False, 'import os\n'), ((12219, 12257), 'unittest.mock.Mock', 'Mock', ([], {'headers': "{'Content-Type': 'fits'}"}), "(headers={'Content-Type': 'fits'})\n", (12223, 12257), False, 'from unittest.mock import Mock, patch\n'), ((13145, 13183), 'unittest.mock.Mock', 'Mock', ([], {'headers': "{'Content-Type': 'fits'}"}), "(headers={'Content-Type': 'fits'})\n", (13149, 13183), False, 'from unittest.mock import Mock, patch\n'), ((14780, 14819), 'unittest.mock.patch', 'patch', (['"""astroquery.alma.core.os.remove"""'], {}), "('astroquery.alma.core.os.remove')\n", (14785, 14819), False, 'from unittest.mock import Mock, patch\n'), ((15075, 15107), 'os.path.join', 'os.path.join', (['temp_dir', '"""foo.py"""'], {}), "(temp_dir, 'foo.py')\n", (15087, 15107), False, 'import os\n'), ((17686, 17720), 'datetime.datetime.strptime', 'datetime.strptime', (['row', '"""%d-%m-%Y"""'], {}), "(row, '%d-%m-%Y')\n", (17703, 17720), False, 'from datetime import datetime\n'), ((24609, 24627), 'os.path.split', 'os.path.split', (['url'], {}), '(url)\n', (24622, 24627), False, 'import os\n'), ((9930, 9947), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9945, 9947), False, 'from datetime import datetime\n')]
|
import ocdskingfisherprocess.cli.commands.base
import redis
class UpgradeDataBaseCLICommand(ocdskingfisherprocess.cli.commands.base.CLICommand):
command = 'upgrade-database'
def configure_subparser(self, subparser):
subparser.add_argument("--deletefirst", help="Delete Database First", action="store_true")
def run_command(self, args):
if args.deletefirst:
if not args.quiet:
print("Dropping Database")
self.database.delete_tables()
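            # also drop the Redis work queue, if Redis is configured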
if self.config.is_redis_available():
if not args.quiet:
print("Dropping Redis")
redis_conn = redis.Redis(host=self.config.redis_host, port=self.config.redis_port, db=self.config.redis_database)
redis_conn.delete('kingfisher_work')
if not args.quiet:
print("Upgrading/Creating Database")
self.database.create_tables()
|
[
"redis.Redis"
] |
[((664, 769), 'redis.Redis', 'redis.Redis', ([], {'host': 'self.config.redis_host', 'port': 'self.config.redis_port', 'db': 'self.config.redis_database'}), '(host=self.config.redis_host, port=self.config.redis_port, db=\n self.config.redis_database)\n', (675, 769), False, 'import redis\n')]
|
import datetime
from charms.reactive import hook
from charms.reactive import RelationBase
from charms.reactive import scopes
class NrpeExternalMasterProvides(RelationBase):
scope = scopes.GLOBAL
@hook('{provides:nrpe-external-master}-relation-{joined,changed}')
def changed_nrpe(self):
self.set_state('{relation_name}.available')
@hook('{provides:nrpe-external-master}-relation-{broken,departed}')
def broken_nrpe(self):
self.remove_state('{relation_name}.available')
def add_check(self, args, name=None, description=None, context=None,
servicegroups=None, unit=None):
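        # Juju unit names contain '/' (e.g. 'app/0'); flatten it for use in file names and host_name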
unit = unit.replace('/', '-')
check_tmpl = """
#---------------------------------------------------
# This file is Juju managed
#---------------------------------------------------
command[%(check_name)s]=%(check_args)s
"""
service_tmpl = """
#---------------------------------------------------
# This file is Juju managed
#---------------------------------------------------
define service {
use active-service
host_name %(context)s-%(unit_name)s
service_description %(description)s
check_command check_nrpe!%(check_name)s
servicegroups %(servicegroups)s
}
"""
check_filename = "/etc/nagios/nrpe.d/check_%s.cfg" % (name)
with open(check_filename, "w") as fh:
fh.write(check_tmpl % {
'check_args': ' '.join(args),
'check_name': name,
})
service_filename = "/var/lib/nagios/export/service__%s_%s.cfg" % (
unit, name)
with open(service_filename, "w") as fh:
fh.write(service_tmpl % {
'servicegroups': servicegroups or context,
'context': context,
'description': description,
'check_name': name,
'unit_name': unit,
})
def updated(self):
relation_info = {
'timestamp': datetime.datetime.now().isoformat(),
}
self.set_remote(**relation_info)
self.remove_state('{relation_name}.available')
|
[
"datetime.datetime.now",
"charms.reactive.hook"
] |
[((208, 273), 'charms.reactive.hook', 'hook', (['"""{provides:nrpe-external-master}-relation-{joined,changed}"""'], {}), "('{provides:nrpe-external-master}-relation-{joined,changed}')\n", (212, 273), False, 'from charms.reactive import hook\n'), ((360, 426), 'charms.reactive.hook', 'hook', (['"""{provides:nrpe-external-master}-relation-{broken,departed}"""'], {}), "('{provides:nrpe-external-master}-relation-{broken,departed}')\n", (364, 426), False, 'from charms.reactive import hook\n'), ((2085, 2108), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2106, 2108), False, 'import datetime\n')]
|
from http import HTTPStatus
from django import forms
from django.core.cache import cache
from django.test import Client, TestCase
from django.urls import reverse
from ..models import Follow, Group, Post, User, Comment
# , Profile
POSTS_COUNT = 13
PAGE_COUNT = 10
class PostPagesTests(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='TestUser')
# cls.profile = Profile.objects.create(user=cls.user)
cls.group = Group.objects.create(title='TestGroup',
slug='test_slug',
description='Test description')
cls.post = Post.objects.create(
text='Тестовый пост длинна котого больше 15 символов',
author=cls.user, group=cls.group
)
cls.templates_pages_names = {
'posts/index.html': reverse('index'),
'posts/group.html': reverse('group', kwargs={
'slug': cls.group.slug}),
'posts/create.html': reverse('post_create'),
'posts/follow.html': reverse('follow_index')
}
def setUp(self):
self.guest_client = Client()
self.authorized_user = Client()
self.authorized_user.force_login(self.user)
def test_pages_use_correct_template(self):
"""URL-адрес использует соответствующий шаблон..........................
"""
cache.clear()
for template, reverse_name in self.templates_pages_names.items():
with self.subTest(reverse_name=reverse_name):
response = self.authorized_user.get(reverse_name)
self.assertTemplateUsed(response, template)
def test_context_in_template_index(self):
"""Шаблон index сформирован с правильным контекстом.....................
При создании поста с указанием группы,
этот пост появляется на главной странице сайта.
"""
cache.clear()
response = self.authorized_user.get(reverse('index'))
last_post = response.context['page_obj'][0]
self.assertEqual(last_post, self.post)
def get_context(self, name):
self.assertEqual(name.group, PostPagesTests.post.group)
self.assertEqual(name.text, PostPagesTests.post.text)
def test_context_in_template_group(self):
"""Шаблон group сформирован с правильным контекстом.....................
При создании поста с указанием группы,
этот пост появляется на странице этой группы.
"""
response = self.authorized_user.get(reverse('group', kwargs={
'slug': self.group.slug}))
test_group = response.context['group']
test_post = response.context['page_obj'][0]
self.assertEqual(test_group, self.group)
self.assertEqual(test_post, self.post)
self.get_context(test_post)
self.assertEqual(Post.objects.first().text, self.post.text)
self.assertEqual(Post.objects.first().group, self.post.group)
def test_context_in_template_new_post(self):
"""Шаблон new_posts сформирован с правильным контекстом.................
"""
response = self.authorized_user.get(reverse('post_create'))
form_fields = {'text': forms.fields.CharField,
'group': forms.fields.ChoiceField}
for value, expect in form_fields.items():
with self.subTest(value=value):
form_field = response.context['form'].fields[value]
self.assertIsInstance(form_field, expect)
response = self.guest_client.get(reverse('post_create'))
urls = '/auth/login/?next=/create/'
self.assertRedirects(response, urls, status_code=HTTPStatus.FOUND)
def test_context_in_template_post_edit(self):
"""Шаблон post_edit сформирован с правильным контекстом.................
"""
response = self.authorized_user.get(reverse('post_edit', kwargs={
'post_id': self.post.id}))
form_fields = {'text': forms.fields.CharField, }
for value, expect in form_fields.items():
with self.subTest(value=value):
form_field = response.context['form'].fields[value]
self.assertIsInstance(form_field, expect)
def test_context_in_template_profile(self):
"""Шаблон profile сформирован с правильным контекстом...................
"""
response = self.authorized_user.get(reverse('profile', kwargs={
'username': self.user.username, }))
profile = {'author': self.post.author}
for value, expect in profile.items():
with self.subTest(value=value):
context = response.context[value]
self.assertEqual(context, expect)
test_page = response.context['page_obj'][0]
self.assertEqual(test_page, self.user.posts.all()[0])
def test_context_in_template_post(self):
"""Шаблон post сформирован с правильным контекстом......................
"""
response = self.authorized_user.get(reverse('post_detail', kwargs={
'post_id': self.post.id}))
profile = {'author': self.post.author, 'post': self.post}
for value, expect in profile.items():
with self.subTest(value=value):
context = response.context[value]
self.assertEqual(context, expect)
def test_post_not_in_wrong_group(self):
"""Проверка что post не попал ни в ту группу............................
и попал в нужную"""
cache.clear()
Group.objects.create(title='new_group', slug='new_slug')
response = self.authorized_user.get(reverse('group', kwargs={
'slug': 'new_slug'}))
group = response.context['group']
post = group.posts.count()
self.assertEqual(post, 0)
self.assertEqual(len(response.context['page_obj'].object_list), 0)
response = self.authorized_user.get(reverse('index'))
post = response.context['page_obj'][0]
group = post.group
self.assertEqual(group, self.group)
def test_wrong_uri_returns_404(self):
"""Проверка страницы 404................................................
"""
response = self.authorized_user.get('chtoto/poshlo/ne.tak')
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
self.assertTemplateUsed(response, 'misc/404.html')
def test_wrong_uri_returns_500(self):
"""Проверка страницы 404................................................
"""
response = self.authorized_user.get(reverse('page500'))
self.assertTemplateUsed(response, 'misc/500.html')
class PaginatorTest(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='TestUser')
# cls.profile = Profile.objects.create(user=cls.user)
cls.authorized_user = Client()
cls.authorized_user.force_login(cls.user)
cls.group = Group.objects.create(title='TestGroup',
slug='test_slug',
description='Test description')
for counts in range(POSTS_COUNT):
cls.post = Post.objects.create(
author=cls.user, text='Тестовый пост под номером {counts}',
group=cls.group)
cls.templates_pages_names = {
'posts/index.html': reverse('index'),
'posts/group.html': reverse('group', kwargs={
'slug': cls.group.slug}),
'posts/profile.html': reverse('profile', kwargs={
'username': cls.user.username})}
def test_first_page_have_ten_posts(self):
"""Проверка первой страницы paginator должен показать 10 постов.........
"""
cache.clear()
for address, reverse_name in self.templates_pages_names.items():
with self.subTest(adress=address):
response = self.authorized_user.get(reverse_name)
self.assertEqual(len(response.context.get('page_obj').object_list),
PAGE_COUNT)
def test_second_page_have_three_posts(self):
"""Проверка второй страницы paginator должен покажать 3 поста...........
"""
for address, reverse_name in self.templates_pages_names.items():
with self.subTest(adress=address):
response = self.authorized_user.get(reverse_name + '?page=2')
self.assertEqual(len(
response.context.get('page_obj').object_list),
POSTS_COUNT - PAGE_COUNT)
class TestCache(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='TestUser')
cls.authorized_user = Client()
cls.authorized_user.force_login(cls.user)
cls.group = Group.objects.create(title='TestGroup',
slug='test_slug',
description='Test description')
cls.post = Post.objects.create(author=cls.user, group=cls.group,
text='text')
def test_cache_index(self):
"""Проверка что страница индекса работает с 20 секундным кешем..........
"""
response = self.authorized_user.get(reverse('index'))
Post.objects.create(author=self.user, text='test cache text',
group=self.group)
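        # while the cached page is served, the new post must not show up yet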
response1 = self.authorized_user.get(reverse('index'))
self.assertEqual(response.content, response1.content)
cache.clear()
response3 = self.authorized_user.get(reverse('index'))
self.assertNotEqual(response3.content, response1.content)
self.assertEqual(response3.context['page_obj'][0].text,
'test cache text')
self.assertEqual(len(response3.context['page_obj'].object_list), 2)
class TestFollow(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='TestAuthor')
cls.group = Group.objects.create(title='TestGroup',
slug='test_slug',
description='Test description')
cls.follow_user = User.objects.create_user(username='TestUser')
def setUp(self):
self.authorized_user = Client()
self.authorized_user.force_login(self.follow_user)
def test_follow(self):
"""Тест что подписка работает и фаловер добавляетя......................
"""
follow_count1 = Follow.objects.count()
follow = Follow.objects.filter(author=self.user, user=self.follow_user)
self.assertEqual(follow.first(), None)
response = self.authorized_user.get(reverse('profile_follow', kwargs={
'username': self.user.username}))
follow_count2 = Follow.objects.count()
self.assertEqual(follow_count2, follow_count1 + 1)
follow = Follow.objects.first()
self.assertEqual(Follow.objects.count(), 1)
self.assertEqual(follow.author, self.user)
self.assertEqual(follow.user, self.follow_user)
self.assertEqual(response.status_code, HTTPStatus.FOUND)
def test_unfollow(self):
"""Тест что фаловер может отписаться....................................
"""
self.authorized_user.get(reverse('profile_follow', kwargs={
'username': self.user.username}))
self.authorized_user.get(reverse('profile_unfollow', kwargs={
'username': self.user.username}))
self.assertFalse(Follow.objects.exists())
def test_follow_index(self):
"""Тест что пост появляется в ленте фаловера............................
"""
Post.objects.create(author=self.user, text='test follow text',
group=self.group)
self.authorized_user.get(reverse('profile_follow', kwargs={
'username': self.user.username}))
response = self.authorized_user.get(reverse('follow_index'))
post = response.context['post']
self.assertEqual(post.text, 'test follow text')
self.assertEqual(post.author, self.user)
self.assertEqual(post.group.id, self.group.id)
def test_not_follow_index(self):
"""Тест что у не фаловера посты не появляются...........................
"""
Post.objects.create(author=self.user, text='test follow text',
group=self.group)
response = self.authorized_user.get(reverse('follow_index'))
self.assertEqual(response.context['paginator'].count, 0)
def test_following_self(self):
"""Тест что нельзя подписаться на самого себя...........................
"""
self.assertEqual(Follow.objects.all().count(), 0)
self.authorized_user.get(reverse('profile_follow', kwargs={
'username': self.follow_user.username}))
self.assertEqual(Follow.objects.all().count(), 0)
self.authorized_user.get(reverse('profile_follow', kwargs={
'username': self.user.username}))
self.assertEqual(Follow.objects.all().count(), 1)
class TestComments(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.user = User.objects.create_user(username='TestUser')
cls.comment_user = User.objects.create_user(username='TestCommentUser')
cls.post = Post.objects.create(text='test text', author=cls.user)
cls.url_comment = reverse('add_comment', kwargs={
'post_id': cls.post.id})
def setUp(self):
self.anonymous = Client()
self.authorized_user = Client()
self.authorized_user.force_login(self.comment_user)
def test_comment_anonymous(self):
"""Тест что анонима редиректит на авторизацию...........................
при попытки комментировать"""
response = self.anonymous.get(self.url_comment)
urls = '/auth/login/?next={}'.format(self.url_comment)
self.assertRedirects(response, urls, status_code=HTTPStatus.FOUND)
def test_comment_authorized(self):
"""Тест что авторизированный юзер может комментировать..................
"""
response = self.authorized_user.post(self.url_comment, {
'text': 'test comment'}, follow=True)
self.assertContains(response, 'test comment')
self.assertEqual(Comment.objects.count(), 1)
|
[
"django.urls.reverse",
"django.core.cache.cache.clear",
"django.test.Client"
] |
[((1214, 1222), 'django.test.Client', 'Client', ([], {}), '()\n', (1220, 1222), False, 'from django.test import Client, TestCase\n'), ((1254, 1262), 'django.test.Client', 'Client', ([], {}), '()\n', (1260, 1262), False, 'from django.test import Client, TestCase\n'), ((1464, 1477), 'django.core.cache.cache.clear', 'cache.clear', ([], {}), '()\n', (1475, 1477), False, 'from django.core.cache import cache\n'), ((1988, 2001), 'django.core.cache.cache.clear', 'cache.clear', ([], {}), '()\n', (1999, 2001), False, 'from django.core.cache import cache\n'), ((5590, 5603), 'django.core.cache.cache.clear', 'cache.clear', ([], {}), '()\n', (5601, 5603), False, 'from django.core.cache import cache\n'), ((6991, 6999), 'django.test.Client', 'Client', ([], {}), '()\n', (6997, 6999), False, 'from django.test import Client, TestCase\n'), ((7884, 7897), 'django.core.cache.cache.clear', 'cache.clear', ([], {}), '()\n', (7895, 7897), False, 'from django.core.cache import cache\n'), ((8900, 8908), 'django.test.Client', 'Client', ([], {}), '()\n', (8906, 8908), False, 'from django.test import Client, TestCase\n'), ((9713, 9726), 'django.core.cache.cache.clear', 'cache.clear', ([], {}), '()\n', (9724, 9726), False, 'from django.core.cache import cache\n'), ((10525, 10533), 'django.test.Client', 'Client', ([], {}), '()\n', (10531, 10533), False, 'from django.test import Client, TestCase\n'), ((13681, 13736), 'django.urls.reverse', 'reverse', (['"""add_comment"""'], {'kwargs': "{'post_id': cls.post.id}"}), "('add_comment', kwargs={'post_id': cls.post.id})\n", (13688, 13736), False, 'from django.urls import reverse\n'), ((13797, 13805), 'django.test.Client', 'Client', ([], {}), '()\n', (13803, 13805), False, 'from django.test import Client, TestCase\n'), ((13837, 13845), 'django.test.Client', 'Client', ([], {}), '()\n', (13843, 13845), False, 'from django.test import Client, TestCase\n'), ((922, 938), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (929, 938), False, 'from django.urls import reverse\n'), ((972, 1021), 'django.urls.reverse', 'reverse', (['"""group"""'], {'kwargs': "{'slug': cls.group.slug}"}), "('group', kwargs={'slug': cls.group.slug})\n", (979, 1021), False, 'from django.urls import reverse\n'), ((1073, 1095), 'django.urls.reverse', 'reverse', (['"""post_create"""'], {}), "('post_create')\n", (1080, 1095), False, 'from django.urls import reverse\n'), ((1130, 1153), 'django.urls.reverse', 'reverse', (['"""follow_index"""'], {}), "('follow_index')\n", (1137, 1153), False, 'from django.urls import reverse\n'), ((2046, 2062), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (2053, 2062), False, 'from django.urls import reverse\n'), ((2608, 2658), 'django.urls.reverse', 'reverse', (['"""group"""'], {'kwargs': "{'slug': self.group.slug}"}), "('group', kwargs={'slug': self.group.slug})\n", (2615, 2658), False, 'from django.urls import reverse\n'), ((3229, 3251), 'django.urls.reverse', 'reverse', (['"""post_create"""'], {}), "('post_create')\n", (3236, 3251), False, 'from django.urls import reverse\n'), ((3627, 3649), 'django.urls.reverse', 'reverse', (['"""post_create"""'], {}), "('post_create')\n", (3634, 3649), False, 'from django.urls import reverse\n'), ((3958, 4012), 'django.urls.reverse', 'reverse', (['"""post_edit"""'], {'kwargs': "{'post_id': self.post.id}"}), "('post_edit', kwargs={'post_id': self.post.id})\n", (3965, 4012), False, 'from django.urls import reverse\n'), ((4490, 4549), 'django.urls.reverse', 'reverse', (['"""profile"""'], {'kwargs': 
"{'username': self.user.username}"}), "('profile', kwargs={'username': self.user.username})\n", (4497, 4549), False, 'from django.urls import reverse\n'), ((5101, 5157), 'django.urls.reverse', 'reverse', (['"""post_detail"""'], {'kwargs': "{'post_id': self.post.id}"}), "('post_detail', kwargs={'post_id': self.post.id})\n", (5108, 5157), False, 'from django.urls import reverse\n'), ((5713, 5758), 'django.urls.reverse', 'reverse', (['"""group"""'], {'kwargs': "{'slug': 'new_slug'}"}), "('group', kwargs={'slug': 'new_slug'})\n", (5720, 5758), False, 'from django.urls import reverse\n'), ((6003, 6019), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (6010, 6019), False, 'from django.urls import reverse\n'), ((6651, 6669), 'django.urls.reverse', 'reverse', (['"""page500"""'], {}), "('page500')\n", (6658, 6669), False, 'from django.urls import reverse\n'), ((7507, 7523), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (7514, 7523), False, 'from django.urls import reverse\n'), ((7557, 7606), 'django.urls.reverse', 'reverse', (['"""group"""'], {'kwargs': "{'slug': cls.group.slug}"}), "('group', kwargs={'slug': cls.group.slug})\n", (7564, 7606), False, 'from django.urls import reverse\n'), ((7659, 7717), 'django.urls.reverse', 'reverse', (['"""profile"""'], {'kwargs': "{'username': cls.user.username}"}), "('profile', kwargs={'username': cls.user.username})\n", (7666, 7717), False, 'from django.urls import reverse\n'), ((9446, 9462), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (9453, 9462), False, 'from django.urls import reverse\n'), ((9625, 9641), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (9632, 9641), False, 'from django.urls import reverse\n'), ((9772, 9788), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (9779, 9788), False, 'from django.urls import reverse\n'), ((10932, 10998), 'django.urls.reverse', 'reverse', (['"""profile_follow"""'], {'kwargs': "{'username': self.user.username}"}), "('profile_follow', kwargs={'username': self.user.username})\n", (10939, 10998), False, 'from django.urls import reverse\n'), ((11539, 11605), 'django.urls.reverse', 'reverse', (['"""profile_follow"""'], {'kwargs': "{'username': self.user.username}"}), "('profile_follow', kwargs={'username': self.user.username})\n", (11546, 11605), False, 'from django.urls import reverse\n'), ((11653, 11721), 'django.urls.reverse', 'reverse', (['"""profile_unfollow"""'], {'kwargs': "{'username': self.user.username}"}), "('profile_unfollow', kwargs={'username': self.user.username})\n", (11660, 11721), False, 'from django.urls import reverse\n'), ((12063, 12129), 'django.urls.reverse', 'reverse', (['"""profile_follow"""'], {'kwargs': "{'username': self.user.username}"}), "('profile_follow', kwargs={'username': self.user.username})\n", (12070, 12129), False, 'from django.urls import reverse\n'), ((12188, 12211), 'django.urls.reverse', 'reverse', (['"""follow_index"""'], {}), "('follow_index')\n", (12195, 12211), False, 'from django.urls import reverse\n'), ((12705, 12728), 'django.urls.reverse', 'reverse', (['"""follow_index"""'], {}), "('follow_index')\n", (12712, 12728), False, 'from django.urls import reverse\n'), ((13015, 13088), 'django.urls.reverse', 'reverse', (['"""profile_follow"""'], {'kwargs': "{'username': self.follow_user.username}"}), "('profile_follow', kwargs={'username': self.follow_user.username})\n", (13022, 13088), False, 'from django.urls import reverse\n'), ((13194, 13260), 
'django.urls.reverse', 'reverse', (['"""profile_follow"""'], {'kwargs': "{'username': self.user.username}"}), "('profile_follow', kwargs={'username': self.user.username})\n", (13201, 13260), False, 'from django.urls import reverse\n')]
|
import frappe
from frappe.utils import get_defaults
from frappe import _
from frappe.utils import cint, flt, cstr
from frappe.utils import num2words
#
# convert currency to words
#
def money_in_words(number, main_currency=None, fraction_currency=None):
try:
# note: `flt` returns 0 for invalid input and we don't want that
number = float(number)
except ValueError:
return ""
number = flt(number)
if number < 0:
return ""
d = get_defaults()
if not main_currency:
main_currency = d.get('currency', 'INR')
if not fraction_currency:
fraction_currency = frappe.db.get_value("Currency", main_currency, "fraction", cache=True) or _("Cent")
number_format = frappe.db.get_value("Currency", main_currency, "number_format", cache=True) or \
frappe.db.get_default("number_format") or "#,###.##"
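	# number_format_info (defined below) maps a format string to (decimal separator, group separator, fraction digits)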
fraction_length = get_number_format_info(number_format)[2]
n = "%.{0}f".format(fraction_length) % number
numbers = n.split('.')
main, fraction = numbers if len(numbers) > 1 else [n, '00']
if len(fraction) < fraction_length:
zeros = '0' * (fraction_length - len(fraction))
fraction += zeros
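	# "#,##,###.##" is the Indian grouping format, for which in_words falls back to the 'en_IN' locale (lakh/crore)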
in_million = True
	if number_format == "#,##,###.##":
		in_million = False
# 0.00
if main == '0' and fraction in ['00', '000']:
out = "{0} {1}".format(main_currency, _('Zero'))
# 0.XX
elif main == '0':
out = _(in_words(fraction, in_million).title()) + ' ' + fraction_currency
else:
out = main_currency + ' ' + _(in_words(main, in_million).title())
if cint(fraction):
out = f"{out} {_('with')} {fraction}/100" # {fraction_currency}"
return out # + ' ' + _('only.')
number_format_info = {
"#,###.##": (".", ",", 2),
"#.###,##": (",", ".", 2),
"# ###.##": (".", " ", 2),
"# ###,##": (",", " ", 2),
"#'###.##": (".", "'", 2),
"#, ###.##": (".", ", ", 2),
"#,##,###.##": (".", ",", 2),
"#,###.###": (".", ",", 3),
"#.###": ("", ".", 0),
"#,###": ("", ",", 0)
}
def get_number_format_info(format):
return number_format_info.get(format) or (".", ",", 2)
def in_words(integer, in_million=True):
locale = 'en_IN' if not in_million else frappe.local.lang
integer = int(integer)
try:
ret = num2words(integer, lang=locale)
except NotImplementedError:
ret = num2words(integer, lang='en')
except OverflowError:
ret = num2words(integer, lang='en')
return ret.replace('-', ' ')
|
[
"frappe.utils.flt",
"frappe.utils.num2words",
"frappe.db.get_value",
"frappe.db.get_default",
"frappe.utils.get_defaults",
"frappe.utils.cint",
"frappe._"
] |
[((423, 434), 'frappe.utils.flt', 'flt', (['number'], {}), '(number)\n', (426, 434), False, 'from frappe.utils import cint, flt, cstr\n'), ((481, 495), 'frappe.utils.get_defaults', 'get_defaults', ([], {}), '()\n', (493, 495), False, 'from frappe.utils import get_defaults\n'), ((734, 809), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Currency"""', 'main_currency', '"""number_format"""'], {'cache': '(True)'}), "('Currency', main_currency, 'number_format', cache=True)\n", (753, 809), False, 'import frappe\n'), ((835, 873), 'frappe.db.get_default', 'frappe.db.get_default', (['"""number_format"""'], {}), "('number_format')\n", (856, 873), False, 'import frappe\n'), ((2345, 2376), 'frappe.utils.num2words', 'num2words', (['integer'], {'lang': 'locale'}), '(integer, lang=locale)\n', (2354, 2376), False, 'from frappe.utils import num2words\n'), ((629, 699), 'frappe.db.get_value', 'frappe.db.get_value', (['"""Currency"""', 'main_currency', '"""fraction"""'], {'cache': '(True)'}), "('Currency', main_currency, 'fraction', cache=True)\n", (648, 699), False, 'import frappe\n'), ((703, 712), 'frappe._', '_', (['"""Cent"""'], {}), "('Cent')\n", (704, 712), False, 'from frappe import _\n'), ((1407, 1416), 'frappe._', '_', (['"""Zero"""'], {}), "('Zero')\n", (1408, 1416), False, 'from frappe import _\n'), ((1628, 1642), 'frappe.utils.cint', 'cint', (['fraction'], {}), '(fraction)\n', (1632, 1642), False, 'from frappe.utils import cint, flt, cstr\n'), ((2423, 2452), 'frappe.utils.num2words', 'num2words', (['integer'], {'lang': '"""en"""'}), "(integer, lang='en')\n", (2432, 2452), False, 'from frappe.utils import num2words\n'), ((2493, 2522), 'frappe.utils.num2words', 'num2words', (['integer'], {'lang': '"""en"""'}), "(integer, lang='en')\n", (2502, 2522), False, 'from frappe.utils import num2words\n'), ((1671, 1680), 'frappe._', '_', (['"""with"""'], {}), "('with')\n", (1672, 1680), False, 'from frappe import _\n')]
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.contrib import messages
from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models.functions import Trunc, ExtractHour
from django.utils import timezone
from .models import Cadastro, Recibo
from .forms import CadastroForm, ReciboForm
from .exportexcel import QuerysetToWorkbook
def index(request):
qtd_cadastro = Cadastro.objects.count()
total_vendas = Recibo.objects.all().aggregate(Sum('total'))
media_vendas = Recibo.objects.all().aggregate(Avg('total'))
hoje = timezone.now()
dt_inicio = hoje.replace(hour=0, minute=0, second=0, microsecond=0)
dt_final = hoje.replace(hour=23, minute=59, second=59, microsecond=0)
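    # registrations per hour for today (hour extracted from data_hora)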
cad_por_hora = Cadastro.objects.filter(data_hora__range=(dt_inicio, dt_final)) \
.annotate(hora=ExtractHour('data_hora')) \
.values('hora') \
.order_by('hora') \
.annotate(qtd=Count('id')) \
.values('hora', 'qtd')
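    # receipts per calendar day: count of receipts and sum of their totals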
vendas_por_dia = Recibo.objects.annotate(dia=Trunc('data_hora', 'day', output_field=DateTimeField())) \
.values('dia') \
.order_by('dia') \
.annotate(qtd=Count('id')) \
.annotate(total=Sum('total')) \
.values('dia', 'qtd', 'total')
return render(request,
'index.html',
{
'qtd_cadastro': qtd_cadastro,
'total_vendas': total_vendas,
'media_vendas': media_vendas,
'vendas_por_dia': vendas_por_dia,
'cad_por_hora': cad_por_hora
})
def cadastro(request):
max_senha = Cadastro.objects.all().aggregate(Max('senha'))
if max_senha['senha__max']:
nova_senha = max_senha['senha__max'] + 1
else:
nova_senha = 1
if request.method == 'POST':
form = CadastroForm(request.POST)
if form.is_valid():
novo = form.save()
messages.success(request, 'Cadastro adicionado com sucesso.')
return HttpResponseRedirect(reverse('cadastro'))
else:
form = CadastroForm()
cadastro_lista = Cadastro.objects.order_by('-senha')
paginator = Paginator(cadastro_lista, 25)
page = request.GET.get('page')
cadastros = paginator.get_page(page)
return render(request,
'cadastro.html',
{
'cadastros': cadastros,
'form': form,
'nova_senha': nova_senha
}
)
def cadastro_delete(request, cadastro_id):
try:
cadastro = Cadastro.objects.get(pk=cadastro_id)
try:
cadastro.delete()
except ProtectedError:
messages.warning(request, 'Cadastro {} já possui recibo e não pode ser deletado.'.format(cadastro_id))
return HttpResponseRedirect(reverse('cadastro'))
messages.success(request, 'Cadastro {} deletado com sucesso.'.format(cadastro_id))
return HttpResponseRedirect(reverse('cadastro'))
except Cadastro.DoesNotExist:
messages.warning(request, 'Cadastro {} não encontrado.'.format(cadastro_id))
return HttpResponseRedirect(reverse('cadastro'))
def recibo_lista(request):
recibos = Recibo.objects.order_by('-id')[:5]
return render(request,
'recibo_lista.html',
{
'recibos': recibos
})
def recibo_novo(request, senha):
try:
cadastro = Cadastro.objects.get(senha=senha)
except Cadastro.DoesNotExist:
messages.warning(request, 'Cadastro com a senha {} não encontrado.'.format(senha))
return HttpResponseRedirect(reverse('recibo_lista'))
existe_rec = Recibo.objects.filter(cadastro=cadastro)
if len(existe_rec) > 0:
messages.warning(request, 'Cadastro com a senha {} já possui o recibo {}.'.format(senha, existe_rec[0].id))
return HttpResponseRedirect(reverse('recibo_lista'))
soma_compras = Recibo.objects.filter(cadastro__cpf=cadastro.cpf).aggregate(total_compras=Sum('total'))
if soma_compras['total_compras']:
if soma_compras['total_compras'] > 700:
cor_alerta = 'red'
else:
cor_alerta = 'black'
else:
cor_alerta = 'black'
if request.method == 'POST':
form = ReciboForm(request.POST)
if form.is_valid():
novo = form.save(commit=False)
novo.cadastro = cadastro
            novo.total = (novo.brinquedo_vl + novo.bazar_vl + novo.eletro_vl
                          + novo.relogio_vl + novo.musical_vl
                          + novo.vestuario_vl + novo.perfume_vl)
novo.save()
messages.success(request, 'Recibo adicionado com sucesso.')
return HttpResponseRedirect(reverse('recibo_lista'))
else:
form = ReciboForm()
return render(request,
'recibo_novo.html',
{
'form': form,
'cadastro': cadastro,
'soma_compras': soma_compras,
'cor_alerta': cor_alerta
}
)
def recibo_delete(request, recibo_id):
try:
recibo = Recibo.objects.get(pk=recibo_id)
recibo.delete()
messages.success(request, 'Recibo {} deletado com sucesso.'.format(recibo_id))
return HttpResponseRedirect(reverse('recibo_lista'))
except Cadastro.DoesNotExist:
messages.warning(request, 'Recibo {} não encontrado.'.format(recibo_id))
return HttpResponseRedirect(reverse('recibo_lista'))
def recibo_imprimir(request, recibo_id):
try:
recibo = Recibo.objects.get(pk=recibo_id)
except Recibo.DoesNotExist:
messages.warning(request, 'Recibo {} não encontrado.'.format(recibo_id))
return HttpResponseRedirect(reverse('recibo_lista'))
return render(request,
'recibo_imprimir.html',
{
'rec': recibo
})
def export_excel(request):
qs = Recibo.objects.all()
columns = [
("Recibo", 10, 'id'),
("Data/Hora", 20, 'data_hora'),
("Senha", 10, 'cadastro.senha'),
("CPF", 20, 'cadastro.cpf'),
("Nome", 35, 'cadastro.nome'),
("E-mail", 30, 'cadastro.email'),
("Qt Brinquedo", 10, 'brinquedo_qt'),
("Vl Brinquedo", 10, 'brinquedo_vl'),
("Qt Bazar", 10, 'bazar_qt'),
("Vl Bazar", 10, 'bazar_vl'),
("Qt Eletro", 10, 'eletro_qt'),
("Vl Eletro", 10, 'eletro_vl'),
("Qt Relogio", 10, 'relogio_qt'),
("Vl Relogio", 10, 'relogio_vl'),
("Qt Musical", 10, 'musical_qt'),
("Vl Musical", 10, 'musical_vl'),
("Qt Vestuario", 10, 'vestuario_qt'),
("Vl Vestuario", 10, 'vestuario_vl'),
("Qt Perfume", 10, 'perfume_qt'),
("Vl Perfume", 10, 'perfume_vl'),
("Vl Total", 10, 'total')
]
qtw = QuerysetToWorkbook(qs, columns, filename='Recibos')
qtw.build_workbook()
return qtw.response()
|
[
"django.db.models.Max",
"django.utils.timezone.now",
"django.db.models.DateTimeField",
"django.db.models.Sum",
"django.urls.reverse",
"django.core.paginator.Paginator",
"django.db.models.functions.ExtractHour",
"django.shortcuts.render",
"django.contrib.messages.success",
"django.db.models.Avg",
"django.db.models.Count"
] |
[((726, 740), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (738, 740), False, 'from django.utils import timezone\n'), ((1436, 1629), 'django.shortcuts.render', 'render', (['request', '"""index.html"""', "{'qtd_cadastro': qtd_cadastro, 'total_vendas': total_vendas, 'media_vendas':\n media_vendas, 'vendas_por_dia': vendas_por_dia, 'cad_por_hora':\n cad_por_hora}"], {}), "(request, 'index.html', {'qtd_cadastro': qtd_cadastro, 'total_vendas':\n total_vendas, 'media_vendas': media_vendas, 'vendas_por_dia':\n vendas_por_dia, 'cad_por_hora': cad_por_hora})\n", (1442, 1629), False, 'from django.shortcuts import render\n'), ((2373, 2402), 'django.core.paginator.Paginator', 'Paginator', (['cadastro_lista', '(25)'], {}), '(cadastro_lista, 25)\n', (2382, 2402), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((2491, 2593), 'django.shortcuts.render', 'render', (['request', '"""cadastro.html"""', "{'cadastros': cadastros, 'form': form, 'nova_senha': nova_senha}"], {}), "(request, 'cadastro.html', {'cadastros': cadastros, 'form': form,\n 'nova_senha': nova_senha})\n", (2497, 2593), False, 'from django.shortcuts import render\n'), ((3490, 3548), 'django.shortcuts.render', 'render', (['request', '"""recibo_lista.html"""', "{'recibos': recibos}"], {}), "(request, 'recibo_lista.html', {'recibos': recibos})\n", (3496, 3548), False, 'from django.shortcuts import render\n'), ((5017, 5150), 'django.shortcuts.render', 'render', (['request', '"""recibo_novo.html"""', "{'form': form, 'cadastro': cadastro, 'soma_compras': soma_compras,\n 'cor_alerta': cor_alerta}"], {}), "(request, 'recibo_novo.html', {'form': form, 'cadastro': cadastro,\n 'soma_compras': soma_compras, 'cor_alerta': cor_alerta})\n", (5023, 5150), False, 'from django.shortcuts import render\n'), ((6032, 6088), 'django.shortcuts.render', 'render', (['request', '"""recibo_imprimir.html"""', "{'rec': recibo}"], {}), "(request, 'recibo_imprimir.html', {'rec': recibo})\n", (6038, 6088), False, 'from django.shortcuts import render\n'), ((636, 648), 'django.db.models.Sum', 'Sum', (['"""total"""'], {}), "('total')\n", (639, 648), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((700, 712), 'django.db.models.Avg', 'Avg', (['"""total"""'], {}), "('total')\n", (703, 712), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((1861, 1873), 'django.db.models.Max', 'Max', (['"""senha"""'], {}), "('senha')\n", (1864, 1873), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((2136, 2197), 'django.contrib.messages.success', 'messages.success', (['request', '"""Cadastro adicionado com sucesso."""'], {}), "(request, 'Cadastro adicionado com sucesso.')\n", (2152, 2197), False, 'from django.contrib import messages\n'), ((3204, 3223), 'django.urls.reverse', 'reverse', (['"""cadastro"""'], {}), "('cadastro')\n", (3211, 3223), False, 'from django.urls import reverse\n'), ((4149, 4172), 'django.urls.reverse', 'reverse', (['"""recibo_lista"""'], {}), "('recibo_lista')\n", (4156, 4172), False, 'from django.urls import reverse\n'), ((4268, 4280), 'django.db.models.Sum', 'Sum', (['"""total"""'], {}), "('total')\n", (4271, 4280), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((4842, 4901), 'django.contrib.messages.success', 'messages.success', (['request', '"""Recibo adicionado com sucesso."""'], {}), "(request, 'Recibo adicionado com sucesso.')\n", (4858, 4901), False, 'from django.contrib import messages\n'), ((5543, 5566), 'django.urls.reverse', 'reverse', (['"""recibo_lista"""'], {}), "('recibo_lista')\n", (5550, 5566), False, 'from django.urls import reverse\n'), ((2238, 2257), 'django.urls.reverse', 'reverse', (['"""cadastro"""'], {}), "('cadastro')\n", (2245, 2257), False, 'from django.urls import reverse\n'), ((3380, 3399), 'django.urls.reverse', 'reverse', (['"""cadastro"""'], {}), "('cadastro')\n", (3387, 3399), False, 'from django.urls import reverse\n'), ((3885, 3908), 'django.urls.reverse', 'reverse', (['"""recibo_lista"""'], {}), "('recibo_lista')\n", (3892, 3908), False, 'from django.urls import reverse\n'), ((4942, 4965), 'django.urls.reverse', 'reverse', (['"""recibo_lista"""'], {}), "('recibo_lista')\n", (4949, 4965), False, 'from django.urls import reverse\n'), ((5719, 5742), 'django.urls.reverse', 'reverse', (['"""recibo_lista"""'], {}), "('recibo_lista')\n", (5726, 5742), False, 'from django.urls import reverse\n'), ((5995, 6018), 'django.urls.reverse', 'reverse', (['"""recibo_lista"""'], {}), "('recibo_lista')\n", (6002, 6018), False, 'from django.urls import reverse\n'), ((1101, 1112), 'django.db.models.Count', 'Count', (['"""id"""'], {}), "('id')\n", (1106, 1112), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((1369, 1381), 'django.db.models.Sum', 'Sum', (['"""total"""'], {}), "('total')\n", (1372, 1381), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((3056, 3075), 'django.urls.reverse', 'reverse', (['"""cadastro"""'], {}), "('cadastro')\n", (3063, 3075), False, 'from django.urls import reverse\n'), ((1330, 1341), 'django.db.models.Count', 'Count', (['"""id"""'], {}), "('id')\n", (1335, 1341), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n'), ((997, 1021), 'django.db.models.functions.ExtractHour', 'ExtractHour', (['"""data_hora"""'], {}), "('data_hora')\n", (1008, 1021), False, 'from django.db.models.functions import Trunc, ExtractHour\n'), ((1236, 1251), 'django.db.models.DateTimeField', 'DateTimeField', ([], {}), '()\n', (1249, 1251), False, 'from django.db.models import Avg, Max, Sum, Count, ProtectedError, DateTimeField\n')]
|
from pathlib import Path
from setuptools import setup, find_packages
long_description = Path("README.md").read_text()
reqs = Path("requirements.txt").read_text().strip().splitlines()
pkg = "google_takeout_parser"
setup(
name=pkg,
version="0.1.0",
url="https://github.com/seanbreckenridge/google_takeout_parser",
author="<NAME>",
author_email="<EMAIL>",
description=(
"""Parses data out of your Google Takeout (History, Activity, Youtube, Locations, etc...)"""
),
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
packages=find_packages(
include=["google_takeout_parser", "google_takeout_parser.parse_html"]
),
install_requires=reqs,
package_data={pkg: ["py.typed"]},
zip_safe=False,
keywords="google data parsing",
python_requires=">=3.7",
entry_points={
"console_scripts": [
"google_takeout_parser = google_takeout_parser.__main__:main"
]
},
extras_require={
"testing": [
"pytest",
"mypy",
"flake8",
],
        ':python_version<"3.8"': [
"typing_extensions",
],
},
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
)
|
[
"pathlib.Path",
"setuptools.find_packages"
] |
[((89, 106), 'pathlib.Path', 'Path', (['"""README.md"""'], {}), "('README.md')\n", (93, 106), False, 'from pathlib import Path\n'), ((623, 711), 'setuptools.find_packages', 'find_packages', ([], {'include': "['google_takeout_parser', 'google_takeout_parser.parse_html']"}), "(include=['google_takeout_parser',\n 'google_takeout_parser.parse_html'])\n", (636, 711), False, 'from setuptools import setup, find_packages\n'), ((126, 150), 'pathlib.Path', 'Path', (['"""requirements.txt"""'], {}), "('requirements.txt')\n", (130, 150), False, 'from pathlib import Path\n')]
|
"""
Question
--------
When comparing the core metabolism of Archaea and Bacteria, what differences and similarities occur?
Method
------
- build Bacteria clade
- build Archaea clade
- REPEAT for varying majority-percentages:
- overlap core metabolisms and print amount of EC numbers inside the intersection and falling off either side
- remove wildcard EC numbers
- END
- build clade pair
- export unified metabolism, coloured by only-Archaea/both/only-Bacteria
Result
------
::
Maj. % Bacteria both Archaea
100%: 0 1 7
90%: 50 40 36
80%: 83 67 47
70%: 103 103 59
60%: 125 129 58
50%: 153 163 72
40%: 191 192 75
30%: 235 229 90
20%: 304 279 83
10%: 400 386 87
1%: 631 653 91
See bacteria_vs_archaea.jpeg
Conclusion
----------
Bacteria and Archaea always share a significant amount of EC numbers, but never all of them. The much bigger group of Bacteria also has many more EC numbers which never occur in Archaea.
This might be because there are more known Bacteria organisms than known Archaea organisms, i.e. a statistical skew. Or it might be because Bacteria are, as a group, more versatile in habitat than Archaea.
The exported graph comparing Bacteria and Archaea directly (at 80% majority) shows several regions (more or less complete pathways) which only occur in either of the clades' core metabolisms.
This does not mean they do not occur in any individual organism of the other clade!
For example:
Only in Bacteria: 00061 Fatty acid biosynthesis and 00550 Peptidoglycan biosynthesis
Only in Archaea: 00790 Folate biosynthesis and 00900 Terpenoid backbone biosynthesis
Apart from these regions standing out, both clades seem to have evolved different ways of providing redundancy to their common metabolism.
"""
from FEV_KEGG.Drawing import Export
from FEV_KEGG.KEGG.File import cache
from FEV_KEGG.Evolution.Clade import CladePair, Clade
from FEV_KEGG.Graph.Elements import EcNumber
@cache(folder_path='experiments/41', file_name='bacteria_clade')
def getBacteriaClade():
bacteriaClade = Clade('/Bacteria')
# pre-fetch collective metabolism into memory
bacteriaClade.collectiveMetabolism(excludeMultifunctionalEnzymes=True)
return bacteriaClade
@cache(folder_path='experiments/41', file_name='archaea_clade')
def getArchaeaClade():
archaeaClade = Clade('/Archaea')
# pre-fetch collective metabolism into memory
archaeaClade.collectiveMetabolism(excludeMultifunctionalEnzymes=True)
return archaeaClade
if __name__ == '__main__':
output = ['Maj. %\tBacteria\tboth\tArchaea']
#- build Bacteria clade
bacteriaClade = getBacteriaClade()
#- build Archaea clade
archaeaClade = getArchaeaClade()
#- REPEAT for varying majority-percentages:
    for percentage in [100, 90, 80, 70, 60, 50, 40, 30, 20, 10, 1]:
#- overlap core metabolisms and print amount of EC numbers inside the intersection and falling off either side
bacteriaECs = bacteriaClade.coreMetabolism(percentage).getECs()
archaeaECs = archaeaClade.coreMetabolism(percentage).getECs()
bothECs = bacteriaECs.intersection(archaeaECs)
onlyBacteriaECs = bacteriaECs.difference(archaeaECs)
onlyArchaeaECs = archaeaECs.difference(bacteriaECs)
#- remove wildcard EC numbers
onlyBacteriaECs = EcNumber.removeWildcards(onlyBacteriaECs)
bothECs = EcNumber.removeWildcards(bothECs)
onlyArchaeaECs = EcNumber.removeWildcards(onlyArchaeaECs)
output.append( str(percentage) + '%:\t' + str(len(onlyBacteriaECs)) + '\t' + str(len(bothECs)) + '\t' + str(len(onlyArchaeaECs)) )
for line in output:
print(line)
#- build clade pair
cladePair = CladePair(bacteriaClade, archaeaClade)
#- export unified metabolism, coloured by only Archaea/both/only Bacteria
    unifiedEcGraph = cladePair.unifiedMetabolism(colour=True)
    Export.forCytoscape(unifiedEcGraph, file='experiments/41/bacteria_vs_archaea', inCacheFolder=True)
|
[
"FEV_KEGG.Evolution.Clade.Clade",
"FEV_KEGG.KEGG.File.cache",
"FEV_KEGG.Drawing.Export.forCytoscape",
"FEV_KEGG.Evolution.Clade.CladePair",
"FEV_KEGG.Graph.Elements.EcNumber.removeWildcards"
] |
[((2072, 2135), 'FEV_KEGG.KEGG.File.cache', 'cache', ([], {'folder_path': '"""experiments/41"""', 'file_name': '"""bacteria_clade"""'}), "(folder_path='experiments/41', file_name='bacteria_clade')\n", (2077, 2135), False, 'from FEV_KEGG.KEGG.File import cache\n'), ((2351, 2413), 'FEV_KEGG.KEGG.File.cache', 'cache', ([], {'folder_path': '"""experiments/41"""', 'file_name': '"""archaea_clade"""'}), "(folder_path='experiments/41', file_name='archaea_clade')\n", (2356, 2413), False, 'from FEV_KEGG.KEGG.File import cache\n'), ((2180, 2198), 'FEV_KEGG.Evolution.Clade.Clade', 'Clade', (['"""/Bacteria"""'], {}), "('/Bacteria')\n", (2185, 2198), False, 'from FEV_KEGG.Evolution.Clade import CladePair, Clade\n'), ((2456, 2473), 'FEV_KEGG.Evolution.Clade.Clade', 'Clade', (['"""/Archaea"""'], {}), "('/Archaea')\n", (2461, 2473), False, 'from FEV_KEGG.Evolution.Clade import CladePair, Clade\n'), ((3891, 3929), 'FEV_KEGG.Evolution.Clade.CladePair', 'CladePair', (['bacteriaClade', 'archaeaClade'], {}), '(bacteriaClade, archaeaClade)\n', (3900, 3929), False, 'from FEV_KEGG.Evolution.Clade import CladePair, Clade\n'), ((4076, 4179), 'FEV_KEGG.Drawing.Export.forCytoscape', 'Export.forCytoscape', (['unifiedEcGraph'], {'file': '"""experiments/41/bacteria_vs_archaea"""', 'inCacheFolder': '(True)'}), "(unifiedEcGraph, file=\n 'experiments/41/bacteria_vs_archaea', inCacheFolder=True)\n", (4095, 4179), False, 'from FEV_KEGG.Drawing import Export\n'), ((3489, 3530), 'FEV_KEGG.Graph.Elements.EcNumber.removeWildcards', 'EcNumber.removeWildcards', (['onlyBacteriaECs'], {}), '(onlyBacteriaECs)\n', (3513, 3530), False, 'from FEV_KEGG.Graph.Elements import EcNumber\n'), ((3549, 3582), 'FEV_KEGG.Graph.Elements.EcNumber.removeWildcards', 'EcNumber.removeWildcards', (['bothECs'], {}), '(bothECs)\n', (3573, 3582), False, 'from FEV_KEGG.Graph.Elements import EcNumber\n'), ((3608, 3648), 'FEV_KEGG.Graph.Elements.EcNumber.removeWildcards', 'EcNumber.removeWildcards', (['onlyArchaeaECs'], {}), '(onlyArchaeaECs)\n', (3632, 3648), False, 'from FEV_KEGG.Graph.Elements import EcNumber\n')]
|
# -*- coding: utf-8 -*-
import math
from collections import OrderedDict
import flask
import pandas as pd
import netCDF4
import numpy as np
from bokeh.embed import components
from bokeh.resources import INLINE
from bokeh.templates import RESOURCES
from bokeh.util.string import encode_utf8
from bokeh.models import DatetimeTickFormatter, ColumnDataSource, HoverTool, Plot, Range1d
from bokeh.palettes import RdBu11
from bokeh.models.glyphs import Text, Rect
from bokeh.plotting import figure, show, output_notebook, hplot, vplot
import utils.world_countries as wc
from utils.colormap import RGBAColorMapper
from viz2 import climate_map, timeseries, legend, title, get_slice
app = flask.Flask(__name__)
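# map scalar values in the range [-6, 6] onto the 11-colour diverging RdBu palette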
colormap = RGBAColorMapper(-6, 6, RdBu11)
@app.route("/")
def index():
# Create layout
c_map = climate_map()
ts = timeseries()
l = legend()
t = title()
map_legend = hplot(c_map, l)
layout = vplot(t, map_legend, ts)
plot_resources = RESOURCES.render(
js_raw=INLINE.js_raw,
css_raw=INLINE.css_raw,
js_files=INLINE.js_files,
css_files=INLINE.css_files,
)
script, div = components(layout, INLINE)
html = flask.render_template(
'embed.html',
plot_script=script,
plot_div=div,
plot_resources=plot_resources,
)
return encode_utf8(html)
if __name__ == "__main__":
app.run(debug=True)
|
[
"bokeh.util.string.encode_utf8",
"viz2.timeseries",
"viz2.legend",
"bokeh.embed.components",
"flask.Flask",
"utils.colormap.RGBAColorMapper",
"bokeh.templates.RESOURCES.render",
"bokeh.plotting.vplot",
"bokeh.plotting.hplot",
"flask.render_template",
"viz2.climate_map",
"viz2.title"
] |
[((687, 708), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (698, 708), False, 'import flask\n'), ((721, 751), 'utils.colormap.RGBAColorMapper', 'RGBAColorMapper', (['(-6)', '(6)', 'RdBu11'], {}), '(-6, 6, RdBu11)\n', (736, 751), False, 'from utils.colormap import RGBAColorMapper\n'), ((815, 828), 'viz2.climate_map', 'climate_map', ([], {}), '()\n', (826, 828), False, 'from viz2 import climate_map, timeseries, legend, title, get_slice\n'), ((838, 850), 'viz2.timeseries', 'timeseries', ([], {}), '()\n', (848, 850), False, 'from viz2 import climate_map, timeseries, legend, title, get_slice\n'), ((859, 867), 'viz2.legend', 'legend', ([], {}), '()\n', (865, 867), False, 'from viz2 import climate_map, timeseries, legend, title, get_slice\n'), ((876, 883), 'viz2.title', 'title', ([], {}), '()\n', (881, 883), False, 'from viz2 import climate_map, timeseries, legend, title, get_slice\n'), ((902, 917), 'bokeh.plotting.hplot', 'hplot', (['c_map', 'l'], {}), '(c_map, l)\n', (907, 917), False, 'from bokeh.plotting import figure, show, output_notebook, hplot, vplot\n'), ((931, 955), 'bokeh.plotting.vplot', 'vplot', (['t', 'map_legend', 'ts'], {}), '(t, map_legend, ts)\n', (936, 955), False, 'from bokeh.plotting import figure, show, output_notebook, hplot, vplot\n'), ((978, 1099), 'bokeh.templates.RESOURCES.render', 'RESOURCES.render', ([], {'js_raw': 'INLINE.js_raw', 'css_raw': 'INLINE.css_raw', 'js_files': 'INLINE.js_files', 'css_files': 'INLINE.css_files'}), '(js_raw=INLINE.js_raw, css_raw=INLINE.css_raw, js_files=\n INLINE.js_files, css_files=INLINE.css_files)\n', (994, 1099), False, 'from bokeh.templates import RESOURCES\n'), ((1152, 1178), 'bokeh.embed.components', 'components', (['layout', 'INLINE'], {}), '(layout, INLINE)\n', (1162, 1178), False, 'from bokeh.embed import components\n'), ((1190, 1294), 'flask.render_template', 'flask.render_template', (['"""embed.html"""'], {'plot_script': 'script', 'plot_div': 'div', 'plot_resources': 'plot_resources'}), "('embed.html', plot_script=script, plot_div=div,\n plot_resources=plot_resources)\n", (1211, 1294), False, 'import flask\n'), ((1341, 1358), 'bokeh.util.string.encode_utf8', 'encode_utf8', (['html'], {}), '(html)\n', (1352, 1358), False, 'from bokeh.util.string import encode_utf8\n')]
|
from rest_framework import status, viewsets
from rest_framework.decorators import api_view, permission_classes
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from jobs.permissions import IsEmployee
from jobs.serializers import ApplicantSerializer, JobSerializer
from jobs.models import Applicant, Job
from .serializers import *
from django.core import serializers
from django.http.response import HttpResponse
class JobViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = JobSerializer
# queryset = serializer_class.Meta.model.objects.filter(filled=False)
queryset = serializer_class.Meta.model.objects.all()
permission_classes = [AllowAny]
class SearchApiView(ListAPIView):
serializer_class = JobSerializer
permission_classes = [AllowAny]
def get_queryset(self):
if 'location' in self.request.GET and 'position' in self.request.GET:
return self.serializer_class.Meta.model.objects.filter(location__icontains=self.request.GET['location'],
title__icontains=self.request.GET['position'])
else:
# return self.serializer_class.Meta.model.objects.filter(filled=False)
return self.serializer_class.Meta.model.objects.all()
class SaveJobApiView(CreateAPIView):
serializer_class = ApplicantSerializer
http_method_names = [u'post']
permission_classes = [IsAuthenticated, IsEmployee]
# def perform_create(self, serializer):
# serializer.save(user=self.request.user)
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
class SavedJobsAPIView(ListAPIView):
serializer_class = JobSerializer
permission_classes = [IsAuthenticated, IsEmployee]
def get_queryset(self):
saved_jobs_id = list(Applicant.objects.filter(user=self.request.user).values_list('job_id', flat=True))
return Job.objects.filter(id__in=saved_jobs_id)
@api_view(['GET'])
@permission_classes([IsAuthenticated, IsEmployee])
def already_saved_job_api_view(request, job_id):
saved_job_id = Applicant.objects.filter(job_id=job_id).values_list('job_id')
data = serializers.serialize("json", Job.objects.filter(id__in=saved_job_id))
return HttpResponse(data, content_type="application/json")
|
[
"jobs.models.Applicant.objects.filter",
"jobs.models.Job.objects.filter",
"rest_framework.response.Response",
"rest_framework.decorators.permission_classes",
"rest_framework.decorators.api_view",
"django.http.response.HttpResponse"
] |
[((2522, 2539), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (2530, 2539), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2541, 2590), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated, IsEmployee]'], {}), '([IsAuthenticated, IsEmployee])\n', (2559, 2590), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2815, 2866), 'django.http.response.HttpResponse', 'HttpResponse', (['data'], {'content_type': '"""application/json"""'}), "(data, content_type='application/json')\n", (2827, 2866), False, 'from django.http.response import HttpResponse\n'), ((2116, 2190), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED', 'headers': 'headers'}), '(serializer.data, status=status.HTTP_201_CREATED, headers=headers)\n', (2124, 2190), False, 'from rest_framework.response import Response\n'), ((2478, 2518), 'jobs.models.Job.objects.filter', 'Job.objects.filter', ([], {'id__in': 'saved_jobs_id'}), '(id__in=saved_jobs_id)\n', (2496, 2518), False, 'from jobs.models import Applicant, Job\n'), ((2762, 2801), 'jobs.models.Job.objects.filter', 'Job.objects.filter', ([], {'id__in': 'saved_job_id'}), '(id__in=saved_job_id)\n', (2780, 2801), False, 'from jobs.models import Applicant, Job\n'), ((2659, 2698), 'jobs.models.Applicant.objects.filter', 'Applicant.objects.filter', ([], {'job_id': 'job_id'}), '(job_id=job_id)\n', (2683, 2698), False, 'from jobs.models import Applicant, Job\n'), ((2380, 2428), 'jobs.models.Applicant.objects.filter', 'Applicant.objects.filter', ([], {'user': 'self.request.user'}), '(user=self.request.user)\n', (2404, 2428), False, 'from jobs.models import Applicant, Job\n')]
|
#!/usr/bin/env python2
from argparse import ArgumentParser
from markovmusic.player import Player
parser = ArgumentParser()
parser.add_argument('--input',
default='input/bach', metavar='PATH',
help='MIDI input, either a single file or a directory')
parser.add_argument('--chain-len',
type=int, default=4, metavar='LENGTH',
help='Length of Markov chains to generate')
parser.add_argument('--time-scale', metavar='SCALE',
type=int, default=1,
help='Temporal scale')
parser.add_argument('--port',
default=None, metavar='NAME',
help='Output MIDI port name')
parser.add_argument('--list-ports',
action='store_true',
help='List available MIDI ports')
player = Player(parser.parse_args())
player.run()
|
[
"argparse.ArgumentParser"
] |
[((108, 124), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (122, 124), False, 'from argparse import ArgumentParser\n')]
|
from JumpScale import j
def cb():
from .BackupFactory import BackupFactory
return BackupFactory
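# register a lazy 'backup' client factory: BackupFactory is only imported on first use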
j.base.loader.makeAvailable(j, 'clients')
j.clients._register('backup', cb)
|
[
"JumpScale.j.base.loader.makeAvailable",
"JumpScale.j.clients._register"
] |
[((106, 147), 'JumpScale.j.base.loader.makeAvailable', 'j.base.loader.makeAvailable', (['j', '"""clients"""'], {}), "(j, 'clients')\n", (133, 147), False, 'from JumpScale import j\n'), ((149, 182), 'JumpScale.j.clients._register', 'j.clients._register', (['"""backup"""', 'cb'], {}), "('backup', cb)\n", (168, 182), False, 'from JumpScale import j\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
import geopandas as gpd
from shapely.geometry import Polygon
from geoedfframework.utils.GeoEDFError import GeoEDFError
""" Helper module implementing various geometry operations
"""
def geom_distance(lat1, lon1, lat2, lon2):
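    # great-circle distance in kilometres between two (lat, lon) points, via the haversine formula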
try:
R = 6378.137 # Radius of earth in KM
dLat = lat2 * math.pi / 180 - lat1 * math.pi / 180
dLon = lon2 * math.pi / 180 - lon1 * math.pi / 180
a = math.sin(dLat/2) * math.sin(dLat/2) + math.cos(lat1 * math.pi / 180) * math.cos(lat2 * math.pi / 180) * math.sin(dLon/2) * math.sin(dLon/2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
d = R * c
except:
raise GeoEDFError('Could not determine geometry distance')
return d # Km
def geom_diagonal(geom):
try:
lon1 = geom.total_bounds[0]
lat1 = geom.total_bounds[1]
lon2 = geom.total_bounds[2]
lat2 = geom.total_bounds[3]
d = geom_distance(lat1, lon1, lat2, lon2)
except:
raise GeoEDFError('Could not determine geometry diagonal')
return d # Km
def geom_extent(geom):
try:
d2 = geom_width(geom)+geom_height(geom)
except:
raise GeoEDFError('Could not determine geometry extent')
return d2 # Km
def geom_height(geom):
try:
lon1 = geom.total_bounds[0]
lat1 = geom.total_bounds[1]
lon2 = geom.total_bounds[2]
lat2 = geom.total_bounds[3]
h = geom_distance(lat1, lon1, lat2, lon1)
except:
raise GeoEDFError('Could not determine geometry height')
return h # Km
def geom_width(geom):
try:
lon1 = geom.total_bounds[0]
lat1 = geom.total_bounds[1]
lon2 = geom.total_bounds[2]
lat2 = geom.total_bounds[3]
w = geom_distance(lat1, lon1, lat1, lon2)
except:
raise GeoEDFError('Could not determine geometry width')
return w # Km
def geom_bbox(geom):
try:
polygon = gpd.GeoDataFrame(gpd.GeoSeries(geom.envelope), columns=['geometry'])
except:
raise GeoEDFError('Could not determine geometry bbox')
return polygon
# In case CRS is different
def geom_bbox2(geom):
try:
lon_point_list = [geom.total_bounds[0],geom.total_bounds[2],geom.total_bounds[2],geom.total_bounds[0],geom.total_bounds[0]]
lat_point_list = [geom.total_bounds[1],geom.total_bounds[1],geom.total_bounds[3],geom.total_bounds[3],geom.total_bounds[1]]
polygon_geom = Polygon(zip(lon_point_list, lat_point_list))
crs = {'init': 'epsg:4326'}
polygon = gpd.GeoDataFrame(index=[0], crs=crs, geometry=[polygon_geom])
except:
raise GeoEDFError('Could not determine geometry bbox2')
return polygon
# Try using the area of the total_bounds polygon in both degrees and meters to generate an approximate "conversion" factor
def geom_area(geom):
try:
factor = geom_width(geom)*geom_height(geom)/geom_bbox(geom).area
area = factor*geom.area
except:
raise GeoEDFError('Could not determine geometry area')
return area # Km^2
# Use a cartesian projection coordinate system to get true area
# *** Currently crashes kernel ***
def geom_area2(geom):
try:
geom_m = geom.to_crs(epsg=3857) # or 3395 (WGS 84 compliant)
# May need to use explicit definition for 3395:
# proj4.defs("EPSG:3395","+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs")
a = geom_m.area/10**6
except:
raise GeoEDFError('Could not determine geometry area2')
return a # Km^2
|
[
"geopandas.GeoSeries",
"math.sqrt",
"math.sin",
"geopandas.GeoDataFrame",
"math.cos",
"geoedfframework.utils.GeoEDFError.GeoEDFError"
] |
[((2592, 2653), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', ([], {'index': '[0]', 'crs': 'crs', 'geometry': '[polygon_geom]'}), '(index=[0], crs=crs, geometry=[polygon_geom])\n', (2608, 2653), True, 'import geopandas as gpd\n'), ((713, 765), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry distance"""'], {}), "('Could not determine geometry distance')\n", (724, 765), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((1040, 1092), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry diagonal"""'], {}), "('Could not determine geometry diagonal')\n", (1051, 1092), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((1219, 1269), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry extent"""'], {}), "('Could not determine geometry extent')\n", (1230, 1269), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((1543, 1593), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry height"""'], {}), "('Could not determine geometry height')\n", (1554, 1593), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((1865, 1914), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry width"""'], {}), "('Could not determine geometry width')\n", (1876, 1914), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((2000, 2028), 'geopandas.GeoSeries', 'gpd.GeoSeries', (['geom.envelope'], {}), '(geom.envelope)\n', (2013, 2028), True, 'import geopandas as gpd\n'), ((2078, 2126), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry bbox"""'], {}), "('Could not determine geometry bbox')\n", (2089, 2126), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((2684, 2733), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry bbox2"""'], {}), "('Could not determine geometry bbox2')\n", (2695, 2733), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((3039, 3087), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry area"""'], {}), "('Could not determine geometry area')\n", (3050, 3087), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((3541, 3590), 'geoedfframework.utils.GeoEDFError.GeoEDFError', 'GeoEDFError', (['"""Could not determine geometry area2"""'], {}), "('Could not determine geometry area2')\n", (3552, 3590), False, 'from geoedfframework.utils.GeoEDFError import GeoEDFError\n'), ((472, 490), 'math.sin', 'math.sin', (['(dLat / 2)'], {}), '(dLat / 2)\n', (480, 490), False, 'import math\n'), ((491, 509), 'math.sin', 'math.sin', (['(dLat / 2)'], {}), '(dLat / 2)\n', (499, 509), False, 'import math\n'), ((595, 613), 'math.sin', 'math.sin', (['(dLon / 2)'], {}), '(dLon / 2)\n', (603, 613), False, 'import math\n'), ((639, 651), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (648, 651), False, 'import math\n'), ((653, 669), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (662, 669), False, 'import math\n'), ((576, 594), 'math.sin', 'math.sin', (['(dLon / 2)'], {}), '(dLon / 2)\n', (584, 594), False, 'import math\n'), ((510, 540), 'math.cos', 'math.cos', (['(lat1 * math.pi / 180)'], {}), '(lat1 * math.pi / 180)\n', (518, 540), False, 'import math\n'), ((543, 573), 'math.cos', 'math.cos', (['(lat2 * math.pi / 180)'], {}), '(lat2 * math.pi / 180)\n', (551, 573), False, 'import math\n')]
|
# --------------
# Importing header files
import numpy as np
# Path of the file has been stored in variable called 'path'
#New record
new_record=[[50, 9, 4, 1, 0, 0, 40, 0]]
#Code starts here
data=np.genfromtxt(path,delimiter=",",skip_header=1)
##print(data)
census=np.concatenate((data,new_record))
#print(census.shape)
#print(census)
# --------------
#Code starts here
age=census[:,0]
##print(age)
max_age=np.max(age)
##print(max_age)
min_age=np.min(age)
##print(min_age)
age_mean=np.mean(age)
##print(age_mean)
age_std=np.std(age)
##print(age_std)
# --------------
import numpy as np
#Code starts here
##race=census[:,2]
##print(race)
race_0=census[census[:,2]==0]
##print(race_0)
race_1=census[census[:,2]==1]
##print(race_1)
race_2=census[census[:,2]==2]
##print(race_2)
race_3=census[census[:,2]==3]
##print(race_3)
race_4=census[census[:,2]==4]
##print(race_4)
len_0=len(race_0)
print(len_0)
len_1=len(race_1)
len_2=len(race_2)
len_3=len(race_3)
len_4=len(race_4)
print(len_1)
print(len_2)
print(len_3)
print(len_4)
minority=np.array([len_0,len_1,len_2,len_3,len_4])
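#argmin below picks the index of the smallest count, i.e. the least-represented race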
minority_race=minority.argmin()
print(minority_race)
# --------------
#Code starts here
import numpy as np
senior_citizens=census[census[:,0]>60]
##print(senior_citizens)
working_hours_sum=np.sum(senior_citizens[:,6])
print(working_hours_sum)
senior_citizens_len=len(senior_citizens)
print(senior_citizens_len)
avg_working_hours=working_hours_sum/senior_citizens_len
print(avg_working_hours)
# --------------
#Code starts here
high=census[census[:,1]>10]
low=census[census[:,1]<=10]
##print(high)
##print(low)
avg_pay_high=np.mean(high[:,7])
avg_pay_low=np.mean(low[:,7])
print(avg_pay_high)
print(avg_pay_low)
|
[
"numpy.sum",
"numpy.std",
"numpy.genfromtxt",
"numpy.max",
"numpy.mean",
"numpy.min",
"numpy.array",
"numpy.concatenate"
] |
[((215, 264), 'numpy.genfromtxt', 'np.genfromtxt', (['path'], {'delimiter': '""","""', 'skip_header': '(1)'}), "(path, delimiter=',', skip_header=1)\n", (228, 264), True, 'import numpy as np\n'), ((286, 320), 'numpy.concatenate', 'np.concatenate', (['(data, new_record)'], {}), '((data, new_record))\n', (300, 320), True, 'import numpy as np\n'), ((435, 446), 'numpy.max', 'np.max', (['age'], {}), '(age)\n', (441, 446), True, 'import numpy as np\n'), ((474, 485), 'numpy.min', 'np.min', (['age'], {}), '(age)\n', (480, 485), True, 'import numpy as np\n'), ((514, 526), 'numpy.mean', 'np.mean', (['age'], {}), '(age)\n', (521, 526), True, 'import numpy as np\n'), ((555, 566), 'numpy.std', 'np.std', (['age'], {}), '(age)\n', (561, 566), True, 'import numpy as np\n'), ((1091, 1136), 'numpy.array', 'np.array', (['[len_0, len_1, len_2, len_3, len_4]'], {}), '([len_0, len_1, len_2, len_3, len_4])\n', (1099, 1136), True, 'import numpy as np\n'), ((1330, 1359), 'numpy.sum', 'np.sum', (['senior_citizens[:, 6]'], {}), '(senior_citizens[:, 6])\n', (1336, 1359), True, 'import numpy as np\n'), ((1676, 1695), 'numpy.mean', 'np.mean', (['high[:, 7]'], {}), '(high[:, 7])\n', (1683, 1695), True, 'import numpy as np\n'), ((1708, 1726), 'numpy.mean', 'np.mean', (['low[:, 7]'], {}), '(low[:, 7])\n', (1715, 1726), True, 'import numpy as np\n')]
|
import collections.abc
import types
import typing
from . import _abc
class BaseHeaders(collections.abc.Mapping):
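    """Mapping of HTTP headers with case-insensitive (lower-cased) keys."""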
def __init__(self, source: typing.Optional[typing.Mapping]=None) -> None:
self.__http_headers__: typing.Dict[str, str] = {}
if source is not None:
self.__http_headers__.update(
{k.lower(): v for k, v in source.items()})
def __getitem__(self, key: str) -> str:
return self.__http_headers__[key.lower()]
def __len__(self):
return len(self.__http_headers__)
def __contains__(self, key: typing.Any):
return key.lower() in self.__http_headers__
def __iter__(self):
return iter(self.__http_headers__)
class RequestHeaders(BaseHeaders):
pass
class ResponseHeaders(BaseHeaders, collections.abc.MutableMapping):
def __setitem__(self, key: str, value: str):
self.__http_headers__[key.lower()] = value
def __delitem__(self, key: str):
del self.__http_headers__[key.lower()]
class HttpRequest(_abc.HttpRequest):
"""An HTTP request object."""
def __init__(self, method: str, url: str,
headers: typing.Mapping[str, str],
params: typing.Mapping[str, str],
body) -> None:
self.__method = method
self.__url = url
self.__headers = RequestHeaders(headers)
self.__params = types.MappingProxyType(params)
self.__body = body
@property
def url(self):
return self.__url
@property
def method(self):
return self.__method.upper()
@property
def headers(self):
return self.__headers
@property
def params(self):
return self.__params
def get_body(self):
return self.__body
class HttpResponse(_abc.HttpResponse):
"""An HTTP response object."""
def __init__(self, body=None, *,
status_code=None, headers=None, mimetype=None, charset=None):
if status_code is None:
status_code = 200
self.__status_code = status_code
if mimetype is None:
mimetype = 'text/plain'
self.__mimetype = mimetype
if charset is None:
charset = 'utf-8'
self.__charset = charset
if headers is None:
headers = {}
self.__headers = ResponseHeaders(headers)
if body is not None:
self.__set_body(body)
else:
self.__body = b''
@property
def mimetype(self):
return self.__mimetype
@property
def charset(self):
return self.__charset
@property
def headers(self):
return self.__headers
@property
def status_code(self):
return self.__status_code
def __set_body(self, body):
if isinstance(body, str):
body = body.encode(self.__charset)
if not isinstance(body, (bytes, bytearray)):
raise TypeError(
                f'response is expected to be either of '
f'str, bytes, or bytearray, got {type(body).__name__}')
self.__body = bytes(body)
def get_body(self) -> bytes:
return self.__body
|
[
"types.MappingProxyType"
] |
[((1399, 1429), 'types.MappingProxyType', 'types.MappingProxyType', (['params'], {}), '(params)\n', (1421, 1429), False, 'import types\n')]
|
# Unit test configuration file for MessageLogger service:
# Suppression based on minimum severity level.
# (This is part A - verify that the suppression occurs.
# u35 is part B - verifying that suppression does not occur if any threshold
# is low enough to not suppress.)
#
import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
import FWCore.Framework.test.cmsExceptionsFatal_cff
process.options = FWCore.Framework.test.cmsExceptionsFatal_cff.options
process.load("FWCore.MessageService.test.Services_cff")
process.MessageLogger = cms.Service("MessageLogger",
u34_warnings = cms.untracked.PSet(
threshold = cms.untracked.string('WARNING'),
noTimeStamps = cms.untracked.bool(True)
),
destinations = cms.untracked.vstring('u34_warnings',
)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(2)
)
process.source = cms.Source("EmptySource")
process.sendSomeMessages = cms.EDAnalyzer("UnitTestClient_W")
process.p = cms.Path(process.sendSomeMessages)
|
[
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.vstring",
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.Source",
"FWCore.ParameterSet.Config.EDAnalyzer",
"FWCore.ParameterSet.Config.Path"
] |
[((327, 346), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""TEST"""'], {}), "('TEST')\n", (338, 346), True, 'import FWCore.ParameterSet.Config as cms\n'), ((890, 915), 'FWCore.ParameterSet.Config.Source', 'cms.Source', (['"""EmptySource"""'], {}), "('EmptySource')\n", (900, 915), True, 'import FWCore.ParameterSet.Config as cms\n'), ((944, 978), 'FWCore.ParameterSet.Config.EDAnalyzer', 'cms.EDAnalyzer', (['"""UnitTestClient_W"""'], {}), "('UnitTestClient_W')\n", (958, 978), True, 'import FWCore.ParameterSet.Config as cms\n'), ((992, 1026), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.sendSomeMessages'], {}), '(process.sendSomeMessages)\n', (1000, 1026), True, 'import FWCore.ParameterSet.Config as cms\n'), ((748, 785), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""u34_warnings"""'], {}), "('u34_warnings')\n", (769, 785), True, 'import FWCore.ParameterSet.Config as cms\n'), ((847, 869), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(2)'], {}), '(2)\n', (866, 869), True, 'import FWCore.ParameterSet.Config as cms\n'), ((641, 672), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""WARNING"""'], {}), "('WARNING')\n", (661, 672), True, 'import FWCore.ParameterSet.Config as cms\n'), ((697, 721), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (715, 721), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
#! /usr/bin/env python3
"""Communicates with a TensorFlow Serving instance that has loaded the Servable exported from the RNN Classifier.
"""
import numpy as np
import jieba
import tensorlayer as tl
from grpc.beta import implementations
import predict_pb2
import prediction_service_pb2
from packages import text_regularization as tr
def text_tensor(text, wv):
    """Get the vector representation of a text.
    Args:
        text: the text to be checked
        wv: the word-vector model
Returns:
[[[ 3.80905056 1.94315064 -0.20703495 -1.31589055 1.9627794
...
2.16935492 2.95426321 -4.71534014 -3.25034237 -11.28901672]]]
"""
text = tr.extractWords(text)
words = jieba.cut(text.strip())
text_sequence = []
for word in words:
try:
text_sequence.append(wv[word])
except KeyError:
text_sequence.append(wv['UNK'])
text_sequence = np.asarray(text_sequence)
sample = text_sequence.reshape(1, len(text_sequence), 200)
return sample
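# warm up jieba before serving: the first cut triggers its dictionary load ('分词初始化' means 'tokenizer initialization')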
print(" ".join(jieba.cut('分词初始化')))
wv = tl.files.load_npy_to_any(name='../word2vec/output/model_word2vec_200.npy')
host, port = ('localhost', '9000')
channel = implementations.insecure_channel(host, int(port))
stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
request = predict_pb2.PredictRequest()
request.model_spec.name = 'antispam'
|
[
"tensorlayer.files.load_npy_to_any",
"jieba.cut",
"prediction_service_pb2.beta_create_PredictionService_stub",
"predict_pb2.PredictRequest",
"numpy.asarray",
"packages.text_regularization.extractWords"
] |
[((965, 1039), 'tensorlayer.files.load_npy_to_any', 'tl.files.load_npy_to_any', ([], {'name': '"""../word2vec/output/model_word2vec_200.npy"""'}), "(name='../word2vec/output/model_word2vec_200.npy')\n", (989, 1039), True, 'import tensorlayer as tl\n'), ((1143, 1209), 'prediction_service_pb2.beta_create_PredictionService_stub', 'prediction_service_pb2.beta_create_PredictionService_stub', (['channel'], {}), '(channel)\n', (1200, 1209), False, 'import prediction_service_pb2\n'), ((1220, 1248), 'predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (1246, 1248), False, 'import predict_pb2\n'), ((566, 587), 'packages.text_regularization.extractWords', 'tr.extractWords', (['text'], {}), '(text)\n', (581, 587), True, 'from packages import text_regularization as tr\n'), ((815, 840), 'numpy.asarray', 'np.asarray', (['text_sequence'], {}), '(text_sequence)\n', (825, 840), True, 'import numpy as np\n'), ((939, 957), 'jieba.cut', 'jieba.cut', (['"""分词初始化"""'], {}), "('分词初始化')\n", (948, 957), False, 'import jieba\n')]
|
from PyQt5 import QtGui
from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup
import sys
from PyQt5 import QtCore
class Window(QWidget):
def __init__(self):
super().__init__()
self.title = "PyQt5 QButton Group"
self.top = 200
self.left = 500
self.width = 400
self.height = 300
self.setWindowTitle(self.title)
self.setWindowIcon(QtGui.QIcon("icon.png"))
self.setGeometry(self.left, self.top, self.width, self.height)
hbox = QHBoxLayout()
self.label = QLabel(self)
self.label.setFont(QtGui.QFont("Sanserif", 15))
hbox.addWidget(self.label)
self.buttongroup = QButtonGroup()
# self.buttongroup.setExclusive(False)
self.buttongroup.buttonClicked[int].connect(self.on_button_clicked)
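        # the buttonClicked[int] overload delivers the integer id assigned via addButton(button, id)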
button = QPushButton("Python")
self.buttongroup.addButton(button, 1)
button.setFont(QtGui.QFont("Sanserif", 15))
button.setIcon(QtGui.QIcon("pythonicon.png"))
button.setIconSize(QtCore.QSize(40, 40))
hbox.addWidget(button)
button = QPushButton("Java")
self.buttongroup.addButton(button, 2)
button.setFont(QtGui.QFont("Sanserif", 15))
button.setIcon(QtGui.QIcon("java.png"))
button.setIconSize(QtCore.QSize(40, 40))
hbox.addWidget(button)
button = QPushButton("C++")
self.buttongroup.addButton(button, 3)
button.setFont(QtGui.QFont("Sanserif", 15))
button.setIcon(QtGui.QIcon("cpp.png"))
button.setIconSize(QtCore.QSize(40, 40))
hbox.addWidget(button)
self.setLayout(hbox)
self.show()
def on_button_clicked(self, id):
for button in self.buttongroup.buttons():
if button is self.buttongroup.button(id):
self.label.setText(button.text() + " Was Clicked ")
if __name__ == "__main__":
App = QApplication(sys.argv)
window = Window()
sys.exit(App.exec())
|
[
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtCore.QSize",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QButtonGroup"
] |
[((1950, 1972), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1962, 1972), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((553, 566), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (564, 566), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((588, 600), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (594, 600), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((719, 733), 'PyQt5.QtWidgets.QButtonGroup', 'QButtonGroup', ([], {}), '()\n', (731, 733), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((874, 895), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""Python"""'], {}), "('Python')\n", (885, 895), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((1145, 1164), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""Java"""'], {}), "('Java')\n", (1156, 1164), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((1408, 1426), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""C++"""'], {}), "('C++')\n", (1419, 1426), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout, QLabel, QPushButton, QButtonGroup\n'), ((442, 465), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""icon.png"""'], {}), "('icon.png')\n", (453, 465), False, 'from PyQt5 import QtGui\n'), ((628, 655), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""Sanserif"""', '(15)'], {}), "('Sanserif', 15)\n", (639, 655), False, 'from PyQt5 import QtGui\n'), ((965, 992), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""Sanserif"""', '(15)'], {}), "('Sanserif', 15)\n", (976, 992), False, 'from PyQt5 import QtGui\n'), ((1017, 1046), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""pythonicon.png"""'], {}), "('pythonicon.png')\n", (1028, 1046), False, 'from PyQt5 import QtGui\n'), ((1075, 1095), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (1087, 1095), False, 'from PyQt5 import QtCore\n'), ((1234, 1261), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""Sanserif"""', '(15)'], {}), "('Sanserif', 15)\n", (1245, 1261), False, 'from PyQt5 import QtGui\n'), ((1286, 1309), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""java.png"""'], {}), "('java.png')\n", (1297, 1309), False, 'from PyQt5 import QtGui\n'), ((1338, 1358), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (1350, 1358), False, 'from PyQt5 import QtCore\n'), ((1496, 1523), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""Sanserif"""', '(15)'], {}), "('Sanserif', 15)\n", (1507, 1523), False, 'from PyQt5 import QtGui\n'), ((1548, 1570), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""cpp.png"""'], {}), "('cpp.png')\n", (1559, 1570), False, 'from PyQt5 import QtGui\n'), ((1599, 1619), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(40)', '(40)'], {}), '(40, 40)\n', (1611, 1619), False, 'from PyQt5 import QtCore\n')]
|
import numpy as np
import tensorflow as tf
def mmd_penalty(sample_qz, sample_pz, pz_scale, kernel='RBF'):
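    # Sample-based estimate of the maximum mean discrepancy (MMD) between q(z) and the prior p(z), with RBF or IMQ kernels.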
sigma2_p = pz_scale ** 2
n, d = sample_pz.get_shape().as_list()
n = tf.cast(n, tf.int32)
nf = tf.cast(n, tf.float32)
half_size = (n * n - n) / 2
norms_pz = tf.reduce_sum(tf.square(sample_pz), axis=1, keep_dims=True)
dotprods_pz = tf.matmul(sample_pz, sample_pz, transpose_b=True)
distances_pz = norms_pz + tf.transpose(norms_pz) - 2. * dotprods_pz
norms_qz = tf.reduce_sum(tf.square(sample_qz), axis=1, keep_dims=True)
dotprods_qz = tf.matmul(sample_qz, sample_qz, transpose_b=True)
distances_qz = norms_qz + tf.transpose(norms_qz) - 2. * dotprods_qz
dotprods = tf.matmul(sample_qz, sample_pz, transpose_b=True)
distances = norms_qz + tf.transpose(norms_pz) - 2. * dotprods
if kernel == 'RBF':
# Median heuristic for the sigma^2 of Gaussian kernel
'''
sigma2_k = tf.nn.top_k(
tf.reshape(distances, [-1]), half_size).values[half_size - 1]
sigma2_k += tf.nn.top_k(
tf.reshape(distances_qz, [-1]), half_size).values[half_size - 1]
'''
# Maximal heuristic for the sigma^2 of Gaussian kernel
# sigma2_k = tf.nn.top_k(tf.reshape(distances_qz, [-1]), 1).values[0]
# sigma2_k += tf.nn.top_k(tf.reshape(distances, [-1]), 1).values[0]
sigma2_k = d * sigma2_p
res1 = tf.exp( - distances_qz / 2. / sigma2_k)
res1 += tf.exp( - distances_pz / 2. / sigma2_k)
res1 = tf.multiply(res1, 1. - tf.eye(n))
res1 = tf.reduce_sum(res1) / (nf * nf - nf)
res2 = tf.exp( - distances / 2. / sigma2_k)
res2 = tf.reduce_sum(res2) * 2. / (nf * nf)
stat = res1 - res2
elif kernel == 'IMQ':
# k(x, y) = C / (C + ||x - y||^2)
# C = tf.nn.top_k(tf.reshape(distances, [-1]), half_size).values[half_size - 1]
# C += tf.nn.top_k(tf.reshape(distances_qz, [-1]), half_size).values[half_size - 1]
pz_kind = 'normal'
if pz_kind == 'normal':
Cbase = 2. * d * sigma2_p
elif pz_kind == 'sphere':
Cbase = 2.
elif pz_kind == 'uniform':
# E ||x - y||^2 = E[sum (xi - yi)^2]
# = zdim E[(xi - yi)^2]
# = const * zdim
Cbase = d
stat = 0.
for scale in [.1, .2, .5, 1., 2., 5., 10.]:
C = Cbase * scale
res1 = C / (C + distances_qz)
res1 += C / (C + distances_pz)
res1 = tf.multiply(res1, 1. - tf.eye(n))
res1 = tf.reduce_sum(res1) / (nf * nf - nf)
res2 = C / (C + distances)
res2 = tf.reduce_sum(res2) * 2. / (nf * nf)
stat += res1 - res2
else:
assert False
return stat
def main():
with tf.Session() as sess:
def e(t):
return sess.run(t)
def p(s, t):
print(s, e(t))
n = 10000
d = 64
scale = tf.Variable(1.0, dtype=tf.float32)
sample_qz = scale * tf.random.normal((n, d), dtype=tf.float32)
sample_pz = tf.random.normal((n, d), dtype=tf.float32)
mmd = mmd_penalty(sample_qz, sample_pz, pz_scale=1.0, kernel='IMQ')
e(tf.global_variables_initializer())
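        # sweep the scale of q(z); the MMD estimate should be smallest near |scale| = 1, where q matches the N(0, 1) prior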
for scale_np in np.linspace(-2, +2, 21):
print(scale_np, sess.run(mmd, feed_dict={scale: scale_np}))
if __name__ == "__main__":
main()
|
[
"tensorflow.reduce_sum",
"tensorflow.random.normal",
"tensorflow.global_variables_initializer",
"tensorflow.eye",
"tensorflow.Session",
"tensorflow.transpose",
"tensorflow.cast",
"tensorflow.matmul",
"tensorflow.Variable",
"tensorflow.exp",
"numpy.linspace",
"tensorflow.square"
] |
[((187, 207), 'tensorflow.cast', 'tf.cast', (['n', 'tf.int32'], {}), '(n, tf.int32)\n', (194, 207), True, 'import tensorflow as tf\n'), ((217, 239), 'tensorflow.cast', 'tf.cast', (['n', 'tf.float32'], {}), '(n, tf.float32)\n', (224, 239), True, 'import tensorflow as tf\n'), ((366, 415), 'tensorflow.matmul', 'tf.matmul', (['sample_pz', 'sample_pz'], {'transpose_b': '(True)'}), '(sample_pz, sample_pz, transpose_b=True)\n', (375, 415), True, 'import tensorflow as tf\n'), ((582, 631), 'tensorflow.matmul', 'tf.matmul', (['sample_qz', 'sample_qz'], {'transpose_b': '(True)'}), '(sample_qz, sample_qz, transpose_b=True)\n', (591, 631), True, 'import tensorflow as tf\n'), ((720, 769), 'tensorflow.matmul', 'tf.matmul', (['sample_qz', 'sample_pz'], {'transpose_b': '(True)'}), '(sample_qz, sample_pz, transpose_b=True)\n', (729, 769), True, 'import tensorflow as tf\n'), ((302, 322), 'tensorflow.square', 'tf.square', (['sample_pz'], {}), '(sample_pz)\n', (311, 322), True, 'import tensorflow as tf\n'), ((518, 538), 'tensorflow.square', 'tf.square', (['sample_qz'], {}), '(sample_qz)\n', (527, 538), True, 'import tensorflow as tf\n'), ((1427, 1465), 'tensorflow.exp', 'tf.exp', (['(-distances_qz / 2.0 / sigma2_k)'], {}), '(-distances_qz / 2.0 / sigma2_k)\n', (1433, 1465), True, 'import tensorflow as tf\n'), ((1483, 1521), 'tensorflow.exp', 'tf.exp', (['(-distances_pz / 2.0 / sigma2_k)'], {}), '(-distances_pz / 2.0 / sigma2_k)\n', (1489, 1521), True, 'import tensorflow as tf\n'), ((1639, 1674), 'tensorflow.exp', 'tf.exp', (['(-distances / 2.0 / sigma2_k)'], {}), '(-distances / 2.0 / sigma2_k)\n', (1645, 1674), True, 'import tensorflow as tf\n'), ((2848, 2860), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2858, 2860), True, 'import tensorflow as tf\n'), ((3017, 3051), 'tensorflow.Variable', 'tf.Variable', (['(1.0)'], {'dtype': 'tf.float32'}), '(1.0, dtype=tf.float32)\n', (3028, 3051), True, 'import tensorflow as tf\n'), ((3143, 3185), 'tensorflow.random.normal', 'tf.random.normal', (['(n, d)'], {'dtype': 'tf.float32'}), '((n, d), dtype=tf.float32)\n', (3159, 3185), True, 'import tensorflow as tf\n'), ((3331, 3354), 'numpy.linspace', 'np.linspace', (['(-2)', '(+2)', '(21)'], {}), '(-2, +2, 21)\n', (3342, 3354), True, 'import numpy as np\n'), ((446, 468), 'tensorflow.transpose', 'tf.transpose', (['norms_pz'], {}), '(norms_pz)\n', (458, 468), True, 'import tensorflow as tf\n'), ((662, 684), 'tensorflow.transpose', 'tf.transpose', (['norms_qz'], {}), '(norms_qz)\n', (674, 684), True, 'import tensorflow as tf\n'), ((797, 819), 'tensorflow.transpose', 'tf.transpose', (['norms_pz'], {}), '(norms_pz)\n', (809, 819), True, 'import tensorflow as tf\n'), ((1587, 1606), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['res1'], {}), '(res1)\n', (1600, 1606), True, 'import tensorflow as tf\n'), ((3080, 3122), 'tensorflow.random.normal', 'tf.random.normal', (['(n, d)'], {'dtype': 'tf.float32'}), '((n, d), dtype=tf.float32)\n', (3096, 3122), True, 'import tensorflow as tf\n'), ((3272, 3305), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (3303, 3305), True, 'import tensorflow as tf\n'), ((1561, 1570), 'tensorflow.eye', 'tf.eye', (['n'], {}), '(n)\n', (1567, 1570), True, 'import tensorflow as tf\n'), ((1691, 1710), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['res2'], {}), '(res2)\n', (1704, 1710), True, 'import tensorflow as tf\n'), ((2613, 2632), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['res1'], {}), '(res1)\n', (2626, 2632), True, 'import tensorflow as tf\n'), ((2583, 2592), 'tensorflow.eye', 'tf.eye', (['n'], {}), '(n)\n', (2589, 2592), True, 'import tensorflow as tf\n'), ((2708, 2727), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['res2'], {}), '(res2)\n', (2721, 2727), True, 'import tensorflow as tf\n')]
|
from theatre_ag.theatre_ag.actor import Actor as TheatreActor
from theatre_ag.theatre_ag.task import Task
from .Constraints import Deadline, ResourceDelta
from .Responsibilities import Responsibility, Obligation
from abc import ABCMeta
from .utility_functions import mean
from .Responsibilities import Act, ResponsibilityEffect
from copy import copy
import random
class BasicResponsibleAgent(TheatreActor):
responsible = True # This will be a toggle for deactivating the formalism
__metaclass__ = ABCMeta
def __init__(self,
notions,
name,
clock,
workflows: list,
sociotechnical_states = {},
interpreting_coefficients = {}):
super().__init__(name, clock)
self.interpreting_coefficients = interpreting_coefficients
# Make a responsibility for self for chilling out, which idling fulfils
chill_deadline = Deadline(1, clock)
chill_effect = ResourceDelta({'personal_enjoyment': 1})
chill = Obligation([chill_deadline,
chill_effect],
name="idle")
chill.set_importances([0.25, 0.5])
self.chill_resp = Responsibility(chill, self, self)
notions.append(self.interpret(self.chill_resp))
self.responsibilities = copy(notions) # Default beliefs about the world
self.notions = copy(notions)
self.consequential_responsibilities = [] # All discharged constraints
self.workflows = workflows
self.socio_states = sociotechnical_states
self.idle_act = Act(ResponsibilityEffect({'personal_enjoyment': 1}),
self.idling.idle,
self.idling)
self.basic_judgement_responsible = 0.5 # How responsible is someone if you don't know what they've done before?
# Assign all of the workflows to me
for workflow in self.workflows:
workflow.assign_agent(self)
# To be updated with the responsibility the current action represents
self.current_responsibility = None
# An act is a bound method. self.acts is a dictionary of the form:
# {effect: (act_entry_point, workflow, args)}
# ..where the effect is a list of tuples, where each tuple is a string and an integer effect on the atttribute
# the string represents.
self.acts = {}
def delegate_responsibility(self,
obligation: Obligation,
importances: list,
delegee): # Make this a ResponsibleAgent
obligation.set_importances(importances)
resp = Responsibility(copy(obligation), self, delegee)
accepted = resp.delegee.accept_responsibility(resp)
if not accepted:
            raise NotImplementedError("What happens if a responsibility "
                                      "isn't allocated?")
def interpret(self,
resp: Responsibility):
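        # Scale the importance of each constraint touched by an interpreting coefficient, clamping the result to [0, 1].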
resp = copy(resp)
for factor, coefficient in self.interpreting_coefficients.items():
for constraint_index in range(len(resp.constraints)):
old_constraint = resp.constraints[constraint_index]
constraint = copy(old_constraint)
importance = constraint.importance
# Work out the new importance value, if there is one.
if factor in constraint.factors.keys() or factor == constraint.__class__:
# constraint.interpreted = False
importance = importance * coefficient
importance = max(min(1, importance), 0) # Normalise!
constraint.assign_importance(importance)
resp.constraints[constraint_index] = constraint
# Return the responsibility with a new set of constraints
return resp
def __decide_acceptance(self, resp):
# importances = [constraint.importance
# for constraint in resp.constraints]
# return mean(importances) > 0.5
return True
def accept_responsibility(self, resp: Responsibility):
interpreted_responsibility = self.interpret(resp)
accepted = self.__decide_acceptance(interpreted_responsibility)
if accepted:
self.responsibilities.append(interpreted_responsibility)
return accepted
def __judge_degree_responsible(self, other_agent):
# Re-interpret constraints
resps = [r
for r in copy([c[0] for c in other_agent.consequential_responsibilities])]
resps += [r
for r in other_agent.responsibilities
if r not in other_agent.notions]
for responsibility in resps:
for i in range(len(responsibility.constraints)):
constraint = copy(responsibility.constraints[i])
constraint.importance = constraint.original_importance
responsibility.constraints[i] = self.interpret(constraint)
# Calculate each resource type's specific responsibility
specific_responsibilities = {}
importance = 0
outcome = False
def process_factor(factor, outcome, importance):
if factor not in specific_responsibilities.keys():
specific_responsibilities[factor] = (0, 0)
score, count = specific_responsibilities[factor]
count += 1
if outcome is None:
outcome = False
if outcome:
score += importance
specific_responsibilities[factor] = (score, count)
for responsibility in resps:
for constraint in responsibility.constraints:
importance = constraint.importance
outcome = constraint.outcome
if type(constraint) == Deadline:
process_factor(Deadline, outcome, importance)
for factor in constraint.factors.keys():
process_factor(factor, outcome, importance)
for factor, score_tuple in specific_responsibilities.items():
specific_responsibilities[factor] = score_tuple[0]/score_tuple[1]
return specific_responsibilities
def basic_responsibility_judgement(self):
return self.basic_judgement_responsible
def general_responsibility_judgement(self, other_agent):
judgement = self.__judge_degree_responsible(other_agent)
return mean(judgement.values())
def specific_responsibility_judgement(self, other_agent, resource_type):
return self.__judge_degree_responsible(other_agent).get(resource_type,
self.basic_judgement_responsible)
def choose_action(self, responsibility):
'''
RETURNS: a function which returns a tuple (a,b):
a: the success or failure of the discharge
b: the set of constraint satisfactions (the consequential
responsibility)
Will choose the first action which seems to move resources in the intended
direction.
'''
intended_effect = responsibility.calculate_effect()
intended_effect.disregard('duration')
return self.acts.get(intended_effect,
self.idle_act)
@property
def actionable_responsibilities(self):
        return self.responsibilities ## To be changed by actors who don't act on all notions
def choose_responsibility(self):
'''
Choose a responsibility with the highest average importance across various
factors.
TODO: make this smarter! Just taking the mean doesn't take into account
the nature of the importances.
TODO: Consider deadlines! Implement an eisenhower matrix, to weigh
importance against urgency?
'''
resps = self.actionable_responsibilities
if resps == []:
return None
else:
resp = sorted(resps,
key=lambda x: sum(x.importances))[::-1][0]
return resp
def next_action(self):
resp_chosen = self.choose_responsibility()
if resp_chosen is not None:
self.current_responsibility = resp_chosen
discharge_act = self.choose_action(resp_chosen)
else:
discharge_act = self.idle_act
return discharge_act
def get_next_task(self):
# Get the next action
next_action = self.next_action()
# Create and return the relevant task
return Task(next_action.entry_point_function,
next_action.workflow,
next_action.args)
def calculate_delay(self, entry_point, workflow=None, args=()):
# If the current responsibility is None, we're idling.
if self.current_responsibility is None:
return 1 # Duration of an idle
else:
# Get the duration of the current responsibility as the length of the task.
return self.current_responsibility.calculate_effect().get('duration')
def handle_task_return(self, task, value):
if value is not None:
discharged_successfully, constraint_satisfactions = value
consequential_responsibility = copy(self.current_responsibility)
consequential_responsibility.obligation.constraint_set = [copy(c) for c in constraint_satisfactions]
self.consequential_responsibilities.append((consequential_responsibility, discharged_successfully))
self.responsibilities.pop(self.responsibilities.index(self.current_responsibility))
else:
if not self.current_responsibility == self.chill_resp:
self.responsibilities.pop(self.responsibilities.index(self.current_responsibility))
consequential_responsibility = copy(self.current_responsibility)
self.consequential_responsibilities.append((consequential_responsibility, True))
self.current_responsibility = None
def register_act(self,
act: Act):
act.args = [self]
act.entry_point_function.default_cost = 0
self.acts[act.effect] = act
def register_new_workflow(self,
workflow):
workflow.assign_agent(self)
self.workflows.append(workflow)
def get_sociotechnical_state(self, state_key):
return self.socio_states.get(state_key,
None)
def advise(self, other_agent):
other_agent.take_advice(self.interpreting_coefficients, self)
def take_advice(self, advice, authority):
        # Optionally, check here whether the authority figure is authoritative
        # enough to be listened to. For now we blindly accept all advice, so long
        # as it comes from a lecturer.
if type(authority) is Lecturer:
            # Fold the adviser's coefficients into our own.
            for key, advised_value in advice.items():
original_value = self.interpreting_coefficients.get(key, 0)
self.interpreting_coefficients[key] = original_value + advised_value
class HedonisticAgent(BasicResponsibleAgent):
def __init__(self,
notions,
name,
clock,
workflows: list,
sociotechnical_states = {},
interpreting_coefficients = {'personal_enjoyment': 5}):
super().__init__(notions,
name,
clock,
workflows,
copy(sociotechnical_states),
copy(interpreting_coefficients))
class StudiousAgent(BasicResponsibleAgent):
def __init__(self,
notions,
name,
clock,
workflows: list,
sociotechnical_states = {},
interpreting_coefficients = {'working_programs': 5,
'essays_written': 5}):
super().__init__(notions,
name,
clock,
workflows,
copy(sociotechnical_states),
copy(interpreting_coefficients))
class Lecturer(BasicResponsibleAgent):
def __init__(self,
notions,
name,
clock,
workflows: list,
sociotechnical_states = {},
interpreting_coefficients = {}):
super().__init__(notions,
name,
clock,
workflows,
copy(sociotechnical_states),
copy(interpreting_coefficients))
class BullshitAgent(BasicResponsibleAgent):
def choose_responsibility(self):
return random.choice(self.responsibilities)
|
[
"theatre_ag.theatre_ag.task.Task",
"copy.copy",
"random.choice"
] |
[((1358, 1371), 'copy.copy', 'copy', (['notions'], {}), '(notions)\n', (1362, 1371), False, 'from copy import copy\n'), ((1430, 1443), 'copy.copy', 'copy', (['notions'], {}), '(notions)\n', (1434, 1443), False, 'from copy import copy\n'), ((3070, 3080), 'copy.copy', 'copy', (['resp'], {}), '(resp)\n', (3074, 3080), False, 'from copy import copy\n'), ((8687, 8765), 'theatre_ag.theatre_ag.task.Task', 'Task', (['next_action.entry_point_function', 'next_action.workflow', 'next_action.args'], {}), '(next_action.entry_point_function, next_action.workflow, next_action.args)\n', (8691, 8765), False, 'from theatre_ag.theatre_ag.task import Task\n'), ((12998, 13034), 'random.choice', 'random.choice', (['self.responsibilities'], {}), '(self.responsibilities)\n', (13011, 13034), False, 'import random\n'), ((2749, 2765), 'copy.copy', 'copy', (['obligation'], {}), '(obligation)\n', (2753, 2765), False, 'from copy import copy\n'), ((9405, 9438), 'copy.copy', 'copy', (['self.current_responsibility'], {}), '(self.current_responsibility)\n', (9409, 9438), False, 'from copy import copy\n'), ((9984, 10017), 'copy.copy', 'copy', (['self.current_responsibility'], {}), '(self.current_responsibility)\n', (9988, 10017), False, 'from copy import copy\n'), ((11698, 11725), 'copy.copy', 'copy', (['sociotechnical_states'], {}), '(sociotechnical_states)\n', (11702, 11725), False, 'from copy import copy\n'), ((11752, 11783), 'copy.copy', 'copy', (['interpreting_coefficients'], {}), '(interpreting_coefficients)\n', (11756, 11783), False, 'from copy import copy\n'), ((12302, 12329), 'copy.copy', 'copy', (['sociotechnical_states'], {}), '(sociotechnical_states)\n', (12306, 12329), False, 'from copy import copy\n'), ((12356, 12387), 'copy.copy', 'copy', (['interpreting_coefficients'], {}), '(interpreting_coefficients)\n', (12360, 12387), False, 'from copy import copy\n'), ((12813, 12840), 'copy.copy', 'copy', (['sociotechnical_states'], {}), '(sociotechnical_states)\n', (12817, 12840), False, 'from copy import copy\n'), ((12867, 12898), 'copy.copy', 'copy', (['interpreting_coefficients'], {}), '(interpreting_coefficients)\n', (12871, 12898), False, 'from copy import copy\n'), ((3319, 3339), 'copy.copy', 'copy', (['old_constraint'], {}), '(old_constraint)\n', (3323, 3339), False, 'from copy import copy\n'), ((4597, 4661), 'copy.copy', 'copy', (['[c[0] for c in other_agent.consequential_responsibilities]'], {}), '([c[0] for c in other_agent.consequential_responsibilities])\n', (4601, 4661), False, 'from copy import copy\n'), ((4918, 4953), 'copy.copy', 'copy', (['responsibility.constraints[i]'], {}), '(responsibility.constraints[i])\n', (4922, 4953), False, 'from copy import copy\n'), ((9509, 9516), 'copy.copy', 'copy', (['c'], {}), '(c)\n', (9513, 9516), False, 'from copy import copy\n')]
|
from sqlalchemy.sql.functions import func
from model.db import db
import json
from controller.logicTopoBasin import LogicTopoBasin
from controller.logicTopoLivingArea import LogicTopoLivingArea
from controller.logicTopoAgricultureArea import LogicTopoAgricultureArea
from controller.logicTopoWaterwork import LogicTopoWaterwork
from controller.logicTopoWaterin import LogicTopoWaterin
from controller.logicTopoFlowPath import LogicTopoFlowPath
from controller.logicTopoCatchment import LogicTopoCatchment
from controller.logicTopoPollution import LogicTopoPollution
from controller.logicTopoIndustryArea import LogicTopoIndustryArea
from controller.logicTopoFactory import LogicTopoFactory
from controller.logicTopoSewageTreatmentPlant import LogicTopoSewageTreatmentPlant
from controller.logicTopoReservoir import LogicTopoReservoir
from controller.logicTopoDebris import LogicTopoDebris
from controller.logicTopoRainStation import LogicTopoRainStation
from controller.logicTopoFloodStation import LogicTopoFloodStation
from controller.logicTopoWaterLevelStation import LogicTopoWaterLevelStation
from controller.util import GetSInfoPoint
# Assumed import for LogicTopoPlace (used in FindNodeByKind below), following
# the package's one-module-per-controller naming convention.
from controller.logicTopoPlace import LogicTopoPlace
class LogicTopoController():
def ListKind(self):
sql = "select * from s_topology_kind"
rows = db.engine.execute(sql)
result = [dict(r) for r in rows]
return result
    def ListTransfer(self, kind=None):
        sql = "select * from s_topology_transfer"
        if kind is not None:
            # Bind the value instead of interpolating it, to avoid SQL injection
            # (%s assumes a pyformat DBAPI such as pymysql/psycopg2).
            rows = db.engine.execute(sql + " where from_類別=%s", (kind,))
        else:
            rows = db.engine.execute(sql)
result = [dict(r) for r in rows]
return result
def FindNodeByKind(self,param):
if not "kind" in param:
return {"error":"no kind parameter"}
kind = param["kind"]
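        # Kind values are Chinese domain terms matched verbatim against request
        # data: "流域" = river basin, "地點" = place, "淨水場" = waterwork.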
if kind == "流域":
return LogicTopoBasin().FindBasinByID(param)
elif kind == "地點":
return LogicTopoPlace().FindVillageByLatLng(param)
elif kind == "淨水場":
return LogicTopoWaterwork().FindWaterworkByID(param)
else:
return {"error":"not implemented"}
def FindNodeByTransfer(self,param):
if not "kind" in param:
return {"error":"no kind parameter"}
if not "transfer" in param:
return {"error":"no transfer parameter"}
kind = param["kind"]
transfer = param["transfer"]
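        # Glossary of the Chinese kind values (kept verbatim, since they are
        # matched against request data): "流域" basin, "流路" flow path,
        # "生活區域" living area, "農業區域" agriculture area, "淨水場" waterwork,
        # "取水口" water intake, "集水區" catchment, "鄰近污染源" nearby pollution
        # sources, "工業區域" industry area, "工廠" factory, "工業污水處理廠" industrial
        # sewage treatment plant, "水庫" reservoir, "土石流" debris flow,
        # "雨量站" rain station, "河川水位站" river water-level station,
        # "淹水感測站" flood sensing station.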
if kind == "流域":
ltb = LogicTopoBasin()
if transfer == "流域範圍":
return ltb.FindBasinByID(param)
elif transfer in ["主要河道","源頭到海洋路徑"]:
return ltb.FindMainRiverByID(param)
elif transfer == "所有河道":
return ltb.FindStreams(param)
elif transfer == "流域分區":
return ltb.FindSubBasins(param)
elif transfer == "生活區域":
return ltb.FindLivingArea(param)
elif transfer == "農業區域":
return ltb.FindAgricultureArea(param)
elif transfer == "工業區域":
return ltb.FindIndustryArea(param)
elif transfer == "水庫堰壩":
return ltb.FindReservoir(param)
elif transfer == "水質水量保護區":
return ltb.FindProtectArea(param)
elif transfer == "淹水潛勢圖":
return ltb.FindFloodArea(param)
elif transfer == "土石流潛勢溪流":
return ltb.FindDebris(param)
elif transfer in ["雨水下水道","污水下水道","圳路"]:
return {"error":"無開放資料"}
elif kind == "流路":
ltfp = LogicTopoFlowPath()
if transfer == "上游集水區":
return ltfp.FindUpstreamCatchment(param)
elif transfer == "下游入海線":
return ltfp.FindDownstreamPath(param)
elif transfer == "所屬流域":
return ltfp.FindBasin(param)
elif transfer == "鳥覽流路":
return ltfp.BirdView(param)
elif kind == "生活區域":
ltla = LogicTopoLivingArea()
if transfer == "淨水廠為何":
return ltla.FindVillageWaterwork(param)
elif transfer == "水源在哪":
return ltla.FindVillageWaterin(param)
elif transfer == "有哪些污染源":
return ltla.FindVillagePollution(param)
elif transfer == "用水統計(三級經濟區)":
return ltla.FindWaterUse(param)
elif kind == "農業區域":
ltaa = LogicTopoAgricultureArea()
if transfer == "有哪些污染源":
return ltaa.FindAgriculturePollution(param)
elif transfer == "有哪些農作物":
return ltaa.FindCrop(param)
elif kind == "淨水場":
ltww = LogicTopoWaterwork()
if transfer == "取水口為何":
return ltww.FindWaterinByID(param)
elif transfer == "淨水場水質":
return ltww.FindWaterworkQuality(param)
elif transfer == "淨水場供水量":
return ltww.FindWaterworkQuantity(param)
elif transfer == "供給哪些區域":
return ltww.FindSupplyLivingArea(param)
elif kind == "取水口":
ltwi = LogicTopoWaterin()
if transfer == "集水區為何":
return ltwi.FindCatchmentByID(param)
elif transfer == "取水量":
return ltwi.FindWaterinQuantity(param)
elif transfer == "生活供給範圍":
return ltwi.FindSupplyLivingArea(param)
elif kind == "集水區":
ltc = LogicTopoCatchment()
if transfer == "有哪些污染源":
return ltc.FindCatchmentPollution(param)
elif transfer == "雨量站":
return ltc.FindRainStation(param)
elif transfer == "河川水位站":
return ltc.FindWaterLevelStation(param)
elif transfer == "淹水感測站":
return ltc.FindFloodStation(param)
elif kind == "鄰近污染源":
ltp = LogicTopoPollution()
if transfer == "工廠":
return ltp.FindFactory(param)
elif transfer == "環境保護列管對象":
return ltp.FindEPAFactoryBase(param)
elif transfer == "工業區域":
return ltp.FindIndustryArea(param)
elif transfer == "工業污水處理廠":
return ltp.FindSewageTreatmentPlant(param)
elif transfer == "農地工廠":
return ltp.FindFactoryInFarm(param)
elif transfer == "水污染源放流口":
return ltp.FindWaterpRecord(param)
elif kind == "工業區域":
ltia = LogicTopoIndustryArea()
if transfer == "哪個污水廠":
return ltia.FindSewageTreatmentPlant(param)
elif transfer == "有哪些工廠":
return ltia.FindFactory(param)
elif kind == "工廠":
ltf = LogicTopoFactory()
if transfer == "哪個污水廠":
return ltf.FindSewageTreatmentPlant(param)
elif transfer == "屬於哪個工業區":
return ltf.FindIndustryArea(param)
elif kind == "工業污水處理廠":
ltstp = LogicTopoSewageTreatmentPlant()
if transfer == "處理範圍":
return ltstp.FindProcessingArea(param)
elif kind == "水庫":
ltr = LogicTopoReservoir()
if transfer == "蓄水範圍":
return ltr.FindStorageArea(param)
elif transfer == "集水區為何":
return ltr.FindCatchment(param)
elif transfer == "水質水量保護區":
return ltr.FindProtectArea(param)
elif kind == "土石流":
ltd = LogicTopoDebris()
if transfer == "集水區為何":
return ltd.FindCatchment(param)
elif transfer == "影響範圍":
return ltd.FindInfluence(param)
elif transfer == "歷史影像":
return ltd.FindHistoryPhoto(param)
elif transfer == "流路":
return ltd.FindFlowPath(param)
elif kind == "雨量站":
ltrs = LogicTopoRainStation()
if transfer == "雨量資料":
return ltrs.FindRainData(param)
elif transfer == "鄰近河川水位站":
return ltrs.FindWaterLevelStation(param)
elif transfer == "鄰近淹水感測站":
return ltrs.FindFloodStation(param)
elif transfer == "淹水潛勢圖":
return ltrs.FindFloodArea(param)
elif kind == "河川水位站":
ltwls = LogicTopoWaterLevelStation()
if transfer == "水位資料":
return ltwls.FindWaterLevelData(param)
elif transfer == "鄰近雨量站":
return ltwls.FindRainStation(param)
elif transfer == "鄰近淹水感測站":
return ltwls.FindFloodStation(param)
elif transfer == "淹水潛勢圖":
return ltwls.FindFloodArea(param)
elif kind == "淹水感測站":
ltfs = LogicTopoFloodStation()
if transfer == "淹水資料":
return ltfs.FindFloodData(param)
elif transfer == "鄰近雨量站":
return ltfs.FindRainStation(param)
elif transfer == "鄰近河川水位站":
return ltfs.FindWaterLevelStation(param)
elif transfer == "淹水潛勢圖":
return ltfs.FindFloodArea(param)
return {"error":"not implemented"}
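    # A dispatch table keyed on (kind, transfer) would flatten the chain above;
    # hedged sketch with illustrative entries:
    #   HANDLERS = {("流域", "流域範圍"): lambda p: LogicTopoBasin().FindBasinByID(p)}
    #   handler = HANDLERS.get((kind, transfer))
    #   return handler(param) if handler else {"error": "not implemented"}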
def GetNodeInfo(self,param):
if not "kind" in param:
return {"error":"no kind parameter"}
kind = param["kind"]
        nodeName = param.get("nodeName")
if nodeName is None:
return {"error":"no nodeName parameter"}
info = GetSInfoPoint(param["kind"],nodeName)
if info is None:
            return {"error": "查無基本資料"}  # "no basic data found"
else:
return info
|
[
"controller.logicTopoWaterin.LogicTopoWaterin",
"controller.logicTopoSewageTreatmentPlant.LogicTopoSewageTreatmentPlant",
"controller.logicTopoCatchment.LogicTopoCatchment",
"controller.logicTopoPollution.LogicTopoPollution",
"controller.logicTopoLivingArea.LogicTopoLivingArea",
"model.db.db.engine.execute",
"controller.logicTopoFlowPath.LogicTopoFlowPath",
"controller.logicTopoWaterwork.LogicTopoWaterwork",
"controller.logicTopoReservoir.LogicTopoReservoir",
"controller.logicTopoFloodStation.LogicTopoFloodStation",
"controller.logicTopoAgricultureArea.LogicTopoAgricultureArea",
"controller.logicTopoFactory.LogicTopoFactory",
"controller.logicTopoIndustryArea.LogicTopoIndustryArea",
"controller.util.GetSInfoPoint",
"controller.logicTopoRainStation.LogicTopoRainStation",
"controller.logicTopoBasin.LogicTopoBasin",
"controller.logicTopoWaterLevelStation.LogicTopoWaterLevelStation",
"controller.logicTopoDebris.LogicTopoDebris"
] |
[((1255, 1277), 'model.db.db.engine.execute', 'db.engine.execute', (['sql'], {}), '(sql)\n', (1272, 1277), False, 'from model.db import db\n'), ((1522, 1544), 'model.db.db.engine.execute', 'db.engine.execute', (['sql'], {}), '(sql)\n', (1539, 1544), False, 'from model.db import db\n'), ((9502, 9540), 'controller.util.GetSInfoPoint', 'GetSInfoPoint', (["param['kind']", 'nodeName'], {}), "(param['kind'], nodeName)\n", (9515, 9540), False, 'from controller.util import GetSInfoPoint\n'), ((2401, 2417), 'controller.logicTopoBasin.LogicTopoBasin', 'LogicTopoBasin', ([], {}), '()\n', (2415, 2417), False, 'from controller.logicTopoBasin import LogicTopoBasin\n'), ((3521, 3540), 'controller.logicTopoFlowPath.LogicTopoFlowPath', 'LogicTopoFlowPath', ([], {}), '()\n', (3538, 3540), False, 'from controller.logicTopoFlowPath import LogicTopoFlowPath\n'), ((1799, 1815), 'controller.logicTopoBasin.LogicTopoBasin', 'LogicTopoBasin', ([], {}), '()\n', (1813, 1815), False, 'from controller.logicTopoBasin import LogicTopoBasin\n'), ((3937, 3958), 'controller.logicTopoLivingArea.LogicTopoLivingArea', 'LogicTopoLivingArea', ([], {}), '()\n', (3956, 3958), False, 'from controller.logicTopoLivingArea import LogicTopoLivingArea\n'), ((4377, 4403), 'controller.logicTopoAgricultureArea.LogicTopoAgricultureArea', 'LogicTopoAgricultureArea', ([], {}), '()\n', (4401, 4403), False, 'from controller.logicTopoAgricultureArea import LogicTopoAgricultureArea\n'), ((1974, 1994), 'controller.logicTopoWaterwork.LogicTopoWaterwork', 'LogicTopoWaterwork', ([], {}), '()\n', (1992, 1994), False, 'from controller.logicTopoWaterwork import LogicTopoWaterwork\n'), ((4631, 4651), 'controller.logicTopoWaterwork.LogicTopoWaterwork', 'LogicTopoWaterwork', ([], {}), '()\n', (4649, 4651), False, 'from controller.logicTopoWaterwork import LogicTopoWaterwork\n'), ((5071, 5089), 'controller.logicTopoWaterin.LogicTopoWaterin', 'LogicTopoWaterin', ([], {}), '()\n', (5087, 5089), False, 'from controller.logicTopoWaterin import LogicTopoWaterin\n'), ((5411, 5431), 'controller.logicTopoCatchment.LogicTopoCatchment', 'LogicTopoCatchment', ([], {}), '()\n', (5429, 5431), False, 'from controller.logicTopoCatchment import LogicTopoCatchment\n'), ((5843, 5863), 'controller.logicTopoPollution.LogicTopoPollution', 'LogicTopoPollution', ([], {}), '()\n', (5861, 5863), False, 'from controller.logicTopoPollution import LogicTopoPollution\n'), ((6452, 6475), 'controller.logicTopoIndustryArea.LogicTopoIndustryArea', 'LogicTopoIndustryArea', ([], {}), '()\n', (6473, 6475), False, 'from controller.logicTopoIndustryArea import LogicTopoIndustryArea\n'), ((6702, 6720), 'controller.logicTopoFactory.LogicTopoFactory', 'LogicTopoFactory', ([], {}), '()\n', (6718, 6720), False, 'from controller.logicTopoFactory import LogicTopoFactory\n'), ((6959, 6990), 'controller.logicTopoSewageTreatmentPlant.LogicTopoSewageTreatmentPlant', 'LogicTopoSewageTreatmentPlant', ([], {}), '()\n', (6988, 6990), False, 'from controller.logicTopoSewageTreatmentPlant import LogicTopoSewageTreatmentPlant\n'), ((7126, 7146), 'controller.logicTopoReservoir.LogicTopoReservoir', 'LogicTopoReservoir', ([], {}), '()\n', (7144, 7146), False, 'from controller.logicTopoReservoir import LogicTopoReservoir\n'), ((7454, 7471), 'controller.logicTopoDebris.LogicTopoDebris', 'LogicTopoDebris', ([], {}), '()\n', (7469, 7471), False, 'from controller.logicTopoDebris import LogicTopoDebris\n'), ((7858, 7880), 'controller.logicTopoRainStation.LogicTopoRainStation', 'LogicTopoRainStation', ([], {}), '()\n', 
(7878, 7880), False, 'from controller.logicTopoRainStation import LogicTopoRainStation\n'), ((8290, 8318), 'controller.logicTopoWaterLevelStation.LogicTopoWaterLevelStation', 'LogicTopoWaterLevelStation', ([], {}), '()\n', (8316, 8318), False, 'from controller.logicTopoWaterLevelStation import LogicTopoWaterLevelStation\n'), ((8729, 8752), 'controller.logicTopoFloodStation.LogicTopoFloodStation', 'LogicTopoFloodStation', ([], {}), '()\n', (8750, 8752), False, 'from controller.logicTopoFloodStation import LogicTopoFloodStation\n')]
|
from os import path
from lxml import objectify, etree
class Repo:
def __init__(self, extension):
self._extension = extension
def filepath(self, drawing, location):
return path.join(location, '{}.{}'.format(drawing.name, self._extension))
def as_svg(drawing):
    E = objectify.ElementMaker(annotate=False,
                           namespace="http://www.w3.org/2000/svg",
                           nsmap={None: "http://www.w3.org/2000/svg",
                                  'xlink': "http://www.w3.org/1999/xlink"})
root = E.svg(
E.title(drawing.name),
*[ as_svg_layer(E, layer) for layer in drawing.layers ],
width=drawing.width,
height=drawing.height,
version=drawing.version,
viewBox=drawing.view_box)
return etree.tostring(etree.ElementTree(root), xml_declaration=True, encoding="UTF-8", standalone="yes", pretty_print=True)
def as_svg_layer(E, layer):
return E.g(
* [ as_svg_stroke(E, stroke) for stroke in layer.strokes ],
id=layer.name,
opacity=layer.opacity)
def as_svg_stroke(E, stroke):
svg_stroke = getattr(E, stroke.type)()
for (key, val) in stroke.attributes.items():
svg_stroke.attrib[key] = val
return svg_stroke
def write_bytes_to(filepath, content):
with open(filepath, 'w+b') as file:
return file.write(content)
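# Hedged usage sketch (assumes a `drawing` object exposing the attributes used
# above: name, layers, width, height, version and view_box):
#   repo = Repo('svg')
#   write_bytes_to(repo.filepath(drawing, '/tmp'), as_svg(drawing))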
|
[
"lxml.etree.ElementTree",
"lxml.objectify.ElementMaker"
] |
[((281, 453), 'lxml.objectify.ElementMaker', 'objectify.ElementMaker', ([], {'annotate': '(False)', 'namespace': '"""http://www.w3.org/2000/svg"""', 'nsmap': "{None: 'http://www.w3.org/2000/svg', 'xlink': 'http://www.w3.org/1999/xlink'}"}), "(annotate=False, namespace=\n 'http://www.w3.org/2000/svg', nsmap={None: 'http://www.w3.org/2000/svg',\n 'xlink': 'http://www.w3.org/1999/xlink'})\n", (303, 453), False, 'from lxml import objectify, etree\n'), ((690, 713), 'lxml.etree.ElementTree', 'etree.ElementTree', (['root'], {}), '(root)\n', (707, 713), False, 'from lxml import objectify, etree\n')]
|
#
# Copyright 2016 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import time
import numpy as np
import sys
sys.path.append('../util')
from netFunctions import print_tensor_shape
def read_and_decode(filename_queue, size, namescope):
# input: filename
# output: image, label pair
# setup a TF record reader
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
# list the features we want to extract, i.e., the image and the label
features = tf.parse_single_example(
serialized_example,
features={
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'depth': tf.FixedLenFeature([], tf.int64),
'raw': tf.FixedLenFeature([], tf.string),
'label': tf.FixedLenFeature([], tf.string)
})
    # Decode the stored shape fields (kept for reference; the reshapes below
    # use the caller-supplied size)
height = tf.cast(features['height'], tf.int32)
width = tf.cast(features['width'], tf.int32)
depth = tf.cast(features['depth'], tf.int32)
# Decode the training image
image = tf.decode_raw(features['raw'], tf.float32)
    image_re = tf.reshape(image, size)
print_tensor_shape(image_re, namescope + ' image')
# Decode label
label = tf.decode_raw(features['label'], tf.float32)
    label_re = tf.reshape(label, size)
print_tensor_shape(label_re, namescope + ' image label')
return image_re, label_re
def inputs(batch_size, num_epochs, filenames, size, namescope="input"):
# inputs: batch_size, num_epochs are scalars, filename
# output: image and label pairs for use in training or eval
# define the input node
with tf.name_scope(namescope):
# setup a TF filename_queue
filename_queue = tf.train.string_input_producer(
filenames, num_epochs=num_epochs)
# return and image and label
image, label = read_and_decode(filename_queue, size, namescope)
        # Shuffle the images. Not strictly necessary, as the data creation
        # phase already did it, but there's no harm in doing it again.
images, labels = tf.train.shuffle_batch(
[image, label], batch_size=batch_size, num_threads=4,
capacity=50000,
min_after_dequeue=10000)
print_tensor_shape(images, namescope)
# labels = tf.one_hot(labels, 2)
print_tensor_shape(labels, namescope + ' labels')
# beta = tf.Variable(tf.constant(0.0, shape=[1]),
# name='beta', trainable=True)
# gamma = tf.Variable(tf.constant(1.0, shape=[1]),
# name='gamma', trainable=True)
# mean, variance = tf.nn.moments(images, [0])
# images = tf.nn.batch_normalization(images, mean, variance, beta, gamma, 1e-3)
return images, labels
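# Hedged usage sketch (filenames and size are illustrative; size must match the
# shape the TFRecords were written with):
#   images, labels = inputs(batch_size=16, num_epochs=10,
#                           filenames=['train.tfrecords'], size=(64, 64, 64, 1))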
def convolution2d(images, out_channels, name, relu=True, ps_device="/cpu:0", w_device="/gpu:0"):
in_channels = images.get_shape().as_list()[-1]
with tf.variable_scope(name):
# weight variable 4d tensor, first two dims are patch (kernel) size
# third dim is number of input channels and fourth dim is output channels
with tf.device(ps_device):
w_conv_name = 'w_' + name
w_conv = tf.get_variable(w_conv_name, shape=[3,3,in_channels,out_channels], dtype=tf.float32, initializer=tf.truncated_normal_initializer(mean=0,stddev=0.1))
print_tensor_shape( w_conv, 'weight shape')
b_conv_name = 'b_' + name
b_conv = tf.get_variable(b_conv_name, shape=[out_channels])
print_tensor_shape( b_conv, 'bias shape')
with tf.device(w_device):
conv_op = tf.nn.conv2d( images, w_conv, strides=[1,2,2,1], padding="SAME", name='conv1_op')
print_tensor_shape( conv_op, 'conv_op shape')
conv_op = tf.nn.bias_add(conv_op, b_conv, name='bias_add_op')
            if relu:
conv_op = tf.nn.relu( conv_op, name='relu_op' )
print_tensor_shape( conv_op, 'relu_op shape')
return conv_op
def convolution(images, name, activation=None, out_channels=1, ps_device="/cpu:0", w_device="/gpu:0", w_shape=None, strides=None, padding='SAME'):
in_channels = images.get_shape().as_list()[-1]
if w_shape is None:
w_shape = [5,5,5,in_channels,out_channels]
if strides is None:
strides = [1,2,2,2,1]
with tf.variable_scope(name):
# weight variable 4d tensor, first two dims are patch (kernel) size
# third dim is number of input channels and fourth dim is output channels
with tf.device(ps_device):
w_conv_name = 'w_' + name
w_conv = tf.get_variable(w_conv_name, shape=w_shape, dtype=tf.float32, initializer=tf.truncated_normal_initializer(mean=0,stddev=0.1))
print_tensor_shape( w_conv, name + ' weight shape')
b_conv_name = 'b_' + name
b_conv = tf.get_variable(b_conv_name, shape=w_shape[-1:])
print_tensor_shape( b_conv, name + ' bias shape')
with tf.device(w_device):
conv_op = tf.nn.conv3d( images, w_conv, strides=strides, padding=padding, name='conv1_op')
print_tensor_shape( conv_op, name + ' conv_op shape')
conv_op = tf.nn.bias_add(conv_op, b_conv, name='bias_add_op')
            if activation:
conv_op = activation( conv_op, name='activation_op' )
print_tensor_shape( conv_op, 'activation_op shape')
return conv_op
def deconvolution2d(images, output_shape, name, activation=None, ps_device="/cpu:0", w_device="/gpu:0", w_shape=None, strides=None, padding="SAME"):
with tf.variable_scope(name):
in_channels = images.get_shape()[-1]
out_channels = output_shape[-1]
if w_shape is None:
w_shape = [3,3,in_channels,out_channels]
if strides is None:
strides = [1,2,2,1]
with tf.device(ps_device):
w_deconv_name = 'w_' + name
w_deconv = tf.get_variable(w_deconv_name, shape=w_shape, dtype=tf.float32, initializer=tf.truncated_normal_initializer(mean=0,stddev=0.1))
print_tensor_shape( w_deconv, name + 'weight shape')
b_deconv_name = 'b_' + name
b_deconv = tf.get_variable(b_deconv_name, shape=[out_channels])
print_tensor_shape( b_deconv, name + 'bias shape')
with tf.device(w_device):
deconv_op = tf.nn.conv2d_transpose( images, w_deconv,
output_shape=output_shape,
# use_bias=True,
strides=strides,
padding=padding, name='deconv_op' )
print_tensor_shape( deconv_op, 'deconv_op shape')
deconv_op = tf.nn.bias_add(deconv_op, b_deconv, name='bias_add_op')
if activation:
deconv_op = activation( deconv_op, name='activation_op' )
print_tensor_shape( deconv_op, 'activation_op shape')
return deconv_op
def deconvolution(images, output_shape, name, activation=None, ps_device="/cpu:0", w_device="/gpu:0", w_shape=None, strides=None, padding="SAME"):
with tf.variable_scope(name):
in_channels = images.get_shape()[-1]
out_channels = output_shape[4]
if w_shape is None:
w_shape = [5,5,5,out_channels,in_channels]
if strides is None:
strides = [1,2,2,2,1]
with tf.device(ps_device):
w_deconv_name = 'w_' + name
w_deconv = tf.get_variable(w_deconv_name, shape=w_shape, dtype=tf.float32, initializer=tf.truncated_normal_initializer(mean=0,stddev=0.1))
print_tensor_shape( w_deconv, name + ' weight shape')
b_deconv_name = 'b_' + name
b_deconv = tf.get_variable(b_deconv_name, shape=[out_channels])
print_tensor_shape( b_deconv, name + ' bias shape')
with tf.device(w_device):
deconv_op = tf.nn.conv3d_transpose( images, w_deconv,
output_shape=output_shape,
# use_bias=True,
strides=strides,
padding=padding, name='deconv_op' )
print_tensor_shape( deconv_op, 'deconv_op shape')
deconv_op = tf.nn.bias_add(deconv_op, b_deconv, name='bias_add_op')
if activation:
deconv_op = activation( deconv_op, name='activation_op' )
print_tensor_shape( deconv_op, 'activation_op shape')
return deconv_op
def matmul(images, out_channels, name, relu=True, ps_device="/cpu:0", w_device="/gpu:0"):
with tf.variable_scope(name):
shape = images.get_shape().as_list()
with tf.device(ps_device):
w_matmul_name = 'w_' + name
w_matmul = tf.get_variable(w_matmul_name, shape=[shape[1],out_channels], dtype=tf.float32, initializer=tf.truncated_normal_initializer(mean=0,stddev=0.1))
print_tensor_shape( w_matmul, 'w_matmul shape')
b_matmul_name = 'b_' + name
b_matmul = tf.get_variable(name='b_matmul_name', shape=[out_channels])
with tf.device(w_device):
matmul_op = tf.nn.bias_add(tf.matmul(images, w_matmul), b_matmul)
if(relu):
matmul_op = tf.nn.relu(matmul_op)
return matmul_op
def generator(images, keep_prob=1, batch_size=1, regularization_constant=0.0, ps_device="/cpu:0", w_device="/gpu:0", is_training=False):
# Encoder part of the network
# input: tensor of images
# output: tensor of computed logits
# resize the image tensors to add the number of channels, 1 in this case
# required to pass the images to various layers upcoming in the graph
#print("Image size:", size)
#num_channels = size[0], depth = size[0], height = size[1], width = size[2], num_channels = size[3]
images = tf.layers.batch_normalization(images, training=is_training)
print_tensor_shape(images, "images")
# Convolution layer
conv1_op = convolution(images, "Conv1", out_channels=256, activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="SAME")
conv2_op = convolution(conv1_op, "Conv2", out_channels=512, activation=tf.nn.relu,ps_device=ps_device, w_device=w_device, padding="SAME")
# conv3_op = convolution(conv2_op, "Conv3", out_channels=1024, activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="VALID")
# conv4_op = convolution(conv3_op, "Conv4", out_channels=1280, activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="VALID")
#relu4_op = convolution(relu3_op, 4048, "Conv4", ps_device=ps_device, w_device=w_device)
# shape = conv3_op.get_shape().as_list()
# deconv1_op = deconvolution(conv4_op, shape, "Deconv1", activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="VALID")
# shape = conv2_op.get_shape().as_list()
# deconv2_op = deconvolution(conv3_op, shape, "Deconv2", activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="VALID")
shape = conv1_op.get_shape().as_list()
deconv3_op = deconvolution(conv2_op, shape, "Deconv3", activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="SAME")
shape = images.get_shape().as_list()
shape[4] = 128
deconv4_op = deconvolution(deconv3_op, shape, "Deconv4", activation=tf.nn.relu, ps_device=ps_device, w_device=w_device, padding="SAME")
with tf.device(w_device):
deconv4_op = tf.nn.dropout( deconv4_op, keep_prob )
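    # Skip connection: concatenate the raw input with the decoded features along
    # the channel axis so the 1x1x1 scoring convolution below sees both.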
deconv4_op = tf.concat([images, deconv4_op], axis=4)
convp_op = convolution(deconv4_op, "ConvScore", strides=[1, 1, 1, 1, 1], w_shape=[1, 1, 1, 129, 1], ps_device=ps_device, w_device=w_device, padding="SAME")
return convp_op
def generator2d(images, keep_prob=1, batch_size=1, regularization_constant=0.0, ps_device="/cpu:0", w_device="/gpu:0", is_training=False):
# Encoder part of the network
# input: tensor of images
# output: tensor of computed logits
# resize the image tensors to add the number of channels, 1 in this case
# required to pass the images to various layers upcoming in the graph
#print("Image size:", size)
#num_channels = size[0], depth = size[0], height = size[1], width = size[2], num_channels = size[3]
images = tf.layers.batch_normalization(images, training=is_training)
print_tensor_shape(images, "images")
# Convolution layer
relu1_op = convolution2d(images, 128, "Conv1", ps_device=ps_device, w_device=w_device)
relu2_op = convolution2d(relu1_op, 512, "Conv2", ps_device=ps_device, w_device=w_device)
with tf.device(w_device):
relu2_op = tf.nn.dropout( relu2_op, keep_prob )
relu3_op = convolution2d(relu2_op, 2048, "Conv3", ps_device=ps_device, w_device=w_device)
#relu4_op = convolution(relu3_op, 4048, "Conv4", ps_device=ps_device, w_device=w_device)
shape = relu2_op.get_shape().as_list()
deconv1_op = deconvolution2d(relu3_op, shape, "Deconv1", ps_device=ps_device, w_device=w_device)
shape = relu1_op.get_shape().as_list()
deconv2_op = deconvolution2d(deconv1_op, shape, "Deconv2", ps_device=ps_device, w_device=w_device)
with tf.device(w_device):
deconv2_op = tf.nn.dropout( deconv2_op, keep_prob )
shape = images.get_shape().as_list()
    # deconvolution2d takes `activation`, not `relu`; leaving it at the default
    # None keeps this final layer linear.
    deconv3_op = deconvolution2d(deconv2_op, shape, "Deconv3", ps_device=ps_device, w_device=w_device)
#shape = images.get_shape().as_list()
#deconv4_op = deconvolution(deconv3_op, [batch_size,shape[1],shape[2],shape[3],1], "Deconv4", relu=False, ps_device=ps_device, w_device=w_device)
return deconv3_op
def loss(logits, labels):
print_tensor_shape( logits, 'logits shape')
print_tensor_shape( labels, 'labels shape')
#labels = tf.to_int64(labels)
#loss = tf.losses.absolute_difference(predictions=logits, labels=labels)
loss = tf.losses.mean_squared_error(predictions=logits, labels=labels)
#loss = tf.losses.mean_pairwise_squared_error(predictions=logits, labels=labels)
#loss = tf.losses.huber_loss(predictions=logits, labels=labels, delta=10.0)
return loss
def training_adam(loss, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, use_locking=False, name='Adam', var_list=None):
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate,
beta1=beta1,
beta2=beta2,
epsilon=epsilon,
use_locking=use_locking,
name=name)
train_op = optimizer.minimize(loss, var_list=var_list)
return train_op
def training(loss, learning_rate, decay_steps, decay_rate, name):
# input: loss: loss tensor from loss()
# input: learning_rate: scalar for gradient descent
# output: train_op the operation for training
# Creates a summarizer to track the loss over time in TensorBoard.
# Creates an optimizer and applies the gradients to all trainable variables.
# The Op returned by this function is what must be passed to the
# `sess.run()` call to cause the model to train.
# Add a scalar summary for the snapshot loss.
# Create a variable to track the global step.
global_step = tf.Variable(0, name='global_step', trainable=False)
# create learning_decay
lr = tf.train.exponential_decay(learning_rate,
global_step,
decay_steps,
decay_rate, staircase=True )
tf.summary.scalar('2learning_rate', lr )
# Create the gradient descent optimizer with the given learning rate.
# optimizer = tf.train.GradientDescentOptimizer(learning_rate)
optimizer = tf.train.GradientDescentOptimizer(lr)
# Use the optimizer to apply the gradients that minimize the loss
# (and also increment the global step counter) as a single training step.
train_op = optimizer.minimize(loss, global_step=global_step)
return train_op
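# Hedged usage sketch (hyperparameters are illustrative):
#   train_op = training(loss_op, learning_rate=0.1, decay_steps=1000,
#                       decay_rate=0.96, name='train')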
def evaluation(logits, labels, name="accuracy"):
# accuracy = tf.metrics.accuracy(labels=labels, predictions=logits, name=name)
# tf.summary.scalar("accuracy", accuracy[0])
# return accuracy
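    # tf.metrics.root_mean_squared_error returns (metric_tensor, update_op);
    # accuracy[0] below is the running RMSE value.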
accuracy = tf.metrics.root_mean_squared_error(labels=labels, predictions=logits, name=name)
tf.summary.scalar("accuracy", accuracy[0])
return accuracy
# def evaluation(logits, labels):
# # input: logits: Logits tensor, float - [batch_size, 195, 233, NUM_CLASSES].
# # input: labels: Labels tensor, int32 - [batch_size, 195, 233]
# # output: scaler int32 tensor with number of examples that were
# # predicted correctly
# with tf.name_scope('eval'):
# print()
# print_tensor_shape(logits, 'logits eval shape before')
# print_tensor_shape(labels, 'labels eval shape before')
# # reshape to match args required for the top_k function
# logits_re = tf.reshape(logits, [-1])
# print_tensor_shape(logits_re, 'logits_re eval shape after')
# labels_re = tf.reshape(labels, [-1])
# print_tensor_shape(labels_re, 'labels_re eval shape after')
# # get accuracy :
# diff = tf.sub(labels_re,logits_re)
# acc = tf.div(tf.reduce_mean(diff), 195.0*233.0)
# acc = 1 - acc
# # get accuracy :
# diff = tf.abs(tf.sub(labels_re,logits_re))
# lessthan0_01 = tf.less_equal(diff, 0.01)
# sum = tf.reduce_sum(tf.cast(lessthan0_01, tf.float32))
# acc2 = tf.div(sum, 195.0*233.0)
# print(acc)
# # Return the tuple of intersection, label and example areas
# labels_re = tf.cast(labels_re, tf.float32)
# indices_re = tf.cast(logits_re, tf.float32)
# return indices_re, labels_re, acc2
|
[
"tensorflow.train.shuffle_batch",
"tensorflow.reshape",
"tensorflow.decode_raw",
"tensorflow.matmul",
"tensorflow.Variable",
"tensorflow.nn.conv2d",
"tensorflow.nn.conv2d_transpose",
"tensorflow.layers.batch_normalization",
"tensorflow.get_variable",
"sys.path.append",
"tensorflow.nn.relu",
"tensorflow.concat",
"tensorflow.variable_scope",
"tensorflow.cast",
"tensorflow.TFRecordReader",
"tensorflow.name_scope",
"tensorflow.train.string_input_producer",
"tensorflow.truncated_normal_initializer",
"tensorflow.nn.bias_add",
"tensorflow.losses.mean_squared_error",
"tensorflow.summary.scalar",
"tensorflow.nn.conv3d_transpose",
"tensorflow.train.GradientDescentOptimizer",
"tensorflow.train.exponential_decay",
"tensorflow.device",
"tensorflow.nn.conv3d",
"netFunctions.print_tensor_shape",
"tensorflow.FixedLenFeature",
"tensorflow.train.AdamOptimizer",
"tensorflow.nn.dropout",
"tensorflow.metrics.root_mean_squared_error"
] |
[((765, 791), 'sys.path.append', 'sys.path.append', (['"""../util"""'], {}), "('../util')\n", (780, 791), False, 'import sys\n'), ((992, 1011), 'tensorflow.TFRecordReader', 'tf.TFRecordReader', ([], {}), '()\n', (1009, 1011), True, 'import tensorflow as tf\n'), ((1563, 1600), 'tensorflow.cast', 'tf.cast', (["features['height']", 'tf.int32'], {}), "(features['height'], tf.int32)\n", (1570, 1600), True, 'import tensorflow as tf\n'), ((1613, 1649), 'tensorflow.cast', 'tf.cast', (["features['width']", 'tf.int32'], {}), "(features['width'], tf.int32)\n", (1620, 1649), True, 'import tensorflow as tf\n'), ((1662, 1698), 'tensorflow.cast', 'tf.cast', (["features['depth']", 'tf.int32'], {}), "(features['depth'], tf.int32)\n", (1669, 1698), True, 'import tensorflow as tf\n'), ((1744, 1786), 'tensorflow.decode_raw', 'tf.decode_raw', (["features['raw']", 'tf.float32'], {}), "(features['raw'], tf.float32)\n", (1757, 1786), True, 'import tensorflow as tf\n'), ((1806, 1829), 'tensorflow.reshape', 'tf.reshape', (['image', 'size'], {}), '(image, size)\n', (1816, 1829), True, 'import tensorflow as tf\n'), ((1836, 1886), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['image_re', "(namescope + ' image')"], {}), "(image_re, namescope + ' image')\n", (1854, 1886), False, 'from netFunctions import print_tensor_shape\n'), ((1919, 1963), 'tensorflow.decode_raw', 'tf.decode_raw', (["features['label']", 'tf.float32'], {}), "(features['label'], tf.float32)\n", (1932, 1963), True, 'import tensorflow as tf\n'), ((1983, 2006), 'tensorflow.reshape', 'tf.reshape', (['label', 'size'], {}), '(label, size)\n', (1993, 2006), True, 'import tensorflow as tf\n'), ((2013, 2069), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['label_re', "(namescope + ' image label')"], {}), "(label_re, namescope + ' image label')\n", (2031, 2069), False, 'from netFunctions import print_tensor_shape\n'), ((10556, 10615), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['images'], {'training': 'is_training'}), '(images, training=is_training)\n', (10585, 10615), True, 'import tensorflow as tf\n'), ((10625, 10661), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['images', '"""images"""'], {}), "(images, 'images')\n", (10643, 10661), False, 'from netFunctions import print_tensor_shape\n'), ((12227, 12266), 'tensorflow.concat', 'tf.concat', (['[images, deconv4_op]'], {'axis': '(4)'}), '([images, deconv4_op], axis=4)\n', (12236, 12266), True, 'import tensorflow as tf\n'), ((12989, 13048), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['images'], {'training': 'is_training'}), '(images, training=is_training)\n', (13018, 13048), True, 'import tensorflow as tf\n'), ((13058, 13094), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['images', '"""images"""'], {}), "(images, 'images')\n", (13076, 13094), False, 'from netFunctions import print_tensor_shape\n'), ((14369, 14411), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['logits', '"""logits shape"""'], {}), "(logits, 'logits shape')\n", (14387, 14411), False, 'from netFunctions import print_tensor_shape\n'), ((14417, 14459), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['labels', '"""labels shape"""'], {}), "(labels, 'labels shape')\n", (14435, 14459), False, 'from netFunctions import print_tensor_shape\n'), ((14585, 14648), 'tensorflow.losses.mean_squared_error', 'tf.losses.mean_squared_error', ([], {'predictions': 'logits', 'labels': 'labels'}), '(predictions=logits, 
labels=labels)\n', (14613, 14648), True, 'import tensorflow as tf\n'), ((14980, 15115), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate', 'beta1': 'beta1', 'beta2': 'beta2', 'epsilon': 'epsilon', 'use_locking': 'use_locking', 'name': 'name'}), '(learning_rate=learning_rate, beta1=beta1, beta2=\n beta2, epsilon=epsilon, use_locking=use_locking, name=name)\n', (15002, 15115), True, 'import tensorflow as tf\n'), ((15838, 15889), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'name': '"""global_step"""', 'trainable': '(False)'}), "(0, name='global_step', trainable=False)\n", (15849, 15889), True, 'import tensorflow as tf\n'), ((15926, 16025), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['learning_rate', 'global_step', 'decay_steps', 'decay_rate'], {'staircase': '(True)'}), '(learning_rate, global_step, decay_steps,\n decay_rate, staircase=True)\n', (15952, 16025), True, 'import tensorflow as tf\n'), ((16139, 16178), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""2learning_rate"""', 'lr'], {}), "('2learning_rate', lr)\n", (16156, 16178), True, 'import tensorflow as tf\n'), ((16335, 16372), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['lr'], {}), '(lr)\n', (16368, 16372), True, 'import tensorflow as tf\n'), ((16828, 16913), 'tensorflow.metrics.root_mean_squared_error', 'tf.metrics.root_mean_squared_error', ([], {'labels': 'labels', 'predictions': 'logits', 'name': 'name'}), '(labels=labels, predictions=logits, name=name\n )\n', (16862, 16913), True, 'import tensorflow as tf\n'), ((16913, 16955), 'tensorflow.summary.scalar', 'tf.summary.scalar', (['"""accuracy"""', 'accuracy[0]'], {}), "('accuracy', accuracy[0])\n", (16930, 16955), True, 'import tensorflow as tf\n'), ((2340, 2364), 'tensorflow.name_scope', 'tf.name_scope', (['namescope'], {}), '(namescope)\n', (2353, 2364), True, 'import tensorflow as tf\n'), ((2427, 2491), 'tensorflow.train.string_input_producer', 'tf.train.string_input_producer', (['filenames'], {'num_epochs': 'num_epochs'}), '(filenames, num_epochs=num_epochs)\n', (2457, 2491), True, 'import tensorflow as tf\n'), ((2783, 2904), 'tensorflow.train.shuffle_batch', 'tf.train.shuffle_batch', (['[image, label]'], {'batch_size': 'batch_size', 'num_threads': '(4)', 'capacity': '(50000)', 'min_after_dequeue': '(10000)'}), '([image, label], batch_size=batch_size, num_threads=4,\n capacity=50000, min_after_dequeue=10000)\n', (2805, 2904), True, 'import tensorflow as tf\n'), ((2947, 2984), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['images', 'namescope'], {}), '(images, namescope)\n', (2965, 2984), False, 'from netFunctions import print_tensor_shape\n'), ((3034, 3083), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['labels', "(namescope + ' labels')"], {}), "(labels, namescope + ' labels')\n", (3052, 3083), False, 'from netFunctions import print_tensor_shape\n'), ((3674, 3697), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (3691, 3697), True, 'import tensorflow as tf\n'), ((5104, 5127), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (5121, 5127), True, 'import tensorflow as tf\n'), ((6369, 6392), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (6386, 6392), True, 'import tensorflow as tf\n'), ((7848, 7871), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (7865, 7871), True, 'import tensorflow as tf\n'), ((9276, 9299), 
'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (9293, 9299), True, 'import tensorflow as tf\n'), ((12128, 12147), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (12137, 12147), True, 'import tensorflow as tf\n'), ((12170, 12206), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['deconv4_op', 'keep_prob'], {}), '(deconv4_op, keep_prob)\n', (12183, 12206), True, 'import tensorflow as tf\n'), ((13311, 13330), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (13320, 13330), True, 'import tensorflow as tf\n'), ((13351, 13385), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['relu2_op', 'keep_prob'], {}), '(relu2_op, keep_prob)\n', (13364, 13385), True, 'import tensorflow as tf\n'), ((13879, 13898), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (13888, 13898), True, 'import tensorflow as tf\n'), ((13921, 13957), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['deconv2_op', 'keep_prob'], {}), '(deconv2_op, keep_prob)\n', (13934, 13957), True, 'import tensorflow as tf\n'), ((3861, 3881), 'tensorflow.device', 'tf.device', (['ps_device'], {}), '(ps_device)\n', (3870, 3881), True, 'import tensorflow as tf\n'), ((4104, 4146), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['w_conv', '"""weight shape"""'], {}), "(w_conv, 'weight shape')\n", (4122, 4146), False, 'from netFunctions import print_tensor_shape\n'), ((4208, 4258), 'tensorflow.get_variable', 'tf.get_variable', (['b_conv_name'], {'shape': '[out_channels]'}), '(b_conv_name, shape=[out_channels])\n', (4223, 4258), True, 'import tensorflow as tf\n'), ((4271, 4311), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['b_conv', '"""bias shape"""'], {}), "(b_conv, 'bias shape')\n", (4289, 4311), False, 'from netFunctions import print_tensor_shape\n'), ((4327, 4346), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (4336, 4346), True, 'import tensorflow as tf\n'), ((4370, 4458), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['images', 'w_conv'], {'strides': '[1, 2, 2, 1]', 'padding': '"""SAME"""', 'name': '"""conv1_op"""'}), "(images, w_conv, strides=[1, 2, 2, 1], padding='SAME', name=\n 'conv1_op')\n", (4382, 4458), True, 'import tensorflow as tf\n'), ((4464, 4508), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['conv_op', '"""conv_op shape"""'], {}), "(conv_op, 'conv_op shape')\n", (4482, 4508), False, 'from netFunctions import print_tensor_shape\n'), ((4533, 4584), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['conv_op', 'b_conv'], {'name': '"""bias_add_op"""'}), "(conv_op, b_conv, name='bias_add_op')\n", (4547, 4584), True, 'import tensorflow as tf\n'), ((5291, 5311), 'tensorflow.device', 'tf.device', (['ps_device'], {}), '(ps_device)\n', (5300, 5311), True, 'import tensorflow as tf\n'), ((5511, 5561), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['w_conv', "(name + ' weight shape')"], {}), "(w_conv, name + ' weight shape')\n", (5529, 5561), False, 'from netFunctions import print_tensor_shape\n'), ((5623, 5671), 'tensorflow.get_variable', 'tf.get_variable', (['b_conv_name'], {'shape': 'w_shape[-1:]'}), '(b_conv_name, shape=w_shape[-1:])\n', (5638, 5671), True, 'import tensorflow as tf\n'), ((5684, 5732), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['b_conv', "(name + ' bias shape')"], {}), "(b_conv, name + ' bias shape')\n", (5702, 5732), False, 'from netFunctions import print_tensor_shape\n'), ((5748, 5767), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (5757, 
5767), True, 'import tensorflow as tf\n'), ((5791, 5870), 'tensorflow.nn.conv3d', 'tf.nn.conv3d', (['images', 'w_conv'], {'strides': 'strides', 'padding': 'padding', 'name': '"""conv1_op"""'}), "(images, w_conv, strides=strides, padding=padding, name='conv1_op')\n", (5803, 5870), True, 'import tensorflow as tf\n'), ((5884, 5936), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['conv_op', "(name + ' conv_op shape')"], {}), "(conv_op, name + ' conv_op shape')\n", (5902, 5936), False, 'from netFunctions import print_tensor_shape\n'), ((5961, 6012), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['conv_op', 'b_conv'], {'name': '"""bias_add_op"""'}), "(conv_op, b_conv, name='bias_add_op')\n", (5975, 6012), True, 'import tensorflow as tf\n'), ((6638, 6658), 'tensorflow.device', 'tf.device', (['ps_device'], {}), '(ps_device)\n', (6647, 6658), True, 'import tensorflow as tf\n'), ((6863, 6914), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['w_deconv', "(name + 'weight shape')"], {}), "(w_deconv, name + 'weight shape')\n", (6881, 6914), False, 'from netFunctions import print_tensor_shape\n'), ((6980, 7032), 'tensorflow.get_variable', 'tf.get_variable', (['b_deconv_name'], {'shape': '[out_channels]'}), '(b_deconv_name, shape=[out_channels])\n', (6995, 7032), True, 'import tensorflow as tf\n'), ((7045, 7094), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['b_deconv', "(name + 'bias shape')"], {}), "(b_deconv, name + 'bias shape')\n", (7063, 7094), False, 'from netFunctions import print_tensor_shape\n'), ((7110, 7129), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (7119, 7129), True, 'import tensorflow as tf\n'), ((7156, 7280), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (['images', 'w_deconv'], {'output_shape': 'output_shape', 'strides': 'strides', 'padding': 'padding', 'name': '"""deconv_op"""'}), "(images, w_deconv, output_shape=output_shape, strides\n =strides, padding=padding, name='deconv_op')\n", (7178, 7280), True, 'import tensorflow as tf\n'), ((7373, 7421), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['deconv_op', '"""deconv_op shape"""'], {}), "(deconv_op, 'deconv_op shape')\n", (7391, 7421), False, 'from netFunctions import print_tensor_shape\n'), ((7448, 7503), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['deconv_op', 'b_deconv'], {'name': '"""bias_add_op"""'}), "(deconv_op, b_deconv, name='bias_add_op')\n", (7462, 7503), True, 'import tensorflow as tf\n'), ((7610, 7662), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['deconv_op', '"""activation_op shape"""'], {}), "(deconv_op, 'activation_op shape')\n", (7628, 7662), False, 'from netFunctions import print_tensor_shape\n'), ((8120, 8140), 'tensorflow.device', 'tf.device', (['ps_device'], {}), '(ps_device)\n', (8129, 8140), True, 'import tensorflow as tf\n'), ((8345, 8397), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['w_deconv', "(name + ' weight shape')"], {}), "(w_deconv, name + ' weight shape')\n", (8363, 8397), False, 'from netFunctions import print_tensor_shape\n'), ((8463, 8515), 'tensorflow.get_variable', 'tf.get_variable', (['b_deconv_name'], {'shape': '[out_channels]'}), '(b_deconv_name, shape=[out_channels])\n', (8478, 8515), True, 'import tensorflow as tf\n'), ((8528, 8578), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['b_deconv', "(name + ' bias shape')"], {}), "(b_deconv, name + ' bias shape')\n", (8546, 8578), False, 'from netFunctions import print_tensor_shape\n'), ((8594, 8613), 
'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (8603, 8613), True, 'import tensorflow as tf\n'), ((8640, 8764), 'tensorflow.nn.conv3d_transpose', 'tf.nn.conv3d_transpose', (['images', 'w_deconv'], {'output_shape': 'output_shape', 'strides': 'strides', 'padding': 'padding', 'name': '"""deconv_op"""'}), "(images, w_deconv, output_shape=output_shape, strides\n =strides, padding=padding, name='deconv_op')\n", (8662, 8764), True, 'import tensorflow as tf\n'), ((8857, 8905), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['deconv_op', '"""deconv_op shape"""'], {}), "(deconv_op, 'deconv_op shape')\n", (8875, 8905), False, 'from netFunctions import print_tensor_shape\n'), ((8932, 8987), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['deconv_op', 'b_deconv'], {'name': '"""bias_add_op"""'}), "(deconv_op, b_deconv, name='bias_add_op')\n", (8946, 8987), True, 'import tensorflow as tf\n'), ((9094, 9146), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['deconv_op', '"""activation_op shape"""'], {}), "(deconv_op, 'activation_op shape')\n", (9112, 9146), False, 'from netFunctions import print_tensor_shape\n'), ((9361, 9381), 'tensorflow.device', 'tf.device', (['ps_device'], {}), '(ps_device)\n', (9370, 9381), True, 'import tensorflow as tf\n'), ((9611, 9657), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['w_matmul', '"""w_matmul shape"""'], {}), "(w_matmul, 'w_matmul shape')\n", (9629, 9657), False, 'from netFunctions import print_tensor_shape\n'), ((9731, 9790), 'tensorflow.get_variable', 'tf.get_variable', ([], {'name': '"""b_matmul_name"""', 'shape': '[out_channels]'}), "(name='b_matmul_name', shape=[out_channels])\n", (9746, 9790), True, 'import tensorflow as tf\n'), ((9813, 9832), 'tensorflow.device', 'tf.device', (['w_device'], {}), '(w_device)\n', (9822, 9832), True, 'import tensorflow as tf\n'), ((1252, 1284), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.int64'], {}), '([], tf.int64)\n', (1270, 1284), True, 'import tensorflow as tf\n'), ((1307, 1339), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.int64'], {}), '([], tf.int64)\n', (1325, 1339), True, 'import tensorflow as tf\n'), ((1362, 1394), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.int64'], {}), '([], tf.int64)\n', (1380, 1394), True, 'import tensorflow as tf\n'), ((1415, 1448), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.string'], {}), '([], tf.string)\n', (1433, 1448), True, 'import tensorflow as tf\n'), ((1471, 1504), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['[]', 'tf.string'], {}), '([], tf.string)\n', (1489, 1504), True, 'import tensorflow as tf\n'), ((4634, 4669), 'tensorflow.nn.relu', 'tf.nn.relu', (['conv_op'], {'name': '"""relu_op"""'}), "(conv_op, name='relu_op')\n", (4644, 4669), True, 'import tensorflow as tf\n'), ((4689, 4733), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['conv_op', '"""relu_op shape"""'], {}), "(conv_op, 'relu_op shape')\n", (4707, 4733), False, 'from netFunctions import print_tensor_shape\n'), ((6129, 6179), 'netFunctions.print_tensor_shape', 'print_tensor_shape', (['conv_op', '"""activation_op shape"""'], {}), "(conv_op, 'activation_op shape')\n", (6147, 6179), False, 'from netFunctions import print_tensor_shape\n'), ((9874, 9901), 'tensorflow.matmul', 'tf.matmul', (['images', 'w_matmul'], {}), '(images, w_matmul)\n', (9883, 9901), True, 'import tensorflow as tf\n'), ((9964, 9985), 'tensorflow.nn.relu', 'tf.nn.relu', (['matmul_op'], {}), 
'(matmul_op)\n', (9974, 9985), True, 'import tensorflow as tf\n'), ((4040, 4091), 'tensorflow.truncated_normal_initializer', 'tf.truncated_normal_initializer', ([], {'mean': '(0)', 'stddev': '(0.1)'}), '(mean=0, stddev=0.1)\n', (4071, 4091), True, 'import tensorflow as tf\n'), ((5447, 5498), 'tensorflow.truncated_normal_initializer', 'tf.truncated_normal_initializer', ([], {'mean': '(0)', 'stddev': '(0.1)'}), '(mean=0, stddev=0.1)\n', (5478, 5498), True, 'import tensorflow as tf\n'), ((6799, 6850), 'tensorflow.truncated_normal_initializer', 'tf.truncated_normal_initializer', ([], {'mean': '(0)', 'stddev': '(0.1)'}), '(mean=0, stddev=0.1)\n', (6830, 6850), True, 'import tensorflow as tf\n'), ((8281, 8332), 'tensorflow.truncated_normal_initializer', 'tf.truncated_normal_initializer', ([], {'mean': '(0)', 'stddev': '(0.1)'}), '(mean=0, stddev=0.1)\n', (8312, 8332), True, 'import tensorflow as tf\n'), ((9538, 9589), 'tensorflow.truncated_normal_initializer', 'tf.truncated_normal_initializer', ([], {'mean': '(0)', 'stddev': '(0.1)'}), '(mean=0, stddev=0.1)\n', (9569, 9589), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from django.db.models import ForeignKey
from django.urls import Resolver404, resolve
from django.utils.translation import get_language_from_request, override
from cms.apphook_pool import apphook_pool
from app_data import AppDataContainer, app_registry
# making key app/model specific to avoid inheritance issues
APP_CONFIG_FIELDS_KEY = '_app_config_field_names_{app_label}_{model_name}'
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None) and request.current_page.application_urls:
app = apphook_pool.get_apphook(request.current_page.application_urls)
if app and app.app_config:
try:
config = None
with override(get_language_from_request(request, check_path=True)):
namespace = resolve(request.path_info).namespace
config = app.get_config(namespace)
return namespace, config
except Resolver404:
pass
return '', None
def setup_config(form_class, config_model=None):
"""
Register the provided form as config form for the provided config model
This can be used as a decorator by adding a `model` attribute to the config form::
@setup_config
class ExampleConfigForm(AppDataForm):
model = ExampleConfig
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
# allow use as a decorator
if config_model is None:
return setup_config(form_class, form_class.model)
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
def _get_apphook_field_names(model):
"""
    Return all foreign key field names for an AppHookConfig-based model
"""
from .models import AppHookConfig # avoid circular dependencies
fields = []
for field in model._meta.fields:
if isinstance(field, ForeignKey) and issubclass(field.remote_field.model, AppHookConfig):
fields.append(field)
return [field.name for field in fields]
def get_apphook_field_names(model):
"""
Cache app-hook field names on model
:param model: model class or object
:return: list of foreign key field names to AppHookConfigs
"""
key = APP_CONFIG_FIELDS_KEY.format(
app_label=model._meta.app_label,
model_name=model._meta.object_name
).lower()
if not hasattr(model, key):
field_names = _get_apphook_field_names(model)
setattr(model, key, field_names)
return getattr(model, key)
def get_apphook_configs(obj):
"""
Get apphook configs for an object obj
:param obj: any model instance
:return: list of apphook configs for given obj
"""
keys = get_apphook_field_names(obj)
return [getattr(obj, key) for key in keys] if keys else []
def get_apphook_model(model, app_config_attribute):
"""
Return the AppHookConfig model for the provided main model
:param model: Main model
:param app_config_attribute: Fieldname of the app_config
:return: app_config model
"""
return model._meta.get_field(app_config_attribute).remote_field.model
|
[
"app_data.AppDataContainer.from_form",
"cms.apphook_pool.apphook_pool.get_apphook",
"django.utils.translation.get_language_from_request",
"django.urls.resolve"
] |
[((806, 869), 'cms.apphook_pool.apphook_pool.get_apphook', 'apphook_pool.get_apphook', (['request.current_page.application_urls'], {}), '(request.current_page.application_urls)\n', (830, 869), False, 'from cms.apphook_pool import apphook_pool\n'), ((1864, 1902), 'app_data.AppDataContainer.from_form', 'AppDataContainer.from_form', (['form_class'], {}), '(form_class)\n', (1890, 1902), False, 'from app_data import AppDataContainer, app_registry\n'), ((966, 1017), 'django.utils.translation.get_language_from_request', 'get_language_from_request', (['request'], {'check_path': '(True)'}), '(request, check_path=True)\n', (991, 1017), False, 'from django.utils.translation import get_language_from_request, override\n'), ((1048, 1074), 'django.urls.resolve', 'resolve', (['request.path_info'], {}), '(request.path_info)\n', (1055, 1074), False, 'from django.urls import Resolver404, resolve\n')]
|
import Utilities.HypixelAPI as HypixelAPI
import Utilities.MojangAPI as MojangAPI
import Utilities.Weights.playerStore as playerStore
from Constants import *
from dotenv import load_dotenv, dotenv_values
import json
import os
load_dotenv()
API_KEY = dotenv_values('.env')["API_KEY"]
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
def getBreakdownFormatted(username):
breakdown = getWeight(username)[1]
return '\n'.join([f"{item}: {breakdown[item]}" for item in breakdown])
def getBreakdownMAXFormatted():
score, breakdown = maxStats()
return '\n'.join([f"{item}: {breakdown[item]}" for item in breakdown])
def getWeightUUID(uuid):
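    # Fetch all stranded-profile data for this UUID and return the best score among the profiles the player belongs to.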
if not uuid: return (0,{})
try: stranded_data = HypixelAPI.getStrandedData(uuid)
except: return (0, {})
try:
weights = [getStrandedWeight(member_data["members"][uuid]) for member_data in stranded_data]
if not len(weights): return (0,{})
return max(weights)
except Exception as e: return (0,{})
def getWeight(username):
uuid = MojangAPI.getUUIDFromUsername(username)
if not uuid: return (0,{})
weight, breakdown = getWeightUUID(uuid)
playerStore.storePlayerScore(username, weight, breakdown)
return weight, breakdown
def maxStats(): return getStrandedWeight({}, max=True)
def generateWeightParts():
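    # Precompute, for each weight part: the raw grind time (maxXP / XP-per-hour), an
    # effort- and coin-adjusted "real" time, and a curve-adjusted XP cap used for normalization.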
with open(WEIGHT_PARTS, "r") as f: weight_parts = json.load(f)
for part in weight_parts:
part["time"] = part["maxXP"] / part["XPh"]
part["real_time"] = (part["time"] * part["effort"]) + part["coin_cost"]
part["maxXP"] = part["maxXP"] ** part["curve"]
return weight_parts, sum([part["real_time"] for part in weight_parts]), 100000
def getStrandedWeight(profileData, max = False):
weight_parts, totalTime, max_score = generateWeightParts()
score_breakdown = {}
total_score = 0
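    # Each part contributes its share of the total grind time, scaled by how close the
    # player's (curve-adjusted) XP is to that part's cap.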
for part in weight_parts:
name = str(part["name"]).lower()
if max: xp_name = part["maxXP"]
else:
if name == "minions": xp_name = len(profileData[f"crafted_generators"])
else:
try: xp_name = profileData[f"experience_skill_{name}"]
except:
try: xp_name = profileData["slayer_bosses"][name]["xp"]
except: xp_name = 0
xp_name = min(xp_name ** part["curve"], part["maxXP"])
score_breakdown[name] = round((part["real_time"] / totalTime) * max_score * (xp_name / part["maxXP"]))
total_score += score_breakdown[name]
return round(total_score), score_breakdown
|
[
"json.load",
"Utilities.Weights.playerStore.storePlayerScore",
"os.path.realpath",
"Utilities.HypixelAPI.getStrandedData",
"dotenv.load_dotenv",
"Utilities.MojangAPI.getUUIDFromUsername",
"dotenv.dotenv_values"
] |
[((235, 248), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (246, 248), False, 'from dotenv import load_dotenv, dotenv_values\n'), ((260, 281), 'dotenv.dotenv_values', 'dotenv_values', (['""".env"""'], {}), "('.env')\n", (273, 281), False, 'from dotenv import load_dotenv, dotenv_values\n'), ((321, 347), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (337, 347), False, 'import os\n'), ((1021, 1060), 'Utilities.MojangAPI.getUUIDFromUsername', 'MojangAPI.getUUIDFromUsername', (['username'], {}), '(username)\n', (1050, 1060), True, 'import Utilities.MojangAPI as MojangAPI\n'), ((1134, 1191), 'Utilities.Weights.playerStore.storePlayerScore', 'playerStore.storePlayerScore', (['username', 'weight', 'breakdown'], {}), '(username, weight, breakdown)\n', (1162, 1191), True, 'import Utilities.Weights.playerStore as playerStore\n'), ((723, 755), 'Utilities.HypixelAPI.getStrandedData', 'HypixelAPI.getStrandedData', (['uuid'], {}), '(uuid)\n', (749, 755), True, 'import Utilities.HypixelAPI as HypixelAPI\n'), ((1359, 1371), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1368, 1371), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
import os
import time
import gym
import numpy as np
import matplotlib.pyplot as plt
from gym.envs.classic_control.cartpole import *
from gym.envs.mujoco import *
import pandas as pd
# from gym.envs.registration import *
# from gym.wrappers.time_limit import TimeLimit
env_name = 'Walker2d-v2'
env = gym.make(env_name)
# print('Observation space: ', env.observation_space)
# print('Action space: ', env.action_space)
# print('Observation space low: ', env.observation_space.low)
# print('Observation space high: ', env.observation_space.high)
# print('Action space low: ', env.action_space.low)
# print('Action space high: ', env.action_space.high)
state = env.reset()
print(state)
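# Render the environment and step it with random actions, pausing 1 s per step so each frame is visible.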
while True:
env.render()
time.sleep(1)
state, reward, done, _ = env.step(env.action_space.sample())
env.close()
##### Render environment for a specific state #####
#
# env = CartPoleEnv()
# env.state = np.array([0.03971514, -0.01205, 0.039588, -0.00371212])
# # state = env.reset()
# while True:
# env.render()
# env = Walker2dEnv()
#
# state = env.reset()
# env.step(np.random.rand(6))
# print('qpos: ', env.sim.data.qpos)
# print('qvel: ', env.sim.data.qvel)
# print('state: ', env._get_obs())
# file_name_gt = 'GroundTruth_2019-01-11_20-01-56.csv'
# file_name_pred = 'Prediction_2019-01-11_20-01-56.csv'
# env_label = 'CartPole'
# df_test = pd.read_csv('./GN v1.5/results/' + env_label + '/test/' + file_name_pred, sep=',', header=0)
# env = Walker2dEnv()
# df = df_test.as_matrix()
# i = 300
# # _,_,s2,s3,s0,s1,_,_ = df[i,:]
# # env.state = np.array([s0,s1,s2,s3])
# while True:
# env.render()
|
[
"gym.make",
"time.sleep"
] |
[((325, 343), 'gym.make', 'gym.make', (['env_name'], {}), '(env_name)\n', (333, 343), False, 'import gym\n'), ((742, 755), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (752, 755), False, 'import time\n')]
|
import sys
from argparse import ArgumentParser, Namespace
from logging import basicConfig, DEBUG, INFO, ERROR, info, error
from typing import List
from pyerge import tmerge, utils, __version__
def run_parser() -> None:
"""
Function to collect command line arguments.
Construct main object with correct set of parameters.
"""
parser = ArgumentParser(description='Emerge in temporary RAM disk')
parser.add_argument('-s', '--size', action='store', dest='size', default='4G', help='Size or RAM disk, default 4G')
parser.add_argument('-l', '--check_local', action='store_true', dest='local', default=False, help='check locally')
parser.add_argument('-d', '--clean-print', action='store_true', dest='deep_print', default=False, help='print deep clean info after emerge')
parser.add_argument('-c', '--clean-run', action='store_true', dest='deep_run', default=False, help='run deep clean after emerge')
parser.add_argument('-w', '--world', action='store_true', dest='world', default=False, help='run emerge -NDu @world')
parser.add_argument('-r', '--pretend_world', action='store_true', dest='pretend_world', default=False, help='run emerge -pvNDu @world')
parser.add_argument('-q', '--quiet', action='store_true', dest='quiet', default=False, help='no output from pyerge itself only form other tools')
parser.add_argument('-v', '--verbose', action='count', dest='verbose', default=0, help='Increase output verbosity')
parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument('action', help='check or emerge')
opts, emerge_opts = parser.parse_known_args()
level = DEBUG if opts.verbose else INFO
if opts.quiet:
level = ERROR
basicConfig(format='%(asctime)s | %(levelname)-6s | %(message)s', level=level)
if opts.action not in ['check', 'emerge']:
error(f'Wrong options: {opts} {emerge_opts}')
sys.exit(1)
main_exec(opts, emerge_opts)
def main_exec(opts: Namespace, emerge_opts: List[str]) -> None:
"""
Main execution function.
:param opts: cli arguments
    :param emerge_opts: list of arguments for emerge
"""
if opts.world:
emerge_opts = ['--with-bdeps=y', '--keep-going=y', '--newuse', '--deep', '--update', '@world']
if opts.pretend_world:
emerge_opts = ['--with-bdeps=y', '--pretend', '--verbose', '--newuse', '--deep', '--update', '@world']
info(f'Pyerge version: {__version__}')
opts.online = utils.is_internet_connected()
if not tmerge.is_portage_running():
utils.set_portage_tmpdir()
utils.handling_mounting(opts)
tmerge.run_emerge(emerge_opts, opts)
tmerge.run_check(opts)
utils.unmounttmpfs(opts)
else:
info('emerge already running!')
|
[
"logging.error",
"argparse.ArgumentParser",
"logging.basicConfig",
"pyerge.tmerge.is_portage_running",
"pyerge.utils.set_portage_tmpdir",
"pyerge.tmerge.run_check",
"pyerge.utils.is_internet_connected",
"pyerge.utils.handling_mounting",
"logging.info",
"pyerge.utils.unmounttmpfs",
"pyerge.tmerge.run_emerge",
"sys.exit"
] |
[((358, 416), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Emerge in temporary RAM disk"""'}), "(description='Emerge in temporary RAM disk')\n", (372, 416), False, 'from argparse import ArgumentParser, Namespace\n'), ((1760, 1838), 'logging.basicConfig', 'basicConfig', ([], {'format': '"""%(asctime)s | %(levelname)-6s | %(message)s"""', 'level': 'level'}), "(format='%(asctime)s | %(levelname)-6s | %(message)s', level=level)\n", (1771, 1838), False, 'from logging import basicConfig, DEBUG, INFO, ERROR, info, error\n'), ((2452, 2490), 'logging.info', 'info', (['f"""Pyerge version: {__version__}"""'], {}), "(f'Pyerge version: {__version__}')\n", (2456, 2490), False, 'from logging import basicConfig, DEBUG, INFO, ERROR, info, error\n'), ((2509, 2538), 'pyerge.utils.is_internet_connected', 'utils.is_internet_connected', ([], {}), '()\n', (2536, 2538), False, 'from pyerge import tmerge, utils, __version__\n'), ((1894, 1939), 'logging.error', 'error', (['f"""Wrong options: {opts} {emerge_opts}"""'], {}), "(f'Wrong options: {opts} {emerge_opts}')\n", (1899, 1939), False, 'from logging import basicConfig, DEBUG, INFO, ERROR, info, error\n'), ((1948, 1959), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1956, 1959), False, 'import sys\n'), ((2551, 2578), 'pyerge.tmerge.is_portage_running', 'tmerge.is_portage_running', ([], {}), '()\n', (2576, 2578), False, 'from pyerge import tmerge, utils, __version__\n'), ((2588, 2614), 'pyerge.utils.set_portage_tmpdir', 'utils.set_portage_tmpdir', ([], {}), '()\n', (2612, 2614), False, 'from pyerge import tmerge, utils, __version__\n'), ((2623, 2652), 'pyerge.utils.handling_mounting', 'utils.handling_mounting', (['opts'], {}), '(opts)\n', (2646, 2652), False, 'from pyerge import tmerge, utils, __version__\n'), ((2661, 2697), 'pyerge.tmerge.run_emerge', 'tmerge.run_emerge', (['emerge_opts', 'opts'], {}), '(emerge_opts, opts)\n', (2678, 2697), False, 'from pyerge import tmerge, utils, __version__\n'), ((2706, 2728), 'pyerge.tmerge.run_check', 'tmerge.run_check', (['opts'], {}), '(opts)\n', (2722, 2728), False, 'from pyerge import tmerge, utils, __version__\n'), ((2737, 2761), 'pyerge.utils.unmounttmpfs', 'utils.unmounttmpfs', (['opts'], {}), '(opts)\n', (2755, 2761), False, 'from pyerge import tmerge, utils, __version__\n'), ((2780, 2811), 'logging.info', 'info', (['"""emerge already running!"""'], {}), "('emerge already running!')\n", (2784, 2811), False, 'from logging import basicConfig, DEBUG, INFO, ERROR, info, error\n')]
|
from pouring_base import Pouring_base
from gym import spaces
from scipy.spatial.transform import Rotation as R
from collections import deque
import math
import numpy as np
import os,sys
FILE_PATH = os.path.abspath(os.path.dirname(__file__))
class Pouring_featured(Pouring_base):
"""A concrete water-pouring gym environment that uses handcrafted features as
observations of the state. Thus, this environment describes a Partially Observable
Markov Decision Process.
Attributes:
max_in_air: Maximum amount of water-particles in the air that is assumed to be
possible. Used for normalization of observations.
"""
def __init__(self,**kwargs):
"""Initialize the water-pouring environment.
Args:
**kwargs: Keyword arguments that are forwarded to the abstract init method
of the base implementation.
"""
self.max_in_air = 40
super(Pouring_featured, self).__init__(**kwargs)
self.action_space = spaces.Box(low=-1,high=1,shape=(3,))
self.observation_space = spaces.Box(low=-1,high=1,shape=(11+(2*self.action_space.shape[0] if self.jerk_punish>0 else 0),))
def _observe(self):
"""Make an observation of the current state by the use of handcrafted features, which
do not describe the full state completely.
Returns:
A 11 or 17 dimensional numpy array that contains:
1. Bottle Rotation
2. The x-translation of the bottle
3. The y-translation of the bottle
4. This episodes time_step_punish
5. This episodes spill_punish
6. This episodes target_fill_state
7. The number of steps that have been performed since the start of the episode.
8. The fill-level of the glass.
9. The amount of water in the bottle.
10. The amount of water in the air between bottle and glass.
11. The amount of spilled particles.
12-14. If self.jerk_punish > 0, the last performed action.
15-17. If self.jerk_punish > 0, the next to last performed action
All values in the array are normalized to the range -1 to 1.
"""
rotation = R.from_matrix(self.bottle.rotation).as_euler("zyx")[0]
rotation = (rotation-self.min_rotation)/(math.pi-self.min_rotation)
translation_x,translation_y = self.bottle.translation[:2]
translation_x = (translation_x - self.translation_bounds[0][0]) / (self.translation_bounds[0][1]-self.translation_bounds[0][0])
translation_y = (translation_y - self.translation_bounds[1][0]) / (self.translation_bounds[1][1]-self.translation_bounds[1][0])
tsp_obs = ((self.time_step_punish-self.time_step_punish_range[0]) /
(self.time_step_punish_range[1]-self.time_step_punish_range[0]))*2-1
time_obs = (self._step_number/self._max_episode_steps)*2-1
spill_punish_obs = ((self.spill_punish-self.spill_range[0]) /
(self.spill_range[1]-self.spill_range[0]))*2-1
target_fill_obs = ((self.target_fill_state-self.target_fill_range[0]) /
(self.target_fill_range[1]-self.target_fill_range[0]))*2-1
feat_dat = [rotation,translation_x,translation_y,tsp_obs,spill_punish_obs,target_fill_obs,time_obs]
feat_dat.append((self.particle_locations["glass"]/self.max_in_glass)*2-1)
feat_dat.append((self.particle_locations["bottle"]/self.max_particles)*2-1)
feat_dat.append((self.particle_locations["air"]/self.max_in_air)*2-1)
feat_dat.append((self.particle_locations["spilled"]/self.max_spill)*2-1)
if self.jerk_punish>0:
# Extend the observation with the actions from the two last steps.
feat_dat.extend(np.array(self.last_actions)[:-1].flatten())
feat_dat = np.clip(np.array(feat_dat),-1,1)
return feat_dat
|
[
"os.path.dirname",
"numpy.array",
"gym.spaces.Box",
"scipy.spatial.transform.Rotation.from_matrix"
] |
[((215, 240), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (230, 240), False, 'import os, sys\n'), ((1024, 1062), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-1)', 'high': '(1)', 'shape': '(3,)'}), '(low=-1, high=1, shape=(3,))\n', (1034, 1062), False, 'from gym import spaces\n'), ((1094, 1204), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-1)', 'high': '(1)', 'shape': '(11 + (2 * self.action_space.shape[0] if self.jerk_punish > 0 else 0),)'}), '(low=-1, high=1, shape=(11 + (2 * self.action_space.shape[0] if \n self.jerk_punish > 0 else 0),))\n', (1104, 1204), False, 'from gym import spaces\n'), ((3984, 4002), 'numpy.array', 'np.array', (['feat_dat'], {}), '(feat_dat)\n', (3992, 4002), True, 'import numpy as np\n'), ((2329, 2364), 'scipy.spatial.transform.Rotation.from_matrix', 'R.from_matrix', (['self.bottle.rotation'], {}), '(self.bottle.rotation)\n', (2342, 2364), True, 'from scipy.spatial.transform import Rotation as R\n'), ((3913, 3940), 'numpy.array', 'np.array', (['self.last_actions'], {}), '(self.last_actions)\n', (3921, 3940), True, 'import numpy as np\n')]
|
from flask import abort, flash, redirect, url_for
from flask_login import current_user
from dmutils.flask import timed_render_template as render_template
from app import data_api_client
from .. import main
from ..helpers.buyers_helpers import (
get_framework_and_lot,
is_brief_correct,
)
BRIEF_WITHDRAWN_MESSAGE = "You’ve withdrawn your requirements for ‘{brief[title]}’"
@main.route('/frameworks/<framework_slug>/requirements/<lot_slug>/<brief_id>/withdraw', methods=['GET'])
def withdraw_a_brief_warning(framework_slug, lot_slug, brief_id):
framework, lot = get_framework_and_lot(
framework_slug,
lot_slug,
data_api_client,
allowed_statuses=['live', 'expired'],
must_allow_brief=True
)
brief = data_api_client.get_brief(brief_id)["briefs"]
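    # Only the owner of a live brief may view the withdrawal confirmation page.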
if not is_brief_correct(brief, framework_slug, lot_slug, current_user.id, allowed_statuses=['live']):
abort(404)
return render_template(
"buyers/withdraw_brief.html",
framework=framework,
brief=brief,
), 200
@main.route('/frameworks/<framework_slug>/requirements/<lot_slug>/<brief_id>/withdraw', methods=['POST'])
def withdraw_a_brief(framework_slug, lot_slug, brief_id):
get_framework_and_lot(
framework_slug,
lot_slug,
data_api_client,
allowed_statuses=['live', 'expired'],
must_allow_brief=True
)
brief = data_api_client.get_brief(brief_id)["briefs"]
if not is_brief_correct(brief, framework_slug, lot_slug, current_user.id, allowed_statuses=['live']):
abort(404)
data_api_client.withdraw_brief(brief_id, current_user.email_address)
flash(BRIEF_WITHDRAWN_MESSAGE.format(brief=brief), "success")
return redirect(url_for(".buyer_dos_requirements"))
|
[
"app.data_api_client.get_brief",
"flask.abort",
"app.data_api_client.withdraw_brief",
"flask.url_for",
"dmutils.flask.timed_render_template"
] |
[((1592, 1660), 'app.data_api_client.withdraw_brief', 'data_api_client.withdraw_brief', (['brief_id', 'current_user.email_address'], {}), '(brief_id, current_user.email_address)\n', (1622, 1660), False, 'from app import data_api_client\n'), ((761, 796), 'app.data_api_client.get_brief', 'data_api_client.get_brief', (['brief_id'], {}), '(brief_id)\n', (786, 796), False, 'from app import data_api_client\n'), ((922, 932), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (927, 932), False, 'from flask import abort, flash, redirect, url_for\n'), ((945, 1024), 'dmutils.flask.timed_render_template', 'render_template', (['"""buyers/withdraw_brief.html"""'], {'framework': 'framework', 'brief': 'brief'}), "('buyers/withdraw_brief.html', framework=framework, brief=brief)\n", (960, 1024), True, 'from dmutils.flask import timed_render_template as render_template\n'), ((1415, 1450), 'app.data_api_client.get_brief', 'data_api_client.get_brief', (['brief_id'], {}), '(brief_id)\n', (1440, 1450), False, 'from app import data_api_client\n'), ((1576, 1586), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1581, 1586), False, 'from flask import abort, flash, redirect, url_for\n'), ((1747, 1781), 'flask.url_for', 'url_for', (['""".buyer_dos_requirements"""'], {}), "('.buyer_dos_requirements')\n", (1754, 1781), False, 'from flask import abort, flash, redirect, url_for\n')]
|
## Conversion from Ocean to Numpy
import pyOcean_cpu as ocean
import ocean_numpy
import numpy as np
A = ocean.asTensor([[1,2,3],[4,5,6]], ocean.float)
B = A.convertTo('numpy', True)
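# Mutating A afterwards and printing B shows whether the conversion shared the underlying buffer.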
A.fill(3)
print(B)
print(B.dtype)
A = ocean.asTensor([[1,2,3],[4,5,6]], ocean.float)
A.byteswap()
B = A.convertTo('numpy', True)
print(B)
|
[
"pyOcean_cpu.asTensor"
] |
[((105, 156), 'pyOcean_cpu.asTensor', 'ocean.asTensor', (['[[1, 2, 3], [4, 5, 6]]', 'ocean.float'], {}), '([[1, 2, 3], [4, 5, 6]], ocean.float)\n', (119, 156), True, 'import pyOcean_cpu as ocean\n'), ((223, 274), 'pyOcean_cpu.asTensor', 'ocean.asTensor', (['[[1, 2, 3], [4, 5, 6]]', 'ocean.float'], {}), '([[1, 2, 3], [4, 5, 6]], ocean.float)\n', (237, 274), True, 'import pyOcean_cpu as ocean\n')]
|
#!/usr/bin/env python
"""
This portion of code requests flight search results from the Google QPX Express API
and saves the data as JSON files.
"""
import os
import json
import requests
import urllib2
#def main():
api_key = "<KEY>"
## pull the above API key from folder name in this URL https://drive.google.com/drive/folders/0B7t0jfbb9NwHbEgxRndDYjlPYnc
url = "https://www.googleapis.com/qpxExpress/v1/trips/search?key=" + api_key
headers = {'content-type': 'application/json'}
params = {
"request": {
"slice": [
{
"origin": "DCA",
"destination": "LAX",
"date": "2016-01-25"
}
],
"passengers": {
"adultCount": 1
},
"solutions": 200,
"refundable": False
}
}
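# POST the JSON search request to the API and save the raw response for later analysis.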
response = requests.post(url, data=json.dumps(params), headers=headers)
data = response.json()
with open('DCA.json', 'w') as f:
json.dump(data, f, indent=2)
params = {
"request": {
"slice": [
{
"origin": "IAD",
"destination": "LAX",
"date": "2016-01-25"
}
],
"passengers": {
"adultCount": 1
},
"solutions": 200,
"refundable": False
}
}
response = requests.post(url, data=json.dumps(params), headers=headers)
data = response.json()
with open('IAD.json', 'w') as f:
json.dump(data, f, indent=2)
params = {
"request": {
"slice": [
{
"origin": "BWI",
"destination": "LAX",
"date": "2016-01-25"
}
],
"passengers": {
"adultCount": 1
},
"solutions": 200,
"refundable": False
}
}
response = requests.post(url, data=json.dumps(params), headers=headers)
data = response.json()
with open('BWI.json', 'w') as f:
json.dump(data, f, indent=2)
#get API data and save as a dictionary (dict is named data)
# response = urllib2.urlopen(url)
# data = json.load(response)
#create filename
# filename = '%s'.json %DCAFlights
# define path for file to be saved; requires 'data' subdirectory
# path = os.path.join(os.getcwd(), 'Flight', filename)
# Open a file for writing
# new_file = open(path,"w")
# Save the dictionary into this file
# json.dump(data,new_file)
# Close the file
# new_file.close()
##prints data below##
#print data
##########################################################################
## Execution
##########################################################################
#if __name__ == '__main__':
# main()
|
[
"json.dump",
"json.dumps"
] |
[((879, 907), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(2)'}), '(data, f, indent=2)\n', (888, 907), False, 'import json\n'), ((1292, 1320), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(2)'}), '(data, f, indent=2)\n', (1301, 1320), False, 'import json\n'), ((1706, 1734), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(2)'}), '(data, f, indent=2)\n', (1715, 1734), False, 'import json\n'), ((781, 799), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (791, 799), False, 'import json\n'), ((1194, 1212), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (1204, 1212), False, 'import json\n'), ((1608, 1626), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (1618, 1626), False, 'import json\n')]
|
from setuptools import setup
setup(name='py2ifttt',
version='1.0.0',
description='an interface for triggering ifttt webhooks',
long_description='an interface for triggering ifttt webhooks, http://github.com/moevis/py2ifttt',
url='http://github.com/moevis/py2ifttt',
author='moevis',
author_email='<EMAIL>',
license='MIT',
packages=['py2ifttt'],
install_requires=[
'requests',
],
zip_safe=False)
|
[
"setuptools.setup"
] |
[((30, 415), 'setuptools.setup', 'setup', ([], {'name': '"""py2ifttt"""', 'version': '"""1.0.0"""', 'description': '"""an interface for triggering ifttt webhooks"""', 'long_description': '"""an interface for triggering ifttt webhooks, http://github.com/moevis/py2ifttt"""', 'url': '"""http://github.com/moevis/py2ifttt"""', 'author': '"""moevis"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['py2ifttt']", 'install_requires': "['requests']", 'zip_safe': '(False)'}), "(name='py2ifttt', version='1.0.0', description=\n 'an interface for triggering ifttt webhooks', long_description=\n 'an interface for triggering ifttt webhooks, http://github.com/moevis/py2ifttt'\n , url='http://github.com/moevis/py2ifttt', author='moevis',\n author_email='<EMAIL>', license='MIT', packages=['py2ifttt'],\n install_requires=['requests'], zip_safe=False)\n", (35, 415), False, 'from setuptools import setup\n')]
|
import altair as alt
from vega_datasets import data
cars = data.cars()
print(cars.columns)
cars.head()
|
[
"vega_datasets.data.cars"
] |
[((61, 72), 'vega_datasets.data.cars', 'data.cars', ([], {}), '()\n', (70, 72), False, 'from vega_datasets import data\n')]
|
import re
import collections
from ..css_abbreviation import parse, tokens, CSSValue, FunctionCall
re_property = re.compile(r'^([a-z-]+)(?:\s*:\s*([^\n\r;]+?);*)?$')
opt = {'value': True}
class CSSSnippetType:
Raw = 'Raw'
Property = 'Property'
class CSSSnippetRaw:
__slots__ = ('type', 'key', 'value')
def __init__(self, key: str, value: str):
self.type = CSSSnippetType.Raw
self.key = key
self.value = value
class CSSSnippetProperty:
__slots__ = ('type', 'key', 'value', 'property', 'keywords', 'dependencies')
def __init__(self, key: str, prop: str, value: list, keywords: dict):
self.type = CSSSnippetType.Property
self.key = key
self.property = prop
self.value = value
self.keywords = keywords
self.dependencies = []
def create_snippet(key: str, value: str):
"Creates structure for holding resolved CSS snippet"
# A snippet could be a raw text snippet (e.g. arbitrary text string) or a
# CSS property with possible values separated by `|`.
# In latter case, we have to parse snippet as CSS abbreviation
m = re_property.match(value)
if m:
keywords = collections.OrderedDict()
parsed = [parse_value(v) for v in m.group(2).split('|')] if m.group(2) else []
for item in parsed:
for css_val in item:
collect_keywords(css_val, keywords)
return CSSSnippetProperty(key, m.group(1), parsed, keywords)
return CSSSnippetRaw(key, value)
def nest(snippets: list):
"""
Nests more specific CSS properties into shorthand ones, e.g.
`background-position-x` -> `background-position` -> `background`
"""
snippets = snippets[:]
snippets.sort(key=lambda x: x.key)
stack = []
# For sorted list of CSS properties, create dependency graph where each
# shorthand property contains its more specific one, e.g.
# background -> background-position -> background-position-x
for cur in filter(is_property, snippets):
# Check if current property belongs to one from parent stack.
# Since `snippets` array is sorted, items are perfectly aligned
# from shorthands to more specific variants
while stack:
prev = stack[-1]
if cur.property.startswith(prev.property) and \
len(cur.property) > len(prev.property) and \
cur.property[len(prev.property)] == '-':
prev.dependencies.append(cur)
stack.append(cur)
break
stack.pop()
if not stack:
stack.append(cur)
return snippets
def parse_value(value: str):
global opt
return parse(value.strip(), opt)[0].value
def is_property(snippet):
return isinstance(snippet, CSSSnippetProperty)
def collect_keywords(css_val: CSSValue, dest: dict):
for v in css_val.value:
if isinstance(v, tokens.Literal):
dest[v.value] = v
elif isinstance(v, FunctionCall):
dest[v.name] = v
elif isinstance(v, tokens.Field):
# Create literal from field, if available
value = v.name.strip()
if value:
dest[value] = tokens.Literal(value)
|
[
"collections.OrderedDict",
"re.compile"
] |
[((114, 169), 're.compile', 're.compile', (['"""^([a-z-]+)(?:\\\\s*:\\\\s*([^\\\\n\\\\r;]+?);*)?$"""'], {}), "('^([a-z-]+)(?:\\\\s*:\\\\s*([^\\\\n\\\\r;]+?);*)?$')\n", (124, 169), False, 'import re\n'), ((1192, 1217), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (1215, 1217), False, 'import collections\n')]
|
"""TensorFlow has two major kinds of "shape"
- The Static Shape
- The Dynamic Shape
"""
# Suppress TensorFlow version warnings
import warnings
warnings.filterwarnings("ignore")
import tensorflow as tf
print(tf.VERSION)
# Demo: TF Static Shape
my_tensor = tf.ones(shape=[8, 2])
print('my_tensor = {}'.format(my_tensor))
# Retrieve its static shape (NOTE: static-shape ops are attached to the TF Tensor
# & usually have underscores in their names).
static_shape = my_tensor.get_shape()
print('static_shape = {}'.format(static_shape))
print('static_shape.as_list() = {}'.format(static_shape.as_list()))
# Create a placeholder with undefined shape.
my_placeholder = tf.placeholder(dtype=tf.float32, shape=[None, 2])
print('my_placeholder = {}'.format(my_placeholder))
# Update the shape.
print('BEFORE: my_placeholder.get_shape() = {}'.format(
my_placeholder.get_shape()))
my_placeholder.set_shape([8, 2])
print('AFTER: my_placeholder.get_shape() = {}'.format(
my_placeholder.get_shape()))
# Line divider.
print('\n\n', 70 * '=', '\n\n')
# Demo: TF Dynamic Shape
my_tensor = tf.ones(shape=[8, 2])
print('my_tensor = {}'.format(my_tensor))
# Retrieve its dynamic shape (NOTE: dynamic-shape ops are attached to the main scope
# & usually have no underscores in their names).
my_dynamic_shape = tf.shape(my_tensor)
print('my_dynamic_shape = {}'.format(my_dynamic_shape))
# Dynamic shape is a tensor itself describing the shape of the original
# tensor.
my_tensor_reshaped = tf.reshape(tensor=my_tensor, shape=[2, 4, 2])
print('my_tensor_reshaped = {}'.format(my_tensor_reshaped))
# To access the dynamic shape's value, you need to run it through a Session
dynamic_value = my_dynamic_shape.eval(session=tf.Session())
print(dynamic_value)
|
[
"tensorflow.ones",
"warnings.filterwarnings",
"tensorflow.reshape",
"tensorflow.Session",
"tensorflow.placeholder",
"tensorflow.shape"
] |
[((144, 177), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (167, 177), False, 'import warnings\n'), ((258, 279), 'tensorflow.ones', 'tf.ones', ([], {'shape': '[8, 2]'}), '(shape=[8, 2])\n', (265, 279), True, 'import tensorflow as tf\n'), ((658, 707), 'tensorflow.placeholder', 'tf.placeholder', ([], {'dtype': 'tf.float32', 'shape': '[None, 2]'}), '(dtype=tf.float32, shape=[None, 2])\n', (672, 707), True, 'import tensorflow as tf\n'), ((1079, 1100), 'tensorflow.ones', 'tf.ones', ([], {'shape': '[8, 2]'}), '(shape=[8, 2])\n', (1086, 1100), True, 'import tensorflow as tf\n'), ((1289, 1308), 'tensorflow.shape', 'tf.shape', (['my_tensor'], {}), '(my_tensor)\n', (1297, 1308), True, 'import tensorflow as tf\n'), ((1469, 1514), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'my_tensor', 'shape': '[2, 4, 2]'}), '(tensor=my_tensor, shape=[2, 4, 2])\n', (1479, 1514), True, 'import tensorflow as tf\n'), ((1698, 1710), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1708, 1710), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from django.test import TestCase
from mock import patch
from urllib3._collections import HTTPHeaderDict
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project
from readthedocs.restapi.utils import index_search_request
from readthedocs.rtd_tests.mocks.search_mock_responses import (
search_project_response
)
from readthedocs.search.indexes import PageIndex
from readthedocs.docsitalia.models import Publisher, PublisherProject
class TestSearch(TestCase):
fixtures = ['eric', 'test_data']
def setUp(self):
self.pip = Project.objects.get(slug='pip')
self.version = Version.objects.create(
project=self.pip, identifier='test_id', verbose_name='verbose name')
def perform_request_project_mock(self, method, url, params=None, body=None, timeout=None, ignore=()):
"""
Elastic Search Urllib3HttpConnection mock for project search
"""
headers = HTTPHeaderDict({
'content-length': '893',
'content-type': 'application/json; charset=UTF-8'
})
raw_data = search_project_response
return 200, headers, raw_data
@patch(
'elasticsearch.connection.http_urllib3.Urllib3HttpConnection.perform_request',
side_effect=perform_request_project_mock
)
def test_index_search_request_indexes_the_project(self, perform_request_mock):
page_list = []
index_search_request(
version=self.version, page_list=page_list, commit=None,
project_scale=1, page_scale=None, section=False, delete=False)
response = perform_request_mock.call_args_list[0][0][3]
self.assertJSONEqual(response, {
'slug': 'pip',
'lang': 'en',
'tags': None,
'name': u'Pip',
'id': 6,
'weight': 1,
'publisher': None,
'url': u'/projects/pip/',
'author': ['eric'],
'progetto': None,
'description': ''
})
@patch(
'elasticsearch.connection.http_urllib3.Urllib3HttpConnection.perform_request',
side_effect=perform_request_project_mock
)
def test_index_search_request_indexes_publisher_and_publisher_project(self, perform_request_mock):
publisher = Publisher.objects.create(
name='Test Org',
slug='publisher',
metadata={},
projects_metadata={},
active=True
)
pub_project = PublisherProject.objects.create(
name='Test Project',
slug='testproject',
metadata={
'documents': [
'https://github.com/testorg/myrepourl',
'https://github.com/testorg/anotherrepourl',
]
},
publisher=publisher,
active=True
)
pub_project.projects.add(self.pip)
page_list = [{'path': 'path', 'title': 'title', 'content': 'content', 'headers': 'headers'}]
with patch.object(PageIndex, 'bulk_index') as bulk_mock:
index_search_request(
version=self.version, page_list=page_list, commit=None,
project_scale=1, page_scale=1, section=False, delete=False)
response = perform_request_mock.call_args_list[0][0][3]
self.assertJSONEqual(response, {
'slug': 'pip',
'lang': 'en',
'tags': None,
'name': u'Pip',
'id': 6,
'weight': 1,
'publisher': 'Test Org',
'url': u'/projects/pip/',
'author': ['eric'],
'progetto': 'testproject',
'description': ''
})
bulk_mock.assert_called_with(
[{'publisher': 'Test Org', 'taxonomy': None, 'project': 'pip',
'commit': None, 'progetto': 'testproject', 'path': 'path',
'weight': 2, 'version': 'verbose-name', 'headers': 'headers',
'id': 'b3129830187e487e332bb2eab1b7a9c3', 'title': 'title',
'content': 'content', 'project_id': self.pip.pk}], routing='pip'
)
|
[
"readthedocs.projects.models.Project.objects.get",
"mock.patch.object",
"urllib3._collections.HTTPHeaderDict",
"readthedocs.docsitalia.models.PublisherProject.objects.create",
"mock.patch",
"readthedocs.builds.models.Version.objects.create",
"readthedocs.docsitalia.models.Publisher.objects.create",
"readthedocs.restapi.utils.index_search_request"
] |
[((1286, 1422), 'mock.patch', 'patch', (['"""elasticsearch.connection.http_urllib3.Urllib3HttpConnection.perform_request"""'], {'side_effect': 'perform_request_project_mock'}), "(\n 'elasticsearch.connection.http_urllib3.Urllib3HttpConnection.perform_request'\n , side_effect=perform_request_project_mock)\n", (1291, 1422), False, 'from mock import patch\n'), ((2150, 2286), 'mock.patch', 'patch', (['"""elasticsearch.connection.http_urllib3.Urllib3HttpConnection.perform_request"""'], {'side_effect': 'perform_request_project_mock'}), "(\n 'elasticsearch.connection.http_urllib3.Urllib3HttpConnection.perform_request'\n , side_effect=perform_request_project_mock)\n", (2155, 2286), False, 'from mock import patch\n'), ((694, 725), 'readthedocs.projects.models.Project.objects.get', 'Project.objects.get', ([], {'slug': '"""pip"""'}), "(slug='pip')\n", (713, 725), False, 'from readthedocs.projects.models import Project\n'), ((749, 845), 'readthedocs.builds.models.Version.objects.create', 'Version.objects.create', ([], {'project': 'self.pip', 'identifier': '"""test_id"""', 'verbose_name': '"""verbose name"""'}), "(project=self.pip, identifier='test_id', verbose_name\n ='verbose name')\n", (771, 845), False, 'from readthedocs.builds.models import Version\n'), ((1072, 1168), 'urllib3._collections.HTTPHeaderDict', 'HTTPHeaderDict', (["{'content-length': '893', 'content-type': 'application/json; charset=UTF-8'}"], {}), "({'content-length': '893', 'content-type':\n 'application/json; charset=UTF-8'})\n", (1086, 1168), False, 'from urllib3._collections import HTTPHeaderDict\n'), ((1549, 1692), 'readthedocs.restapi.utils.index_search_request', 'index_search_request', ([], {'version': 'self.version', 'page_list': 'page_list', 'commit': 'None', 'project_scale': '(1)', 'page_scale': 'None', 'section': '(False)', 'delete': '(False)'}), '(version=self.version, page_list=page_list, commit=None,\n project_scale=1, page_scale=None, section=False, delete=False)\n', (1569, 1692), False, 'from readthedocs.restapi.utils import index_search_request\n'), ((2422, 2533), 'readthedocs.docsitalia.models.Publisher.objects.create', 'Publisher.objects.create', ([], {'name': '"""Test Org"""', 'slug': '"""publisher"""', 'metadata': '{}', 'projects_metadata': '{}', 'active': '(True)'}), "(name='Test Org', slug='publisher', metadata={},\n projects_metadata={}, active=True)\n", (2446, 2533), False, 'from readthedocs.docsitalia.models import Publisher, PublisherProject\n'), ((2622, 2851), 'readthedocs.docsitalia.models.PublisherProject.objects.create', 'PublisherProject.objects.create', ([], {'name': '"""Test Project"""', 'slug': '"""testproject"""', 'metadata': "{'documents': ['https://github.com/testorg/myrepourl',\n 'https://github.com/testorg/anotherrepourl']}", 'publisher': 'publisher', 'active': '(True)'}), "(name='Test Project', slug='testproject',\n metadata={'documents': ['https://github.com/testorg/myrepourl',\n 'https://github.com/testorg/anotherrepourl']}, publisher=publisher,\n active=True)\n", (2653, 2851), False, 'from readthedocs.docsitalia.models import Publisher, PublisherProject\n'), ((3157, 3194), 'mock.patch.object', 'patch.object', (['PageIndex', '"""bulk_index"""'], {}), "(PageIndex, 'bulk_index')\n", (3169, 3194), False, 'from mock import patch\n'), ((3221, 3361), 'readthedocs.restapi.utils.index_search_request', 'index_search_request', ([], {'version': 'self.version', 'page_list': 'page_list', 'commit': 'None', 'project_scale': '(1)', 'page_scale': '(1)', 'section': '(False)', 'delete': '(False)'}), 
'(version=self.version, page_list=page_list, commit=None,\n project_scale=1, page_scale=1, section=False, delete=False)\n', (3241, 3361), False, 'from readthedocs.restapi.utils import index_search_request\n')]
|
import wikipedia
import wolframalpha
import wx
import pyttsx3
import speech_recognition as sr
engine = pyttsx3.init()
class MyFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, None,
pos=wx.DefaultPosition, size=wx.Size(550, 100),
style=wx.MINIMIZE_BOX | wx.SYSTEM_MENU | wx.CAPTION |
wx.CLOSE_BOX | wx.CLIP_CHILDREN,
title="PyDa")
panel = wx.Panel(self)
my_sizer = wx.BoxSizer(wx.VERTICAL)
lbl = wx.StaticText(panel,
label="Hello I am J.A.R.V.I.S , the Python Digital Assistant made by <NAME>. How can I help you?")
engine.say('Hello Anubhav')
my_sizer.Add(lbl, 0, wx.ALL, 5)
self.txt = wx.TextCtrl(panel, style=wx.TE_PROCESS_ENTER,size=(400,30))
self.txt.SetFocus()
self.txt.Bind(wx.EVT_TEXT_ENTER, self.OnEnter)
my_sizer.Add(self.txt, 0, wx.ALL, 5)
panel.SetSizer(my_sizer)
self.Show()
engine.runAndWait()
def OnEnter(self, event):
input = self.txt.GetValue()
input = input.lower()
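        # An empty text box means the user wants to dictate the query via the microphone instead.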
if input=='':
r = sr.Recognizer()
with sr.Microphone() as source:
audio = r.listen(source)
try:
self.txt.SetValue(r.recognize_google(audio))
except sr.UnknownValueError:
print('Google Speech Recognition could not understand audio')
except sr.RequestError as e:
print('Could not request results from Google Speech Recognition Service; {0}'.format(e))
# print("It worked!")
else:
try:
app_id = "HH23Y3-5645968TGY"
client = wolframalpha.Client(app_id)
res = client.query(input)
answer = next(res.results).text
print(answer)
engine.say('The answer is '+ answer)
engine.runAndWait()
except:
# wikipedia.set_lang("es")
# input = input.split(' ')
# input = " ".join(input[2:])
engine.say('I have searched following for '+ input)
engine.runAndWait()
print(wikipedia.summary(input, sentences=2))
if __name__ == "__main__":
app = wx.App(True)
frame = MyFrame()
app.MainLoop()
|
[
"wx.BoxSizer",
"pyttsx3.init",
"wx.Panel",
"speech_recognition.Microphone",
"wx.StaticText",
"wx.TextCtrl",
"wx.App",
"wolframalpha.Client",
"wikipedia.summary",
"wx.Size",
"speech_recognition.Recognizer"
] |
[((105, 119), 'pyttsx3.init', 'pyttsx3.init', ([], {}), '()\n', (117, 119), False, 'import pyttsx3\n'), ((2307, 2319), 'wx.App', 'wx.App', (['(True)'], {}), '(True)\n', (2313, 2319), False, 'import wx\n'), ((422, 436), 'wx.Panel', 'wx.Panel', (['self'], {}), '(self)\n', (430, 436), False, 'import wx\n'), ((456, 480), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (467, 480), False, 'import wx\n'), ((495, 624), 'wx.StaticText', 'wx.StaticText', (['panel'], {'label': '"""Hello I am J.A.R.V.I.S , the Python Digital Assistant made by <NAME>. How can I help you?"""'}), "(panel, label=\n 'Hello I am J.A.R.V.I.S , the Python Digital Assistant made by <NAME>. How can I help you?'\n )\n", (508, 624), False, 'import wx\n'), ((727, 788), 'wx.TextCtrl', 'wx.TextCtrl', (['panel'], {'style': 'wx.TE_PROCESS_ENTER', 'size': '(400, 30)'}), '(panel, style=wx.TE_PROCESS_ENTER, size=(400, 30))\n', (738, 788), False, 'import wx\n'), ((1139, 1154), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (1152, 1154), True, 'import speech_recognition as sr\n'), ((249, 266), 'wx.Size', 'wx.Size', (['(550)', '(100)'], {}), '(550, 100)\n', (256, 266), False, 'import wx\n'), ((1172, 1187), 'speech_recognition.Microphone', 'sr.Microphone', ([], {}), '()\n', (1185, 1187), True, 'import speech_recognition as sr\n'), ((1713, 1740), 'wolframalpha.Client', 'wolframalpha.Client', (['app_id'], {}), '(app_id)\n', (1732, 1740), False, 'import wolframalpha\n'), ((2228, 2265), 'wikipedia.summary', 'wikipedia.summary', (['input'], {'sentences': '(2)'}), '(input, sentences=2)\n', (2245, 2265), False, 'import wikipedia\n')]
|
'''
Functions to call appropriate constructor functions based on UI data and to link decoder objects in the database
'''
import os
import re
import tempfile
import xmlrpc.client
import pickle
import json
import logging
import numpy as np
from celery import task, chain
from django.http import HttpResponse
from riglib.bmi import extractor, train
from riglib import experiment
@task
def cache_plx(plxfile):
"""
Create cache for plexon file
"""
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
from plexon import plexfile
plexfile.openFile(str(plxfile))
@task
def make_bmi(name, clsname, extractorname, entry, cells, channels, binlen, tslice, ssm, pos_key, kin_extractor, zscore):
"""
Create a new Decoder object from training data and save a record to the database
Parameters
----------
name : string
Name assigned to decoder object in the database
clsname : string
BMI algorithm name (passed to bmilist lookup table 'bmis')
extractorname : string
feature extractor algorithm name (passed to bmilist lookup table 'extractors')
entry : models.TaskEntry
Django record of training task
cells : string
Single string containing all the units to be in decoder, matching
format in global regex 'cellname' (used only for spike extractors)
channels : string
Single string containing all the channels to be in decoder; must be a
comma separated list of values with spaces (e.g., "1, 2, 3")
(used only for, e.g., LFP extractors)
binlen : float
Time of spike history to consider
tslice : slice
Task time to use when training the decoder
ssm : string
TODO
pos_key : string
TODO
"""
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
cellname = re.compile(r'(\d{1,3})\s*(\w{1})')
print("make bmi")
extractor_cls = namelist.extractors[extractorname]
print('Training with extractor class:', extractor_cls)
if 'spike' in extractor_cls.feature_type: # e.g., 'spike_counts'
# look at "cells" argument (ignore "channels")
cells = [ (int(c), ord(u) - 96) for c, u in cellname.findall(cells)]
if cells == []:
units = None # use all units by default
# Note: inside training functions (e.g., _train_KFDecoder_manual_control,
# _train_KFDecoder_visual_feedback, etc.), remember to check if units
# variable is None, and if so, set the units from the plx file:
# if units == None:
# units = np.array(plx.units).astype(np.int32)"
else:
unique_cells = []
for c in cells:
if c not in unique_cells:
unique_cells.append(c)
units = np.array(unique_cells).astype(np.int32)
elif ('lfp' in extractor_cls.feature_type) or ('ai_' in extractor_cls.feature_type): # e.g., 'lfp_power'
# look at "channels" argument (ignore "cells")
channels = np.array(channels.split(', ')).astype(np.int32) # convert str to list of numbers
if len(channels) == 0:
channels = [1, 2, 3, 4] # use these channels by default
else:
channels = np.unique(channels)
# units = np.hstack([channels.reshape(-1, 1), np.zeros(channels.reshape(-1, 1).shape, dtype=np.int32)])
units = np.hstack([channels.reshape(-1, 1), np.ones(channels.reshape(-1, 1).shape, dtype=np.int32)])
else:
raise Exception('Unknown extractor class!')
task_update_rate = 60 # NOTE may not be true for all tasks?!
extractor_kwargs = dict()
if extractor_cls == extractor.BinnedSpikeCountsExtractor:
extractor_kwargs['units'] = units
extractor_kwargs['n_subbins'] = max(1, int((1./task_update_rate)/binlen))
elif extractor_cls == extractor.LFPButterBPFPowerExtractor:
extractor_kwargs['channels'] = channels
elif extractor_cls == extractor.LFPMTMPowerExtractor:
extractor_kwargs['channels'] = channels
elif extractor_cls == extractor.AIMTMPowerExtractor:
extractor_kwargs['channels'] = channels
else:
raise Exception("Unknown extractor_cls: %s" % extractor_cls)
database = xmlrpc.client.ServerProxy("http://localhost:8000/RPC2/", allow_none=True)
# list of DataFile objects
datafiles = models.DataFile.objects.filter(entry_id=entry)
# key: a string representing a system name (e.g., 'plexon', 'blackrock', 'task', 'hdf')
# value: a single filename, or a list of filenames if there are more than one for that system
files = dict()
system_names = set(d.system.name for d in datafiles)
for system_name in system_names:
filenames = [d.get_path() for d in datafiles if d.system.name == system_name]
if system_name in ['blackrock', 'blackrock2']:
files[system_name] = filenames # list of (one or more) files
else:
assert(len(filenames) == 1)
files[system_name] = filenames[0] # just one file
training_method = namelist.bmi_algorithms[clsname]
ssm = namelist.bmi_state_space_models[ssm]
kin_extractor_fn = namelist.kin_extractors[kin_extractor]
decoder = training_method(files, extractor_cls, extractor_kwargs, kin_extractor_fn, ssm, units, update_rate=binlen, tslice=tslice, pos_key=pos_key,
zscore=zscore)
decoder.te_id = entry
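    # Pickle the trained decoder to a temporary file and register it with the database server over XML-RPC.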
tf = tempfile.NamedTemporaryFile('wb')
pickle.dump(decoder, tf, 2)
tf.flush()
database.save_bmi(name, int(entry), tf.name)
def cache_and_train(*args, **kwargs):
"""
Cache plexon file (if using plexon system) and train BMI.
"""
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
recording_sys = models.KeyValueStore.get('recording_sys', None)
if recording_sys == 'plexon':
print("cache and train")
entry = kwargs['entry']
print(entry)
plxfile = models.DataFile.objects.get(system__name='plexon', entry=entry)
print(plxfile)
if not plxfile.has_cache():
cache = cache_plx.si(plxfile.get_path())
train = make_bmi.si(*args, **kwargs)
chain(cache, train)()
else:
print("calling")
make_bmi.delay(*args, **kwargs)
elif recording_sys == 'blackrock':
make_bmi.delay(*args, **kwargs)
else:
raise Exception('Unknown recording_system!')
def save_new_decoder_from_existing(obj, orig_decoder_record, suffix='_'):
'''
Save a decoder that is created by manipulating the parameters of an older decoder
Parameters
----------
obj: riglib.bmi.Decoder instance
New decoder object to be saved
orig_decoder_record: tracker.models.Decoder instance
Database record of the original decoder
suffix: string, default='_'
The name of the new decoder is created by taking the name of the old decoder and adding the specified suffix
Returns
-------
None
'''
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
import riglib.bmi
if not isinstance(obj, riglib.bmi.bmi.Decoder):
raise ValueError("This function is only intended for saving Decoder objects!")
new_decoder_fname = obj.save()
new_decoder_name = orig_decoder_record.name + suffix
training_block_id = orig_decoder_record.entry_id
print("Saving new decoder:", new_decoder_name)
dbq.save_bmi(new_decoder_name, training_block_id, new_decoder_fname)
## Functions to manipulate existing (KF)Decoders. These belong elsewhere
def conv_mm_dec_to_cm(decoder_record):
'''
Convert a mm unit decoder to cm
'''
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
decoder_fname = os.path.join('/storage/decoders/', decoder_record.path)
print(decoder_fname)
decoder_name = decoder_record.name
dec = pickle.load(open(decoder_fname))
from riglib.bmi import train
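    # Rescaling by a factor of 10 converts the decoder's units from mm to cm.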
dec_cm = train.rescale_KFDecoder_units(dec, 10)
    new_decoder_basename = os.path.splitext(os.path.basename(decoder_fname))[0] + '_cm.pkl'
new_decoder_fname = '/tmp/%s' % new_decoder_basename
pickle.dump(dec_cm, open(new_decoder_fname, 'w'))
new_decoder_name = decoder_name + '_cm'
training_block_id = decoder_record.entry_id
print(new_decoder_name)
dbq.save_bmi(new_decoder_name, training_block_id, new_decoder_fname)
def zero_out_SSKF_bias(decoder_record):
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
dec = open_decoder_from_record(decoder_record)
dec.filt.C_xpose_Q_inv_C[:,-1] = 0
dec.filt.C_xpose_Q_inv_C[-1,:] = 0
save_new_decoder_from_existing(dec, decoder_record, suffix='_zero_bias')
def conv_kfdecoder_binlen(decoder_record, new_binlen):
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
dec = open_decoder_from_record(decoder_record)
dec.change_binlen(new_binlen)
save_new_decoder_from_existing(dec, decoder_record, suffix='_%dHz' % int(1./new_binlen))
def conv_kfdecoder_to_ppfdecoder(decoder_record):
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
# Load the decoder
decoder_fname = os.path.join('/storage/decoders/', decoder_record.path)
print(decoder_fname)
decoder_name = decoder_record.name
dec = pickle.load(open(decoder_fname))
from riglib.bmi import train
dec_ppf = train.convert_KFDecoder_to_PPFDecoder(dec)
    new_decoder_basename = os.path.splitext(os.path.basename(decoder_fname))[0] + '_ppf.pkl'
new_decoder_fname = '/tmp/%s' % new_decoder_basename
pickle.dump(dec_ppf, open(new_decoder_fname, 'w'))
new_decoder_name = decoder_name + '_ppf'
training_block_id = decoder_record.entry_id
print(new_decoder_name)
from .tracker import dbq
dbq.save_bmi(new_decoder_name, training_block_id, new_decoder_fname)
def conv_kfdecoder_to_sskfdecoder(decoder_record):
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
dec = open_decoder_from_record(decoder_record)
F, K = dec.filt.get_sskf()
from riglib.bmi import sskfdecoder
filt = sskfdecoder.SteadyStateKalmanFilter(F=F, K=K)
    dec_sskf = sskfdecoder.SSKFDecoder(filt, dec.units, dec.ssm, binlen=dec.binlen)
    save_new_decoder_from_existing(dec_sskf, decoder_record, suffix='_sskf')
def make_kfdecoder_interpolate(decoder_record):
os.environ['DJANGO_SETTINGS_MODULE'] = 'db.settings'
from .tracker import dbq
from . import namelist
from .tracker import models
from . import dbfunctions as dbfn
from .json_param import Parameters
from .tasktrack import Track
from .tracker.models import TaskEntry, Feature, Sequence, Task, Generator, Subject, DataFile, System, Decoder
# Load the decoder
decoder_fname = os.path.join('/storage/decoders/', decoder_record.path)
print(decoder_fname)
decoder_name = decoder_record.name
dec = pickle.load(open(decoder_fname))
from riglib.bmi import train
dec_ppf = train._interpolate_KFDecoder_state_between_updates(dec)
    new_decoder_basename = os.path.splitext(os.path.basename(decoder_fname))[0] + '_ppf.pkl'
new_decoder_fname = '/tmp/%s' % new_decoder_basename
pickle.dump(dec_ppf, open(new_decoder_fname, 'w'))
new_decoder_name = decoder_name + '_60hz'
training_block_id = decoder_record.entry_id
print(new_decoder_name)
from .tracker import dbq
dbq.save_bmi(new_decoder_name, training_block_id, new_decoder_fname)
|
[
"tempfile.NamedTemporaryFile",
"pickle.dump",
"riglib.bmi.train.rescale_KFDecoder_units",
"riglib.bmi.sskfdecoder.SteadyStateKalmanFilter",
"celery.chain",
"os.path.basename",
"riglib.bmi.sskfdecoder.SSKFDecoder",
"riglib.bmi.train._interpolate_KFDecoder_state_between_updates",
"numpy.array",
"riglib.bmi.train.convert_KFDecoder_to_PPFDecoder",
"os.path.join",
"numpy.unique",
"re.compile"
] |
[((2464, 2500), 're.compile', 're.compile', (['"""(\\\\d{1,3})\\\\s*(\\\\w{1})"""'], {}), "('(\\\\d{1,3})\\\\s*(\\\\w{1})')\n", (2474, 2500), False, 'import re\n'), ((6088, 6121), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""wb"""'], {}), "('wb')\n", (6115, 6121), False, 'import tempfile\n'), ((6126, 6153), 'pickle.dump', 'pickle.dump', (['decoder', 'tf', '(2)'], {}), '(decoder, tf, 2)\n', (6137, 6153), False, 'import pickle\n'), ((9329, 9384), 'os.path.join', 'os.path.join', (['"""/storage/decoders/"""', 'decoder_record.path'], {}), "('/storage/decoders/', decoder_record.path)\n", (9341, 9384), False, 'import os\n'), ((9538, 9576), 'riglib.bmi.train.rescale_KFDecoder_units', 'train.rescale_KFDecoder_units', (['dec', '(10)'], {}), '(dec, 10)\n', (9567, 9576), False, 'from riglib.bmi import train\n'), ((11654, 11709), 'os.path.join', 'os.path.join', (['"""/storage/decoders/"""', 'decoder_record.path'], {}), "('/storage/decoders/', decoder_record.path)\n", (11666, 11709), False, 'import os\n'), ((11865, 11907), 'riglib.bmi.train.convert_KFDecoder_to_PPFDecoder', 'train.convert_KFDecoder_to_PPFDecoder', (['dec'], {}), '(dec)\n', (11902, 11907), False, 'from riglib.bmi import train\n'), ((12887, 12932), 'riglib.bmi.sskfdecoder.SteadyStateKalmanFilter', 'sskfdecoder.SteadyStateKalmanFilter', ([], {'F': 'F', 'K': 'K'}), '(F=F, K=K)\n', (12922, 12932), False, 'from riglib.bmi import sskfdecoder\n'), ((12948, 13020), 'riglib.bmi.sskfdecoder.SSKFDecoder', 'sskfdecoder.SSKFDecoder', (['filt', 'dec.units', 'dec.ssm'], {'binlen': 'decoder.binlen'}), '(filt, dec.units, dec.ssm, binlen=decoder.binlen)\n', (12971, 13020), False, 'from riglib.bmi import sskfdecoder\n'), ((13544, 13599), 'os.path.join', 'os.path.join', (['"""/storage/decoders/"""', 'decoder_record.path'], {}), "('/storage/decoders/', decoder_record.path)\n", (13556, 13599), False, 'import os\n'), ((13755, 13810), 'riglib.bmi.train._interpolate_KFDecoder_state_between_updates', 'train._interpolate_KFDecoder_state_between_updates', (['dec'], {}), '(dec)\n', (13805, 13810), False, 'from riglib.bmi import train\n'), ((3894, 3913), 'numpy.unique', 'np.unique', (['channels'], {}), '(channels)\n', (3903, 3913), True, 'import numpy as np\n'), ((7149, 7168), 'celery.chain', 'chain', (['cache', 'train'], {}), '(cache, train)\n', (7154, 7168), False, 'from celery import task, chain\n'), ((9605, 9636), 'os.path.basename', 'os.path.basename', (['decoder_fname'], {}), '(decoder_fname)\n', (9621, 9636), False, 'import os\n'), ((11936, 11967), 'os.path.basename', 'os.path.basename', (['decoder_fname'], {}), '(decoder_fname)\n', (11952, 11967), False, 'import os\n'), ((13839, 13870), 'os.path.basename', 'os.path.basename', (['decoder_fname'], {}), '(decoder_fname)\n', (13855, 13870), False, 'import os\n'), ((3451, 3473), 'numpy.array', 'np.array', (['unique_cells'], {}), '(unique_cells)\n', (3459, 3473), True, 'import numpy as np\n')]
|
import sys
import os
sys.path.append(os.path.abspath("../"))
import torch
import numpy as np
import torch.nn as nn
from torch import multiprocessing, cuda
from misc import torchutils, indexing, imutils
from data.chexpert_dataset_irnet import CheXpertTestCAMDatasetIRNet
from torch.utils.data import DataLoader
from constants import *
import importlib
from tqdm import tqdm
from args.train_arg_parser_irnet import TrainArgParserIRNet
import torch.nn.functional as F
import imageio
from augmentations import get_transforms
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
eps = 1e-7
def main(args):
dataset = CheXpertTestCAMDatasetIRNet()
data_loader = DataLoader(dataset, shuffle=False, num_workers=os.cpu_count(), pin_memory=False)
with torch.no_grad(), cuda.device(0):
fg_intersection = np.zeros(len(LOCALIZATION_TASKS))
fg_union = np.zeros(len(LOCALIZATION_TASKS))
        for pack in tqdm(data_loader):  # the loop index was unused and 'iter' shadows the builtin
img_level_labels = pack['img_level_labels'][0]
gt_seg_labels = pack['gt_seg_labels'][0]
cam_seg_labels = pack['cam_seg_labels'][0]
img_name = pack['base_name'][0]
np.save(os.path.join(CHEXPERT_PARENT_TEST_CAMS_DIR / "gt_seg_labels", f"{img_name}_seg_labels.npy"), gt_seg_labels.cpu().numpy())
for index, task in enumerate(LOCALIZATION_TASKS):
cam_seg_label = cam_seg_labels[index]
gt_seg_label = gt_seg_labels[index]
if img_level_labels[index] == 0:
cam_seg_label[:] = 0
intersection_fg = torch.sum(cam_seg_label * gt_seg_label).numpy()
union_fg = torch.sum(cam_seg_label).numpy() + torch.sum(gt_seg_label).numpy() - intersection_fg
fg_intersection[index] += intersection_fg
fg_union[index] += union_fg
for i in range(len(fg_intersection)):
fg_iou = (fg_intersection[i] + eps) / (fg_union[i] + eps)
print(f"Index {i} fg iou {fg_iou}")
if __name__ == "__main__":
parser = TrainArgParserIRNet()
hyperparams = parser.parse_args()
# TRAIN
main(hyperparams)
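# Editor's illustrative sketch (not part of the original script): the IoU
# bookkeeping above on two tiny hand-made masks, showing how the eps smoothing
# keeps empty-mask tasks from dividing by zero.
def _iou_demo():
    cam = torch.tensor([[1., 1.], [0., 0.]])
    gt = torch.tensor([[1., 0.], [0., 0.]])
    inter = torch.sum(cam * gt).item()                          # 1 overlapping pixel
    union = torch.sum(cam).item() + torch.sum(gt).item() - inter  # 2 + 1 - 1 = 2
    return (inter + eps) / (union + eps)                           # ~0.5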
|
[
"os.path.abspath",
"data.chexpert_dataset_irnet.CheXpertTestCAMDatasetIRNet",
"tqdm.tqdm",
"os.path.join",
"torch.sum",
"args.train_arg_parser_irnet.TrainArgParserIRNet",
"os.cpu_count",
"torch.cuda.is_available",
"torch.no_grad",
"torch.cuda.device"
] |
[((37, 59), 'os.path.abspath', 'os.path.abspath', (['"""../"""'], {}), "('../')\n", (52, 59), False, 'import os\n'), ((636, 665), 'data.chexpert_dataset_irnet.CheXpertTestCAMDatasetIRNet', 'CheXpertTestCAMDatasetIRNet', ([], {}), '()\n', (663, 665), False, 'from data.chexpert_dataset_irnet import CheXpertTestCAMDatasetIRNet\n'), ((2125, 2146), 'args.train_arg_parser_irnet.TrainArgParserIRNet', 'TrainArgParserIRNet', ([], {}), '()\n', (2144, 2146), False, 'from args.train_arg_parser_irnet import TrainArgParserIRNet\n'), ((556, 581), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (579, 581), False, 'import torch\n'), ((775, 790), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (788, 790), False, 'import torch\n'), ((792, 806), 'torch.cuda.device', 'cuda.device', (['(0)'], {}), '(0)\n', (803, 806), False, 'from torch import multiprocessing, cuda\n'), ((731, 745), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (743, 745), False, 'import os\n'), ((958, 975), 'tqdm.tqdm', 'tqdm', (['data_loader'], {}), '(data_loader)\n', (962, 975), False, 'from tqdm import tqdm\n'), ((1222, 1317), 'os.path.join', 'os.path.join', (["(CHEXPERT_PARENT_TEST_CAMS_DIR / 'gt_seg_labels')", 'f"""{img_name}_seg_labels.npy"""'], {}), "(CHEXPERT_PARENT_TEST_CAMS_DIR / 'gt_seg_labels',\n f'{img_name}_seg_labels.npy')\n", (1234, 1317), False, 'import os\n'), ((1652, 1691), 'torch.sum', 'torch.sum', (['(cam_seg_label * gt_seg_label)'], {}), '(cam_seg_label * gt_seg_label)\n', (1661, 1691), False, 'import torch\n'), ((1727, 1751), 'torch.sum', 'torch.sum', (['cam_seg_label'], {}), '(cam_seg_label)\n', (1736, 1751), False, 'import torch\n'), ((1762, 1785), 'torch.sum', 'torch.sum', (['gt_seg_label'], {}), '(gt_seg_label)\n', (1771, 1785), False, 'import torch\n')]
|
"""
PREPARE
Before running train, you need to run prepare.py with the respective task.
Example (in the command line):
> cd to root dir
> conda activate nlp
> python src/prepare.py --do_format --task 1
"""
#NOTE: the following is a workaround for AML to load modules
import os, sys; sys.path.append(os.path.dirname(os.path.realpath(__file__)))
import os
import spacy
import pandas as pd
import numpy as np
import string
import re
import argparse
from sklearn.model_selection import StratifiedShuffleSplit
# Custom functions
import sys
sys.path.append('./src')
import helper as he
import data as dt
import custom as cu
logger = he.get_logger(location=__name__)
class Clean():
"""Text preprocessing and cleaning steps
SUPPORTED LANGUAGES
- EN
- DE
- IT
- ES
- FR
- XX (multi - NER only)
SUPPORTED MODULES
- Remove Noise
Remove formatting and other noise that may be contained in emails or
other document types.
- Get Placeholders
Placeholders for common items such as dates, times, urls but also
custom customer IDs.
- Remove Stopwords
Stopwords can be added by adding a language specific stopword file
to /assets. Format: "assets/stopwords_<language>.txt".
- Lemmatize
"""
def __init__(self, task,
download_source=False,
download_train=False,
inference=False):
self.task = task
self.language = cu.params.get('language')
# Load data class
self.dt = dt.Data(task=self.task, inference=inference)
# Download data, if needed
if download_train:
self.dt.download('data_dir', dir = 'data_dir', source = 'datastore')
# Load spacy model
self.nlp = he.load_spacy_model(language=self.language, disable=['ner','parser','tagger'])
# Create stopword list
stopwords_active = []
## Load names
try:
names = self.dt.load('fn_names', dir = 'asset_dir', file_type = 'list')
stopwords_active = stopwords_active + names
except FileNotFoundError as e:
logger.warning(f'[WARNING] No names list loaded: {e}')
## Load stopwords
try:
stopwords = self.dt.load('fn_stopwords', dir = 'asset_dir', file_type = 'list')
stopwords_active = stopwords_active + stopwords
except FileNotFoundError as e:
logger.warning(f'[WARNING] No stopwords list loaded: {e}')
## Add to Spacy stopword list
        logger.warning(f'[INFO] Active stopwords list length: {len(stopwords_active)}')
for w in stopwords_active:
self.nlp.vocab[w.replace('\n','')].is_stop = True
def remove(self, line,
rm_email_formatting=False,
rm_email_header=False,
rm_email_footer=False,
rm_punctuation=False):
"""Remove content from text"""
if not isinstance(line, str):
line = str(line)
# Customer Remove
line = cu.remove(line)
if rm_email_formatting:
line = re.sub(r'<[^>]+>', ' ', line) # Remove HTML tags
line = re.sub(r'^(.*\.eml)', ' ', line) # remove header for system generated emails
if rm_email_header:
#DE/EN
if self.language == 'en' or self.language == 'de':
line = re.sub(r'\b(AW|RE|VON|WG|FWD|FW)(\:| )', '', line, flags=re.I)
#DE
if self.language == 'de':
line = re.sub(r'(Sehr geehrte( Damen und Herren.)?.)|hallo.|guten( tag)?.', '', line, flags=re.I)
if rm_email_footer:
#EN
if self.language == 'en':
line = re.sub(r'\bkind regards.*', '', line, flags=re.I)
#DE
if self.language == 'de':
line = re.sub(r'\b(mit )?(beste|viele|liebe|freundlich\w+)? (gr[u,ü][ß,ss].*)', '', line, flags=re.I)
line = re.sub(r'\b(besten|herzlichen|lieben) dank.*', '', line, flags=re.I)
line = re.sub(r'\bvielen dank für ihr verständnis.*', '', line, flags=re.I)
line = re.sub(r'\bvielen dank im voraus.*', '', line, flags=re.I)
line = re.sub(r'\b(mfg|m\.f\.g) .*','', line, flags=re.I)
line = re.sub(r'\b(lg) .*','',line, flags=re.I)
line = re.sub(r'\b(meinem iPhone gesendet) .*','',line, flags=re.I)
line = re.sub(r'\b(Gesendet mit der (WEB|GMX)) .*','',line, flags=re.I)
line = re.sub(r'\b(Diese E-Mail wurde von Avast) .*','',line, flags=re.I)
# Remove remaining characters
##NOTE: may break other regex
if rm_punctuation:
line = re.sub('['+string.punctuation+']',' ',line)
return line
def get_placeholder(self, line,
rp_generic=False,
rp_custom=False,
rp_num=False):
"""Replace text with type specfic placeholders"""
# Customer placeholders
line = cu.get_placeholder(line)
# Generic placeholder
if rp_generic:
line = re.sub(r' \+[0-9]+', ' ', line) # remove phone numbers
            line = re.sub(r'0x([a-z]|[0-9])+ ',' PER ',line, flags=re.IGNORECASE) # replace hex-like ids; re.sub's 4th positional arg is 'count', so flags must be a keyword
line = re.sub(r'[0-9]{2}[\/.,:][0-9]{2}[\/.,:][0-9]{2,4}', ' PDT ', line) # remove dates and time, replace with placeholder
line = re.sub(r'([0-9]{2,3}[\.]){3}[0-9]{1,3}',' PIP ',line) # replace ip with placeholder
line = re.sub(r'[0-9]{1,2}[\/.,:][0-9]{1,2}', ' PTI ', line) # remove only time, replace with placeholder
line = re.sub(r'[\w\.-]+@[\w\.-]+', ' PEM ', line) # remove emails
line = re.sub(r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+', ' PUR ', line) # Remove links
line = re.sub(r'€|\$|(USD)|(EURO)', ' PMO ', line)
# Placeholders for numerics
if rp_num:
line = re.sub(r' ([0-9]{4,30}) ',' PNL ', line) # placeholder for long stand alone numbers
line = re.sub(r' [0-9]{2,3} ',' PNS ', line) # placeholder for short stand alone numbers
return line
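    # Worked example (editor note), approximate: with rp_generic=True the line
    #   'Treffen am 01.02.2020 um 10:30, Mail an a@b.de'
    # becomes roughly
    #   'Treffen am PDT um PTI , Mail an PEM '
    # (dates -> PDT, times -> PTI, e-mail addresses -> PEM).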
def tokenize(self, line, lemmatize = False, rm_stopwords = False):
"""Tokenizer for non DL tasks"""
if not isinstance(line, str):
line = str(line)
if lemmatize and rm_stopwords:
line = ' '.join([t.lemma_ for t in self.nlp(line) if not t.is_stop])
elif lemmatize:
line = ' '.join([t.lemma_ for t in self.nlp(line)])
elif rm_stopwords:
line = ' '.join([t.text for t in self.nlp(line) if not t.is_stop])
return line
def transform(self, texts,
to_lower = False,
# Remove
rm_email_formatting = False,
rm_email_header = False,
rm_email_footer = False,
rm_punctuation = False,
# Placeholders
rp_generic = False,
rp_num = False,
# Tokenize
lemmatize = False,
rm_stopwords = False,
return_token = False,
# Whitespace
remove_whitespace = True
):
"""Main run function for cleaning process"""
if isinstance(texts, str):
texts = [texts]
# Convert to series for improved efficiency
df_texts = pd.Series(texts)
# Avoid loading errors
df_texts = df_texts.replace('\t', ' ', regex=True)
# Remove noise
if any((rm_email_formatting, rm_email_header,
rm_email_footer, rm_punctuation)):
df_texts = df_texts.apply(lambda x: self.remove(x,
rm_email_formatting = rm_email_formatting,
rm_email_header = rm_email_header,
rm_email_footer = rm_email_footer,
rm_punctuation = rm_punctuation))
# Replace placeholders
if any((rp_generic, rp_num)):
df_texts = df_texts.apply(lambda x: self.get_placeholder(x,
rp_generic = rp_generic,
rp_num = rp_num))
# Tokenize text
if any((lemmatize, rm_stopwords, return_token)):
df_texts = df_texts.apply(self.tokenize,
lemmatize = lemmatize,
rm_stopwords = rm_stopwords)
# To lower
if to_lower:
df_texts = df_texts.apply(str.lower)
# Remove spacing
if remove_whitespace:
df_texts = df_texts.apply(lambda x: " ".join(x.split()))
# Return Tokens
if return_token:
return [t.split(' ') for t in df_texts.to_list()]
else:
return df_texts.to_list()
def transform_by_task(self, text):
# CUSTOM FUNCTION
if cu.tasks.get(str(self.task)).get('type') == 'classification':
return self.transform(text,
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True,
rp_generic = True)[0]
elif cu.tasks.get(str(self.task)).get('type') == 'multi_classification':
return self.transform(text,
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True,
rp_generic = True)[0]
elif cu.tasks.get(str(self.task)).get('type') == 'ner':
return text[0]
elif cu.tasks.get(str(self.task)).get('type') == 'qa':
return self.transform(text,
to_lower = True,
# Remove
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True,
rm_punctuation = True,
# Placeholders
rp_generic = True,
rp_num = True,
# Tokenize
lemmatize = True,
rm_stopwords = True,
return_token = True
)[0]
else:
logger.warning('[WARNING] No transform by task found.')
return text[0]
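# Editor's illustrative sketch (not part of the original module): composing the
# cleaning pipeline by hand. Instantiating Clean requires the task's data and
# asset files to be present, so treat this as documentation rather than a test.
def _clean_demo(task=1):
    cl = Clean(task=task)
    return cl.transform('Hallo, Rückruf unter +49 123 456 erbeten. Viele Grüße',
                        rm_email_footer = True,
                        rp_generic = True,
                        to_lower = True)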
def prepare_classification(task, do_format, train_split, min_cat_occurance,
min_char_length, register_data):
# Get clean object
cl = Clean(task=task, download_source=True)
# Load data
if not os.path.isfile(cl.dt.get_path('fn_prep', dir = 'data_dir')) or do_format:
data = dt.get_dataset(cl, source="cdb")
else:
data = cl.dt.load('fn_prep', dir = 'data_dir')
logger.warning(f'Data Length : {len(data)}')
# Load text & label field
text_raw = cu.load_text(data)
data['label'] = cu.load_label(data, task)
if cu.tasks.get(str(task)).get('type') == 'multi_classification':
data['label'] = data['label'].str.replace(', ', '_').str.replace(' ', '_')
flat_labels = [row['label'].split(',') for index, row in data.iterrows()]
labels_clean = []
for labels in flat_labels:
for label in labels:
labels_clean.append(label)
label_list_raw = pd.DataFrame({'label':labels_clean})
label_list_raw = label_list_raw[label_list_raw.label != '']
label_list_raw = label_list_raw.label.drop_duplicates()
elif cu.tasks.get(str(task)).get('type') == 'classification': # in case of single label classification
label_list_raw = data.label.drop_duplicates()
# Clean text
data['text'] = cl.transform(text_raw,
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True,
rp_generic = True)
# Filter by length
data = he.remove_short(data, 'text', min_char_length=min_char_length)
logger.warning(f'Data Length : {len(data)}')
# Remove duplicates
data_red = data.drop_duplicates(subset=['text'])
logger.warning(f'Data Length : {len(data_red)}')
# Min class occurance
if cu.tasks.get(str(task)).get('type') == 'classification':
data_red = data_red[data_red.groupby('label').label.transform('size') > min_cat_occurance]
elif cu.tasks.get(str(task)).get('type') == 'multi_classification':
# Split rows
data_transform = data_red[['id', 'label']].copy()
data_transform['label'] = [row['label'].split(",") for index, row in data_transform.iterrows()] # pipe it to list
data_transform = pd.DataFrame({'index':data_transform.index.repeat(data_transform.label.str.len()), 'label':np.concatenate(data_transform.label.values)}) # explode df
data_transform = data_transform[data_transform.groupby('label').label.transform('size') > min_cat_occurance] # count for min occurance and only keep relevant ones
data_transform = data_transform.groupby(['index'])['label'].apply(lambda x: ','.join(x.astype(str))).reset_index() # re-merge
data_transform = data_transform.set_index('index')
del data_red['label']
data_red = pd.concat([data_red, data_transform], join='inner', axis=1)
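        # Net effect of the block above (editor note): each comma-separated label
        # string is exploded to one row per label, labels rarer than
        # min_cat_occurance are dropped, and the survivors are re-joined into a
        # single comma-separated string per original row.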
logger.warning(f'Data Length : {len(data_red)}')
data_red = data_red.tail(300000).reset_index(drop=True).copy()
#TODO: .tail() temp is for debugging
## There is a memory issue for the EN dataset, due to its size. Needs further investigation.
# Label list
if cu.tasks.get(str(task)).get('type') == 'multi_classification': # 2 = task for multi-label classification
flat_labels = [row['label'].split(',') for index, row in data_red.iterrows()]
labels_clean = []
for labels in flat_labels:
for label in labels:
labels_clean.append(label)
label_list = pd.DataFrame({'label':labels_clean})
label_list = label_list[label_list.label != '']
label_list = label_list.label.drop_duplicates()
elif cu.tasks.get(str(task)).get('type') == 'classification': # in case of single label classification
label_list = data_red.label.drop_duplicates()
logger.warning(f'Excluded labels: {list(set(label_list_raw)-set(label_list))}')
# Split data
strf_split = StratifiedShuffleSplit(n_splits = 1, test_size=(1-train_split), random_state=200)
if cu.tasks.get(str(task)).get('type') == 'classification':
for train_index, test_index in strf_split.split(data_red, data_red['label']):
df_cat_train = data_red.loc[train_index]
df_cat_test = data_red.loc[test_index]
elif cu.tasks.get(str(task)).get('type') == 'multi_classification':
for train_index, test_index in strf_split.split(data_red, pd.DataFrame({'label':[l.split(',')[0] for l in data_red['label']]})['label']):
df_cat_train = data_red.loc[train_index]
df_cat_test = data_red.loc[test_index]
# Save data
cl.dt.save(data_red, fn = 'fn_clean', dir = 'data_dir')
cl.dt.save(df_cat_train[['text','label']], fn = 'fn_train', dir = 'data_dir')
cl.dt.save(df_cat_test[['text','label']], fn = 'fn_test', dir = 'data_dir')
cl.dt.save(label_list, fn = 'fn_label', header=False, dir = 'data_dir')
# Upload data
if register_data:
cl.dt.upload('data_dir', destination='dataset')
def prepare_ner(task, do_format, register_data):
pass
def prepare_qa(task, do_format, min_char_length, register_data):
# Get clean object
cl = Clean(task=task, download_source=True)
# Load data
if not os.path.isfile(cl.dt.get_path('fn_prep', dir = 'data_dir')) or do_format:
data = dt.get_dataset(cl, source="cdb")
else:
data = cl.dt.load('fn_prep', dir = 'data_dir')
logger.warning(f'Data Length : {len(data)}')
# Filter relevant question answer pairs
data = cu.filter_qa(data)
logger.warning(f'Data Length : {len(data)}')
# Load question & answer fields
question, answer = cu.load_qa(data)
# Clean text
data['question_clean'] = cl.transform(question,
to_lower = True,
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True,
rm_punctuation = True,
rp_generic = True,
rp_num = True,
lemmatize = True,
rm_stopwords = True
)
data['answer_clean'] = cl.transform(answer,
to_lower = True,
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True,
rm_punctuation = True,
rp_generic = True,
rp_num = True,
lemmatize = True,
rm_stopwords = True
)
# For display
data['answer_text_clean'] = cl.transform(answer,
rm_email_formatting = True,
rm_email_header = True,
rm_email_footer = True
)
# Filter by length
data = he.remove_short(data, 'question_clean', min_char_length=min_char_length)
logger.warning(f'Data Length : {len(data)}')
# Remove duplicates
data = data.drop_duplicates(subset=['question_clean'])
logger.warning(f'Data Length : {len(data)}')
data = data.reset_index(drop=True).copy()
# Save data
cl.dt.save(data, fn = 'fn_clean', dir = 'data_dir')
# Upload data
if register_data:
cl.dt.upload('data_dir', destination='dataset')
def main(task=1,
do_format=False,
split=0.9,
min_cat_occurance=300,
min_char_length=20,
register_data=False):
logger.warning(f'Running <PREPARE> for task {task}')
task_type = cu.tasks.get(str(task)).get('type')
if 'classification' == task_type:
prepare_classification(task, do_format, split, min_cat_occurance, min_char_length, register_data)
elif 'multi_classification' == task_type:
prepare_classification(task, do_format, split, min_cat_occurance, min_char_length, register_data)
elif 'ner' == task_type:
prepare_ner(task, do_format, register_data)
elif 'qa' == task_type:
prepare_qa(task, do_format, min_char_length, register_data)
else:
logger.warning('[ERROR] TASK TYPE UNKNOWN. Nothing was processed.')
def run():
"""Run from the command line"""
parser = argparse.ArgumentParser()
parser.add_argument("--task",
default=1,
type=int,
help="Task where: \
-task 1 : classification subcat \
-task 2 : classification cat \
-task 3 : ner \
-task 4 : qa")
parser.add_argument('--do_format',
action='store_true',
help="Avoid reloading and normalizing data")
parser.add_argument("--split",
default=0.9,
type=float,
help="Train test split. Dev split is taken from train set.")
parser.add_argument("--min_cat_occurance",
default=300,
type=int,
help="Min occurance required by category.")
parser.add_argument("--min_char_length",
default=20,
type=int,
help="")
parser.add_argument('--register_data',
action='store_true',
help="")
args = parser.parse_args()
main(args.task, args.do_format, args.split, min_cat_occurance=args.min_cat_occurance,
min_char_length=args.min_char_length, register_data=args.register_data)
if __name__ == '__main__':
run()
|
[
"argparse.ArgumentParser",
"helper.load_spacy_model",
"custom.filter_qa",
"custom.load_qa",
"helper.get_logger",
"sys.path.append",
"pandas.DataFrame",
"re.sub",
"pandas.concat",
"os.path.realpath",
"custom.load_text",
"pandas.Series",
"data.Data",
"helper.remove_short",
"custom.get_placeholder",
"custom.remove",
"numpy.concatenate",
"data.get_dataset",
"sklearn.model_selection.StratifiedShuffleSplit",
"custom.params.get",
"custom.load_label"
] |
[((537, 561), 'sys.path.append', 'sys.path.append', (['"""./src"""'], {}), "('./src')\n", (552, 561), False, 'import sys\n'), ((630, 662), 'helper.get_logger', 'he.get_logger', ([], {'location': '__name__'}), '(location=__name__)\n', (643, 662), True, 'import helper as he\n'), ((11464, 11482), 'custom.load_text', 'cu.load_text', (['data'], {}), '(data)\n', (11476, 11482), True, 'import custom as cu\n'), ((11503, 11528), 'custom.load_label', 'cu.load_label', (['data', 'task'], {}), '(data, task)\n', (11516, 11528), True, 'import custom as cu\n'), ((12553, 12615), 'helper.remove_short', 'he.remove_short', (['data', '"""text"""'], {'min_char_length': 'min_char_length'}), "(data, 'text', min_char_length=min_char_length)\n", (12568, 12615), True, 'import helper as he\n'), ((14973, 15052), 'sklearn.model_selection.StratifiedShuffleSplit', 'StratifiedShuffleSplit', ([], {'n_splits': '(1)', 'test_size': '(1 - train_split)', 'random_state': '(200)'}), '(n_splits=1, test_size=1 - train_split, random_state=200)\n', (14995, 15052), False, 'from sklearn.model_selection import StratifiedShuffleSplit\n'), ((16568, 16586), 'custom.filter_qa', 'cu.filter_qa', (['data'], {}), '(data)\n', (16580, 16586), True, 'import custom as cu\n'), ((16696, 16712), 'custom.load_qa', 'cu.load_qa', (['data'], {}), '(data)\n', (16706, 16712), True, 'import custom as cu\n'), ((17999, 18071), 'helper.remove_short', 'he.remove_short', (['data', '"""question_clean"""'], {'min_char_length': 'min_char_length'}), "(data, 'question_clean', min_char_length=min_char_length)\n", (18014, 18071), True, 'import helper as he\n'), ((19374, 19399), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (19397, 19399), False, 'import argparse\n'), ((316, 342), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (332, 342), False, 'import os\n'), ((1471, 1496), 'custom.params.get', 'cu.params.get', (['"""language"""'], {}), "('language')\n", (1484, 1496), True, 'import custom as cu\n'), ((1550, 1594), 'data.Data', 'dt.Data', ([], {'task': 'self.task', 'inference': 'inference'}), '(task=self.task, inference=inference)\n', (1557, 1594), True, 'import data as dt\n'), ((1786, 1871), 'helper.load_spacy_model', 'he.load_spacy_model', ([], {'language': 'self.language', 'disable': "['ner', 'parser', 'tagger']"}), "(language=self.language, disable=['ner', 'parser', 'tagger']\n )\n", (1805, 1871), True, 'import helper as he\n'), ((3105, 3120), 'custom.remove', 'cu.remove', (['line'], {}), '(line)\n', (3114, 3120), True, 'import custom as cu\n'), ((5141, 5165), 'custom.get_placeholder', 'cu.get_placeholder', (['line'], {}), '(line)\n', (5159, 5165), True, 'import custom as cu\n'), ((7740, 7756), 'pandas.Series', 'pd.Series', (['texts'], {}), '(texts)\n', (7749, 7756), True, 'import pandas as pd\n'), ((11271, 11303), 'data.get_dataset', 'dt.get_dataset', (['cl'], {'source': '"""cdb"""'}), "(cl, source='cdb')\n", (11285, 11303), True, 'import data as dt\n'), ((11927, 11964), 'pandas.DataFrame', 'pd.DataFrame', (["{'label': labels_clean}"], {}), "({'label': labels_clean})\n", (11939, 11964), True, 'import pandas as pd\n'), ((14544, 14581), 'pandas.DataFrame', 'pd.DataFrame', (["{'label': labels_clean}"], {}), "({'label': labels_clean})\n", (14556, 14581), True, 'import pandas as pd\n'), ((16365, 16397), 'data.get_dataset', 'dt.get_dataset', (['cl'], {'source': '"""cdb"""'}), "(cl, source='cdb')\n", (16379, 16397), True, 'import data as dt\n'), ((3173, 3201), 're.sub', 're.sub', (['"""<[^>]+>"""', '""" """', 'line'], {}), "('<[^>]+>', ' ', line)\n", (3179, 3201), False, 'import re\n'), ((3241, 3273), 're.sub', 're.sub', (['"""^(.*\\\\.eml)"""', '""" """', 'line'], {}), "('^(.*\\\\.eml)', ' ', line)\n", (3247, 3273), False, 'import re\n'), ((4802, 4851), 're.sub', 're.sub', (["('[' + string.punctuation + ']')", '""" """', 'line'], {}), "('[' + string.punctuation + ']', ' ', line)\n", (4808, 4851), False, 'import re\n'), ((5239, 5270), 're.sub', 're.sub', (['""" \\\\+[0-9]+"""', '""" """', 'line'], {}), "(' \\\\+[0-9]+', ' ', line)\n", (5245, 5270), False, 'import re\n'), ((5313, 5370), 're.sub', 're.sub', (['"""0x([a-z]|[0-9])+ """', '""" PER """', 'line', 're.IGNORECASE'], {}), "('0x([a-z]|[0-9])+ ', ' PER ', line, re.IGNORECASE)\n", (5319, 5370), False, 'import re\n'), ((5400, 5467), 're.sub', 're.sub', (['"""[0-9]{2}[\\\\/.,:][0-9]{2}[\\\\/.,:][0-9]{2,4}"""', '""" PDT """', 'line'], {}), "('[0-9]{2}[\\\\/.,:][0-9]{2}[\\\\/.,:][0-9]{2,4}', ' PDT ', line)\n", (5406, 5467), False, 'import re\n'), ((5536, 5591), 're.sub', 're.sub', (['"""([0-9]{2,3}[\\\\.]){3}[0-9]{1,3}"""', '""" PIP """', 'line'], {}), "('([0-9]{2,3}[\\\\.]){3}[0-9]{1,3}', ' PIP ', line)\n", (5542, 5591), False, 'import re\n'), ((5639, 5692), 're.sub', 're.sub', (['"""[0-9]{1,2}[\\\\/.,:][0-9]{1,2}"""', '""" PTI """', 'line'], {}), "('[0-9]{1,2}[\\\\/.,:][0-9]{1,2}', ' PTI ', line)\n", (5645, 5692), False, 'import re\n'), ((5757, 5803), 're.sub', 're.sub', (['"""[\\\\w\\\\.-]+@[\\\\w\\\\.-]+"""', '""" PEM """', 'line'], {}), "('[\\\\w\\\\.-]+@[\\\\w\\\\.-]+', ' PEM ', line)\n", (5763, 5803), False, 'import re\n'), ((5836, 5946), 're.sub', 're.sub', (['"""http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-f][0-9a-f]))+"""', '""" PUR """', 'line'], {}), "(\n 'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\\\\(\\\\),]|(?:%[0-9a-f][0-9a-f]))+'\n , ' PUR ', line)\n", (5842, 5946), False, 'import re\n'), ((5970, 6013), 're.sub', 're.sub', (['"""€|\\\\$|(USD)|(EURO)"""', '""" PMO """', 'line'], {}), "('€|\\\\$|(USD)|(EURO)', ' PMO ', line)\n", (5976, 6013), False, 'import re\n'), ((6097, 6137), 're.sub', 're.sub', (['""" ([0-9]{4,30}) """', '""" PNL """', 'line'], {}), "(' ([0-9]{4,30}) ', ' PNL ', line)\n", (6103, 6137), False, 'import re\n'), ((6200, 6237), 're.sub', 're.sub', (['""" [0-9]{2,3} """', '""" PNS """', 'line'], {}), "(' [0-9]{2,3} ', ' PNS ', line)\n", (6206, 6237), False, 'import re\n'), ((13851, 13910), 'pandas.concat', 'pd.concat', (['[data_red, data_transform]'], {'join': '"""inner"""', 'axis': '(1)'}), "([data_red, data_transform], join='inner', axis=1)\n", (13860, 13910), True, 'import pandas as pd\n'), ((3452, 3515), 're.sub', 're.sub', (['"""\\\\b(AW|RE|VON|WG|FWD|FW)(\\\\:| )"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(AW|RE|VON|WG|FWD|FW)(\\\\:| )', '', line, flags=re.I)\n", (3458, 3515), False, 'import re\n'), ((3592, 3685), 're.sub', 're.sub', (['"""(Sehr geehrte( Damen und Herren.)?.)|hallo.|guten( tag)?."""', '""""""', 'line'], {'flags': 're.I'}), "('(Sehr geehrte( Damen und Herren.)?.)|hallo.|guten( tag)?.', '',\n line, flags=re.I)\n", (3598, 3685), False, 'import re\n'), ((3789, 3838), 're.sub', 're.sub', (['"""\\\\bkind regards.*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\bkind regards.*', '', line, flags=re.I)\n", (3795, 3838), False, 'import re\n'), ((3916, 4015), 're.sub', 're.sub', (['"""\\\\b(mit )?(beste|viele|liebe|freundlich\\\\w+)? (gr[u,ü][ß,ss].*)"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(mit )?(beste|viele|liebe|freundlich\\\\w+)? (gr[u,ü][ß,ss].*)',\n '', line, flags=re.I)\n", (3922, 4015), False, 'import re\n'), ((4034, 4102), 're.sub', 're.sub', (['"""\\\\b(besten|herzlichen|lieben) dank.*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(besten|herzlichen|lieben) dank.*', '', line, flags=re.I)\n", (4040, 4102), False, 'import re\n'), ((4126, 4194), 're.sub', 're.sub', (['"""\\\\bvielen dank für ihr verständnis.*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\bvielen dank für ihr verständnis.*', '', line, flags=re.I)\n", (4132, 4194), False, 'import re\n'), ((4219, 4277), 're.sub', 're.sub', (['"""\\\\bvielen dank im voraus.*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\bvielen dank im voraus.*', '', line, flags=re.I)\n", (4225, 4277), False, 'import re\n'), ((4302, 4355), 're.sub', 're.sub', (['"""\\\\b(mfg|m\\\\.f\\\\.g) .*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(mfg|m\\\\.f\\\\.g) .*', '', line, flags=re.I)\n", (4308, 4355), False, 'import re\n'), ((4376, 4418), 're.sub', 're.sub', (['"""\\\\b(lg) .*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(lg) .*', '', line, flags=re.I)\n", (4382, 4418), False, 'import re\n'), ((4440, 4502), 're.sub', 're.sub', (['"""\\\\b(meinem iPhone gesendet) .*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(meinem iPhone gesendet) .*', '', line, flags=re.I)\n", (4446, 4502), False, 'import re\n'), ((4524, 4590), 're.sub', 're.sub', (['"""\\\\b(Gesendet mit der (WEB|GMX)) .*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(Gesendet mit der (WEB|GMX)) .*', '', line, flags=re.I)\n", (4530, 4590), False, 'import re\n'), ((4612, 4680), 're.sub', 're.sub', (['"""\\\\b(Diese E-Mail wurde von Avast) .*"""', '""""""', 'line'], {'flags': 're.I'}), "('\\\\b(Diese E-Mail wurde von Avast) .*', '', line, flags=re.I)\n", (4618, 4680), False, 'import re\n'), ((13379, 13422), 'numpy.concatenate', 'np.concatenate', (['data_transform.label.values'], {}), '(data_transform.label.values)\n', (13393, 13422), True, 'import numpy as np\n')]
|
from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer
from hazelcast.protocol.builtin import StringCodec
from hazelcast.protocol.builtin import DataCodec
from hazelcast.protocol.builtin import ListMultiFrameCodec
# hex: 0x012700
_REQUEST_MESSAGE_TYPE = 75520
# hex: 0x012701
_RESPONSE_MESSAGE_TYPE = 75521
_REQUEST_INITIAL_FRAME_SIZE = REQUEST_HEADER_SIZE
def encode_request(name, predicate):
buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
StringCodec.encode(buf, name)
DataCodec.encode(buf, predicate, True)
return OutboundMessage(buf, True)
def decode_response(msg):
msg.next_frame()
return ListMultiFrameCodec.decode(msg, DataCodec.decode)
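# Editor's illustrative sketch (not part of the generated codec): a client's
# invocation service (not shown here) would drive this codec roughly as
# follows; `invoke` and `to_data` are hypothetical stand-ins for internals.
#
#   request = encode_request("my-map", to_data(predicate))
#   entries = decode_response(invoke(request))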
|
[
"hazelcast.protocol.builtin.DataCodec.encode",
"hazelcast.protocol.builtin.StringCodec.encode",
"hazelcast.protocol.client_message.create_initial_buffer",
"hazelcast.protocol.builtin.ListMultiFrameCodec.decode",
"hazelcast.protocol.client_message.OutboundMessage"
] |
[((459, 532), 'hazelcast.protocol.client_message.create_initial_buffer', 'create_initial_buffer', (['_REQUEST_INITIAL_FRAME_SIZE', '_REQUEST_MESSAGE_TYPE'], {}), '(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)\n', (480, 532), False, 'from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer\n'), ((537, 566), 'hazelcast.protocol.builtin.StringCodec.encode', 'StringCodec.encode', (['buf', 'name'], {}), '(buf, name)\n', (555, 566), False, 'from hazelcast.protocol.builtin import StringCodec\n'), ((571, 609), 'hazelcast.protocol.builtin.DataCodec.encode', 'DataCodec.encode', (['buf', 'predicate', '(True)'], {}), '(buf, predicate, True)\n', (587, 609), False, 'from hazelcast.protocol.builtin import DataCodec\n'), ((621, 647), 'hazelcast.protocol.client_message.OutboundMessage', 'OutboundMessage', (['buf', '(True)'], {}), '(buf, True)\n', (636, 647), False, 'from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer\n'), ((708, 757), 'hazelcast.protocol.builtin.ListMultiFrameCodec.decode', 'ListMultiFrameCodec.decode', (['msg', 'DataCodec.decode'], {}), '(msg, DataCodec.decode)\n', (734, 757), False, 'from hazelcast.protocol.builtin import ListMultiFrameCodec\n')]
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encoders for text data.
* TextEncoder: base class
* ByteTextEncoder: for ascii text
* TokenTextEncoder: with user-supplied vocabulary file
* SubwordTextEncoder: invertible
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensor2tensor.data_generators import tokenizer
import tensorflow as tf
# Reserved tokens for things like padding and EOS symbols.
PAD = '<pad>'
EOS = '<EOS>'
RESERVED_TOKENS = [PAD, EOS]
class TextEncoder(object):
"""Base class for converting from ints to/from human readable strings."""
def __init__(self, num_reserved_ids=2):
self._num_reserved_ids = num_reserved_ids
def encode(self, s):
"""Transform a human-readable string into a sequence of int ids.
The ids should be in the range [num_reserved_ids, vocab_size). Ids [0,
num_reserved_ids) are reserved.
EOS is not appended.
Args:
s: human-readable string to be converted.
Returns:
ids: list of integers
"""
return [int(w) + self._num_reserved_ids for w in s.split()]
def decode(self, ids):
"""Transform a sequence of int ids into a human-readable string.
EOS is not expected in ids.
Args:
ids: list of integers to be converted.
Returns:
s: human-readable string.
"""
decoded_ids = []
for id_ in ids:
if 0 <= id_ < self._num_reserved_ids:
decoded_ids.append(RESERVED_TOKENS[int(id_)])
else:
decoded_ids.append(id_ - self._num_reserved_ids)
return ' '.join([str(d) for d in decoded_ids])
@property
def vocab_size(self):
raise NotImplementedError()
class ByteTextEncoder(TextEncoder):
"""Encodes each byte to an id. For 8-bit strings only."""
def encode(self, s):
return [ord(c) + self._num_reserved_ids for c in s]
def decode(self, ids):
decoded_ids = []
for id_ in ids:
if 0 <= id_ < self._num_reserved_ids:
decoded_ids.append(RESERVED_TOKENS[int(id_)])
else:
decoded_ids.append(chr(id_))
return ''.join(decoded_ids)
@property
def vocab_size(self):
return 2**8 + self._num_reserved_ids
class TokenTextEncoder(TextEncoder):
"""Encoder based on a user-supplied vocabulary."""
def __init__(self, vocab_filename, reverse=False, num_reserved_ids=2):
"""Initialize from a file, one token per line."""
super(TokenTextEncoder, self).__init__(num_reserved_ids=num_reserved_ids)
self._reverse = reverse
if vocab_filename is not None:
self._load_vocab_from_file(vocab_filename)
def encode(self, sentence):
"""Converts a space-separated string of tokens to a list of ids."""
ret = [self._token_to_id[tok] for tok in sentence.strip().split()]
if self._reverse:
ret = ret[::-1]
return ret
def decode(self, ids):
if self._reverse:
ids = ids[::-1]
return ' '.join([self._safe_id_to_token(i) for i in ids])
@property
def vocab_size(self):
return len(self._id_to_token)
def _safe_id_to_token(self, idx):
return self._id_to_token.get(idx, 'ID_%d' % idx)
def _load_vocab_from_file(self, filename):
"""Load vocab from a file."""
self._token_to_id = {}
self._id_to_token = {}
for idx, tok in enumerate(RESERVED_TOKENS):
self._token_to_id[tok] = idx
self._id_to_token[idx] = tok
token_start_idx = self._num_reserved_ids
with tf.gfile.Open(filename) as f:
for i, line in enumerate(f):
idx = token_start_idx + i
tok = line.strip()
self._token_to_id[tok] = idx
self._id_to_token[idx] = tok
class SubwordTextEncoder(TextEncoder):
"""Class for breaking tokens into subtokens.
Invertibly encodes a string as a sequence of subtokens from a limited
vocabulary.
A SubwordTextEncoder is built from a corpus (so it is tailored to the text in
the corpus), and stored to a file. See text_encoder_build_subword.py.
It can then be loaded and used to encode/decode any text.
"""
def __init__(self, filename=None, num_reserved_ids=2):
"""Read from a file."""
self._tokenizer = tokenizer.Tokenizer()
if filename is not None:
self._load_from_file(filename)
super(SubwordTextEncoder, self).__init__(num_reserved_ids=num_reserved_ids)
def encode(self, raw_text):
"""Converts a string to a list of subtoken ids.
Args:
raw_text: a string.
Returns:
a list of integers in the range [0, vocab_size)
"""
return self._tokens_to_subtokens(self._tokenizer.encode(raw_text))
def decode(self, subtokens):
"""Converts a sequence of subtoken ids to a string.
Args:
subtokens: a list of integers in the range [0, vocab_size)
Returns:
a string
"""
return self._tokenizer.decode(self._subtokens_to_tokens(subtokens))
@property
def vocab_size(self):
"""The subtoken vocabulary size."""
return len(self._all_subtoken_strings)
def _tokens_to_subtokens(self, tokens):
"""Converts a list of tokens to a list of subtoken ids.
Args:
tokens: a list of strings.
Returns:
a list of integers in the range [0, vocab_size)
"""
ret = []
for token in tokens:
ret.extend(self._escaped_token_to_subtokens(self._escape_token(token)))
return ret
def _subtokens_to_tokens(self, subtokens):
"""Converts a list of subtoken ids to a list of tokens.
Args:
subtokens: a list of integers in the range [0, vocab_size)
Returns:
a list of strings.
"""
concatenated = ''.join(
[self.subtoken_to_subtoken_string(s) for s in subtokens])
split = concatenated.split('_')
return [self._unescape_token(t + '_') for t in split if t]
def subtoken_to_subtoken_string(self, subtoken):
"""Subtoken_String (string) corresponding to the given subtoken (id)."""
if (subtoken >= 0 and subtoken < self.vocab_size and
self._all_subtoken_strings[subtoken]):
return self._all_subtoken_strings[subtoken]
else:
if 0 <= subtoken < self._num_reserved_ids:
return '%s_' % RESERVED_TOKENS[subtoken]
else:
return 'ID%d_' % subtoken
def _escaped_token_to_subtokens(self, escaped_token):
"""Converts an escaped token string to a list of subtokens.
Args:
escaped_token: an escaped token
Returns:
a list of one or more integers.
"""
ret = []
pos = 0
while pos < len(escaped_token):
end = len(escaped_token)
while True:
subtoken = self._subtoken_string_to_id.get(escaped_token[pos:end], -1)
if subtoken != -1:
break
end -= 1
ret.append(subtoken)
pos = end
return ret
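  # Worked example (editor note): greedy longest-match. With the vocabulary
  # {'abc_': 9, 'ab': 7, 'c_': 8}, _escaped_token_to_subtokens('abc_')
  # returns [9]; with only {'ab': 7, 'c_': 8} it returns [7, 8].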
@classmethod
def build_to_target_size(cls,
target_size,
token_counts,
store_filename,
min_val,
max_val,
num_iterations=4):
"""Builds a SubwordTextEncoder that has `vocab_size` near `target_size`.
Uses simple recursive binary search to find a `min_count` value that most
closely matches the `target_size`.
Args:
target_size: desired vocab_size to approximate.
token_counts: a dictionary of string to int.
store_filename: a string - where to write the vocabulary.
min_val: an integer - lower bound for `min_count`.
max_val: an integer - upper bound for `min_count`.
num_iterations: an integer. how many iterations of refinement.
Returns:
a SubwordTextEncoder instance.
"""
present_count = (max_val + min_val) // 2
tf.logging.info('Trying min_count %d' % present_count)
subtokenizer = cls()
subtokenizer.build_from_token_counts(token_counts, store_filename,
present_count, num_iterations)
if min_val >= max_val or subtokenizer.vocab_size == target_size:
return subtokenizer
elif subtokenizer.vocab_size > target_size:
other_subtokenizer = cls.build_to_target_size(
target_size, token_counts, store_filename, present_count + 1, max_val,
num_iterations)
if (abs(other_subtokenizer.vocab_size - target_size) <
abs(subtokenizer.vocab_size - target_size)):
return other_subtokenizer
else:
return subtokenizer
else:
other_subtokenizer = cls.build_to_target_size(
target_size, token_counts, store_filename, min_val, present_count - 1,
num_iterations)
if (abs(other_subtokenizer.vocab_size - target_size) <
abs(subtokenizer.vocab_size - target_size)):
return other_subtokenizer
else:
return subtokenizer
def build_from_token_counts(self,
token_counts,
store_filename,
min_count,
num_iterations=4):
"""Train a SubwordTextEncoder based on a dictionary of word counts.
Args:
token_counts: a dictionary of string to int.
store_filename: a string - where to write the vocabulary.
min_count: an integer - discard subtokens with lower counts.
num_iterations: an integer. how many iterations of refinement.
"""
# We build iteratively. On each iteration, we segment all the words,
# then count the resulting potential subtokens, keeping the ones
# with high enough counts for our new vocabulary.
for i in xrange(num_iterations):
counts = {}
for token, count in six.iteritems(token_counts):
escaped_token = self._escape_token(token)
# we will count all tails of the escaped_token, starting from boundaries
# determined by our current segmentation.
if i == 0:
starts = list(range(len(escaped_token)))
else:
subtokens = self._escaped_token_to_subtokens(escaped_token)
pos = 0
starts = []
for subtoken in subtokens:
starts.append(pos)
pos += len(self.subtoken_to_subtoken_string(subtoken))
for start in starts:
for end in xrange(start + 1, len(escaped_token) + 1):
subtoken_string = escaped_token[start:end]
counts[subtoken_string] = counts.get(subtoken_string, 0) + count
# array of lists of candidate subtoken strings, by length
len_to_subtoken_strings = []
for subtoken_string, count in six.iteritems(counts):
if count < min_count or len(subtoken_string) <= 1:
continue
while len(len_to_subtoken_strings) <= len(subtoken_string):
len_to_subtoken_strings.append([])
len_to_subtoken_strings[len(subtoken_string)].append(subtoken_string)
new_subtoken_strings = []
# consider the candidates longest to shortest, so that if we accept
# a longer subtoken string, we can decrement the counts of its prefixes.
for subtoken_strings in len_to_subtoken_strings[::-1]:
for subtoken_string in subtoken_strings:
count = counts[subtoken_string]
if count < min_count:
continue
new_subtoken_strings.append((-count, subtoken_string))
for l in xrange(1, len(subtoken_string)):
counts[subtoken_string[:l]] -= count
# make sure we have all single characters.
new_subtoken_strings.extend([(-counts.get(chr(i), 0), chr(i))
for i in xrange(2**8)])
new_subtoken_strings.sort()
self._init_from_list([''] * self._num_reserved_ids +
[p[1] for p in new_subtoken_strings])
print('vocab_size = %d' % self.vocab_size)
original = 'This sentence was encoded by the SubwordTextEncoder.'
encoded = self.encode(original)
print(encoded)
print([self.subtoken_to_subtoken_string(s) for s in encoded])
decoded = self.decode(encoded)
print(decoded)
assert decoded == original
self._store_to_file(store_filename)
def _init_from_list(self, subtoken_strings):
"""Initialize from a list of subtoken strings."""
self._all_subtoken_strings = subtoken_strings
self._subtoken_string_to_id = {}
for i in xrange(len(subtoken_strings)):
subtoken_string = subtoken_strings[i]
if subtoken_string:
self._subtoken_string_to_id[subtoken_string] = i
def _load_from_file(self, filename):
"""Load from a file."""
subtoken_strings = []
with tf.gfile.Open(filename) as f:
for line in f:
subtoken_strings.append(line.strip()[1:-1].decode('string-escape'))
self._init_from_list(subtoken_strings)
def _store_to_file(self, filename):
with tf.gfile.Open(filename, 'w') as f:
for subtoken_string in self._all_subtoken_strings:
f.write('\'' + subtoken_string.encode('string-escape') + '\'\n')
def _escape_token(self, token):
r"""Translate '\'->'\\' and '_'->'\u', then append '_'.
Args:
token: a string
Returns:
escaped_token: a string
"""
return token.replace('\\', '\\\\').replace('_', '\\u') + '_'
def _unescape_token(self, escaped_token):
r"""Remove '_' from end, then translate '\\'->'\' and '\u'->'_'.
TODO(noam): There must be some better way to do this with regexps.
Args:
escaped_token: a string
Returns:
token: a string
"""
assert escaped_token[-1] == '_'
escaped_token = escaped_token[:-1]
if '\\' not in escaped_token:
return escaped_token
ret = ''
pos = 0
while pos < len(escaped_token):
if escaped_token[pos] == '\\' and pos + 1 < len(escaped_token):
if escaped_token[pos + 1] == 'u':
ret += '_'
else:
ret += escaped_token[pos + 1]
pos += 1
pos += 1
return ret
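  # Round-trip example (editor note): _escape_token('a_b') -> 'a\ub_'
  # (backslash-u encodes '_', and the trailing '_' marks the token end);
  # _unescape_token('a\ub_') recovers 'a_b'.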
@classmethod
def get_token_counts(cls, text_filepattern, corpus_max_lines):
"""Read the corpus and compute a dictionary of word counts."""
tok = tokenizer.Tokenizer()
token_counts = {}
lines_read = 0
filenames = tf.gfile.Glob(text_filepattern)
for text_filename in filenames:
with tf.gfile.Open(text_filename) as f:
for line in f:
tokens = tok.encode(line.strip())
for t in tokens:
token_counts[t] = token_counts.get(t, 0) + 1
lines_read += 1
if corpus_max_lines > 0 and lines_read > corpus_max_lines:
return token_counts
return token_counts
|
[
"tensorflow.gfile.Glob",
"tensorflow.logging.info",
"six.moves.xrange",
"tensor2tensor.data_generators.tokenizer.Tokenizer",
"tensorflow.gfile.Open",
"six.iteritems"
] |
[((4773, 4794), 'tensor2tensor.data_generators.tokenizer.Tokenizer', 'tokenizer.Tokenizer', ([], {}), '()\n', (4792, 4794), False, 'from tensor2tensor.data_generators import tokenizer\n'), ((8304, 8358), 'tensorflow.logging.info', 'tf.logging.info', (["('Trying min_count %d' % present_count)"], {}), "('Trying min_count %d' % present_count)\n", (8319, 8358), True, 'import tensorflow as tf\n'), ((10151, 10173), 'six.moves.xrange', 'xrange', (['num_iterations'], {}), '(num_iterations)\n', (10157, 10173), False, 'from six.moves import xrange\n'), ((14615, 14636), 'tensor2tensor.data_generators.tokenizer.Tokenizer', 'tokenizer.Tokenizer', ([], {}), '()\n', (14634, 14636), False, 'from tensor2tensor.data_generators import tokenizer\n'), ((14694, 14725), 'tensorflow.gfile.Glob', 'tf.gfile.Glob', (['text_filepattern'], {}), '(text_filepattern)\n', (14707, 14725), True, 'import tensorflow as tf\n'), ((4070, 4093), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['filename'], {}), '(filename)\n', (4083, 4093), True, 'import tensorflow as tf\n'), ((10219, 10246), 'six.iteritems', 'six.iteritems', (['token_counts'], {}), '(token_counts)\n', (10232, 10246), False, 'import six\n'), ((11118, 11139), 'six.iteritems', 'six.iteritems', (['counts'], {}), '(counts)\n', (11131, 11139), False, 'import six\n'), ((13129, 13152), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['filename'], {}), '(filename)\n', (13142, 13152), True, 'import tensorflow as tf\n'), ((13347, 13375), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['filename', '"""w"""'], {}), "(filename, 'w')\n", (13360, 13375), True, 'import tensorflow as tf\n'), ((14773, 14801), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['text_filename'], {}), '(text_filename)\n', (14786, 14801), True, 'import tensorflow as tf\n'), ((12127, 12141), 'six.moves.xrange', 'xrange', (['(2 ** 8)'], {}), '(2 ** 8)\n', (12133, 12141), False, 'from six.moves import xrange\n')]
|
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import glob
import sys
import os
import pickle
def extract_points(images):
'''
args:
- images: list of strings containing the filenames of the calibration image set
returns:
- mtx: camera calibration matrix
- dist: distortion coefficients
'''
obj = np.zeros((6 * 9, 3), np.float32)
obj[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)
obj_points = []
img_points = []
for filename in images:
image = cv2.imread(filename)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
ret, corners = cv2.findChessboardCorners(gray, (9, 6), None)
if ret:
obj_points.append(obj)
img_points.append(corners)
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(obj_points, img_points, gray.shape[::-1], None, None)
dist_pickle = {}
dist_pickle["mtx"] = mtx
dist_pickle["dist"] = dist
pickle.dump(dist_pickle, open("dist_pickle.p", "wb"))
return mtx, dist
def camera_cal(image, mtx, dist, filename = None, save = False):
    '''
    args:
        - image: image array to undistort (ignored when `filename` is given)
        - mtx: camera matrix from `extract_points()`, or loaded from saved file
        - dist: distortion coefficients from `extract_points()`, or loaded from saved file
        - filename: optional filename (paths also accepted); when given, the image is read from disk
    returns:
        - dst: undistorted image
    '''
if filename:
image = mpimg.imread(filename)
# undistort image
dst = cv2.undistort(image, mtx, dist, None, mtx)
# write to new image for checking purposes
    if save and filename:
split = filename.split('.')
new_filename = filename.split('.')[-2].split('/')[-1]
cv2.imwrite("../undistorted/{}_undist.{}".format(new_filename, split[-1]), dst)
return dst
if __name__ == "__main__":
if len(sys.argv) > 1:
# preferably a path without a trailing '/'
image_list = glob.glob(sys.argv[1] + "/*")
else:
image_list = glob.glob("../camera_cal/*")
mtx, dist = extract_points(image_list)
os.makedirs("../undistorted/", exist_ok = True)
    dst = camera_cal(None, mtx, dist, filename = "../camera_cal/calibration1.jpg")
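    # Editor's illustrative sketch (not part of the original script): the
    # calibration can be reloaded later from the pickle written above; the
    # test image name is hypothetical.
    #   with open("dist_pickle.p", "rb") as f:
    #       cal = pickle.load(f)
    #   undist = camera_cal(cv2.imread("test.jpg"), cal["mtx"], cal["dist"])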
|
[
"cv2.findChessboardCorners",
"matplotlib.image.imread",
"os.makedirs",
"cv2.cvtColor",
"numpy.zeros",
"cv2.imread",
"cv2.calibrateCamera",
"glob.glob",
"cv2.undistort"
] |
[((398, 430), 'numpy.zeros', 'np.zeros', (['(6 * 9, 3)', 'np.float32'], {}), '((6 * 9, 3), np.float32)\n', (406, 430), True, 'import numpy as np\n'), ((844, 917), 'cv2.calibrateCamera', 'cv2.calibrateCamera', (['obj_points', 'img_points', 'gray.shape[::-1]', 'None', 'None'], {}), '(obj_points, img_points, gray.shape[::-1], None, None)\n', (863, 917), False, 'import cv2\n'), ((1542, 1584), 'cv2.undistort', 'cv2.undistort', (['image', 'mtx', 'dist', 'None', 'mtx'], {}), '(image, mtx, dist, None, mtx)\n', (1555, 1584), False, 'import cv2\n'), ((2116, 2161), 'os.makedirs', 'os.makedirs', (['"""../undistorted/"""'], {'exist_ok': '(True)'}), "('../undistorted/', exist_ok=True)\n", (2127, 2161), False, 'import os\n'), ((571, 591), 'cv2.imread', 'cv2.imread', (['filename'], {}), '(filename)\n', (581, 591), False, 'import cv2\n'), ((607, 646), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (619, 646), False, 'import cv2\n'), ((671, 716), 'cv2.findChessboardCorners', 'cv2.findChessboardCorners', (['gray', '(9, 6)', 'None'], {}), '(gray, (9, 6), None)\n', (696, 716), False, 'import cv2\n'), ((1477, 1499), 'matplotlib.image.imread', 'mpimg.imread', (['filename'], {}), '(filename)\n', (1489, 1499), True, 'import matplotlib.image as mpimg\n'), ((1976, 2005), 'glob.glob', 'glob.glob', (["(sys.argv[1] + '/*')"], {}), "(sys.argv[1] + '/*')\n", (1985, 2005), False, 'import glob\n'), ((2038, 2066), 'glob.glob', 'glob.glob', (['"""../camera_cal/*"""'], {}), "('../camera_cal/*')\n", (2047, 2066), False, 'import glob\n')]
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import time
try:
from unittest import mock
except ImportError:
import mock
import pytest
from azure.common.credentials import _CliCredentials
import azure.common.credentials
class MockCliCredentials:
def _token_retriever(self):
return "NOTUSED", "TOKEN", {'expiresIn': 42}
def signed_session(self, session=None):
return session
class MockCliProfile:
def __init__(self):
self.received_resource = None
def get_login_credentials(self, resource):
self.received_resource = resource
return MockCliCredentials(), "NOTUSED", "NOTUSED"
def test_cli_credentials_mgmt():
cli_profile = MockCliProfile()
cred = _CliCredentials(cli_profile, "http://resource.id")
# Mgmt scenario
session = cred.signed_session("session")
assert cli_profile.received_resource == "http://resource.id"
assert session == "session"
# Trying to mock azure-core not here
with mock.patch('azure.common.credentials._AccessToken', None):
# Should not crash
cred.signed_session("session")
def test_cli_credentials_accesstoken():
cli_profile = MockCliProfile()
cred = _CliCredentials(cli_profile, "http://resource.id")
# Track2 scenario
access_token = cred.get_token("http://resource.id/.default")
assert cli_profile.received_resource == "http://resource.id"
assert access_token.token == "TOKEN"
assert access_token.expires_on <= int(time.time() + 42)
access_token = cred.get_token("http://resource.newid")
assert cli_profile.received_resource == "http://resource.newid"
# Trying to mock azure-core not here
with mock.patch('azure.common.credentials._AccessToken', None):
with pytest.raises(ImportError):
cred.get_token("http://resource.yetid")
|
[
"pytest.raises",
"azure.common.credentials._CliCredentials",
"mock.patch",
"time.time"
] |
[((1007, 1057), 'azure.common.credentials._CliCredentials', '_CliCredentials', (['cli_profile', '"""http://resource.id"""'], {}), "(cli_profile, 'http://resource.id')\n", (1022, 1057), False, 'from azure.common.credentials import _CliCredentials\n'), ((1487, 1537), 'azure.common.credentials._CliCredentials', '_CliCredentials', (['cli_profile', '"""http://resource.id"""'], {}), "(cli_profile, 'http://resource.id')\n", (1502, 1537), False, 'from azure.common.credentials import _CliCredentials\n'), ((1272, 1329), 'mock.patch', 'mock.patch', (['"""azure.common.credentials._AccessToken"""', 'None'], {}), "('azure.common.credentials._AccessToken', None)\n", (1282, 1329), False, 'import mock\n'), ((1971, 2028), 'mock.patch', 'mock.patch', (['"""azure.common.credentials._AccessToken"""', 'None'], {}), "('azure.common.credentials._AccessToken', None)\n", (1981, 2028), False, 'import mock\n'), ((2043, 2069), 'pytest.raises', 'pytest.raises', (['ImportError'], {}), '(ImportError)\n', (2056, 2069), False, 'import pytest\n'), ((1774, 1785), 'time.time', 'time.time', ([], {}), '()\n', (1783, 1785), False, 'import time\n')]
|